How to use the h5pyd._hl.h5type.Reference class in h5pyd

To help you get started, we’ve selected a few h5pyd examples, based on popular ways it is used in public projects.

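Before the project snippets, here is a minimal end-to-end sketch of how a Reference is typically created and resolved through h5pyd's h5py-compatible API. The domain path and object names are hypothetical, and connection settings (endpoint, credentials) are assumed to come from your environment or ~/.hscfg.

import h5pyd

# hypothetical HSDS domain; adjust to a domain you own
with h5pyd.File("/home/myuser/example.h5", "a") as f:
    grp = f.require_group("g1")
    dset = f.create_dataset("data", shape=(10,), dtype="f4")

    ref = dset.ref                 # every high-level object exposes .ref (base.py snippet below)
    grp.attrs["target"] = ref      # stored using special_dtype(ref=Reference) (attrs.py snippet below)
    target = f[ref]                # Group.__getitem__ resolves References (group.py snippet below)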

github HDFGroup / h5pyd / h5pyd / _hl / group.py View on Github external
            # crude check: length plus collection prefix (no stricter validation, like checking for hyphens in the right places)
            if isinstance(name, str) and len(name) >= 38:
                if name.startswith("groups/") or name.startswith("g-"):
                    return True
                elif name.startswith("datatypes/") or name.startswith("t-"):
                    return True
                elif name.startswith("datasets/") or name.startswith("d-"):
                    return True
                else:
                    return False
            else:
                return False


        tgt = None
        if isinstance(name, h5type.Reference):
            tgt = name.objref()  # weak reference to ref object
            if tgt is not None:
                return tgt  # ref'd object has not been deleted
            if isinstance(name.id, GroupID):
                tgt = getObjByUuid(name.id.uuid, collection_type="groups")
            elif isinstance(name.id, DatasetID):
                tgt = getObjByUuid(name.id.uuid, collection_type="datasets")
            elif isinstance(name.id, TypeID):
                tgt = getObjByUuid(name.id.uuid, collection_type="datatypes")
            else:
                raise IOError("Unexpected Error - ObjectID type: " + name.id.__class__.__name__)
            return tgt

        if isUUID(name):
            tgt = getObjByUuid(name)
            return tgt
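
For context, here is a minimal sketch of the two lookup paths handled above; object names are hypothetical and reuse the intro sketch.

# continuing the hypothetical file from the intro sketch
ref = f["g1"].ref
obj = f[ref]          # Reference path: objref() first, then getObjByUuid() on the ref's id

# UUID-style strings (e.g. "g-..." or "groups/...") are accepted as well via isUUID();
# the exact string form of an object's id is an implementation detail of the server.
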
github HDFGroup / h5pyd / h5pyd / _hl / h5type.py View on Github external
raise TypeError("Array Type base type must be integer, float, or string")

        baseType = createDataType(arrayBaseType)
        metadata = None
        if baseType.metadata:
            metadata = dict(baseType.metadata)
            dtRet = np.dtype(dims+baseType.str, metadata=metadata)
        else:
            dtRet = np.dtype(dims+baseType.str)

        return dtRet  # return the constructed array type
    elif typeClass == 'H5T_REFERENCE':
        if 'base' not in typeItem:
            raise KeyError("'base' not provided")
        if typeItem['base'] == 'H5T_STD_REF_OBJ':
            dtRet = special_dtype(ref=Reference)
        elif typeItem['base'] == 'H5T_STD_REF_DSETREG':
            dtRet = special_dtype(ref=RegionReference)
        else:
            raise TypeError("Invalid base type for reference type")
    elif typeClass == 'H5T_ENUM':
        if 'base' not in typeItem:
            raise KeyError("Expected 'base' to be provided for enum type")
        base_json = typeItem["base"]
        if 'class' not in base_json:
            raise KeyError("Expected class field in base type")
        if base_json['class'] != 'H5T_INTEGER':
            raise TypeError("Only integer base types can be used with enum type")
        if 'mapping' not in typeItem:
            raise KeyError("'mapping' not provided for enum type")
        mapping = typeItem["mapping"]
        if len(mapping) == 0:
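
The reference branch above maps the server's JSON type description onto a NumPy special dtype. A small round-trip sketch, assuming h5pyd re-exports special_dtype, check_dtype, Reference and RegionReference at the package level as h5py does:

from h5pyd import special_dtype, check_dtype, Reference, RegionReference

dt_obj = special_dtype(ref=Reference)            # what the H5T_STD_REF_OBJ branch produces
assert dt_obj.kind == 'O'                        # object dtype carrying 'ref' metadata
assert check_dtype(ref=dt_obj) is Reference

dt_reg = special_dtype(ref=RegionReference)      # H5T_STD_REF_DSETREG branch
assert check_dtype(ref=dt_reg) is RegionReference
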
github HDFGroup / h5pyd / h5pyd / _hl / attrs.py View on Github external
            Name of the new attribute (required)
        data
            An array to initialize the attribute (required)
        shape
            Shape of the attribute.  Overrides data.shape if both are
            given, in which case the total number of points must be unchanged.
        dtype
            Data type of the attribute.  Overrides data.dtype if both
            are given.
        """
        self._parent.log.info("attrs.create({})".format(name))

        # First, make sure we have a NumPy array.  We leave the data
        # type conversion for HDF5 to perform.
        if isinstance(data, Reference):
            dtype = special_dtype(ref=Reference)
        data = numpy.asarray(data, dtype=dtype, order='C')

        if shape is None:
            shape = data.shape

        use_htype = None    # If a committed type is given, we must use it
                            # in the call to h5a.create.

        if isinstance(dtype, Datatype):
            use_htype = dtype.id
            dtype = dtype.dtype

            # Special case if data are complex numbers
            if (data.dtype.kind == 'c' and
                (dtype.names is None or
                    dtype.names != ('r', 'i') or
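
A short usage sketch of the branch above: when the data is a Reference, create() switches the dtype to the reference type automatically. Object names are hypothetical and reuse the intro sketch.

dset = f["data"]
f.attrs.create("source", data=dset.ref)   # Reference data -> special_dtype(ref=Reference) above
f["g1"].attrs["target"] = dset.ref        # item assignment typically routes through the same create()
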
github HDFGroup / h5pyd / h5pyd / _hl / dataset.py View on Github external
    if isinstance(dtype, Datatype):
        # Named types are used as-is
        type_json = dtype.id.type_json

    else:
        # Validate dtype
        if dtype is None:
            dtype = numpy.dtype("=f4")
        else:
            dtype = numpy.dtype(dtype)

        if dtype.kind == 'O' and dtype.metadata and 'ref' in dtype.metadata:
            type_json = {}
            type_json["class"] = "H5T_REFERENCE"
            meta_type = dtype.metadata['ref']
            if meta_type is Reference:
                type_json["base"] = "H5T_STD_REF_OBJ"
            elif meta_type is RegionReference:
                type_json["base"] = "H5T_STD_REF_DSETREG"
            else:
                errmsg = "Unexpected metadata type"
                raise ValueError(errmsg)
        else:
            type_json = getTypeItem(dtype)
            #tid = h5t.py_create(dtype, logical=1)
    body['type'] = type_json

    # Legacy
    if compression is True:
        if compression_opts is None:
            compression_opts = 4
        compression = 'gzip'
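
A sketch of creating a reference-typed dataset, which routes through the type-JSON code above; the dataset name is hypothetical.

from h5pyd import special_dtype, Reference

dt = special_dtype(ref=Reference)
# create_dataset() sends {"class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ"} as the type
refs = f.create_dataset("object_refs", shape=(3,), dtype=dt)
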
github HDFGroup / h5pyd / h5pyd / _hl / base.py View on Github external
    @property
    def ref(self):
        """ An (opaque) HDF5 reference to this object """
        return Reference(self)
        # return h5r.create(self.id, b'.', h5r.OBJECT)
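
The property simply wraps the object in a Reference. A quick sketch, with hypothetical object names:

from h5pyd._hl.h5type import Reference

ref1 = f["g1"].ref          # via the property above
ref2 = Reference(f["g1"])   # direct construction; the property does exactly this
assert isinstance(ref1, Reference)
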
github HDFGroup / h5pyd / h5pyd / _hl / table.py View on Github external
        if self._item_size != "H5T_VARIABLE":
            use_base64 = True   # may need to set this to false below for some types
        else:
            use_base64 = False  # never use for variable length types
            self.log.debug("Using JSON since type is variable length")

        val = rows  # for compatibility with dataset code...
        # get the val dtype if we're passed a numpy array
        val_dtype = None
        try:
            val_dtype = val.dtype
        except AttributeError:
            pass # not a numpy object, just leave dtype as None

        if isinstance(val, Reference):
            # h5pyd References are just strings
            val = val.tolist()

        # Generally we try to avoid converting the arrays on the Python
        # side.  However, for compound literals this is unavoidable.
        # For h5pyd, do extra check and convert type on client side for efficiency
        vlen = check_dtype(vlen=self.dtype)
        if vlen is not None and vlen not in (bytes, str):
            self.log.debug("converting ndarray for vlen data")
            try:
                val = numpy.asarray(val, dtype=vlen)
            except ValueError:
                try:
                    val = numpy.array([numpy.array(x, dtype=vlen)
                                       for x in val], dtype=self.dtype)
                except ValueError:
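
The Reference-to-string conversion above (val.tolist()) is how references travel to the server. A sketch; the exact serialized form is an implementation detail inferred from the isUUID() prefixes in the group.py snippet.

ref = f["g1"].ref
serialized = ref.tolist()   # what the table/dataset writers send
print(serialized)           # expected to resemble ['groups/<uuid>'] or similar
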
github HDFGroup / h5pyd / h5pyd / _hl / dataset.py View on Github external
self.log.debug("arg: [{},...] type: {}".format(arg[0], type(arg)))

        # Sort field indices from the rest of the args.
        names = tuple(x for x in args if isinstance(x, str))
        args = tuple(x for x in args if not isinstance(x, str))
         
        new_dtype = getattr(self._local, 'astype', None)
        if new_dtype is not None:
            new_dtype = readtime_dtype(new_dtype, names)
        else:
            # This is necessary because in the case of array types, NumPy
            # discards the array information at the top level.
            new_dtype = readtime_dtype(self.dtype, names)
            self.log.debug("new_dtype: {}".format(new_dtype))
        if new_dtype.kind == 'S' and check_dtype(ref=self.dtype):
            new_dtype = special_dtype(ref=Reference)

        mtype = new_dtype


        # === Special-case region references ====
        """
        TODO
        if len(args) == 1 and isinstance(args[0], h5r.RegionReference):

            obj = h5r.dereference(args[0], self.id)
            if obj != self.id:
                raise ValueError("Region reference must point to this dataset")

            sid = h5r.get_region(args[0], self.id)
            mshape = sel.guess_shape(sid)
            if mshape is None:
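
When a dataset's type is a reference type, the read path above restores special_dtype(ref=Reference) even though the server transfers strings. A read-back sketch, assuming a reference was already written at index 0 (see the __setitem__ sketch further below):

refs = f["object_refs"]
first = refs[0]       # comes back reference-typed rather than as a plain byte string
target = f[first]     # resolvable by Group.__getitem__ (group.py snippet above)
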
github HDFGroup / h5pyd / h5pyd / _hl / dataset.py View on Github external
        #if self._item_size != "H5T_VARIABLE":
        use_base64 = True   # may need to set this to false below for some types
        #else:
        #    use_base64 = False  # never use for variable length types
        #    self.log.debug("Using JSON since type is variable length")

        args = args if isinstance(args, tuple) else (args,)

        # get the val dtype if we're passed a numpy array
        val_dtype = None
        try:
            val_dtype = val.dtype
        except AttributeError:
            pass # not a numpy object, just leave dtype as None

        if isinstance(val, Reference):
            # h5pyd References are just strings
            val = val.tolist()

        # Sort field indices from the slicing
        names = tuple(x for x in args if isinstance(x, str))
        args = tuple(x for x in args if not isinstance(x, str))
         
        # Generally we try to avoid converting the arrays on the Python
        # side.  However, for compound literals this is unavoidable.
        # For h5pyd, do extra check and convert type on client side for efficiency
        vlen = check_dtype(vlen=self.dtype)
        if vlen is not None and vlen not in (bytes, str):
            self.log.debug("converting ndarray for vlen data")
            try:
                val = numpy.asarray(val, dtype=vlen)
            except ValueError:
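
And the matching write path: assigning a single Reference goes through the val.tolist() conversion above. A sketch reusing the earlier hypothetical objects:

refs = f["object_refs"]
refs[0] = f["g1"].ref   # converted to its string form on the way to the server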