How to use the nptdms.types.String class in npTDMS

To help you get started, we’ve selected a few npTDMS examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github adamreeve / npTDMS / nptdms / base_segment.py View on Github external
def read_property(f, endianness="<"):
    """Read one (name, value) property pair from a segment's metadata.

    :param f: File-like object positioned at the start of the property.
    :param endianness: Struct endianness prefix, ``"<"`` (default) or ``">"``.
    :return: Tuple of ``(property_name, property_value)``.
    """
    name = types.String.read(f, endianness)
    # The property value is preceded by a Uint32 type id that selects the
    # reader for the value itself.
    type_id = types.Uint32.read(f, endianness)
    data_type = types.tds_data_types[type_id]
    value = data_type.read(f, endianness)
    log.debug("Property '%s' = %r", name, value)
    return name, value
github adamreeve / npTDMS / nptdms / channel_data.py View on Github external
def __init__(self, channel):
    """Initialise new data receiver for a TDMS object
    """
    # String channels are stored as numpy object arrays; other types
    # leave the dtype undetermined here.
    self._dtype = np.dtype('O') if channel.data_type == types.String else None
    self._data = []
    self.scaler_data = {}
github adamreeve / npTDMS / nptdms / tdms.py View on Github external
def _raw_data_dtype(self):
        """Return the numpy dtype used for this channel's raw data.

        Strings are stored as numpy object arrays; timestamps use a
        dedicated dtype (see truncation note below).
        """
        if self.data_type is types.String:
            return np.dtype('O')
        elif self.data_type is types.TimeStamp:
            # NOTE(review): the dtype string literal on the next line was
            # cut off by extraction (unterminated quote) — consult the
            # upstream nptdms source for the full value before relying on
            # this snippet.
            return np.dtype('
github adamreeve / npTDMS / nptdms / tdms_segment.py View on Github external
# NOTE(review): fragment of a larger method — the enclosing ``def`` is not
# visible here and the first line's leading indentation was stripped by
# extraction; the deep indents below belong to the original method body.
# Reject fixed-size-unknown types: only String may have ``size`` of None,
# since its total byte length is read separately below.
if (self.data_type.size is None and
                self.data_type != types.String):
            raise ValueError(
                "Unsupported data type: %r" % self.data_type)

        # Read data dimension
        dimension = types.Uint32.read(f, self.endianness)
        # In TDMS version 2.0, 1 is the only valid value for dimension
        if dimension != 1:
            raise ValueError("Data dimension is not 1")

        # Read number of values
        self.number_values = types.Uint64.read(f, self.endianness)

        # Variable length data types have total size
        if self.data_type in (types.String,):
            # Strings: total byte size is stored explicitly in the file.
            self.data_size = types.Uint64.read(f, self.endianness)
        else:
            # Fixed-size types: total size is count * per-value size.
            self.data_size = self.number_values * self.data_type.size

        log.debug(
            "Object number of values in segment: %d", self.number_values)
github adamreeve / npTDMS / nptdms / export / hdf_export.py View on Github external
# NOTE(review): fragment of a larger HDF export function — it starts and
# ends mid-function (the enclosing ``def`` and the code following the
# trailing "# Set data" comment are not visible here).
container_group.attrs[property_name] = _hdf_attr_value(property_value)

    # Now iterate through groups and channels,
    # writing the properties and creating data sets
    datasets = {}
    for group in tdms_file.groups():
        # Write the group's properties
        container_group.create_group(group.name)
        for prop_name, prop_value in group.properties.items():
            container_group[group.name].attrs[prop_name] = _hdf_attr_value(prop_value)

        # Write properties and data for each channel
        for channel in group.channels():
            channel_key = group.name + '/' + channel.name

            if channel.data_type is types.String:
                # Encode as variable length UTF-8 strings
                datasets[channel.path] = container_group.create_dataset(
                    channel_key, (len(channel),), dtype=h5py.string_dtype())
            elif channel.data_type is types.TimeStamp:
                # Timestamps are represented as fixed length ASCII strings
                # because HDF doesn't natively support timestamps
                datasets[channel.path] = container_group.create_dataset(
                    channel_key, (len(channel),), dtype='S27')
            else:
                # Numeric channels map directly onto their numpy dtype.
                datasets[channel.path] = container_group.create_dataset(
                    channel_key, (len(channel),), dtype=channel.dtype)

            for prop_name, prop_value in channel.properties.items():
                container_group[channel_key].attrs[prop_name] = _hdf_attr_value(prop_value)

    # Set data