Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): incomplete fragment (original indentation lost). Appears to be
# the tail of an MDF v4 append(): finalize the ChannelGroup / DataGroup records
# and materialize the sample block — TODO confirm against the full function.
ch_cntr += 1
# channel group
kargs = {
'cycles_nr': cycles_nr,
'samples_byte_nr': offset,
}
gp['channel_group'] = ChannelGroup(**kargs)
# total raw data size = cycle count * record byte size
gp['size'] = cycles_nr * offset
# data group
gp['data_group'] = DataGroup()
# data block
if PYVERSION == 2:
# numpy on Python 2 rejects unicode field names; coerce them to bytes
types = fix_dtype_fields(types)
types = dtype(types)
gp['sorted'] = True
gp['types'] = types
gp['parents'] = parents
# build one structured record array from the per-channel columns
samples = fromarrays(fields, dtype=types)
block = samples.tostring()
if self.load_measured_data:
# keep the raw record block in memory
gp['data_location'] = v4c.LOCATION_MEMORY
gp['data_block'] = DataBlock(data=block)
else:
# spill samples to a shared temporary file, created lazily on first use
gp['data_location'] = v4c.LOCATION_TEMPORARY_FILE
if self._tempfile is None:
self._tempfile = TemporaryFile()
# NOTE(review): incomplete fragment. Looks like the composed-channel branch of
# get(): rebuild an N-D array channel from its referenced scalar channels —
# TODO confirm; `shape`, `dep`, `channel` are defined outside this snippet.
record_shape = tuple(shape)
arrays = [
self.get(group=dg_nr, index=ch_nr, samples_only=True)
for ch_nr, dg_nr in dep.referenced_channels
]
if cycles_nr:
# prepend the time axis so each cycle holds one `record_shape` element
shape.insert(0, cycles_nr)
# interleave the referenced channels column-wise, then restore the array shape
vals = column_stack(arrays).flatten().reshape(tuple(shape))
arrays = [vals, ]
# single structured field named after the channel, one record per cycle
types = [(channel.name, vals.dtype, record_shape), ]
if PYVERSION == 2:
types = fix_dtype_fields(types)
types = dtype(types)
vals = fromarrays(arrays, dtype=types)
# NOTE(review): incomplete fragment — this `else:` belongs to a condition not
# visible here. Lazily computes and caches the record layout for the group.
else:
# get channel values
try:
# EAFP: reuse the cached record layout if it was computed before
parents, dtypes = grp['parents'], grp['types']
except KeyError:
grp['parents'], grp['types'] = self._prepare_record(grp)
parents, dtypes = grp['parents'], grp['types']
try:
# map this channel to its parent record field and bit offset
parent, bit_offset = parents[ch_nr]
except KeyError:
# channel has no parent field (e.g. unaligned/composed) — caller must
# fall back to a slower extraction path; presumably handled below
parent, bit_offset = None, None
# NOTE(review): incomplete fragment (dangling `else:`). Tail of an MDF v2/v3
# append() using v23c constants — TODO confirm against the full function.
else:
# pre-3.30 files use the shorter channel group block layout
kargs['block_len'] = v23c.CG_PRE_330_BLOCK_SIZE
gp['channel_group'] = ChannelGroup(**kargs)
gp['channel_group'].comment = acquisition_info
# `offset` is in bits here; >> 3 converts to record bytes
gp['size'] = cycles_nr * (offset >> 3)
# data group
if self.version >= '3.20':
block_len = v23c.DG_POST_320_BLOCK_SIZE
else:
block_len = v23c.DG_PRE_320_BLOCK_SIZE
gp['data_group'] = DataGroup(block_len=block_len)
# data block
if PYVERSION == 2:
# v2/v3 text is latin-1; coerce unicode field names for numpy on Py2
types = fix_dtype_fields(types, 'latin-1')
types = dtype(types)
gp['types'] = types
gp['parents'] = parents
gp['sorted'] = True
if signals:
samples = fromarrays(fields, dtype=types)
else:
# no signals -> empty sample block
samples = array([])
block = samples.tostring()
if memory == 'full':
gp['data_location'] = v23c.LOCATION_MEMORY
kargs = {'data': block}
# NOTE(review): incomplete fragment — L79-80 are the trailing members of a
# tuple/condition started above this snippet. Appears to extend the extra
# groups created for CANopen / structure-composition signals with new samples.
v23c.SIGNAL_TYPE_CANOPEN,
v23c.SIGNAL_TYPE_STRUCTURE_COMPOSITION):
new_group_offset += 1
new_gp = self.groups[index + new_group_offset]
new_fields = []
new_types = []
names = signal.dtype.names
for name in names:
new_fields.append(signal[name])
# NOTE(review): appends the WHOLE structured dtype for each field;
# signal[name].dtype would be the per-field dtype — verify intended
new_types.append(('', signal.dtype))
# data block
if PYVERSION == 2:
new_types = fix_dtype_fields(new_types)
new_types = dtype(new_types)
samples = fromarrays(new_fields, dtype=new_types)
samples = samples.tostring()
# grow the group's bookkeeping by the appended raw bytes
record_size = new_gp['channel_group']['samples_byte_nr']
extended_size = cycles_nr * record_size
new_gp['size'] += extended_size
if memory == 'full':
if samples:
# concatenate onto the in-memory data block
data = new_gp['data_block']['data'] + samples
new_gp['data_block'] = DataBlock(data=data)
else:
if samples:
# append at end of the backing temporary file
stream.seek(0, 2)
# NOTE(review): incomplete fragment — tail of a _prepare_record()-style loop
# (the `break` targets a loop above this snippet). Builds the numpy record
# dtype and the channel->parent-field map for a group.
max_overlapping = next_byte_aligned_position - start_offset
if max_overlapping >= bit_count:
# channel fits inside the current parent field; record its offset
parents[original_index] = (
current_parent,
start_offset - parent_start_offset,
)
if next_byte_aligned_position > record_size:
# channel claims bytes past the record end — stop processing
break
# pad any unclaimed trailing bytes so the dtype matches the record size
# (>> 3 suggests record_size/next_byte_aligned_position are in bits here)
gap = (record_size - next_byte_aligned_position) >> 3
if gap:
dtype_pair = ('', 'a{}'.format(gap))
types.append(dtype_pair)
if PYVERSION == 2:
types = fix_dtype_fields(types)
return parents, dtype(types)
# NOTE(review): incomplete fragment — bit-granular variant of the
# _prepare_record() tail (presumably MDF4: note the invalidation bytes).
max_overlapping_size = (next_byte_aligned_position - start_offset) * 8
needed_size = bit_offset + bit_count
if max_overlapping_size >= needed_size:
# channel fits in the parent field; store its total bit offset
parents[original_index] = current_parent, ((start_offset - parent_start_offset) << 3) + bit_offset
if next_byte_aligned_position > record_size:
# channel extends past the record — abandon the sorted-record layout
break
# byte gap between last mapped channel and the record end
gap = record_size - next_byte_aligned_position
if gap > 0:
dtype_pair = '', 'a{}'.format(gap)
types.append(dtype_pair)
# trailing invalidation bytes are kept as one opaque field
dtype_pair = 'invalidation_bytes', 'a{}'.format(invalidation_bytes_nr)
types.append(dtype_pair)
if PYVERSION == 2:
types = fix_dtype_fields(types)
return parents, dtype(types)
# NOTE(review): incomplete fragment — L135-136 close a `kargs` dict literal
# opened above this snippet. Tail of an MDF v3 append() using v3c constants.
'samples_byte_nr': offset >> 3,
}
gp['channel_group'] = ChannelGroup(**kargs)
gp['channel_group']['ch_nr'] = ch_cntr
# `offset` is in bits; >> 3 converts to record bytes
gp['size'] = cycles_nr * (offset >> 3)
# data group
if self.version in ('3.20', '3.30'):
block_len = v3c.DG32_BLOCK_SIZE
else:
block_len = v3c.DG31_BLOCK_SIZE
gp['data_group'] = DataGroup(block_len=block_len)
# data block
if PYVERSION == 2:
# numpy on Python 2 rejects unicode field names
types = fix_dtype_fields(types)
types = dtype(types)
gp['types'] = types
gp['parents'] = parents
gp['sorted'] = True
samples = fromarrays(fields, dtype=types)
# NOTE(review): the matching except clause for this `try` is outside the
# visible snippet
try:
block = samples.tostring()
if memory == 'full':
gp['data_location'] = v3c.LOCATION_MEMORY
kargs = {'data': block}
gp['data_block'] = DataBlock(**kargs)
else:
gp['data_location'] = v3c.LOCATION_TEMPORARY_FILE
# NOTE(review): incomplete fragment — composed-channel branch of get(), very
# similar to the earlier snippet but passing raw/data through and unpacking
# the (samples, invalidation) tuple via [0]; ends mid Signal(...) call.
record_shape = tuple(shape)
arrays = [
self.get(group=dg_nr, index=ch_nr, samples_only=True, raw=raw, data=original_data)[0]
for ch_nr, dg_nr in dep.referenced_channels
]
if cycles_nr:
# prepend the time axis
shape.insert(0, cycles_nr)
vals = column_stack(arrays).flatten().reshape(tuple(shape))
arrays = [vals, ]
types = [(channel.name, vals.dtype, record_shape), ]
if PYVERSION == 2:
types = fix_dtype_fields(types, 'latin-1')
types = dtype(types)
vals = fromarrays(arrays, dtype=types)
if not samples_only or raster:
timestamps = self.get_master(gp_nr, original_data)
if raster and len(timestamps):
# resample onto a uniform time base; presumably `t` feeds an
# interpolation step below this snippet — TODO confirm
t = arange(
timestamps[0],
timestamps[-1],
raster,
)
vals = Signal(
vals,
timestamps,
# NOTE(review): incomplete fragment — byte-for-byte duplicate of the earlier
# _prepare_record() tail (L105-119 in this scrape); likely the same function
# from a sibling module version.
max_overlapping = next_byte_aligned_position - start_offset
if max_overlapping >= bit_count:
# channel fits inside the current parent field; record its offset
parents[original_index] = (
current_parent,
start_offset - parent_start_offset,
)
if next_byte_aligned_position > record_size:
# channel claims bytes past the record end — stop processing
break
# pad trailing unclaimed bytes so the dtype spans the full record
gap = (record_size - next_byte_aligned_position) >> 3
if gap:
dtype_pair = ('', 'a{}'.format(gap))
types.append(dtype_pair)
if PYVERSION == 2:
types = fix_dtype_fields(types)
return parents, dtype(types)
# NOTE(review): incomplete fragment — L202-203 close a `kargs` dict opened
# above. Finalizes a NEWLY created v2/v3 group (mirrors the earlier v23c
# append() tail but writing into `new_gp`).
'ch_nr': new_ch_cntr,
}
new_gp['channel_group'] = ChannelGroup(**kargs)
new_gp['channel_group'].comment = channel_group_comment
# `new_offset` is in bits; >> 3 converts to record bytes
new_gp['size'] = cycles_nr * (new_offset >> 3)
# data group
if self.version >= '3.20':
block_len = v23c.DG_POST_320_BLOCK_SIZE
else:
block_len = v23c.DG_PRE_320_BLOCK_SIZE
new_gp['data_group'] = DataGroup(block_len=block_len)
# data block
if PYVERSION == 2:
new_types = fix_dtype_fields(new_types)
new_types = dtype(new_types)
new_gp['types'] = new_types
new_gp['parents'] = new_parents
new_gp['sorted'] = True
samples = fromarrays(new_fields, dtype=new_types)
# NOTE(review): matching except clause is outside the visible snippet
try:
block = samples.tostring()
if memory == 'full':
new_gp['data_location'] = v23c.LOCATION_MEMORY
kargs = {'data': block}
new_gp['data_block'] = DataBlock(**kargs)
else:
new_gp['data_location'] = v23c.LOCATION_TEMPORARY_FILE