Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# Fragment (enclosing method not visible; indentation stripped): store the
# record layout and serialized sample bytes for one channel group, either in
# memory or spilled to a shared temporary file.
# NOTE(review): `parents`, `fields`, `types`, `gp`, `blocks` come from code
# outside this view — verify against the full method.
gp['parents'] = parents
samples = fromarrays(fields, dtype=types)
# NOTE(review): numpy's `tostring()` is a deprecated alias of `tobytes()`.
block = samples.tostring()
if self.load_measured_data:
# Keep the raw samples in memory, wrapped in a DataBlock.
gp['data_location'] = v4c.LOCATION_MEMORY
gp['data_block'] = DataBlock(data=block)
else:
# Spill the samples to a temporary file and remember the offset so the
# data group can point at it later.
gp['data_location'] = v4c.LOCATION_TEMPORARY_FILE
if self._tempfile is None:
self._tempfile = TemporaryFile()
# Append at end-of-file; the current position is this block's address.
self._tempfile.seek(0, v4c.SEEK_END)
data_address = self._tempfile.tell()
gp['data_group']['data_block_addr'] = data_address
self._tempfile.write(bytes(block))
blocks.append(gp['channel_group'])
# Fragment: patch ChannelArrayBlock scale-axis references so that each axis
# entry points at the final addresses of the referenced data group, channel
# group and channel, then flush every queued block to the output stream.
for gp in self.groups:
for dep_list in gp['channel_dependencies']:
if dep_list:
# Only dependency lists made entirely of ChannelArrayBlock objects
# carry the 'scale_axis_*' address fields patched below.
if all(isinstance(dep, ChannelArrayBlock) for dep in dep_list):
for dep in dep_list:
for i, (ch_nr, gp_nr) in enumerate(dep.referenced_channels):
grp = self.groups[gp_nr]
ch = grp['channels'][ch_nr]
dep['scale_axis_{}_dg_addr'.format(i)] = grp['data_group'].address
dep['scale_axis_{}_cg_addr'.format(i)] = grp['channel_group'].address
dep['scale_axis_{}_ch_addr'.format(i)] = ch.address
# Write every queued block to the destination stream.
# NOTE(review): `write` and `blocks` are bound outside this view.
for block in blocks:
write(bytes(block))
# Fragment: link the file header to the first data group, the file history
# block, the first attachment and the file comment; 0 marks an absent link.
if self.groups:
addr_ = self.groups[0]['data_group'].address
self.header['first_dg_addr'] = addr_
else:
self.header['first_dg_addr'] = 0
# The first file-history entry's block address becomes the header link.
self.header['file_history_addr'] = self.file_history[0][0].address
if self.attachments:
addr_ = self.attachments[0][0].address
self.header['first_attachment_addr'] = addr_
else:
self.header['first_attachment_addr'] = 0
if self.file_comment:
self.header['comment_addr'] = self.file_comment.address
else:
self.header['comment_addr'] = 0
# Fragment (MDF v2/v3 path, v23c constants): bind local aliases for the hot
# stream methods, then write the identification block, the header, the
# optional program block and the header comment, tracking the running file
# address as bytes are emitted.
write = dst_.write
seek = dst_.seek
# list of all blocks
blocks = []
address = 0
write(bytes(self.identification))
address += v23c.ID_BLOCK_SIZE
write(bytes(self.header))
address += self.header['block_len']
if self.header.program:
# `address` is the offset where the program block starts (bytes
# written so far), recorded into the header after the write.
write(bytes(self.header.program))
self.header['program_addr'] = address
address += self.header.program['block_len']
else:
self.header['program_addr'] = 0
# The header comment is written as a TextBlock right after the program
# block; its start offset is stored in the header.
comment = TextBlock(text=self.header.comment)
write(bytes(comment))
self.header['comment_addr'] = address
address += comment['block_len']
# DataGroup
# put them first in the block list so they will be written first to
# disk; this way, in case of memory=False, we can safely
# restore the original data block address
gp_rec_ids = []
destination = dst
with open(destination, 'wb+') as dst_:
defined_texts = {}
# Local aliases for the hot stream methods.
write = dst_.write
tell = dst_.tell
seek = dst_.seek
# list of all blocks
blocks = []
address = 0
write(bytes(self.identification))
write(bytes(self.header))
address = tell()
self.file_history.address = address
write(bytes(self.file_history))
# DataGroup
# put them first in the block list so they will be written first to
# disk; this way, in case of memory=False, we can safely
# restore the original data block address
data_address = []
for gp in self.groups:
# Work on a copy so the in-memory group texts are not mutated while
# addresses are patched in during the save.
gp_texts = deepcopy(gp['texts'])
# Data still resident in the original file is read from `self._file`;
# otherwise it was spilled to the temporary file.
if gp['data_location'] == v2c.LOCATION_ORIGINAL_FILE:
stream = self._file
# Fragment: near-duplicate of the save prelude above, but for the MDF v3
# path (v3c constants). Binds stream aliases, writes identification,
# header and file history, then starts the per-group write loop.
write = dst_.write
tell = dst_.tell
seek = dst_.seek
# list of all blocks
blocks = []
address = 0
write(bytes(self.identification))
write(bytes(self.header))
address = tell()
self.file_history.address = address
write(bytes(self.file_history))
# DataGroup
# put them first in the block list so they will be written first to
# disk; this way, in case of memory=False, we can safely
# restore the original data block address
data_address = []
for gp in self.groups:
gp_texts = deepcopy(gp['texts'])
if gp['data_location'] == v3c.LOCATION_ORIGINAL_FILE:
stream = self._file
else:
stream = self._tempfile
# Texts
# NOTE(review): `pairs` and `conv` are defined outside this view —
# this appears to patch text addresses into a conversion block before
# writing it; confirm against the full method.
for key, item in pairs:
conv[key] = item
write(bytes(conv))
# Fragment: write each group's channel extension (source) and channel
# dependency blocks, recording their new on-disk addresses in parallel
# "temp" lists; 0 keeps list indexes aligned for channels without an entry.
# Channel Extension
# NOTE(review): local `cs` aliases the temp list but is not used below.
cs = gp['temp_channel_extensions'] = []
for source in gp['channel_extensions']:
if source:
address = tell()
gp['temp_channel_extensions'].append(address)
# NOTE(review): `source` looks like a file address here — the block
# is re-read from `stream` before being rewritten; confirm upstream.
source = ChannelExtension(
address=source,
stream=stream,
)
write(bytes(source))
else:
gp['temp_channel_extensions'].append(0)
# Channel Dependency
# NOTE(review): local `cd` aliases the temp list but is not used below.
cd = gp['temp_channel_dependencies'] = []
for dep in gp['channel_dependencies']:
if dep:
address = tell()
gp['temp_channel_dependencies'].append(address)
dep.address = address
write(bytes(dep))
else:
gp['temp_channel_dependencies'].append(0)
# Fragment: write the channel group block (patching in its comment address
# if one was defined), then the group's optional trigger block.
# Channels
# NOTE(review): `cg` and `gp_texts` are bound outside this view.
blocks = []
cg['next_cg_addr'] = 0
cg_texts = gp_texts['channel_group'][0]
if 'comment_addr' in cg_texts:
addr = cg_texts['comment_addr']
cg['comment_addr'] = addr
write(bytes(cg))
address = tell()
# TriggerBLock
trigger, trigger_text = gp['trigger']
if trigger:
if trigger_text:
# The trigger comment text is written first so the trigger block
# can reference its address.
trigger_text.address = address
write(bytes(trigger_text))
trigger['comment_addr'] = trigger_text.address
else:
trigger['comment_addr'] = 0
address = tell()
trigger.address = address
write(bytes(trigger))
address = tell()
# Fragment: write the group's raw samples, report progress, restore the
# record ids / original data block addresses that were overwritten during
# the save, rewrite the now-complete identification and header blocks at
# offset 0, and reset in-memory state when saving over the open file in
# low-memory mode.
# DataBlock
data = self._load_group_data(gp)
if data:
data_address.append(address)
write(bytes(data))
# Progress callback scaled into the 66..99 range.
# NOTE(review): `count`, `blocks_nr` and `threshold` come from code
# outside this view — verify against the full method.
self._callback(66 + count, 100)
count += 1
threshold += blocks_nr / 33
else:
for block in blocks:
write(bytes(block))
# Undo the temporary record-id / data-address changes on the in-memory
# groups so the object stays usable after saving.
for gp, rec_id, original_address in zip(
self.groups,
gp_rec_ids,
original_data_block_addrs):
gp['data_group']['record_id_len'] = rec_id
gp['data_group']['data_block_addr'] = original_address
# All addresses are final now — rewrite the file prelude in place.
seek(0)
write(bytes(self.identification))
write(bytes(self.header))
if self.memory == 'low' and dst == self.name:
# Saving over the currently open file in low-memory mode: close the
# source, swap in the freshly written file, and drop cached state
# (a new temp file replaces the old spill file).
self.close()
os.remove(self.name)
os.rename(destination, self.name)
self.groups = []
self.header = None
self.identification = None
self.channels_db = {}
self.masters_db = {}
self._master_channel_cache = {}
self._tempfile = TemporaryFile()
# Fragment: second copy of the trigger + data-block write sequence (from a
# different method variant), followed by cleanup of the per-group temporary
# address lists and a snapshot of the original data block addresses.
address = tell()
# TriggerBLock
trigger, trigger_text = gp['trigger']
if trigger:
if trigger_text:
# Write the trigger comment first so its address can be linked.
trigger_text.address = address
write(bytes(trigger_text))
trigger['comment_addr'] = trigger_text.address
else:
trigger['comment_addr'] = 0
address = tell()
trigger.address = address
write(bytes(trigger))
address = tell()
# DataBlock
data = self._load_group_data(gp)
if data:
data_address.append(address)
write(bytes(data))
else:
# No samples for this group: record a null data address.
data_address.append(0)
# Drop the temporary address lists now that the addresses have been
# written into the actual blocks.
del gp['temp_channel_conversions']
del gp['temp_channel_extensions']
# Snapshot each group's original data block address so it can be restored
# after the save completes.
orig_addr = [gp['data_group']['data_block_addr'] for gp in self.groups]