def key_for(self, run_id, target):
    """Return the DataKey for a run_id, target pair,
    inferred from the target plugin's lineage."""
    p = self._get_plugins((target,), run_id)[target]
    return strax.DataKey(run_id, target, p.lineage)
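For orientation, the return value is a plain strax.DataKey. Below is a minimal sketch constructing one by hand; the lineage dict is made up for illustration, in the {data_type: (plugin_name, version, options)} shape that strax plugins produce.

import strax

# Hypothetical lineage, illustrative values only:
# {data_type: (plugin class name, plugin version, {option: value})}
lineage = {'peaks': ('Peaks', '0.1.0', {'min_area': 10})}

key = strax.DataKey(run_id='run_0', data_type='peaks', lineage=lineage)
print(key.lineage_hash)  # deterministic hash of the lineage dict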
f" turned on.")
return
if self.context_config['allow_incomplete']:
self.log.warning(f"Not saving {d} while loading incomplete"
f" data is allowed.")
return
# Save the target and any other outputs of the plugin.
for d_to_save in set([d] + list(p.provides)):
if d_to_save in savers and len(savers[d_to_save]):
# This multi-output plugin was scanned before
# let's not create doubled savers
assert p.multi_output
continue
key = strax.DataKey(run_id, d_to_save, p.lineage)
for sf in self.storage:
if sf.readonly:
continue
if loading_this_data:
# Usually, we don't save if we're loading
if not self.context_config['storage_converter']:
continue
# ... but in storage converter mode we do:
try:
sf.find(key,
**self._find_options)
# Already have this data in this backend
continue
except strax.DataNotAvailable:
# Don't have it, so let's save it!
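For context, savers in this routine maps each data type to the list of savers created for it, one per writable storage frontend. A minimal self-contained sketch of that bookkeeping; FakeFrontend and FakeSaver are made-up stand-ins, not strax API.

import collections

class FakeSaver:
    """Made-up stand-in for what a frontend's .saver(key, ...) returns."""
    def __init__(self, key):
        self.key = key

class FakeFrontend:
    """Made-up stand-in for a strax storage frontend."""
    def __init__(self, readonly=False):
        self.readonly = readonly

    def saver(self, key):
        return FakeSaver(key)

storage = [FakeFrontend(), FakeFrontend(readonly=True)]
savers = collections.defaultdict(list)

for sf in storage:
    if sf.readonly:
        continue  # read-only frontends never receive a saver
    savers['peaks'].append(sf.saver('peaks-key'))

assert len(savers['peaks']) == 1  # only the writable frontend saves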
def check_cache(d):
    nonlocal plugins, loaders, savers, seen
    if d in seen:
        return
    seen.add(d)
    p = plugins[d]

    # Can we load this data?
    loading_this_data = False
    key = strax.DataKey(run_id, d, p.lineage)

    ldr = self._get_partial_loader_for(
        key,
        chunk_number=chunk_number,
        time_range=time_range)

    if not ldr and run_id.startswith('_'):
        # Superrun: load the same data type from every subrun,
        # making it first if necessary.
        if time_range is not None:
            raise NotImplementedError("time range loading not yet "
                                      "supported for superruns")

        sub_run_spec = self.run_metadata(
            run_id, 'sub_run_spec')['sub_run_spec']
        self.make(list(sub_run_spec.keys()), d)

        ldrs = []
        for subrun in sub_run_spec:
            sub_key = strax.DataKey(
                subrun,
                d,
                self._get_plugins((d,), subrun)[d].lineage)
            if sub_run_spec[subrun] == 'all':
                _subrun_time_range = None
            else:
                _subrun_time_range = sub_run_spec[subrun]
            ldr = self._get_partial_loader_for(
                sub_key,
                time_range=_subrun_time_range,
                chunk_number=chunk_number)
            if not ldr:
                raise RuntimeError(
                    f"Could not load {d} for subrun {subrun} "
                    f"even though we made it??")
            ldrs.append(ldr)
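The per-subrun loaders collected in ldrs are then chained into a single loader that yields each subrun's chunks in order (sub_run_spec maps each subrun id to 'all' or to a time range). A self-contained sketch of that generator-chaining pattern, with made-up stand-in loaders:

def make_loader(chunks):
    """Stand-in for a strax loader: a callable yielding chunks."""
    def loader(*args, **kwargs):
        yield from chunks
    return loader

ldrs = [make_loader([1, 2]), make_loader([3])]

def concat_loader(*args, **kwargs):
    # Exhaust each subrun's loader in turn, preserving subrun order
    for x in ldrs:
        yield from x(*args, **kwargs)

assert list(concat_loader()) == [1, 2, 3]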
def metadata(self, run_id, data_type):
    """Metadata to save along with produced data"""
    if data_type not in self.provides:
        raise RuntimeError(f"{data_type} not in {self.provides}?")
    return dict(
        run_id=run_id,
        data_type=data_type,
        data_kind=self.data_kind_for(data_type),
        dtype=self.dtype_for(data_type),
        lineage_hash=strax.DataKey(
            run_id, data_type, self.lineage).lineage_hash,
        compressor=self.compressor,
        lineage=self.lineage)
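One property worth noting: the lineage_hash stored in this metadata depends only on the lineage, not on the run_id, so the same plugin configuration hashes identically across runs. A short sketch, with an illustrative lineage:

import strax

lineage = {'peak_basics': ('PeakBasics', '0.0.1', {})}
k1 = strax.DataKey('run_a', 'peak_basics', lineage)
k2 = strax.DataKey('run_b', 'peak_basics', lineage)
assert k1.lineage_hash == k2.lineage_hash  # run_id does not enter the hash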