Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
register=[Records, Peaks],
config=dict(crash=True))
# Check correct exception is thrown
with pytest.raises(SomeCrash):
st.make(run_id=run_id, targets='peaks',
max_workers=2)
# Check exception is recorded in metadata
# in both its original data type and dependents
for target in ('peaks', 'records'):
assert 'SomeCrash' in st.get_meta(run_id, target)['exception']
# Check corrupted data does not load
st.context_config['forbid_creation_of'] = ('peaks',)
with pytest.raises(strax.DataNotAvailable):
st.get_df(run_id=run_id, targets='peaks')
chunk_number=None,
executor=None):
"""Iterates over strax data in backend_key
:param time_range: 2-length arraylike of (start, exclusive end)
of desired data. Will return all data that partially overlaps with
the range.
Default is None, which means get the entire
:param chunk_number: Chunk number to get exclusively
:param executor: Executor to push load/decompress operations to
"""
metadata = self.get_metadata(backend_key)
if 'strax_version' in metadata:
v_old = metadata['strax_version']
if version.parse(v_old) < version.parse('0.9.0'):
raise strax.DataNotAvailable(
f"Cannot load data at {backend_key}: "
f"it was created with strax {v_old}, "
f"but you have strax {strax.__version__}. ")
else:
warnings.warn(f"Data at {backend_key} does not say what strax "
"version it was generated with. This means it is "
"corrupted, or very, very old. Probably "
"we cannot load this.")
# 'start' and 'end' are not required, to allow allow_incomplete
required_fields = (
'run_id data_type data_kind dtype compressor').split()
missing_fields = [x for x in required_fields if x not in metadata]
if len(missing_fields):
raise strax.DataNotAvailable(
f"Cannot load data at {backend_key}: metadata is "
def _find(self, key, write, allow_incomplete, fuzzy_for,
fuzzy_for_options):
if write:
return self.backends[0].__class__.__name__, str(key)
if self.db[str(key)].count():
self.log.debug(f"{key} is in cache.")
return self.backends[0].__class__.__name__, str(key)
self.log.debug(f"{key} is NOT in cache.")
raise strax.DataNotAvailable
yield from x(*args, **kwargs)
ldr = lambda *args, **kwargs : concat_loader(*args, **kwargs)
if ldr:
# Found it! No need to make it or look in other frontends
loading_this_data = True
loaders[d] = ldr
del plugins[d]
else:
# Data not found anywhere. We will be computing it.
if (time_range is not None
and plugins[d].save_when != strax.SaveWhen.NEVER):
# While the data type providing the time information is
# available (else we'd have failed earlier), one of the
# other requested data types is not.
raise strax.DataNotAvailable(
f"Time range selection assumes data is already "
f"available, but {d} for {run_id} is not.")
if '*' in self.context_config['forbid_creation_of']:
raise strax.DataNotAvailable(
f"{d} for {run_id} not found in any storage, and "
"your context specifies no new data can be created.")
if d in self.context_config['forbid_creation_of']:
raise strax.DataNotAvailable(
f"{d} for {run_id} not found in any storage, and "
"your context specifies it cannot be created.")
to_compute[d] = p
for dep_d in p.depends_on:
check_cache(dep_d)
# Should we save this data? If not, return.
if (loading_this_data
# Found it! No need to make it or look in other frontends
loading_this_data = True
loaders[d] = ldr
del plugins[d]
else:
# Data not found anywhere. We will be computing it.
if (time_range is not None
and plugins[d].save_when != strax.SaveWhen.NEVER):
# While the data type providing the time information is
# available (else we'd have failed earlier), one of the
# other requested data types is not.
raise strax.DataNotAvailable(
f"Time range selection assumes data is already "
f"available, but {d} for {run_id} is not.")
if '*' in self.context_config['forbid_creation_of']:
raise strax.DataNotAvailable(
f"{d} for {run_id} not found in any storage, and "
"your context specifies no new data can be created.")
if d in self.context_config['forbid_creation_of']:
raise strax.DataNotAvailable(
f"{d} for {run_id} not found in any storage, and "
"your context specifies it cannot be created.")
to_compute[d] = p
for dep_d in p.depends_on:
check_cache(dep_d)
# Should we save this data? If not, return.
if (loading_this_data
and not self.context_config['storage_converter']):
return
if p.save_when == strax.SaveWhen.NEVER:
if d in save:
def _zipname(self, key):
zipname = osp.join(self.path, key.run_id + '.zip')
# Since we're never writing, this check can be here
# TODO: sounds like a bad idea?
if not osp.exists(zipname):
raise strax.DataNotAvailable
return zipname
def get_meta(self, run_id, target) -> dict:
    """Return metadata for target for run_id, or raise DataNotAvailable
    if data is not yet available.

    Storage frontends are queried in registration order; the first one
    that has the data wins.
    :param run_id: run id to get
    :param target: data type to get
    :raises strax.DataNotAvailable: when no frontend has the metadata.
    """
    key = self.key_for(run_id, target)
    for sf in self.storage:
        try:
            meta = sf.get_metadata(key, **self._find_options)
        except strax.DataNotAvailable:
            self.log.debug(f"Frontend {sf} does not have {key}")
        else:
            return meta
    raise strax.DataNotAvailable(f"Can't load metadata, "
                                 f"data for {key} not available")
def _saver(self, dirname, metadata):
    """Return a FileSaver that will write data and *metadata* to *dirname*.

    Verifies up front that the parent directory exists (creating it if
    needed) and is writeable, so failures surface before any data is
    produced.

    :param dirname: directory the saver will create and fill
    :param metadata: metadata to store alongside the data
    :raises strax.DataNotAvailable: if the parent directory cannot be
        created or is not writeable.
    """
    # Test if the parent directory is writeable.
    # We need abspath since the dir itself may not exist,
    # even though its parent-to-be does
    parent_dir = os.path.abspath(os.path.join(dirname, os.pardir))

    # In case the parent dir also doesn't exist, we have to create it,
    # otherwise the write permission check below will certainly fail
    try:
        os.makedirs(parent_dir, exist_ok=True)
    except OSError as e:
        # Fix: the two f-strings previously concatenated without a
        # space ("...create it.Original error"). Chain the original
        # OSError so the underlying cause stays in the traceback.
        raise strax.DataNotAvailable(
            f"Can't write data to {dirname}, "
            f"{parent_dir} does not exist and we could not create it. "
            f"Original error: {e}") from e

    # Finally, check if we have permission to create the new subdirectory
    # (which the Saver will do)
    if not os.access(parent_dir, os.W_OK):
        raise strax.DataNotAvailable(
            f"Can't write data to {dirname}, "
            f"no write permissions in {parent_dir}.")

    return FileSaver(dirname, metadata=metadata)