# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_AnyPyProcessOutput():
    """Exercise AnyPyProcessOutputList: length, indexing, slice reads, slice assignment."""
    entries = [AnyPyProcessOutput({'AAAA': i}) for i in range(1, 6)]
    entries.append(AnyPyProcessOutput({'AAAA': 6, 'ERROR': 0}))
    out = AnyPyProcessOutputList(entries)
    assert len(out) == 6
    assert isinstance(out[0], AnyPyProcessOutput)
    # Slice reads return the expected length and preserve the element type
    assert len(out[1:3]) == 2
    assert isinstance(out[0:2][0], AnyPyProcessOutput)
    # Slice assignment: drop every entry that carries an 'ERROR' key
    out[:] = [entry for entry in out if 'ERROR' not in entry]
if hasattr(pbar, "container"):
pbar.container.children[0].bar_style = "danger"
pbar.update()
except KeyboardInterrupt as e:
tqdm.write("KeyboardInterrupt: User aborted")
time.sleep(1)
finally:
if not self.silent:
tqdm.write(tasklist_summery(tasklist))
self.cleanup_logfiles(tasklist)
# Cache the processed tasklist for restarting later
self.cached_tasklist = tasklist
# self.summery.final_summery(process_time, tasklist)
task_output = [task.get_output() for task in tasklist]
return AnyPyProcessOutputList(task_output)
def filter(self, function):
    """Return a new AnyPyProcessOutputList with the elements for which
    *function* returns a truthy value.

    The original list is left unchanged. As with the builtin ``filter``,
    passing ``None`` keeps the truthy elements themselves.
    """
    kept = filter(function, self)
    return AnyPyProcessOutputList(kept)
>>> app.load_results('unfinished_results.db')
>>> results = app.start_macro() # rerun unfinished
"""
loadkey = "processed_tasks"
db = shelve.open(filename)
loaded_data = db[loadkey]
db.close()
# Hack to help Enrico convert data to the new structured
if not isinstance(loaded_data[0].output, AnyPyProcessOutput):
for task in loaded_data:
task.output = AnyPyProcessOutput(task.output)
self.cached_tasklist = loaded_data
results = [task.get_output(True) for task in loaded_data]
return AnyPyProcessOutputList(results)
be used.
Returns
-------
None
"""
import h5py
if not self.cached_tasklist:
raise ValueError("No data available for saving")
if batch_name is None:
batch_name = str(self.cached_arg_hash)
any_output = AnyPyProcessOutputList(
[task.get_output() for task in self.cached_tasklist]
)
task_names = [elem["task_name"] for elem in any_output]
unique_names = len(task_names) == len(set(task_names))
with h5py.File(filename, "w") as h5file:
h5_batch_group = h5file.create_group(batch_name)
for run in any_output:
task_name = run["task_name"] if unique_names else str(run["task_id"])
task_name = task_name.replace("/", "|")
h5_task_group = h5_batch_group.create_group(task_name)
for k, v in run.items():
if not isinstance(v, np.ndarray):
if isinstance(v, list):
h5_task_group.attrs[k] = str(v)
else:
h5_task_group.attrs[k] = v
@convert.register(nd.array, AnyPyProcessOutputList, cost=1.0)
def convert(res, **kwargs):
    """Convert an AnyPyProcessOutputList into an ``nd.array``.

    Works on a deep copy so the caller's data is never mutated. Every
    numpy array stored under a key starting with 'Main' is coerced to
    float first: the dump command in AnyBody may make whole-number
    floats look like ints, which would otherwise skew the inferred
    dshape.
    """
    res = deepcopy(res)
    for record in res:
        for name in record:
            value = record[name]
            if name.startswith('Main') and isinstance(value, np.ndarray):
                try:
                    record[name] = value.astype('float')
                except ValueError:
                    # Non-numeric payload — leave the array untouched.
                    pass
    prepared_data, ds = convert_and_extract_dshape(res, **kwargs)
    return nd.array(prepared_data, dtype=str(ds))