wf = script.workflow()
env.config['sig_mode'] = 'default'
# generate files (default step 0 and 1)
Base_Executor(wf, config={'default_queue': 'localhost'}).run()
# now, rerun in build mode
env.config['sig_mode'] = 'build'
res = Base_Executor(wf, config={'default_queue': 'localhost'}).run()
self.assertEqual(res['__completed__']['__step_completed__'], 0)
#
self.assertTrue(os.path.isfile('temp/c.txt'))
self.assertTrue(os.path.isfile('temp/d.txt'))
with open('temp/c.txt') as tc:
    # assertTrue only checks that the file is non-empty; 'a.txt' is the failure message
    self.assertTrue(tc.read(), 'a.txt')
with open('temp/d.txt') as td:
    self.assertTrue(td.read(), 'b.txt')
self.assertEqual(env.sos_dict['oa'],
                 sos_targets('temp/c.txt', 'temp/d.txt'))
#
# now in assert mode, the signature should be there
env.config['sig_mode'] = 'assert'
res = Base_Executor(wf, config={'default_queue': 'localhost'}).run()
self.assertEqual(res['__completed__']['__step_completed__'], 0)
#
env.config['sig_mode'] = 'default'
res = Base_Executor(wf, config={'default_queue': 'localhost'}).run()
self.assertEqual(res['__completed__']['__step_completed__'], 0)
#
# change script a little bit
script = SoS_Script('# comment\n' + text)
wf = script.workflow()
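# Hedged, self-contained sketch: the fragment above assumes a unittest fixture and a
# `text` variable holding the workflow source. The tiny workflow below is an
# illustrative assumption (not from the original test); only SoS_Script, Base_Executor
# and env.config are used as they appear above, and the import paths follow recent SoS
# releases, so they may differ in older versions.
from sos.parser import SoS_Script
from sos.utils import env
from sos.workflow_executor import Base_Executor

demo_text = '''\
[10]
output: 'demo.txt'
path('demo.txt').touch()
'''
demo_wf = SoS_Script(demo_text).workflow()
env.config['sig_mode'] = 'default'   # run the step and record its signature
Base_Executor(demo_wf).run()
env.config['sig_mode'] = 'assert'    # the rerun should be skipped because the signature matches
Base_Executor(demo_wf).run()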
'''Test return of output from dynamic output'''
for i in range(5):
    if os.path.exists(f'rep_{i}'):
        shutil.rmtree(f'rep_{i}')
script = SoS_Script(r'''
[1: shared={'step1': 'step_output'}]
input: for_each={'i': range(5)}, concurrent=True
output: dynamic(f'rep_{i}/*.res')
import random, os
os.makedirs(f'rep_{i}', exist_ok=True)
path(f'rep_{i}/{random.randint(0, 10000)}.res').touch()
''')
wf = script.workflow()
res = Base_Executor(wf).run()
files = env.sos_dict['step1']
self.assertEqual(len(files), 5)
self.assertEqual(res['__completed__']['__substep_completed__'], 5)
# rerun
res = Base_Executor(wf).run()
files_again = env.sos_dict['step1']
self.assertEqual(files, files_again)
self.assertEqual(res['__completed__']['__substep_completed__'], 0)
if args.__to__:
    env.sos_dict.set(args.__to__, content)
elif args.__append__:
    if args.__append__ not in env.sos_dict:
        env.sos_dict.set(args.__append__, content)
    elif isinstance(env.sos_dict[args.__append__], str):
        if isinstance(content, str):
            env.sos_dict[args.__append__] += content
        else:
            self.sos_kernel.warn(
                f'Cannot append new content of type {type(content).__name__} to {args.__append__} of type {type(env.sos_dict[args.__append__]).__name__}'
            )
    elif isinstance(env.sos_dict[args.__append__], dict):
        if isinstance(content, dict):
            env.sos_dict[args.__append__].update(content)
        else:
            self.sos_kernel.warn(
                f'Cannot append new content of type {type(content).__name__} to {args.__append__} of type {type(env.sos_dict[args.__append__]).__name__}'
            )
    elif isinstance(env.sos_dict[args.__append__], pd.DataFrame):
        if isinstance(content, pd.DataFrame):
            env.sos_dict.set(
                args.__append__,
                env.sos_dict[args.__append__].append(content))
        else:
            self.sos_kernel.warn(
                f'Cannot append new content of type {type(content).__name__} to {args.__append__} of type {type(env.sos_dict[args.__append__]).__name__}'
            )
    elif isinstance(env.sos_dict[args.__append__], list):
        env.sos_dict[args.__append__].append(content)
    else:
        # fall-through: the existing variable has a type we do not know how to append to
        self.sos_kernel.warn(
            f'Cannot append new content of type {type(content).__name__} to {args.__append__} of type {type(env.sos_dict[args.__append__]).__name__}'
        )
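# Note on the DataFrame branch above: pandas.DataFrame.append was deprecated in
# pandas 1.4 and removed in pandas 2.0, so on current pandas that branch would need
# pd.concat instead. A minimal sketch of the replacement (variable names are
# illustrative, not part of the SoS code):
import pandas as pd

existing = pd.DataFrame({'a': [1, 2]})
incoming = pd.DataFrame({'a': [3]})
# equivalent of existing.append(incoming); add ignore_index=True to renumber rows
combined = pd.concat([existing, incoming])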
    except Exception as e:
        self.warn('Failed to save workflow: {}'.format(e))
        return {'status': 'error',
                'ename': e.__class__.__name__,
                'evalue': str(e),
                'traceback': [],
                'execution_count': self._execution_count,
                }
    return self._do_execute(remaining_code, silent, store_history, user_expressions, allow_stdin)
elif self.MAGIC_RERUN.match(code):
    options, remaining_code = self.get_magic_and_code(code, True)
    old_options = self.options
    self.options = options + ' ' + self.options
    try:
        self._workflow_mode = True
        old_dict = env.sos_dict
        self._reset_dict()
        if not self.last_executed_code:
            self.warn('No saved script')
            self.last_executed_code = ''
        return self._do_execute(self.last_executed_code, silent, store_history, user_expressions, allow_stdin)
    except Exception as e:
        self.warn('Failed to execute workflow: {}'.format(e))
        raise
    finally:
        old_dict.quick_update(env.sos_dict._dict)
        env.sos_dict = old_dict
        self.options = old_options
        self._workflow_mode = False
elif self.MAGIC_SANDBOX.match(code):
    import tempfile
    import shutil
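# The MAGIC_RERUN branch above temporarily replaces env.sos_dict, merges the results
# back with quick_update(), and restores the original dictionary in a finally block.
# A generic sketch of that swap-and-restore pattern as a context manager (the helper
# and class names are illustrative, not part of SoS):
from contextlib import contextmanager

@contextmanager
def swapped_attribute(obj, name, new_value):
    """Temporarily replace obj.<name>, restoring the old value even on error."""
    old_value = getattr(obj, name)
    setattr(obj, name, new_value)
    try:
        yield new_value
    finally:
        setattr(obj, name, old_value)

class _FakeEnv:
    sos_dict = {'kept': 1}

with swapped_attribute(_FakeEnv, 'sos_dict', {}) as scratch:
    scratch['temp'] = 2          # visible only while the block runs
assert _FakeEnv.sos_dict == {'kept': 1}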
def reset_dict(self):
    # if creating a new dictionary, set it up with some basic variables
    # and functions
    if self.nested:
        #
        # if this is a nested workflow, we do not clear sos_dict because it contains all
        # the symbols from the main workflow. _base_symbols need to be defined though.
        self._base_symbols = set(dir(__builtins__)) | set(env.sos_dict['sos_symbols_']) | set(SOS_KEYWORDS) | set(keyword.kwlist)
        self._base_symbols -= {'dynamic'}
        return
    env.sos_dict = WorkflowDict()
    env.parameter_vars.clear()
    # inject a few things
    env.sos_dict.set('__workflow_sig__', os.path.join(env.exec_dir, '.sos', '{}.sig'.format(self.md5)))
    env.sos_dict.set('__null_func__', __null_func__)
    env.sos_dict.set('__args__', self.args)
    env.sos_dict.set('__unknown_args__', self.args)
    # initial values
    env.sos_dict.set('SOS_VERSION', __version__)
    env.sos_dict.set('__step_output__', [])
    # load configuration files
def get_vars(self, names):
    for name in names:
        if name.startswith('_'):
            self.sos_kernel.warn('Variable {} is passed from SoS to kernel {} as {}'.format(name, self.kernel_name, '.' + name[1:]))
            newname = '.' + name[1:]
        else:
            newname = name
        r_repr = _R_repr(env.sos_dict[name])
        self.sos_kernel.run_cell('{} <- {}'.format(newname, r_repr), True, False, on_error='Failed to get variable {} to R'.format(name))
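# The loop above renames any variable whose name starts with an underscore to a
# dot-prefixed name before sending it to R, because R identifiers may not start with
# an underscore while names starting with a dot are allowed. A sketch of just that
# renaming rule (the helper name is illustrative):
def r_safe_name(name):
    """Map a Python variable name to a name R will accept."""
    return '.' + name[1:] if name.startswith('_') else name

assert r_safe_name('_private') == '.private'
assert r_safe_name('value') == 'value'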
env.sos_dict.set('__args__', self.args)
if self.md5:
    env.sos_dict.set('__workflow_sig__', os.path.join(env.exec_dir, '.sos', '{}.sig'.format(self.md5)))
self._base_symbols = set(dir(__builtins__)) | set(env.sos_dict['sos_symbols_']) | set(SOS_KEYWORDS) | set(keyword.kwlist)
self._base_symbols -= {'dynamic'}
# load configuration files
cfg = load_config_files(self.config['config_file'])
# validate the sos.change_all_cap_vars option, which must be 'warning' or 'error'
if cfg.get('sos', {}).get('change_all_cap_vars', None) is not None:
    if cfg['sos']['change_all_cap_vars'] not in ('warning', 'error'):
        env.logger.error('Configuration sos.change_all_cap_vars can only be warning or error: {} provided'.format(cfg['sos']['change_all_cap_vars']))
    else:
        env.sos_dict._change_all_cap_vars = cfg['sos']['change_all_cap_vars']
# set config to CONFIG
env.sos_dict.set('CONFIG', cfg)
FileTarget('config.yml').remove('both')
# remove some variables because they would interfere with step analysis
for key in ('_input', 'input'):
    if key in env.sos_dict:
        env.sos_dict.pop(key)
env.sos_dict.quick_update(self.shared)
if isinstance(self.args, dict):
    for key, value in self.args.items():
        if not key.startswith('__'):
            env.sos_dict.set(key, value)
def get_vars(self, names):
    self.sos_kernel.run_cell("import pickle", True, False)
    for name in names:
        if self.kernel_name == 'python3':
            stmt = "globals().update(pickle.loads({!r}))\n".format(pickle.dumps({name: env.sos_dict[name]}))
        else:
            stmt = "globals().update(pickle.loads({!r}))\n".format(pickle.dumps({name: env.sos_dict[name]}, protocol=2, fix_imports=True))
        self.sos_kernel.run_cell(stmt, True, False, on_error='Failed to get variable {} from SoS to {}'.format(name, self.kernel_name))
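# The transfer above serializes each variable with pickle, embeds the bytes' repr()
# in a generated statement, and executes that statement in the target kernel so the
# variable reappears under the same name. A self-contained sketch of the same round
# trip using exec() against a plain namespace instead of a Jupyter kernel (the
# namespace and variable names are illustrative):
import pickle

value = {'rows': [1, 2, 3]}
stmt = "globals().update(pickle.loads({!r}))\n".format(pickle.dumps({'payload': value}))

target_namespace = {}                    # stands in for the receiving kernel's globals()
exec("import pickle", target_namespace)  # mirrors run_cell("import pickle", ...) above
exec(stmt, target_namespace)             # the generated code rebuilds the variable there
assert target_namespace['payload'] == value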
    if x in env.sos_dict:
        env.sos_dict.pop(x)
return
if args.keys:
    if args.all:
        self.send_result(env.sos_dict._dict.keys())
    elif args.vars:
        self.send_result(set(args.vars))
    else:
        self.send_result({x for x in env.sos_dict._dict.keys() if not x.startswith('__')} - self.original_keys)
else:
    if args.all:
        self.send_result(env.sos_dict._dict)
    elif args.vars:
        self.send_result({x: y for x, y in env.sos_dict._dict.items() if x in args.vars})
    else:
        self.send_result({x: y for x, y in env.sos_dict._dict.items() if x not in self.original_keys and not x.startswith('__')})