    def setUp(self):
        super().setUp()

        self.entity = Entity.objects.create(
            name="Test entity", contributor=self.contributor
        )
        process = Process.objects.create(
            name="Test process",
            contributor=self.contributor,
            output_schema=[
                {
                    "name": "foo",
                    "label": "Foo",
                    "group": [
                        {"name": "bar", "label": "Bar", "type": "basic:integer:"},
                        {"name": "hello", "label": "Hello", "type": "basic:string:"},
                    ],
                },
                {"name": "another", "label": "Another", "type": "basic:integer:"},
            ],
        )
        data_output = {
            "foo": {"bar": 42, "hello": "world"},
        }
    def submit(self, data, runtime_dir, argv):
        """Run process.

        For details, see
        :meth:`~resolwe.flow.managers.workload_connectors.base.BaseConnector.submit`.
        """
        queue = "ordinary"
        if data.process.scheduling_class == Process.SCHEDULING_CLASS_INTERACTIVE:
            queue = "hipri"

        logger.debug(
            __(
                "Connector '{}' running for Data with id {} ({}) in celery queue {}, EAGER is {}.",
                self.__class__.__module__,
                data.id,
                repr(argv),
                queue,
                getattr(settings, "CELERY_ALWAYS_EAGER", None),
            )
        )
        celery_run.apply_async((data.id, runtime_dir, argv), queue=queue)
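The apply_async call above hands the work to a Celery task named celery_run. A minimal sketch of how such a task could be declared; the real body lives in Resolwe's Celery task module, and only the signature is inferred from the argument tuple passed above.

from celery import shared_task

@shared_task
def celery_run(data_id, runtime_dir, argv):
    """Placeholder: execute the process for the given Data id (sketch only)."""
    ...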
        django_settings.update(kwargs)
        files[ExecutorFiles.DJANGO_SETTINGS] = django_settings

        # Add scheduling classes.
        files[ExecutorFiles.PROCESS_META] = {
            k: getattr(Process, k)
            for k in dir(Process)
            if k.startswith("SCHEDULING_CLASS_")
            and isinstance(getattr(Process, k), str)
        }

        # Add Data status constants.
        files[ExecutorFiles.DATA_META] = {
            k: getattr(Data, k)
            for k in dir(Data)
            if k.startswith("STATUS_") and isinstance(getattr(Data, k), str)
        }

        # Prepare storage connectors settings and secrets.
        connectors_settings = copy.deepcopy(STORAGE_CONNECTORS)
        # The local connector in the executor is always named 'local'.
        connectors_settings["local"] = connectors_settings.pop(STORAGE_LOCAL_CONNECTOR)
        for connector_settings in connectors_settings.values():
            # Fix class name for inclusion in the executor.
            klass = connector_settings["connector"]
            klass = "executors." + klass.rsplit(".storage.")[-1]
            connector_settings["connector"] = klass
            connector_config = connector_settings["config"]
            # Prepare credentials for executor.
            if "credentials" in connector_config:
                src_credentials = connector_config["credentials"]
                base_credentials_name = os.path.basename(src_credentials)
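A quick, self-contained illustration of the class-name rewrite performed in the loop above, using a hypothetical connector path; the real dotted path in STORAGE_CONNECTORS may differ.

# Hypothetical connector path, rewritten so the executor can import it from
# its own "executors." package.
klass = "resolwe.storage.connectors.localconnector.LocalFilesystemConnector"
assert "executors." + klass.rsplit(".storage.")[-1] == (
    "executors.connectors.localconnector.LocalFilesystemConnector"
)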
            return

        # Set allocated resources:
        resource_limits = data.process.get_resource_limits()
        data.process_memory = resource_limits["memory"]
        data.process_cores = resource_limits["cores"]

        else:
            # If there is no run section, then we should not try to run
            # anything. But the program must not be set to None as then the
            # process will be stuck in waiting state.
            program = ""

        if data.status != Data.STATUS_DONE:
            # The data object may already be marked as done by the execution
            # engine. In this case we must not revert the status to
            # STATUS_WAITING.
            data.status = Data.STATUS_WAITING
            data.save(render_name=True)

            # Actually run the object only if there was nothing wrong with the
            # transaction.
            transaction.on_commit(
                # Make sure the closure gets the right values here, since
                # they're changed in the loop.
                lambda d=data, p=program: self._data_execute(d, p, executor)
            )
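The default arguments in the lambda above are what freeze the per-iteration values of data and program before transaction.on_commit fires; without them every deferred callback would see only the last iteration's values. A standalone illustration of that Python behaviour:

# Early binding via default arguments versus late binding via closure.
early_bound = [lambda i=i: i for i in range(3)]
late_bound = [lambda: i for i in range(3)]

assert [f() for f in early_bound] == [0, 1, 2]
assert [f() for f in late_bound] == [2, 2, 2]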
        try:
            process = process_query.latest()
        except Process.DoesNotExist:
            return Response(
                {'process': ['Invalid process slug "{}" - object does not exist.'.format(process_slug)]},
                status=status.HTTP_400_BAD_REQUEST,
            )
        request.data['process'] = process.pk

        # Perform "get_or_create" if requested - return the existing object if found.
        if kwargs.pop('get_or_create', False):
            process_input = request.data.get('input', {})

            # Use default values if they are not given.
            for field_schema, fields, path in iterate_schema(process_input, process.input_schema):
                if 'default' in field_schema and field_schema['name'] not in fields:
                    dict_dot(process_input, path, field_schema['default'])

            checksum = get_data_checksum(process_input, process.slug, process.version)
            data_qs = Data.objects.filter(
                checksum=checksum,
                process__persistence__in=[Process.PERSISTENCE_CACHED, Process.PERSISTENCE_TEMP],
            )
            data_qs = get_objects_for_user(request.user, 'view_data', data_qs)
            if data_qs.exists():
                data = data_qs.order_by('created').last()
                serializer = self.get_serializer(data)
                return Response(serializer.data)

        # Create the objects.
        resp = super(ResolweCreateDataModelMixin, self).create(request, *args, **kwargs)

        # Run manager.
elif data.get("id", None) is not None:
kwargs = {"id": data["id"]}
elif data.get("slug", None) is not None:
if self.root.instance:
# ``self.root.instance != None`` means that an instance is
# already present, so this is not "create" request.
self.fail("slug_not_allowed")
kwargs = {"slug": data["slug"]}
        else:
            self.fail("null", name=self.field_name)

        user = getattr(self.context.get("request"), "user")
        queryset = self.get_queryset()
        permission = get_full_perm(self.write_permission, queryset.model)
        try:
            return get_objects_for_user(
                user, permission, queryset.filter(**kwargs)
            ).latest("version")
        except ObjectDoesNotExist:
            # Differentiate between "user has no permission" and "object does not exist".
            view_permission = get_full_perm("view", queryset.model)
            if permission != view_permission:
                try:
                    get_objects_for_user(
                        user, view_permission, queryset.filter(**kwargs)
                    ).latest("version")
                    raise exceptions.PermissionDenied(
                        "You do not have {} permission for {}: {}.".format(
                            self.write_permission, self.model_name, data
                        )
                    )
                except ObjectDoesNotExist:
                    pass

            self.fail(
                "does_not_exist", value=smart_text(data), model_name=self.model_name
            )
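In practice the field above accepts a reference to an existing related object either by primary key or by slug, with slug references rejected once an instance already exists (i.e. on updates). Two illustrative payloads (values are made up for the example):

# Resolved via queryset.filter(id=42); the user's write permission is enforced.
reference_by_id = {"id": 42}
# Resolved via queryset.filter(slug=...).latest("version"); only allowed on create.
reference_by_slug = {"slug": "my-process"}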
        # asynchronously with respect to the main Django code
        # here; the manager can get nudged from elsewhere.
        with transaction.atomic():
            parent_data = Data.objects.get(pk=data_id)

            # Spawn processes.
            for d in obj[ExecutorProtocol.FINISH_SPAWN_PROCESSES]:
                d["contributor"] = parent_data.contributor
                d["process"] = Process.objects.filter(slug=d["process"]).latest()
                d["tags"] = parent_data.tags
                d["collection"] = parent_data.collection
                d["subprocess_parent"] = parent_data

                for field_schema, fields in iterate_fields(
                    d.get("input", {}), d["process"].input_schema
                ):
                    type_ = field_schema["type"]
                    name = field_schema["name"]
                    value = fields[name]

                    if type_ == "basic:file:":
                        fields[name] = self.hydrate_spawned_files(
                            exported_files_mapper, value, data_id
                        )
                    elif type_ == "list:basic:file:":
                        fields[name] = [
                            self.hydrate_spawned_files(
                                exported_files_mapper, fn, data_id
                            )
                            for fn in value
                        ]
        latest_version = Process.objects.filter(slug=slug).aggregate(
            Max("version")
        )["version__max"]
        if latest_version is not None and latest_version > int_version:
            self.stderr.write(
                "Skip processor {}: newer version installed".format(slug)
            )
            continue

        previous_process_qs = Process.objects.filter(slug=slug)
        if previous_process_qs.exists():
            previous_process = previous_process_qs.latest()
        else:
            previous_process = None

        process_query = Process.objects.filter(slug=slug, version=version)
        if process_query.exists():
            if not force:
                if verbosity > 0:
                    self.stdout.write(
                        "Skip processor {}: same version installed".format(slug)
                    )
                continue

            process_query.update(**p)
            log_processors.append("Updated {}".format(slug))
        else:
            process = Process.objects.create(contributor=user, **p)
            assign_contributor_permissions(process)
            if previous_process:
                copy_permissions(previous_process, process)
            log_processors.append("Inserted {}".format(slug))