How to use the resolwe.flow.models.Process model class in resolwe

To help you get started, we’ve selected a few resolwe examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github genialis / resolwe / resolwe / rest / tests.py View on Github external
def setUp(self):
        """Create shared fixtures: a test Entity and a Process with a grouped output schema."""
        super().setUp()

        # Container entity owned by the test contributor.
        self.entity = Entity.objects.create(
            name="Test entity", contributor=self.contributor
        )
        # Process whose output schema has a nested group ("foo" -> "bar"/"hello")
        # plus a flat top-level integer field ("another").
        process = Process.objects.create(
            name="Test process",
            contributor=self.contributor,
            output_schema=[
                {
                    "name": "foo",
                    "label": "Foo",
                    "group": [
                        {"name": "bar", "label": "Bar", "type": "basic:integer:"},
                        {"name": "hello", "label": "Hello", "type": "basic:string:"},
                    ],
                },
                {"name": "another", "label": "Another", "type": "basic:integer:"},
            ],
        )
        # Output values matching the schema above (excerpt truncated here).
        data_output = {
            "foo": {"bar": 42, "hello": "world",},
github genialis / resolwe / resolwe / flow / managers / workload_connectors / celery.py View on Github external
def submit(self, data, runtime_dir, argv):
        """Dispatch the process run to a Celery worker queue.

        Interactive processes are routed to the high-priority queue,
        everything else to the ordinary one.  For details, see
        :meth:`~resolwe.flow.managers.workload_connectors.base.BaseConnector.submit`.
        """
        # Route by the scheduling class declared on the process.
        is_interactive = (
            data.process.scheduling_class == Process.SCHEDULING_CLASS_INTERACTIVE
        )
        queue = "hipri" if is_interactive else "ordinary"

        eager_flag = getattr(settings, "CELERY_ALWAYS_EAGER", None)
        logger.debug(
            __(
                "Connector '{}' running for Data with id {} ({}) in celery queue {}, EAGER is {}.",
                self.__class__.__module__,
                data.id,
                repr(argv),
                queue,
                eager_flag,
            )
        )
        # Hand the task off to Celery on the selected queue.
        celery_run.apply_async((data.id, runtime_dir, argv), queue=queue)
github genialis / resolwe / resolwe / flow / managers / dispatcher.py View on Github external
settings_dict = {}
        settings_dict["DATA_DIR"] = data_dir
        settings_dict["REDIS_CHANNEL_PAIR"] = state.MANAGER_EXECUTOR_CHANNELS
        files[ExecutorFiles.EXECUTOR_SETTINGS] = settings_dict

        # Snapshot of the effective Django settings, overridable per call via kwargs.
        django_settings = {}
        django_settings.update(self.settings_actual)
        django_settings.update(kwargs)
        files[ExecutorFiles.DJANGO_SETTINGS] = django_settings

        # Add scheduling classes: collect the SCHEDULING_CLASS_* string constants
        # from the Process model so the executor can use them without importing Django.
        files[ExecutorFiles.PROCESS_META] = {
            k: getattr(Process, k)
            for k in dir(Process)
            if k.startswith("SCHEDULING_CLASS_")
            and isinstance(getattr(Process, k), str)
        }

        # Add Data status constants (STATUS_*) the same way.
        files[ExecutorFiles.DATA_META] = {
            k: getattr(Data, k)
            for k in dir(Data)
            if k.startswith("STATUS_") and isinstance(getattr(Data, k), str)
        }

        # Prepare storage connectors settings and secrets.
        connectors_settings = copy.deepcopy(STORAGE_CONNECTORS)
        # The local connector is always named 'local' inside the executor.
        connectors_settings["local"] = connectors_settings.pop(STORAGE_LOCAL_CONNECTOR)
        for connector_settings in connectors_settings.values():
            # Fix class name for inclusion in the executor.
            klass = connector_settings["connector"]
github genialis / resolwe / resolwe / flow / views / data.py View on Github external
def perform_get_or_create(self, request, *args, **kwargs):
        """Perform "get_or_create" - return existing object if found."""
        self.define_contributor(request)
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        process = serializer.validated_data.get("process")
        process_input = request.data.get("input", {})

        # Fill in schema defaults before hashing, presumably so equivalent
        # requests produce the same checksum regardless of which optional
        # fields the caller supplied — TODO confirm against get_data_checksum.
        fill_with_defaults(process_input, process.input_schema)

        # Only deduplicate against Data created by cacheable processes.
        checksum = get_data_checksum(process_input, process.slug, process.version)
        data_qs = Data.objects.filter(
            checksum=checksum,
            process__persistence__in=[
                Process.PERSISTENCE_CACHED,
                Process.PERSISTENCE_TEMP,
            ],
        )
        # Restrict matches to objects the requesting user is allowed to view.
        data_qs = get_objects_for_user(request.user, "view_data", data_qs)
        if data_qs.exists():
            # Return the most recently created matching Data object.
            data = data_qs.order_by("created").last()
            serializer = self.get_serializer(data)
            return Response(serializer.data)
github genialis / resolwe / resolwe / flow / serializers / data.py View on Github external
from .base import ResolweBaseSerializer
from .collection import CollectionSerializer
from .descriptor import DescriptorSchemaSerializer
from .entity import EntitySerializer
from .fields import DictRelatedField
from .process import ProcessSerializer


class DataSerializer(ResolweBaseSerializer):
    """Serializer for Data objects."""

    # Raw JSON fields; ProjectableJSONField suggests sub-path projection support.
    input = ProjectableJSONField(required=False)
    output = ProjectableJSONField(required=False)
    descriptor = ProjectableJSONField(required=False)
    # Related objects exchanged as dicts through their nested serializers.
    process = DictRelatedField(
        queryset=Process.objects.all(), serializer=ProcessSerializer
    )
    descriptor_schema = DictRelatedField(
        queryset=DescriptorSchema.objects.all(),
        serializer=DescriptorSchemaSerializer,
        allow_null=True,
        required=False,
    )
    # Writing the collection reference requires "edit" permission on it.
    collection = DictRelatedField(
        queryset=Collection.objects.all(),
        serializer=CollectionSerializer,
        allow_null=True,
        required=False,
        write_permission="edit",
    )
    entity = DictRelatedField(
        queryset=Entity.objects.all(),
github genialis / resolwe / resolwe / flow / management / commands / genesis_migrate.py View on Github external
def migrate_process(self, process):
        """Build and persist a Process model from a legacy process document."""
        migrated = Process()
        migrated.name = process[u'label']
        migrated.slug = self.process_slug(process[u'name'])
        migrated.version = process[u'version']
        migrated.type = process[u'type']
        migrated.description = process.get(u'description', '')
        migrated.contributor = self.get_contributor(process['author_id'])
        migrated.category = process.get(u'category', '')
        # XXX: Django overwrites these timestamps on create/save.
        migrated.created = process[u'date_created']
        migrated.modified = process[u'date_modified']
        migrated.output_schema = process[u'output_schema']
        migrated.input_schema = process.get(u'input_schema', {})
        migrated.persistence = self.persistence_dict[process[u'persistence']]
        # The legacy document stores the script under run.bash.
        migrated.run['script'] = process[u'run'][u'bash']  # pylint: disable=unsubscriptable-object
        migrated.save()
github genialis / resolwe / resolwe / flow / views / data.py View on Github external
def perform_get_or_create(self, request, *args, **kwargs):
        """Perform "get_or_create" - return existing object if found."""
        self.define_contributor(request)
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        process = serializer.validated_data.get("process")
        process_input = request.data.get("input", {})

        # Normalize the input with schema defaults before computing the
        # checksum — NOTE(review): presumably makes the hash independent of
        # which optional fields the caller omitted; verify.
        fill_with_defaults(process_input, process.input_schema)

        # Look for an existing Data object with the same checksum, limited to
        # cacheable process persistence classes.
        checksum = get_data_checksum(process_input, process.slug, process.version)
        data_qs = Data.objects.filter(
            checksum=checksum,
            process__persistence__in=[
                Process.PERSISTENCE_CACHED,
                Process.PERSISTENCE_TEMP,
            ],
        )
        # Permission filter: only objects the user can view count as a match.
        data_qs = get_objects_for_user(request.user, "view_data", data_qs)
        if data_qs.exists():
            # Reuse the newest matching Data object instead of creating one.
            data = data_qs.order_by("created").last()
            serializer = self.get_serializer(data)
            return Response(serializer.data)
github genialis / resolwe / resolwe / flow / management / commands / genesis_migrate.py View on Github external
descriptor_schema.save()

            # Cache the schema by its dumped fields so identical schemas are reused.
            self.descriptor_schema_index[ds_fields_dumped] = descriptor_schema

        # Merge the legacy 'static' and 'var' annotations into one descriptor.
        descriptor = {}
        descriptor.update(data.get(u'static', {}))
        descriptor.update(data.get(u'var', {}))

        # PROCESS ######################################################
        # Old documents may lack a version; default to 0.0.0.
        if u'processor_version' not in data:
            data[u'processor_version'] = '0.0.0'

        process_slug = self.process_slug(data[u'processor_name'])
        process_version = data[u'processor_version']
        try:
            process = Process.objects.get(slug=process_slug, version=process_version)
        except Process.DoesNotExist:
            # Exact version not found: derive a new Process from the latest
            # known version of the same slug.
            latest = Process.objects.filter(slug=process_slug).order_by('-version').first()

            if latest:
                process = Process()
                process.name = latest.name
                process.slug = latest.slug
                process.category = latest.category
                process.description = latest.description
                process.contributor = latest.contributor

                # These fields come from the data document, not the latest process.
                process.version = process_version
                process.type = data[u'type']
                process.output_schema = data[u'output_schema']
                process.input_schema = data.get(u'input_schema', {})
                process.persistence = self.persistence_dict[data[u'persistence']]
github genialis / resolwe / resolwe / flow / managers / dispatcher.py View on Github external
secrets_dir = os.path.join(runtime_dir, ExecutorFiles.SECRETS_DIR)

        settings_dict = {}
        settings_dict["DATA_DIR"] = data_dir
        settings_dict["REDIS_CHANNEL_PAIR"] = state.MANAGER_EXECUTOR_CHANNELS
        files[ExecutorFiles.EXECUTOR_SETTINGS] = settings_dict

        # Effective Django settings with per-call overrides from kwargs.
        django_settings = {}
        django_settings.update(self.settings_actual)
        django_settings.update(kwargs)
        files[ExecutorFiles.DJANGO_SETTINGS] = django_settings

        # Add scheduling classes: export the SCHEDULING_CLASS_* string
        # constants from Process so the executor can use them standalone.
        files[ExecutorFiles.PROCESS_META] = {
            k: getattr(Process, k)
            for k in dir(Process)
            if k.startswith("SCHEDULING_CLASS_")
            and isinstance(getattr(Process, k), str)
        }

        # Add Data status constants (STATUS_*) the same way.
        files[ExecutorFiles.DATA_META] = {
            k: getattr(Data, k)
            for k in dir(Data)
            if k.startswith("STATUS_") and isinstance(getattr(Data, k), str)
        }

        # Prepare storage connectors settings and secrets.
        connectors_settings = copy.deepcopy(STORAGE_CONNECTORS)
        # The local connector is always named 'local' inside the executor.
        connectors_settings["local"] = connectors_settings.pop(STORAGE_LOCAL_CONNECTOR)
        for connector_settings in connectors_settings.values():