How to use the dffml.df.types.Definition class in dffml

To help you get started, we’ve selected a few dffml examples based on popular ways Definition is used in public projects.
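A Definition simply gives a name and a primitive type to a value that flows between operations, and the @op decorator uses those definitions to wire one operation's outputs to another's inputs. A minimal sketch (the city/greeting names here are illustrative, not taken from the projects below):

from typing import Dict, Any

from dffml.df.base import op
from dffml.df.types import Definition

# A Definition names a value and declares its primitive type so the
# orchestrator can match one operation's outputs to another's inputs.
city = Definition(name="city", primitive="str")
greeting = Definition(name="greeting", primitive="str")


@op(inputs={"city": city}, outputs={"greeting": greeting})
async def greet(city: str) -> Dict[str, Any]:
    # The returned dict is keyed by the output names declared above.
    return {"greeting": f"Hello, {city}!"}

The examples below show the same pattern in real projects.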


intel / dffml / examples / shouldi / shouldi / pypi.py (View on GitHub)
import shutil
import tempfile
from typing import Dict, Any

import aiohttp

from dffml.df.base import op
from dffml.df.types import Definition, Stage

from .safety import package, package_version
from .bandit import package_src_dir

package_json = Definition(name="package_json", primitive="Dict[str, Any]")
package_url = Definition(name="package_url", primitive="str")


@op(
    inputs={"package": package},
    outputs={"response_json": package_json},
    # imp_enter allows us to create instances of objects which are async context
    # managers and assign them to self.parent which is an object of type
    # OperationImplementation which will be alive for the lifetime of the
    # Orchestrator which runs all these operations.
    imp_enter={
        "session": (lambda self: aiohttp.ClientSession(trust_env=True))
    },
)
async def pypi_package_json(self, package: str) -> Dict[str, Any]:
    """
intel / dffml / dffml / operation / output.py (View on GitHub)
                # # If only one and single is set then convert list to single
                # # item for output dict
                # if len(want[output_name]) == 1 and output.single:
                #     want[output_name] = want[output_name][0]
                # # If the output needs to be a dict then make it one. This
                # # will convert an array of arrays to a dict.
                # elif output.ismap:
                #     want[output_name] = dict(want[output_name])

            return want


get_single_spec = Definition(name="get_single_spec", primitive="array")

get_single_output = Definition(name="get_single_output", primitive="map")


@op(
    name="get_single",
    inputs={"spec": get_single_spec},
    outputs={"output": get_single_output},
    stage=Stage.OUTPUT,
)
class GetSingle(OperationImplementationContext):
    async def run(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        # TODO Address the need to copy operation implementation inputs dict
        # In case the input is used elsewhere in the network
        exported = copy.deepcopy(inputs["spec"])
        # Look up the definition for each
        for convert in range(0, len(exported)):
            exported[convert] = await self.octx.ictx.definition(
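GetSingle is normally seeded with an Input whose value is a list of definition names to collect from the network. A minimal sketch, assuming the pypi_package_json operation and package_json definition from the pypi.py snippet above, plus dffml's Input and DataFlow types:

from dffml.df.types import DataFlow, Input
from dffml.operation.output import GetSingle

# Build a dataflow from the operations and ask the output stage to
# return whichever input matches the package_json definition.
dataflow = DataFlow.auto(pypi_package_json, GetSingle)
seed = [
    Input(
        value=[package_json.name],
        definition=GetSingle.op.inputs["spec"],
    )
]

Running this dataflow through an orchestrator then yields a results dict keyed by definition name.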
intel / dffml / examples / shouldi / shouldi / safety.py (View on GitHub)
import sys
import json
import asyncio
from typing import Dict, Any

from dffml.df.base import op
from dffml.df.types import Definition

package = Definition(name="package", primitive="str")
package_version = Definition(name="package_version", primitive="str")
safety_check_number_of_issues = Definition(
    name="safety_check_number_of_issues", primitive="int"
)


@op(
    name="safety_check",
    inputs={"package": package, "version": package_version},
    outputs={"issues": safety_check_number_of_issues},
    conditions=[],
)
async def safety_check(package: str, version: str) -> Dict[str, Any]:
    pinned = f"{package}=={version}"

    proc = await asyncio.create_subprocess_exec(
        sys.executable,
        "-m",
intel / dffml / dffml / operation / output.py (View on GitHub)
        self.logger.debug("output spec: %s", exported)
        # Acquire all definitions within the context
        async with self.octx.ictx.definitions(self.ctx) as od:
            # Output dict
            want = {}
            # Group each requested output
            for definition in exported:
                async for item in od.inputs(definition):
                    want[definition.name] = item.value
                    break
            return want


associate_spec = Definition(name="associate_spec", primitive="List[str]")

associate_output = Definition(
    name="associate_output", primitive="Dict[str, Any]"
)


@op(
    name="associate",
    inputs={"spec": associate_spec},
    outputs={"output": associate_output},
    stage=Stage.OUTPUT,
)
class Associate(OperationImplementationContext):
    async def run(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        # TODO Address the need to copy operation implementation inputs dict
        # In case the input is used elsewhere in the network
        exported = copy.deepcopy(inputs["spec"])
        # Look up the definition for each
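Associate's spec is a two-element list of definition names, and per context it pairs values of one definition with values of the other. A rough sketch of seeding it, assuming the package and safety_check_number_of_issues definitions from safety.py (the exact pairing semantics may differ between dffml versions):

from dffml.df.types import Input
from dffml.operation.output import Associate

spec = Input(
    value=[package.name, safety_check_number_of_issues.name],
    definition=Associate.op.inputs["spec"],
)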
intel / dffml / dffml / df / types.py (View on GitHub)
    def _fromdict(cls, **kwargs):
        kwargs["definition"] = Definition._fromdict(**kwargs["definition"])
        return cls(**kwargs)
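_fromdict is the deserialization half of a Definition's export/import cycle. A small sketch of rebuilding a Definition from its flattened form, assuming the dict came from a serialized dataflow:

from dffml.df.types import Definition

# A Definition flattened to primitives, e.g. pulled out of a JSON dataflow.
serialized = {"name": "package", "primitive": "str"}
# _fromdict rebuilds an equivalent Definition from the flat dict.
package = Definition._fromdict(**serialized)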
intel / dffml / dffml / operation / output.py (View on GitHub)
    async def resolve(
        cls,
        ctx: BaseInputSetContext,
        ictx: BaseInputNetworkContext,
        exported: Dict[str, Any],
    ):
        # TODO Address the need to copy operation implementation inputs dict
        # In case the input is used elsewhere in the network
        exported = copy.deepcopy(exported)
        # Look up the definition for the group and by fields
        for convert in ["group", "by"]:
            exported[convert] = await ictx.definition(ctx, exported[convert])
        return cls(**exported)


group_by_spec = Definition(
    name="group_by_spec", primitive="Dict[str, Any]", spec=GroupBySpec
)

group_by_output = Definition(
    name="group_by_output", primitive="Dict[str, List[Any]]"
)


@op(
    name="group_by",
    inputs={"spec": group_by_spec},
    outputs={"output": group_by_output},
    stage=Stage.OUTPUT,
)
class GroupBy(OperationImplementationContext):
    async def run(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
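The group_by spec is a dict whose "group" and "by" entries resolve() turns back into real Definition objects. A rough sketch of seeding it, assuming the package and safety_check_number_of_issues definitions from safety.py (GroupBySpec's exact fields can vary between dffml versions):

from dffml.df.types import Input
from dffml.operation.output import GroupBy

spec = Input(
    value={
        "issues": {
            # "group" and "by" are the two keys resolve() looks up above.
            "group": package.name,
            "by": safety_check_number_of_issues.name,
        }
    },
    definition=GroupBy.op.inputs["spec"],
)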
intel / dffml / dffml / skel / operations / REPLACE_IMPORT_PACKAGE_NAME / definitions.py (View on GitHub)
import sys
from dffml.df.types import Definition

definitions = [
    Definition(name="calc_string", primitive="str"),
    Definition(name="is_add", primitive="bool"),
    Definition(name="is_mult", primitive="bool"),
    Definition(name="numbers", primitive="List[int]"),
    Definition(name="result", primitive="int"),
]

for definition in definitions:
    setattr(sys.modules[__name__], definition.name, definition)
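The setattr loop exposes every Definition as a module-level attribute, so the skeleton's operation modules can import them by name (REPLACE_IMPORT_PACKAGE_NAME is the skeleton's placeholder for the generated package). An illustrative operation using two of those definitions:

from typing import Dict, Any, List

from dffml.df.base import op
from REPLACE_IMPORT_PACKAGE_NAME.definitions import numbers, result


@op(inputs={"numbers": numbers}, outputs={"result": result})
async def calc_add(numbers: List[int]) -> Dict[str, Any]:
    # Illustrative only; the real skeleton ships its own operations.
    return {"result": sum(numbers)}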