How to use the dffml.df.types.Operation class in dffml

To help you get started, we’ve selected a few dffml examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github intel / dffml / dffml / df / base.py View on Github external
def wrap(func):
        """
        Attach an ``Operation`` to *func* as ``func.op``, filling in defaults
        for any metadata the caller did not supply via ``kwargs``.

        NOTE(review): this snippet is truncated by the page it was scraped
        from — the original ``wrap`` continues past the last line shown here.
        """
        # Default the operation's name to the wrapped function's name.
        if "name" not in kwargs:
            kwargs["name"] = func.__name__
        # TODO Make this grab from the defaults for Operation
        if "conditions" not in kwargs:
            kwargs["conditions"] = []

        func.op = Operation(**kwargs)
        # Derive a CamelCase class name from the snake_case operation name,
        # e.g. "url_to_urlbytes" -> "UrlToUrlbytes".
        cls_name = func.op.name.replace("_", " ").title().replace(" ", "")

        sig = inspect.signature(func)
        # Check if the function uses the operation implementation context:
        # either its first parameter is literally "self", an enter hook was
        # supplied, or a parameter is annotated OperationImplementationContext.
        uses_self = bool(
            (sig.parameters and list(sig.parameters.keys())[0] == "self")
            or imp_enter is not None
            or ctx_enter is not None
            or (
                [
                    name
                    for name, param in sig.parameters.items()
                    if param.annotation is OperationImplementationContext
                ]
            )
        )
github intel / dffml / dffml / cli / dataflow.py View on Github external
async def run(self):
        """
        Resolve the requested operations, assemble them into a DataFlow, and
        print the flow serialized by the configured config loader.
        """
        # Specs containing ":" are entrypoint-style and may expand to several
        # operations; plain names load exactly one Operation.
        resolved = []
        for spec in self.operations:
            loaded = list(load(spec)) if ":" in spec else [Operation.load(spec)]
            resolved.extend(loaded)
        async with self.config(BaseConfig()) as configloader:
            async with configloader() as loader:
                flow = DataFlow.auto(*resolved)
                serialized = await loader.dumpb(
                    flow.export(linked=not self.not_linked)
                )
                print(serialized.decode())
github intel / dffml / dffml / df / memory.py View on Github external
self,
        dataflow: DataFlow,
        *,
        input_set: Optional[BaseInputSet] = None,
        stage: Stage = Stage.PROCESSING,
    ) -> AsyncIterator[Operation]:
        # NOTE(review): fragment — the "async def operations(" line that
        # introduces this signature is cut off above this snippet.
        # Map instance_name -> Operation so each operation is yielded once.
        operations: Dict[str, Operation] = {}
        # Nothing registered for this stage: nothing to yield.
        if stage not in dataflow.by_origin:
            return
        if input_set is None:
            # No input set given: consider every operation registered for
            # the stage, regardless of origin.
            for operation in chain(*dataflow.by_origin[stage].values()):
                operations[operation.instance_name] = operation
        else:
            # Only operations fed by the origins present in the input set.
            async for item in input_set.inputs():
                origin = item.origin
                # Origins may be Operation objects; normalize to their
                # instance name for the by_origin lookup.
                if isinstance(origin, Operation):
                    origin = origin.instance_name
                if origin not in dataflow.by_origin[stage]:
                    continue
                for operation in dataflow.by_origin[stage][origin]:
                    operations[operation.instance_name] = operation
        # Yield the deduplicated operations.
        for operation in operations.values():
            yield operation
github intel / dffml / dffml / df / types.py View on Github external
# NOTE(review): the three fields below belong to a dataclass whose header is
# cut off above this snippet; they are not part of InputFlow.
value: Any
    origin: Input
    definition: Definition


@dataclass
class InputFlow:
    """
    Inputs of an operation by their name as used by the operation implementation
    mapped to a list of locations they can come from. The list contains strings
    in the format of operation_instance_name.key_in_output_mapping or the
    literal "seed" which specifies that the value could be seeded to the
    network.
    """

    # None acts as a sentinel for "not provided"; __post_init__ replaces it
    # with a fresh container so instances never share a mutable default.
    inputs: Dict[str, List[Dict[str, Operation]]] = field(default=None)
    conditions: List[Dict[str, Operation]] = field(default=None)

    def __post_init__(self):
        # Swap the None sentinels for per-instance mutable containers.
        if self.inputs is None:
            self.inputs = {}
        if self.conditions is None:
            self.conditions = []

    def export(self):
        # Serialize to a plain dict; drop "conditions" entirely when empty so
        # the exported form stays minimal.
        exported = export_dict(**asdict(self))
        if not exported["conditions"]:
            del exported["conditions"]
        return exported

    @classmethod
    def _fromdict(cls, **kwargs):
github intel / dffml / dffml / df / linker.py View on Github external
# NOTE(review): fragment — this starts mid-try inside a loop over operation
# definitions; the enclosing function and loop header are cut off above.
except KeyError as error:
                    raise KeyError(
                        "Definition missing while resolving %s.%s"
                        % (name, arg)
                    ) from error
            # Replace each input/output's definition name with the resolved
            # Definition object; a missing name is a hard error.
            for arg in ["inputs", "outputs"]:
                try:
                    kwargs[arg] = {
                        i: definitions[kwargs[arg][i]] for i in kwargs[arg]
                    }
                except KeyError as error:
                    raise KeyError(
                        "Definition missing while resolving %s.%s"
                        % (name, arg)
                    ) from error
            operations[name] = Operation(name=name, **kwargs)
        return definitions, operations, outputs
github intel / dffml / dffml / df / memory.py View on Github external
async def operations(
        self,
        dataflow: DataFlow,
        *,
        input_set: Optional[BaseInputSet] = None,
        stage: Stage = Stage.PROCESSING,
    ) -> AsyncIterator[Operation]:
        """
        Yield the operations of *dataflow* for the given *stage*, each at most
        once (deduplicated by instance name). When *input_set* is given, only
        operations fed by the origins present in that input set are yielded;
        otherwise every operation registered for the stage is.
        """
        # Stage has no registered operations: nothing to yield.
        if stage not in dataflow.by_origin:
            return
        # instance_name -> Operation, preserving discovery order.
        seen: Dict[str, Operation] = {}
        if input_set is None:
            for op in chain(*dataflow.by_origin[stage].values()):
                seen[op.instance_name] = op
        else:
            async for item in input_set.inputs():
                origin = item.origin
                # Origins may be Operation objects; normalize to the
                # instance name used as the by_origin key.
                if isinstance(origin, Operation):
                    origin = origin.instance_name
                if origin not in dataflow.by_origin[stage]:
                    continue
                for op in dataflow.by_origin[stage][origin]:
                    seen[op.instance_name] = op
        for op in seen.values():
            yield op
github intel / dffml / feature / auth / dffml_feature_auth / feature / operations.py View on Github external
import hashlib
import asyncio
import warnings
import concurrent.futures
from typing import Dict, Any

from dffml.df.types import Operation
from dffml.df.base import (
    OperationImplementationContext,
    OperationImplementation,
)

# pylint: disable=no-name-in-module
from .definitions import UnhashedPassword, ScryptPassword

# Operation metadata for the scrypt password-hashing operation: consumes an
# unhashed password, produces a scrypt-hashed one, with no run conditions.
scrypt = Operation(
    name="scrypt",
    inputs={"password": UnhashedPassword},
    outputs={"password": ScryptPassword},
    conditions=[],
)


class ScryptContext(OperationImplementationContext):
    # Implementation context backing the ``scrypt`` Operation defined above.
    @staticmethod
    def hash_password(password):
        # NOTE(review): snippet truncated — the method body below is cut off
        # mid-comment; only quoted hashlib documentation is visible here.
        # ---- BEGIN Python hashlib docs ----

        # The function provides scrypt password-based key derivation function as
        # defined in RFC 7914.

        # password and salt must be bytes-like objects. Applications and
github intel / dffml / dffml_operations_binsec / operations.py View on Github external
import aiohttp
from elftools.elf.descriptions import describe_e_type
from elftools.elf.elffile import ELFFile
from rpmfile import RPMFile
from rpmfile.errors import RPMError

from dffml.df.types import Stage, Operation
from dffml.df.base import op, OperationImplementationContext, OperationImplementation

# pylint: disable=no-name-in-module
from .definitions import URL, URLBytes, RPMObject, rpm_filename, binary, binary_is_PIE

from .log import LOGGER

# Operation metadata for downloading a URL: consumes a URL, produces its
# downloaded bytes, with no run conditions.
url_to_urlbytes = Operation(
    name="url_to_urlbytes",
    inputs={"URL": URL},
    outputs={"download": URLBytes},
    conditions=[],
)


class URLBytesObject(NamedTuple):
    # Downloaded content paired with the URL it came from.
    URL: str
    body: bytes

    def __repr__(self):
        # Show only the first 10 bytes of the body so large downloads do not
        # flood logs. NOTE(review): snippet truncated — the closing parens of
        # this return statement are cut off below.
        return "%s(URL=%s, body=%s...)" % (
            self.__class__.__qualname__,
            self.URL,
            self.body[:10],
def args(cls, args, *above) -> Dict[str, Arg]:
        """
        Register the ``ops`` command-line argument (one or more Operation
        specs, loaded via ``Operation.load``) into *args* and return it.
        """
        ops_arg = Arg(type=Operation.load, nargs="+", default=[])
        cls.config_set(args, above, "ops", ops_arg)
        return args