How to use the dace.properties.Property class in dace

To help you get started, we’ve selected a few dace examples based on popular ways dace.properties.Property is used in public projects.

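Before diving into the examples, here is a minimal sketch of the typical pattern: Property objects are declared as class attributes and the class is decorated with make_properties, so that they behave as typed, documented attributes. The MyConfig class and its fields below are hypothetical, and the exact validation and default-handling behavior may differ between dace versions.

from dace.properties import Property, make_properties


@make_properties
class MyConfig(object):
    # Each Property becomes a typed attribute with a default and a description.
    tile_size = Property(dtype=int,
                         default=64,
                         desc="Tile size used by a hypothetical transformation")
    use_gpu = Property(dtype=bool,
                       default=False,
                       desc="Whether to offload work to the GPU")


cfg = MyConfig()
print(cfg.tile_size)  # -> 64, the declared default
cfg.tile_size = 128   # assignments are checked against dtype=int
cfg.use_gpu = True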

github spcl / dace / dace / libraries / blas / nodes / gemm.py View on Github external
       broadcastable (ONNX terminology) to A @ B.
    """

    # Global properties
    implementations = {
        "pure": ExpandGemmPure,
        "MKL": ExpandGemmMKL,
        "cuBLAS": ExpandGemmCuBLAS
    }
    default_implementation = None

    # Object fields
    dtype = dace.properties.TypeClassProperty(allow_none=True)
    transA = Property(dtype=bool,
                      desc="Whether to transpose A before multiplying")
    transB = Property(dtype=bool,
                      desc="Whether to transpose B before multiplying")
    alpha = Property(
        dtype=tuple(dace.dtypes._CONSTANT_TYPES),
        default=1,
        desc="A scalar which will be multiplied with A @ B before adding C")
    beta = Property(
        dtype=tuple(dace.dtypes._CONSTANT_TYPES),
        default=1,
        desc="A scalar which will be multiplied with C before adding C")

    def __init__(self,
                 name,
                 dtype=None,
                 location=None,
                 transA=False,
                 transB=False,
github spcl / dace / dace / codegen / control_flow.py View on Github external
    scope = Property(dtype=LoopScope, allow_none=True)
    edge = Property(dtype=Edge, allow_none=True)

    def __init__(self, scope, edge, *args, **kwargs):
        self.scope = scope
        self.edge = edge
        scope.back = self
        super().__init__(*args, **kwargs)


# These will be assigned when the various control flow objects are created
LoopScope.assignment = Property(dtype=LoopAssignment, allow_none=True)
LoopScope.entry = Property(dtype=LoopEntry, allow_none=True)
LoopScope.back = Property(dtype=LoopBack, allow_none=True)
LoopScope.exit = Property(dtype=LoopExit, allow_none=True)
LoopScope = make_properties(LoopScope)


# Extra meta-object binding together then and else scopes.
# make_properties will be called after adding cyclic class reference members
class IfThenElse:

    entry = Property(allow_none=True)
    exit = Property(allow_none=True)

    def __init__(self, entry, exit):
        self.entry = entry
        self.exit = exit
        self.then_scope = None
        self.else_scope = None
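The control_flow.py snippet above also shows the non-decorator form of make_properties: when property types reference classes that do not exist yet (cyclic references), the Property objects are attached after all classes are defined and make_properties is then called as a plain function. Below is a minimal sketch of the same pattern using hypothetical Parent/Child classes.

from dace.properties import Property, make_properties


class Parent:
    pass


class Child:
    pass


# Attach the cyclically-typed properties once both classes exist,
# then apply make_properties as a function rather than as a decorator.
Parent.child = Property(dtype=Child, allow_none=True)
Child.parent = Property(dtype=Parent, allow_none=True)
Parent = make_properties(Parent)
Child = make_properties(Child)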
github spcl / dace / dace / sdfg / state.py View on Github external
            :param new_name: Name to replace.
        """
        from dace.sdfg.sdfg import replace
        replace(self, name, new_name)


@make_properties
class SDFGState(OrderedMultiDiConnectorGraph, StateGraphView):
    """ An acyclic dataflow multigraph in an SDFG, corresponding to a
        single state in the SDFG state machine. """

    is_collapsed = Property(dtype=bool,
                            desc="Show this node/scope/state as collapsed",
                            default=False)

    nosync = Property(dtype=bool,
                      default=False,
                      desc="Do not synchronize at the end of the state")

    instrument = Property(choices=dtypes.InstrumentationType,
                          desc="Measure execution statistics with given method",
                          default=dtypes.InstrumentationType.No_Instrumentation)

    location = DictProperty(
        key_type=str,
        value_type=symbolic.pystr_to_symbolic,
        desc='Full storage location identifier (e.g., rank, GPU ID)')

    def __init__(self, label=None, sdfg=None, debuginfo=None, location=None):
        """ Constructs an SDFG state.
            :param label: Name for the state (optional).
            :param sdfg: A reference to the parent SDFG.
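Once declared this way, state properties are read and written like ordinary attributes; the choices-based instrument property, for example, takes a member of InstrumentationType. The following hedged usage sketch assumes a reasonably recent dace version, and the axpy program is hypothetical, serving only to obtain an SDFG state.

import dace

N = dace.symbol('N')


@dace.program
def axpy(x: dace.float64[N], y: dace.float64[N]):
    y[:] = x + y


sdfg = axpy.to_sdfg()
state = sdfg.nodes()[0]            # an SDFGState
state.is_collapsed = False         # plain bool Property
state.instrument = dace.InstrumentationType.Timer  # choices-based Property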
github spcl / dace / dace / codegen / codeobject.py View on Github external
from dace.properties import (Property, DictProperty, SetProperty,
                             make_properties)


@make_properties
class CodeObject(object):
    name = Property(dtype=str, desc="Filename to use")
    code = Property(dtype=str, desc="The code attached to this object")
    language = Property(dtype=str,
                        desc="Language used for this code (same " +
                        "as its file extension)")
    target = Property(dtype=type,
                      desc="Target to use for compilation",
                      allow_none=True)
    target_type = Property(
        dtype=str,
        desc="Sub-target within target (e.g., host or device code)",
        default="")
    title = Property(dtype=str, desc="Title of code for GUI")
    extra_compiler_kwargs = DictProperty(key_type=str,
                                         value_type=str,
                                         desc="Additional compiler argument "
                                         "variables to add to template")
github spcl / dace / dace / graph / edges.py View on Github external
        super().__init__(*args, **kwargs)


# These will be assigned when the various control flow objects are created
LoopScope.assignment = Property(dtype=LoopAssignment, allow_none=True)
LoopScope.entry = Property(dtype=LoopEntry, allow_none=True)
LoopScope.back = Property(dtype=LoopBack, allow_none=True)
LoopScope.exit = Property(dtype=LoopExit, allow_none=True)
LoopScope = make_properties(LoopScope)


# Extra meta-object binding together then and else scopes.
# make_properties will be called after adding cyclic class reference members
class IfThenElse:

    entry = Property(allow_none=True)
    exit = Property(allow_none=True)

    def __init__(self, entry, exit):
        self.entry = entry
        self.exit = exit
        self.then_scope = None
        self.else_scope = None


@make_properties
class IfEntry(ControlFlow):

    scope = Property(dtype=ControlFlowScope, allow_none=True)
    edge = Property(dtype=Edge, allow_none=True)

    def __init__(self, scope, edge, *args, **kwargs):
github spcl / dace / dace / transformation / dataflow / gpu_transform.py View on Github external
from dace.graph import nodes, nxutil
from dace.graph.graph import SubgraphView
from dace.transformation import pattern_matching, helpers
from dace.properties import Property, make_properties


@registry.autoregister_params(singlestate=True)
@make_properties
class GPUTransformMap(pattern_matching.Transformation):
    """ Implements the GPUTransformMap transformation.

        Converts a single map to a GPU-scheduled map and creates GPU arrays
        outside it, generating CPU<->GPU memory copies automatically.
    """

    fullcopy = Property(
        desc="Copy whole arrays rather than used subset",
        dtype=bool,
        default=False)

    toplevel_trans = Property(
        desc="Make all GPU transients top-level", dtype=bool, default=False)

    register_trans = Property(
        desc="Make all transients inside GPU maps registers",
        dtype=bool,
        default=False)

    sequential_innermaps = Property(
        desc="Make all internal maps Sequential", dtype=bool, default=False)

    _map_entry = nodes.MapEntry(nodes.Map("", [], []))
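Transformation properties such as fullcopy and toplevel_trans are usually set through the options dictionary when applying the transformation to an SDFG. A hedged sketch follows, reusing the sdfg from the earlier axpy example; exact matching behavior depends on the SDFG and the dace version.

from dace.transformation.dataflow import GPUTransformMap

# Property values declared on the transformation class are passed by name
# through the `options` dictionary.
sdfg.apply_transformations(GPUTransformMap,
                           options={'fullcopy': True,
                                    'toplevel_trans': True})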
github spcl / dace / dace / transformation / dataflow / gpu_transform_local_storage.py View on Github external
@registry.autoregister_params(singlestate=True)
@make_properties
class GPUTransformLocalStorage(pattern_matching.Transformation):
    """Implements the GPUTransformLocalStorage transformation.

        Similar to GPUTransformMap, but takes multiple maps leading from the
        same data node into account, creating local storage for each range.

        @see: GPUTransformMap
    """

    _arrays_removed = 0
    _maps_transformed = 0

    fullcopy = Property(desc="Copy whole arrays rather than used subset",
                        dtype=bool,
                        default=False)

    nested_seq = Property(
        desc="Makes nested code semantically-equivalent to single-core code,"
        "transforming nested maps and memory into sequential and "
        "local memory respectively.",
        dtype=bool,
        default=True,
    )

    _map_entry = nodes.MapEntry(nodes.Map("", [], []))

    import dace.libraries.standard as stdlib  # Avoid import loop
    _reduce = stdlib.Reduce("lambda: None", None)
github spcl / dace / dace / graph / edges.py View on Github external
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.assignment = None
        self.entry = None
        self.back = None
        self.exit = None


class ControlFlow:
    pass


@make_properties
class LoopAssignment(ControlFlow):

    scope = Property(dtype=LoopScope, allow_none=True)
    edge = Property(dtype=Edge, allow_none=True)

    def __init__(self, scope, edge, *args, **kwargs):
        self.scope = scope
        self.edge = edge
        scope.assignment = self
        super().__init__(*args, **kwargs)


@make_properties
class LoopEntry(ControlFlow):

    scope = Property(dtype=LoopScope, allow_none=True)
    edge = Property(dtype=Edge, allow_none=True)

    def __init__(self, scope, edge, *args, **kwargs):
github spcl / dace / dace / graph / nodes.py View on Github external
ConsumeEntry = indirect_properties(Consume,
                                   lambda obj: obj.consume)(ConsumeEntry)

# ------------------------------------------------------------------------------


@make_properties
class Reduce(Node):
    """ An SDFG node that reduces an N-dimensional array to an
        (N-k)-dimensional array, with a list of axes to reduce and
        a reduction binary function. """

    # Properties
    axes = ListProperty(element_type=int, allow_none=True)
    wcr = LambdaProperty(default='lambda a,b: a')
    identity = Property(dtype=object, allow_none=True)
    schedule = Property(dtype=dtypes.ScheduleType,
                        desc="Reduction execution policy",
                        choices=dtypes.ScheduleType,
                        from_string=lambda x: dtypes.ScheduleType[x],
                        default=dtypes.ScheduleType.Default)
    debuginfo = DebugInfoProperty()

    instrument = Property(
        choices=dtypes.InstrumentationType,
        desc="Measure execution statistics with given method",
        default=dtypes.InstrumentationType.No_Instrumentation)

    def __init__(self,
                 wcr,
                 axes,
                 wcr_identity=None,
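Constructed nodes expose these properties as regular attributes as well; for instance, the enum-valued schedule property (declared with choices and from_string above) accepts ScheduleType members. The hedged sketch below follows the import layout of this snippet; newer dace versions provide Reduce as a library node under dace.libraries.standard instead.

from dace import dtypes
from dace.graph.nodes import Reduce

red = Reduce('lambda a, b: a + b', axes=[0])   # wcr is a LambdaProperty, axes a ListProperty
red.identity = 0                               # Property(dtype=object, allow_none=True)
red.schedule = dtypes.ScheduleType.GPU_Device  # choices-based Property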
github spcl / dace / dace / transformation / dataflow / split_tiling.py View on Github external
@registry.autoregister_params(singlestate=True)
@make_properties
class SplitMapTiling(pattern_matching.Transformation):
    """ Implements the orthogonal split tiling transformation.

        Orthogonal split tiling is a type of nested map fission that creates
        tiles in every dimension of the matched Map. The difference from
        regular tiling is that it splits out the last, potentially
        imperfect tile into a separate map.
    """

    _map_entry = nodes.MapEntry(nodes.Map("", [], []))

    # Properties
    prefix = Property(dtype=str,
                      default="tile",
                      desc="Prefix for new range symbols")
    tile_sizes = ShapeProperty(dtype=tuple,
                               default=(128, 128, 128),
                               desc="Tile size per dimension")
    # strides = ShapeProperty(
    #     dtype=tuple,
    #     default=tuple(),
    #     desc="Tile stride (enables overlapping tiles). If empty, matches tile")
    # divides_evenly = Property(dtype=bool,
    #                           default=False,
    #                           desc="Tile size divides dimension length evenly")

    @staticmethod
    def annotates_memlets():
        return True