How to use the dace.subsets.Range class in dace

To help you get started, we’ve selected a few dace examples based on popular ways dace.subsets.Range is used in public projects.

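Before the examples, here is a minimal, self-contained sketch of how dace.subsets.Range is constructed: one inclusive (begin, end, step) tuple per dimension. The symbol name N is purely illustrative.

import dace
from dace import subsets

# Illustrative symbolic dimension size.
N = dace.symbol('N')

# One (begin, end, step) tuple per dimension; the end bound is inclusive,
# so (0, N - 1, 1) covers N elements.
rng = subsets.Range([(0, N - 1, 1), (0, 9, 1)])

print(rng.size())   # -> [N, 10]
print(rng.dims())   # -> 2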

github spcl / dace / dace / transformation / dataflow / distributed_storage.py
        midx = mentry.map.params
        mspace = mentry.map.range

        space = sdfg.spaces[self.space]
        dims = len(mspace)
        pdims = len(space.process_grid)

        inv_mapping = {}
        for k, v in self.iterationspace_mapping.items():
            inv_mapping[v] = k

        pidx = ['p_' + str(i) for i in range(pdims)]
        for k, v in inv_mapping.items():
            pidx[k] = 'p_' + midx[v]
        pspace = [(0, s-1, 1) for s in space.process_grid]
        pmap = nodes.Map('p_' + mname, pidx, subsets.Range(pspace))
        pentry = nodes.MapEntry(pmap)
        pexit = nodes.MapExit(pmap)

        lidx = ['l_' + idx for idx in midx]
        oidx = [idx for idx in midx]
        lspace = [None] * dims
        ospace = [None] * dims
        for k, v in self.iterationspace_mapping.items():
            lspace[k] = (
                0, "int_ceil({} - {} + 1, ({}) * ({}))".format(
                    mspace[k][1], mspace[k][0], space.block_sizes[v],
                    space.process_grid[v]), 1)
            ospace[k] = (
                "{} + ({} * ({}) + {}) * ({})".format(
                    mspace[k][0], lidx[k], space.process_grid[v],
                    pidx[v], space.block_sizes[v]),
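
The snippet above builds the process-space range with one (0, size - 1, 1) triple per grid dimension. A minimal sketch of just that Range construction, with a hypothetical 2x3 process grid standing in for space.process_grid:

from dace import subsets

# Hypothetical process grid, standing in for space.process_grid above.
process_grid = (2, 3)

# Same pattern as the snippet: one inclusive (0, s - 1, 1) triple per dimension.
pspace = [(0, s - 1, 1) for s in process_grid]
prange = subsets.Range(pspace)

print(prange.size())   # -> [2, 3]
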
github spcl / dace / dace / frontend / python / newast.py
        inp_base_path.insert(0, ind_entry)
        out_base_path.append(ind_exit)

    input_index_memlets = []
    for arrname, arr_accesses in accesses.items():
        arr_name = arrname
        for i, access in enumerate(arr_accesses):
            if isinstance(access, (list, tuple)):
                access = access[0]
            if isinstance(access, sympy.Tuple):
                access = list(access)
            if not isinstance(access, (list, tuple)):
                access = [access]
            conn = None
            if PVisitor.nested:
                arr_rng = dace.subsets.Range([(a, a, 1) for a in access])
                if output:
                    arrname = PVisitor._add_write_access(arr_name,
                                                         arr_rng,
                                                         target=None)
                else:
                    arrname = PVisitor._add_read_access(arr_name,
                                                        arr_rng,
                                                        target=None)
                access = [0] * len(access)
                conn = 'index_%s_%d' % (arr_name, i)
            arr = sdfg.arrays[arrname]
            # Memlet to load the indirection index
            indexMemlet = Memlet(arrname, 1, subsets.Indices(access), 1)
            input_index_memlets.append(indexMemlet)
            read_node = graph.add_read(arrname)
            if PVisitor.nested or not isinstance(src, nodes.EntryNode):
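
Here each indirection index is wrapped in a degenerate range (a, a, 1), i.e. a single point per dimension. A small sketch of that pattern, with illustrative concrete indices:

from dace import subsets

# Illustrative single (i, j) access.
access = [2, 5]

# A degenerate (a, a, 1) triple per dimension selects exactly one element.
point_rng = subsets.Range([(a, a, 1) for a in access])
point_idx = subsets.Indices(access)

print(point_rng.size())   # -> [1, 1]
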
github spcl / dace / dace / transformation / dataflow / strip_mining.py
                    approx = m_range[i].approx
                else:
                    exact = m_range[i]
                    approx = overapproximate(m_range[i])
                if isinstance(new_range[i], SymExpr):
                    new_range[i] = SymExpr(
                        new_range[i].expr.subs([(symbol, exact)]),
                        new_range[i].approx.subs([(symbol, approx)]))
                elif issymbolic(new_range[i]):
                    new_range[i] = SymExpr(
                        new_range[i].subs([(symbol, exact)]),
                        new_range[i].subs([(symbol, approx)]))
                else:
                    new_range[i] = SymExpr(new_range[i], new_range[i])
        image.append(new_range)
    return subsets.Range(image)
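
The helper above substitutes the strip-mined symbol into each bound (wrapping exact/approximate pairs in SymExpr) before building the final Range. A simplified sketch of the substitution step, using illustrative symbols i and T:

import dace
from dace import subsets

i = dace.symbol('i')
T = dace.symbol('T')

# A bound expressed in terms of the map symbol i ...
rb, re, rs = i * T, (i + 1) * T - 1, 1

# ... with i substituted by a concrete value, mirroring the subs([...]) calls above.
rng = subsets.Range([(rb.subs([(i, 0)]), re.subs([(i, 0)]), rs)])
print(rng)   # e.g. "0:T"
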
github spcl / dace / dace / transformation / dataflow / strip_mining.py
            td_to_new_exact = symbolic.pystr_to_symbolic(
                'min(%s + 1, %s + %s * %s + %s) - 1' %
                (symbolic.symstr(td_to), symbolic.symstr(td_from), tile_stride,
                 str(new_dim), tile_size))
            td_to_new_approx = symbolic.pystr_to_symbolic(
                '%s + %s * %s + %s - 1' %
                (symbolic.symstr(td_from), tile_stride, str(new_dim),
                 tile_size))
        if divides_evenly or strided:
            td_to_new = td_to_new_approx
        else:
            td_to_new = dace.symbolic.SymExpr(td_to_new_exact,
                                              td_to_new_approx)
        # Special case: If range is 1 and no prefix was specified, skip range
        if td_from_new == td_to_new_approx and target_dim == new_dim:
            map_entry.map.range = subsets.Range(
                [r for i, r in enumerate(map_entry.map.range) if i != dim_idx])
            map_entry.map.params = [
                p for i, p in enumerate(map_entry.map.params) if i != dim_idx
            ]
            if len(map_entry.map.params) == 0:
                raise ValueError('Strip-mining all dimensions of the map with '
                                 'empty tiles is disallowed')
        else:
            map_entry.map.range[dim_idx] = (td_from_new, td_to_new, td_step)

        # Make internal map's schedule to "not parallel"
        new_map.schedule = map_entry.map.schedule
        map_entry.map.schedule = dtypes.ScheduleType.Sequential

        # Redirect edges
        new_map_entry.in_connectors = dcpy(map_entry.in_connectors)
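
Two Range manipulations appear above: dropping a dimension by filtering map_entry.map.range, and replacing a single dimension's bounds. A sketch of both on a standalone Range (dim_idx and the tiled bounds are illustrative):

import dace
from dace import subsets

N = dace.symbol('N')
tile = dace.symbol('tile')
rng = subsets.Range([(0, N - 1, 1), (0, 63, 1)])

# Drop dimension 1, the same filtering pattern applied to the map range above.
dim_idx = 1
reduced = subsets.Range([r for i, r in enumerate(rng) if i != dim_idx])

# Replace dimension 0 by rebuilding from the (begin, end, step) tuples.
ranges = list(rng.ranges)
ranges[0] = (tile * 64, tile * 64 + 63, 1)
retiled = subsets.Range(ranges)
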
github spcl / dace / dace / subsets.py
    @staticmethod
    def from_array(array: 'dace.data.Data'):
        """ Constructs a range that covers the full array given as input. """
        return Range([(0, s - 1, 1) for s in array.shape])
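
from_array is the convenience form of the (0, s - 1, 1) pattern used throughout these examples. A small usage sketch against an array descriptor registered in a throwaway SDFG (names are illustrative):

import dace
from dace import subsets

sdfg = dace.SDFG('range_example')
sdfg.add_array('A', [10, 20], dace.float64)

# Covers all of A, equivalent to Range([(0, 9, 1), (0, 19, 1)]).
full = subsets.Range.from_array(sdfg.arrays['A'])
print(full.size())   # -> [10, 20]
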
github spcl / dace / dace / frontend / python / newast.py
            src = graph.add_access(tmp_name)
        else:
            src = graph.add_read(tmp_name)
    elif dst is None:
        if end_dst:
            dst = graph.add_access(tmp_name)
        else:
            dst = graph.add_write(tmp_name)

    tmp_shape = storage.shape
    indirectRange = subsets.Range([(0, s - 1, 1) for s in tmp_shape])
    if ind_entry:  # Amend indirected range
        indirectRange = ','.join([ind for ind in ind_entry.map.params])

    # Create memlet that depends on the full array that we look up in
    fullRange = subsets.Range([(0, s - 1, 1) for s in array.shape])
    fullMemlet = Memlet(memlet.data, memlet.num_accesses, fullRange,
                        memlet.veclen)

    if output:
        if isinstance(dst, nodes.ExitNode):
            full_write_node = graph.add_write(memlet.data)
            path = out_base_path + [dst, full_write_node]
        elif isinstance(dst, nodes.AccessNode):
            path = out_base_path + [dst]
        else:
            raise Exception("Src node type for indirection is invalid.")
        graph.add_memlet_path(*path,
                              src_conn='__ind_' + local_name,
                              memlet=fullMemlet)
    else:
        if isinstance(src, nodes.EntryNode):
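
Both indirectRange and fullRange above simply cover an entire shape. A sketch of the same construction, plus the equivalent string form (the shape is illustrative; note the string notation is half-open while the tuple notation is inclusive):

from dace import subsets

array_shape = (128, 128)

full_range = subsets.Range([(0, s - 1, 1) for s in array_shape])
parsed = subsets.Range.from_string('0:128, 0:128')

print(full_range.size())   # -> [128, 128]
print(parsed.size())       # -> [128, 128]
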
github spcl / dace / dace / frontend / octave / ast_arrayaccess.py
                rangelist.append((acc.get_value() - 1, acc.get_value() - 1, 1))
            elif isinstance(acc, AST_RangeExpression):
                if isinstance(acc.lhs, AST_Constant) and isinstance(
                        acc.rhs, AST_Constant):
                    l = acc.lhs.get_value()
                    r = acc.rhs.get_value()
                    rangelist.append((l, r, 1))
                else:
                    raise NotImplementedError(
                        "range with non-constant bounds not supported: " +
                        str(self))
            else:
                raise NotImplementedError(
                    "Non-constant array indexing not implemented: " +
                    str(self))
        ret = dace.subsets.Range(rangelist)
        return ret
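
The Octave frontend shifts 1-based, inclusive MATLAB-style indices to the 0-based, inclusive tuples that Range expects. A sketch of that conversion for an access like A(3, 2:5) (values are illustrative):

from dace import subsets

# 1-based scalar index and 1-based inclusive range bounds.
scalar_index = 3
range_lo, range_hi = 2, 5

rangelist = [
    (scalar_index - 1, scalar_index - 1, 1),   # A(3) -> (2, 2, 1)
    (range_lo - 1, range_hi - 1, 1),           # 2:5  -> (1, 4, 1)
]
print(subsets.Range(rangelist).size())   # -> [1, 4]
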
github spcl / dace / dace / frontend / python / astparser.py
                self.program = astnodes._ProgramNode(node.name, node)
                curprim = self.program

            # Parse primitives
            # Dataflow primitives
            elif decname.endswith('map'):
                curprim = astnodes._MapNode(node.name, node)

                # If the arguments are defined in the decorator
                if 'args' in dir(dec) and len(dec.args) > 0:
                    curprim.range = subsets.Range(
                        subscript_to_slice(dec.args[0], arrays)[1])
                else:
                    try:
                        curprim.range = subsets.Range([
                            subscript_to_slice(arg.annotation, arrays)[1][0]
                            for arg in node.args.args
                        ])
                    except (AttributeError, TypeError, ValueError):
                        raise DaCeSyntaxError(
                            self, node,
                            'All arguments in DaCe primitive %s must be annotated with a range'
                            % node.name)
                self._add_possible_inputs(curprim.range, curprim)

            elif decname.endswith('consume'):
                curprim = astnodes._ConsumeNode(node.name, node)

                # If the arguments are defined in the decorator
                if 'args' in dir(dec) and len(dec.args) > 0:
                    if dec.args[0].id not in self.curnode.globals:
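
The parser above turns decorator or annotation subscripts into map ranges. A sketch of the end result for a two-dimensional map, with symbolic bounds parsed from strings (N and M are illustrative names):

from dace import subsets, symbolic

N = symbolic.pystr_to_symbolic('N')
M = symbolic.pystr_to_symbolic('M')

# A 2D iteration range like the ones assigned to curprim.range above.
map_range = subsets.Range([(0, N - 1, 1), (0, M - 1, 1)])
print(map_range.dims())   # -> 2
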
github spcl / dace / dace / codegen / targets / immaterial.py
    def memlet_view_ctor(self, sdfg, memlet, direction):
        useskip = False
        memlet_params = []

        memlet_name = memlet.data
        if isinstance(sdfg.arrays[memlet.data], data.Scalar):
            raise ValueError("This should never have happened")

        if isinstance(memlet.subset, subsets.Indices):
            # Compute address
            memlet_params.append(cpp_array_expr(sdfg, memlet, False))
            dims = 0

        elif isinstance(memlet.subset, subsets.Range):
            dims = len(memlet.subset.ranges)
            #memlet_params.append("")

            # Dimensions to remove from view (due to having one value)
            indexdims = []
            nonIndexDims = []

            for dim, (rb, re, rs) in enumerate(memlet.subset.ranges):
                if rs != 1:
                    useskip = True
                try:
                    if (re - rb) == 0:
                        indexdims.append(dim)
                    else:
                        nonIndexDims.append(dim)
                except TypeError:  # cannot determine truth value of Relational
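
The code generator distinguishes Indices from Range and then walks each (begin, end, step) triple to find strided and size-1 dimensions. A standalone sketch of that inspection (the subset itself is illustrative):

from dace import subsets

sub = subsets.Range([(0, 0, 1), (0, 31, 2)])

if isinstance(sub, subsets.Indices):
    print('single point')
elif isinstance(sub, subsets.Range):
    for dim, (rb, re, rs) in enumerate(sub.ranges):
        strided = (rs != 1)           # dimension uses a non-unit step
        index_dim = ((re - rb) == 0)  # dimension has a single value
        print(dim, strided, index_dim)
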
github spcl / dace / dace / sdfg / propagation.py
    def match(self, expressions, variable_context, node_range, orig_edges):
        # Assuming correct dimensionality in each of the expressions
        data_dims = len(expressions[0])
        self.patterns_per_dim = [None] * data_dims

        overapprox_range = subsets.Range([
            (rb.approx if isinstance(rb, symbolic.SymExpr) else rb,
             re.approx if isinstance(re, symbolic.SymExpr) else re,
             rs.approx if isinstance(rs, symbolic.SymExpr) else rs)
            for rb, re, rs in node_range
        ])

        for dim in range(data_dims):

            dexprs = []
            for expr in expressions:
                if isinstance(expr[dim], symbolic.SymExpr):
                    dexprs.append(expr[dim].approx)
                elif isinstance(expr[dim], tuple):
                    dexprs.append(
                        (expr[dim][0].approx if isinstance(
                            expr[dim][0], symbolic.SymExpr) else expr[dim][0],
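
match() first strips any exact/approximate SymExpr bounds in node_range down to their over-approximations. A sketch of that step on a standalone Range, built the same way strip-mining builds its bounds (the expressions are illustrative):

from dace import subsets, symbolic

exact = symbolic.pystr_to_symbolic('min(N, 255)')
approx = symbolic.pystr_to_symbolic('255')
end = symbolic.SymExpr(exact, approx)

node_range = subsets.Range([(0, end, 1)])

# Keep only the over-approximated parts, as in the comprehension above.
overapprox_range = subsets.Range([
    (rb.approx if isinstance(rb, symbolic.SymExpr) else rb,
     re.approx if isinstance(re, symbolic.SymExpr) else re,
     rs.approx if isinstance(rs, symbolic.SymExpr) else rs)
    for rb, re, rs in node_range
])
print(overapprox_range)   # e.g. "0:256"
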