How to use numba.types.Array in numba

To help you get started, we've selected a few numba.types.Array examples based on popular ways it is used in public projects.

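Before the project examples, here is a minimal, self-contained sketch of constructing a types.Array and using it in an explicit signature for eager compilation (the function name total_1d and the values are illustrative, not taken from the projects below):

import numpy as np
from numba import njit, types

# The Numba type for a 1-D, C-contiguous float64 array.
f64_1d = types.Array(types.float64, 1, 'C')
print(f64_1d)                    # array(float64, 1d, C)

# An Array type can appear in an explicit signature for eager compilation.
@njit(types.float64(f64_1d))
def total_1d(a):                 # hypothetical example function
    s = 0.0
    for x in a:
        s += x
    return s

print(total_1d(np.arange(5.0)))  # 10.0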

github IntelPython / sdc / sdc / hiframes / hiframes_typed.py (View on Github)
        if func_node.defaults is not None:
            raise ValueError("rolling apply kernel functions cannot have default arguments")
        # create a function from the code object
        glbs = self.state.func_ir.func_id.func.__globals__
        lcs = {}
        exec("def f(A): return A", glbs, lcs)
        kernel_func = lcs['f']
        kernel_func.__code__ = func_node.code
        kernel_func.__name__ = func_node.code.co_name
        # use hpat's sequential pipeline to enable pandas operations
        # XXX seq pipeline used since dist pass causes a hang
        m = numba.ir_utils._max_label
        impl_disp = numba.njit(
            kernel_func, pipeline_class=sdc.compiler.SDCPipelineSeq)
        # precompile to avoid REP counting conflict in testing
        sig = out_dtype(types.Array(dtype, 1, 'C'))
        impl_disp.compile(sig)
        numba.ir_utils._max_label += m
        return impl_disp
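
The pattern to note above is building a signature whose argument is types.Array(dtype, 1, 'C') and compiling eagerly with the dispatcher's compile method. A minimal sketch of the same idea, with a hypothetical kernel and the default Numba pipeline instead of SDC's:

import numpy as np
import numba
from numba import types

@numba.njit
def kernel(A):                 # hypothetical stand-in for the rolling-apply kernel
    return A.sum()

# Precompile for a 1-D, C-contiguous float64 argument, mirroring impl_disp.compile(sig).
sig = types.float64(types.Array(types.float64, 1, 'C'))
kernel.compile(sig)

print(kernel.signatures)       # one entry: the array argument type
print(kernel(np.arange(4.0)))  # 6.0
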
github numba / numba / numba / targets / arrayobj.py (View on Github)
@lower_builtin('is', types.Array, types.Array)
def array_is(context, builder, sig, args):
    aty, bty = sig.args
    if aty != bty:
        return cgutils.false_bit

    def array_is_impl(a, b):
        return (a.shape == b.shape and
                a.strides == b.strides and
                a.ctypes.data == b.ctypes.data)

    return context.compile_internal(builder, array_is_impl, sig, args)
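
In user code, this lowering means that a is b between two arrays inside an @njit function compares shape, strides, and data pointer. A small illustrative sketch, assuming two C-contiguous float64 arrays:

import numpy as np
from numba import njit

@njit
def same_array(a, b):
    # Lowered by array_is above: shape, strides and data pointer must all match.
    return a is b

x = np.arange(3.0)
print(same_array(x, x))         # True
print(same_array(x, x.copy()))  # False (different data pointer)
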
github numba / numba / numba / compiler.py (View on Github)
def legalize_return_type(return_type, interp, targetctx):
    """
    Only accept array return type iff it is passed into the function.
    """
    assert assume.return_argument_array_only

    if not isinstance(return_type, types.Array):
        return

    # Walk IR to discover all return statements
    retstmts = []
    for bid, blk in interp.blocks.items():
        for inst in blk.body:
            if isinstance(inst, ir.Return):
                retstmts.append(inst)

    assert retstmts, "No return statements?"

    # FIXME: In the future, we can return an array that is either a dynamically
    #        allocated array or an array that is passed as argument.  This
    #        must be statically resolvable.

    # The return value must be the first modification of the value.
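
The key guard here is isinstance(return_type, types.Array). Outside the compiler, the same kind of check can be done on inferred types, e.g. via numba.typeof (a small illustrative sketch):

import numpy as np
from numba import typeof, types

ret_ty = typeof(np.zeros((3, 4)))
print(ret_ty)                                    # array(float64, 2d, C)
print(isinstance(ret_ty, types.Array))           # True
print(ret_ty.dtype, ret_ty.ndim, ret_ty.layout)  # float64 2 C
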
github IntelPython / sdc / sdc / hiframes / aggregate.py (View on Github)
def gen_all_update_func(update_funcs, reduce_var_types, in_col_types,
                        redvar_offsets, typingctx, targetctx, pivot_typ, pivot_values,
                        is_crosstab):

    num_cols = len(in_col_types)
    if pivot_values is not None:
        assert num_cols == 1

    reduce_arrs_tup_typ = types.Tuple([types.Array(t, 1, 'C') for t in reduce_var_types])
    col_tup_typ = types.Tuple(in_col_types)
    arg_typs = (reduce_arrs_tup_typ, col_tup_typ, types.intp, types.intp, pivot_typ)

    # redvar_arrs[0][w_ind], redvar_arrs[1][w_ind] = __update_redvars(
    #              redvar_arrs[0][w_ind], redvar_arrs[1][w_ind], data_in[0][i])

    num_redvars = redvar_offsets[num_cols]

    func_text = "def update_all_f(redvar_arrs, data_in, w_ind, i, pivot_arr):\n"
    if pivot_values is not None:
        func_text += "  pv = pivot_arr[i]\n"
        for j, pv in enumerate(pivot_values):
            el = "el" if j != 0 else ""
            func_text += "  {}if pv == '{}':\n".format(el, pv)  # TODO: non-string pivot
            init_offset = num_redvars * j
            redvar_access = ", ".join(["redvar_arrs[{}][w_ind]".format(i)
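
A note on the types built here: types.Tuple accepts a list of member types, so a tuple of reduction-variable arrays can be typed directly from the element types. A minimal sketch with made-up element types:

from numba import types

reduce_var_types = (types.int64, types.float64)
reduce_arrs_tup_typ = types.Tuple(
    [types.Array(t, 1, 'C') for t in reduce_var_types])
print(reduce_arrs_tup_typ)  # a heterogeneous tuple of two 1-D array types
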
github numba / numba / numba / roc / hsaimpl.py (View on Github)
def _make_array(context, builder, dataptr, dtype, shape, layout='C'):
    ndim = len(shape)
    # Create array object
    aryty = types.Array(dtype=dtype, ndim=ndim, layout='C')
    ary = context.make_array(aryty)(context, builder)

    targetdata = _get_target_data(context)
    lldtype = context.get_data_type(dtype)
    itemsize = lldtype.get_abi_size(targetdata)
    # Compute strides
    rstrides = [itemsize]
    for i, lastsize in enumerate(reversed(shape[1:])):
        rstrides.append(lastsize * rstrides[-1])
    strides = [s for s in reversed(rstrides)]

    kshape = [context.get_constant(types.intp, s) for s in shape]
    kstrides = [context.get_constant(types.intp, s) for s in strides]

    context.populate_array(ary,
                           data=builder.bitcast(dataptr, ary.data.type),
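
As _make_array shows, types.Array can also be constructed with keyword arguments. A standalone sketch:

from numba import types

aryty = types.Array(dtype=types.float32, ndim=2, layout='C')
print(aryty)         # array(float32, 2d, C)
print(aryty.layout)  # C
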
github IntelPython / sdc / hpat / io / xenon_ext.py (View on Github)
        col_items.append((cname, cvar))

        out_nodes += get_column_read_nodes(c_type, cvar, xe_connect_var, xe_dset_var, i, schema_arr_var)

    # we need to close in the URI case since we opened the connection/dataset
    if len(rhs.args) == 1:
        out_nodes += gen_close_xenon(xe_connect_var, xe_dset_var)

    return col_items, out_nodes


_xe_type_to_numba = {'BOOL': types.Array(types.boolean, 1, 'C'),
                     'I8': types.Array(types.char, 1, 'C'),
                     'I16': types.Array(types.int16, 1, 'C'),
                     'I32': types.Array(types.int32, 1, 'C'),
                     'I64': types.Array(types.int64, 1, 'C'),
                     'FLOAT': types.Array(types.float32, 1, 'C'),
                     'DOUBLE': types.Array(types.float64, 1, 'C'),
                     'CHAR': string_array_type,
                     # TODO: handle decimal and blob types
                     }

_type_to_xe_dtype_number = {'int8': 0, 'int16': 1, 'int32': 2, 'int64': 3,
                            'float32': 4, 'float64': 5, 'DECIMAL': 6,
                            'bool_': 7, 'string': 8, 'BLOB': 9}


def get_xe_typ_enum(c_type):
    if c_type == string_array_type:
        return _type_to_xe_dtype_number['string']
    assert isinstance(c_type, types.Array)
    return _type_to_xe_dtype_number[get_element_type(c_type.dtype)]
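
The table above types every Xenon column as a 1-D, C-contiguous array, and get_xe_typ_enum goes the other way by inspecting c_type.dtype. A tiny sketch of that dtype inspection (standalone, without the Xenon enum table):

from numba import types

c_type = types.Array(types.int64, 1, 'C')
assert isinstance(c_type, types.Array)
print(c_type.dtype)  # int64, the element type used to pick the Xenon dtype code
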
github numba / numba / numba / targets / smartarray.py (View on Github)
@lower_builtin('__array_wrap__', types.SmartArrayType, types.Array)
def array_wrap_array(context, builder, sig, args):
    dest = context.make_helper(builder, sig.return_type)
    dest.data = args[1]
    return impl_ret_borrowed(context, builder, sig.return_type, dest._getvalue())
github IntelPython / sdc / sdc / hiframes / pd_series_ext.py (View on Github)
def resolve_append(self, ary, args, kws):
        # TODO: ignore_index
        assert not kws
        arr_typ = if_series_to_array_type(ary)
        other, = args
        if isinstance(other, (SeriesType, types.Array)):
            all_arrs = types.Tuple((arr_typ, if_series_to_array_type(other)))
        elif isinstance(other, types.BaseTuple):
            all_arrs = types.Tuple((arr_typ, *[if_series_to_array_type(a) for a in other.types]))
        elif isinstance(other, (types.List, types.Set)):
            # add only one value from the list for typing since it shouldn't
            # matter for np.concatenate typing
            all_arrs = types.Tuple((arr_typ, if_series_to_array_type(other.dtype)))
        else:
            raise ValueError("Invalid input for Series.append (Series, or tuple/list of Series expected)")

        # TODO: list
        # call np.concatenate to handle type promotion e.g. int, float -> float
        ret_typ = self.context.resolve_function_type(np.concatenate, (all_arrs,), kws).return_type
        ret_typ = if_arr_to_series_type(ret_typ)
        return signature(ret_typ, *args)
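
The dispatch above keys on the Numba type of the other argument. A short illustrative sketch of how those isinstance checks behave on inferred types (assuming NumPy inputs):

import numpy as np
from numba import typeof, types

print(isinstance(typeof(np.ones(3)), types.Array))                    # True
print(isinstance(typeof((np.ones(3), np.ones(3))), types.BaseTuple))  # True
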
github IntelPython / sdc / sdc / io / pio_api.py (View on Github)
def generic(self, args, kws):
        assert not kws
        ndim = args[1].literal_value
        dtype = getattr(types, args[2].literal_value)
        ret_typ = types.Array(dtype, ndim, 'C')
        return signature(ret_typ, *args)
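
The pattern here is resolving a scalar dtype by name with getattr(types, ...) and wrapping it in a C-contiguous Array of the requested rank. A standalone sketch with made-up literal values:

from numba import types

ndim = 2
dtype = getattr(types, 'float64')    # look up a scalar type by name
ret_typ = types.Array(dtype, ndim, 'C')
print(ret_typ)                       # array(float64, 2d, C)
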
github IntelPython / sdc / sdc / datatypes / hpat_pandas_dataframe_pass.py (View on Github)
def _get_csv_col_info(self, dtype_map, date_cols, col_names, lhs):
        if isinstance(dtype_map, types.Type):
            typ = dtype_map
            data_arrs = [ir.Var(lhs.scope, ir_utils.mk_unique_var(cname), lhs.loc)
                         for cname in col_names]
            return col_names, data_arrs, [typ] * len(col_names)

        columns = []
        data_arrs = []
        out_types = []
        for i, (col_name, typ) in enumerate(dtype_map.items()):
            columns.append(col_name)
            # get array dtype
            if i in date_cols:
                typ = types.Array(types.NPDatetime('ns'), 1, 'C')
            out_types.append(typ)
            # output array variable
            data_arrs.append(
                ir.Var(lhs.scope, ir_utils.mk_unique_var(col_name), lhs.loc))

        return columns, data_arrs, out_types
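
Date columns are retyped as 1-D arrays of types.NPDatetime('ns'), which corresponds to NumPy's datetime64[ns]. A small sketch of that type next to the one Numba infers from a concrete array:

import numpy as np
from numba import typeof, types

dt_arr = types.Array(types.NPDatetime('ns'), 1, 'C')
print(dt_arr)  # array(datetime64[ns], 1d, C)

inferred = typeof(np.array(['2020-01-01'], dtype='datetime64[ns]'))
print(inferred == dt_arr)  # expected True for a C-contiguous datetime64[ns] array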