How to use the datashader.reductions.Reduction class in datashader

To help you get started, we’ve selected a few datashader examples based on popular ways the Reduction class is used in public projects.
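
A Reduction instance is typically passed as the aggregator when rasterizing with a datashader Canvas. As a minimal sketch (the dataframe and its 'x', 'y' and 'value' columns are illustrative placeholders, not taken from the projects shown below):

import pandas as pd
import datashader as ds

# Placeholder data; 'x', 'y' and 'value' are illustrative column names.
df = pd.DataFrame({'x': [0.1, 0.4, 0.7], 'y': [0.2, 0.5, 0.9], 'value': [1.0, 2.0, 3.0]})

canvas = ds.Canvas(plot_width=300, plot_height=300)
# Any Reduction subclass (count, any, sum, mean, max, min, var, std, count_cat, ...)
# can be supplied as the aggregator.
agg = canvas.points(df, 'x', 'y', agg=ds.mean('value'))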


github holoviz / holoviews / holoviews / operation / datashader.py
        if not element.vdims and agg.column is None and not isinstance(agg, (rd.count, rd.any)):
            return ds.any()
        return super(contours_rasterize, self)._get_aggregator(element, add_field)



class trimesh_rasterize(aggregate):
    """
    Rasterize the TriMesh element using the supplied aggregator. If
    the TriMesh nodes or edges define a value dimension, will plot
    filled and shaded polygons; otherwise returns a wiremesh of the
    data.
    """

    aggregator = param.ClassSelector(default=ds.mean(),
                                     class_=(ds.reductions.Reduction, basestring))

    interpolation = param.ObjectSelector(default='bilinear',
                                         objects=['bilinear', 'linear', None, False], doc="""
        The interpolation method to apply during rasterization.""")

    def _precompute(self, element, agg):
        from datashader.utils import mesh
        if element.vdims and getattr(agg, 'column', None) not in element.nodes.vdims:
            simplices = element.dframe([0, 1, 2, 3])
            verts = element.nodes.dframe([0, 1])
        elif element.nodes.vdims:
            simplices = element.dframe([0, 1, 2])
            verts = element.nodes.dframe([0, 1, 3])
        for c, dtype in zip(simplices.columns[:3], simplices.dtypes):
            if dtype.kind != 'i':
                simplices[c] = simplices[c].astype('int')
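
For context, the trimesh_rasterize operation above is normally reached through HoloViews' rasterize operation. A hedged sketch, where the tiny TriMesh constructed below is purely illustrative:

import holoviews as hv
import datashader as ds
from holoviews.operation.datashader import rasterize

# Illustrative TriMesh: three nodes carrying a 'z' value dimension and one triangle.
nodes = hv.Points([(0, 0, 1.0), (1, 0, 2.0), (0, 1, 3.0)], vdims='z')
trimesh = hv.TriMesh(([(0, 1, 2)], nodes))

# Because the nodes define a value dimension, this produces a filled/shaded raster.
img = rasterize(trimesh, aggregator=ds.mean('z'), dynamic=False)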

github pyviz-dev / nbsite / examples / sites / holoviews / holoviews / operation / datashader.py
    but other aggregators can be supplied implementing mean, max, min
    and other reduction operations.

    The bins of the aggregate are defined by the width and height and
    the x_range and y_range. If x_sampling or y_sampling are supplied
    the operation will ensure that a bin is no smaller than the minimum
    sampling distance by reducing the width and height when zoomed in
    beyond the minimum sampling distance.

    By default, the PlotSize stream is applied when this operation
    is used dynamically, which means that the height and width
    will automatically be set to match the inner dimensions of
    the linked plot.
    """

    aggregator = param.ClassSelector(class_=ds.reductions.Reduction,
                                     default=ds.count())

    @classmethod
    def get_agg_data(cls, obj, category=None):
        """
        Reduces any Overlay or NdOverlay of Elements into a single
        xarray Dataset that can be aggregated.
        """
        paths = []
        if isinstance(obj, Graph):
            obj = obj.edgepaths
        kdims = list(obj.kdims)
        vdims = list(obj.vdims)
        dims = obj.dimensions()[:2]
        if isinstance(obj, Path):
            glyph = 'line'
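
The docstring above describes how the aggregate bins are controlled by the width/height, the axis ranges and the optional sampling distances. A hedged sketch passing those parameters through rasterize (the random points and parameter values are placeholders):

import numpy as np
import pandas as pd
import holoviews as hv
import datashader as ds
from holoviews.operation.datashader import rasterize

points = hv.Points(pd.DataFrame({'x': np.random.rand(1000), 'y': np.random.rand(1000)}))

img = rasterize(points, width=400, height=400,
                x_range=(0, 1), y_range=(0, 1),
                x_sampling=0.01, y_sampling=0.01,
                aggregator=ds.count(), dynamic=False)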

github pyviz-topics / EarthSim / earthsim / analysis.py
from geoviews import Points, WMTS, Path
from holoviews.streams import PolyDraw, PolyEdit, PointerX
from holoviews import Curve, NdOverlay, DynamicMap, VLine, Image, TriMesh
from holoviews.operation.datashader import datashade, rasterize
from holoviews.util import Dynamic


class LineCrossSection(param.Parameterized):
    """
    LineCrossSection rasterizes any HoloViews element and takes
    cross-sections of the resulting Image along poly-lines drawn
    using the PolyDraw tool.
    """

    aggregator = param.ClassSelector(class_=ds.reductions.Reduction,
                                     default=ds.mean())

    tile_url = param.String(default='http://c.tile.openstreetmap.org/{Z}/{X}/{Y}.png',
                            doc="URL for the tile source", precedence=-1)

    resolution = param.Number(default=1000, doc="""
        Distance between samples in meters. Used for interpolation
        of the cross-section paths.""")

    _num_objects = None

    def __init__(self, obj, paths=None, **params):
        super(LineCrossSection, self).__init__(**params)
        self.obj = obj
        paths = [] if paths is None else paths
        self.path = Path(paths, crs=ccrs.GOOGLE_MERCATOR)
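
A hedged usage sketch for the class above; the hv.Image element and its 'depth' dimension are stand-ins for a real projected dataset:

import numpy as np
import holoviews as hv
import datashader as ds
from earthsim.analysis import LineCrossSection

# Illustrative element; a real workflow would pass a projected geographic dataset.
image = hv.Image(np.random.rand(10, 10), vdims='depth')
sections = LineCrossSection(image, aggregator=ds.max('depth'), resolution=500)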

github holoviz / datashader / datashader / reductions.py
"""
    _dshape = dshape(Option(ct.float64))

    @property
    def _bases(self):
        return (sum(self.column), count(self.column))

    @staticmethod
    def _finalize(bases, **kwargs):
        sums, counts = bases
        with np.errstate(divide='ignore', invalid='ignore'):
            x = sums/counts
        return xr.DataArray(x, **kwargs)


class var(Reduction):
    """Variance of all elements in ``column``.

    Parameters
    ----------
    column : str
        Name of the column to aggregate over. Column data type must be numeric.
        ``NaN`` values in the column are skipped.
    """
    _dshape = dshape(Option(ct.float64))

    @property
    def _bases(self):
        return (sum(self.column), count(self.column), m2(self.column))

    @staticmethod
    def _finalize(bases, **kwargs):
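
The first reduction in the snippet above is finalized as sums/counts, i.e. a mean computed from its sum and count bases. A small sketch on placeholder data showing the same equivalence through the Canvas API:

import numpy as np
import pandas as pd
import datashader as ds

df = pd.DataFrame({'x': np.random.rand(100), 'y': np.random.rand(100),
                   'value': np.random.rand(100)})
cvs = ds.Canvas(plot_width=10, plot_height=10)

mean_agg = cvs.points(df, 'x', 'y', ds.mean('value'))
sum_agg = cvs.points(df, 'x', 'y', ds.sum('value'))
count_agg = cvs.points(df, 'x', 'y', ds.count('value'))

# mean is finalized as sum/count; empty bins are NaN in both results.
assert np.allclose(mean_agg.values, (sum_agg / count_agg).values, equal_nan=True)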

github holoviz / datashader / datashader / reductions.py
    @staticmethod
    @ngjit
    def _append_non_na(x, y, agg, field):
        if not np.isnan(field):
            agg[y, x] = True

    @staticmethod
    def _create(shape):
        return np.zeros(shape, dtype='bool')

    @staticmethod
    def _combine(aggs):
        return aggs.sum(axis=0, dtype='bool')


class FloatingReduction(Reduction):
    """Base classes for reductions that always have floating-point dtype."""
    _dshape = dshape(Option(ct.float64))

    @staticmethod
    def _create(shape):
        return np.full(shape, np.nan, dtype='f8')

    @staticmethod
    def _finalize(bases, **kwargs):
        return xr.DataArray(bases[0], **kwargs)


class WeightedReduction(FloatingReduction):
    """FloatingReduction, to be interpolated along each rasterized primitive.
    """
    pass
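
The _append_non_na, _create and _combine methods above implement the boolean any reduction. A sketch of its two calling modes on placeholder data:

import numpy as np
import pandas as pd
import datashader as ds

df = pd.DataFrame({'x': np.random.rand(100), 'y': np.random.rand(100),
                   'value': np.random.rand(100)})
cvs = ds.Canvas(plot_width=10, plot_height=10)

# Without a column, any() marks every bin that received a point;
# with a column, only bins that received a non-NaN 'value' are marked.
hit_any = cvs.points(df, 'x', 'y', ds.any())
hit_value = cvs.points(df, 'x', 'y', ds.any('value'))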

github holoviz / datashader / datashader / reductions.py
"""
    _dshape = dshape(Option(ct.float64))

    @property
    def _bases(self):
        return (sum(self.column), count(self.column), m2(self.column))

    @staticmethod
    def _finalize(bases, **kwargs):
        sums, counts, m2s = bases
        with np.errstate(divide='ignore', invalid='ignore'):
            x = m2s/counts
        return xr.DataArray(x, **kwargs)


class std(Reduction):
    """Standard Deviation of all elements in ``column``.

    Parameters
    ----------
    column : str
        Name of the column to aggregate over. Column data type must be numeric.
        ``NaN`` values in the column are skipped.
    """
    _dshape = dshape(Option(ct.float64))

    @property
    def _bases(self):
        return (sum(self.column), count(self.column), m2(self.column))

    @staticmethod
    def _finalize(bases, **kwargs):
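
std above shares var's (sum, count, m2) bases and amounts to the square root of the variance. A sketch checking that relationship on placeholder data:

import numpy as np
import pandas as pd
import datashader as ds

df = pd.DataFrame({'x': np.random.rand(100), 'y': np.random.rand(100),
                   'value': np.random.rand(100)})
cvs = ds.Canvas(plot_width=10, plot_height=10)

var_agg = cvs.points(df, 'x', 'y', ds.var('value'))
std_agg = cvs.points(df, 'x', 'y', ds.std('value'))

# std == sqrt(var) per bin; empty bins are NaN in both.
assert np.allclose(std_agg.values, np.sqrt(var_agg.values), equal_nan=True)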

github holoviz / datashader / datashader / reductions.py
        ``NaN`` values in the column are skipped.
    """
    @staticmethod
    @ngjit
    def _append(x, y, agg, field):
        if np.isnan(agg[y, x]):
            agg[y, x] = field
        elif agg[y, x] < field:
            agg[y, x] = field

    @staticmethod
    def _combine(aggs):
        return np.nanmax(aggs, axis=0)


class count_cat(Reduction):
    """Count of all elements in ``column``, grouped by category.

    Parameters
    ----------
    column : str
        Name of the column to aggregate over. Column data type must be
        categorical. Resulting aggregate has an outer dimension axis along the
        categories present.
    """
    def validate(self, in_dshape):
        if not isinstance(in_dshape.measure[self.column], ct.Categorical):
            raise ValueError("input must be categorical")

    def out_dshape(self, input_dshape):
        cats = input_dshape.measure[self.column].categories
        return dshape(Record([(c, ct.int32) for c in cats]))
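
count_cat requires a pandas categorical column and, as the out_dshape above suggests, yields an aggregate with an extra dimension along the categories. A sketch on placeholder data:

import numpy as np
import pandas as pd
import datashader as ds

df = pd.DataFrame({'x': np.random.rand(100), 'y': np.random.rand(100),
                   'cat': pd.Categorical(np.random.choice(['a', 'b', 'c'], 100))})
cvs = ds.Canvas(plot_width=10, plot_height=10)

# The result gains an extra 'cat' dimension, one slice per category.
by_cat = cvs.points(df, 'x', 'y', ds.count_cat('cat'))
print(by_cat.dims)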

github holoviz / datashader / datashader / reductions.py
    @staticmethod
    def _combine(aggs):
        return aggs.sum(axis=0, dtype='i4')

    def _build_finalize(self, dshape):
        cats = list(dshape[self.column].categories)

        def finalize(bases, **kwargs):
            dims = kwargs['dims'] + [self.column]
            coords = kwargs['coords'] + [cats]
            return xr.DataArray(bases[0], dims=dims, coords=coords)
        return finalize


class mean(Reduction):
    """Mean of all elements in ``column``.

    Parameters
    ----------
    column : str
        Name of the column to aggregate over. Column data type must be numeric.
        ``NaN`` values in the column are skipped.
    """
    _dshape = dshape(Option(ct.float64))

    @property
    def _bases(self):
        return (sum(self.column), count(self.column))

    @staticmethod
    def _finalize(bases, **kwargs):

github holoviz / datashader / datashader / reductions.py
        return ()

    def _build_create(self, dshape):
        return self._create

    def _build_append(self, dshape):
        return self._append

    def _build_combine(self, dshape):
        return self._combine

    def _build_finalize(self, dshape):
        return self._finalize


class OptionalFieldReduction(Reduction):
    """Base class for things like ``count`` or ``any``"""
    def __init__(self, column=None):
        self.column = column

    @property
    def inputs(self):
        return (extract(self.column),) if self.column else ()

    def validate(self, in_dshape):
        pass

    def _build_append(self, dshape):
        return self._append if self.column is None else self._append_non_na

    @staticmethod
    def _finalize(bases, **kwargs):
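
OptionalFieldReduction above is the base for reductions such as count and any whose column is optional: with no column they consider every row, with a column they skip NaN values. A sketch of both modes on placeholder data:

import numpy as np
import pandas as pd
import datashader as ds

df = pd.DataFrame({'x': np.random.rand(100), 'y': np.random.rand(100),
                   'value': np.random.rand(100)})
df.loc[::10, 'value'] = np.nan  # make some values missing so the two counts differ
cvs = ds.Canvas(plot_width=10, plot_height=10)

rows_per_bin = cvs.points(df, 'x', 'y', ds.count())           # counts every row per bin
values_per_bin = cvs.points(df, 'x', 'y', ds.count('value'))  # skips NaN 'value' rows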