How to use the xarray.broadcast function in xarray

To help you get started, we’ve selected a few examples of xarray.broadcast based on popular ways it is used in public projects.
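
xarray.broadcast takes any number of DataArray or Dataset arguments and returns them as a tuple, one entry per argument, each expanded against the union of the input dimensions. Before the project snippets below, here is a minimal self-contained sketch; the variable names are ours, purely for illustration:

import numpy as np
import xarray as xr

# two 1-D arrays on different dimensions
x = xr.DataArray(np.arange(4), dims='x')
y = xr.DataArray(np.arange(3), dims='y')

# broadcast returns one result per argument, each expanded
# to the union of dimensions, here ('y', 'x')
y2d, x2d = xr.broadcast(y, x)
print(x2d.dims, x2d.shape)   # ('y', 'x') (3, 4)
print(y2d.dims, y2d.shape)   # ('y', 'x') (3, 4)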

Example from dcs4cop/xcube: xcube/core/reproject.py (view on GitHub)

else src_xy_tp_gcp_step

    dst_width, dst_height = dst_size

    _assert(src_dataset is not None)
    _assert(dst_width > 1)
    _assert(dst_height > 1)
    _assert(gcp_i_step > 0)
    _assert(gcp_j_step > 0)

    _assert(x_name in src_dataset)
    _assert(y_name in src_dataset)
    x_var = src_dataset[x_name]
    y_var = src_dataset[y_name]
    if len(x_var.dims) == 1 and len(y_var.dims) == 1:
        y_var, x_var = xr.broadcast(y_var, x_var)
    _assert(len(x_var.dims) == 2)
    _assert(y_var.dims == x_var.dims)
    _assert(x_var.shape[-1] >= 2)
    _assert(x_var.shape[-2] >= 2)
    _assert(y_var.shape == x_var.shape)

    src_width = x_var.shape[-1]
    src_height = x_var.shape[-2]

    dst_region = _ensure_valid_region(dst_region, GLOBAL_GEO_EXTENT, x_var, y_var)
    dst_x1, dst_y1, dst_x2, dst_y2 = dst_region

    dst_res = max((dst_x2 - dst_x1) / dst_width, (dst_y2 - dst_y1) / dst_height)
    _assert(dst_res > 0)

    dst_geo_transform = (dst_x1, dst_res, 0.0,
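
In the snippet above, `y_var, x_var = xr.broadcast(y_var, x_var)` turns 1-D x and y coordinate variables into full 2-D coordinate grids so that the subsequent assertions on two-dimensional coordinates hold, much like numpy.meshgrid but with named dimensions preserved. A rough sketch of that step, with made-up coordinate names rather than xcube's:

import numpy as np
import xarray as xr

lon = xr.DataArray(np.linspace(0.0, 10.0, 5), dims='lon')
lat = xr.DataArray(np.linspace(50.0, 52.0, 3), dims='lat')

# each 1-D coordinate becomes a 2-D grid over ('lat', 'lon')
lat_2d, lon_2d = xr.broadcast(lat, lon)
assert lat_2d.dims == lon_2d.dims == ('lat', 'lon')
assert lat_2d.shape == lon_2d.shape == (3, 5)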

Example from creare-com/podpac: podpac/core/algorithm/algorithm.py (view on GitHub)

Attributes
        ----------
        inputs : dict
            Evaluated outputs of the input nodes. The keys are the attribute names.
        
        Returns
        -------
        UnitsDataArray
            Description
        """
        
        eqn = self.eqn.format(**self.params)        
        
        fields = [f for f in 'ABCDEFG' if getattr(self, f) is not None]
        res = xr.broadcast(*[inputs[f] for f in fields])
        f_locals = dict(zip(fields, res))

        if ne is None:
            result = eval(eqn, f_locals)
        else:
            result = ne.evaluate(eqn, f_locals)
        res = res[0].copy()  # Make an xarray object with correct dimensions
        res[:] = result
        return res
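
The broadcast call is what makes the string expression safe to evaluate elementwise: every input is first expanded to a common set of dimensions, and the raw result is then poured back into a copy of the first broadcast array so it keeps those dimensions. A self-contained sketch of the same pattern, with an invented two-input expression rather than podpac's API:

import numpy as np
import xarray as xr

A = xr.DataArray(np.arange(3.0), dims='lat')
B = xr.DataArray(np.arange(4.0), dims='lon')

# expand both operands to a shared ('lat', 'lon') shape
operands = xr.broadcast(A, B)
f_locals = {name: arr.values for name, arr in zip(['A', 'B'], operands)}

# evaluate on plain arrays, as eval/numexpr would
result = eval('A + 2 * B', {}, f_locals)

# wrap the raw result back into a DataArray with the broadcast dimensions
out = operands[0].copy()
out[:] = result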

Example from creare-com/podpac: podpac/core/algorithm/generic.py (view on GitHub)

UnitsDataArray
            Description
        """

        if not settings.allow_unsafe_eval:
            raise PermissionError(
                "Insecure evaluation of Python code using Arithmetic node has not been allowed. If "
                "this is an error, use: `podpac.settings.set_unsafe_eval(True)`. "
                "NOTE: Allowing unsafe evaluation enables arbitrary execution of Python code through PODPAC "
                "Node definitions."
            )

        eqn = self.eqn.format(**self.params)

        fields = self.inputs.keys()
        res = xr.broadcast(*[inputs[f] for f in fields])
        f_locals = dict(zip(fields, res))

        try:
            from numexpr import evaluate  # Needed for some systems to get around lazy_module issues

            result = ne.evaluate(eqn, f_locals)
        except (NotImplementedError, ImportError):
            result = eval(eqn, f_locals)
        res = res[0].copy()  # Make an xarray object with correct dimensions
        res[:] = result
        return res

Example from rustychris/stompy: stompy/xr_transect.py (view on GitHub)

# for positive-up z coordinates, starting from the surface going to the bed,
    # a positive velocity at the surface yields a positive strength
    flip_sgn=1

    # ADCP: positive:down, first bin is at the surface, so ultimately want
    # flip_sgn=1.
    # suntans: positive:up, first bin is at the surface. so ultimately want
    # flip_sgn=1.
    if tran.z_ctr.attrs.get('positive','up')=='down':
        flip_sgn*=-1
    # if the first bin is at the bed
    if tran.z_dz.mean()>0:
        flip_sgn*=-1

    get_z_dz(tran)
    all_u_left,z_dz= xr.broadcast(tran.Uroz.isel(roz=1), tran.z_dz)

    for samp in range(tran.dims['sample']):
        u_left=all_u_left.isel(sample=samp).values
        valid_left=np.isfinite(u_left) & (z_dz.isel(sample=samp).values!=0.0)
        u_left=u_left[valid_left]
        u_left_sort=np.sort(u_left)
        mid_idx=np.searchsorted(u_left_sort,0)
        if mid_idx>0:
            circ_velocity[samp]=flip_sgn*u_left[:mid_idx].mean()
        else:
            pass # leave as zero
    if name is not None:
        tran[name]=('sample',),circ_velocity
        tran[name].attrs['units']='m s-1'
        tran[name].attrs['description']='Average left-ward velocity in upper water column'
    return circ_velocity
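
Here xr.broadcast does the alignment work before the loop: the per-cell velocity and the layer-thickness field end up on exactly the same grid, so a velocity value and its cell thickness can be masked and indexed together sample by sample. A trimmed-down sketch of that alignment, with invented dimension names rather than stompy's:

import numpy as np
import xarray as xr

# velocity varies per sample and per vertical cell; thickness only per cell
u = xr.DataArray(np.random.randn(5, 10), dims=('sample', 'cell'))
dz = xr.DataArray(np.full(10, 0.5), dims='cell')

# after broadcasting, both arrays share the ('sample', 'cell') shape
u_b, dz_b = xr.broadcast(u, dz)
for samp in range(u_b.sizes['sample']):
    u_s = u_b.isel(sample=samp).values
    dz_s = dz_b.isel(sample=samp).values
    valid = np.isfinite(u_s) & (dz_s != 0.0)
    # ... per-sample work on u_s[valid] and dz_s[valid] goes here ...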

Example from bradyrx/climpred: climpred/metrics.py (view on GitHub)

* perfect: 0
        * else: negative
    """
    # helper dim to calc mu
    rdim = [tdim for tdim in verif.dims if tdim in CLIMPRED_DIMS + ['time']]
    # inside compute_perfect_model
    if 'init' in forecast.dims:
        dim2 = 'init'
    # inside compute_hindcast
    elif 'time' in forecast.dims:
        dim2 = 'time'
    else:
        raise ValueError('dim2 not found automatically in ', forecast.dims)

    mu = verif.mean(rdim)
    forecast, ref2 = xr.broadcast(forecast, verif)
    mse_kwargs = metric_kwargs.copy()
    if 'dim' in mse_kwargs:
        del mse_kwargs['dim']
    sig_r = __mse.function(forecast, ref2, dim='member', **mse_kwargs).mean(dim2)
    sig_h = __mse.function(
        forecast.mean(dim2), ref2.mean(dim2), dim='member', **mse_kwargs
    )
    crps_h = _crps_gaussian(forecast, mu, sig_h)
    if 'member' in crps_h.dims:
        crps_h = crps_h.mean('member')
    crps_r = _crps_gaussian(forecast, mu, sig_r)
    if 'member' in crps_r.dims:
        crps_r = crps_r.mean('member')
    return 1 - crps_h / crps_r
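
The call `forecast, ref2 = xr.broadcast(forecast, verif)` is what lets the verification data, which has no member dimension, be compared against every ensemble member: the second return value is the verification field repeated along the forecast's extra dimensions. A toy sketch of that alignment with invented dimension sizes:

import numpy as np
import xarray as xr

forecast = xr.DataArray(np.random.randn(10, 4), dims=('init', 'member'))
verif = xr.DataArray(np.random.randn(10), dims='init')

# verif gains the 'member' dimension so it can be differenced per member
forecast_b, verif_b = xr.broadcast(forecast, verif)
assert verif_b.dims == ('init', 'member')
error = forecast_b - verif_b   # per-member error, dims ('init', 'member')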

Example from serazing/xscale: xscale/signal/fitting.py (view on GitHub)

----------
	array : xarray.DataArray
		The data on which the trend is computed
	dim : str, optional
		Dimension over which the array will be detrended
	type : {'constant', 'linear', 'quadratic'}, optional
		Type of trend to be computed. Default is 'linear'.

	Returns
	-------
	array_trend : xarray.DataArray
		The trend associated with the input data
	"""
	if type == 'constant':
		array_trend = array.mean(dim=dim)
		_, array_trend = xr.broadcast(array, array_trend)
	elif type == 'linear':
		linfit = linreg(array, dim=dim)
		array_trend = array[dim] * linfit['slope'] + linfit['offset']
	elif type is 'quadratic':
		raise NotImplementedError
	else:
		raise ValueError('This type of trend is not supported')
	return array_trend
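
In the 'constant' branch above, array.mean(dim=dim) drops the dimension it reduces over, so the result no longer has the shape of the input; broadcasting it against the original array restores the dropped dimension and makes the trend directly subtractable. A short sketch of that round trip, with names of our own:

import numpy as np
import xarray as xr

array = xr.DataArray(np.random.randn(100, 8), dims=('time', 'x'))

# the mean over 'time' has dims ('x',) only ...
mean = array.mean(dim='time')

# ... broadcasting against the original restores the 'time' dimension
_, trend = xr.broadcast(array, mean)
assert trend.dims == array.dims and trend.shape == array.shape

detrended = array - trend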

Example from manoharan-lab/holopy: holopy/scattering/interface.py (view on GitHub)

if isinstance(illum_wavelen, xr.DataArray):
                pass
            else:
                if len(illum_wavelen) == 1:
                    illum_wavelen = illum_wavelen.repeat(
                        len(illum_polarization.illumination))
                illum_wavelen = xr.DataArray(
                    illum_wavelen, dims=illumination,
                    coords={illumination: illum_polarization.illumination})
        else:
            #  need to interpret illumination from detector.illum_wavelen
            if not isinstance(illum_wavelen, xr.DataArray):
                illum_wavelen = xr.DataArray(
                    illum_wavelen, dims=illumination,
                    coords={illumination: illum_wavelen})
            illum_polarization = xr.broadcast(
                illum_polarization, illum_wavelen, exclude=[vector])[0]

        if illumination in detector.dims:
            detector = detector.sel(
                illumination=detector.illumination[0], drop=True)
        detector = update_metadata(
            detector, illum_wavelen=illum_wavelen,
            illum_polarization=illum_polarization)

    return detector
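
The `exclude` keyword used above keeps the listed dimensions out of the broadcast: excluded dimensions are neither aligned nor added to arguments that lack them, which is why only element [0] of the result (the polarization, now carrying the illumination dimension) is kept. A small standalone sketch of exclude= with invented dimension names:

import numpy as np
import xarray as xr

pol = xr.DataArray(np.ones((2, 3)), dims=('vector', 'point'))
wavelen = xr.DataArray([0.4, 0.6], dims='illumination')

pol_b, wl_b = xr.broadcast(pol, wavelen, exclude=['vector'])

# the polarization gains the illumination dimension ...
assert 'illumination' in pol_b.dims
# ... but the excluded 'vector' dimension is not propagated to the wavelengths
assert 'vector' not in wl_b.dims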

Example from dcs4cop/xcube: xcube/core/geocoding.py (view on GitHub)

Return new geo-coding for given *dataset*.

        :param xy: Tuple of x and y coordinate variables.
        :param xy_names: Optional tuple of the x- and y-coordinate variables in *dataset*.
        :return: The source dataset's geo-coding.
        """
        x, y = xy

        if xy_names is None:
            xy_names = x.name, y.name
        x_name, y_name = xy_names
        if x_name is None or y_name is None:
            raise ValueError('unable to determine x and y coordinate variable names')

        if x.ndim == 1 and y.ndim == 1:
            y, x = xr.broadcast(y, x)
        if x.ndim != 2 or y.ndim != 2:
            raise ValueError(
                f'coordinate variables {x_name!r} and {y_name!r} must both have either one or two dimensions')

        if x.shape != y.shape or x.dims != y.dims:
            raise ValueError(f"coordinate variables {x_name!r} and {y_name!r} must have same shape and dimensions")

        height, width = x.shape
        if width < 2 or height < 2:
            raise ValueError(f"size in each dimension of {x_name!r} and {y_name!r} must be greater two")

        is_geo_crs = _is_geo_crs(x_name, y_name)
        is_lon_normalized = False
        if is_geo_crs:
            x, is_lon_normalized = _maybe_normalise_2d_lon(x)

Example from rustychris/stompy: stompy/xr_transect.py (view on GitHub)

shape=[ len(ds[d]) for d in dims]

        # For the moment, can assume that there are two dimensions,
        # and the first is sample.
        new_val=np.nan*np.ones( shape, np.float64 )

        # iter_shape=[ len(tran[d]) for d in dims
        iter_shape=var.shape

        z_num=list(dims).index(z_dim)

        if len(dims)==1:
            # print("Not sure how to resample %s"%v)
            continue
        # Not quite there -- this isn't smart enough to get the interfaces
        _,src_z,src_dz = xr.broadcast(var,tran['z_ctr'],get_z_dz(tran))

        all_sgns=np.sign(src_dz).values.ravel()
        # some of these may be nan - just look past those
        all_sgns=all_sgns[ np.isfinite(all_sgns) ]
        if all_sgns.max()>0:
            sgn=1
        elif all_sgns.min()<0:
            sgn=-1
        else:
            raise Exception("All signs are 0?")
        assert np.all( sgn*all_sgns>=0 )

        for index in np.ndindex( *iter_shape ):
            if index[z_num]>0:
                continue
            # Why are values getting filled to the bed?