regression = numpy.polyfit(timesteps,
                           reshaped, deg=deg, w=weights)[0]
out_block = regression.reshape(blocks[0].shape)
# Mask out any pixel stacks where there's a nodata value in the stack.
# out_block is multiplied by 365.25 to convert the m/day slope to an
# m/year trend.
return numpy.where(numpy.min(stacked_array, axis=2) == 0, 0, out_block * 365.25)
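# A minimal, self-contained sketch of how the per-pixel regression above
# might be assembled.  The factory function and the handling of
# ``timesteps``/``weights`` are assumptions for illustration; only the
# polyfit/reshape/masking steps mirror the fragment above.
import numpy


def make_regression_op(timesteps, weights=None, deg=1):
    """Return a pixel-stack op like ``_regression`` above.

    The returned op takes one 2D block per timestep (the form a block-based
    raster op receives when ``vectorize_op=False``) and returns the
    per-pixel linear trend in m/year.
    """
    def _regression(*blocks):
        stacked_array = numpy.dstack(blocks)  # shape (rows, cols, n_timesteps)
        n_pixels = blocks[0].size
        # polyfit fits one polynomial per column, so arrange the stack as
        # (n_timesteps, n_pixels).
        reshaped = stacked_array.reshape(n_pixels, stacked_array.shape[2]).T
        # polyfit returns coefficients highest degree first, so [0] is the
        # slope when deg == 1.
        regression = numpy.polyfit(timesteps, reshaped, deg=deg, w=weights)[0]
        out_block = regression.reshape(blocks[0].shape)
        # Zero out pixel stacks containing a nodata (0) value and convert
        # the m/day slope to an m/year trend.
        return numpy.where(
            numpy.min(stacked_array, axis=2) == 0, 0, out_block * 365.25)
    return _regression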
raster_cell_sizes = [pygeoprocessing.get_cell_size_from_uri(r)
                     for r in rasters]
min_cell_size = min(raster_cell_sizes)
if len(set(raster_cell_sizes)) != 1:
    warnings.warn(('Cell sizes of input rasters do not all match. '
                   'Using min pixel size of %s. Mismatched values: %s') % (
                       min_cell_size, set(raster_cell_sizes)))
pygeoprocessing.vectorize_datasets(
    dataset_uri_list=rasters,
    dataset_pixel_op=_regression,
    dataset_out_uri=out_filename,
    datatype_out=gdal.GDT_Float32,
    nodata_out=0,
    pixel_size_out=min_cell_size,
    bounding_box_mode='intersection',
    vectorize_op=False,
    datasets_are_pre_aligned=False)
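# Optional sanity check (a sketch, not from the original source): read the
# trend raster written above back with GDAL and report its valid value range.
# ``out_filename`` is the path passed as ``dataset_out_uri`` above; 0 is the
# nodata value declared via ``nodata_out``.
from osgeo import gdal

trend_ds = gdal.Open(out_filename)
trend_array = trend_ds.GetRasterBand(1).ReadAsArray()
valid = trend_array[trend_array != 0]
print('m/year trend range:', valid.min(), valid.max())
trend_ds = None  # dereference to close/flush the GDAL dataset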
Parameters:
    stack_trend (numpy.ndarray): Array of values from the stack trend
        raster.
    pgp_trend (numpy.ndarray): Array of values from the pygeoprocessing
        trend raster.

Returns:
    ``numpy.ndarray`` of the difference between ``stack_trend`` and
    ``pgp_trend``."""
valid_mask = (stack_trend != stack_nodata) & (pgp_trend != pgp_nodata)
# Initialize the output to the difference raster's nodata value.
out_array = numpy.empty_like(stack_trend)
out_array[:] = -9999
out_array[valid_mask] = stack_trend[valid_mask] - pgp_trend[valid_mask]
return out_array
pygeoprocessing.vectorize_datasets(
    dataset_uri_list=[stack_trend_file, pgp_trend_file],
    dataset_pixel_op=_diff,
    dataset_out_uri=diff_file,
    datatype_out=gdal.GDT_Float32,
    nodata_out=-9999,
    pixel_size_out=32.,
    bounding_box_mode='intersection',
    vectorize_op=False,
    datasets_are_pre_aligned=False)
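# A tiny, self-contained illustration of the nodata masking used by ``_diff``
# above (toy 2x2 arrays; both nodata values assumed to be -9999, matching
# ``nodata_out`` in the call above).
import numpy

stack_nodata = pgp_nodata = -9999
stack_trend = numpy.array([[1.0, stack_nodata], [3.0, 4.0]])
pgp_trend = numpy.array([[0.5, 2.0], [pgp_nodata, 3.5]])

valid_mask = (stack_trend != stack_nodata) & (pgp_trend != pgp_nodata)
diff = numpy.full(stack_trend.shape, -9999.0)
diff[valid_mask] = stack_trend[valid_mask] - pgp_trend[valid_mask]
# Valid pixels hold stack_trend - pgp_trend; a pixel that is nodata in
# either input stays at -9999.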