def test_kwargs_resampling(tiffs):
    with rasterio.open(str(tiffs.join('rgb_ndv.tif'))) as src:
        other = src.dataset_mask(out_shape=(1, 5, 5), resampling=Resampling.bilinear) != 0
    other = other.astype(np.uint8) * 255
    # resampave is the reference mask (presumably computed earlier in the
    # original test with Resampling.average) that the bilinear result is
    # compared against.
    assert np.array_equal(resampave, other)
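The out_shape / resampling keywords used with dataset_mask above are the same ones DatasetReader.read accepts for decimated reads. A minimal sketch, assuming a hypothetical example.tif:

import rasterio
from rasterio.enums import Resampling

with rasterio.open("example.tif") as src:          # hypothetical path
    # Read band 1 at a quarter of its native resolution, averaging
    # source pixels into each output cell.
    preview = src.read(
        1,
        out_shape=(src.height // 4, src.width // 4),
        resampling=Resampling.average,
    )
print(preview.shape)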
def test_reproject_resampling_alpha(method):
    """Reprojection of a source with alpha band succeeds"""
    # Expected count of nonzero pixels for each resampling method, based
    # on running rasterio with each of the following configurations
    expected = {
        Resampling.nearest: 438113,
        Resampling.bilinear: 439280,
        Resampling.cubic: 437888,
        Resampling.cubic_spline: 440475,
        Resampling.lanczos: 436001,
        Resampling.average: 439419,
        Resampling.mode: 437298,
        Resampling.max: 439464,
        Resampling.min: 436397,
        Resampling.med: 437194,
        Resampling.q1: 436397,
        Resampling.q3: 438948,
    }

    with rasterio.open("tests/data/RGBA.byte.tif") as src:
        source = src.read(1)

    out = np.empty(src.shape, dtype=np.uint8)
    reproject(
        source,
        out,
        src_transform=src.transform,
        src_crs=src.crs,
        # The destination grid is assumed to come from a module-level
        # constant defined alongside this test.
        dst_transform=DST_TRANSFORM,
        dst_crs="EPSG:3857",
        resampling=method,
    )

    assert np.count_nonzero(out) == expected[method]
"""A WarpedVRT with the specified dimensions and transform has the expected
dataset properties.
"""
with rasterio.open(path_rgb_byte_tif) as src:
    extent = (-20037508.34, 20037508.34)
    size = (2 ** 16) * 256
    resolution = (extent[1] - extent[0]) / size
    dst_transform = affine.Affine(
        resolution, 0.0, extent[0], 0.0, -resolution, extent[1]
    )
    vrt = WarpedVRT(
        src, crs=DST_CRS, width=size, height=size, transform=dst_transform
    )

    assert vrt.dst_crs == CRS.from_string(DST_CRS)
    assert vrt.src_nodata == 0.0
    assert vrt.dst_nodata == 0.0
    assert vrt.resampling == Resampling.nearest
    assert vrt.width == size
    assert vrt.height == size
    assert vrt.transform == dst_transform
    assert vrt.warp_extras == {"init_dest": "NO_DATA"}
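For reading (not just inspecting) reprojected data, a WarpedVRT can also be constructed with only a target CRS and a resampling method, letting it derive width, height and transform itself. A minimal sketch with a hypothetical input path:

import rasterio
from rasterio.enums import Resampling
from rasterio.vrt import WarpedVRT
from rasterio.windows import Window

with rasterio.open("example.tif") as src:          # hypothetical path
    with WarpedVRT(src, crs="EPSG:3857", resampling=Resampling.bilinear) as vrt:
        print(vrt.dst_crs, vrt.width, vrt.height)
        # Windowed reads are warped on the fly from the source dataset.
        block = vrt.read(1, window=Window(0, 0, 256, 256))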
from terracotta import cog

outfile = str(tmpdir / 'raster.tif')
# Cast after scaling so the raster actually holds values in [0, 1000).
raster_data = (1000 * np.random.rand(512, 512)).astype(np.uint16)

profile = BASE_PROFILE.copy()
profile.update(
    height=raster_data.shape[0],
    width=raster_data.shape[1]
)

with rasterio.open(outfile, 'w', **profile) as dst:
    dst.write(raster_data, 1)

    overviews = [2 ** j for j in range(1, 4)]
    dst.build_overviews(overviews, Resampling.nearest)

# Overviews alone do not make this a valid cloud-optimized GeoTIFF.
assert not cog.validate(outfile)
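To turn such a file into a GeoTIFF that does pass COG validation, one common route is rio_cogeo's cog_translate, which retiles the data and regenerates overviews with a chosen resampling. A hedged sketch; the output path is hypothetical:

from rio_cogeo.cogeo import cog_translate
from rio_cogeo.profiles import cog_profiles

# Rewrite the plain GeoTIFF from above as a tiled, overviewed COG.
cog_outfile = str(tmpdir / 'raster_cog.tif')
cog_translate(
    outfile,                       # path written in the snippet above
    cog_outfile,
    cog_profiles.get("deflate"),
    overview_resampling="average",
)
assert cog.validate(cog_outfile)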
# filter to only variables present in this dataset
ds_variables = [v for v in variables if v in ds.variables]
ds_crs = get_crs(ds, ds_variables[0]) or src_crs

with Dataset(os.path.join(output_directory, os.path.split(filename)[1]), 'w') as out_ds:
    click.echo('Processing: {0}'.format(filename))

    warp_like(
        ds,
        ds_projection=ds_crs,
        variables=ds_variables,
        out_ds=out_ds,
        template_ds=template_ds,
        template_varname=template_varname,
        resampling=getattr(Resampling, resampling)
    )
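warp_like above receives the resampling method as a plain string from the CLI and resolves it with getattr. Resampling is an IntEnum, so name-based lookup works either way:

from rasterio.enums import Resampling

method = getattr(Resampling, "bilinear")   # attribute-style, as above
same = Resampling["bilinear"]              # subscription by member name
assert method is same

# Member names are handy for building a CLI choice list.
print([r.name for r in Resampling])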
@click.option('--resampling',
              type=click.Choice(
                  [it.name for it in Resampling if it.value in [0, 2, 5, 6, 7]]),
              default='nearest', show_default=True)
@click.pass_context
def overview(ctx, input, build, ls, rebuild, resampling):
    """Construct overviews in an existing dataset.

    A pyramid of overviews computed once and stored in the dataset can
    improve performance in some applications.

    The decimation levels at which to build overviews can be specified as
    a comma separated list

      rio overview --build 2,4,8,16

    or a base and range of exponents.

      rio overview --build 2^1..4
    """
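A rough programmatic equivalent of `rio overview --build 2,4,8,16 --resampling average` is to open the dataset in update mode and build the pyramid in place; the path below is hypothetical:

import rasterio
from rasterio.enums import Resampling

with rasterio.open("example.tif", "r+") as dst:    # hypothetical path
    dst.build_overviews([2, 4, 8, 16], Resampling.average)
    # rio overview records the resampling choice in a namespaced tag as well.
    dst.update_tags(ns="rio_overview", resampling="average")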
overviews = [2 ** j for j in range(1, overview_level + 1)]
tmp_dst.build_overviews(overviews, ResamplingEnums[overview_resampling])

if not quiet:
    click.echo("Updating dataset tags...", err=True)

for i, b in enumerate(indexes):
    tmp_dst.set_band_description(i + 1, src_dst.descriptions[b - 1])
    if forward_band_tags:
        tmp_dst.update_tags(i + 1, **src_dst.tags(b))

tags = src_dst.tags()
tags.update(
    dict(
        OVR_RESAMPLING_ALG=ResamplingEnums[
            overview_resampling
        ].name.upper()
    )
)
tmp_dst.update_tags(**tags)

tmp_dst._set_all_scales([vrt_dst.scales[b - 1] for b in indexes])
tmp_dst._set_all_offsets([vrt_dst.offsets[b - 1] for b in indexes])

if not quiet:
    click.echo("Writing output to: {}".format(dst_path), err=True)
copy(tmp_dst, dst_path, copy_src_overviews=True, **dst_kwargs)
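The copy(..., copy_src_overviews=True, ...) call above is rasterio.shutil.copy. The same pattern works outside cog_translate for finalizing a scratch dataset into a tiled output; the paths and creation options below are illustrative:

import rasterio
from rasterio.enums import Resampling
from rasterio.shutil import copy as rio_copy

with rasterio.open("tmp.tif", "r+") as tmp:        # hypothetical scratch file
    tmp.build_overviews([2, 4, 8], Resampling.nearest)

rio_copy(
    "tmp.tif",
    "final.tif",
    driver="GTiff",
    copy_src_overviews=True,   # carry the freshly built overviews across
    tiled=True,
    blockxsize=256,
    blockysize=256,
)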
bounds = [
    c
    for i in (
        mercantile.xy(*mercantile.ul(x, y + 1, z)),
        mercantile.xy(*mercantile.ul(x + 1, y, z)),
    )
    for c in i
]

toaffine = transform.from_bounds(*bounds + [512, 512])

out = np.empty((512, 512), dtype=src.meta["dtype"])

reproject(
    rasterio.band(src, 1),
    out,
    dst_transform=toaffine,
    dst_crs="epsg:3857",
    resampling=Resampling.bilinear,
)

out = data_to_rgb(out, global_args["base_val"], global_args["interval"])

return tile, global_args["writer_func"](out, global_args["kwargs"].copy(), toaffine)
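The two mercantile.ul/xy calls above compute the web-mercator corners of tile (x, y, z); mercantile.xy_bounds yields the same bounds directly, which can make the affine construction easier to follow. The tile coordinates below are arbitrary:

import mercantile
from rasterio import transform

bounds = mercantile.xy_bounds(1188, 1533, 12)      # arbitrary example tile
tile_affine = transform.from_bounds(
    bounds.left, bounds.bottom, bounds.right, bounds.top, 512, 512
)
print(tile_affine)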
# https://rasterio.readthedocs.io/en/stable/topics/reproject.html
transform, width, height = calculate_default_transform(image.crs, crs, image.width, image.height, *image.bounds)

profile = image.profile.copy()
# CRS, TRANSFORM, WIDTH and HEIGHT are profile-key constants defined
# elsewhere in the source module.
profile.update({CRS: crs,
                TRANSFORM: transform,
                WIDTH: width,
                HEIGHT: height})

transformed = rio.MemoryFile().open(**profile)
for i in range(1, image.count + 1):
    reproject(source=rio.band(image, i),
              destination=rio.band(transformed, i),
              src_transform=image.transform,
              src_crs=image.crs,
              dst_transform=transform,
              dst_crs=crs,
              resampling=Resampling.nearest)
return transformed
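The function above keeps the reprojected raster in a MemoryFile; the file-backed variant from the linked rasterio reprojection docs follows the same calculate_default_transform / band-by-band reproject pattern. Paths and the target CRS here are placeholders:

import rasterio
from rasterio.enums import Resampling
from rasterio.warp import calculate_default_transform, reproject

dst_crs = "EPSG:4326"                               # hypothetical target CRS

with rasterio.open("input.tif") as src:             # hypothetical paths
    transform, width, height = calculate_default_transform(
        src.crs, dst_crs, src.width, src.height, *src.bounds
    )
    profile = src.profile.copy()
    profile.update(crs=dst_crs, transform=transform, width=width, height=height)

    with rasterio.open("reprojected.tif", "w", **profile) as dst:
        for i in range(1, src.count + 1):
            reproject(
                source=rasterio.band(src, i),
                destination=rasterio.band(dst, i),
                src_transform=src.transform,
                src_crs=src.crs,
                dst_transform=transform,
                dst_crs=dst_crs,
                resampling=Resampling.nearest,
            )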
def cogeo(self, values, output_fh, indexes, dst_kwargs, predictor=2, resampling=None, overview_level=None):
    # Method of a writer class: DEFAULT_GDAL_CONFIG and default_profile
    # are attributes of the enclosing class.
    with rasterio.Env(**self.DEFAULT_GDAL_CONFIG):
        with MemoryFile() as memfile:
            with memfile.open(**dst_kwargs) as mem:
                mem.write(values, indexes=indexes)

                if resampling is not None:
                    overviews = [2 ** j for j in range(1, overview_level + 1)]
                    mem.build_overviews(overviews, Resampling[resampling])
                    mem.update_tags(
                        OVR_RESAMPLING_ALG=Resampling[resampling].name.upper()
                    )
                else:
                    _LOG.warning('Not producing overview')

                profile = self.default_profile.copy()
                profile.update({'blockxsize': dst_kwargs['blockxsize'],
                                'blockysize': dst_kwargs['blockysize'],
                                'predictor': predictor,
                                'copy_src_overviews': True})
                copy(mem, output_fh, **profile)
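After any of these writers has run, the overview factors and the resampling tag can be checked on the result. A small sketch with a hypothetical output path:

import rasterio
from rasterio.enums import Resampling

with rasterio.open("final.tif") as src:             # hypothetical path
    print(src.overviews(1))                         # e.g. [2, 4, 8]
    print(src.tags().get("OVR_RESAMPLING_ALG"))     # e.g. "NEAREST"
    # Decimated reads can be served from the overviews when they exist.
    thumb = src.read(1, out_shape=(src.height // 8, src.width // 8),
                     resampling=Resampling.average)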