gdfc = gpd.GeoDataFrame()
for i, ci in enumerate(catchment_indices):
    # Catchment polygon
    mask[:] = 0
    mask[tuple(ci.T)] = 1
    _, poly_no = _mask_to_polygon(mask, gdir=gdir)
    gdfc.loc[i, 'geometry'] = poly_no
gdfi = utils.polygon_intersections(gdfc)

# We project them onto the mercator proj before writing. This is a bit
# inefficient (they'll be projected back later), but it's more sustainable
gdfc.crs = gdir.grid
gdfi.crs = gdir.grid
salem.transform_geopandas(gdfc, gdir.grid.proj, inplace=True)
salem.transform_geopandas(gdfi, gdir.grid.proj, inplace=True)
if hasattr(gdfc.crs, 'srs'):
    # salem uses pyproj
    gdfc.crs = gdfc.crs.srs
    gdfi.crs = gdfi.crs.srs
gdir.write_shapefile(gdfc, 'flowline_catchments')
if len(gdfi) > 0:
    gdir.write_shapefile(gdfi, 'catchments_intersects')
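
# --- Illustration (not part of the OGGM source) ---
# A minimal, self-contained sketch of the pattern above: build a GeoDataFrame
# of polygons and collect their pairwise intersections. This only illustrates
# the general idea behind utils.polygon_intersections; the actual OGGM helper
# may differ in detail.
import geopandas as gpd
import shapely.geometry as shpg

squares = gpd.GeoDataFrame(geometry=[
    shpg.box(0, 0, 2, 2),   # overlaps the next square along x in [1, 2]
    shpg.box(1, 0, 3, 2),
    shpg.box(5, 5, 6, 6),   # isolated, produces no intersection
])

inter_geoms = []
for i, gi in enumerate(squares.geometry):
    for gj in squares.geometry[i + 1:]:
        inter = gi.intersection(gj)
        if not inter.is_empty:
            inter_geoms.append(inter)

intersections = gpd.GeoDataFrame(geometry=inter_geoms)
print(intersections)  # one row: the strip shared by the first two squares
# --- End illustration ---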
grids_file = gdir.get_filepath('gridded_data')
with ncDataset(grids_file) as nc:
    topo_smoothed = nc.variables['topo_smoothed'][:]
    glacier_mask = nc.variables['glacier_mask'][:]

# Glacier exterior including nunataks
erode = binary_erosion(glacier_mask)
glacier_ext = glacier_mask ^ erode
glacier_ext = np.where(glacier_mask == 1, glacier_ext, 0)
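
# --- Illustration (not part of the OGGM source) ---
# Toy example of the exterior-mask trick used above: eroding a binary mask and
# XOR-ing it with the original keeps only the one-pixel outline of the shape.
import numpy as np
from scipy.ndimage import binary_erosion

toy_mask = np.zeros((7, 7), dtype=int)
toy_mask[2:5, 2:5] = 1                        # a 3x3 "glacier"
toy_ext = toy_mask ^ binary_erosion(toy_mask)
toy_ext = np.where(toy_mask == 1, toy_ext, 0)
print(toy_ext.sum())                          # 8 boundary pixels around the centre
# --- End illustration ---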
# Intersects between glaciers
gdfi = gpd.GeoDataFrame(columns=['geometry'])
if gdir.has_file('intersects'):
    # read and transform to grid
    gdf = gdir.read_shapefile('intersects')
    salem.transform_geopandas(gdf, gdir.grid, inplace=True)
    gdfi = pd.concat([gdfi, gdf[['geometry']]])

# Ice divide mask
# Probably not the fastest way to do this, but it works
dist = np.array([])
jj, ii = np.where(glacier_ext)
for j, i in zip(jj, ii):
    dist = np.append(dist, np.min(gdfi.distance(shpg.Point(i, j))))

with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=RuntimeWarning)
    pok = np.where(dist <= 1)
glacier_ext_intersect = glacier_ext * 0
glacier_ext_intersect[jj[pok], ii[pok]] = 1
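
# --- Illustration (not part of the OGGM source) ---
# The loop above flags exterior pixels that lie within one grid cell of an
# intersect geometry. A tiny stand-alone version of that distance test:
import numpy as np
import geopandas as gpd
import shapely.geometry as shpg

divides = gpd.GeoDataFrame(geometry=[shpg.LineString([(0, 0), (0, 10)])])
pixels = [(0.5, 3.0), (4.0, 3.0)]             # (x, y) pixel centres
dist = np.array([divides.distance(shpg.Point(xy)).min() for xy in pixels])
print(dist <= 1)                              # [ True False ]
# --- End illustration ---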
# Distance from border mask - Scipy does the job
# I remove the boundary pixs because they are likely to be higher
fpath = gdir.get_filepath('gridded_data', div_id=div_id)
with netCDF4.Dataset(fpath) as nc:
    topo = nc.variables['topo'][:]
    mask_ext = nc.variables['glacier_ext'][:]
    mask_glacier = nc.variables['glacier_mask'][:]
topo[np.where(mask_glacier == 0)] = np.NaN
topo[np.where(mask_ext == 1)] = np.NaN

# Intersects between catchments/glaciers
gdfi = gpd.GeoDataFrame(columns=['geometry'])
if gdir.has_file('catchments_intersects', div_id=div_id):
    # read and transform to grid
    gdf = gpd.read_file(gdir.get_filepath('catchments_intersects',
                                          div_id=div_id))
    salem.transform_geopandas(gdf, gdir.grid, inplace=True)
    gdfi = pd.concat([gdfi, gdf[['geometry']]])
if gdir.has_file('divides_intersects', div_id=0):
    # read and transform to grid
    gdf = gpd.read_file(gdir.get_filepath('divides_intersects'))
    salem.transform_geopandas(gdf, gdir.grid, inplace=True)
    gdfi = pd.concat([gdfi, gdf[['geometry']]])
if gdir.has_file('intersects', div_id=0):
    # read and transform to grid
    gdf = gpd.read_file(gdir.get_filepath('intersects', div_id=0))
    salem.transform_geopandas(gdf, gdir.grid, inplace=True)
    gdfi = pd.concat([gdfi, gdf[['geometry']]])

# apply a buffer to be sure we get the intersects right. Be generous
gdfi = gdfi.buffer(1.5)
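
# --- Illustration (not part of the OGGM source) ---
# The buffer above widens the intersect lines by 1.5 grid cells so that later
# distance / containment checks do not miss them by a fraction of a pixel.
# Buffering a line turns it into a polygon of the given half-width:
import shapely.geometry as shpg

line = shpg.LineString([(0, 0), (10, 0)])
fat = line.buffer(1.5)
print(line.geom_type, '->', fat.geom_type)    # LineString -> Polygon
print(fat.contains(shpg.Point(5, 1.0)))       # True: within 1.5 of the line
# --- End illustration ---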
# Filter parameters
gdfc = gpd.GeoDataFrame()
for i, ci in enumerate(catchment_indices):
    # Catchment polygon
    mask[:] = 0
    mask[tuple(ci.T)] = 1
    _, poly_no = _mask_to_polygon(mask, x=xmesh, y=ymesh, gdir=gdir)
    gdfc.loc[i, 'geometry'] = poly_no
gdfi = utils.polygon_intersections(gdfc)

# We project them onto the mercator proj before writing. This is a bit
# inefficient (they'll be projected back later), but it's more sustainable
gdfc.crs = gdir.grid
gdfi.crs = gdir.grid
salem.transform_geopandas(gdfc, gdir.grid.proj, inplace=True)
salem.transform_geopandas(gdfi, gdir.grid.proj, inplace=True)
if hasattr(gdfc.crs, 'srs'):
    # salem uses pyproj
    gdfc.crs = gdfc.crs.srs
    gdfi.crs = gdfi.crs.srs
gdfc.to_file(gdir.get_filepath('flowline_catchments', div_id=div_id))
if len(gdfi) > 0:
    gdfi.to_file(gdir.get_filepath('catchments_intersects',
                                   div_id=div_id))
if not cfg.PARAMS['use_rgi_area']:
    # Update Area
    area = geometry.area * 1e-6
    entity['Area'] = area
    towrite['Area'] = area

# Write shapefile
gdir.write_shapefile(towrite, 'outlines')

# Also transform the intersects if necessary
gdf = cfg.PARAMS['intersects_gdf']
if len(gdf) > 0:
    gdf = gdf.loc[((gdf.RGIId_1 == gdir.rgi_id) |
                   (gdf.RGIId_2 == gdir.rgi_id))]
    if len(gdf) > 0:
        gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
        if hasattr(gdf.crs, 'srs'):
            # salem uses pyproj
            gdf.crs = gdf.crs.srs
        gdir.write_shapefile(gdf, 'intersects')
else:
    # Sanity check
    if cfg.PARAMS['use_intersects']:
        raise InvalidParamsError('You seem to have forgotten to set the '
                                 'intersects file for this run. OGGM '
                                 'works better with such a file. If you '
                                 'know what you are doing, set '
                                 "cfg.PARAMS['use_intersects'] = False to "
                                 "suppress this error.")
# 6. choose a spatial resolution with respect to the glacier area
dxmethod = cfg.PARAMS['grid_dx_method']
mask_ext = nc.variables['glacier_ext'][:]
mask_glacier = nc.variables['glacier_mask'][:]
topo[np.where(mask_glacier == 0)] = np.NaN
topo[np.where(mask_ext == 1)] = np.NaN

# Intersects between catchments/glaciers
gdfi = gpd.GeoDataFrame(columns=['geometry'])
if gdir.has_file('catchments_intersects'):
    # read and transform to grid
    gdf = gdir.read_shapefile('catchments_intersects')
    salem.transform_geopandas(gdf, gdir.grid, inplace=True)
    gdfi = pd.concat([gdfi, gdf[['geometry']]])
if gdir.has_file('intersects'):
    # read and transform to grid
    gdf = gdir.read_shapefile('intersects')
    salem.transform_geopandas(gdf, gdir.grid, inplace=True)
    gdfi = pd.concat([gdfi, gdf[['geometry']]])

# apply a buffer to be sure we get the intersects right. Be generous
gdfi = gdfi.buffer(1.5)

# Filter parameters
# Number of pixels to arbitrarily remove at junctions
jpix = int(cfg.PARAMS['flowline_junction_pix'])

# Loop over the lines
mask = np.zeros((gdir.grid.ny, gdir.grid.nx))
for fl, ci in zip(flowlines, catchment_indices):
    n = len(fl.dis_on_line)
    widths = np.zeros(n)