utils.mkdir(cfg.PATHS['rgi_dir'])
# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = False
cfg.PARAMS['border'] = 20  # number of grid points around the glacier outlines
cfg.PARAMS['continue_on_error'] = False
# Read in the RGI file
rgisel = os.path.join(WORKING_DIR, 'rgi_selection.shp')
if not os.path.exists(rgisel):
    rgi_dir = utils.get_rgi_dir()
    regions = ['{:02d}'.format(int(p)) for p in range(1, 20)]
    files = [glob.glob(os.path.join(rgi_dir, '*', r + '_rgi50_*.shp'))[0]
             for r in regions]
    rgidf = []
    for fs in files:
        # glob already returned the full path, no need to join it again
        sh = salem.read_shapefile(fs, cached=True)
        # sample the glaciers at the 0, 25, 50, 75 and 100 percentiles of
        # area, longitude and latitude for a small but representative subset
        percs = np.asarray([0, 25, 50, 75, 100])
        idppercs = np.round(percs * 0.01 * (len(sh)-1)).astype(int)
        rgidf.append(sh.sort_values(by='Area').iloc[idppercs])
        rgidf.append(sh.sort_values(by='CenLon').iloc[idppercs])
        rgidf.append(sh.sort_values(by='CenLat').iloc[idppercs])
    rgidf = gpd.GeoDataFrame(pd.concat(rgidf))
    rgidf = rgidf.drop_duplicates('RGIId')
    rgidf.to_file(rgisel)
else:
    rgidf = salem.read_shapefile(rgisel)
rgidf = rgidf.loc[~rgidf.RGIId.isin(['RGI50-10.00012', 'RGI50-17.00850',
                                     'RGI50-19.01497', 'RGI50-19.00990',
                                     'RGI50-19.01440'])]
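# Illustrative check (not in the original script): make sure the excluded
# outlines are really gone
assert not rgidf.RGIId.isin(['RGI50-10.00012', 'RGI50-17.00850',
                             'RGI50-19.01497', 'RGI50-19.00990',
                             'RGI50-19.01440']).any()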
sel.loc[:, 'Name'] = name
rgidf.append(sel)
_rgi_ids_for_overwrite.extend(wgms_df.RGI_ID.values)
# GTD glaciers which are not already there
# Actually we should remove the data of those 2 to be honest...
gtd_df = gtd_df.loc[~gtd_df.RGI_ID.isin(_rgi_ids_for_overwrite)]
log.info('N glaciers GTD: {}'.format(len(gtd_df)))
for i, row in gtd_df.iterrows():
    rid = row.RGI_ID
    reg = rid.split('-')[1].split('.')[0]
    # read the RGI region file
    rgi_shp = find_path(RGI_DIR, reg + '_rgi50_*.shp')
    rgi_df = salem.read_shapefile(rgi_shp, cached=True)
    sel = rgi_df.loc[rgi_df.RGIId.isin([rid])].copy()
    assert len(sel) == 1
    # add the glacier name to the entity
    _corname = row.NAME.replace('/', 'or').replace('.', '').replace(' ', '-')
    name = ['G:' + _corname] * len(sel)
    for n in name:
        if len(n) > 48:
            raise RuntimeError('Glacier name too long: ' + n)
    sel.loc[:, 'Name'] = name
    rgidf.append(sel)
# Save, to avoid recomputing it each time
rgidf = pd.concat(rgidf)
rgidf.to_pickle(df_rgi_file)
# After testing, it appears that changing the 'border' parameter won't affect
# the results much (expectedly), so that it's ok to change it. All the rest
# (e.g. smoothing, dx, prcp factor...) should imply a re-calibration
mbf = 'https://dl.dropboxusercontent.com/u/20930277/ref_tstars_no_tidewater.csv'
mbf = utils.file_downloader(mbf)
shutil.copyfile(mbf, os.path.join(WORKING_DIR, 'ref_tstars.csv'))
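# Illustrative only (not in the original script; assumes pandas is imported
# as pd): the reference t* list is a plain CSV, so a quick sanity check works
ref_df = pd.read_csv(os.path.join(WORKING_DIR, 'ref_tstars.csv'))
print('Number of reference t* glaciers: {}'.format(len(ref_df)))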
# Get the RGI file
# ----------------
# Download RGI files
rgi_dir = utils.get_rgi_dir()
rgi_shp = list(glob(os.path.join(rgi_dir, '*', rgi_reg + '_rgi50_*.shp')))
assert len(rgi_shp) == 1
rgidf = salem.read_shapefile(rgi_shp[0], cached=True)
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)
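# For this run, keep only a single test glacier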
rgidf = rgidf.loc[rgidf.RGIId.isin(['RGI50-07.01394'])]
log.info('Starting run for RGI reg: ' + rgi_reg)
log.info('Number of glaciers: {}'.format(len(rgidf)))
# Go - initialize working directories
# -----------------------------------
gdirs = workflow.init_glacier_regions(rgidf)
# Prepro tasks
task_list = [
tasks.glacier_masks,
from oggm.utils import file_downloader, nicenumber, mkdir
cfg.initialize()
cfg.PARAMS['border'] = 10
reset = False
base_dir = os.path.join(os.path.expanduser('~/tmp'), 'OGGM_GMD', 'Grindelwald')
cfg.PATHS['working_dir'] = base_dir
mkdir(base_dir, reset=reset)
rgif = 'https://www.dropbox.com/s/zkprlr5ad5voysh/rgiv5_grindelwald.zip?dl=1'
rgif = file_downloader(rgif)
with zipfile.ZipFile(rgif) as zf:
    zf.extractall(base_dir)
rgif = os.path.join(base_dir, 'rgiv5_grindelwald.shp')
rgidf = salem.read_shapefile(rgif, cached=True)
entity = rgidf.iloc[0]
gdir = oggm.GlacierDirectory(entity, base_dir=base_dir, reset=True)
tasks.define_glacier_region(gdir, entity=entity)
tasks.glacier_masks(gdir)
tasks.compute_centerlines(gdir)
tasks.initialize_flowlines(gdir)
tasks.compute_downstream_line(gdir)
tasks.catchment_area(gdir)
tasks.catchment_intersections(gdir)
tasks.catchment_width_geom(gdir)
tasks.catchment_width_correction(gdir)
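# Illustrative only (not in the original script): after the width correction,
# the corrected flowlines can be read back for inspection
fls = gdir.read_pickle('inversion_flowlines')
print('Number of flowlines: {}'.format(len(fls)))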
with netCDF4.Dataset(gdir.get_filepath('gridded_data')) as nc:
    mask = nc.variables['glacier_mask'][:]
    topo = nc.variables['topo_smoothed'][:]
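# Illustrative only (assumes numpy is imported as np): the binary mask selects
# the glacier pixels from the gridded fields, e.g. for the mean elevation
mean_elev = np.mean(topo[mask == 1])
print('Mean glacier surface elevation: {:.0f} m'.format(mean_elev))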
cfg.set_intersects_db(os.path.join(rgi_dir, '00_rgi50_AllRegs',
                                   'intersects_rgi50_AllRegs.shp'))
# Pre-download other files which will be needed later
utils.get_cru_cl_file()
utils.get_cru_file(var='tmp')
utils.get_cru_file(var='pre')
# Some global flags for more control over what to run
RUN_GIS_mask = False  # run the glacier masks task
RUN_GIS_PREPRO = False # run GIS pre-processing tasks (before climate)
RUN_CLIMATE_PREPRO = False # run climate pre-processing tasks
RUN_INVERSION = False # run bed inversion
# Read RGI file
rgidf = salem.read_shapefile(RGI_FILE, cached=True)
# get WGMS glaciers
flink, mbdatadir = utils.get_wgms_files()
ids_with_mb = flink['RGI50_ID'].values
if PC:
    # Keep id's of glaciers in WGMS and GlathiDa V2
    keep_ids = ['RGI50-01.02228', 'RGI50-01.00037', 'RGI50-01.16316',
                'RGI50-01.00570', 'RGI50-01.22699']
    # Glaciers in the McNabb data base
    terminus_data_ids = ['RGI50-01.23642', 'RGI50-01.10689']
    keep_indexes = [((i in keep_ids) or (i in ids_with_mb) or
                     (i in terminus_data_ids)) for i in rgidf.RGIId]
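    # Assumed next step (not shown in the original snippet): apply the
    # boolean mask to keep only the selected glaciers
    rgidf = rgidf.loc[keep_indexes]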
# Pre-download other files which will be needed later
_ = utils.get_cru_file(var='tmp')
_ = utils.get_cru_file(var='pre')
# Initialize OGGM and set up the run parameters
# ---------------------------------------------
# Download and read in the RGI file
n = 'RGI_list_WGMS_glaciers_noTidewater'
rgif = 'https://www.dropbox.com/s/ekkl99o1lglljyg/' + n + '.zip?dl=1'
rgif = utils.file_downloader(rgif)
with zipfile.ZipFile(rgif) as zf:
    zf.extractall(WORKING_DIR)
rgif = os.path.join(WORKING_DIR, n + '.shp')
rgidf = salem.read_shapefile(rgif, cached=True)
# Sort for more efficient parallel computing (largest glaciers first, so that
# the longest-running jobs start early)
rgidf = rgidf.sort_values('Area', ascending=False)
print('Number of glaciers: {}'.format(len(rgidf)))
# Go - initialize working directories
# -----------------------------------
# you can use the command below to reset your run -- use with caution!
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)
# Prepro tasks
task_list = [