# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
else:
rgidf = salem.read_shapefile(rgisel)
rgidf = rgidf.loc[~rgidf.RGIId.isin(['RGI50-10.00012', 'RGI50-17.00850',
'RGI50-19.01497', 'RGI50-19.00990',
'RGI50-19.01440'])]
log.info('Number of glaciers: {}'.format(len(rgidf)))
# Go - initialize working directories
# gdirs = workflow.init_glacier_regions(rgidf, reset=True, force=True)
gdirs = workflow.init_glacier_regions(rgidf)
# Prepro tasks
task_list = [
tasks.glacier_masks,
]
for task in task_list:
execute_entity_task(task, gdirs)
# Plots (if you want)
if PLOTS_DIR == '':
exit()
utils.mkdir(PLOTS_DIR)
for gd in gdirs:
bname = os.path.join(PLOTS_DIR, gd.rgi_id + '_')
demsource = ' (' + gd.read_pickle('dem_source') + ')'
# graphics.plot_googlemap(gd)
# plt.savefig(bname + 'ggl.png')
# Libs
import glob
import os

import numpy as np
import pandas as pd
import geopandas as gpd
import matplotlib.pyplot as plt
import salem

# Locals
import oggm
import oggm.cfg as cfg
from oggm import workflow
from oggm import tasks
from oggm.workflow import execute_entity_task
from oggm import graphics, utils
# Initialize OGGM
cfg.initialize()

# Local paths (where to write output and where to download input)
DATA_DIR = '/home/mowglie/disk/OGGM_INPUT'
WORKING_DIR = '/home/mowglie/disk/OGGM_RUNS/TEST_DEMS'
PLOTS_DIR = os.path.join(WORKING_DIR, 'plots')

cfg.PATHS['working_dir'] = WORKING_DIR
cfg.PATHS['topo_dir'] = os.path.join(DATA_DIR, 'topo')
cfg.PATHS['rgi_dir'] = os.path.join(DATA_DIR, 'rgi')

# Make sure every directory we are going to write into exists.
for _d in (WORKING_DIR, cfg.PATHS['topo_dir'], cfg.PATHS['rgi_dir']):
    utils.mkdir(_d)

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = False
cfg.PARAMS['border'] = 20
# NOTE(review): from here the file repeats the setup above — this chunk looks
# like several versions of the same script concatenated together. Kept for
# fidelity, with the stripped indentation restored.
from oggm import graphics, utils

# Initialize OGGM
cfg.initialize()

# Local paths (where to write output and where to download input)
DATA_DIR = '/home/mowglie/disk/OGGM_INPUT'
WORKING_DIR = '/home/mowglie/disk/OGGM_RUNS/TEST_DEMS'
PLOTS_DIR = os.path.join(WORKING_DIR, 'plots')

cfg.PATHS['working_dir'] = WORKING_DIR
cfg.PATHS['topo_dir'] = os.path.join(DATA_DIR, 'topo')
cfg.PATHS['rgi_dir'] = os.path.join(DATA_DIR, 'rgi')
utils.mkdir(WORKING_DIR)
utils.mkdir(cfg.PATHS['topo_dir'])
utils.mkdir(cfg.PATHS['rgi_dir'])

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = False
cfg.PARAMS['border'] = 20
cfg.PARAMS['continue_on_error'] = False

# Read in the RGI file (build the selection if not cached on disk)
rgisel = os.path.join(WORKING_DIR, 'rgi_selection.shp')
if not os.path.exists(rgisel):
    rgi_dir = utils.get_rgi_dir()
    # RGI v5 region codes '01'..'19'
    regions = ['{:02d}'.format(int(p)) for p in range(1, 20)]
    files = [glob.glob(os.path.join(rgi_dir, '*', r + '_rgi50_*.shp'))[0]
             for r in regions]
    rgidf = []
    for fs in files:
        sh = salem.read_shapefile(os.path.join(rgi_dir, fs), cached=True)
        # NOTE(review): loop body is truncated here in this chunk.
        percs = np.asarray([0, 25, 50, 75, 100])
# NOTE(review): another repetition of the imports and setup — concatenation
# artifact; kept for fidelity, with the stripped indentation restored.
import oggm.cfg as cfg
from oggm import workflow
from oggm import tasks
from oggm.workflow import execute_entity_task
from oggm import graphics, utils

# Initialize OGGM
cfg.initialize()

# Local paths (where to write output and where to download input)
DATA_DIR = '/home/mowglie/disk/OGGM_INPUT'
WORKING_DIR = '/home/mowglie/disk/OGGM_RUNS/TEST_DEMS'
PLOTS_DIR = os.path.join(WORKING_DIR, 'plots')

cfg.PATHS['working_dir'] = WORKING_DIR
cfg.PATHS['topo_dir'] = os.path.join(DATA_DIR, 'topo')
cfg.PATHS['rgi_dir'] = os.path.join(DATA_DIR, 'rgi')
utils.mkdir(WORKING_DIR)
utils.mkdir(cfg.PATHS['topo_dir'])
utils.mkdir(cfg.PATHS['rgi_dir'])

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = False
cfg.PARAMS['border'] = 20
cfg.PARAMS['continue_on_error'] = False

# Read in the RGI file
rgisel = os.path.join(WORKING_DIR, 'rgi_selection.shp')
if not os.path.exists(rgisel):
    rgi_dir = utils.get_rgi_dir()
    regions = ['{:02d}'.format(int(p)) for p in range(1, 20)]
    files = [glob.glob(os.path.join(rgi_dir, '*', r + '_rgi50_*.shp'))[0]
             for r in regions]
    # NOTE(review): the `if` body is truncated here in this chunk.
def get_dl_verify_data():
    """Returns a dictionary with all known download object hashes.

    The returned dictionary resolves str: cache_obj_name
    to a tuple (int: size, bytes: sha256).
    """
    # Module-level cache: computed once, reused on later calls.
    global _dl_verify_data
    if _dl_verify_data is not None:
        return _dl_verify_data

    verify_file_path = os.path.join(cfg.CACHE_DIR, 'downloads.sha256.xz')

    # Fetch the expected checksum of the verification file itself; a failure
    # here is non-fatal (verification is then skipped), hence the broad catch.
    try:
        with requests.get(CHECKSUM_VALIDATION_URL) as req:
            req.raise_for_status()
            verify_file_sha256 = req.text.split(maxsplit=1)[0]
            verify_file_sha256 = bytearray.fromhex(verify_file_sha256)
    except Exception as e:
        verify_file_sha256 = None
        logger.warning('Failed getting verification checksum: ' + repr(e))

    def do_verify():
        # Hash the local file in 64 KiB chunks to avoid loading it at once.
        if os.path.isfile(verify_file_path) and verify_file_sha256:
            sha256 = hashlib.sha256()
            with open(verify_file_path, 'rb') as f:
                for b in iter(lambda: f.read(0xFFFF), b''):
                    sha256.update(b)
    # NOTE(review): the function is truncated here in this chunk — the
    # comparison against verify_file_sha256, the re-download path, and the
    # caching/return of _dl_verify_data are not visible.
# NOTE(review): fragment of a test setUp flow — `self` refers to a test-case
# instance defined outside this chunk (see cfg_init below); confirm against
# the full file before reusing.
utils.mkdir(self.testdir, reset=True)
self.cfg_init()
# Pre-download other files which will be needed later
utils.get_cru_cl_file()
utils.get_cru_file(var='tmp')
utils.get_cru_file(var='pre')
# Get the RGI glaciers for the run.
rgi_list = ['RGI60-01.10299', 'RGI60-11.00897', 'RGI60-18.02342']
rgidf = utils.get_rgi_glacier_entities(rgi_list)
# We use intersects
db = utils.get_rgi_intersects_entities(rgi_list, version='61')
cfg.set_intersects_db(db)
# Sort for more efficient parallel computing
rgidf = rgidf.sort_values('Area', ascending=False)
# Go - initialize glacier directories
gdirs = workflow.init_glacier_regions(rgidf)
# Preprocessing tasks
# NOTE(review): the original paste never closed this list literal before the
# executable statements that follow — a SyntaxError. Closing bracket restored.
task_list = [
    tasks.glacier_masks,
    tasks.compute_centerlines,
    tasks.initialize_flowlines,
    tasks.compute_downstream_line,
    tasks.compute_downstream_bedshape,
    tasks.catchment_area,
    tasks.catchment_intersections,
]

# see if we can distribute
execute_entity_task(tasks.process_cru_data, gdirs)
# NOTE(review): these climate-calibration calls presumably belong under an
# `if RUN_CLIMATE_PREPRO:` guard (as in the duplicate section below) that was
# lost in this chunk — confirm against the full file.
tasks.compute_ref_t_stars(gdirs)
tasks.distribute_t_stars(gdirs)
execute_entity_task(tasks.apparent_mb, gdirs)

if RUN_INVERSION:
    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    tasks.optimize_inversion_params(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)

if RUN_DYNAMICS:
    # Random dynamics
    execute_entity_task(tasks.init_present_time_glacier, gdirs)
    execute_entity_task(tasks.random_glacier_evolution, gdirs)

# Plots (if you want)
# Setting PLOTS_DIR to '' disables plotting: the script exits right here.
PLOTS_DIR = ''
if PLOTS_DIR == '':
    exit()
utils.mkdir(PLOTS_DIR)
for gd in gdirs:
    bname = os.path.join(PLOTS_DIR, gd.name + '_' + gd.rgi_id + '_')
    graphics.plot_googlemap(gd)
    plt.savefig(bname + 'ggl.png')
    plt.close()
    graphics.plot_domain(gd)
    plt.savefig(bname + 'dom.png')
    plt.close()
    graphics.plot_centerlines(gd)
    plt.savefig(bname + 'cls.png')
# NOTE(review): duplicate copy of the run-and-plot section above —
# concatenation artifact; kept for fidelity with indentation restored.
for task in task_list:
    execute_entity_task(task, gdirs)

if RUN_CLIMATE_PREPRO:
    # Climate related tasks
    # see if we can distribute
    execute_entity_task(tasks.process_cru_data, gdirs)
    tasks.compute_ref_t_stars(gdirs)
    tasks.distribute_t_stars(gdirs)
    execute_entity_task(tasks.apparent_mb, gdirs)

if RUN_INVERSION:
    # Inversion
    execute_entity_task(tasks.prepare_for_inversion, gdirs)
    tasks.optimize_inversion_params(gdirs)
    execute_entity_task(tasks.volume_inversion, gdirs)

if RUN_DYNAMICS:
    # Random dynamics
    execute_entity_task(tasks.init_present_time_glacier, gdirs)
    execute_entity_task(tasks.random_glacier_evolution, gdirs)

# Plots (if you want)
# Setting PLOTS_DIR to '' disables plotting: the script exits right here.
PLOTS_DIR = ''
if PLOTS_DIR == '':
    exit()
utils.mkdir(PLOTS_DIR)
for gd in gdirs:
    bname = os.path.join(PLOTS_DIR, gd.name + '_' + gd.rgi_id + '_')
    graphics.plot_googlemap(gd)
    plt.savefig(bname + 'ggl.png')
    plt.close()
"instead.")
if cfg.PARAMS['baseline_climate'] != 'HISTALP':
raise InvalidParamsError("cfg.PARAMS['baseline_climate'] should be "
"set to HISTALP.")
# read the time out of the pure netcdf file
ft = utils.get_histalp_file('tmp')
fp = utils.get_histalp_file('pre')
with utils.ncDataset(ft) as nc:
vt = nc.variables['time']
assert vt[0] == 0
assert vt[-1] == vt.shape[0] - 1
t0 = vt.units.split(' since ')[1][:7]
time_t = pd.date_range(start=t0, periods=vt.shape[0], freq='MS')
with utils.ncDataset(fp) as nc:
vt = nc.variables['time']
assert vt[0] == 0.5
assert vt[-1] == vt.shape[0] - .5
t0 = vt.units.split(' since ')[1][:7]
time_p = pd.date_range(start=t0, periods=vt.shape[0], freq='MS')
# Now open with salem
nc_ts_tmp = salem.GeoNetcdf(ft, time=time_t)
nc_ts_pre = salem.GeoNetcdf(fp, time=time_p)
# set temporal subset for the ts data (hydro years)
# the reference time is given by precip, which is shorter
sm = cfg.PARAMS['hydro_month_nh']
em = sm - 1 if (sm > 1) else 12
yrs = nc_ts_pre.time.year
y0, y1 = yrs[0], yrs[-1]
def cfg_init(self):
    """Initialize OGGM and set up the default run parameters for this test.

    NOTE(review): method of a test class whose header (and any further body
    lines past this chunk) are not visible — confirm against the full file.
    """
    cfg.initialize(logging_level='ERROR')
    cfg.PATHS['working_dir'] = self.testdir
    cfg.PARAMS['use_multiprocessing'] = True
    cfg.PARAMS['border'] = 100
    cfg.PARAMS['continue_on_error'] = False