# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _issue_time_generator(observation, fx, obs_mint, obs_maxt, next_issue_time,
                          max_run_time):
    """Yield the forecast issue times that observation data can support.

    Starting from *next_issue_time* and walking forward until
    *max_run_time*, compute the observation data window each candidate
    issue time would require and yield the issue time only when that
    window lies inside the available observation timestamps
    ``[obs_mint, obs_maxt]``.

    Parameters
    ----------
    observation, fx
        Project datamodel objects passed through to
        ``utils.get_data_start_end`` / ``utils.get_next_issue_time``.
    obs_mint, obs_maxt : pandas.Timestamp
        First and last available observation timestamps.
    next_issue_time : pandas.Timestamp
        First candidate issue time to consider.
    max_run_time : pandas.Timestamp
        Do not yield issue times later than this.

    Yields
    ------
    pandas.Timestamp
        Each issue time whose required data window is fully covered.
    """
    # now find all the run times that can be made based on the
    # last observation timestamp
    while next_issue_time <= max_run_time:
        data_start, data_end = utils.get_data_start_end(
            observation, fx, next_issue_time, next_issue_time)
        if data_end > obs_maxt:
            # Ran past the available observations; every later issue
            # time needs even more data, so stop entirely.
            break
        if data_start > obs_mint:
            yield next_issue_time
        # BUGFIX: advance unconditionally.  In the original flattened
        # text the advance (and a redundant trailing `continue`) sat
        # under the `data_start > obs_mint` branch, so the loop never
        # terminated when the required window started at or before
        # obs_mint.  The 1ns nudge guarantees get_next_issue_time
        # returns a strictly later issue time.
        next_issue_time = utils.get_next_issue_time(
            fx, next_issue_time + pd.Timedelta('1ns'))
# NOTE(review): fragment — the enclosing function's `def` (and the scope
# providing `fx`, `observation`, `obs_mint`, `obs_maxt`, `max_run_time`
# and `session`) lies outside this chunk; indentation appears to have
# been lost, so structure below is as extracted, not as authored.
if isinstance(fx, datamodel.ProbabilisticForecast):
# Probabilistic forecasts store values per constant value; presumably the
# first constant value's series stands in for the whole forecast's time
# range — TODO confirm against the session API.
fx_mint, fx_maxt = \
session.get_probabilistic_forecast_constant_value_time_range(
fx.constant_values[0].forecast_id)
else:
fx_mint, fx_maxt = session.get_forecast_time_range(fx.forecast_id)
# find the next issue time for the forecast based on the last value
# in the forecast series
if pd.isna(fx_maxt):
# if there is no forecast yet, go back a bit from the last
# observation. Don't use the start of observations, since it
# could really stress the workers if we have a few years of
# data before deciding to make a persistence fx
next_issue_time = utils.get_next_issue_time(
fx, obs_maxt - fx.run_length)
else:
next_issue_time = utils.find_next_issue_time_from_last_forecast(
fx, fx_maxt)
# Data window for the first candidate issue time; only data_start is
# used here (data_end is recomputed inside the generator).
data_start, _ = utils.get_data_start_end(
observation, fx, next_issue_time, next_issue_time)
# Materialize every issue time the available observations can serve.
issue_times = tuple(_issue_time_generator(
observation, fx, obs_mint, obs_maxt,
next_issue_time, max_run_time))
if len(issue_times) == 0:
continue
# NOTE(review): the namedtuple call below is truncated in this chunk —
# its field list continues past the visible lines.
out = namedtuple(
'PersistenceParameters',
# NOTE(review): fragment — this belongs to a different enclosing loop
# (NWP group processing); `errors`, `group`, `run_for`, `run_time` and
# `session` are defined outside this view.
if errors:
logger.error(
'Not all forecasts compatible in group with %s. '
'The following parameters may differ: %s', run_for, errors)
continue
try:
# The forecast the rest of the group piggybacks on.
key_fx = group.loc[run_for].forecast
except KeyError:
logger.error('Forecast, %s, that others are piggybacking on not '
'found', run_for)
continue
model_str = group.loc[run_for].model
# Resolve the model function by name from the models module.
model = getattr(models, model_str)
issue_time = group.loc[run_for].next_issue_time
if issue_time is None:
# No issue time recorded for the group; derive it from run_time.
issue_time = utils.get_next_issue_time(key_fx, run_time)
try:
nwp_result = run_nwp(key_fx, model, run_time, issue_time)
except FileNotFoundError as e:
# Presumably the NWP grid files are not available yet — skip the
# whole group rather than fail the run.
logger.error('Could not process group of %s, %s', run_for, str(e))
continue
# Post values for every forecast in the group that this run produced.
# NOTE(review): Series.iteritems is deprecated (removed in pandas 2.0);
# .items() is the modern equivalent — confirm target pandas version.
for fx_id, fx in group['forecast'].iteritems():
fx_vals = getattr(nwp_result, fx.variable)
if fx_vals is None:
logger.warning('No forecast produced for %s in group with %s',
fx_id, run_for)
continue
logger.info('Posting values %s for %s:%s issued at %s',
len(fx_vals), fx.name, fx_id, issue_time)
_post_forecast_values(session, fx, fx_vals, model_str)
# NOTE(review): fragment — the `try:` matching this `except`, the loop
# over `fx`, and the initialization of `df_vals` are outside this view.
except json.JSONDecodeError:
logger.warning(
'Failed to decode extra_parameters for %s: %s as JSON',
fx.name, fx.forecast_id)
continue
try:
model = extra_parameters['model']
except KeyError:
# Without a model name nothing can be run for this forecast.
logger.error(
'Forecast, %s: %s, has no model. Cannot make forecast.',
fx.name, fx.forecast_id)
continue
if run_time is not None:
next_issue_time = utils.get_next_issue_time(fx, run_time)
else:
# No run_time supplied; issue time resolved later by the consumer.
next_issue_time = None
# Default to piggybacking on itself when no other forecast is named.
piggyback_on = extra_parameters.get('piggyback_on', fx.forecast_id)
df_vals.append((fx.forecast_id, fx, piggyback_on, model,
next_issue_time))
# One row per forecast, indexed by forecast_id, so later code can group
# forecasts that share a piggyback target / model.
forecast_df = pd.DataFrame(
df_vals, columns=['forecast_id', 'forecast', 'piggyback_on', 'model',
'next_issue_time']
).set_index('forecast_id')
return forecast_df