rocs[lt, thr] = stp.verification.probscores.ROC_curve_init(thr)
# Loop over the forecasts
startdate = datetime.datetime.strptime(p["data"][0], "%Y%m%d%H%M")
enddate = datetime.datetime.strptime(p["data"][1], "%Y%m%d%H%M")
countnwc = 0
while startdate + datetime.timedelta(minutes=p["n_lead_times"] * ds.timestep) <= enddate:
    countnwc += 1
    print("Verifying the nowcast (%02d) ..." % countnwc)

    # Read observations
    ## find radar field filenames
    input_files = stp.io.find_by_date(startdate, ds.root_path, ds.path_fmt, ds.fn_pattern,
                                      ds.fn_ext, ds.timestep, 0, p["n_lead_times"])

    ## read radar field files
    importer = stp.io.get_method(ds.importer, type="importer")
    R_obs, _, metadata_obs = stp.io.read_timeseries(input_files, importer, **ds.importer_kwargs)
    R_obs = R_obs[1:, :, :]
    metadata_obs["timestamps"] = metadata_obs["timestamps"][1:]

    ## if necessary, convert to rain rates [mm/h]
    converter = stp.utils.get_method("mm/h")
    R_obs, metadata_obs = converter(R_obs, metadata_obs)

    ## threshold the data
    R_obs[R_obs < p["r_threshold"]] = 0.0
    metadata_obs["threshold"] = p["r_threshold"]
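The loop above ends before the scores are updated. A minimal sketch of how the ROC objects created with ROC_curve_init could be accumulated per lead time is shown below; it assumes an ensemble nowcast R_fct of shape (n_members, n_lead_times, ny, nx), a list of thresholds in p["thresholds"], and that probscores.ROC_curve_accum accepts the same arguments as verification.ROC_curve_accum used further down.

import numpy as np

for lt in range(p["n_lead_times"]):
    for thr in p["thresholds"]:
        # exceedance probability = fraction of ensemble members at or above thr
        P_fct = np.mean(R_fct[:, lt, :, :] >= thr, axis=0)
        # assumed accumulation call, mirroring verification.ROC_curve_accum below
        stp.verification.probscores.ROC_curve_accum(rocs[lt, thr], P_fct, R_obs[lt, :, :])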
fns = io.archive.find_by_date(
    date,
    root_path,
    path_fmt,
    fn_pattern,
    fn_ext,
    timestep,
    0,
    num_next_files=n_leadtimes,
)
# Read the observations
R_o, _, metadata_o = io.read_timeseries(fns, importer, **importer_kwargs)
# Convert to mm/h
R_o, metadata_o = conversion.to_rainrate(R_o, metadata_o)
# Upscale data to 2 km
R_o, metadata_o = dimension.aggregate_fields_space(R_o, metadata_o, 2000)
# Compute the verification for the last lead time
# compute the exceedance probability of 0.1 mm/h from the ensemble
P_f = ensemblestats.excprob(R_f[:, -1, :, :], 0.1, ignore_nan=True)
# compute and plot the ROC curve
roc = verification.ROC_curve_init(0.1, n_prob_thrs=10)
verification.ROC_curve_accum(roc, P_f, R_o[-1, :, :])
fig, ax = plt.subplots()  # assumes matplotlib.pyplot has been imported as plt
verification.plot_ROC(roc, ax, opt_prob_thr=True)
ax.set_title("ROC curve (+ %i min)" % (n_leadtimes * timestep))
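For reference, the exceedance probability returned by ensemblestats.excprob is the fraction of ensemble members at or above the threshold at each pixel. A minimal numpy equivalent (ignoring NaN handling) looks like this; excprob_manual is a hypothetical helper, not part of pysteps:

import numpy as np

def excprob_manual(R_ens, thr):
    # R_ens: ensemble array with members along axis 0
    return np.mean(R_ens >= thr, axis=0)

# should closely match ensemblestats.excprob(R_f[:, -1, :, :], 0.1) wherever no NaNs are present
P_manual = excprob_manual(R_f[:, -1, :, :], 0.1)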
nwc_method = stp.nowcasts.get_method(nwc_method)
R_fct = nwc_method(R, UV, n_lead_times, n_ens_members,
                   n_cascade_levels, kmperpixel=metadata["xpixelsize"]/1000,
                   timestep=ds.timestep, R_thr=metadata["threshold"],
                   extrap_method=adv_method, decomp_method=decomp_method,
                   bandpass_filter_method=bandpass_filter,
                   noise_method=noise_method, noise_stddev_adj=adjust_noise,
                   ar_order=ar_order, conditional=conditional,
                   mask_method=mask_method, seed=seed)
## if necessary, transform back all data
R_fct, _ = transformer(R_fct, metadata, inverse=True)
R, metadata = transformer(R, metadata, inverse=True)
## convert all data to mm/h
converter = stp.utils.get_method("mm/h")
R_fct, _ = converter(R_fct, metadata)
R, metadata = converter(R, metadata)
## readjust to initial domain shape
R_fct, _ = reshaper(R_fct, metadata, inverse=True)
R, metadata = reshaper(R, metadata, inverse=True)
## plot the nowcast..
R[Rmask] = np.nan # reapply radar mask
stp.plt.animate(R, nloops=2, timestamps=metadata["timestamps"],
                R_fct=R_fct, timestep_min=ds.timestep,
                UV=UV,
                motion_plot=stp.rcparams.plot.motion_plot,
                geodata=metadata,
                colorscale=stp.rcparams.plot.colorscale,
                plotanimation=True, savefig=False)
if len(X.shape) not in (2, 3):
    raise ValueError("the input is not a two- or three-dimensional array")
if np.any(np.isnan(X)):
    raise ValueError("X must not contain NaNs")
# defaults
win_size = kwargs.get("win_size", (128, 128))
if type(win_size) == int:
    win_size = (win_size, win_size)
win_type = kwargs.get("win_type", "flat-hanning")
overlap = kwargs.get("overlap", 0.3)
war_thr = kwargs.get("war_thr", 0.1)
rm_rdisc = kwargs.get("rm_disc", True)
fft = kwargs.get("fft_method", "numpy")
if type(fft) == str:
    fft_shape = X.shape if len(X.shape) == 2 else X.shape[1:]
    fft = utils.get_method(fft, shape=fft_shape)
X = X.copy()
# remove rain/no-rain discontinuity
if rm_rdisc:
    X[X > X.min()] -= X[X > X.min()].min() - X.min()
# dims
if len(X.shape) == 2:
    X = X[None, :, :]
nr_fields = X.shape[0]
dim = X.shape[1:]
dim_x = dim[1]
dim_y = dim[0]
# make sure non-rainy pixels are set to zero
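The lines above are the argument handling of a short-space FFT (SSFT) noise-filter initializer. A hedged usage sketch, assuming the pysteps noise interface exposes it under the name "ssft" and returns an (initializer, generator) pair:

from pysteps import noise

init_ssft, generate_ssft = noise.get_method("ssft")    # assumed method name
ssft_filter = init_ssft(R, win_size=128, overlap=0.3)  # R: precipitation array without NaNs
noise_field = generate_ssft(ssft_filter, seed=42)       # spatially correlated noise on the same grid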
print("Prepare the data...")
## if requested, make sure we work with a square domain
reshaper = stp.utils.get_method(p["adjust_domain"])
R, metadata = reshaper(R, metadata)
## if necessary, convert to rain rates [mm/h]
converter = stp.utils.get_method("mm/h")
R, metadata = converter(R, metadata)
## threshold the data
R[R < p["r_threshold"]] = 0.0
metadata["threshold"] = p["r_threshold"]
## convert the data
converter = stp.utils.get_method(p["unit"])
R, metadata = converter(R, metadata)
## transform the data
transformer = stp.utils.get_method(p["transformation"])
R, metadata = transformer(R, metadata)
## set NaN equal to zero
R[~np.isfinite(R)] = metadata["zerovalue"]
# Compute motion field
oflow_method = stp.motion.get_method(p["oflow_method"])
UV = oflow_method(R)
# Perform the nowcast
## define the callback function to export the nowcast to netcdf
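The callback body itself is not included in the snippet. A minimal sketch of what it might look like with a netCDF exporter; the exporter initializer, its argument order, and the output path/prefix below are assumptions modelled on the pysteps io.exporters module:

## initialize a netCDF exporter and wrap it in a callback for the nowcast method
exporter = stp.io.initialize_forecast_exporter_netcdf(
    "/tmp/nowcasts", "nowcast", startdate, ds.timestep,
    p["n_lead_times"], R.shape[1:], metadata, p["n_ens_members"])

def export(X):
    ## called after each forecast step with the nowcast field(s)
    stp.io.export_forecast_dataset(X, exporter)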
Lucas, B. D. and Kanade, T.: An iterative image registration technique with
an application to stereo vision, in: Proceedings of the 1981 DARPA Imaging
Understanding Workshop, pp. 121–130, 1981.
"""
input_images = input_images.copy()
if verbose:
    print("Computing the motion field with the Lucas-Kanade method.")
    t0 = time.time()
nr_fields = input_images.shape[0]
domain_size = (input_images.shape[1], input_images.shape[2])
feature_detection_method = utils.get_method(fd_method)
interpolation_method = utils.get_method(interp_method)
if fd_kwargs is None:
    fd_kwargs = dict()
if lk_kwargs is None:
    lk_kwargs = dict()
if interp_kwargs is None:
    interp_kwargs = dict()
xy = np.empty(shape=(0, 2))
uv = np.empty(shape=(0, 2))
for n in range(nr_fields - 1):
    # extract consecutive images
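In user code, the dense Lucas-Kanade routine above is usually obtained through the motion interface rather than called directly. A short usage sketch, where R is assumed to be a (time, y, x) array of at least two consecutive rain fields:

from pysteps import motion

oflow_method = motion.get_method("LK")  # dense Lucas-Kanade
UV = oflow_method(R)                    # UV[0] and UV[1] hold the u and v motion components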
ds = stp.rcparams.data_sources[data_source]
## find radar field filenames
input_files = stp.io.find_by_date(startdate, ds.root_path, ds.path_fmt, ds.fn_pattern,
                                  ds.fn_ext, ds.timestep, n_prvs_times, 0)
## read radar field files
importer = stp.io.get_method(ds.importer, type="importer")
R, _, metadata = stp.io.read_timeseries(input_files, importer, **ds.importer_kwargs)
Rmask = np.isnan(R)
# Prepare input files
print("Prepare the data...")
## if necessary, convert to rain rates [mm/h]
converter = stp.utils.get_method("mm/h")
R, metadata = converter(R, metadata)
## threshold the data
R[R < p["r_threshold"]] = 0.0  # assumed completion of the truncated line, mirroring the other snippets
## find radar field filenames
input_files = stp.io.find_by_date(startdate, ds.root_path, ds.path_fmt, ds.fn_pattern,
                                  ds.fn_ext, ds.timestep, 0, 0)
## read radar field files
importer = stp.io.get_method(ds.importer, "importer")
R, _, metadata = stp.io.read_timeseries(input_files, importer, **ds.importer_kwargs)
R = R.squeeze() # since this contains just one frame
Rmask = np.isnan(R)
# Prepare input files
print("Prepare the data...")
## if necessary, convert to rain rates [mm/h]
converter = stp.utils.get_method(unit)
R, metadata = converter(R, metadata)
## threshold the data
R[R < r_threshold] = 0.0  # assumed completion of the truncated line; the threshold variable name is a guess
    Defaults to "numpy".
MASK : array_like
    Optional mask to use for computing the statistics for the cascade
    levels. Pixels with MASK==False are excluded from the computations.

Returns
-------
out : dict
    A dictionary described in the module documentation.
    The number of cascade levels is determined from the filter
    (see :py:mod:`pysteps.cascade.bandpass_filters`).
"""
fft = kwargs.get("fft_method", "numpy")
if type(fft) == str:
    fft = utils.get_method(fft, shape=field.shape)
mask = kwargs.get("MASK", None)
if len(field.shape) != 2:
    raise ValueError("The input is not a two-dimensional array")
if mask is not None and mask.shape != field.shape:
    raise ValueError("Dimension mismatch between X and MASK:"
                     + " X.shape=" + str(field.shape)
                     + ", MASK.shape=" + str(mask.shape))
if field.shape[0] != bp_filter["weights_2d"].shape[1]:
    raise ValueError(
        "dimension mismatch between X and filter: "
        + "X.shape[0]=%d , " % field.shape[0]
        # the final operand below is an assumed completion of the truncated message
        + "filter['weights_2d'].shape[1]=%d" % bp_filter["weights_2d"].shape[1]
    )
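A short usage sketch for the decomposition routine whose body starts above, assuming the standard Gaussian bandpass filter from pysteps; the input field here is a random placeholder:

import numpy as np
from pysteps.cascade.bandpass_filters import filter_gaussian
from pysteps.cascade.decomposition import decomposition_fft

field = np.random.rand(256, 256)             # placeholder 2D field, finite everywhere
bp_filter = filter_gaussian(field.shape, 6)  # 6 cascade levels
decomp = decomposition_fft(field, bp_filter)
print(decomp["cascade_levels"].shape)        # (6, 256, 256)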