from datetime import datetime, timedelta
import datetime as dt

import matplotlib.pyplot as plt
import numpy as np
import xarray as xr
import pysteps as sp
from pysteps import io, motion, nowcasts, rcparams, verification
from pysteps.utils import conversion, transformation
from pysteps.visualization import plot_precip_field, quiver

###############################################################################
# Load the data from the archive
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
root_path = data_source["root_path"]
path_fmt = data_source["path_fmt"]
fn_pattern = data_source["fn_pattern"]
fn_ext = data_source["fn_ext"]
importer_name = data_source["importer"]
importer_kwargs = data_source["importer_kwargs"]
timestep = data_source["timestep"]
# Get 1 hour of observations in the data archive
fns = io.archive.find_by_date(
    date,
    root_path,
    path_fmt,
    fn_pattern,
    fn_ext,
    timestep,
    num_next_files=11,
)
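# find_by_date returns a tuple of two lists: the file names and the
# corresponding timestamps.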
# Read the radar composites
importer = io.get_method(importer_name, "importer")
Z, _, metadata = io.read_timeseries(fns, importer, **importer_kwargs)
# Keep only positive rainfall values
Z = Z[Z > metadata["zerovalue"]].flatten()
# Plot the last lead time of the i-th ensemble member of the nowcast R_f
plot_precip_field(R_f[i, -1, :, :], geodata=metadata, colorbar=False, axis="off")
plt.tight_layout()
###############################################################################
# Verification
# ------------
#
# Pysteps includes a number of verification metrics that help users analyze
# the general characteristics of the nowcasts in terms of consistency and
# quality (or goodness).
# Here, we will verify our probabilistic forecasts using the ROC curve,
# reliability diagrams, and rank histograms, as implemented in the verification
# module of pysteps.
# Find the files containing the verifying observations
fns = io.archive.find_by_date(
    date,
    root_path,
    path_fmt,
    fn_pattern,
    fn_ext,
    timestep,
    num_prev_files=0,
    num_next_files=n_leadtimes,
)
# Read the observations
R_o, _, metadata_o = io.read_timeseries(fns, importer, **importer_kwargs)
# Convert to mm/h
R_o, metadata_o = conversion.to_rainrate(R_o, metadata_o)
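# A minimal verification sketch, assuming R_f holds an ensemble nowcast of
# shape (n_members, n_leadtimes, m, n): exceedance probabilities for a 1 mm/h
# threshold are accumulated into a ROC curve and a rank histogram.
from pysteps.postprocessing import ensemblestats

P_f = ensemblestats.excprob(R_f[:, -1, :, :], 1.0, ignore_nan=True)
roc = verification.ROC_curve_init(1.0, n_prob_thrs=10)
verification.ROC_curve_accum(roc, P_f, R_o[-1, :, :])
fig, ax = plt.subplots()
verification.plot_ROC(roc, ax, opt_prob_thrs=True)
rankhist = verification.rankhist_init(R_f.shape[0], 1.0)
verification.rankhist_accum(rankhist, R_f[:, -1, :, :], R_o[-1, :, :])
fig, ax = plt.subplots()
verification.plot_rankhist(rankhist, ax)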
###############################################################################
# Compute the motion fields
# -------------------------
importer = io.get_method(datasource["importer"], "importer")
motionfields = {}
# Optical flow method selected via the command-line arguments
oflow = motion.get_method(args.oflow)
# TODO: This keeps all motion fields in memory during the analysis period,
# which can require a lot of memory.
curdate = startdate
while curdate <= enddate:
    try:
        fns = io.archive.find_by_date(
            curdate,
            datasource["root_path"],
            datasource["path_fmt"],
            datasource["fn_pattern"],
            datasource["fn_ext"],
            datasource["timestep"],
            num_prev_files=9,
        )
    except IOError:
        curdate += timedelta(minutes=datasource["timestep"])
        continue
    # Skip the analysis time if any of the input files are missing
    if any(fn is None for fn in fns[0]):
        curdate += timedelta(minutes=datasource["timestep"])
        continue
    R, _, metadata = io.readers.read_timeseries(
        fns, importer, **datasource["importer_kwargs"]
    )
    # TODO: Here we assume that metadata["xpixelsize"] == metadata["ypixelsize"]
    # Scale factor converting the motion vectors from pixels/timestep to km/h
    vsf = 60.0 / datasource["timestep"] * metadata["xpixelsize"] / 1000.0
    # Assumed completion (the original loop body is truncated here): estimate
    # and store the motion field, then advance to the next analysis time.
    motionfields[curdate] = oflow(R) * vsf
    curdate += timedelta(minutes=datasource["timestep"])
# Back-transform the nowcast R_f to rain rates
R_f = transformation.dB_transform(R_f, threshold=-10.0, inverse=True)[0]
# Plot the motion field V on top of the last observed rainfall field
plot_precip_field(R_, geodata=metadata)
quiver(V, geodata=metadata, step=50)
###############################################################################
# Verify with FSS
# ---------------
#
# The fractions skill score (FSS) provides an intuitive assessment of the
# dependency of skill on spatial scale and intensity, which makes it an ideal
# skill score for high-resolution precipitation forecasts.
# Find observations in the data archive
fns = io.archive.find_by_date(
    date,
    root_path,
    path_fmt,
    fn_pattern,
    fn_ext,
    timestep,
    num_prev_files=0,
    num_next_files=n_leadtimes,
)
# Read the radar composites
R_o, _, metadata_o = io.read_timeseries(fns, importer, **importer_kwargs)
R_o, metadata_o = conversion.to_rainrate(R_o, metadata_o, zr_a=223.0, zr_b=1.53)
# Compute fractions skill score (FSS) for all lead times, a set of scales and 1 mm/h
fss = verification.get_method("FSS")
scales = [2, 4, 8, 16, 32, 64, 128, 256, 512]
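# A minimal sketch of the FSS computation, assuming R_f holds a deterministic
# nowcast of shape (n_leadtimes, m, n): for each lead time and spatial scale,
# compare the fractions of pixels exceeding 1 mm/h in forecast and observation.
thr = 1.0  # rain/no-rain threshold [mm/h]
scores = []
for i in range(n_leadtimes):
    scores.append(
        [fss(R_f[i, :, :], R_o[i + 1, :, :], thr, scale) for scale in scales]
    )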
date = datetime.strptime("201609281600", "%Y%m%d%H%M")
data_source = "fmi"
n_leadtimes = 12
# Load data source config
root_path = rcparams.data_sources[data_source]["root_path"]
path_fmt = rcparams.data_sources[data_source]["path_fmt"]
fn_pattern = rcparams.data_sources[data_source]["fn_pattern"]
fn_ext = rcparams.data_sources[data_source]["fn_ext"]
importer_name = rcparams.data_sources[data_source]["importer"]
importer_kwargs = rcparams.data_sources[data_source]["importer_kwargs"]
timestep = rcparams.data_sources[data_source]["timestep"]
# Find the input files from the archive
fns = io.archive.find_by_date(
    date, root_path, path_fmt, fn_pattern, fn_ext, timestep, num_prev_files=2
)
# Read the radar composites
importer = io.get_method(importer_name, "importer")
Z, _, metadata = io.read_timeseries(fns, importer, **importer_kwargs)
# Convert to rain rate using the Finnish Z-R relationship
R, metadata = conversion.to_rainrate(Z, metadata, zr_a=223.0, zr_b=1.53)
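# For reference, this inverts the relationship Z = 223 * R ** 1.53 to obtain
# the rain rate R from the reflectivity Z.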
# Plot the rainfall field
plot_precip_field(R[-1, :, :], geodata=metadata)
# Store the last frame for plotting it later
R_ = R[-1, :, :].copy()
# MeteoSwiss uses a GIF format to store its radar composite data.
# First, we will use the pysteps methods to retrieve filenames.
date = dt.datetime.strptime("201505151630", "%Y%m%d%H%M")
data_source = sp.rcparams.data_sources["mch"]
root_path = data_source["root_path"]
path_fmt = data_source["path_fmt"]
fn_pattern = data_source["fn_pattern"]
fn_ext = data_source["fn_ext"]
importer_name = data_source["importer"]
importer_kwargs = data_source["importer_kwargs"]
timestep = data_source["timestep"]
# Find the input files from the archive
fns = sp.io.archive.find_by_date(
    date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=9
)
importer = sp.io.get_method(importer_name, "importer")
###############################################################################
# Import GIF file into xarray
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~
def load_mch(filename, timestep, **importer_kwargs):
    R, quality, meta = importer(filename, **importer_kwargs)
    x1 = meta["x1"]
    y1 = meta["y1"]
    xsize = meta["xpixelsize"]
    ysize = meta["ypixelsize"]
    # Assumed completion (the original body is truncated here): build
    # georeferenced coordinates and wrap the field in an xarray DataArray.
    y = y1 + ysize * (np.arange(R.shape[0]) + 0.5)
    x = x1 + xsize * (np.arange(R.shape[1]) + 0.5)
    return xr.DataArray(R, dims=("y", "x"), coords={"time": timestep, "y": y, "x": x})
date = datetime.strptime("201505151630", "%Y%m%d%H%M")
data_source = rcparams.data_sources["mch"]
###############################################################################
# Load the data from the archive
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
root_path = data_source["root_path"]
path_fmt = data_source["path_fmt"]
fn_pattern = data_source["fn_pattern"]
fn_ext = data_source["fn_ext"]
importer_name = data_source["importer"]
importer_kwargs = data_source["importer_kwargs"]
# Find the reference field in the archive
fns = io.archive.find_by_date(
    date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=0
)
# Read the reference radar composite
importer = io.get_method(importer_name, "importer")
reference_field, quality, metadata = io.read_timeseries(
    fns, importer, **importer_kwargs
)
del quality # Not used
reference_field = np.squeeze(reference_field) # Remove time dimension
###############################################################################
# Preprocess the data
# ~~~~~~~~~~~~~~~~~~~
#
# The radar composites are first converted to rain rate (mm/h) and then
# log-transformed into units of dBR.
date = datetime.strptime("201505151630", "%Y%m%d%H%M")
data_source = "mch"
# Load data source config
root_path = rcparams.data_sources[data_source]["root_path"]
path_fmt = rcparams.data_sources[data_source]["path_fmt"]
fn_pattern = rcparams.data_sources[data_source]["fn_pattern"]
fn_ext = rcparams.data_sources[data_source]["fn_ext"]
importer_name = rcparams.data_sources[data_source]["importer"]
importer_kwargs = rcparams.data_sources[data_source]["importer_kwargs"]
timestep = rcparams.data_sources[data_source]["timestep"]
# Find the input files from the archive
fns = io.archive.find_by_date(
    date, root_path, path_fmt, fn_pattern, fn_ext, timestep, num_prev_files=9
)
# Read the radar composites
importer = io.get_method(importer_name, "importer")
R, _, metadata = io.read_timeseries(fns, importer, **importer_kwargs)
# Convert to mm/h
R, metadata = conversion.to_rainrate(R, metadata)
# Store the last frame for plotting it later
R_ = R[-1, :, :].copy()
# Log-transform the data
R, metadata = transformation.dB_transform(R, metadata, threshold=0.1, zerovalue=-15.0)
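# For reference: dB_transform computes dBR = 10 * log10(R); rain rates below
# the 0.1 mm/h threshold are assigned the constant zerovalue of -15 dBR.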
data_source = rcparams.data_sources["mch"]
###############################################################################
# Load the data from the archive
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
root_path = data_source["root_path"]
path_fmt = data_source["path_fmt"]
fn_pattern = data_source["fn_pattern"]
fn_ext = data_source["fn_ext"]
importer_name = data_source["importer"]
importer_kwargs = data_source["importer_kwargs"]
timestep = data_source["timestep"]
# Find the two input files from the archive
fns = io.archive.find_by_date(
    date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=1
)
# Read the radar composites
importer = io.get_method(importer_name, "importer")
R, quality, metadata = io.read_timeseries(fns, importer, **importer_kwargs)
del quality # Not used
###############################################################################
# Preprocess the data
# ~~~~~~~~~~~~~~~~~~~
# Convert to mm/h
R, metadata = conversion.to_rainrate(R, metadata)
# The extrapolation forecasts are computed using the dense UV motion fields
# estimated above.
# Get the advection routine and extrapolate the last radar frame by 12 time steps
# (i.e., 1 hour lead time)
extrapolate = nowcasts.get_method("extrapolation")
R[~np.isfinite(R)] = metadata["zerovalue"]
R_f1 = extrapolate(R[-1], UV1, 12)
R_f2 = extrapolate(R[-1], UV2, 12)
# Back-transform to rain rate
R_f1 = transformation.dB_transform(R_f1, threshold=-10.0, inverse=True)[0]
R_f2 = transformation.dB_transform(R_f2, threshold=-10.0, inverse=True)[0]
# Find the verifying observations in the archive
fns = io.archive.find_by_date(
    date,
    root_path,
    path_fmt,
    fn_pattern,
    fn_ext,
    timestep=5,
    num_next_files=12,
)
# Read and convert the radar composites
R_o, _, metadata_o = io.read_timeseries(fns, importer, **importer_kwargs)
R_o, metadata_o = conversion.to_rainrate(R_o, metadata_o)
# Compute Spearman correlation
skill = verification.get_method("corr_s")
score_1 = []
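# Assumed completion (the original snippet is truncated here): accumulate the
# Spearman correlation between each lead time and the verifying observation
# for both extrapolations.
score_2 = []
for i in range(12):
    score_1.append(skill(R_f1[i, :, :], R_o[i + 1, :, :])["corr_s"])
    score_2.append(skill(R_f2[i, :, :], R_o[i + 1, :, :])["corr_s"])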