import datetime

from pysteps import io, rcparams


def import_mch_gif():
    date = datetime.datetime.strptime("201505151630", "%Y%m%d%H%M")
    data_source = "mch"

    # Load data source config
    root_path = rcparams.data_sources[data_source]["root_path"]
    path_fmt = rcparams.data_sources[data_source]["path_fmt"]
    fn_pattern = rcparams.data_sources[data_source]["fn_pattern"]
    fn_ext = rcparams.data_sources[data_source]["fn_ext"]
    importer_name = rcparams.data_sources[data_source]["importer"]
    importer_kwargs = rcparams.data_sources[data_source]["importer_kwargs"]
    timestep = rcparams.data_sources[data_source]["timestep"]

    # Find the input files from the archive
    fns = io.archive.find_by_date(date,
                                  root_path,
                                  path_fmt,
                                  fn_pattern,
                                  fn_ext,
                                  timestep=timestep,
                                  num_prev_files=1)

    # Read the radar composites
    importer = io.get_method(importer_name, "importer")
    R, _, metadata = io.read_timeseries(fns, importer, **importer_kwargs)

    return R, metadata
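# A minimal usage sketch for the function above, assuming the pysteps
# example data is configured in pystepsrc (the printed keys are
# illustrative):
R, metadata = import_mch_gif()
print(R.shape)  # (2, m, n): one previous frame plus the reference frame
print(metadata["timestamps"])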
Example #2
import datetime

import numpy as np

from pysteps import io, rcparams, utils


def _import_mch_gif(prv, nxt):
    date = datetime.datetime.strptime("201505151630", "%Y%m%d%H%M")
    data_source = rcparams.data_sources["mch"]

    # Load data source config
    root_path = data_source["root_path"]
    path_fmt = data_source["path_fmt"]
    fn_pattern = data_source["fn_pattern"]
    fn_ext = data_source["fn_ext"]
    importer_name = data_source["importer"]
    importer_kwargs = data_source["importer_kwargs"]
    timestep = data_source["timestep"]

    # Find the input files from the archive
    fns = io.archive.find_by_date(
        date,
        root_path,
        path_fmt,
        fn_pattern,
        fn_ext,
        timestep=timestep,
        num_prev_files=prv,
        num_next_files=nxt,
    )

    # Read the radar composites
    importer = io.get_method(importer_name, "importer")
    R, _, metadata = io.read_timeseries(fns, importer, **importer_kwargs)

    # Convert to rain rate
    R, metadata = utils.conversion.to_rainrate(R, metadata)

    # Upscale data to 2 km
    R, metadata = utils.dimension.aggregate_fields_space(R, metadata, 2000)

    # Log-transform the data to unit of dBR, set the threshold to 0.1 mm/h,
    # set the fill value to -15 dBR
    R, metadata = utils.transformation.dB_transform(R,
                                                    metadata,
                                                    threshold=0.1,
                                                    zerovalue=-15.0)

    # Set missing values with the fill value
    R[~np.isfinite(R)] = -15.0

    return R, metadata
Example #3
from datetime import datetime

import numpy as np

import pysteps as stp
from pysteps import io, rcparams


def get_precipitation_fields(num_prev_files=0):
    """Get a precipitation field from the archive to be used as reference."""

    # Selected case
    date = datetime.strptime("201505151630", "%Y%m%d%H%M")
    data_source = rcparams.data_sources["mch"]

    root_path = data_source["root_path"]
    path_fmt = data_source["path_fmt"]
    fn_pattern = data_source["fn_pattern"]
    fn_ext = data_source["fn_ext"]
    importer_name = data_source["importer"]
    importer_kwargs = data_source["importer_kwargs"]

    # Find the input files from the archive
    fns = io.archive.find_by_date(date,
                                  root_path,
                                  path_fmt,
                                  fn_pattern,
                                  fn_ext,
                                  timestep=5,
                                  num_prev_files=num_prev_files)

    # Read the radar composites
    importer = io.get_method(importer_name, "importer")
    reference_field, quality, metadata = io.read_timeseries(
        fns, importer, **importer_kwargs)

    del quality  # Not used

    if num_prev_files == 0:
        reference_field = np.squeeze(reference_field)  # Remove time dimension

    # Convert to mm/h
    reference_field, metadata = stp.utils.to_rainrate(reference_field,
                                                      metadata)

    # Mask invalid values
    reference_field = np.ma.masked_invalid(reference_field)

    # Log-transform the data [dBR]
    reference_field, metadata = stp.utils.dB_transform(reference_field,
                                                       metadata,
                                                       threshold=0.1,
                                                       zerovalue=-15.0)
    return reference_field
Example #4

root_path = data_source["root_path"]
path_fmt = data_source["path_fmt"]
fn_pattern = data_source["fn_pattern"]
fn_ext = data_source["fn_ext"]
importer_name = data_source["importer"]
importer_kwargs = data_source["importer_kwargs"]

# Find the reference field in the archive
fns = io.archive.find_by_date(date, root_path, path_fmt, fn_pattern, fn_ext,
                              timestep=5, num_prev_files=0)

# Read the reference radar composite
importer = io.get_method(importer_name, "importer")
reference_field, quality, metadata = io.read_timeseries(fns, importer,
                                                        **importer_kwargs)

del quality  # Not used

reference_field = np.squeeze(reference_field)  # Remove time dimension

###############################################################################
# Preprocess the data
# ~~~~~~~~~~~~~~~~~~~

# Convert to mm/h
reference_field, metadata = stp.utils.to_rainrate(reference_field, metadata)

# Mask invalid values
reference_field = np.ma.masked_invalid(reference_field)
Example #5
from datetime import datetime

import numpy as np
from matplotlib import cm, pyplot

from pysteps import io
from pysteps.cascade.bandpass_filters import filter_gaussian
from pysteps.cascade.decomposition import decomposition_fft
from pysteps.io.importers import import_fmi_pgm

date = datetime.strptime("201609281600", "%Y%m%d%H%M")
# insert your data path here
root_path = ""
fn_pattern = "%Y%m%d%H%M_fmi.radar.composite.lowest_FIN_SUOMI1"
fn_ext = "pgm.gz"

cmap = cm.RdBu_r
vmin = -3
vmax = 3

fn = io.archive.find_by_date(date, root_path, "%Y%m%d", fn_pattern, fn_ext, 5)

R, _, metadata = io.read_timeseries(fn, import_fmi_pgm, gzipped=True)
R = R.squeeze()

# Set all values below 10 dBZ and all non-finite values to 5 dBZ, then
# normalize the field to zero mean and unit variance
R[R < 10.0] = 5.0
R[~np.isfinite(R)] = 5.0
R = (R - np.mean(R)) / np.std(R)

bandpass_filter = filter_gaussian(R.shape, 8)
decomp = decomposition_fft(R, bandpass_filter)

for i in range(8):
    mu = decomp["means"][i]
    sigma = decomp["stds"][i]
    decomp["cascade_levels"][i] = (decomp["cascade_levels"][i] - mu) / sigma

fig, ax = pyplot.subplots(nrows=2, ncols=4)
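# A hedged continuation (the original snippet is cut off here): one way to
# display the eight normalized cascade levels on the 2x4 grid created above.
for i in range(8):
    axi = ax.flat[i]
    axi.imshow(decomp["cascade_levels"][i], cmap=cmap, vmin=vmin, vmax=vmax)
    axi.set_title("Level %d" % (i + 1))
    axi.axis("off")
pyplot.show()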
Example #6
    source = "".join([i for i in case if not i.isdigit()])
    data_source = pysteps.rcparams.data_sources[source]

    # Find the input files from the archive
    file_names = io.archive.find_by_date(
        case_date,
        data_source["root_path"],
        data_source["path_fmt"],
        data_source["fn_pattern"],
        data_source["fn_ext"],
        data_source["timestep"],
        num_prev_files=0,
        num_next_files=frames - 1,
    )

    if None in file_names[0]:
        raise FileNotFoundError(
            f"Error loading {case} case. Some files are missing.")

    # Read the radar composites
    importer = io.get_method(data_source["importer"], "importer")
    importer_kwargs = data_source["importer_kwargs"]
    reflectivity, _, metadata = io.read_timeseries(file_names, importer,
                                                   **importer_kwargs)

    # Convert to rain rate
    precip, metadata = conversion.to_rainrate(reflectivity, metadata)

    return precip, metadata, data_source["timestep"]
Example #7
importer_name = data_source["importer"]
importer_kwargs = data_source["importer_kwargs"]
timestep = data_source["timestep"]

# Find the two input files from the archive
fns = io.archive.find_by_date(date,
                              root_path,
                              path_fmt,
                              fn_pattern,
                              fn_ext,
                              timestep=timestep,
                              num_prev_files=1)

# Read the radar composites
importer = io.get_method(importer_name, "importer")
R, quality, metadata = io.read_timeseries(fns, importer, **importer_kwargs)

del quality  # Not used

###############################################################################
# Preprocess the data
# ~~~~~~~~~~~~~~~~~~~

# Convert to mm/h
R, metadata = conversion.to_rainrate(R, metadata)

# Keep the reference frame in mm/h and build a mask of the radar no-data
# region (NaN where data is valid, 1 where it is missing) for plotting
ref_mm = R[0, :, :].copy()
mask = np.ones(ref_mm.shape)
mask[~np.isnan(ref_mm)] = np.nan
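# A hedged illustration of how the reference frame and mask are typically
# used for plotting (the gray overlay styling is an assumption, not the
# original code):
from matplotlib import colors, pyplot as plt
from pysteps.visualization import plot_precip_field

plot_precip_field(ref_mm, title="Reference frame")
plt.imshow(mask, cmap=colors.ListedColormap(["gray"]), alpha=0.5)
plt.show()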
Example #8
fn_pattern = data_source["fn_pattern"]
fn_ext = data_source["fn_ext"]
importer_name = data_source["importer"]
importer_kwargs = data_source["importer_kwargs"]
timestep = data_source["timestep"]

# Load the data from the archive
fns = io.archive.find_by_date(date,
                              root_path,
                              path_fmt,
                              fn_pattern,
                              fn_ext,
                              timestep,
                              num_next_files=20)
importer = io.get_method(importer_name, "importer")
R, _, metadata = io.read_timeseries(fns, importer, **importer_kwargs)

# Convert to reflectivity (the a and b parameters of the Marshall-Palmer
# relationship can be passed here via the zr_a and zr_b keyword arguments).
Z, metadata = to_reflectivity(R, metadata)

# Extract the list of timestamps
timelist = metadata["timestamps"]

pprint(metadata)

###############################################################################
# Example of thunderstorm identification in a single timestep.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# The function tstorm_detect.detection requires a 2-D input image; all
# further inputs are optional.
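# A hedged sketch of that single-timestep call (tstorm_detect is assumed to
# be pysteps.feature.tstorm; only the required arguments are shown):
from pysteps.feature import tstorm as tstorm_detect

cells_id, labels = tstorm_detect.detection(Z[-1, :, :], time=timelist[-1])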
Example #9
def get_precipitation_fields(
    num_prev_files=0,
    num_next_files=0,
    return_raw=False,
    metadata=False,
    upscale=None,
    source="mch",
    log_transform=True,
    clip=None,
    **importer_kwargs,
):
    """
    Get a precipitation field from the archive to be used as reference.

    Source: bom
    Reference time: 2018/06/16 1000 UTC

    Source: fmi
    Reference time: 2016/09/28 1600 UTC

    Source: knmi
    Reference time: 2010/08/26 0000 UTC

    Source: mch
    Reference time: 2015/05/15 1630 UTC

    Source: opera
    Reference time: 2018/08/24 1800 UTC

    Source: saf
    Reference time: 2018/06/01 0700 UTC

    Source: mrms
    Reference time: 2019/06/10 0000 UTC

    Parameters
    ----------

    num_prev_files: int, optional
        Number of previous times (files) to return with respect to the
        reference time.

    num_next_files: int, optional
        Number of future times (files) to return with respect to the
        reference time.

    return_raw: bool, optional
        Do not preprocess the precipitation fields. False by default.
        The pre-processing steps are: 1) Convert to mm/h,
        2) Mask invalid values, 3) Log-transform the data [dBR].

    metadata: bool, optional
        If True, also return file metadata.

    clip: scalars (left, right, bottom, top), optional
        The extent of the bounding box in data coordinates to be used to clip
        the data.

    upscale: float or None, optional
        Upscale fields in space during the pre-processing steps.
        If it is None, the precipitation field is not modified.
        If it is a float, it represents the length of the space window that
        is used to upscale the fields.

    source: {"bom", "fmi" , "knmi", "mch", "opera", "saf", "mrms"}, optional
        Name of the data source to be used.

    log_transform: bool
        Whether to transform the output to dB.

    Other Parameters
    ----------------

    importer_kwargs : dict
        Additional keyword arguments passed to the importer.

    Returns
    -------
    reference_field : array

    metadata : dict
    """

    if source == "bom":
        pytest.importorskip("netCDF4")

    if source == "fmi":
        pytest.importorskip("pyproj")

    if source == "knmi":
        pytest.importorskip("h5py")

    if source == "mch":
        pytest.importorskip("PIL")

    if source == "opera":
        pytest.importorskip("h5py")

    if source == "saf":
        pytest.importorskip("netCDF4")

    if source == "mrms":
        pytest.importorskip("pygrib")

    try:
        date = _reference_dates[source]
    except KeyError:
        raise ValueError(
            f"Unknown source name '{source}'\n"
            "The available data sources are: "
            f"{str(list(_reference_dates.keys()))}"
        )

    data_source = rcparams.data_sources[source]
    root_path = data_source["root_path"]
    path_fmt = data_source["path_fmt"]
    fn_pattern = data_source["fn_pattern"]
    fn_ext = data_source["fn_ext"]
    importer_name = data_source["importer"]
    _importer_kwargs = data_source["importer_kwargs"].copy()
    _importer_kwargs.update(**importer_kwargs)
    timestep = data_source["timestep"]

    # Find the input files from the archive
    fns = io.archive.find_by_date(
        date,
        root_path,
        path_fmt,
        fn_pattern,
        fn_ext,
        timestep=timestep,
        num_prev_files=num_prev_files,
        num_next_files=num_next_files,
    )

    # Read the radar composites
    importer = io.get_method(importer_name, "importer")

    reference_field, __, ref_metadata = io.read_timeseries(
        fns, importer, **_importer_kwargs
    )

    if not return_raw:

        if (num_prev_files == 0) and (num_next_files == 0):
            # Remove time dimension
            reference_field = np.squeeze(reference_field)

        # Convert to mm/h
        reference_field, ref_metadata = stp.utils.to_rainrate(
            reference_field, ref_metadata
        )

        # Clip domain
        reference_field, ref_metadata = stp.utils.clip_domain(
            reference_field, ref_metadata, clip
        )

        # Upscale data
        reference_field, ref_metadata = aggregate_fields_space(
            reference_field, ref_metadata, upscale
        )

        # Mask invalid values
        reference_field = np.ma.masked_invalid(reference_field)

        if log_transform:
            # Log-transform the data [dBR]
            reference_field, ref_metadata = stp.utils.dB_transform(
                reference_field, ref_metadata, threshold=0.1, zerovalue=-15.0
            )

        # Set missing values with the fill value
        np.ma.set_fill_value(reference_field, ref_metadata["zerovalue"])
        reference_field.data[reference_field.mask] = ref_metadata["zerovalue"]

    if metadata:
        return reference_field, ref_metadata

    return reference_field
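# A minimal usage sketch for the helper above, assuming the pysteps test
# data is installed (argument values are illustrative):
field, meta = get_precipitation_fields(
    num_prev_files=1, source="mch", metadata=True, upscale=2000
)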
Example #10
root = rcparams.data_sources["mch"]["root_path"]
fmt = rcparams.data_sources["mch"]["path_fmt"]
pattern = rcparams.data_sources["mch"]["fn_pattern"]
ext = rcparams.data_sources["mch"]["fn_ext"]
timestep = rcparams.data_sources["mch"]["timestep"]
importer_name = rcparams.data_sources["mch"]["importer"]
importer_kwargs = rcparams.data_sources["mch"]["importer_kwargs"]

# read precip field
date = datetime.strptime("201607112100", "%Y%m%d%H%M")
fns = io.find_by_date(date,
                      root,
                      fmt,
                      pattern,
                      ext,
                      timestep,
                      num_prev_files=2)
importer = io.get_method(importer_name, "importer")
precip, __, metadata = io.read_timeseries(fns, importer, **importer_kwargs)
precip, metadata = utils.to_rainrate(precip, metadata)
# precip[np.isnan(precip)] = 0

# motion
motion = dense_lucaskanade(precip)

# parameters
nleadtimes = 6
thr = 1  # mm / h
slope = 1 * timestep  # km / min

# compute probability forecast
extrap_kwargs = dict(allow_nonfinite_values=True)
# (The remaining arguments of this call were truncated in the source; this
# completion assumes pysteps.nowcasts.lagrangian_probability.forecast.)
fct = forecast(precip[-1],
               motion,
               nleadtimes,
               thr,
               slope=slope,
               extrap_kwargs=extrap_kwargs)
Example #11
root_path = radar_data_source["root_path"]
path_fmt = "prcp-c10/66/%Y/%m/%d"
fn_pattern = "66_%Y%m%d_%H%M00.prcp-c10"
fn_ext = radar_data_source["fn_ext"]
importer_name = radar_data_source["importer"]
importer_kwargs = radar_data_source["importer_kwargs"]
timestep = 10.0

# Find the radar files in the archive
fns = io.find_by_date(
    date_radar, root_path, path_fmt, fn_pattern, fn_ext, timestep, num_prev_files=2
)

# Read the radar composites
importer = io.get_method(importer_name, "importer")
radar_precip, _, radar_metadata = io.read_timeseries(fns, importer, **importer_kwargs)

# Import the NWP data
filename = os.path.join(
    nwp_data_source["root_path"],
    datetime.strftime(date_nwp, nwp_data_source["path_fmt"]),
    datetime.strftime(date_nwp, nwp_data_source["fn_pattern"])
    + "."
    + nwp_data_source["fn_ext"],
)

nwp_importer = io.get_method("bom_nwp", "importer")
nwp_precip, _, nwp_metadata = nwp_importer(filename)

# Only keep the NWP forecasts from the last radar observation time (2020-10-31 04:00)
# onwards
Example #12
datasource_params = rcparams.data_sources[data_source]

# Find the radar files in the archive
fns = io.find_by_date(
    date,
    datasource_params["root_path"],
    datasource_params["path_fmt"],
    datasource_params["fn_pattern"],
    datasource_params["fn_ext"],
    datasource_params["timestep"],
    num_prev_files=2,
)

# Read the data from the archive
importer = io.get_method(datasource_params["importer"], "importer")
reflectivity, _, metadata = io.read_timeseries(
    fns, importer, **datasource_params["importer_kwargs"])

# Convert reflectivity to rain rate
rainrate, metadata = conversion.to_rainrate(reflectivity, metadata)

# Upscale data to 2 km to reduce computation time
rainrate, metadata = dimension.aggregate_fields_space(rainrate, metadata, 2000)

# Plot the most recent rain rate field
plt.figure()
plot_precip_field(rainrate[-1, :, :])
plt.show()

###############################################################################
# Estimate the advection field
# ----------------------------
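# A hedged sketch of the step announced above (the original code is cut off
# in the source listing; the Lucas-Kanade call mirrors the other snippets):
from pysteps import motion

oflow = motion.get_method("lucaskanade")
velocity = oflow(rainrate)  # (t, m, n) input -> (2, m, n) advection field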
Example #13
                    rho.pop(0)
                if np.isfinite(rho[1]):
                    results["cc_ar"][i][t] += rho[1]
                    results["n_ar_samples"][i][t] += 1
                k += 1

        R_ep = extrapolator(R[-1, :, :], V, num_timesteps, outval=R_min)

        obs_fns = io.archive.find_by_date(curdate,
                                          root_path,
                                          datasource["path_fmt"],
                                          datasource["fn_pattern"],
                                          datasource["fn_ext"],
                                          datasource["timestep"],
                                          num_next_files=num_timesteps)
        R_obs, _, metadata = io.read_timeseries(
            obs_fns, importer, **datasource["importer_kwargs"])
        R_obs = R_obs[1:, :, :]

        R_obs, metadata = utils.conversion.to_rainrate(R_obs,
                                                       metadata,
                                                       a=to_rainrate_a,
                                                       b=to_rainrate_b)
        R_obs, metadata = utils.transformation.dB_transform(R_obs,
                                                            metadata,
                                                            threshold=R_thr)
        R_min = np.min(R_obs[np.isfinite(R_obs)])
        R_obs[~np.isfinite(R_obs)] = R_min

        for t in range(num_timesteps):
            if not np.any(np.isfinite(R_obs[t, :, :])):
                # (The print message was truncated in the source; a
                # plausible completion.)
                print("No finite observations for timestep %d." % t)
Example #14
root_path = data_source["root_path"]
path_fmt = data_source["path_fmt"]
fn_pattern = data_source["fn_pattern"]
fn_ext = data_source["fn_ext"]
importer_name = data_source["importer"]
importer_kwargs = data_source["importer_kwargs"]

# Find the input files in the archive. Use history length of 5 timesteps
filenames = io.archive.find_by_date(
    date, root_path, path_fmt, fn_pattern, fn_ext, timestep=5, num_prev_files=5
)

# Read the input time series
importer = io.get_method(importer_name, "importer")
rainrate_field, quality, metadata = io.read_timeseries(
    filenames, importer, **importer_kwargs
)

# Convert to rain rate (mm/h)
rainrate_field, metadata = utils.to_rainrate(rainrate_field, metadata)

################################################################################
# Compute the advection field
# ---------------------------
#
# Apply the Lucas-Kanade method with the parameters given in Pulkkinen et al.
# (2020) to compute the advection field.

fd_kwargs = {}
fd_kwargs["max_corners"] = 1000
fd_kwargs["quality_level"] = 0.01
Example #15
def get_precipitation_fields(num_prev_files=0,
                             num_next_files=0,
                             return_raw=False,
                             metadata=False,
                             upscale=None):
    """
    Get a precipitation field from the archive to be used as reference.

    Source: mch
    Reference time: 2015/05/15 1630 UTC

    Parameters
    ----------

    num_prev_files: int
        Number of previous times (files) to return with respect to the
        reference time.

    num_next_files: int
        Number of future times (files) to return with respect to the
        reference time.

    return_raw: bool
        Do not preprocess the precipitation fields. False by default.
        The pre-processing steps are: 1) Convert to mm/h,
        2) Mask invalid values, 3) Log-transform the data [dBR].

    metadata : bool
        If True, also return file metadata.

    upscale: float or None
        Upscale fields in space during the pre-processing steps.
        If it is None, the precipitation field is not modified.
        If it is a float, it represents the length of the space window that
        is used to upscale the fields.


    Returns
    -------
    reference_field : array

    metadata : dict


    """
    pytest.importorskip("PIL")
    # Selected case
    date = datetime.strptime("201505151630", "%Y%m%d%H%M")
    data_source = rcparams.data_sources["mch"]

    root_path = data_source["root_path"]
    path_fmt = data_source["path_fmt"]
    fn_pattern = data_source["fn_pattern"]
    fn_ext = data_source["fn_ext"]
    importer_name = data_source["importer"]
    importer_kwargs = data_source["importer_kwargs"]
    timestep = data_source["timestep"]

    # Find the input files from the archive
    fns = io.archive.find_by_date(date,
                                  root_path,
                                  path_fmt,
                                  fn_pattern,
                                  fn_ext,
                                  timestep=timestep,
                                  num_prev_files=num_prev_files,
                                  num_next_files=num_next_files)

    # Read the radar composites
    importer = io.get_method(importer_name, "importer")
    reference_field, __, ref_metadata = io.read_timeseries(fns, importer,
                                                           **importer_kwargs)

    if not return_raw:

        if (num_prev_files == 0) and (num_next_files == 0):
            # Remove time dimension
            reference_field = np.squeeze(reference_field)

        # Convert to mm/h
        reference_field, ref_metadata = stp.utils.to_rainrate(reference_field,
                                                              ref_metadata)

        # Upscale data in space
        reference_field, ref_metadata = aggregate_fields_space(reference_field,
                                                               ref_metadata,
                                                               upscale)

        # Mask invalid values
        reference_field = np.ma.masked_invalid(reference_field)

        # Log-transform the data [dBR]
        reference_field, ref_metadata = stp.utils.dB_transform(reference_field,
                                                               ref_metadata,
                                                               threshold=0.1,
                                                               zerovalue=-15.0)

        # Set missing values with the fill value
        reference_field.data[reference_field.mask] = -15.0

    if metadata:
        return reference_field, ref_metadata

    return reference_field