Example #1
def test_dB_transform(R, metadata, threshold, zerovalue, inverse, expected):
    """Test the dB_transform."""
    assert_array_almost_equal(
        transformation.dB_transform(R, metadata, threshold, zerovalue,
                                    inverse)[0],
        expected,
    )
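
The test above is shown without its parametrization. A minimal sketch of how it could be driven with pytest, using a hypothetical test case (the actual test table in pysteps differs):

import numpy as np
import pytest
from numpy.testing import assert_array_almost_equal

from pysteps.utils import transformation

# hypothetical case: 1 mm/h is above the 0.1 mm/h threshold, so the forward
# transform gives 10 * log10(1.0) = 0.0 dBR
@pytest.mark.parametrize(
    "R, metadata, threshold, zerovalue, inverse, expected",
    [(np.array([1.0]), None, 0.1, None, False, np.array([0.0]))],
)
def test_dB_transform(R, metadata, threshold, zerovalue, inverse, expected):
    """Test the dB_transform."""
    assert_array_almost_equal(
        transformation.dB_transform(R, metadata, threshold, zerovalue, inverse)[0],
        expected,
    )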
Example #2
# Compute 30-minute LINDA nowcast with 8 parallel workers
# Restrict the number of features to 15 to reduce computation time
nowcast_linda = linda.forecast(
    rainrate,
    advection,
    6,
    max_num_features=15,
    add_perturbations=False,
    num_workers=8,
    measure_time=True,
)[0]

# Compute S-PROG nowcast for comparison
rainrate_db, _ = transformation.dB_transform(rainrate,
                                             metadata,
                                             threshold=0.1,
                                             zerovalue=-15.0)
nowcast_sprog = sprog.forecast(
    rainrate_db[-3:, :, :],
    advection,
    6,
    n_cascade_levels=6,
    R_thr=-10.0,
)

# Convert the dB-transformed nowcast back to rain rate
nowcast_sprog = transformation.dB_transform(nowcast_sprog,
                                            threshold=-10.0,
                                            inverse=True)[0]

# Plot the nowcasts
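# (a minimal plotting sketch, not part of the original snippet: it shows the
# last lead time of each nowcast with pysteps.visualization.plot_precip_field)
from matplotlib import pyplot as plt
from pysteps.visualization import plot_precip_field

plt.figure(figsize=(10, 4))
plt.subplot(1, 2, 1)
plot_precip_field(nowcast_linda[-1], title="LINDA (+30 min)")
plt.subplot(1, 2, 2)
plot_precip_field(nowcast_sprog[-1], title="S-PROG (+30 min)")
plt.show()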
Example #3
# (the snippet begins inside an if/else block that selects the data source)
else:
    datasource = datasources.mch
    precipevents = precipevents.mch

root_path = os.path.join(datasources.root_path, datasource["root_path"])
importer = io.get_method(datasource["importer"], "importer")

results = {}

for m in oflow_methods:
    results[m] = {}
    results[m]["CSI"] = [0.0] * num_timesteps
    results[m]["MAE"] = [0.0] * num_timesteps
    results[m]["n_samples"] = [0.0] * num_timesteps

# convert the rain-rate threshold to dBR units
R_min_dB = transformation.dB_transform(np.array([R_min]))[0][0]

for pei, pe in enumerate(precipevents):
    curdate = datetime.strptime(pe[0], "%Y%m%d%H%M")
    enddate = datetime.strptime(pe[1], "%Y%m%d%H%M")

    while curdate <= enddate:
        print("Computing nowcasts for event %d, start date %s..." %
              (pei + 1, str(curdate)),
              end="")
        sys.stdout.flush()

        if curdate + num_timesteps * timedelta(minutes=5) > enddate:
            break

        fns = io.archive.find_by_date(curdate,
Example #4
rainrate_field_log, _ = utils.transformation.dB_transform(
    rainrate_field, metadata=metadata
)
velocity = oflow(rainrate_field_log, **oflow_kwargs)

###############################################################################
# Compute the nowcasts and threshold rain rates below 0.5 mm/h
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
forecast_extrap = extrapolation.forecast(
    rainrate_field[-1], velocity, 3, extrap_kwargs={"allow_nonfinite_values": True}
)
forecast_extrap[forecast_extrap < 0.5] = 0.0

# log-transform the data and the threshold value to dBR units for S-PROG
rainrate_field_db, _ = transformation.dB_transform(
    rainrate_field, metadata, threshold=0.1, zerovalue=-15.0
)
rainrate_thr, _ = transformation.dB_transform(
    np.array([0.5]), metadata, threshold=0.1, zerovalue=-15.0
)
forecast_sprog = sprog.forecast(
    rainrate_field_db[-3:], velocity, 3, n_cascade_levels=8, R_thr=rainrate_thr[0]
)
forecast_sprog, _ = transformation.dB_transform(
    forecast_sprog, threshold=-10.0, inverse=True
)
forecast_sprog[forecast_sprog < 0.5] = 0.0

forecast_anvil = anvil.forecast(
    rainrate_field[-4:], velocity, 3, ar_window_radius=25, ar_order=2
)
# threshold the ANVIL nowcast in the same way as the other methods
forecast_anvil[forecast_anvil < 0.5] = 0.0
Example #5
    # scale factor for converting the motion field from pixels/timestep to km/h
    vsf = 60.0 / datasource["timestep"] * metadata["xpixelsize"] / 1000.0

    missing_data = False
    for i in range(R.shape[0]):
        if not np.any(np.isfinite(R[i, :, :])):
            missing_data = True
            break

    if missing_data:
        curdate += timedelta(minutes=datasource["timestep"])
        continue

    R[~np.isfinite(R)] = metadata["zerovalue"]
    if use_precip_mask:
        MASK = np.any(R < R_min, axis=0)
    R = transformation.dB_transform(R)[0]

    if args.oflow == "vet":
        R_ = R[-2:, :, :]
    else:
        R_ = R

    # TODO: Allow the user to supply parameters for the optical flow.
    V = oflow(R_) * vsf
    # discard the motion field if the mean velocity is abnormally large
    if np.nanmean(np.linalg.norm(V, axis=0)) > 0.5 * R.shape[1]:
        curdate += timedelta(minutes=datasource["timestep"])
        continue

    if use_precip_mask:
        V[0, :, :][MASK] = np.nan
Example #6
def compute(nowcast_method, config_number):
    # the optical flow methods to use
    oflow_methods = ["darts"]

    # time step between computation of each nowcast (minutes)
    timestep = 30
    # the number of time steps for each nowcast (the time step of the
    # MeteoSwiss and FMI data is 5 minutes)
    num_timesteps = 24
    # the threshold to use for precipitation/no precipitation
    R_min = 0.1

    R_min_dB = transformation.dB_transform(np.array([R_min]))[0][0]

    precip_events = [
        ("201104160800", "201104170000"),
        ("201111152300", "201111161000"),
        ("201304110000", "201304120000"),
        ("201304041800", "201304051800"),
        ("201305180600", "201305181200"),
        ("201305270000", "201305271200"),
    ]

    for pei, pe in enumerate(precip_events):
        start_date = datetime.strptime(pe[0], "%Y%m%d%H%M")
        curdate = datetime.strptime(pe[0], "%Y%m%d%H%M")
        enddate = datetime.strptime(pe[1], "%Y%m%d%H%M")

        results = {}

        for m in oflow_methods:
            results[m] = {}
            results[m]["comptimes"] = 0.0
            results[m]["CSI"] = [0.0] * num_timesteps
            results[m]["RMSE"] = [0.0] * num_timesteps
            results[m]["n_samples"] = [0.0] * num_timesteps

        my_observations = LowAltCompositeCollection()

        while curdate <= enddate:
            print("Computing nowcasts for event %d, start date %s..." %
                  (pei + 1, str(curdate)),
                  end="")
            sys.stdout.flush()

            if curdate + num_timesteps * timedelta(minutes=5) > enddate:
                break

            time_step_in_sec = 5 * 60
            times = [
                curdate - timedelta(seconds=time_step_in_sec * i)
                for i in range(9)
            ]

            times += [
                curdate + timedelta(seconds=time_step_in_sec * i)
                for i in range(1, num_timesteps + 1)
            ]

            times.sort()

            # Add elements to the collection if they don't exist
            for _time in times:
                my_observations.add(get_lowalt_file(_time))

            # First 9 times
            R = my_observations.get_data('Reflectivity', date=times[:9])

            R = dbz_to_r(R, a=300., b=1.5)

            _R = list()

            # The original data is at 1 km resolution;
            # downscale to 5 km resolution by 5x5 averaging
            for i in range(9):
                _R.append(downscale_local_mean(R[i, :-1, :], (5, 5)))
            R = np.asarray(_R)
            my_observations.clean_buffers()  # release memory

            missing_data = False
            for i in range(R.shape[0]):
                if not np.any(np.isfinite(R[i, :, :])):
                    print("Skipping, no finite values found for time step %d" %
                          (i + 1))
                    missing_data = True
                    break

            if missing_data:
                curdate += timedelta(minutes=timestep)
                continue

            R = transformation.dB_transform(R)[0]

            # Forecast times
            fcts_times = times[9:]
            R_obs = my_observations.get_data('Reflectivity', date=times[9:])
            R_obs = dbz_to_r(R_obs, a=300., b=1.5)

            # The original data is at 1 km resolution;
            # downscale to 5 km resolution by 5x5 averaging
            _R = list()
            for i in range(len(fcts_times)):
                _R.append(downscale_local_mean(R_obs[i, :-1, :], (5, 5)))
            R_obs = np.asarray(_R)
            my_observations.clean_buffers()  # release memory

            for oflow_method in oflow_methods:
                oflow = motion.get_method(oflow_method)
                if oflow_method == "vet":
                    R_ = R[-2:, :, :]
                else:
                    R_ = R

                starttime = time.time()
                V = oflow(R_, **configurations[config_number])
                print("%s optical flow computed in %.3f seconds." % \
                      (oflow_method, time.time() - starttime))

                if nowcast_method == "advection":
                    nc = nowcasts.get_method("extrapolation")
                    R_fct = nc(R[-1, :, :], V, num_timesteps)
                else:
                    nc = nowcasts.get_method("steps")
                    R_fct = nc(R[-3:, :, :],
                               V,
                               num_timesteps,
                               noise_method=None,
                               vel_pert_method=None,
                               n_ens_members=1,
                               mask_method="sprog",
                               R_thr=R_min_dB,
                               probmatching_method="mean",
                               fft_method="numpy")[0, :, :, :]

                R_fct = transformation.dB_transform(R_fct, inverse=True)[0]

                for lt in range(num_timesteps):
                    if not np.any(np.isfinite(R_obs[lt, :, :])):
                        print(
                            "Warning: no finite verifying observations for lead time %d."
                            % (lt + 1))
                        continue

                    csi = det_cat_fcst(R_fct[lt, :, :], R_obs[lt, :, :], R_min,
                                       ["CSI"])[0]
                    MASK = np.logical_and(R_fct[lt, :, :] > R_min,
                                          R_obs[lt, :, :] > R_min)
                    if np.sum(MASK) == 0:
                        print("Skipping, no precipitation for lead time %d." %
                              (lt + 1))
                        continue

                    # det_cont_fcst with "MAE_add" returns the additive mean
                    # absolute error; it is accumulated under the "RMSE" key below
                    rmse = det_cont_fcst(R_fct[lt, :, :][MASK],
                                         R_obs[lt, :, :][MASK], ["MAE_add"])[0]

                    results[oflow_method]["CSI"][lt] += csi
                    results[oflow_method]["RMSE"][lt] += rmse
                    results[oflow_method]["n_samples"][lt] += 1

            print("Done.")

            curdate += timedelta(minutes=timestep)

        data_dir = './data/dart_tests/config_{:d}'.format(config_number)
        create_dir(data_dir)
        file_name = "optflow_comparison_results_%s_%s.dat" % (
            nowcast_method, get_timestamp(start_date))
        with open(join(data_dir, file_name), "wb") as f:
            pickle.dump(results, f)
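
The snippet references a `configurations` dict that is not shown. A hypothetical driver for the function above (the names and values below are assumptions, not from the original script):

# hypothetical: map a configuration number to optical flow keyword arguments
# (an empty dict falls back to the method defaults)
configurations = {0: {}}

if __name__ == "__main__":
    for config_number in configurations:
        for nowcast_method in ("advection", "steps"):
            compute(nowcast_method, config_number)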
Example #7
fn_pattern = "%Y%m%d%H%M_fmi.radar.composite.lowest_FIN_SUOMI1"
fn_ext = "pgm.gz"

# find the input files from the archive
fns = io.archive.find_by_date(date,
                              root_path,
                              "%Y%m%d",
                              fn_pattern,
                              fn_ext,
                              5,
                              num_prev_files=9)

# read the radar composites and apply thresholding
Z, _, metadata = io.read_timeseries(fns, import_fmi_pgm, gzipped=True)
R = conversion.to_rainrate(Z, metadata, 223.0, 1.53)[0]
R = transformation.dB_transform(R, threshold=0.1, zerovalue=-15.0)[0]
R[~np.isfinite(R)] = -15.0

# construct bandpass filter and apply the cascade decomposition
bandpass_filter = filter_gaussian(R.shape[1:], 7)
decomp = decomposition_fft(R[-1, :, :], bandpass_filter)

# normalize the cascade levels and plot them
for i in range(7):
    mu = decomp["means"][i]
    sigma = decomp["stds"][i]
    decomp["cascade_levels"][i] = (decomp["cascade_levels"][i] - mu) / sigma

fig, ax = pyplot.subplots(nrows=2, ncols=4)

ax[0, 0].imshow(R[-1, :, :], cmap=cm.RdBu_r, vmin=-3, vmax=3)
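
The snippet is cut off after the first panel. A sketch of how the remaining panels could show the seven normalized cascade levels (the panel layout is an assumption):

ax[0, 0].set_title("Observed")
for i in range(7):
    row, col = divmod(i + 1, 4)
    ax[row, col].imshow(decomp["cascade_levels"][i], cmap=cm.RdBu_r, vmin=-3, vmax=3)
    ax[row, col].set_title("Level %d" % (i + 1))
for axis in ax.flat:
    axis.set_xticks([])
    axis.set_yticks([])
pyplot.show()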
Example #8
seed = 24
map_plotter = "basemap"
basemap_resolution = 'h'

inputfns = find_by_date(date,
                        root_path,
                        path_fmt,
                        fn_pattern,
                        fn_ext,
                        timestep,
                        num_prev_files=9)

Z, _, metadata = read_timeseries(inputfns, import_fmi_pgm, gzipped=True)

R = conversion.to_rainrate(Z, metadata, 223.0, 1.53)[0]
R = transformation.dB_transform(R, threshold=0.1, zerovalue=-15.0)[0]
R[~np.isfinite(R)] = -15.0

V = dense_lucaskanade(R)

# the S-PROG nowcast
nowcast_method = nowcasts.get_method("sprog")
R_f = nowcast_method(R[-3:, :, :],
                     V,
                     12,
                     n_cascade_levels=8,
                     R_thr=-10.0,
                     decomp_method="fft",
                     bandpass_filter_method="gaussian",
                     probmatching_method="mean",
                     fft_method="pyfftw")
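
The S-PROG output above is in dBR units. A sketch of transforming it back to rain rate and plotting the final lead time, following the pattern of the earlier examples:

# back-transform the nowcast from dBR to rain rate (mm/h)
R_f = transformation.dB_transform(R_f, threshold=-10.0, inverse=True)[0]

from matplotlib import pyplot as plt
from pysteps.visualization import plot_precip_field

plot_precip_field(R_f[-1], title="S-PROG nowcast (+60 min)")
plt.show()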