def test_MissingErrors(self):
    """Every interpolator class must reject empty source or target points.

    Constructing with no sources raises ``MissingSourcesError``;
    constructing with no targets raises ``MissingTargetsError``.
    """
    interpolator_classes = (
        ipol.Nearest,
        ipol.Idw,
        ipol.Linear,
        ipol.OrdinaryKriging,
        ipol.ExternalDriftKriging,
    )
    for make_ip in interpolator_classes:
        # no source points at all
        with pytest.raises(ipol.MissingSourcesError):
            make_ip(np.array([]), self.trg)
        # no target points at all
        with pytest.raises(ipol.MissingTargetsError):
            make_ip(self.src, np.array([]))
def test_Nearest_1(self):
    """testing the basic behaviour of the Nearest class"""
    # NOTE: docstring previously claimed "Idw class" — this test exercises
    # ipol.Nearest. Also converted unittest-style self.assertTrue to plain
    # asserts for consistency with the pytest idiom used elsewhere here.
    ip = ipol.Nearest(self.src, self.trg)
    # input more than one dataset
    res = ip(self.vals)
    assert np.allclose(
        res,
        np.array([[1., 2., 3.],
                  [1., 2., 3.],
                  [1., 2., 3.],
                  [3., 2., 1.]]))
    # input only one flat array
    res = ip(self.vals[:, 2])
    assert np.allclose(res, np.array([3., 3., 3., 1.]))
def test_Nearest_1(self):
    """testing the basic behaviour of the Nearest class"""
    # (docstring corrected: this test exercises ipol.Nearest, not Idw)
    ip = ipol.Nearest(self.src, self.trg)
    # input more than one dataset
    res = ip(self.vals)
    assert np.allclose(
        res,
        np.array([[1.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 2.0, 3.0], [3.0, 2.0, 1.0]]),
    )
    # input only one flat array
    res = ip(self.vals[:, 2])
    assert np.allclose(res, np.array([3.0, 3.0, 3.0, 1.0]))
def setUp(self):
    """Build the fixture: radar scan values plus collocated satellite data.

    Sets ``self.val`` (calibrated radar field), ``self.rscale`` (range bin
    size), ``self.val_sat`` (satellite values collocated onto the radar
    grid) and ``self.error`` (localisation error estimate).
    """
    # --- read the radar volume scan ---
    radar_file = util.get_wradlib_data_file(
        'hdf5/20130429043000.rad.bewid.pvol.dbzh.scan1.hdf')
    pvol = io.read_opera_hdf5(radar_file)
    nrays = int(pvol["dataset1/where"]["nrays"])
    nbins = int(pvol["dataset1/where"]["nbins"])
    raw = pvol["dataset%d/data1/data" % (1)]
    gain = float(pvol["dataset1/data1/what"]["gain"])
    offset = float(pvol["dataset1/data1/what"]["offset"])
    # raw counts -> physical values
    self.val = raw * gain + offset
    self.rscale = int(pvol["dataset1/where"]["rscale"])
    elangle = pvol["dataset%d/where" % (1)]["elangle"]
    # sweep bin centroids -> cartesian coordinates in the radar projection
    centroids = georef.sweep_centroids(nrays, self.rscale, nbins, elangle)
    site = (pvol["where"]["lon"], pvol["where"]["lat"],
            pvol["where"]["height"])
    xyz, proj_radar = georef.spherical_to_xyz(
        centroids[..., 0], centroids[..., 1], centroids[..., 2],
        site, re=6370040., ke=4. / 3.)
    # --- read and reproject the satellite cloud-type product ---
    sat_file = util.get_wradlib_data_file(
        'hdf5/SAFNWC_MSG3_CT___201304290415_BEL_________.h5')
    sat_gdal = io.read_safnwc(sat_file)
    val_sat = georef.read_gdal_values(sat_gdal)
    coord_sat = georef.read_gdal_coordinates(sat_gdal)
    proj_sat = georef.read_gdal_projection(sat_gdal)
    coord_sat = georef.reproject(coord_sat, projection_source=proj_sat,
                                 projection_target=proj_radar)
    # nearest-neighbour collocation of satellite pixels onto radar bins
    interp = ipol.Nearest(coord_sat[..., 0:2].reshape(-1, 2),
                          xyz[..., 0:2].reshape(-1, 2))
    self.val_sat = interp(val_sat.ravel()).reshape(raw.shape)
    # localisation error estimate: time lag times wind speed
    timelag = 9 * 60
    wind = 10
    self.error = np.absolute(timelag) * wind
def ex_clutter_cloud():
    """Identify radar clutter from a collocated satellite cloud-type product.

    Reads a radar volume scan and a SAFNWC cloud-type file from the local
    ``data/`` directory, collocates the satellite pixels onto the radar
    grid, runs ``cl.filter_cloudtype`` on the lowest tilt and saves/shows
    the resulting plots.
    """
    # read the radar volume scan
    path = os.path.dirname(__file__) + '/'
    pvol = io.read_OPERA_hdf5(
        path + 'data/20130429043000.rad.bewid.pvol.dbzh.scan1.hdf')
    # Count the number of dataset
    # Probe dataset groups until a KeyError-like failure; ntilt ends up as
    # the number of available tilts.
    ntilt = 1
    for i in range(100):
        try:
            pvol["dataset%d/what" % ntilt]
            ntilt += 1
        except Exception:
            ntilt -= 1
            break
    # Construct radar values
    nrays = int(pvol["dataset1/where"]["nrays"])
    nbins = int(pvol["dataset1/where"]["nbins"])
    val = np.empty((ntilt, nrays, nbins))
    for t in range(ntilt):
        val[t, ...] = pvol["dataset%d/data1/data" % (t + 1)]
    gain = float(pvol["dataset1/data1/what"]["gain"])
    offset = float(pvol["dataset1/data1/what"]["offset"])
    # raw counts -> physical values
    val = val * gain + offset
    # Construct radar coordinates
    rscale = int(pvol["dataset1/where"]["rscale"])
    coord = np.empty((ntilt, nrays, nbins, 3))
    for t in range(ntilt):
        elangle = pvol["dataset%d/where" % (t + 1)]["elangle"]
        coord[t, ...] = georef.sweep_centroids(nrays, rscale, nbins, elangle)
    # NOTE(review): ascale is computed but never used below — candidate for
    # removal; confirm no external use before deleting.
    ascale = math.pi / nrays
    sitecoords = (pvol["where"]["lon"], pvol["where"]["lat"],
                  pvol["where"]["height"])
    proj_radar = georef.create_osr("aeqd", lat_0=pvol["where"]["lat"],
                                   lon_0=pvol["where"]["lon"])
    # polar bin coordinates -> lon/lat/alt (spherical earth, 4/3 refraction)
    coord[..., 0], coord[..., 1], coord[..., 2] = georef.polar2lonlatalt_n(
        coord[..., 0], np.degrees(coord[..., 1]), coord[..., 2], sitecoords,
        re=6370040., ke=4. / 3.)
    coord = georef.reproject(coord, projection_target=proj_radar)
    # Construct collocated satellite data
    sat_gdal = io.read_safnwc(
        path + 'data/SAFNWC_MSG3_CT___201304290415_BEL_________.h5')
    val_sat = georef.read_gdal_values(sat_gdal)
    coord_sat = georef.read_gdal_coordinates(sat_gdal)
    proj_sat = georef.read_gdal_projection(sat_gdal)
    coord_sat = georef.reproject(coord_sat, projection_source=proj_sat,
                                 projection_target=proj_radar)
    coord_radar = coord
    # nearest-neighbour collocation of satellite pixels onto radar bins
    interp = ipol.Nearest(coord_sat[..., 0:2].reshape(-1, 2),
                          coord_radar[..., 0:2].reshape(-1, 2))
    val_sat = interp(val_sat.ravel()).reshape(val.shape)
    # Estimate localisation errors (time lag times wind speed)
    timelag = 9 * 60
    wind = 10
    error = np.absolute(timelag) * wind
    # Identify clutter based on collocated cloudtype (lowest tilt only)
    clutter = cl.filter_cloudtype(val[0, ...], val_sat[0, ...],
                                  scale=rscale, smoothing=error)
    # visualize the result
    plt.figure()
    vis.plot_ppi(clutter)
    plt.suptitle('clutter')
    plt.savefig('clutter_cloud_example_1.png')
    plt.figure()
    vis.plot_ppi(val_sat[0, ...])
    plt.suptitle('satellite')
    plt.savefig('clutter_cloud_example_2.png')
    plt.show()
class TestFilterCloudtype:
    """Tests for ``clutter.filter_cloudtype`` with SAFNWC cloud-type data.

    The fixture is built in the class body at import time (class
    attributes), but only when the wradlib test data is available
    (``has_data``). Test methods access the fixture via ``self``.
    """

    if has_data:
        # read the radar volume scan
        filename = "hdf5/20130429043000.rad.bewid.pvol.dbzh.scan1.hdf"
        filename = util.get_wradlib_data_file(filename)
        pvol = io.read_opera_hdf5(filename)
        nrays = int(pvol["dataset1/where"]["nrays"])
        nbins = int(pvol["dataset1/where"]["nbins"])
        val = pvol["dataset%d/data1/data" % (1)]
        gain = float(pvol["dataset1/data1/what"]["gain"])
        offset = float(pvol["dataset1/data1/what"]["offset"])
        # raw counts -> physical values
        val = val * gain + offset
        rscale = int(pvol["dataset1/where"]["rscale"])
        elangle = pvol["dataset%d/where" % (1)]["elangle"]
        # sweep bin centroids -> cartesian coordinates in the radar projection
        coord = georef.sweep_centroids(nrays, rscale, nbins, elangle)
        sitecoords = (
            pvol["where"]["lon"],
            pvol["where"]["lat"],
            pvol["where"]["height"],
        )
        coord, proj_radar = georef.spherical_to_xyz(
            coord[..., 0],
            coord[..., 1],
            coord[..., 2],
            sitecoords,
            re=6370040.0,
            ke=4.0 / 3.0,
        )
        # read and reproject the satellite cloud-type product
        filename = "hdf5/SAFNWC_MSG3_CT___201304290415_BEL_________.h5"
        filename = util.get_wradlib_data_file(filename)
        sat_gdal = io.read_safnwc(filename)
        val_sat = georef.read_gdal_values(sat_gdal)
        coord_sat = georef.read_gdal_coordinates(sat_gdal)
        proj_sat = georef.read_gdal_projection(sat_gdal)
        coord_sat = georef.reproject(coord_sat, projection_source=proj_sat,
                                     projection_target=proj_radar)
        coord_radar = coord
        # nearest-neighbour collocation of satellite pixels onto radar bins
        interp = ipol.Nearest(coord_sat[..., 0:2].reshape(-1, 2),
                              coord_radar[..., 0:2].reshape(-1, 2))
        val_sat = interp(val_sat.ravel()).reshape(val.shape)
        # localisation error estimate: time lag times wind speed
        timelag = 9 * 60
        wind = 10
        error = np.absolute(timelag) * wind

    @requires_data
    def test_filter_cloudtype(self):
        """Check the clutter pixel counts for default and ``low=True`` runs."""
        nonmet = clutter.filter_cloudtype(self.val, self.val_sat,
                                          scale=self.rscale,
                                          smoothing=self.error)
        nclutter = np.sum(nonmet)
        assert nclutter == 8141
        # including low clouds flags more pixels as non-meteorological
        nonmet = clutter.filter_cloudtype(self.val, self.val_sat,
                                          scale=self.rscale,
                                          smoothing=self.error,
                                          low=True)
        nclutter = np.sum(nonmet)
        assert nclutter == 17856
def gridplot(interpolated, title=""): plt.pcolormesh(xtrg, ytrg, interpolated.reshape((len(xtrg), len(ytrg)))) plt.axis("tight") plt.scatter(src[:, 0], src[:, 1], facecolor="None", s=50, marker='s') plt.title(title) plt.xlabel("X") plt.ylabel("Y") # Interpolation IDW idw = ipol.Idw(src, trg) gridplot(idw(vals.ravel()), "IDW") # Other approach for IDW ip_near = ipol.Nearest(src, trg) maxdist = trg[1, 0] - trg[0, 0] result_near = ip_near(vals.ravel(), maxdist=maxdist) # 5.2.3 Kriging =============================== df_meg_nodes_np = np.array(df_meg_nodes) gridx = np.arange(0.0, 5.5, 0.5) gridy = np.arange(0.0, 5.5, 0.5) # Ordninary Kriging ok = ipol.OrdinaryKriging(src, trg) gridplot(ok(vals.ravel()), "Ordinary Kriging") # Universal Kriging UK = UniversalKriging(df_meg_nodes_np[:, 0], df_meg_nodes_np[:, 4],
def getRadarDataByExtent(radar_nc_path, save_path, extent):
    """Resample radar moments onto a regular grid clipped to *extent*.

    Reads reflectivity, differential reflectivity and KDP from the input
    netCDF file, nearest-neighbour interpolates them onto a regular grid
    covering ``extent = (x_min, x_max, y_min, y_max)``, and writes the
    result to a new netCDF file at *save_path*.
    """
    x_min, x_max, y_min, y_max = extent
    # km per degree of latitude (approximate) — used for unit conversion
    scale = 111.194925
    nc_ds = nc.Dataset(radar_nc_path, "r")
    lon = nc_ds.Longitude
    lat = nc_ds.Latitude
    zDr = np.array(nc_ds.variables["DifferentialReflectivity"])
    Phi_dp = np.array(nc_ds.variables["DifferentialPhase"])
    KDP = np.array(nc_ds.variables["KDP"])
    reflectivity = np.array(nc_ds.variables["Reflectivity"])
    '''
    zDr_loc = np.where(zDr.flatten() != -8.125)[0]
    reflect_loc = np.where(reflectivity.flatten() != -33.0)[0]
    Kdp_loc = np.where(KDP.flatten() != -5.0)[0]
    sample_loc = reduce(np.union1d, (zDr_loc, reflect_loc, Kdp_loc))
    '''
    # ------convert mm to m
    GateWidth = np.array(nc_ds.variables["GateWidth"]) / 1000.0
    # gate width converted to degrees (presumably) via km-per-degree scale
    # — TODO confirm the units of GateWidth in the source file
    w_r = GateWidth[0] / scale / 1000.0
    # cumulative range of each gate along a ray
    dis_cum = np.cumsum(np.full(Phi_dp.shape[1], w_r))
    # ---Determine the center coordinates of observed radar data
    azimuth = np.array(nc_ds.variables["Azimuth"]) * np.pi / 180.0
    # NOTE(review): x uses cos(az), y uses sin(az) — mathematical angle
    # convention, not compass azimuth; confirm the convention of the data.
    x_obs_loc = lon + np.outer(np.cos(azimuth), dis_cum)
    y_obs_loc = lat + np.outer(np.sin(azimuth), dis_cum)
    xy_obs_loc = np.concatenate(
        (np.reshape(x_obs_loc.flatten(), (-1, 1)),
         np.reshape(y_obs_loc.flatten(), (-1, 1))), axis=1)
    # xy_obs_loc = xy_obs_loc[sample_loc]
    # ---Determine the center coordinates of cells which are to be interpolated
    num_row = int(np.ceil((y_max - y_min) / w_r))
    num_col = int(np.ceil((x_max - x_min) / w_r))
    x_center_loc = np.linspace(x_min, x_min + (num_col - 1) * w_r,
                               num_col) + 0.5 * w_r
    y_center_loc = np.linspace(y_max - (num_row - 1) * w_r, y_max,
                               num_row) - 0.5 * w_r
    x_center_loc, y_center_loc = np.meshgrid(x_center_loc, y_center_loc)
    xy_center_loc = np.concatenate(
        (np.reshape(x_center_loc.flatten(), (-1, 1)),
         np.reshape(y_center_loc.flatten(), (-1, 1))), axis=1)
    # ---construct an Interpolation Object (nearest neighbour)
    interp_obj = ipol.Nearest(xy_obs_loc, xy_center_loc)
    # zH_interploate = OrdinaryKriging_obj(reflectivity.flatten()[sample_loc])
    zH_interploate = interp_obj(reflectivity.flatten())
    zH_interploate = np.reshape(zH_interploate,
                                (num_row, num_col))
    # zDr_interploate = OrdinaryKriging_obj(zDr.flatten()[sample_loc])
    zDr_interploate = interp_obj(zDr.flatten())
    zDr_interploate = np.reshape(zDr_interploate, (num_row, num_col))
    # Kdp_interploate = OrdinaryKriging_obj(KDP.flatten()[sample_loc])
    Kdp_interploate = interp_obj(KDP.flatten())
    Kdp_interploate = np.reshape(Kdp_interploate, (num_row, num_col))
    # ---save the output .nc file
    output_ds = nc.Dataset(save_path, mode="w")
    output_ds.NetCDFRevision = "lyy_thu_data"
    output_ds.GenDate = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
    output_ds.num_col = num_col
    output_ds.num_row = num_row
    output_ds.Resolution = w_r
    output_ds.X_min = x_min
    output_ds.Y_max = y_max
    RowDimId = output_ds.createDimension("row", num_row)
    ColDimId = output_ds.createDimension("col", num_col)
    zh_id = output_ds.createVariable("Reflectivity", np.float64,
                                     ("row", "col"))
    zh_id.Units = "dBz"
    zh_id[:, :] = zH_interploate
    zdr_id = output_ds.createVariable("DifferentialReflectivity", np.float64,
                                      ("row", "col"))
    zdr_id.Units = "dB"
    zdr_id[:, :] = zDr_interploate
    kdp_id = output_ds.createVariable("KDP", np.float64, ("row", "col"))
    kdp_id.Units = "unitless"
    kdp_id[:, :] = Kdp_interploate
    output_ds.close()
    nc_ds.close()