def plot_all(folder_path="", limit_levels=1):
    """Plot every non-coordinate variable found in the monthly RPN files of a folder.

    :param folder_path: folder containing "*monthly_fields.rpn" files;
        taken from sys.argv[1] when empty
    :param limit_levels: forwarded to plot_variable
    """
    if folder_path == "":
        folder_path = sys.argv[1]
    for f in os.listdir(folder_path):
        # skip files other than monthly
        if not "monthly_fields.rpn" in f.lower():
            continue
        r = RPN(os.path.join(folder_path, f))
        # Exclude coordinate variables ("^^" and ">>") from the list
        vlist = [v for v in r.get_list_of_varnames() if v not in ["^^", ">>"]]
        for varname in vlist:
            data = r.get_4d_field(name=varname)
            # images are written into an "img" subfolder of the data folder
            img_folder = os.path.join(folder_path, "img")
            params = r.get_proj_parameters_for_the_last_read_rec()
            rll = RotatedLatLon(**params)
            lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
            plot_variable(varname, data, img_folder=img_folder, lons=lons, lats=lats,
                          bmap=rll.get_basemap_object_for_lons_lats(lons2d=lons, lats2d=lats),
                          limit_levels=limit_levels)
def get_mean_over_months_of_2d_var(self, start_year, end_year, months=None,
                                   var_name="", level=-1, level_kind=-1):
    """Return the multi-year mean over the selected months of a 2D variable.

    :param start_year: first year (inclusive)
    :param end_year: last year (inclusive)
    :param months: iterable of month numbers (1..12); None means all months
        (previously None crashed with a TypeError in the loop)
    :param var_name: RPN variable name
    :param level: level = -1 means any level
    :param level_kind: level kind code passed through to the RPN reader
    :return: numpy array, mean over all (year, month) monthly means
    """
    if months is None:
        months = range(1, 13)
    monthly_means = []
    for the_year in range(start_year, end_year + 1):
        for the_month in months:
            path = self.yearmonth_to_data_path[(the_year, the_month)]
            print("{0}/{1} -> {2}".format(the_year, the_month, path))
            rpn_obj = RPN(path)
            records = rpn_obj.get_all_time_records_for_name_and_level(
                varname=var_name, level=level, level_kind=level_kind)
            # each entry is a time-mean over the records of one month
            monthly_means.append(np.mean(list(records.values()), axis=0))
            rpn_obj.close()
    return np.mean(monthly_means, axis=0)
def test_polar_stereographic():
    """
    Testing polar stereographic grid functions
    """
    path = get_input_file_path("mappe.rpnw", the_dir)
    r = None
    try:
        r = RPN(path)
        mk = r.get_first_record_for_name("MK")
        # print r.get_proj_parameters_for_the_last_read_rec()
        lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
        amno_link = "http://www.cccma.ec.gc.ca/data/grids/geom_crcm_amno_182x174.shtml"
        msg_tpl = "Generated longitudes are not the same as {0}".format(amno_link)
        msg_tpl += "\n Expected: {0}"
        msg_tpl += "\n Got: {1}"
        # test with expected values from the EC website
        expect = 226.50 - 360.0
        msg = msg_tpl.format(expect, lons[10, 10])
        ok_(np.abs(lons[10, 10] - expect) < 1.0e-2, msg=msg)
        # latitudes
        expect = 41.25
        msg = msg_tpl.format(expect, lats[-11, -11])
        ok_(np.abs(lats[-11, -11] - expect) < 1.0e-2, msg=msg)
    finally:
        # make sure the file is closed even when an assertion fails
        if r is not None:
            r.close()
def get_basemap_and_coords(
        file_path="data/CORDEX/NorthAmerica_0.44deg_CanHistoE1/Samples/NorthAmerica_0.44deg_CanHistoE1_198101/pm1950010100_00816912p",
        lon1=-97.0, lat1=47.50, lon2=-7, lat2=0,
        llcrnrlon=None, llcrnrlat=None, urcrnrlon=None, urcrnrlat=None,
        resolution="l", anchor="W", projection="omerc", round=False):
    """Build a Basemap for the grid of an RPN file and return it with 2D coords.

    Corner coordinates default to the grid corners of the file unless given
    explicitly.

    NOTE(review): the `round` parameter is accepted but never used below —
    confirm whether it should be forwarded to Basemap.

    :return: (basemap, lons2D, lats2D)
    """
    rpnObj = RPN(file_path)
    lons2D, lats2D = rpnObj.get_longitudes_and_latitudes()
    rpnObj.close()
    # fall back to the grid corners when no explicit corners were supplied
    the_ll_lon = lons2D[0, 0] if llcrnrlon is None else llcrnrlon
    the_ll_lat = lats2D[0, 0] if llcrnrlat is None else llcrnrlat
    the_ur_lon = lons2D[-1, -1] if urcrnrlon is None else urcrnrlon
    the_ur_lat = lats2D[-1, -1] if urcrnrlat is None else urcrnrlat
    return Basemap(projection=projection, resolution=resolution,
                   llcrnrlon=the_ll_lon, llcrnrlat=the_ll_lat,
                   urcrnrlon=the_ur_lon, urcrnrlat=the_ur_lat,
                   lat_1=lat1, lon_1=lon1, lat_2=lat2, lon_2=lon2,
                   no_rot=True, anchor=anchor), lons2D, lats2D
def get_land_sea_glaciers_mask_from_geophysics_file(
        path="/b10_fs1/winger/Arctic/OMSC26_Can_long_new_v01/Geophys/land_sea_glacier_mask_free"):
    """Return a boolean mask that is True where the "FMSK" field is below 0.5."""
    rpn_file = RPN(path)
    try:
        return rpn_file.get_first_record_for_name("FMSK") < 0.5
    finally:
        rpn_file.close()
def test_nbits24():
    """Read a 24-bit packed field and compare its mean/variance to r.diag ggstat.

    Skips silently when the input file or the r.diag tool is unavailable.
    """
    path = "/skynet3_rech1/huziy/geophys_West_NA_0.25deg_144x115.fst"
    if not os.path.isfile(path):
        return
    r = RPN(path=path)
    data = r.get_first_record_for_name_and_level(varname="VF", level=2)
    print(data.shape, data.max(), data.min(), data.mean(), data.var())
    ok_(data.max() <= 1)
    proc = subprocess.Popen(["r.diag", "ggstat", path], stdout=subprocess.PIPE)
    (out, err) = proc.communicate()
    # Bug fix: communicate() returns (stdout, stderr) and stderr is None here
    # (it was never piped), so the old check `err != 0` was always True and the
    # ggstat comparison below never executed.  Check the exit code instead.
    if proc.returncode != 0:
        print("Warning: Could not find r.diag, this is not critical, but some tests will not be run.")
        return
    # Popen yields bytes on Python 3; decode before splitting
    lines = out.decode().split("\n")
    # keep only the line describing VF at level 2; a list (not a lazy filter
    # object) is needed for the indexing below
    lines = [line for line in lines if ("VF" in line) and ("2 ar" in line)]
    fields = lines[0].split()
    the_mean = float(fields[12])
    the_var = float(fields[13])
    ok_(abs(data.mean() - the_mean) < 1e-6, msg="The mean does not correspond to ggstat")
    ok_(abs(data.var() - the_var) < 1e-6, msg="The variance does not correspond to ggstat")
    r.close()
def get_seasonal_mean_for_year_of_2d_var(self, the_year, months=None, var_name=""):
    """
    Return mean over months of a given 2d field

    returns numpy array of dimensions (x, y)
    """
    # NOTE(review): months=None would raise a TypeError in the loop below;
    # callers are expected to pass an explicit list of month numbers.
    monthly_means = []
    for the_month in months:
        key = (the_year, the_month)
        # skip months for which no data file is registered
        if key not in self.yearmonth_to_data_path:
            print(("Warning donot have data for {0}/{1}".format(
                the_year, the_month)))
            continue
        path = self.yearmonth_to_data_path[key]
        rpn_obj = RPN(path)
        records = rpn_obj.get_all_time_records_for_name(varname=var_name)
        # time-mean over all records of the month
        monthly_means.append(np.mean(list(records.values()), axis=0))
        rpn_obj.close()
    print((the_year, np.min(np.mean(monthly_means, axis=0)),
           np.max(np.mean(monthly_means, axis=0))))
    return np.mean(monthly_means, axis=0)
def combine():
    """Combine the "MK" masks of two RPN files (mk * 7 + mask1) and save the
    result together with the grid coordinates to mask_combined.nc."""
    out_folder = "/skynet1_rech3/huziy/Converters/NetCDF_converter/"
    path1 = "/skynet1_rech3/huziy/Converters/NetCDF_converter/mappe.rpnw"
    r = RPN(path1)
    # interior of the first mask: a 10-cell margin is stripped on all sides
    mask1 = r.get_first_record_for_name("MK")[10:-10, 10:-10]
    r.close()
    path2 = "/skynet1_rech3/huziy/Converters/NetCDF_converter/champs_st.rpnw"
    r = RPN(path2)
    mk = r.get_first_record_for_name("MK")
    print(r.get_dateo_of_last_read_record())
    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    r.close()
    # combine the masks
    mk = mk * 7 + mask1
    # Write to netcdf file
    ds = Dataset(os.path.join(out_folder, "mask_combined.nc"), mode="w")
    vars_list = [mk, lons2d, lats2d]
    ni, nj = vars_list[0].shape
    ds.createDimension("lon", ni)
    ds.createDimension("lat", nj)
    var_names = ["MK", "longitude", "latitude"]
    for the_name, field in zip(var_names, vars_list):
        # fields are transposed so the netcdf dims are (lat, lon)
        ncVar = ds.createVariable(the_name, "f4", ("lat", "lon"))
        ncVar[:] = field.transpose()
    ds.close()
def get_bow_river_basin_mask(
        path="/RESCUE/skynet3_rech1/huziy/CNRCWP/Calgary_flood/Bow_river_basin_mask_NA_0.11deg.rpn"):
    """Read and return the Bow river basin mask field ("FMSK") from an RPN file."""
    rpn_file = RPN(path)
    try:
        return rpn_file.get_first_record_for_name("FMSK")
    finally:
        rpn_file.close()
def get_bulk_field_capacity(
        path="/skynet3_rech1/huziy/geofields_interflow_exp/pm1979010100_00000000p"):
    """Read and return the "D9" record from the given RPN file."""
    rpn_file = RPN(path)
    try:
        return rpn_file.get_first_record_for_name("D9")
    finally:
        rpn_file.close()
def test_area(self):
    """Scatter-plot cell areas from an RPN file ("DX") against the cell_area
    variable of a NetCDF file, over cells where the NetCDF area is positive."""
    path_rpn = "/home/huziy/skynet3_exec1/from_guillimin/quebec_highres_spinup_12_month_without_lakes/pm1985010100_00000000p"
    path_nc = "/home/huziy/skynet3_rech1/Netbeans Projects/Java/DDM/test_area.nc"
    area_rpn = RPN(path=path_rpn).get_first_record_for_name("DX")
    area_nc = Dataset(path_nc).variables["cell_area"][:]
    fig = plt.figure()
    assert isinstance(fig, Figure)
    # NetCDF field is cropped by a 20-cell margin to match the RPN grid
    flat_nc_area = area_nc[20:-20, 20:-20].flatten()
    # divide by 1.0e6 — presumably m**2 -> km**2; confirm against the files
    flat_rpn_area = area_rpn.flatten() / 1.0e6
    # keep only cells where the NetCDF area is positive
    flat_rpn_area = flat_rpn_area[flat_nc_area > 0]
    flat_nc_area = flat_nc_area[flat_nc_area > 0]
    plt.scatter(flat_rpn_area, flat_nc_area, linewidths=0)
    print(flat_nc_area - flat_rpn_area)
    # 1:1 reference line
    x1 = min(flat_rpn_area)
    x2 = max(flat_rpn_area)
    plt.plot([x1, x2], [x1, x2], "k")
    plt.xlabel("CRCM5")
    plt.ylabel("Upscaler")
    # plt.show()
    pass
def main():
    """Plot the "SANI" field (values below 2 masked) on a rotated-pole basemap
    and save the figure to soil_anis.jpeg."""
    path = "/skynet3_rech1/huziy/from_guillimin/new_outputs/quebec_0.1_crcm5-hcd-rl-intfl_spinup2/Samples/quebec_crcm5-hcd-rl-intfl_197901/pm1979010100_00000000p"
    rObj = RPN(path)
    sani = rObj.get_first_record_for_name("SANI")
    lons, lats = rObj.get_longitudes_and_latitudes_for_the_last_read_rec()
    basemap = Crcm5ModelDataManager.get_rotpole_basemap_using_lons_lats(
        lons2d=lons, lats2d=lats)
    x, y = basemap(lons, lats)
    # hide values below the lowest contour level
    sani = np.ma.masked_where(sani < 2, sani)
    levels = [2, 3, 4, 5, 6, 7, 8, 10, 15, 20, 25, 30, 40]
    cmap = cm.get_cmap("jet", len(levels) - 1)
    bn = BoundaryNorm(levels, cmap.N)
    fig = plt.figure()
    basemap.contourf(x, y, sani, levels=levels, cmap=cmap, norm=bn)
    basemap.drawcoastlines()
    basemap.colorbar(ticks=levels)
    fig.tight_layout()
    fig.savefig("soil_anis.jpeg")
    pass
def test_area(self):
    """Scatter-plot cell areas from an RPN file ("DX") against the cell_area
    variable of a NetCDF file, over cells where the NetCDF area is positive."""
    path_rpn = "/home/huziy/skynet3_exec1/from_guillimin/quebec_highres_spinup_12_month_without_lakes/pm1985010100_00000000p"
    path_nc = "/home/huziy/skynet3_rech1/Netbeans Projects/Java/DDM/test_area.nc"
    area_rpn = RPN(path=path_rpn).get_first_record_for_name("DX")
    area_nc = Dataset(path_nc).variables["cell_area"][:]
    fig = plt.figure()
    assert isinstance(fig, Figure)
    # NetCDF field is cropped by a 20-cell margin to match the RPN grid
    flat_nc_area = area_nc[20:-20, 20:-20].flatten()
    # divide by 1.0e6 — presumably m**2 -> km**2; confirm against the files
    flat_rpn_area = area_rpn.flatten() / 1.0e6
    flat_rpn_area = flat_rpn_area[flat_nc_area > 0]
    flat_nc_area = flat_nc_area[flat_nc_area > 0]
    plt.scatter(flat_rpn_area, flat_nc_area, linewidths=0)
    print(flat_nc_area - flat_rpn_area)
    # 1:1 reference line
    x1 = min(flat_rpn_area)
    x2 = max(flat_rpn_area)
    plt.plot([x1, x2], [x1, x2], "k")
    plt.xlabel("CRCM5")
    plt.ylabel("Upscaler")
    # plt.show()
    pass
def main():
    """Plot the "SANI" field (values below 2 masked) on a rotated-pole basemap
    and save the figure to soil_anis.jpeg."""
    path = "/skynet3_rech1/huziy/from_guillimin/new_outputs/quebec_0.1_crcm5-hcd-rl-intfl_spinup2/Samples/quebec_crcm5-hcd-rl-intfl_197901/pm1979010100_00000000p"
    rObj = RPN(path)
    sani = rObj.get_first_record_for_name("SANI")
    lons, lats = rObj.get_longitudes_and_latitudes_for_the_last_read_rec()
    basemap = Crcm5ModelDataManager.get_rotpole_basemap_using_lons_lats(lons2d=lons, lats2d=lats)
    x, y = basemap(lons, lats)
    # hide values below the lowest contour level
    sani = np.ma.masked_where(sani < 2, sani)
    levels = [2, 3, 4, 5, 6, 7, 8, 10, 15, 20, 25, 30, 40]
    cmap = cm.get_cmap("jet", len(levels) - 1)
    bn = BoundaryNorm(levels, cmap.N)
    fig = plt.figure()
    basemap.contourf(x, y, sani, levels=levels, cmap=cmap, norm=bn)
    basemap.drawcoastlines()
    basemap.colorbar(ticks=levels)
    fig.tight_layout()
    fig.savefig("soil_anis.jpeg")
    pass
def extract_field(name="VF", level=3, in_file="", out_file=None, margin=0):
    """Extract one 2D field (with coordinates) from an RPN file and write it
    to a NETCDF3_CLASSIC file.

    :param name: RPN variable name to extract
    :param level: vertical level of the record
    :param in_file: input RPN file path
    :param out_file: output NetCDF path; defaults to in_file + "_lf.nc"
    :param margin: number of cells trimmed from the upper ends of each axis
    """
    if out_file is None:
        out_file = in_file + "_lf.nc"
    rObj = RPN(in_file)
    field = rObj.get_first_record_for_name_and_level(varname=name, level=level)
    lons2d, lats2d = rObj.get_longitudes_and_latitudes_for_the_last_read_rec()
    rObj.close()
    # normalize longitudes to the [-180, 180] range
    lons2d[lons2d > 180] -= 360.0
    ds = nc.Dataset(out_file, "w", format="NETCDF3_CLASSIC")
    nx, ny = field.shape
    ds.createDimension("lon", nx - margin)
    ds.createDimension("lat", ny - margin)
    var = ds.createVariable(name, "f4", dimensions=("lon", "lat"))
    lonVar = ds.createVariable("longitude", "f4", dimensions=("lon", "lat"))
    latVar = ds.createVariable("latitude", "f4", dimensions=("lon", "lat"))
    # Fix: the original assigned the field twice (copy-paste duplicate);
    # writing it once is sufficient.
    var[:] = field[:nx - margin, :ny - margin]
    lonVar[:] = lons2d[:nx - margin, :ny - margin]
    latVar[:] = lats2d[:nx - margin, :ny - margin]
    ds.close()
def get_basemap_glaciers_nw_america():
    """Build a rotated-lat/lon Basemap for the West-NA depth-to-bedrock grid."""
    rpn_file = RPN("/RESCUE/skynet3_rech1/huziy/CNRCWP/C3/Depth_to_bedrock_WestNA_0.25")
    # read a record first so the projection/coordinate queries below refer to it
    rpn_file.get_first_record_for_name("8L")
    proj_params = rpn_file.get_proj_parameters_for_the_last_read_rec()
    lon_field, lat_field = rpn_file.get_longitudes_and_latitudes_for_the_last_read_rec()
    rotated_grid = RotatedLatLon(**proj_params)
    return rotated_grid.get_basemap_object_for_lons_lats(lons2d=lon_field, lats2d=lat_field)
def demo_north_pole():
    """Plot the "I0" field on an oblique-Mercator basemap anchored at the grid
    center, with corners taken from the grid corner coordinates."""
    r = RPN(path="/home/huziy/skynet3_rech1/classOff_Andrey/era2/temp_3d")
    t = r.get_first_record_for_name("I0")
    lon, lat = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    r.close()
    nx, ny = lon.shape
    # grid-center coordinates used as the projection origin
    lon_0, lat_0 = lon[nx // 2, ny // 2], lat[nx // 2, ny // 2]
    basemap = Basemap(projection="omerc",
                      lon_1=60, lat_1=89.999, lon_2=-30, lat_2=0,
                      no_rot=True, lon_0=lon_0, lat_0=lat_0,
                      llcrnrlon=lon[0, 0], llcrnrlat=lat[0, 0],
                      urcrnrlon=lon[-1, -1], urcrnrlat=lat[-1, -1])
    x, y = basemap(lon, lat)
    basemap.contourf(x, y, t)
    basemap.drawcoastlines()
    basemap.colorbar()
    # basemap.shadedrelief()
    plt.show()
def _get_topography():
    """Return the "ME" record at level 0 from the interflow-experiment geophysics file."""
    from rpn.rpn import RPN
    geo_path = "/skynet3_rech1/huziy/geofields_interflow_exp/geophys_Quebec_0.1deg_260x260_with_dd_v6_with_ITFS"
    rpn_file = RPN(path=geo_path)
    try:
        return rpn_file.get_first_record_for_name_and_level("ME", level=0)
    finally:
        rpn_file.close()
def get_bow_river_basin_mask(
        path="/RESCUE/skynet3_rech1/huziy/CNRCWP/Calgary_flood/Bow_river_basin_mask_NA_0.11deg.rpn"):
    """Read and return the Bow river basin mask field ("FMSK") from an RPN file."""
    r = RPN(path)
    msk = r.get_first_record_for_name("FMSK")
    r.close()
    return msk
def test_write_rpn_compressed():
    """Write a compressed 2D field to a temporary RPN file and, when r.diag is
    available, verify that ggstat reports the expected min/max/mean and nbits.

    The temporary file is removed even when an assertion fails.
    """
    wfile = "test.rpn"
    # Fix: the original wrapped everything in `except Exception as e: raise e`,
    # which only destroys the traceback; the try/finally alone is enough.
    try:
        r = RPN(wfile, mode="w")
        nx = ny = 10
        arr = np.zeros((nx, ny), dtype="f4")
        for i in range(nx):
            for j in range(ny):
                arr[i, j] = i ** 2 + j ** 2
        r.write_2D_field(
            name="TEST", data=arr,
            data_type=data_types.compressed_floating_point, nbits=-16)
        r.close()
        if is_rdiag_available():
            proc = subprocess.Popen(["r.diag", "ggstat", wfile], stdout=subprocess.PIPE)
            (out, err) = proc.communicate()
            out = out.decode()
            print(out)
            print(type(out), type("some str"))
            ok_("{:E}".format(arr.max()) in out,
                "Could not find the max={:E} in {}".format(arr.max(), out))
            ok_("{:E}".format(arr.min()) in out,
                "Could not find the min={:E} in {}".format(arr.min(), out))
            ok_("{:E}".format(arr.mean()) in out,
                "Could not find the mean={:E} in {}".format(arr.mean(), out))
            ok_("{}".format(16) in out, "Could not find 16 in the ggstat output")
            print("{:E}".format(arr.mean()), "{:E}".format(arr.min()), "{:E}".format(arr.max()))
    finally:
        os.remove(wfile)
def test_write_specified_projection():
    """
    Should determine the projection params (ig1,2,3,4) for rpn file
    save them and not fail
    """
    lon1 = 180
    lat1 = 0.0
    lon2 = -84
    lat2 = 1.0
    wfile = "test.rpn"
    # Fix: dropped the pointless `except Exception as e: raise e` wrapper —
    # it only destroyed the traceback; try/finally keeps the cleanup.
    try:
        r = RPN(wfile, mode="w")
        nx = ny = 10
        arr = np.zeros((nx, ny), dtype="f4")
        for i in range(nx):
            for j in range(ny):
                arr[i, j] = i ** 2 + j ** 2
        print("Z".encode())
        r.write_2D_field(
            name="TEST", data=arr,
            data_type=data_types.compressed_floating_point, nbits=-16,
            lon1=lon1, lon2=lon2, lat1=lat1, lat2=lat2, grid_type=b"E")
        r.close()
    finally:
        os.remove(wfile)
def get_mean_2d_from_climatologies(cls, path="", file_prefixes=None, file_suffixes=None, var_name=""):
    """
    When you have a folder with climatologies, use this method

    Averages the first record of var_name over all files whose names match
    one of the given prefixes AND one of the given suffixes; when a filter
    is None, every file name in the folder is accepted for that filter.
    """
    if file_prefixes is None:
        file_prefixes = os.listdir(path)
    if file_suffixes is None:
        file_suffixes = os.listdir(path)

    field_list = []
    for file_name in os.listdir(path):
        prefix_ok = any(file_name.startswith(prefix) for prefix in file_prefixes)
        suffix_ok = any(file_name.endswith(suffix) for suffix in file_suffixes)
        if not (prefix_ok and suffix_ok):
            continue
        rpn_obj = RPN(os.path.join(path, file_name))
        field_list.append(rpn_obj.get_first_record_for_name(var_name))
        rpn_obj.close()
    return np.array(field_list).mean(axis=0)
def _get_topography():
    """Return the "ME" record at level 0 from the interflow-experiment geophysics file."""
    path = "/skynet3_rech1/huziy/geofields_interflow_exp/geophys_Quebec_0.1deg_260x260_with_dd_v6_with_ITFS"
    from rpn.rpn import RPN
    r = RPN(path=path)
    data = r.get_first_record_for_name_and_level("ME", level=0)
    r.close()
    return data
def main():
    """Convert TRMM HDF4 files into RPN files with "PR" (precipitation) and
    "RERR" (relativeError) fields on an "L" grid."""
    varname_to_rpn_name = {
        "precipitation": "PR",
        "relativeError": "RERR"
    }
    varnames = list(varname_to_rpn_name.keys())
    target_dir = "/skynet3_rech1/huziy/from_hdf4"
    source_dir = "/st1_fs2/winger/Validation/TRMM/HDF_format"
    for f_name in os.listdir(source_dir):
        if not f_name.endswith("HDF"):
            continue
        path = os.path.join(source_dir, f_name)
        ds = SD(path)
        print(ds.datasets())
        target_path = os.path.join(target_dir, f_name + ".rpn")
        r_obj = RPN(target_path, mode="w")
        for varname in varnames:
            # first slice of the HDF dataset — presumably time; confirm
            var_data = ds.select(varname)[0, :, :]
            r_obj.write_2D_field(
                name=varname_to_rpn_name[varname],
                data=var_data, label=varname, grid_type="L",
                ig=[25, 25, 4013, 18012])
        r_obj.close()
def plot_only_vegetation_fractions(
        data_path="/RESCUE/skynet3_rech1/huziy/geof_lake_infl_exp/geophys_Quebec_0.1deg_260x260_with_dd_v6_with_ITFS",
        canopy_name="VF", label="QC_10km"):
    """Plot the vegetation-fraction field (all levels) on a rotpole basemap,
    masking ocean points, and save veg_fractions_<label>.png next to the data.

    :param data_path: geophysics RPN file with the canopy field
    :param canopy_name: RPN name of the vegetation-fraction variable
    :param label: used in the output image file name
    """
    r = RPN(data_path)
    veg_fractions = r.get_2D_field_on_all_levels(name=canopy_name)
    print(list(veg_fractions.keys()))
    proj_params = r.get_proj_parameters_for_the_last_read_rec()
    lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    print(lons.shape)
    rll = RotatedLatLon(lon1=proj_params["lon1"], lat1=proj_params["lat1"],
                        lon2=proj_params["lon2"], lat2=proj_params["lat2"])
    lon0, lat0 = rll.get_true_pole_coords_in_rotated_system()
    plon, _ = rll.get_north_pole_coords()
    b = Basemap(projection="rotpole",
                llcrnrlon=lons[0, 0], llcrnrlat=lats[0, 0],
                urcrnrlon=lons[-1, -1], urcrnrlat=lats[-1, -1],
                lon_0=lon0 - 180, o_lon_p=lon0, o_lat_p=lat0)
    # normalize longitudes to [-180, 180] for maskoceans
    lons[lons > 180] -= 360
    for lev in list(veg_fractions.keys()):
        veg_fractions[lev] = maskoceans(lons, lats, veg_fractions[lev], inlands=False)
    x, y = b(lons, lats)
    plot_veg_fractions(x, y, b, veg_fractions,
                       out_image=os.path.join(os.path.dirname(data_path),
                                              "veg_fractions_{0}.png".format(label)))
def get_depth_to_bedrock(
        path="/home/huziy/skynet1_rech3/cordex/NorthAmerica_0.44deg_ERA40-Int_195801_static_data.rpn"):
    """Read and return the depth-to-bedrock field ("8L") from a static-data RPN file."""
    rpn_file = RPN(path)
    try:
        return rpn_file.get_first_record_for_name("8L")
    finally:
        rpn_file.close()
def get_land_sea_glaciers_mask_from_geophysics_file(
        path="/b10_fs1/winger/Arctic/OMSC26_Can_long_new_v01/Geophys/land_sea_glacier_mask_free"):
    """Return a boolean mask that is True where the "FMSK" field is below 0.5."""
    r = RPN(path)
    mask = r.get_first_record_for_name("FMSK") < 0.5
    r.close()
    return mask
def get_domain_coords_and_basemap(
        coord_file="~/skynet3_exec1/from_guillimin/quebec_test_lake_level_260x260/Samples/quebec_220x220_198505/pm1985050100_00000000p",
        lon1=180, lat1=0, lon2=180, lat2=0):
    """Read 2D coordinates from an RPN file and build a basemap.

    NOTE(review): this looks unfinished — the projection parameters
    (lon1/lat1/lon2/lat2) are ignored, nothing is returned, and rpnObj is
    never closed.  Confirm intended behavior before using.
    """
    rpnObj = RPN(coord_file)
    lons2d, lats2d = rpnObj.get_longitudes_and_latitudes()
    basemap = Basemap(projection="omerc")
    pass
def main():
    """Compare streamflow hydrographs of reanalysis-driven and GCM-driven CRCM5
    runs for selected lake-rich basins and plot the comparison."""
    import application_properties

    application_properties.set_current_directory()
    # Create folder for output images
    if not img_folder.is_dir():
        img_folder.mkdir(parents=True)
    rea_driven_path = "/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl.hdf5"
    rea_driven_label = "ERAI-CRCM5-L"
    gcm_driven_path = "/skynet3_rech1/huziy/hdf_store/cc-canesm2-driven/quebec_0.1_crcm5-hcd-rl-cc-canesm2-1980-2010.hdf5"
    gcm_driven_label = "CanESM2-CRCM5-L"
    start_year_c = 1980
    end_year_c = 2010
    varname = "STFL"
    params = dict(
        data_path=rea_driven_path, start_year=start_year_c, end_year=end_year_c,
        label=rea_driven_label)
    geo_data_file = "/skynet3_rech1/huziy/hdf_store/pm1979010100_00000000p"
    rea_driven_config = RunConfig(**params)
    # same period/variable, GCM-driven data
    params.update(dict(data_path=gcm_driven_path, label=gcm_driven_label))
    gcm_driven_config = RunConfig(**params)
    # static fields: flow accumulation, flow directions, lake fraction
    r_obj = RPN(geo_data_file)
    facc = r_obj.get_first_record_for_name("FAA")
    fldr = r_obj.get_first_record_for_name("FLDR")
    lkfr = r_obj.get_first_record_for_name("ML")
    bmp_info = analysis.get_basemap_info_from_hdf(file_path=rea_driven_path)
    basin_name_to_out_indices_map, basin_name_to_basin_mask = get_basin_to_outlet_indices_map(
        bmp_info=bmp_info, accumulation_areas=facc, directions=fldr,
        lake_fraction_field=lkfr)
    # select lake rich basins
    sel_basins = ["ARN", "PYR", "LGR", "RDO", "SAG", "WAS"]
    basin_name_to_out_indices_map = {k: v for k, v in basin_name_to_out_indices_map.items()
                                     if k in sel_basins}
    rea_driven_daily = analysis.get_daily_climatology_for_rconf(rea_driven_config,
                                                                var_name=varname, level=0)
    gcm_driven_daily = analysis.get_daily_climatology_for_rconf(gcm_driven_config,
                                                                var_name=varname, level=0)
    rea_driven_config.data_daily = rea_driven_daily
    gcm_driven_config.data_daily = gcm_driven_daily
    plot_comparison_hydrographs(basin_name_to_out_indices_map,
                                rea_config=rea_driven_config,
                                gcm_config=gcm_driven_config)
def plot_lake_fraction(path="data/from_guillimin/vary_lake_level1/pm1985010100_00000000p",
                       var_name="LF1", lons2d=None, lats2d=None, basemap=None):
    """Read the lake-fraction field from an RPN file and plot it via _plot_depth
    with contour levels 0..1 in steps of 0.1."""
    r = RPN(path)
    field = r.get_first_record_for_name(var_name)
    r.close()
    _plot_depth(field, lons2d, lats2d, basemap=basemap,
                clevels=np.arange(0, 1.1, 0.1), lowest_value=0.001)
    pass
def main():
    """Compute winter (Dec/Jan/Feb) means of "I5" per year (2006-2100) from an
    RPN file and write them, with coordinates, to a NETCDF3_CLASSIC file."""
    # path = "/home/huziy/skynet3_rech1/test/snw_LImon_NA_CRCM5_CanESM2_historical_r1i1p1_185001-200512.rpn"
    path = "/home/sheena/skynet3_exec2/RPN/src/permafrost/snw_NA_CRCM5_CanESM2_rcp45_r1i1p1_200601-210012.rpn"
    months = [1, 2, 12]
    varname = "I5"
    rObj = RPN(path)
    records = rObj.get_all_time_records_for_name(varname=varname)
    lons2d, lats2d = rObj.get_longitudes_and_latitudes()
    rObj.close()
    times = sorted(records.keys())
    vals = np.array([records[t] for t in times])
    year_range = list(range(2006, 2101))
    nc_file_name = "{0:s}_{1:d}_{2:d}.nc".format(varname, year_range[0], year_range[-1])
    nx, ny = vals[0].shape
    # create netcdf file
    ds = Dataset(nc_file_name, "w", format='NETCDF3_CLASSIC')
    ds.createDimension('lon', nx)
    ds.createDimension('lat', ny)
    ds.createDimension("year", len(year_range))
    the_var = ds.createVariable(varname, 'f', ("year", 'lat', 'lon'))
    the_lon = ds.createVariable("xlon", 'f', ('lat', 'lon'))
    the_lat = ds.createVariable("xlat", 'f', ('lat', 'lon'))
    for i, the_year in enumerate(year_range):
        # select the records belonging to this year's chosen months
        bool_vector = [t.year == the_year and t.month in months for t in times]
        bool_vector = np.array(bool_vector)
        the_var[i, :, :] = np.mean(vals[bool_vector], axis=0).transpose()
    the_lon[:] = lons2d[:, :].transpose()
    the_lat[:] = lats2d[:, :].transpose()
    ds.close()
    # TODO: implement
    pass
def test_write_field_2d_clean():
    """
    Testing write 2d field

    Writes a random field to a temporary RPN file, reads it back and checks
    that the mean is preserved; the temp file is always removed.
    """
    import os

    tfile = "temp.rpn"
    r = None
    try:
        r = RPN(tfile, mode="w")
        data = np.random.randn(10, 10)
        data = data.astype(np.float32)
        r.write_2d_field_clean(data, properties={"name": "RAND"})
        r.close()
        # read the field back and compare means
        r = RPN(tfile)
        data1 = r.get_first_record_for_name("RAND")
        v0, v1 = data.mean(), data1.mean()
        ok_(abs(v1 - v0) <= 1e-6,
            "Saved ({0}) and retrieved ({1}) means are not the same.".format(v0, v1))
    finally:
        if r is not None:
            r.close()
        os.remove(tfile)
def main():
    """Decode a few known CMC date stamps via newdate_wrapper, list the TRAF
    record times of one file, then interactively scan TDRA records of a folder.

    Requires user interaction (input()) between files of the second part.
    """
    # path = "/b2_fs2/huziy/OMSC26_Can_long_new_v01/pm1958010100_02275344p"
    path = "/b2_fs2/huziy/OMSC26_Can_long_new_v01/pm1958010100_00008640p"
    r = RPN(path)
    res_date = c_int()
    res_time = c_int()
    mode = c_int(-3)
    # The original repeated an identical conversion block four times for the
    # four stamps below; a loop keeps the behavior and removes the duplication.
    for dateo in (10158030, 488069900, 1069261100, 632053700):
        r._dll.newdate_wrapper(byref(c_int(dateo)), byref(res_date),
                               byref(res_time), byref(mode))
        s_date = "{0:08d}{1:08d}".format(res_date.value, res_time.value)
        print("stamp: {0:09d}, result: {1}".format(dateo, s_date))
    ts = r.get_all_time_records_for_name("TRAF")
    times = list(sorted(ts.keys()))
    print(times[:20])
    print(times[0], times[-1])
    r.close()
    folderPath = "/b2_fs2/huziy/OMSC26_ERA40I_long_new_v02/"
    for fName in os.listdir(folderPath):
        if not fName.startswith("pm"):
            continue
        fPath = os.path.join(folderPath, fName)
        r = RPN(fPath)
        r.suppress_log_messages()
        data = r.get_all_time_records_for_name(varname="TDRA")
        r.close()
        print(fName)
        print(sorted(data.keys())[:5])
        print(25 * "*")
        input("press any key")
def plot_hydrographs():
    """Plot climatological streamflow at basin outlets for the base (NI) and
    modified (WI) runs, plus their difference, one figure per basin."""
    plot_utils.apply_plot_params(font_size=14, width_pt=None, width_cm=20, height_cm=20)
    start_year = 1980
    end_year = 2010
    varname = "STFL"
    base_config = RunConfig(start_year=start_year, end_year=end_year,
                            data_path="/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl.hdf5",
                            label="NI")
    modif_config = RunConfig(start_year=start_year, end_year=end_year,
                             data_path="/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl-intfl_ITFS.hdf5",
                             label="WI")
    # static fields: flow accumulation and directions
    r_obj = RPN(GEO_DATA_FILE)
    facc = r_obj.get_first_record_for_name("FAA")
    fldr = r_obj.get_first_record_for_name("FLDR")
    lons, lats, bmp = analysis.get_basemap_from_hdf(file_path=base_config.data_path)
    basin_name_to_out_indices_map, basin_name_to_basin_mask = get_basin_to_outlet_indices_map(
        lons=lons, lats=lats, accumulation_areas=facc, directions=fldr)
    # Calculate the daily mean fields
    dates, stf_base = analysis.get_daily_climatology_for_rconf(base_config, var_name=varname, level=0)
    _, stf_modif = analysis.get_daily_climatology_for_rconf(modif_config, var_name=varname, level=0)
    for bname, (i_out, j_out) in basin_name_to_out_indices_map.items():
        print(bname, i_out, j_out)
        fig = plt.figure()
        gs = GridSpec(2, 1, height_ratios=[1, 0.5], hspace=0.1)
        # upper panel: both hydrographs
        ax = fig.add_subplot(gs[0, 0])
        ax.plot(dates, stf_base[:, i_out, j_out], "b", lw=2, label=base_config.label)
        ax.plot(dates, stf_modif[:, i_out, j_out], "r", lw=2, label=modif_config.label)
        ax.set_title(bname)
        format_axis(ax)
        # Hide the tick labels from the x-axis of the upper plot
        for tl in ax.xaxis.get_ticklabels():
            tl.set_visible(False)
        # lower panel: difference (modified minus base)
        ax = fig.add_subplot(gs[1, 0])
        ax.plot(dates, stf_modif[:, i_out, j_out] - stf_base[:, i_out, j_out],
                "k", lw=2, label="{}-{}".format(modif_config.label, base_config.label))
        format_axis(ax)
        fig.savefig(str(IMG_FOLDER.joinpath("{}_{}-{}.png".format(bname, start_year, end_year))))
        plt.close(fig)
def plot_initial_lake_depth(path="data/from_guillimin/vary_lake_level1/pm1985010100_00000000p",
                            var_name="CLDP", lons2d=None, lats2d=None, basemap=None):
    """
    returns initial lake depth field

    Also plots the field via _plot_depth with levels 0..300 in steps of 10.
    """
    r = RPN(path)
    field = r.get_first_record_for_name(var_name)
    r.close()
    _plot_depth(field, lons2d, lats2d, basemap=basemap, clevels=range(0, 310, 10))
    return field
def main():
    """Copy all records of an RPN file to a new file, replacing one specific
    ig quadruple (ig_to_change) with new_ig, and verify the record count."""
    folder = "/home/huziy/skynet3_exec1/modify_igs_in_rpn_file"
    in_file = "ANAL_NorthAmerica_0.44deg_MPIRCP45_B1_100_2070120100"
    out_file = in_file + "_ig_changed"
    rObjIn = RPN(os.path.join(folder, in_file))
    rObjOut = RPN(os.path.join(folder, out_file), mode="w")
    ig_to_change = [1375, 0, 56480, 56480]
    new_ig = [499, 1064, 0, 0]
    data = []
    i = 0
    while data is not None:
        data = rObjIn.get_next_record()
        if data is None:
            break
        # Fix: get_current_info was referenced without calling it, so `info`
        # was a bound method and every subscript below would fail (see the
        # r.get_current_info() call elsewhere in this file).
        info = rObjIn.get_current_info()
        nbits = info["nbits"].value
        data_type = info["data_type"].value
        # negative nbits requests compressed storage on write
        if nbits > 0:
            nbits = -nbits
        print("nbits = {0}, data_type = {1}".format(nbits, data_type))
        ips = [x.value for x in info["ip"]]
        npas = info["npas"].value
        deet = info["dt_seconds"].value
        dateo = info["dateo"]
        igold = [int(ig.value) for ig in info["ig"]]
        if igold == ig_to_change:
            info["ig"] = [c_int(ig) for ig in new_ig]
        rObjOut.write_2D_field(name=info["varname"].value,
                               data=data, ip=ips,
                               ig=[x.value for x in info["ig"]],
                               npas=npas, deet=deet, label="", dateo=dateo,
                               grid_type=info["grid_type"].value,
                               typ_var=info["var_type"].value,
                               nbits=nbits, data_type=data_type)
        i += 1
    # check that all fields were copied
    nRecsIn = rObjIn.get_number_of_records()
    assert i == nRecsIn, "copied {0} records, but should be {1}".format(i, nRecsIn)
    rObjIn.close()
    rObjOut.close()
def compare_2d(path_base, path_list, label_list):
    """
    compare only monthly fields
    """
    # NOTE(review): path_list and label_list are unused in this body and
    # nothing is done with data_base — the function may be truncated or a
    # work in progress; confirm before relying on it.
    delta_small = 1e-6
    nvert_levs_for_soiltemp = 3  # Compare only 3 levels of the soil temperature
    img_folder = "{:%Y%m%d}".format(datetime.now())
    for vname in ["TBAR", "SNO"]:
        r = RPN(os.path.join(path_base, "{}_monthly_fields.rpn".format(vname)))
        data_base = r.get_4d_field_fc_hour_as_time(name=vname)
        r.close()
def create_files(fnames=FILE_NAMES):
    """Create one small RPN file per name, each holding a single compressed
    10x10 field T<i> with values i**2 + j**2."""
    grid_size = 10
    for file_index, file_name in enumerate(fnames):
        writer = RPN(file_name, mode="w")
        field = np.zeros((grid_size, grid_size), dtype="f4")
        for row in range(grid_size):
            for col in range(grid_size):
                field[row, col] = row ** 2 + col ** 2
        writer.write_2D_field(
            name="T{}".format(file_index),
            data=field,
            data_type=data_types.compressed_floating_point,
            nbits=-16)
        writer.close()
def compare_2d(path_base, path_list, label_list):
    """
    compare only monthly fields
    """
    # NOTE(review): path_list and label_list are unused in this body and
    # nothing is done with data_base — the function may be truncated or a
    # work in progress; confirm before relying on it.
    delta_small = 1e-6
    nvert_levs_for_soiltemp = 3  # Compare only 3 levels of the soil temperature
    img_folder = "{:%Y%m%d}".format(datetime.now())
    for vname in ["TBAR", "SNO"]:
        r = RPN(os.path.join(path_base, "{}_monthly_fields.rpn".format(vname)))
        data_base = r.get_4d_field_fc_hour_as_time(name=vname)
        r.close()
def extract_runoff_to_netcdf_file(filePath='data/pm1957090100_00589248p', outDir=None):
    """Extract surface (TRAF) and subsurface (TDRA) runoff at level 5 from an
    RPN file and write them to a NETCDF3_CLASSIC file named <input>.nc.

    :param filePath: input RPN file
    :param outDir: directory for the output file.  Fix: this parameter was
        accepted but silently ignored before; None keeps the old behavior of
        writing next to the input file.
    """
    surface_runoff_name = 'TRAF'
    subsurface_runoff_name = 'TDRA'
    level_tdra = 5
    level_traf = 5
    print(filePath)
    # get data from the rpn file
    rpnObj = RPN(filePath)
    assert rpnObj.get_number_of_records() > 4, filePath
    surfRunoff = rpnObj.get_first_record_for_name_and_level(surface_runoff_name, level=level_traf)
    subSurfRunoff = rpnObj.get_first_record_for_name_and_level(subsurface_runoff_name, level=level_tdra)
    nx, ny = surfRunoff.shape
    out_path = filePath + '.nc'
    if outDir is not None:
        out_path = os.path.join(outDir, os.path.basename(out_path))
    ncFile = nc.Dataset(out_path, 'w', format='NETCDF3_CLASSIC')
    ncFile.createDimension('lon', nx)
    ncFile.createDimension('lat', ny)
    surfRunoffVar = ncFile.createVariable(surface_runoff_name, 'f', ('lon', 'lat'))
    subSurfRunoffVar = ncFile.createVariable(subsurface_runoff_name, 'f', ('lon', 'lat'))
    subSurfRunoffVar[:] = subSurfRunoff
    surfRunoffVar[:] = surfRunoff
    # store the validity date as a global attribute
    ncFile.forecast_hour = rpnObj.get_current_validity_date()
    ncFile.close()
    rpnObj.close()
def correct(path):
    """Overwrite the stored lake-fraction array of an HDF file with the "ML"
    field read from SOURCE_PATH."""
    print("Working on {0} ...".format(path))
    hdf_handle = tb.open_file(path, "a")
    # read data from the rpn file
    source = RPN(SOURCE_PATH)
    lake_fraction = source.get_first_record_for_name("ML")
    source.close()
    hdf_handle.get_node("/", infovar.HDF_LAKE_FRACTION_NAME)[:] = lake_fraction
    hdf_handle.close()
def correct(path):
    """Overwrite the stored lake-fraction array of an HDF file with the "ML"
    field read from SOURCE_PATH."""
    print("Working on {0} ...".format(path))
    h = tb.open_file(path, "a")
    # read data from the rpn file
    r = RPN(SOURCE_PATH)
    lkfr = r.get_first_record_for_name("ML")
    r.close()
    h.get_node("/", infovar.HDF_LAKE_FRACTION_NAME)[:] = lkfr
    h.close()
def read_and_plot_ts_cross(path="", exp_name=""):
    """Plot soil-column time series of the "ADD" variable at several grid
    points, using layer depths derived from a fixed layer-width table.

    :param path: RPN file with the 4D "ADD" field (forecast hour as time)
    :param exp_name: experiment label forwarded to plot_time_series
    """
    var_interest = "ADD"
    path_to_dpth_to_bedrock = "/skynet1_rech3/huziy/CLASS_offline_VG/GEOPHYSICAL_FIELDS/test_analysis.rpn"
    # read depth to bedrock (only to obtain coordinates and the basemap)
    r = RPN(path_to_dpth_to_bedrock)
    _ = r.get_first_record_for_name("DPTH")
    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    rll = RotatedLatLon(**r.get_proj_parameters_for_the_last_read_rec())
    b = rll.get_basemap_object_for_lons_lats(lons2d=lons2d, lats2d=lats2d, resolution="c")
    r.close()
    layer_widths = [0.1, 0.2, 0.3, 0.5, 0.9, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5,
                    1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5,
                    1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5,
                    1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5, 1.5]
    nlayers = 7
    nt = 200 * 12
    layer_widths = layer_widths[:nlayers]
    print(len(layer_widths))
    # calculate depths of soil layer centers
    soil_lev_tops = np.cumsum([0, ] + layer_widths[:-1])
    soil_lev_bottoms = np.cumsum(layer_widths)
    soil_levs = 0.5 * (soil_lev_tops + soil_lev_bottoms)
    # grid indices of the points of interest
    i_interest_list, j_interest_list = [120, 120, 160, 170], [50, 60, 60, 60]
    # NOTE(review): this second RPN handle is never closed
    r = RPN(path)
    data = r.get_4d_field_fc_hour_as_time(name=var_interest)
    lev_sorted = list(sorted(list(data.items())[0][1].keys()))[:nlayers]
    fc_sorted = list(sorted(data.keys()))[:nt]
    for i_interest, j_interest in zip(i_interest_list, j_interest_list):
        # (time, level) array for the selected point
        data1 = np.asarray(
            [[data[fc][lev][i_interest, j_interest] for lev in lev_sorted] for fc in fc_sorted])
        plot_time_series(data=data1, soil_levels=soil_levs, basemap=b,
                         i_interest=i_interest, j_interest=j_interest,
                         longitude=lons2d[i_interest, j_interest],
                         latitude=lats2d[i_interest, j_interest],
                         exp_name=exp_name)
def compare_soiltemp_1d(path_base, path_list, label_list):
    """Plot domain-mean soil-temperature (TBAR, level 0, degC) time series for a
    base run and several other runs, plus the differences, and save the figure.

    :param path_base: folder of the base run (must contain TBAR_monthly_fields.rpn)
    :param path_list: folders of the runs to compare against the base
    :param label_list: legend labels, parallel to path_list
    """
    vname = "TBAR"
    level = 0
    delta_small = 1e-6
    r = RPN(os.path.join(path_base, "{}_monthly_fields.rpn".format(vname)))
    data_base = r.get_4d_field_fc_hour_as_time(name=vname)
    r.close()
    data_base = _convert_dict_to_4d_arr(data_base)
    # mask near-zero (missing) values; the SNO branch is dead here since vname
    # is fixed to "TBAR" — kept for symmetry with the loop below
    to_mask = data_base < delta_small
    if vname == "SNO":
        to_mask = to_mask | (data_base > 1000)
    data_base = np.ma.masked_where(to_mask, data_base)
    fig = plt.figure(figsize=(15, 6))
    # domain mean at the chosen level, converted K -> degC
    data_base_ts = data_base.mean(axis=2).mean(axis=2)[:, level] - 273.15
    plt.plot(data_base_ts, label="base")
    for the_path, the_label in zip(path_list, label_list):
        r1 = RPN(os.path.join(the_path, "{}_monthly_fields.rpn".format(vname)))
        data1 = _convert_dict_to_4d_arr(r1.get_4d_field_fc_hour_as_time(name=vname))
        to_mask1 = (data1 < delta_small)
        if vname == "SNO":
            to_mask1 = to_mask1 | (data1 > 1000)
        data1 = np.ma.masked_where(to_mask1, data1)
        r1.close()
        data1_ts = data1.mean(axis=2).mean(axis=2)[:, level] - 273.15
        plt.plot(data1_ts, label=the_label)
        # difference vs base, drawn thicker
        plt.plot(data1_ts - data_base_ts, label=r"$\Delta$" + the_label, lw=5)
    # Shrink current axis by 20%
    ax = plt.gca()
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    # fig.tight_layout()
    fig.savefig("{0}_{1}_diag_1d_{2:%Y-%m-%d_%H}.png".format(vname, level, datetime.now()))
def main(path_param=None):
    """
    Append LON and LAT records (grid longitudes and latitudes) to an rpn file.

    :param path_param: path to the rpn file; when None the path is taken from
        sys.argv[1], falling back to a default test file if no argument is given
    """
    if path_param is None:
        if len(sys.argv) == 1:
            path = "/RESCUE/skynet3_rech1/huziy/NEI_geophysics/misc_fields/gridcell_areas/test.rpn"
        else:
            path = sys.argv[1]
    else:
        path = path_param

    with RPN(path, mode="a") as r:
        assert isinstance(r, RPN)

        # ignore coordinate records
        vname = [
            v for v in r.get_list_of_varnames() if v not in [">>", "^^", "HY"]
        ][0]

        # get any field just to get the metadata and coord indicators
        field = r.get_first_record_for_name(vname)
        lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()

        info = r.get_current_info()
        print(info)

        info["varname"] = "LON"
        r.write_2d_field_clean(lons, properties=info)

        info["varname"] = "LAT"
        # bugfix: the latitude record was previously written from `lons`
        r.write_2d_field_clean(lats, properties=info)
def get_lons_lats_basemap(rpnfile_path="", varname=None, index_subset=None):
    """
    Get longitudes, latitudes and the basemap object corresponding to the rpn file
    :param rpnfile_path:
    :param varname:
    :return:
    """
    with RPN(rpnfile_path) as r:
        assert isinstance(r, RPN)

        # If no variable is given, pick any non-coordinate record.
        if varname is None:
            varname = next(v for v in r.get_list_of_varnames()
                           if v not in [">>", "^^", "HY"])

        r.get_first_record_for_name(varname)
        lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()

        nx, ny = lons.shape
        # Default subset covers the whole grid.
        if index_subset is None:
            index_subset = IndexSubspace(i_start=0, i_end=nx - 1,
                                         j_start=0, j_end=ny - 1)

        islc = index_subset.get_islice()
        jslc = index_subset.get_jslice()
        rll = RotatedLatLon(**r.get_proj_parameters_for_the_last_read_rec())
        bmp = rll.get_basemap_object_for_lons_lats(lons2d=lons[islc, jslc],
                                                   lats2d=lats[islc, jslc])

    return lons, lats, bmp
def get_basemap(self, varname_internal, **bmap_kwargs):
    """
    Build a basemap object for the grid of the given (internal) variable name
    by reading one of its records from the samples folder.

    :param varname_internal: key into self.varname_mapping / self.varname_to_file_prefix
    :param bmap_kwargs: forwarded to RotatedLatLon.get_basemap_object_for_lons_lats
    """
    if self.data_source_type != data_source_types.SAMPLES_FOLDER_FROM_CRCM_OUTPUT:
        raise NotImplementedError("Not impelmented for the data_source_type = {}".format(self.data_source_type))

    for month_dir in self.base_folder.iterdir():
        if not month_dir.is_dir():
            continue

        for data_file in month_dir.iterdir():
            try:
                # skip files that do not contain the variable
                if varname_internal in self.varname_to_file_prefix:
                    prefix = self.varname_to_file_prefix[varname_internal]
                    if not data_file.name.startswith(prefix):
                        continue

                # print(self.varname_mapping)
                # print(data_file)
                with RPN(str(data_file)) as r:
                    r.get_first_record_for_name(self.varname_mapping[varname_internal])
                    lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
                    rll = RotatedLatLon(**r.get_proj_parameters_for_the_last_read_rec())
                    return rll.get_basemap_object_for_lons_lats(lons, lats, **bmap_kwargs)
            except Exception as exc:
                # Try to look into several files before giving up
                print(exc)
def get_target_lons_lats_basemap(run_config: RunConfig=None):
    """
    Read grid longitudes/latitudes and build the basemap from the first
    usable (non-hidden) file found under the run's data path.
    """
    base_dir = Path(run_config.data_path)

    for month_dir in base_dir.iterdir():
        if not month_dir.is_dir():
            continue

        for data_file in month_dir.iterdir():
            # skip hidden files
            if data_file.name.startswith("."):
                continue

            with RPN(str(data_file)) as r:
                assert isinstance(r, RPN)
                # take any non-coordinate record to define the grid
                vname = [v for v in r.get_list_of_varnames()
                         if v not in [">>", "^^", "HY"]][0]
                r.get_first_record_for_name(vname)
                lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
                rll = RotatedLatLon(**r.get_proj_parameters_for_the_last_read_rec())
                basemap = rll.get_basemap_object_for_lons_lats(lons2d=lons, lats2d=lats)
                return lons, lats, basemap
def __get_lons_lats_basemap_from_rpn(path=DEFAULT_PATH_FOR_GEO_DATA, vname="STBM",
                                     region_of_interest_shp=None, **bmp_kwargs):
    """
    :param path:
    :param vname:
    :return: get basemap object for the variable in the given file
    """
    with RPN(str(path)) as r:
        # read a record so the coordinate/projection queries refer to its grid
        _ = r.variables[vname][:]
        proj_params = r.get_proj_parameters_for_the_last_read_rec()
        lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()

    rll = RotatedLatLon(**proj_params)

    if region_of_interest_shp is None:
        bmp = rll.get_basemap_object_for_lons_lats(lons2d=lons, lats2d=lats, **bmp_kwargs)
    else:
        # Restrict the map to the bounding box of the region (plus a margin).
        mask = get_mask(lons, lats, region_of_interest_shp)
        delta_points = 10
        i_arr, j_arr = np.where(mask >= 0.5)
        i_min, i_max = i_arr.min() - delta_points, i_arr.max() + delta_points
        j_min, j_max = j_arr.min() - delta_points, j_arr.max() + delta_points
        slices = (slice(i_min, i_max + 1), slice(j_min, j_max + 1))
        bmp = rll.get_basemap_object_for_lons_lats(lons2d=lons[slices],
                                                   lats2d=lats[slices], **bmp_kwargs)

    return lons, lats, bmp
def _init_grid(self):
    """
    Find the first regular data file under the samples directory and cache
    the grid longitudes/latitudes and projection parameters on the instance.
    """
    non_data_records = [">>", "^^", "HY", "CONF", "GSET"]

    for month_dir in self.data_dir.iterdir():
        if not month_dir.is_dir():
            continue

        for data_file in month_dir.iterdir():
            # skip hidden entries and nested directories
            if data_file.name.startswith(".") or data_file.is_dir():
                continue

            with RPN(str(data_file)) as r:
                assert isinstance(r, RPN)
                vname = [v for v in r.get_list_of_varnames()
                         if v not in non_data_records][0]
                r.get_first_record_for_name(vname)
                self.lons, self.lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
                self.projection_params = r.get_proj_parameters_for_the_last_read_rec()

            # print(r.get_first_record_for_name("GSET"))
            # print(r.get_first_record_for_name("CONF"))
            return
def main():
    """
    Compute the multi-year mean active layer thickness (ALT) from yearly
    maximum soil temperature profiles and plot it on the model grid.
    """
    # data_folder = "/home/huziy/b2_fs2/sim_links_frm_Katja/Arctic_0.5deg_Peat_SC_26L_CanHR85_spn_Vspng_Diagnostics/1971-2000"
    data_folder = "/home/huziy/b2_fs2/sim_links_frm_Katja/Arctic_0.5deg_Peat_SC_26L_CanHR85_spn_Vspng_Diagnostics/2071-2100"
    var_name = "I0"

    # Soil layer thicknesses (m) of the 26-level configuration.
    layer_widths = [0.1, 0.2, 0.3, 0.4, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5,
                    1.0, 3.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, 5.0, ]
    print(len(layer_widths))

    crcm_data_manager = CRCMDataManager(layer_widths=layer_widths, data_folder=data_folder)

    # get the land mask (close the file once read - it was leaked before)
    r = RPN("/b1_fs2/winger/Arctic/land_sea_glacier_lake_mask_free")
    msk = r.get_first_record_for_name("FMSK")
    r.close()

    start_year = 2071
    end_year = 2100

    # Compute ALT for each year from that year's maximum temperature profile,
    # then average over the years; negative ALT values are treated as missing.
    alt_list = []
    for y in range(start_year, end_year + 1):
        tmax = crcm_data_manager.get_Tmax_profiles_for_year_using_monthly_means(y, var_name=var_name)
        alt1 = crcm_data_manager.get_alt(tmax)
        alt1[alt1 < 0] = np.nan
        alt_list.append(alt1)

    alt = np.mean(alt_list, axis=0)
    alt[np.isnan(alt)] = -1
    alt = np.ma.masked_where(alt < 0, alt)
    # mask out non-land points (msk presumably is a land fraction - TODO confirm)
    alt = np.ma.masked_where(msk < 0.1, alt)

    # Get the coordinates from any file in the data folder; a record has to be
    # read first so the coordinate queries refer to its grid.
    fcoord = os.listdir(data_folder)[0]
    fcoord = os.path.join(data_folder, fcoord)
    r = RPN(fcoord)
    _ = r.get_first_record_for_name(var_name)
    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    rll = RotatedLatLon(**r.get_proj_parameters_for_the_last_read_rec())
    r.close()

    basemap = rll.get_basemap_object_for_lons_lats(lons2d=lons2d, lats2d=lats2d)
    plot_values(basemap, lons2d, lats2d, alt,
                "{}-{}(Arctic-peat-26L)".format(start_year, end_year))
def __get_monthly_mean(vname="", month_dir: Path = None):
    """
    Return the time-mean field of *vname* from the first file in *month_dir*
    whose name matches the variable's file prefix (implicitly None when no
    matching file exists).
    """
    prefix = vname_to_prefix[vname]
    for data_file in month_dir.iterdir():
        if not data_file.name.startswith(prefix):
            continue
        with RPN(str(data_file)) as r:
            monthly = r.variables[vname][:].squeeze()
        return monthly.mean(axis=0)
def calc_mean_gem(period, folder_gem, exp, datai, dataf):
    """
    Compute seasonal-mean near-surface wind components and wind speed from
    GEM sample files.

    :param period: season name, one of "DJF", "JJA", "MAM", "SON"
    :param folder_gem: root folder containing the Samples directory
    :param exp: experiment name used in the sample sub-folder names
    :param datai: first year (inclusive)
    :param dataf: last year (inclusive)
    :return: (mean_uu, mean_vv, mean_speed, lons2d, lats2d)
    :raises ValueError: if period is not a known season
    """
    season_to_months = {
        "DJF": [12, 1, 2],
        "JJA": [6, 7, 8],
        "MAM": [3, 4, 5],
        "SON": [9, 10, 11],
    }
    if period not in season_to_months:
        # previously an unknown period left `months` undefined -> NameError later
        raise ValueError("Unknown period: {}".format(period))
    months = season_to_months[period]

    var_uu = []
    var_vv = []
    var_uv = []
    lons2d, lats2d = None, None

    for y in range(datai, dataf + 1):
        for m in months:
            file_gem = "{0}/Samples/{1}_{2}{3:02d}/dm*".format(
                folder_gem, exp, y, m)
            arqs = glob(file_gem)
            for f in arqs:
                r = RPN(f)
                # lowest level of the wind records, divided by 1.944
                # (presumably knots -> m/s - TODO confirm)
                uu = np.squeeze(r.variables['UU'][:])[-1, :, :] / 1.944
                vv = np.squeeze(r.variables['VV'][:])[-1, :, :] / 1.944
                uv = np.sqrt(np.power(uu, 2) + np.power(vv, 2))
                var_uu.append(uu)
                var_vv.append(vv)  # bugfix: was appended to var_uu
                var_uv.append(uv)  # bugfix: was appended to var_uu
                # read the coordinates once, from the first file processed
                if lons2d is None:
                    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
                r.close()

    var_uu = np.array(var_uu)
    var_vv = np.array(var_vv)
    var_uv = np.array(var_uv)
    return np.mean(var_uu, axis=0), np.mean(var_vv, axis=0), np.mean(var_uv, axis=0), lons2d, lats2d
def plot_only_vegetation_fractions(
        data_path="/RESCUE/skynet3_rech1/huziy/geof_lake_infl_exp/geophys_Quebec_0.1deg_260x260_with_dd_v6_with_ITFS",
        canopy_name="VF",
        label="QC_10km"):
    """
    Read the vegetation-fraction field on all levels from the geophysics file
    and plot it; the image goes next to the input file.
    """
    r = RPN(data_path)
    veg_fractions = r.get_2D_field_on_all_levels(name=canopy_name)
    print(list(veg_fractions.keys()))
    proj_params = r.get_proj_parameters_for_the_last_read_rec()
    lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    print(lons.shape)

    rll = RotatedLatLon(lon1=proj_params["lon1"], lat1=proj_params["lat1"],
                        lon2=proj_params["lon2"], lat2=proj_params["lat2"])
    lon0, lat0 = rll.get_true_pole_coords_in_rotated_system()
    plon, _ = rll.get_north_pole_coords()

    # Build the rotated-pole basemap over the grid's corner coordinates.
    bmap = Basemap(projection="rotpole",
                   llcrnrlon=lons[0, 0], llcrnrlat=lats[0, 0],
                   urcrnrlon=lons[-1, -1], urcrnrlat=lats[-1, -1],
                   lon_0=lon0 - 180, o_lon_p=lon0, o_lat_p=lat0)

    # maskoceans expects longitudes in [-180, 180]
    lons[lons > 180] -= 360
    for lev in list(veg_fractions.keys()):
        veg_fractions[lev] = maskoceans(lons, lats, veg_fractions[lev],
                                        inlands=False)

    x, y = bmap(lons, lats)
    out_image = os.path.join(os.path.dirname(data_path),
                             "veg_fractions_{0}.png".format(label))
    plot_veg_fractions(x, y, bmap, veg_fractions, out_image=out_image)
def plot_lake_fraction(
        path="data/from_guillimin/vary_lake_level1/pm1985010100_00000000p",
        var_name="LF1",
        lons2d=None, lats2d=None, basemap=None):
    """
    Read the lake-fraction field from *path* and plot it with contour levels
    from 0 to 1 in steps of 0.1.
    """
    rpn_file = RPN(path)
    field = rpn_file.get_first_record_for_name(var_name)
    rpn_file.close()

    _plot_depth(field, lons2d, lats2d, basemap=basemap,
                clevels=np.arange(0, 1.1, 0.1), lowest_value=0.001)
def main():
    """
    Read lake outlets, lake fraction and flow directions from the geophysics
    file, derive the global-lakes mask and plot it together with the
    remaining (non-global) lake fraction.
    """
    folder = "/home/huziy/skynet3_rech1/geof_lake_infl_exp"
    fName = "geophys_Quebec_0.1deg_260x260_with_dd_v6"
    path = os.path.join(folder, fName)
    glob_lakefr_limit = 0.6

    # strip the 7-point halo on each side of the grid
    halo = slice(7, -7)

    r = RPN(path)
    lkou = r.get_first_record_for_name("LKOU")[halo, halo]
    print("lkou(min-max):", lkou.min(), lkou.max())
    print("n_outlets = {0}".format(lkou.sum()))

    lkfr = r.get_first_record_for_name("LKFR")[halo, halo]
    print("lkfr(min-max):", lkfr.min(), lkfr.max())

    dirs = r.get_first_record_for_name("FLDR")[halo, halo]
    print("fldr(min-max):", dirs.min(), dirs.max())
    r.close()

    lakes_mask = get_glob_lakes_mask(dirs, lakefr=lkfr, lake_outlets=lkou,
                                     glob_lakefr_limit=glob_lakefr_limit)
    lakes_mask = np.ma.masked_where(lakes_mask < 0, lakes_mask)

    plt.pcolormesh(lakes_mask.transpose())
    plt.colorbar()

    plt.figure()
    plt.pcolormesh(np.ma.masked_where(lkfr >= 0.6, lkfr).transpose())
    plt.show()
def main():
    """
    Convert the MK field of an rpn file (plus grid longitudes/latitudes)
    to a netCDF file written next to the input.
    """
    path = "/skynet1_rech3/huziy/Converters/NetCDF_converter/mappe.rpnw"
    #path = "/skynet1_rech3/huziy/Converters/NetCDF_converter/champs_st.rpnw"

    r = RPN(path)
    mk = r.get_first_record_for_name("MK")
    print(r.get_dateo_of_last_read_record())
    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    r.close()

    # Write to netcdf file, dropping a 10-point margin on each side
    margin = slice(10, -10)
    fields = [v[margin, margin] for v in (mk, lons2d, lats2d)]

    ds = Dataset(path + ".nc", mode="w")
    ni, nj = fields[0].shape
    ds.createDimension("lon", ni)
    ds.createDimension("lat", nj)

    for the_name, field in zip(["MK", "longitude", "latitude"], fields):
        nc_var = ds.createVariable(the_name, "f4", ("lat", "lon"))
        nc_var[:] = field.transpose()

    ds.close()
def main(
        path="/skynet3_rech1/huziy/geof_lake_infl_exp/geophys_Quebec_0.1deg_260x260_with_dd_v6_with_ITFS"
):
    """
    Plot the ITFS field from the geophysics file: a map of the (masked)
    field on the rotated grid and a histogram of its non-negative values.

    :param path: path to the rpn geophysics file
    """
    r = RPN(path)
    varnames = ["ITFS"]

    plt.figure()
    varname_to_field = {}
    for vname in varnames:
        data = r.get_first_record_for_name(vname)
        varname_to_field[vname] = data
        # negative values are treated as missing for the map plot
        data = np.ma.masked_where(data < 0, data)

    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    params = r.get_proj_parameters_for_the_last_read_rec()
    print(params)
    # all reads are done - close now instead of after the blocking plt.show()
    r.close()

    rll = RotatedLatLon(**params)
    b = rll.get_basemap_object_for_lons_lats(lons2d, lats2d)
    x, y = b(lons2d, lats2d)
    b.drawcoastlines()
    b.pcolormesh(x, y, data)
    b.colorbar()

    plt.figure()
    itfs = varname_to_field["ITFS"]
    plt.hist(itfs[itfs >= 0], bins=100)
    plt.show()