def test_nbits24():
    path = "/skynet3_rech1/huziy/geophys_West_NA_0.25deg_144x115.fst"
    if not os.path.isfile(path):
        return

    r = RPN(path=path)
    data = r.get_first_record_for_name_and_level(varname="VF", level=2)
    print(data.shape, data.max(), data.min(), data.mean(), data.var())
    ok_(data.max() <= 1)

    try:
        proc = subprocess.Popen(["r.diag", "ggstat", path], stdout=subprocess.PIPE)
        out, _ = proc.communicate()
    except OSError:
        # r.diag is an external tool; skip the cross-check when it cannot be launched
        print("Warning: Could not find r.diag, this is not critical, but some tests will not be run.")
        return

    lines = out.decode().split("\n")
    lines = [line for line in lines if ("VF" in line) and ("2 ar" in line)]
    fields = lines[0].split()

    the_mean = float(fields[12])
    the_var = float(fields[13])

    ok_(abs(data.mean() - the_mean) < 1e-6, msg="The mean does not correspond to ggstat")
    ok_(abs(data.var() - the_var) < 1e-6, msg="The variance does not correspond to ggstat")
    r.close()
def get_mean_over_months_of_2d_var(self, start_year, end_year, months=None,
                                   var_name="", level=-1, level_kind=-1):
    """
    level = -1 means any level
    """
    if months is None:
        # assume all months when none are specified
        months = list(range(1, 13))

    monthly_means = []
    for the_year in range(start_year, end_year + 1):
        for the_month in months:
            path = self.yearmonth_to_data_path[(the_year, the_month)]
            print("{0}/{1} -> {2}".format(the_year, the_month, path))
            rpn_obj = RPN(path)
            records = rpn_obj.get_all_time_records_for_name_and_level(
                varname=var_name, level=level, level_kind=level_kind)
            monthly_means.append(np.mean(list(records.values()), axis=0))
            rpn_obj.close()

    return np.mean(monthly_means, axis=0)
def get_bow_river_basin_mask(
        path="/RESCUE/skynet3_rech1/huziy/CNRCWP/Calgary_flood/Bow_river_basin_mask_NA_0.11deg.rpn"):
    r = RPN(path)
    msk = r.get_first_record_for_name("FMSK")
    r.close()
    return msk
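# Hedged usage sketch for get_bow_river_basin_mask(): assumes the default mask
# file is readable and matplotlib is available; the quick-look plot is illustrative.
def _example_show_bow_mask():
    import matplotlib.pyplot as plt
    msk = get_bow_river_basin_mask()
    plt.pcolormesh(msk.transpose())
    plt.colorbar()
    plt.show()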
def main():
    varname_to_rpn_name = {"precipitation": "PR", "relativeError": "RERR"}
    varnames = list(varname_to_rpn_name.keys())

    target_dir = "/skynet3_rech1/huziy/from_hdf4"
    source_dir = "/st1_fs2/winger/Validation/TRMM/HDF_format"

    for f_name in os.listdir(source_dir):
        if not f_name.endswith("HDF"):
            continue
        path = os.path.join(source_dir, f_name)
        ds = SD(path)
        print(ds.datasets())

        target_path = os.path.join(target_dir, f_name + ".rpn")
        r_obj = RPN(target_path, mode="w")
        for varname in varnames:
            var_data = ds.select(varname)[0, :, :]
            r_obj.write_2D_field(name=varname_to_rpn_name[varname],
                                 data=var_data, label=varname, grid_type="L",
                                 ig=[25, 25, 4013, 18012])
        r_obj.close()
def get_mean_2d_from_climatologies(cls, path="", file_prefixes=None,
                                   file_suffixes=None, var_name=""):
    """
    When you have a folder with climatologies, use this method
    """
    field_list = []

    if file_prefixes is None:
        file_prefixes = os.listdir(path)

    if file_suffixes is None:
        file_suffixes = os.listdir(path)

    for file_name in os.listdir(path):
        prefix_ok = False
        suffix_ok = False

        for p in file_prefixes:
            if file_name.startswith(p):
                prefix_ok = True
                break

        for s in file_suffixes:
            if file_name.endswith(s):
                suffix_ok = True
                break

        if prefix_ok and suffix_ok:
            rpn_obj = RPN(os.path.join(path, file_name))
            data = rpn_obj.get_first_record_for_name(var_name)
            rpn_obj.close()
            field_list.append(data)

    return np.array(field_list).mean(axis=0)
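# Hedged usage sketch for get_mean_2d_from_climatologies(): the folder, the
# prefix/suffix filters, and the owning manager class are assumptions here.
def _example_climatology_mean(manager_class):
    mean_pr = manager_class.get_mean_2d_from_climatologies(
        path="/path/to/climatologies",   # hypothetical folder of RPN files
        file_prefixes=["pm", ],          # hypothetical name filters
        file_suffixes=["moyenne", ],
        var_name="PR")
    print(mean_pr.shape, mean_pr.mean())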
def main(path="/skynet3_rech1/huziy/geof_lake_infl_exp/geophys_Quebec_0.1deg_260x260_with_dd_v6_with_ITFS"):
    r = RPN(path)
    varnames = ["ITFS"]

    ncols = 3
    nrows = len(varnames) // 3

    fig = plt.figure()
    varname_to_field = {}
    for vname in varnames:
        data = r.get_first_record_for_name(vname)
        varname_to_field[vname] = data
        data = np.ma.masked_where(data < 0, data)

        lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
        params = r.get_proj_parameters_for_the_last_read_rec()
        print(params)
        rll = RotatedLatLon(**params)
        b = rll.get_basemap_object_for_lons_lats(lons2d, lats2d)
        x, y = b(lons2d, lats2d)
        b.drawcoastlines()
        img = b.pcolormesh(x, y, data)
        b.colorbar()

    fig = plt.figure()
    itfs = varname_to_field["ITFS"]
    plt.hist(itfs[itfs >= 0], bins=100)

    plt.show()
    r.close()
def _get_topography():
    path = "/skynet3_rech1/huziy/geofields_interflow_exp/geophys_Quebec_0.1deg_260x260_with_dd_v6_with_ITFS"
    from rpn.rpn import RPN
    r = RPN(path=path)
    data = r.get_first_record_for_name_and_level("ME", level=0)
    r.close()
    return data
def get_basemap_and_coords(
        file_path="data/CORDEX/NorthAmerica_0.44deg_CanHistoE1/Samples/NorthAmerica_0.44deg_CanHistoE1_198101/pm1950010100_00816912p",
        lon1=-97.0, lat1=47.50, lon2=-7, lat2=0,
        llcrnrlon=None, llcrnrlat=None, urcrnrlon=None, urcrnrlat=None,
        resolution="l", anchor="W", projection="omerc", round=False):
    rpnObj = RPN(file_path)
    lons2D, lats2D = rpnObj.get_longitudes_and_latitudes()
    rpnObj.close()

    the_ll_lon = lons2D[0, 0] if llcrnrlon is None else llcrnrlon
    the_ll_lat = lats2D[0, 0] if llcrnrlat is None else llcrnrlat
    the_ur_lon = lons2D[-1, -1] if urcrnrlon is None else urcrnrlon
    the_ur_lat = lats2D[-1, -1] if urcrnrlat is None else urcrnrlat

    return Basemap(projection=projection, resolution=resolution,
                   llcrnrlon=the_ll_lon, llcrnrlat=the_ll_lat,
                   urcrnrlon=the_ur_lon, urcrnrlat=the_ur_lat,
                   lat_1=lat1, lon_1=lon1, lat_2=lat2, lon_2=lon2,
                   no_rot=True, anchor=anchor), lons2D, lats2D
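# Hedged usage sketch for get_basemap_and_coords(): relies on the function's
# default CORDEX sample file; any readable RPN file path would do.
def _example_plot_domain_outline():
    import matplotlib.pyplot as plt
    b, lons2d, lats2d = get_basemap_and_coords()
    x, y = b(lons2d, lats2d)
    b.drawcoastlines()
    b.scatter(x[::10, ::10], y[::10, ::10], s=1)
    plt.show()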
def get_bulk_field_capacity(
        path="/skynet3_rech1/huziy/geofields_interflow_exp/pm1979010100_00000000p"):
    r = RPN(path)
    data = r.get_first_record_for_name("D9")
    r.close()
    return data
def test_polar_stereographic():
    """
    Testing polar stereographic grid functions
    """
    path = get_input_file_path("mappe.rpnw", the_dir)
    r = None
    try:
        r = RPN(path)
        mk = r.get_first_record_for_name("MK")
        # print(r.get_proj_parameters_for_the_last_read_rec())
        lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()

        amno_link = "http://www.cccma.ec.gc.ca/data/grids/geom_crcm_amno_182x174.shtml"
        msg_tpl = "Generated longitudes are not the same as {0}".format(amno_link)
        msg_tpl += "\n Expected: {0}"
        msg_tpl += "\n Got: {1}"

        # test with expected values from the EC website
        expect = 226.50 - 360.0
        msg = msg_tpl.format(expect, lons[10, 10])
        ok_(np.abs(lons[10, 10] - expect) < 1.0e-2, msg=msg)

        # latitudes
        expect = 41.25
        msg = msg_tpl.format(expect, lats[-11, -11])
        ok_(np.abs(lats[-11, -11] - expect) < 1.0e-2, msg=msg)
    finally:
        if r is not None:
            r.close()
def extract_field(name="VF", level=3, in_file="", out_file=None, margin=0):
    if out_file is None:
        out_file = in_file + "_lf.nc"

    rObj = RPN(in_file)
    field = rObj.get_first_record_for_name_and_level(varname=name, level=level)
    lons2d, lats2d = rObj.get_longitudes_and_latitudes_for_the_last_read_rec()
    rObj.close()

    lons2d[lons2d > 180] -= 360.0

    ds = nc.Dataset(out_file, "w", format="NETCDF3_CLASSIC")

    nx, ny = field.shape
    ds.createDimension("lon", nx - margin)
    ds.createDimension("lat", ny - margin)

    var = ds.createVariable(name, "f4", dimensions=("lon", "lat"))
    lonVar = ds.createVariable("longitude", "f4", dimensions=("lon", "lat"))
    latVar = ds.createVariable("latitude", "f4", dimensions=("lon", "lat"))

    var[:] = field[:nx - margin, :ny - margin]
    lonVar[:] = lons2d[:nx - margin, :ny - margin]
    latVar[:] = lats2d[:nx - margin, :ny - margin]

    ds.close()
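# Hedged usage sketch for extract_field(): the input path is a placeholder; by
# default the output lands next to the input as <in_file>_lf.nc.
def _example_extract_vf():
    extract_field(name="VF", level=3,
                  in_file="/path/to/geophys_file.rpn",  # hypothetical RPN file
                  margin=20)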
def extract_runoff_to_netcdf_file(filePath='data/pm1957090100_00589248p', outDir=None):
    surface_runoff_name = 'TRAF'
    subsurface_runoff_name = 'TDRA'
    level_tdra = 5
    level_traf = 5

    print(filePath)

    # get data from the rpn file
    rpnObj = RPN(filePath)
    assert rpnObj.get_number_of_records() > 4, filePath

    surfRunoff = rpnObj.get_first_record_for_name_and_level(surface_runoff_name, level=level_traf)
    subSurfRunoff = rpnObj.get_first_record_for_name_and_level(subsurface_runoff_name, level=level_tdra)

    nx, ny = surfRunoff.shape

    ncFile = nc.Dataset(filePath + '.nc', 'w', format='NETCDF3_CLASSIC')
    ncFile.createDimension('lon', nx)
    ncFile.createDimension('lat', ny)

    surfRunoffVar = ncFile.createVariable(surface_runoff_name, 'f', ('lon', 'lat'))
    subSurfRunoffVar = ncFile.createVariable(subsurface_runoff_name, 'f', ('lon', 'lat'))

    subSurfRunoffVar[:] = subSurfRunoff
    surfRunoffVar[:] = surfRunoff

    ncFile.forecast_hour = rpnObj.get_current_validity_date()
    ncFile.close()
    rpnObj.close()
def get_land_sea_glaciers_mask_from_geophysics_file(
        path="/b10_fs1/winger/Arctic/OMSC26_Can_long_new_v01/Geophys/land_sea_glacier_mask_free"):
    r = RPN(path)
    mask = r.get_first_record_for_name("FMSK") < 0.5
    r.close()
    return mask
def main():
    path = "/skynet1_rech3/huziy/Converters/NetCDF_converter/mappe.rpnw"
    # path = "/skynet1_rech3/huziy/Converters/NetCDF_converter/champs_st.rpnw"
    r = RPN(path)
    mk = r.get_first_record_for_name("MK")
    print(r.get_dateo_of_last_read_record())
    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    r.close()

    # Write to netcdf file
    ds = Dataset(path + ".nc", mode="w")

    # subset
    vars_list = [mk, lons2d, lats2d]
    vars_list = [v[10:-10, 10:-10] for v in vars_list]

    ni, nj = vars_list[0].shape
    ds.createDimension("lon", ni)
    ds.createDimension("lat", nj)

    var_names = ["MK", "longitude", "latitude"]
    for the_name, field in zip(var_names, vars_list):
        ncVar = ds.createVariable(the_name, "f4", ("lat", "lon"))
        ncVar[:] = field.transpose()

    ds.close()
def combine():
    out_folder = "/skynet1_rech3/huziy/Converters/NetCDF_converter/"

    path1 = "/skynet1_rech3/huziy/Converters/NetCDF_converter/mappe.rpnw"
    r = RPN(path1)
    mask1 = r.get_first_record_for_name("MK")[10:-10, 10:-10]
    r.close()

    path2 = "/skynet1_rech3/huziy/Converters/NetCDF_converter/champs_st.rpnw"
    r = RPN(path2)
    mk = r.get_first_record_for_name("MK")
    print(r.get_dateo_of_last_read_record())
    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    r.close()

    # combine the masks
    mk = mk * 7 + mask1

    # Write to netcdf file
    ds = Dataset(os.path.join(out_folder, "mask_combined.nc"), mode="w")

    # subset
    vars_list = [mk, lons2d, lats2d]
    ni, nj = vars_list[0].shape
    ds.createDimension("lon", ni)
    ds.createDimension("lat", nj)

    var_names = ["MK", "longitude", "latitude"]
    for the_name, field in zip(var_names, vars_list):
        ncVar = ds.createVariable(the_name, "f4", ("lat", "lon"))
        ncVar[:] = field.transpose()

    ds.close()
def demo_north_pole():
    r = RPN(path="/home/huziy/skynet3_rech1/classOff_Andrey/era2/temp_3d")
    t = r.get_first_record_for_name("I0")
    lon, lat = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    r.close()

    nx, ny = lon.shape
    lon_0, lat_0 = lon[nx // 2, ny // 2], lat[nx // 2, ny // 2]

    basemap = Basemap(projection="omerc", lon_1=60, lat_1=89.999, lon_2=-30,
                      lat_2=0, no_rot=True, lon_0=lon_0, lat_0=lat_0,
                      llcrnrlon=lon[0, 0], llcrnrlat=lat[0, 0],
                      urcrnrlon=lon[-1, -1], urcrnrlat=lat[-1, -1])
    x, y = basemap(lon, lat)
    basemap.contourf(x, y, t)
    basemap.drawcoastlines()
    basemap.colorbar()
    # basemap.shadedrelief()
    plt.show()
def test_write_specified_projection():
    """
    Should determine the projection params (ig1,2,3,4) for the rpn file,
    save them and not fail
    """
    lon1 = 180
    lat1 = 0.0
    lon2 = -84
    lat2 = 1.0

    wfile = "test.rpn"
    try:
        r = RPN(wfile, mode="w")
        nx = ny = 10
        arr = np.zeros((nx, ny), dtype="f4")
        for i in range(nx):
            for j in range(ny):
                arr[i, j] = i ** 2 + j ** 2

        print("Z".encode())
        r.write_2D_field(
            name="TEST", data=arr,
            data_type=data_types.compressed_floating_point, nbits=-16,
            lon1=lon1, lon2=lon2, lat1=lat1, lat2=lat2, grid_type=b"E")
        r.close()
    finally:
        os.remove(wfile)
def save_mask_to_rpn(mask_field, in_file="", out_file=""):
    rin = RPN(in_file)
    rout = RPN(out_file, mode="w")

    # Read coordinates and reshape (needed for writing)
    x = rin.get_first_record_for_name(">>")
    x.shape = (-1, 1)
    print(x.shape)
    y = rin.get_first_record_for_name("^^")
    y.shape = (1, -1)

    # get parameters of the last read record
    coord_info = rin.get_current_info()
    print(coord_info)

    # write coordinates
    coord_info.update({"name": ">>", "label": "NGP", "typ_var": "X",
                       "nbits": -coord_info["nbits"]})
    rout.write_2d_field_clean(x, properties=coord_info)

    coord_info.update({"name": "^^"})
    rout.write_2d_field_clean(y, properties=coord_info)

    # write the mask
    rout.write_2d_field_clean(
        mask_field,
        properties=dict(name="FMSK", label="NGP_MASK",
                        ig=coord_info["ip"] + [0, ]))

    rin.close()
    rout.close()
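# Hedged usage sketch for save_mask_to_rpn(): builds a crude land mask from an
# existing FMSK record; both paths are placeholders for illustration.
def _example_save_land_mask():
    template = "/path/to/template.rpn"  # hypothetical file with ">>"/"^^" records
    r = RPN(template)
    fmsk = r.get_first_record_for_name("FMSK")
    r.close()
    save_mask_to_rpn((fmsk > 0.5).astype("f4"),
                     in_file=template, out_file="/path/to/mask_out.rpn")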
def test_write_rpn_compressed():
    wfile = "test.rpn"
    try:
        r = RPN(wfile, mode="w")
        nx = ny = 10
        arr = np.zeros((nx, ny), dtype="f4")
        for i in range(nx):
            for j in range(ny):
                arr[i, j] = i ** 2 + j ** 2

        r.write_2D_field(
            name="TEST", data=arr,
            data_type=data_types.compressed_floating_point, nbits=-16)
        r.close()

        if is_rdiag_available():
            proc = subprocess.Popen(["r.diag", "ggstat", wfile], stdout=subprocess.PIPE)
            (out, err) = proc.communicate()
            out = out.decode()
            print(out)
            print(type(out), type("some str"))

            ok_("{:E}".format(arr.max()) in out,
                "Could not find the max={:E} in {}".format(arr.max(), out))
            ok_("{:E}".format(arr.min()) in out,
                "Could not find the min={:E} in {}".format(arr.min(), out))
            ok_("{:E}".format(arr.mean()) in out,
                "Could not find the mean={:E} in {}".format(arr.mean(), out))
            ok_("{}".format(16) in out, "Could not find 16 in the ggstat output")

            print("{:E}".format(arr.mean()), "{:E}".format(arr.min()), "{:E}".format(arr.max()))
    finally:
        os.remove(wfile)
def test_write_field_2d_clean():
    """
    Testing write 2d field
    """
    import os

    tfile = "temp.rpn"
    r = None
    try:
        r = RPN(tfile, mode="w")
        data = np.random.randn(10, 10)
        data = data.astype(np.float32)
        r.write_2d_field_clean(data, properties={"name": "RAND"})
        r.close()

        r = RPN(tfile)
        data1 = r.get_first_record_for_name("RAND")

        v0, v1 = data.mean(), data1.mean()
        ok_(abs(v1 - v0) <= 1e-6,
            "Saved ({0}) and retrieved ({1}) means are not the same.".format(v0, v1))
    finally:
        if r is not None:
            r.close()
        os.remove(tfile)
def get_seasonal_mean_for_year_of_2d_var(self, the_year, months=None, var_name=""):
    """
    Return the mean over the given months of a 2d field,
    as a numpy array of dimensions (x, y)
    """
    monthly_means = []
    for the_month in months:
        key = (the_year, the_month)
        if key not in self.yearmonth_to_data_path:
            print("Warning: do not have data for {0}/{1}".format(the_year, the_month))
            continue

        path = self.yearmonth_to_data_path[key]
        rpn_obj = RPN(path)
        records = rpn_obj.get_all_time_records_for_name(varname=var_name)
        monthly_means.append(np.mean(list(records.values()), axis=0))
        rpn_obj.close()

    result = np.mean(monthly_means, axis=0)
    print(the_year, result.min(), result.max())
    return result
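# Hedged usage sketch for get_seasonal_mean_for_year_of_2d_var(): "manager" stands
# for the object that owns yearmonth_to_data_path; year and variable are illustrative.
def _example_winter_mean(manager):
    djf_snow = manager.get_seasonal_mean_for_year_of_2d_var(
        2000, months=[12, 1, 2], var_name="I5")
    print(djf_snow.min(), djf_snow.max())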
def main():
    folder = "/home/huziy/skynet3_rech1/geof_lake_infl_exp"
    fName = "geophys_Quebec_0.1deg_260x260_with_dd_v6"
    path = os.path.join(folder, fName)

    rObj = RPN(path)
    glob_lakefr_limit = 0.6

    lkou = rObj.get_first_record_for_name("LKOU")[7:-7, 7:-7]
    print("lkou(min-max):", lkou.min(), lkou.max())
    print("n_outlets = {0}".format(lkou.sum()))

    lkfr = rObj.get_first_record_for_name("LKFR")[7:-7, 7:-7]
    print("lkfr(min-max):", lkfr.min(), lkfr.max())

    dirs = rObj.get_first_record_for_name("FLDR")[7:-7, 7:-7]
    print("fldr(min-max):", dirs.min(), dirs.max())
    rObj.close()

    lakes_mask = get_glob_lakes_mask(dirs, lakefr=lkfr, lake_outlets=lkou,
                                     glob_lakefr_limit=glob_lakefr_limit)
    lakes_mask = np.ma.masked_where(lakes_mask < 0, lakes_mask)

    plt.pcolormesh(lakes_mask.transpose())
    plt.colorbar()

    plt.figure()
    plt.pcolormesh(np.ma.masked_where(lkfr >= 0.6, lkfr).transpose())
    plt.show()
def main():
    folder = "/home/huziy/skynet3_exec1/modify_igs_in_rpn_file"
    in_file = "ANAL_NorthAmerica_0.44deg_MPIRCP45_B1_100_2070120100"
    out_file = in_file + "_ig_changed"

    rObjIn = RPN(os.path.join(folder, in_file))
    rObjOut = RPN(os.path.join(folder, out_file), mode="w")

    ig_to_change = [1375, 0, 56480, 56480]
    new_ig = [499, 1064, 0, 0]

    data = []
    i = 0
    while data is not None:
        data = rObjIn.get_next_record()
        if data is None:
            break

        info = rObjIn.get_current_info()
        nbits = info["nbits"].value
        data_type = info["data_type"].value
        if nbits > 0:
            nbits = -nbits

        print("nbits = {0}, data_type = {1}".format(nbits, data_type))

        ips = [x.value for x in info["ip"]]
        npas = info["npas"].value
        deet = info["dt_seconds"].value
        dateo = info["dateo"]

        igold = [int(ig.value) for ig in info["ig"]]
        if igold == ig_to_change:
            info["ig"] = [c_int(ig) for ig in new_ig]

        rObjOut.write_2D_field(name=info["varname"].value, data=data, ip=ips,
                               ig=[x.value for x in info["ig"]], npas=npas,
                               deet=deet, label="", dateo=dateo,
                               grid_type=info["grid_type"].value,
                               typ_var=info["var_type"].value,
                               nbits=nbits, data_type=data_type)
        i += 1

    # check that all fields were copied
    nRecsIn = rObjIn.get_number_of_records()
    assert i == nRecsIn, "copied {0} records, but should be {1}".format(i, nRecsIn)
    rObjIn.close()
    rObjOut.close()
def get_depth_to_bedrock(
        path="/home/huziy/skynet1_rech3/cordex/NorthAmerica_0.44deg_ERA40-Int_195801_static_data.rpn"):
    # read depth to bedrock field
    rObj = RPN(path)
    dpth_to_bdrck = rObj.get_first_record_for_name("8L")
    rObj.close()
    return dpth_to_bdrck
def main():
    # path = "/b2_fs2/huziy/OMSC26_Can_long_new_v01/pm1958010100_02275344p"
    path = "/b2_fs2/huziy/OMSC26_Can_long_new_v01/pm1958010100_00008640p"
    r = RPN(path)

    res_date = c_int()
    res_time = c_int()
    mode = c_int(-3)

    # decode a few known date stamps
    for dateo in [10158030, 488069900, 1069261100, 632053700]:
        r._dll.newdate_wrapper(byref(c_int(dateo)), byref(res_date),
                               byref(res_time), byref(mode))
        s_date = "{0:08d}{1:08d}".format(res_date.value, res_time.value)
        print("stamp: {0:09d}, result: {1}".format(dateo, s_date))

    ts = r.get_all_time_records_for_name("TRAF")
    times = list(sorted(ts.keys()))
    print(times[:20])
    print(times[0], times[-1])
    r.close()

    folderPath = "/b2_fs2/huziy/OMSC26_ERA40I_long_new_v02/"
    for fName in os.listdir(folderPath):
        if not fName.startswith("pm"):
            continue
        fPath = os.path.join(folderPath, fName)
        r = RPN(fPath)
        r.suppress_log_messages()
        data = r.get_all_time_records_for_name(varname="TDRA")
        r.close()
        print(fName)
        print(sorted(data.keys())[:5])
        print(25 * "*")
        input("press any key")
def plot_lake_fraction(path="data/from_guillimin/vary_lake_level1/pm1985010100_00000000p",
                       var_name="LF1", lons2d=None, lats2d=None, basemap=None):
    r = RPN(path)
    field = r.get_first_record_for_name(var_name)
    r.close()
    _plot_depth(field, lons2d, lats2d, basemap=basemap,
                clevels=np.arange(0, 1.1, 0.1), lowest_value=0.001)
def select_last_year(inPath, outPath=None, label="last 6 year", npas_range=None):
    rObjIn = RPN(inPath)
    if outPath is None:
        outPath = inPath + "_last_year"
    rObjOut = RPN(outPath, mode="w")

    data = []
    i = 0
    while data is not None:
        data = rObjIn.get_next_record()
        if data is None:
            break

        info = rObjIn.get_current_info()
        nbits = info["nbits"].value
        deet = info["dt_seconds"].value
        data_type = info["data_type"].value
        npas = info["npas"].value
        varname = info["varname"].value

        # skip records outside the requested time-step range;
        # coordinate records ">>" and "^^" are always kept
        if npas_range is not None and npas not in npas_range \
                and varname.strip() not in [">>", "^^"]:
            continue

        print(npas)
        dateo = info["dateo"]
        if nbits > 0:
            nbits = -nbits

        print("nbits = {0}, data_type = {1}".format(nbits, data_type))
        rObjOut.write_2D_field(name=varname, data=data,
                               ip=[x.value for x in info["ip"]],
                               ig=[x.value for x in info["ig"]],
                               npas=npas, deet=deet, label=label, dateo=dateo,
                               grid_type=info["grid_type"].value,
                               typ_var=info["var_type"].value,
                               nbits=nbits, data_type=data_type)
        i += 1

    # check that all fields were copied
    nRecsIn = rObjIn.get_number_of_records()
    # assert i == nRecsIn, "copied {0} records, but should be {1}".format(i, nRecsIn)
    rObjIn.close()
    rObjOut.close()
def main():
    # path = "/home/huziy/skynet3_rech1/test/snw_LImon_NA_CRCM5_CanESM2_historical_r1i1p1_185001-200512.rpn"
    path = "/home/sheena/skynet3_exec2/RPN/src/permafrost/snw_NA_CRCM5_CanESM2_rcp45_r1i1p1_200601-210012.rpn"

    months = [1, 2, 12]
    varname = "I5"

    rObj = RPN(path)
    records = rObj.get_all_time_records_for_name(varname=varname)
    lons2d, lats2d = rObj.get_longitudes_and_latitudes()
    rObj.close()

    times = sorted(records.keys())
    vals = np.array([records[t] for t in times])

    year_range = list(range(2006, 2101))
    nc_file_name = "{0:s}_{1:d}_{2:d}.nc".format(varname, year_range[0], year_range[-1])
    nx, ny = vals[0].shape

    # create netcdf file
    ds = Dataset(nc_file_name, "w", format='NETCDF3_CLASSIC')
    ds.createDimension('lon', nx)
    ds.createDimension('lat', ny)
    ds.createDimension("year", len(year_range))

    the_var = ds.createVariable(varname, 'f', ("year", 'lat', 'lon'))
    the_lon = ds.createVariable("xlon", 'f', ('lat', 'lon'))
    the_lat = ds.createVariable("xlat", 'f', ('lat', 'lon'))

    for i, the_year in enumerate(year_range):
        bool_vector = np.array([t.year == the_year and t.month in months for t in times])
        the_var[i, :, :] = np.mean(vals[bool_vector], axis=0).transpose()

    the_lon[:] = lons2d[:, :].transpose()
    the_lat[:] = lats2d[:, :].transpose()
    ds.close()
def main():
    dateo = "19580101000000"
    npas = 552240
    deet = 1200
    ip2 = 184080
    ip2old = 184086

    in_file = "anal_NorthAmerica_0.44deg_ERA40-Int1.5_B1_rmn13_and_Class_1979010100_2"
    out_file = "anal_NorthAmerica_0.44deg_ERA40-Int1.5_B1_rmn13_and_Class_1979010100_dates_same"
    folder = "/home/huziy/skynet3_rech1/init_cond_for_lake_infl_exp"

    rObjIn = RPN(os.path.join(folder, in_file))
    rObjOut = RPN(os.path.join(folder, out_file), mode="w")

    data = []
    i = 0
    while data is not None:
        data = rObjIn.get_next_record()
        if data is None:
            break

        info = rObjIn.get_current_info()
        nbits = info["nbits"].value
        data_type = info["data_type"].value
        if nbits > 0:
            nbits = -nbits

        print("nbits = {0}, data_type = {1}".format(nbits, data_type))

        ips = info["ip"]
        if ips[1] == ip2old:
            ips[1] = ip2
        ips[2] = 0  # since ip3 is 0 there

        # convert soil temperature to Kelvins
        if info["varname"].value.strip() == "I0":
            data += 273.15

        rObjOut.write_2D_field(name=info["varname"], data=data, ip=ips,
                               ig=info["ig"], npas=npas, deet=deet,
                               label="IC, lake infl. exp.", dateo=dateo,
                               grid_type=info["grid_type"],
                               typ_var=info["var_type"],
                               nbits=nbits, data_type=data_type)
        i += 1

    # check that all fields were copied
    nRecsIn = rObjIn.get_number_of_records()
    assert i == nRecsIn, "copied {0} records, but should be {1}".format(i, nRecsIn)
    rObjIn.close()
    rObjOut.close()
def plot_initial_lake_depth(path="data/from_guillimin/vary_lake_level1/pm1985010100_00000000p",
                            var_name="CLDP", lons2d=None, lats2d=None, basemap=None):
    """
    returns initial lake depth field
    """
    r = RPN(path)
    field = r.get_first_record_for_name(var_name)
    r.close()
    _plot_depth(field, lons2d, lats2d, basemap=basemap, clevels=range(0, 310, 10))
    return field
def main():
    rpn_path = "/home/huziy/skynet1_rech3/cordex/for_Samira/Africa_0.44deg_ERA40-Int1.5_E21981-2010/dailyAfrica_0.44deg_ERA40-Int1.5_E21981-2010TRAF"
    nc_path = rpn_path + ".nc"
    varname = "TRAF"
    time_units = "days since 1981-01-01 00:00:00"

    rObj = RPN(rpn_path)
    lons2d, lats2d = rObj.get_longitudes_and_latitudes()
    rObj.suppress_log_messages()
    data = rObj.get_4d_field(name=varname)
    rObj.close()

    ds = nc.Dataset(nc_path, "w", format="NETCDF3_CLASSIC")

    nx, ny = lons2d.shape
    levels = list(data.items())[0][1].keys()

    ds.createDimension("lon", nx)
    ds.createDimension("lat", ny)
    ds.createDimension("level", len(levels))
    ds.createDimension("time", None)

    var = ds.createVariable(varname, "f4", dimensions=("time", "level", "lon", "lat"))
    lonVar = ds.createVariable("longitude", "f4", dimensions=("lon", "lat"))
    latVar = ds.createVariable("latitude", "f4", dimensions=("lon", "lat"))
    timeVar = ds.createVariable("time", "f4", dimensions=("time",))

    times_sorted = list(sorted(data.keys()))
    levels_sorted = list(sorted(levels))

    data_arr = [[data[t][lev] for lev in levels_sorted] for t in times_sorted]
    data_arr = np.array(data_arr)
    var[:] = data_arr

    timeVar.units = time_units
    times_num = nc.date2num(times_sorted, time_units)
    timeVar[:] = times_num

    lonVar[:] = lons2d
    latVar[:] = lats2d
    ds.close()
def fix_file(path="/RESCUE/skynet3_rech1/huziy/test_export_to_hdf/test/pm1950010100_15802559p",
             leap_year=False, start_date=datetime(1950, 1, 1)):
    r_in = RPN(path=path)
    r_out = RPN(path=path + ".fixed", mode="w")

    data = []
    i = 0
    while data is not None:
        data = r_in.get_next_record()
        if data is None:
            break

        info = r_in.get_current_info()
        nbits = info["nbits"]
        data_type = info["data_type"]
        if nbits > 0:
            nbits = -nbits

        print("nbits = {0}, data_type = {1}".format(nbits, data_type))

        # ips = map(lambda x: x.value, info["ip"])
        ips = info["ip"]
        if leap_year:
            ips[2] = int(info["npas"] * info["dt_seconds"] / 3600)
            new_start_date = start_date
        else:
            # get the start of the current month:
            # whole days and leftover hours within the (non-leap) year
            hours_total = int(info["npas"] * info["dt_seconds"] / 3600)
            year = start_date.year + hours_total // (365 * 24)
            print(year)
            d_temp = datetime(2001, 1, 1) + timedelta(days=(hours_total % (365 * 24)) // 24,
                                                      hours=hours_total % 24)
            new_start_date = datetime(year, d_temp.month, d_temp.day, d_temp.hour)

        r_out.write_2D_field(name=info["varname"], data=data, ip=info["ip"],
                             ig=info["ig"], npas=info["npas"],
                             deet=info["dt_seconds"], label="CORR_DATE",
                             dateo=new_start_date,
                             grid_type=info["grid_type"],
                             typ_var=info["var_type"],
                             nbits=nbits, data_type=data_type)
        i += 1

    # check that all fields were copied
    nrecs_in = r_in.get_number_of_records()
    assert i == nrecs_in, "copied {0} records, but should be {1}".format(i, nrecs_in)
    r_in.close()
    r_out.close()
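# Hedged usage sketch for fix_file(): reuses the function's default input path and
# writes <path>.fixed; the start date must match the run the file came from.
def _example_fix_dates():
    fix_file(leap_year=False, start_date=datetime(1950, 1, 1))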
def compare_2d(path_base, path_list, label_list):
    """
    compare only monthly fields
    """
    delta_small = 1e-6
    nvert_levs_for_soiltemp = 3  # Compare only 3 levels of the soil temperature
    img_folder = "{:%Y%m%d}".format(datetime.now())

    for vname in ["TBAR", "SNO"]:
        r = RPN(os.path.join(path_base, "{}_monthly_fields.rpn".format(vname)))
        data_base = r.get_4d_field_fc_hour_as_time(name=vname)
        r.close()
def create_files(fnames=FILE_NAMES):
    for nf, f in enumerate(fnames):
        r = RPN(f, mode="w")
        nx = ny = 10
        arr = np.zeros((nx, ny), dtype="f4")
        for i in range(nx):
            for j in range(ny):
                arr[i, j] = i ** 2 + j ** 2

        r.write_2D_field(
            name="T{}".format(nf), data=arr,
            data_type=data_types.compressed_floating_point, nbits=-16)
        r.close()
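# Hedged usage sketch for create_files(): FILE_NAMES is assumed to be defined at
# module level, so a throwaway pair of names is passed explicitly instead.
def _example_create_and_clean_up():
    fnames = ["tmp_test_0.rpn", "tmp_test_1.rpn"]
    create_files(fnames=fnames)
    for f in fnames:
        os.remove(f)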
def correct(path):
    # remove the lake_fraction array and create a new one from the source (rpn) data
    print("Working on {0} ...".format(path))
    h = tb.open_file(path, "a")

    # read data from the rpn file
    r = RPN(SOURCE_PATH)
    lkfr = r.get_first_record_for_name("ML")
    r.close()

    h.get_node("/", infovar.HDF_LAKE_FRACTION_NAME)[:] = lkfr
    h.close()
def test_read_ts_file():
    """
    Test if the module is capable of reading the files containing timeseries data
    """
    the_dir, script_name = os.path.split(__file__)
    in_path = get_input_file_path("erai_1980-2009_PR_ts.88", the_dir)
    r = RPN(in_path)
    pr = r.get_first_record_for_name("PR")
    print(pr.shape, pr.min(), pr.max(), pr.mean(), pr.std())
    r.close()
def compare_soiltemp_1d(path_base, path_list, label_list):
    vname = "TBAR"
    level = 0
    delta_small = 1e-6

    r = RPN(os.path.join(path_base, "{}_monthly_fields.rpn".format(vname)))
    data_base = r.get_4d_field_fc_hour_as_time(name=vname)
    r.close()

    data_base = _convert_dict_to_4d_arr(data_base)

    to_mask = data_base < delta_small
    if vname == "SNO":
        to_mask = to_mask | (data_base > 1000)
    data_base = np.ma.masked_where(to_mask, data_base)

    fig = plt.figure(figsize=(15, 6))
    data_base_ts = data_base.mean(axis=2).mean(axis=2)[:, level] - 273.15
    plt.plot(data_base_ts, label="base")

    for the_path, the_label in zip(path_list, label_list):
        r1 = RPN(os.path.join(the_path, "{}_monthly_fields.rpn".format(vname)))
        data1 = _convert_dict_to_4d_arr(r1.get_4d_field_fc_hour_as_time(name=vname))

        to_mask1 = (data1 < delta_small)
        if vname == "SNO":
            to_mask1 = to_mask1 | (data1 > 1000)
        data1 = np.ma.masked_where(to_mask1, data1)
        r1.close()

        data1_ts = data1.mean(axis=2).mean(axis=2)[:, level] - 273.15
        plt.plot(data1_ts, label=the_label)
        plt.plot(data1_ts - data_base_ts, label=r"$\Delta$" + the_label, lw=5)

    # Shrink current axis by 20%
    ax = plt.gca()
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])

    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
    # fig.tight_layout()
    fig.savefig("{0}_{1}_diag_1d_{2:%Y-%m-%d_%H}.png".format(vname, level, datetime.now()))
def read_and_plot_ts_cross(path="", exp_name=""):
    var_interest = "ADD"
    path_to_dpth_to_bedrock = "/skynet1_rech3/huziy/CLASS_offline_VG/GEOPHYSICAL_FIELDS/test_analysis.rpn"

    # read depth to bedrock
    r = RPN(path_to_dpth_to_bedrock)
    _ = r.get_first_record_for_name("DPTH")
    lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
    rll = RotatedLatLon(**r.get_proj_parameters_for_the_last_read_rec())
    b = rll.get_basemap_object_for_lons_lats(lons2d=lons2d, lats2d=lats2d, resolution="c")
    r.close()

    # five thin top layers followed by uniform 1.5 m layers
    # (only the first nlayers widths are used below)
    layer_widths = [0.1, 0.2, 0.3, 0.5, 0.9] + 42 * [1.5]

    nlayers = 7
    nt = 200 * 12
    layer_widths = layer_widths[:nlayers]
    print(len(layer_widths))

    # calculate depths of soil layer centers
    soil_lev_tops = np.cumsum([0, ] + layer_widths[:-1])
    soil_lev_bottoms = np.cumsum(layer_widths)
    soil_levs = 0.5 * (soil_lev_tops + soil_lev_bottoms)

    i_interest_list, j_interest_list = [120, 120, 160, 170], [50, 60, 60, 60]

    r = RPN(path)
    data = r.get_4d_field_fc_hour_as_time(name=var_interest)
    lev_sorted = list(sorted(list(data.items())[0][1].keys()))[:nlayers]
    fc_sorted = list(sorted(data.keys()))[:nt]
    r.close()

    for i_interest, j_interest in zip(i_interest_list, j_interest_list):
        data1 = np.asarray([[data[fc][lev][i_interest, j_interest] for lev in lev_sorted]
                            for fc in fc_sorted])
        plot_time_series(data=data1, soil_levels=soil_levs, basemap=b,
                         i_interest=i_interest, j_interest=j_interest,
                         longitude=lons2d[i_interest, j_interest],
                         latitude=lats2d[i_interest, j_interest],
                         exp_name=exp_name)
def get_nemo_lakes_mask(samples_dir=""):
    for mfolder in os.listdir(samples_dir):
        mfolder_path = os.path.join(samples_dir, mfolder)
        for fn in os.listdir(mfolder_path):
            # skip the initial-condition outputs (time step 00000000)
            if fn.startswith("pm") and fn[-9:-1] != 8 * "0":
                fp = os.path.join(mfolder_path, fn)
                r = RPN(fp)
                tlake = r.get_first_record_for_name("NEM1")
                lons_2d, lats_2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
                r.close()
                return tlake > 0, lons_2d, lats_2d
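# Hedged usage sketch for get_nemo_lakes_mask(): the Samples directory layout is
# an assumption; the returned boolean mask marks NEMO lake points.
def _example_count_lake_points():
    mask, lons, lats = get_nemo_lakes_mask(samples_dir="/path/to/Samples")  # hypothetical dir
    print("lake points: {} of {}".format(int(mask.sum()), mask.size))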
def main(inout_paths):
    tiff_path, rpn_path = inout_paths
    print("tif path = {0}".format(tiff_path))
    print("rpn path = {0}".format(rpn_path))

    outGrid = RotatedLatLon(lon1=-90.0, lat1=50.0, lon2=0.0, lat2=0.0)

    Grd_dx = 0.5
    Grd_dy = 0.5
    Grd_ni = 170
    Grd_nj = 158
    Grd_iref = 11
    Grd_jref = 11
    Grd_latr = -33.5
    Grd_lonr = 140.5

    lons1d = np.array([Grd_lonr + (i - Grd_iref + 1) * Grd_dx for i in range(Grd_ni)])
    lats1d = np.array([Grd_latr + (j - Grd_jref + 1) * Grd_dy for j in range(Grd_nj)])

    lats2d, lons2d = np.meshgrid(lats1d, lons1d)
    lonlats = np.array(list(map(lambda x, y: outGrid.toGeographicLonLat(x, y),
                                lons2d.flatten(), lats2d.flatten())))
    print(lonlats.shape)

    rObj = RPN(rpn_path, mode="w")
    data = convert(tiff_path, lonlats)
    print("interpolated data")
    data.shape = lons2d.shape

    fieldName = os.path.basename(tiff_path).split("_")[0].lower()

    # write coordinates
    ig = outGrid.write_coords_to_rpn(rObj, lons1d, lats1d)
    rObj.write_2D_field(name=fieldName, data=data, grid_type="Z", ig=ig, label=fieldName)
    rObj.close()
    return 0
def get_basemap_and_coords_improved(
        file_path="data/CORDEX/NorthAmerica_0.44deg_CanHistoE1/Samples/NorthAmerica_0.44deg_CanHistoE1_198101/pm1950010100_00816912p",
        field_name="PR"):
    rpnobj = RPN(file_path)
    the_mask = rpnobj.get_first_record_for_name(field_name)

    # plt.figure()
    # plt.pcolormesh(the_mask.transpose())
    # plt.show()

    proj_params = rpnobj.get_proj_parameters_for_the_last_read_rec()
    rll = RotatedLatLon(**proj_params)
    lons2d, lats2d = rpnobj.get_longitudes_and_latitudes_for_the_last_read_rec()
    basemap = rll.get_basemap_object_for_lons_lats(lons2d=lons2d, lats2d=lats2d)
    rpnobj.close()
    return basemap, lons2d, lats2d
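# Hedged usage sketch for get_basemap_and_coords_improved(): the projection comes
# from the PR record of the default sample file; coloring by latitude is only to
# make the grid visible.
def _example_draw_improved_domain():
    import matplotlib.pyplot as plt
    basemap, lons2d, lats2d = get_basemap_and_coords_improved()
    x, y = basemap(lons2d, lats2d)
    basemap.drawcoastlines()
    basemap.pcolormesh(x, y, lats2d)
    plt.show()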