def copy_needed_field(gid, fout, filter_north=0):
    """Copy the needed field, cropped to latitudes >= ``filter_north``.

    Clones the GRIB message ``gid``, keeps only the rows from the first
    (northernmost) latitude down to ``filter_north`` degrees, updates the
    grid metadata accordingly, and writes the clone to ``fout``.

    :param gid: handle of the source GRIB message
    :param fout: writable binary file object for the output
    :param filter_north: southernmost latitude (degrees) to keep; default 0
        preserves the original hard-coded behaviour (northern hemisphere only)
    """
    nx = ecc.codes_get(gid, 'Ni')
    ny = ecc.codes_get(gid, 'Nj')
    first_lat = ecc.codes_get(gid, 'latitudeOfFirstGridPointInDegrees')
    north_south_step = ecc.codes_get(gid, 'jDirectionIncrementInDegrees')
    # Rows kept: one per j-step from first_lat down to filter_north, both ends
    # inclusive (hence the +1).
    new_ny = int((first_lat - filter_north) / north_south_step) + 1
    values = ecc.codes_get_values(gid)
    values_r = np.reshape(values, (ny, nx))
    new_values = values_r[:new_ny, :]
    clone_id = ecc.codes_clone(gid)
    ecc.codes_set(clone_id, 'latitudeOfLastGridPointInDegrees', filter_north)
    ecc.codes_set(clone_id, 'Nj', new_ny)
    ecc.codes_set_values(clone_id, new_values.flatten())
    ecc.codes_write(clone_id, fout)
    ecc.codes_release(clone_id)
def test_grib_ecc_1042():
    """Regression test for ECC-1042: Python3 interface writes integer arrays incorrectly."""
    gid = eccodes.codes_grib_new_from_samples("regular_ll_sfc_grib2")
    # First pass: let numpy infer an integer dtype for the array.
    int_vals = np.array([1, 2, 3])
    eccodes.codes_set_values(gid, int_vals)
    fetched = eccodes.codes_get_values(gid)
    count = len(fetched)
    assert fetched[0] == 1
    assert fetched[count - 1] == 3
    # Second pass: same data, but with an explicit float dtype.
    float_vals = np.array([1, 2, 3], dtype=float)
    eccodes.codes_set_values(gid, float_vals)
    fetched = eccodes.codes_get_values(gid)
    assert fetched[0] == 1
    assert fetched[count - 1] == 3
    eccodes.codes_release(gid)
def test_grib_float_array():
    """Setting 'values' must work for every common float dtype.

    Exercises both codes_set_array and codes_set_values on a constant
    field of ones and checks the round-trip.
    """
    gid = eccodes.codes_grib_new_from_samples("regular_ll_sfc_grib2")
    for ftype in (float, np.float16, np.float32, np.float64):
        values = np.ones((100000,), ftype)
        eccodes.codes_set_array(gid, "values", values)
        assert (eccodes.codes_get_values(gid) == 1.0).all()
        eccodes.codes_set_values(gid, values)
        assert (eccodes.codes_get_values(gid) == 1.0).all()
    # Release the handle — it was leaked before, unlike the sibling tests.
    eccodes.codes_release(gid)
def test_grib_ecc_1007():
    """Regression test for ECC-1007: Python3 interface cannot write large arrays."""
    gid = eccodes.codes_grib_new_from_samples("regular_ll_sfc_grib2")
    numvals = 1501 * 1501
    values = np.zeros((numvals,))
    values[0] = 12  # Make sure it's not a constant field
    eccodes.codes_set_values(gid, values)
    # Fixed: previously eccodes.eccodes.codes_get, which reached into the
    # implementation submodule instead of the public API used everywhere else.
    maxv = eccodes.codes_get(gid, "max")
    minv = eccodes.codes_get(gid, "min")
    assert minv == 0
    assert maxv == 12
    eccodes.codes_release(gid)
def cli(output_file_path):
    """Write a sample GRIB message whose data values are 0..N-1.

    :param output_file_path: path of the binary GRIB file to create
    """
    handle = eccodes.codes_grib_new_from_samples('regular_ll_pl_grib2')
    value_size = eccodes.codes_get_size(handle, 'values')
    # Idiomatic replacement for the manual range/append loop.
    values = list(range(value_size))
    eccodes.codes_set_values(handle, values)
    with open(output_file_path, 'wb') as output_file:
        eccodes.codes_write(handle, output_file)
    eccodes.codes_release(handle)
def clone_with_new_values(self, values):
    """Return a new instance backed by a temporary GRIB file holding *values*.

    The source message is cloned, re-encoded with 32 bits per value and
    written to a freshly created temp file.
    """
    fd, grib_path = tempfile.mkstemp(suffix=".tmp.grib")
    with os.fdopen(fd, "wb") as out_stream:
        with load_grib(self.path) as gid:
            cloned = codes_clone(gid)
            # Use single-precision floating-point representation
            codes_set(cloned, "bitsPerValue", 32)
            codes_set_values(cloned, values)
            codes_write(cloned, out_stream)
            codes_release(cloned)
    return type(self)(grib_path)
def production_step(idx, values, fout):
    '''Compute z at half & full level for the given level, based on t/q/sp'''
    # We want to integrate up into the atmosphere, starting at the
    # ground so we start at the lowest level (highest number) and
    # keep accumulating the height as we go.
    # See the IFS documentation, part III
    # For speed and file I/O, we perform the computations with
    # numpy vectors instead of fieldsets.
    z_h = values['z']
    # range() counts down directly with a negative step; the original
    # list(reversed(list(range(...)))) built two throwaway lists.
    for lev in range(values['nlevels'], 0, -1):
        z_h, z_f = compute_z_level(idx, lev, values, z_h)
        # store the result (z_f) in a field and add to the output
        if values['levelist'] == '' or str(lev) in values['levelist']:
            codes_set(values['sample'], 'level', lev)
            codes_set_values(values['sample'], z_f)
            codes_write(values['sample'], fout)
def repack(input_file, outfile, packing_type):
    """Repack input_file with packing_type, write result to outfile.

    :param input_file: path to the source GRIB file
    :param outfile: path of the repacked GRIB file to create
    :param packing_type: ecCodes packingType to apply to each message
    :raises EncodingError: if the output does not carry the requested packing
    """
    # GRIB is binary data: both files must be opened in binary mode (the
    # original used text mode, which breaks under Python 3).  The output is
    # opened once in "wb" instead of being reopened per message — equivalent
    # to the old write-then-append sequence.
    with open(input_file, "rb") as infile, open(outfile, "wb") as output:
        i = 1
        while True:
            in_gid = codes_grib_new_from_file(infile)
            if in_gid is None:
                break
            info("Repacking GRIB #{}".format(i))
            payload = codes_get_values(in_gid)
            clone_id = codes_clone(in_gid)
            codes_set(clone_id, "packingType", packing_type)
            codes_set_values(clone_id, payload)
            codes_write(clone_id, output)
            codes_release(clone_id)
            codes_release(in_gid)
            i += 1
    if not confirm_packing_type(outfile, packing_type):
        raise EncodingError("Reencoding silently failed.")
def values(self, val: numpy.ndarray):
    """Setter: encode *val* as this GRIB message's data values via ecCodes.

    # NOTE(review): annotation improved from ``numpy.array`` (a factory
    # function, not a type) to ``numpy.ndarray``.
    """
    eccodes.codes_set_values(self.gid, val)
def main():
    """Interpolate ERA5 fields to pressure levels and append them, as GRIB
    messages, to daily output files over a command-line-selected date range."""
    # Setting the paths according to where it runs (gort or ciclad)
    if 'gort' == socket.gethostname():
        data_dir = '/dkol/data'
    elif 'ciclad' in socket.gethostname():
        data_dir = '/data/legras/flexpart_in/STC/ERA5'
    else:
        print('unknown hostname for this program')
    SAFNWP_dir = join(data_dir, 'SAFNWP')
    MAINOUT_dir = join(SAFNWP_dir, 'HVR-LHR')
    SurfData_dir = join(SAFNWP_dir, 'LNSP-LHR')
    # Parsing the arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-y", "--year", type=int, help="year")
    parser.add_argument("-m1", "--month1", type=int,
                        choices=1 + np.arange(12), help="start month")
    parser.add_argument("-d1", "--day1", type=int,
                        choices=1 + np.arange(31), help="start day")
    parser.add_argument("-m2", "--month2", type=int,
                        choices=1 + np.arange(12), help="end month")
    parser.add_argument("-d2", "--day2", type=int,
                        choices=1 + np.arange(31), help="end day")
    # Defaults used when the corresponding option is not given.
    year = 2017
    month1 = 8
    day1 = 23
    month2 = 8
    day2 = 24
    print('parsing arguments')
    args = parser.parse_args()
    if args.year is not None:
        year = args.year
    if args.month1 is not None:
        month1 = args.month1
    if args.month2 is not None:
        month2 = args.month2
    if args.day1 is not None:
        day1 = args.day1
    if args.day2 is not None:
        day2 = args.day2
    # To be a loop on time
    date1 = datetime(year, month1, day1, 0)
    date2 = datetime(year, month2, day2, 0)
    # Getting the templates
    gid = {}
    # Surface template
    ftpt = open('SurfData_template', 'rb')
    gid['surf'] = ec.codes_grib_new_from_file(ftpt)
    ftpt.close()
    # Column templates: four successive messages read from the same file.
    ftpt = open('ENP_template', 'rb')
    for var in ['Z', 'T', 'RH', 'O3']:
        gid[var] = ec.codes_grib_new_from_file(ftpt)
    ftpt.close()
    # Time loop
    date = date1
    while date < date2:
        # Building the interpolated data
        dat4 = Pinterpol(date)
        # Transform the geopotential into geopotential altitude
        # NOTE(review): bare except silently skips the scaling when 'Z' is
        # absent — presumably intentional best-effort; confirm.
        try:
            dat4.var['Z'] *= cst.g
        except:
            pass
        # Calculate the relative humidity
        dat4.var['RH'] = 100 * np.array(pressPa)[:, None, None] * eq(
            dat4.var['Q']) / ew(dat4.var['T'])
        #%%
        # Defining output file
        file_out = join(MAINOUT_dir, date.strftime('%Y/%m'),
                        date.strftime('ENP%y%m%d%H'))
        # Defining file hosting the surface data
        file_surf = join(SurfData_dir, date.strftime('%Y'),
                         date.strftime('LNSP%y%m%d'))
        # Copying surface data to the output file
        # NOTE(review): fallback assumes the surface file may carry a .grb
        # extension; a bare except also hides real grib_copy failures.
        try:
            call([
                'grib_copy', '-w', 'hour=' + str(date.hour), file_surf,
                file_out
            ])
        except:
            call([
                'grib_copy', '-w', 'hour=' + str(date.hour),
                file_surf + '.grb', file_out
            ])
        # Open the output file in append mode
        fout = open(file_out, 'ab')
        # Add first the tropopause data
        # Mapping: variable -> [paramId, description, units]
        dicwmo = {
            'pwmo': [82, 'WMO tropopause pressure', 'hPa'],
            'Twmo': [81, 'WMO tropopause temperature', 'T'],
            'zwmo': [83, 'WMO tropopause altitude', 'm**2 s**-2']
        }
        # Unit conversions: Pa -> hPa, altitude -> geopotential.
        dat4.d2d['pwmo'] /= 100
        dat4.d2d['zwmo'] *= cst.g
        for var in ['Twmo', 'pwmo', 'zwmo']:
            clone_id = ec.codes_clone(gid['surf'])
            nx = ec.codes_get(gid['surf'], 'Ni')
            ny = ec.codes_get(gid['surf'], 'Nj')
            ec.codes_set(clone_id, 'paramId', dicwmo[var][0])
            ec.codes_set(clone_id, 'dataDate',
                         10000 * date.year + 100 * date.month + date.day)
            ec.codes_set(clone_id, 'hour', date.hour)
            # [::-1, :] flips the latitude axis to match the template's
            # scanning order — TODO confirm against the template files.
            ec.codes_set_values(clone_id,
                                np.reshape(dat4.d2d[var][::-1, :], nx * ny))
            ec.codes_write(clone_id, fout)
            ec.codes_release(clone_id)
        # Add now the data on pressure levels
        for ll in range(len(pressures)):
            for var in ['Z', 'T', 'RH', 'O3']:
                clone_id = ec.codes_clone(gid[var])
                nx = ec.codes_get(gid[var], 'Ni')
                ny = ec.codes_get(gid[var], 'Nj')
                ec.codes_set(clone_id, 'lev', pressures[ll])
                ec.codes_set(clone_id, 'dataDate',
                             10000 * date.year + 100 * date.month + date.day)
                ec.codes_set(clone_id, 'hour', date.hour)
                ec.codes_set_values(
                    clone_id, np.reshape(dat4.var[var][ll, ::-1, :], nx * ny))
                ec.codes_write(clone_id, fout)
                ec.codes_release(clone_id)
        # Closing the output file
        fout.close()
        print('processed ', date)
        date += timedelta(hours=3)