def copy_needed_field(gid, fout, filter_north=0):
    """Copy the field, cropped so it extends no farther south than ``filter_north``.

    Assumes the grid scans from north to south (first latitude is the
    northernmost): the leading rows down to ``filter_north`` are kept and
    the rest are dropped.

    Parameters
    ----------
    gid : int
        eccodes handle of the source GRIB message (left untouched).
    fout : file object
        Open binary file the cropped clone is written to.
    filter_north : float, optional
        Southern latitude cut-off in degrees. Defaults to 0 (the equator),
        which was the original hard-coded value.
    """
    nx = ecc.codes_get(gid, 'Ni')
    ny = ecc.codes_get(gid, 'Nj')
    first_lat = ecc.codes_get(gid, 'latitudeOfFirstGridPointInDegrees')
    north_south_step = ecc.codes_get(gid, 'jDirectionIncrementInDegrees')
    # Number of rows from the first (northernmost) latitude down to the cut,
    # inclusive of the row at the cut itself.
    new_ny = int((first_lat - filter_north) / north_south_step) + 1
    values = ecc.codes_get_values(gid)
    values_r = np.reshape(values, (ny, nx))
    new_values = values_r[:new_ny, :]
    # Clone so the original message stays intact, then shrink the clone's grid.
    clone_id = ecc.codes_clone(gid)
    ecc.codes_set(clone_id, 'latitudeOfLastGridPointInDegrees', filter_north)
    ecc.codes_set(clone_id, 'Nj', new_ny)
    ecc.codes_set_values(clone_id, new_values.flatten())
    ecc.codes_write(clone_id, fout)
    ecc.codes_release(clone_id)
def test_bufr_encode(tmpdir):
    """Encode a BUFR message from the local-satellite sample and write it out."""
    ibufr = eccodes.codes_bufr_new_from_samples("BUFR3_local_satellite")
    eccodes.codes_set_array(
        ibufr, "inputDelayedDescriptorReplicationFactor", (4, ))
    # Header and data keys, applied in the order the encoder expects.
    settings = [
        ("masterTableNumber", 0),
        ("bufrHeaderSubCentre", 0),
        ("bufrHeaderCentre", 98),
        ("updateSequenceNumber", 0),
        ("dataCategory", 12),
        ("dataSubCategory", 139),
        ("masterTablesVersionNumber", 13),
        ("localTablesVersionNumber", 1),
        ("numberOfSubsets", 492),
        ("localNumberOfObservations", 492),
        ("satelliteID", 4),
        ("observedData", 1),
        ("compressedData", 1),
        ("unexpandedDescriptors", 312061),
        ("pixelSizeOnHorizontal1", 1.25e04),
        ("orbitNumber", 31330),
        ("#1#beamIdentifier", 1),
        ("#4#likelihoodComputedForSolution", eccodes.CODES_MISSING_DOUBLE),
        ("pack", 1),
    ]
    for key, value in settings:
        eccodes.codes_set(ibufr, key, value)
    output = tmpdir.join("test_bufr_encode.bufr")
    with open(str(output), "wb") as fout:
        eccodes.codes_write(ibufr, fout)
    eccodes.codes_release(ibufr)
def test_grib_write(tmpdir):
    """Write a GRIB2 sample message with a modified backgroundProcess key."""
    handle = eccodes.codes_grib_new_from_samples("GRIB2")
    eccodes.codes_set(handle, "backgroundProcess", 44)
    target = tmpdir.join("test_grib_write.grib")
    with open(str(target), "wb") as fout:
        eccodes.codes_write(handle, fout)
    eccodes.codes_release(handle)
def test_bufr_read_write(tmpdir):
    """Round-trip a BUFR4 sample: unpack, check headers, edit a key, repack."""
    handle = eccodes.codes_new_from_samples("BUFR4", eccodes.CODES_PRODUCT_BUFR)
    eccodes.codes_set(handle, "unpack", 1)
    # Sanity-check two known header values of the sample message.
    assert eccodes.codes_get(handle, "typicalYear") == 2012
    assert eccodes.codes_get(handle, "centre", str) == "ecmf"
    eccodes.codes_set(handle, "totalSunshine", 13)
    eccodes.codes_set(handle, "pack", 1)
    target = tmpdir.join("test_bufr_write.bufr")
    with open(str(target), "wb") as fout:
        eccodes.codes_write(handle, fout)
    # The edited value must survive the repack.
    assert eccodes.codes_get(handle, "totalSunshine") == 13
    eccodes.codes_release(handle)
def cli(output_file_path):
    """Create a regular lat/lon GRIB2 message whose values are 0..N-1.

    Parameters
    ----------
    output_file_path : str
        Path the encoded GRIB message is written to (opened in binary mode).
    """
    handle = eccodes.codes_grib_new_from_samples('regular_ll_pl_grib2')
    value_size = eccodes.codes_get_size(handle, 'values')
    # Fill the field with its own flat index: 0, 1, ..., value_size - 1.
    # (Replaces the original manual for/append loop.)
    eccodes.codes_set_values(handle, list(range(value_size)))
    with open(output_file_path, 'wb') as output_file:
        eccodes.codes_write(handle, output_file)
    eccodes.codes_release(handle)
def clone_with_new_values(self, values):
    """Return a new instance of this class backed by a temporary GRIB file
    containing a clone of this message with its data replaced by *values*."""
    fd, out_path = tempfile.mkstemp(suffix=".tmp.grib")
    with os.fdopen(fd, "wb") as sink, load_grib(self.path) as gid:
        duplicate = codes_clone(gid)
        # Use single-precision floating-point representation
        codes_set(duplicate, "bitsPerValue", 32)
        codes_set_values(duplicate, values)
        codes_write(duplicate, sink)
        codes_release(duplicate)
    return type(self)(out_path)
def production_step(idx, values, fout):
    '''Compute z at half & full level for the given level, based on t/q/sp'''
    # We want to integrate up into the atmosphere, starting at the
    # ground so we start at the lowest level (highest number) and
    # keep accumulating the height as we go.
    # See the IFS documentation, part III
    # For speed and file I/O, we perform the computations with
    # numpy vectors instead of fieldsets.
    z_h = values['z']
    # range(n, 0, -1) walks nlevels..1 directly, replacing the original
    # list(reversed(list(range(1, n + 1)))) without building two lists.
    for lev in range(values['nlevels'], 0, -1):
        z_h, z_f = compute_z_level(idx, lev, values, z_h)
        # store the result (z_f) in a field and add to the output
        if values['levelist'] == '' or str(lev) in values['levelist']:
            codes_set(values['sample'], 'level', lev)
            codes_set_values(values['sample'], z_f)
            codes_write(values['sample'], fout)
def save_test_data(path):
    """Save the test file to the indicated directory."""
    import eccodes as ec
    filepath = os.path.join(path, FILENAME)
    with open(filepath, "wb") as f:
        for message in [MSG]:
            buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite')
            # Scalars go through codes_set, sequences through codes_set_array.
            for key, val in message.items():
                if np.isscalar(val):
                    ec.codes_set(buf, key, val)
                else:
                    ec.codes_set_array(buf, key, val)
            ec.codes_set(buf, 'pack', 1)
            ec.codes_write(buf, f)
            ec.codes_release(buf)
    return filepath
def generate_grib(target, **kwargs):
    """Write one GRIB message per request expanded from *kwargs* to *target*."""
    import eccodes
    # Normalise every value to a sequence so the request can be expanded.
    # Iterate over a snapshot because kwargs is mutated in the loop.
    for key, value in list(kwargs.items()):
        if not isinstance(value, (list, tuple)):
            kwargs[key] = [value]
    handle = None
    try:
        dummy_path = os.path.join(os.path.dirname(__file__), "dummy.grib")
        with open(dummy_path, "rb") as f:
            handle = eccodes.codes_new_from_file(f, eccodes.CODES_PRODUCT_GRIB)
        with open(target, "wb") as f:
            for request in iterate_request(kwargs):
                for key, value in request.items():
                    eccodes.codes_set(handle, key, value)
                eccodes.codes_write(handle, f)
    finally:
        # Release the eccodes handle even if encoding fails part-way.
        if handle is not None:
            eccodes.codes_release(handle)
def repack(input_file, outfile, packing_type):
    """Repack infile with packing_type, write result to outfile.

    Parameters
    ----------
    input_file : str
        Path of the GRIB file to read.
    outfile : str
        Path of the repacked GRIB file (overwritten).
    packing_type : str
        Value for the eccodes ``packingType`` key.

    Raises
    ------
    EncodingError
        If the written file does not carry the requested packing type.
    """
    # GRIB is binary data: the original opened both files in TEXT mode
    # ("r"/"w"/"a"), which corrupts the stream or raises decode errors
    # on Python 3. Use "rb"/"wb"/"ab" throughout.
    with open(input_file, "rb") as infile:
        i = 1
        while True:
            in_gid = codes_grib_new_from_file(infile)
            if in_gid is None:
                break
            info("Repacking GRIB #{}".format(i))
            payload = codes_get_values(in_gid)
            clone_id = codes_clone(in_gid)
            codes_set(clone_id, "packingType", packing_type)
            codes_set_values(clone_id, payload)
            # Truncate on the first message, append for the rest.
            mode = "wb" if i == 1 else "ab"
            with open(outfile, mode) as output:
                codes_write(clone_id, output)
            codes_release(clone_id)
            codes_release(in_gid)
            i += 1
    if not confirm_packing_type(outfile, packing_type):
        raise EncodingError("Reencoding silently failed.")
def write(self, file: T.IO[bytes]) -> None:
    """Write this encoded message to an open binary file object."""
    eccodes.codes_write(self.codes_id, file)
def write(self, file):
    """Write this encoded message to an open binary file object."""
    eccodes.codes_write(self.codes_id, file)
def write(self, outfile=None):
    """Write message to file.

    Parameters
    ----------
    outfile : file object, optional
        Open binary file to write to. When falsy, falls back to the
        file handle of the codes_file this message belongs to.
    """
    if not outfile:
        # This is a hack because the API does not accept inheritance
        outfile = self.codes_file.file_handle
    eccodes.codes_write(self.codes_id, outfile)
def write(self, fout, path):
    """Write the message to *fout*, recording its byte offset and source path.

    Parameters
    ----------
    fout : file object
        Open binary file; its current position is stored in ``self.offset``
        before the message is written.
    path : str or None
        When truthy, stored as ``self.path``.
    """
    self.offset = fout.tell()
    eccodes.codes_write(self.handle, fout)
    if path:
        self.path = path
def main():
    """Scan INPUT for indicatorOfParameter 6 at the surface, plot the first
    matching field on its Lambert conformal grid, and copy the messages
    scanned before the match to OUTPUT.

    NOTE(review): paths are hard-coded to a user home directory, and the
    loop stops at the first matching field because of the ``break``.
    """
    INPUT = "/home/trygveasp/tmp/build/fc2018010318+003grib_fp_mbr000"
    OUTPUT = '/home/trygveasp/tmp/build/out.set.grib'
    try:
        from eccodes import (codes_grib_new_from_file, codes_get,
                             codes_get_size, codes_write, codes_release,
                             codes_get_values, CodesInternalError)
    except ImportError:
        # Narrowed from a bare except: only a missing eccodes should abort.
        print("Missing eccodes")
        sys.exit(1)
    # GRIB files are binary: the original text-mode opens ('r'/'w') fail or
    # corrupt the stream on Python 3, so use binary mode for both ends.
    fin = open(INPUT, 'rb')
    fout = open(OUTPUT, 'wb')
    # NOTE(review): `keys` is never used below; kept for reference.
    keys = [
        'Ni', 'Nj', 'latitudeOfFirstGridPointInDegrees',
        'longitudeOfFirstGridPointInDegrees', 'indicatorOfParameter',
        'level', 'timeRangeIndicator', 'indicatorOfTypeOfLevel',
        'dataDate', 'dataTime'
    ]
    geography = [
        "bitmapPresent", "Nx", "Ny", "latitudeOfFirstGridPointInDegrees",
        "longitudeOfFirstGridPointInDegrees", "LoVInDegrees", "DxInMetres",
        "DyInMetres", "iScansNegatively", "jScansPositively",
        "jPointsAreConsecutive", "Latin1InDegrees", "LaDInDegrees",
        "Latin2InDegrees", "latitudeOfSouthernPoleInDegrees",
        "longitudeOfSouthernPoleInDegrees", "gridType"
    ]
    while True:
        gid = codes_grib_new_from_file(fin)
        if gid is None:
            print("Not found")
            break
        else:
            # The field we are looking for: parameter 6, surface level 0,
            # time range indicator 0.
            w_par = 6
            w_lev = 0
            w_typ = "sfc"
            w_tri = 0
            par = codes_get(gid, "indicatorOfParameter")
            lev = codes_get(gid, "level")
            typ = codes_get(gid, "indicatorOfTypeOfLevel")
            tri = codes_get(gid, "timeRangeIndicator")
            if w_par == par and w_lev == lev and w_typ == typ and w_tri == tri:
                print("Found:", par, lev, typ, tri)
                geo = {}
                for key in geography:
                    try:
                        geo.update({key: codes_get(gid, key)})
                    except CodesInternalError as err:
                        print('Error with key="%s" : %s' % (key, err.msg))
                for key in geo:
                    print(' %s: %s' % (key, geo[key]))
                print(
                    'There are %d values, average is %f, min is %f, max is %f'
                    % (codes_get_size(gid, 'values'),
                       codes_get(gid, 'average'), codes_get(gid, 'min'),
                       codes_get(gid, 'max')))
                values = codes_get_values(gid)
                nx = geo["Nx"]
                ny = geo["Ny"]
                print('%d values found in %s' % (len(values), INPUT))
                # Values are row-major (j outer, i inner); a reshape plus
                # transpose replaces the original O(nx*ny) Python fill loop
                # and yields the same (nx, ny) field.
                field = np.reshape(values, (ny, nx)).T
                lonCenter = geo["LoVInDegrees"]
                latCenter = geo["LaDInDegrees"]
                latRef = geo["Latin2InDegrees"]
                print(lonCenter, latCenter, latRef)
                lon0 = geo["longitudeOfFirstGridPointInDegrees"]
                lat0 = geo["latitudeOfFirstGridPointInDegrees"]
                dx = geo["DxInMetres"]
                dy = geo["DyInMetres"]
                # Project the first grid point to locate the grid origin.
                g0 = ccrs.Geodetic()
                proj = ccrs.LambertConformal(central_longitude=lonCenter,
                                             central_latitude=latCenter,
                                             standard_parallels=[latRef])
                x0, y0 = proj.transform_point(lon0, lat0, g0)
                X = np.arange(x0, x0 + (nx * dx), dx)
                Y = np.arange(y0, y0 + (ny * dy), dy)
                print(values.shape, field.shape, x0, y0, X.shape, Y.shape)
                print(X[0] + 1022485)
                print("-1022485")
                print(Y[0] + 1129331)
                print("-1129331")
                # Geopotential -> geopotential height (divide by g).
                Z = np.transpose(field) / 9.81
                N = [10, 50, 100, 250, 500, 750, 1250, 2000]
                ax = plt.axes(projection=proj)
                plt.contourf(X, Y, Z, N, transform=proj)
                plt.show()
                break
            #codes_set_key_vals.
            codes_write(gid, fout)
            codes_release(gid)
    fin.close()
    fout.close()
def main():
    """Build 3-hourly ENP output files: Z/T/RH/O3 on pressure levels plus
    WMO tropopause fields, encoded as GRIB messages cloned from template
    files ('SurfData_template' and 'ENP_template').

    Command-line options (-y, -m1/-d1, -m2/-d2) select the date range;
    defaults cover 23-24 Aug 2017. Relies on module-level helpers and
    constants defined elsewhere in the file (Pinterpol, pressures, pressPa,
    eq, ew, cst).
    """
    # Setting the paths according to where it runs (gort or ciclad)
    if 'gort' == socket.gethostname():
        data_dir = '/dkol/data'
    elif 'ciclad' in socket.gethostname():
        data_dir = '/data/legras/flexpart_in/STC/ERA5'
    else:
        # NOTE(review): data_dir stays unbound on unknown hosts, so the
        # join() below will raise NameError.
        print('unknown hostname for this program')
    SAFNWP_dir = join(data_dir, 'SAFNWP')
    MAINOUT_dir = join(SAFNWP_dir, 'HVR-LHR')
    SurfData_dir = join(SAFNWP_dir, 'LNSP-LHR')
    # Parsing the arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-y", "--year", type=int, help="year")
    parser.add_argument("-m1", "--month1", type=int,
                        choices=1 + np.arange(12), help="start month")
    parser.add_argument("-d1", "--day1", type=int,
                        choices=1 + np.arange(31), help="start day")
    parser.add_argument("-m2", "--month2", type=int,
                        choices=1 + np.arange(12), help="end month")
    parser.add_argument("-d2", "--day2", type=int,
                        choices=1 + np.arange(31), help="end day")
    # Defaults used when an option is not given: 23-24 August 2017.
    year = 2017
    month1 = 8
    day1 = 23
    month2 = 8
    day2 = 24
    print('parsing arguments')
    args = parser.parse_args()
    if args.year is not None:
        year = args.year
    if args.month1 is not None:
        month1 = args.month1
    if args.month2 is not None:
        month2 = args.month2
    if args.day1 is not None:
        day1 = args.day1
    if args.day2 is not None:
        day2 = args.day2
    # To be a loop on time
    date1 = datetime(year, month1, day1, 0)
    date2 = datetime(year, month2, day2, 0)
    # Getting the templates
    gid = {}
    # Surface template
    ftpt = open('SurfData_template', 'rb')
    gid['surf'] = ec.codes_grib_new_from_file(ftpt)
    ftpt.close()
    # Column templates
    # One template message per variable, read sequentially from one file.
    ftpt = open('ENP_template', 'rb')
    for var in ['Z', 'T', 'RH', 'O3']:
        gid[var] = ec.codes_grib_new_from_file(ftpt)
    ftpt.close()
    # Time loop
    date = date1
    while date < date2:
        # Building the interpolated data
        dat4 = Pinterpol(date)
        # Transform the geopotential into geopotential altitude
        # NOTE(review): bare except silently skips the scaling on any
        # failure -- confirm this best-effort behaviour is intended.
        try:
            dat4.var['Z'] *= cst.g
        except:
            pass
        # Calculate the relative humidity
        dat4.var['RH'] = 100 * np.array(pressPa)[:, None, None] * eq(
            dat4.var['Q']) / ew(dat4.var['T'])
        #%%
        # Defining output file
        file_out = join(MAINOUT_dir, date.strftime('%Y/%m'),
                        date.strftime('ENP%y%m%d%H'))
        # Defining file hosting the surface data
        file_surf = join(SurfData_dir, date.strftime('%Y'),
                         date.strftime('LNSP%y%m%d'))
        # Copying surface data to the output file
        # NOTE(review): on any failure of the first grib_copy, retry with a
        # '.grb' suffix on the source file.
        try:
            call([
                'grib_copy', '-w', 'hour=' + str(date.hour), file_surf,
                file_out
            ])
        except:
            call([
                'grib_copy', '-w', 'hour=' + str(date.hour),
                file_surf + '.grb', file_out
            ])
        # Open the output file in append mode
        fout = open(file_out, 'ab')
        # Add first the tropopause data
        # Maps variable name -> [paramId, description, units].
        dicwmo = {
            'pwmo': [82, 'WMO tropopause pressure', 'hPa'],
            'Twmo': [81, 'WMO tropopause temperature', 'T'],
            'zwmo': [83, 'WMO tropopause altitude', 'm**2 s**-2']
        }
        # Unit conversions before encoding: Pa -> hPa, altitude scaled by g.
        dat4.d2d['pwmo'] /= 100
        dat4.d2d['zwmo'] *= cst.g
        for var in ['Twmo', 'pwmo', 'zwmo']:
            clone_id = ec.codes_clone(gid['surf'])
            nx = ec.codes_get(gid['surf'], 'Ni')
            ny = ec.codes_get(gid['surf'], 'Nj')
            ec.codes_set(clone_id, 'paramId', dicwmo[var][0])
            ec.codes_set(clone_id, 'dataDate',
                         10000 * date.year + 100 * date.month + date.day)
            ec.codes_set(clone_id, 'hour', date.hour)
            # Latitude axis is flipped ([::-1, :]) before flattening.
            ec.codes_set_values(clone_id,
                                np.reshape(dat4.d2d[var][::-1, :], nx * ny))
            ec.codes_write(clone_id, fout)
            ec.codes_release(clone_id)
        # Add now the data on pressure levels
        for ll in range(len(pressures)):
            for var in ['Z', 'T', 'RH', 'O3']:
                clone_id = ec.codes_clone(gid[var])
                nx = ec.codes_get(gid[var], 'Ni')
                ny = ec.codes_get(gid[var], 'Nj')
                ec.codes_set(clone_id, 'lev', pressures[ll])
                ec.codes_set(clone_id, 'dataDate',
                             10000 * date.year + 100 * date.month + date.day)
                ec.codes_set(clone_id, 'hour', date.hour)
                ec.codes_set_values(
                    clone_id,
                    np.reshape(dat4.var[var][ll, ::-1, :], nx * ny))
                ec.codes_write(clone_id, fout)
                ec.codes_release(clone_id)
        # Closing the output file
        fout.close()
        print('processed ', date)
        date += timedelta(hours=3)