def test_grib_ecc_1042():
    """Regression test for ECC-1042: the Python3 interface must write
    integer numpy arrays correctly (round-trip through set/get values)."""
    gid = eccodes.codes_grib_new_from_samples("regular_ll_sfc_grib2")

    # Write an array whose dtype numpy infers (integer).
    int_vals = np.array([1, 2, 3])
    eccodes.codes_set_values(gid, int_vals)
    fetched = eccodes.codes_get_values(gid)
    count = len(fetched)
    assert fetched[0] == 1
    assert fetched[count - 1] == 3

    # Write again with an explicitly floating dtype.
    float_vals = np.array([1, 2, 3], dtype=float)
    eccodes.codes_set_values(gid, float_vals)
    fetched = eccodes.codes_get_values(gid)
    assert fetched[0] == 1
    assert fetched[count - 1] == 3

    eccodes.codes_release(gid)
def test_grib_float_array():
    """Both codes_set_array and codes_set_values must accept every
    floating-point dtype for the 'values' key."""
    gid = eccodes.codes_grib_new_from_samples("regular_ll_sfc_grib2")
    for dtype in (float, np.float16, np.float32, np.float64):
        ones = np.ones((100000,), dtype)
        eccodes.codes_set_array(gid, "values", ones)
        assert (eccodes.codes_get_values(gid) == 1.0).all()
        eccodes.codes_set_values(gid, ones)
        assert (eccodes.codes_get_values(gid) == 1.0).all()
def get_ecc_value(gids, keyname, keyvalue):
    """Stack the value arrays of every message whose *keyname* equals
    *keyvalue* and return them reshaped to (Ni, Nj[, count]).

    Grid dimensions are taken from the first matching message; raises
    UnboundLocalError when no message matches (unchanged behaviour).
    """
    first = True
    for gid in gids:
        if ecc.codes_get(gid, keyname) != keyvalue:
            continue
        layer = np.array(ecc.codes_get_values(gid))
        if first:
            Ni = ecc.codes_get(gid, 'Ni')
            Nj = ecc.codes_get(gid, 'Nj')
            data = layer
            first = False
        else:
            data = np.dstack((data, layer))
    return np.squeeze(data.reshape(Ni, Nj, -1))
def get(self, name):
    """Return the value of key *name* from the message handle.

    The 'values' key is fetched through codes_get_values; any missing
    key yields None instead of raising.
    """
    try:
        if name == "values":
            result = eccodes.codes_get_values(self.handle)
        else:
            result = eccodes.codes_get(self.handle, name)
    except eccodes.KeyValueNotFoundError:
        result = None
    return result
def copy_needed_field(gid, fout):
    """Copy the needed field, cropped at latitude 0, to *fout*.

    Rows south of latitude 0 are dropped; the clone's grid metadata is
    updated to match before it is written out.
    """
    nx = ecc.codes_get(gid, 'Ni')
    ny = ecc.codes_get(gid, 'Nj')
    top_lat = ecc.codes_get(gid, 'latitudeOfFirstGridPointInDegrees')
    lat_step = ecc.codes_get(gid, 'jDirectionIncrementInDegrees')
    filter_north = 0
    # Number of rows between the first latitude and the cut, inclusive.
    rows_kept = int((top_lat - filter_north) / lat_step) + 1
    grid = np.reshape(ecc.codes_get_values(gid), (ny, nx))
    cropped = grid[:rows_kept, :]
    clone_id = ecc.codes_clone(gid)
    ecc.codes_set(clone_id, 'latitudeOfLastGridPointInDegrees', filter_north)
    ecc.codes_set(clone_id, 'Nj', rows_kept)
    ecc.codes_set_values(clone_id, cropped.flatten())
    ecc.codes_write(clone_id, fout)
    ecc.codes_release(clone_id)
def get(self, name):
    """Return the value of key *name*, or None when the key is absent.

    'values' and the distinct latitude/longitude keys are read as double
    arrays; everything else goes through the scalar codes_get.
    """
    try:
        if name == "values":
            return eccodes.codes_get_values(self.handle)
        if name in ("distinctLatitudes", "distinctLongitudes"):
            return eccodes.codes_get_double_array(self.handle, name)
        return eccodes.codes_get(self.handle, name)
    except eccodes.KeyValueNotFoundError:
        return None
def test_grib_get_double_elements():
    """codes_get_double_elements must return the expected sample values
    and agree with codes_get_elements for the same indexes."""
    gid = eccodes.codes_grib_new_from_samples("gg_sfc_grib1")
    values = eccodes.codes_get_values(gid)
    count = len(values)
    picks = [0, count // 2, count - 1]
    elems = eccodes.codes_get_double_elements(gid, "values", picks)
    for got, want in zip(elems, (259.6935, 299.9064, 218.8146)):
        assert math.isclose(got, want, abs_tol=0.001)
    assert elems == eccodes.codes_get_elements(gid, "values", picks)
def values(self):
    """
    Property to access the values key of the GRIB file as a numpy array.

    :rtype: numpy.ndarray
    """
    with load_grib(self.path) as gid:
        return codes_get_values(gid)
def read_data(grid):
    """Read one GRIB message located by *grid* and return its data values.

    Args:
        grid: mapping with 'file_name', 'offset' and 'length' keys that
            locate the raw message bytes inside the file.

    Returns:
        numpy.ndarray: decoded data values of the message.
    """
    # BUGFIX: removed the redundant fp.close() inside the 'with' block —
    # the context manager already closes the file on exit.
    with open(grid['file_name'], "rb") as fp:
        fp.seek(grid['offset'], 0)
        buff = fp.read(grid['length'])
    gid = ecc.codes_new_from_message(buff)
    values = np.array(ecc.codes_get_values(gid))
    ecc.codes_release(gid)
    return values
def compute_z_level(idx, lev, values, z_h): '''Compute z at half & full level for the given level, based on t/q/sp''' # select the levelist and retrieve the vaules of t and q # t_level: values for t # q_level: values for q codes_index_select(idx, 'level', lev) codes_index_select(idx, 'shortName', 't') gid = codes_new_from_index(idx) t_level = codes_get_values(gid) codes_release(gid) codes_index_select(idx, 'shortName', 'q') gid = codes_new_from_index(idx) q_level = codes_get_values(gid) codes_release(gid) # compute moist temperature t_level = t_level * (1. + 0.609133 * q_level) # compute the pressures (on half-levels) ph_lev, ph_levplusone = get_ph_levs(values, lev) if lev == 1: dlog_p = np.log(ph_levplusone / 0.1) alpha = np.log(2) else: dlog_p = np.log(ph_levplusone / ph_lev) alpha = 1. - ((ph_lev / (ph_levplusone - ph_lev)) * dlog_p) t_level = t_level * R_D # z_f is the geopotential of this full level # integrate from previous (lower) half-level z_h to the # full level z_f = z_h + (t_level * alpha) # z_h is the geopotential of 'half-levels' # integrate z_h to next half level z_h = z_h + (t_level * dlog_p) return z_h, z_f
def get(self, name):
    """Return the value for key *name*.

    Values in the CHEAT override table win; the 'values' key is read via
    codes_get_values; multi-element keys come back as arrays; a missing
    key yields None.
    """
    if name in CHEAT:
        return CHEAT[name]
    try:
        if name == "values":
            return eccodes.codes_get_values(self.handle)
        size = eccodes.codes_get_size(self.handle, name)
        LOG.debug(f"{name}:{size}")
        if size and size > 1:
            return eccodes.codes_get_array(self.handle, name)
        return eccodes.codes_get(self.handle, name)
    except eccodes.KeyValueNotFoundError:
        return None
def _get_xarray_from_msg(self, gid):
    """Read the values from the GRIB message and return a DataArray object.

    Args:
        gid: The ID of the GRIB message.

    Returns:
        DataArray: The array containing the retrieved values.
    """
    # Reshape the flat value vector onto the message grid, wrap it in a
    # chunked dask array, then expose it as an xarray DataArray.
    grid = ec.codes_get_values(gid).reshape(self._nrows, self._ncols)
    chunked = da.from_array(grid, CHUNK_SIZE)
    return xr.DataArray(chunked, dims=('y', 'x'))
def gribs_match(left, right):
    """Check if GRIBs in both input files store the same data.

    Returns a list with one boolean per message pair: True when the two
    value arrays agree within the larger packing error of the pair.
    A missing message in *right* records False.
    """
    comparisons = []
    # BUGFIX: GRIB files are binary — text-mode open corrupts decoding
    # on Python 3; open both files with "rb".
    with open(left, "rb") as a, open(right, "rb") as b:
        while True:
            a_gid = codes_grib_new_from_file(a)
            if a_gid is None:
                break
            b_gid = codes_grib_new_from_file(b)
            if b_gid is None:
                comparisons.append(False)
                info("GRIBs contain unequal number of messages.")
                codes_release(a_gid)  # BUGFIX: release the orphaned handle
                continue
            packing_errors = [0]
            try:
                packing_errors.append(codes_get(a_gid, "packingError"))
                packing_errors.append(codes_get(b_gid, "packingError"))
            except CodesInternalError:
                # Some packings carry no packingError key; fall back to 0.
                pass
            tolerance = max(packing_errors)
            a_values = codes_get_values(a_gid)
            b_values = codes_get_values(b_gid)
            comparisons.append(np.allclose(a_values, b_values, atol=tolerance))
            # BUGFIX: release handles to avoid leaking eccodes memory.
            codes_release(a_gid)
            codes_release(b_gid)
    return comparisons
def get_surface_pressure(idx):
    '''Get the surface pressure for date-time-step.

    Reads the lnsp (log of surface pressure) field from the index and
    returns exp(lnsp). Raises WrongStepError when the field is missing
    and exits when the field is spectral rather than gridded.
    '''
    codes_index_select(idx, 'level', 1)
    codes_index_select(idx, 'shortName', 'lnsp')
    gid = codes_new_from_index(idx)
    if gid is None:
        raise WrongStepError()
    if codes_get(gid, 'gridType', str) == 'sh':
        print('%s [ERROR] fields must be gridded, not spectral' % sys.argv[0],
              file=sys.stderr)
        sys.exit(1)
    # surface pressure = exp(lnsp)
    sfc_p = np.exp(codes_get_values(gid))
    codes_release(gid)
    return sfc_p
def cli(file_path):
    """Print date, level type, level and the last data value for every
    GRIB message in *file_path*."""
    with open(file_path, 'rb') as f:
        handle = eccodes.codes_grib_new_from_file(f, headers_only=False)
        while handle is not None:
            date = eccodes.codes_get(handle, "dataDate")
            type_of_level = eccodes.codes_get(handle, "typeOfLevel")
            level = eccodes.codes_get(handle, "level")
            values = eccodes.codes_get_array(handle, "values")
            value = values[-1]
            # BUGFIX: codes_get_values() takes only the message handle;
            # the old call passed an extra "values" argument (TypeError).
            values_array = eccodes.codes_get_values(handle)
            # BUGFIX: was indexing `values` instead of `values_array`.
            value_array = values_array[-1]
            print(date, type_of_level, level, value)
            eccodes.codes_release(handle)
            handle = eccodes.codes_grib_new_from_file(f, headers_only=False)
def extract_from_message(
        messageid: int) -> Tuple[Dict[str, np.ma.MaskedArray], str]:
    """
    Eccodes tools to extract the field and do a sanity check on the name or cfVarName
    Returns a dictionary of the field, with endStep key, and additionally the units string
    EndStep is important in case the self.operation wants a certain timestep extracted.

    NOTE(review): this is a closure — `self`, `date`, `Tuple`, `Dict`,
    `pd`, `np` and `ec` all come from the enclosing scope.
    """
    # Sanity check: the message's long name or its cfVarName must match
    # the variable this extractor was configured for.
    name = ec.codes_get(messageid, 'name').lower().split(' ')
    datestamp = str(ec.codes_get(messageid, 'dataDate'))
    assert ('_'.join(name) == self.encoding.loc['variable']) or (ec.codes_get(
        messageid, 'cfVarName') == self.encoding.name)
    # dataDate is yyyymmdd; it must equal the date supplied by the caller.
    assert pd.Timestamp(
        year=int(datestamp[:4]),
        month=int(datestamp[4:6]),
        day=int(datestamp[6:])) == date  # Date comes from one level up
    # Extract the gridded values, reshape and mask.
    values = ec.codes_get_values(messageid)  # One dimensional array
    # jPointsAreConsecutive == 1 means latitude varies fastest in storage.
    lat_fastest_changing = (ec.codes_get(messageid,
                                         'jPointsAreConsecutive') == 1)
    values = values.reshape(
        (ec.codes_get(messageid, 'Nj'), ec.codes_get(messageid, 'Ni')),
        order='F' if lat_fastest_changing else
        'C')  # order C means last index fastest changing
    if ec.codes_get(
            messageid, 'latitudeOfFirstGridPointInDegrees') > ec.codes_get(
                messageid, 'latitudeOfLastGridPointInDegrees'):
        # Flip so latitude increases with row index.
        values = values[::
                        -1, :]  # In my eventual netcdf storage I want the latitudes to increase with index
    masked_values = np.ma.MaskedArray(data=values,
                                      mask=(values == ec.codes_get(
                                          messageid, 'missingValue')))
    units = ec.codes_get(messageid, 'units')
    # Key of the returned dict: the configured time variable as a string,
    # normalised to two-digit hour codes ('hhUTC').
    timeinfo = str(
        ec.codes_get(messageid, self.encoding.loc['timevariable']))
    if timeinfo[-2:] == '00':
        timeinfo = timeinfo[:-2]  # Remove the trailing zeros of the minutes
    if len(timeinfo) == 1:
        timeinfo = '0' + timeinfo[0]  # Prepending to match the hhUTC codes
    return ({timeinfo: masked_values}, units)
def get_initial_values(idx, keep_sample=False):
    '''Get the values of surface z, pv and number of levels.

    When keep_sample is True the message handle is kept under the
    'sample' key instead of being released.
    '''
    codes_index_select(idx, 'level', 1)
    codes_index_select(idx, 'step', 0)
    codes_index_select(idx, 'shortName', 'z')
    gid = codes_new_from_index(idx)
    # surface geopotential, model pv coefficients and full-level count
    values = {
        'z': codes_get_values(gid),
        'pv': codes_get_array(gid, 'pv'),
        'nlevels': codes_get(gid, 'NV', int) // 2 - 1,
    }
    check_max_level(idx, values)
    if keep_sample:
        values['sample'] = gid
    else:
        codes_release(gid)
    return values
def repack(input_file, outfile, packing_type):
    """Repack infile with packing_type, write result to outfile.

    Raises EncodingError when the rewritten file does not carry the
    requested packing type.
    """
    # BUGFIX: GRIB data is binary; reading in text mode breaks decoding
    # on Python 3.
    with open(input_file, "rb") as infile:
        i = 1
        while True:
            in_gid = codes_grib_new_from_file(infile)
            if in_gid is None:
                break
            info("Repacking GRIB #{}".format(i))
            payload = codes_get_values(in_gid)
            clone_id = codes_clone(in_gid)
            codes_set(clone_id, "packingType", packing_type)
            codes_set_values(clone_id, payload)
            # BUGFIX: output must also be binary ('wb'/'ab', not 'w'/'a').
            mode = "wb" if i == 1 else "ab"
            with open(outfile, mode) as output:
                codes_write(clone_id, output)
            codes_release(clone_id)
            codes_release(in_gid)
            i += 1
    if not confirm_packing_type(outfile, packing_type):
        raise EncodingError("Reencoding silently failed.")
def main():
    """Scan a hard-coded GRIB file, plot the first matching surface field
    on its Lambert grid, and copy non-matching messages to OUTPUT."""
    INPUT = "/home/trygveasp/tmp/build/fc2018010318+003grib_fp_mbr000"
    OUTPUT = '/home/trygveasp/tmp/build/out.set.grib'
    try:
        from eccodes import codes_grib_new_from_file, codes_get, \
            codes_get_size, codes_write, codes_release, codes_get_values, \
            CodesInternalError
    # BUGFIX: bare 'except:' also swallowed SystemExit/KeyboardInterrupt;
    # only an import failure should trigger the fallback.
    except ImportError:
        print("Missing eccodes")
        sys.exit(1)
    # BUGFIX: GRIB files are binary — open input 'rb' and output 'wb'
    # (text mode breaks eccodes on Python 3). Also removed the unused
    # 'keys' list.
    fin = open(INPUT, 'rb')
    fout = open(OUTPUT, 'wb')
    geography = [
        "bitmapPresent", "Nx", "Ny", "latitudeOfFirstGridPointInDegrees",
        "longitudeOfFirstGridPointInDegrees", "LoVInDegrees", "DxInMetres",
        "DyInMetres", "iScansNegatively", "jScansPositively",
        "jPointsAreConsecutive", "Latin1InDegrees", "LaDInDegrees",
        "Latin2InDegrees", "latitudeOfSouthernPoleInDegrees",
        "longitudeOfSouthernPoleInDegrees", "gridType"
    ]
    while 1:
        gid = codes_grib_new_from_file(fin)
        if gid is None:
            print("Not found")
            break
        else:
            # Hard-coded selection criteria for the field to plot.
            w_par = 6
            w_lev = 0
            w_typ = "sfc"
            w_tri = 0
            par = codes_get(gid, "indicatorOfParameter")
            lev = codes_get(gid, "level")
            typ = codes_get(gid, "indicatorOfTypeOfLevel")
            tri = codes_get(gid, "timeRangeIndicator")
            if w_par == par and w_lev == lev and w_typ == typ and w_tri == tri:
                print("Found:", par, lev, typ, tri)
                geo = {}
                for key in geography:
                    try:
                        geo.update({key: codes_get(gid, key)})
                    except CodesInternalError as err:
                        print('Error with key="%s" : %s' % (key, err.msg))
                for key in geo:
                    print(' %s: %s' % (key, geo[key]))
                print(
                    'There are %d values, average is %f, min is %f, max is %f'
                    % (codes_get_size(gid, 'values'),
                       codes_get(gid, 'average'), codes_get(gid, 'min'),
                       codes_get(gid, 'max')))
                values = codes_get_values(gid)
                nx = geo["Nx"]
                ny = geo["Ny"]
                print('%d values found in %s' % (len(values), INPUT))
                # Unflatten the value vector column-by-column onto the grid.
                field = np.empty([nx, ny])
                ii = 0
                for j in range(0, ny):
                    for i in range(0, nx):
                        field[i, j] = values[ii]
                        ii = ii + 1
                lonCenter = geo["LoVInDegrees"]
                latCenter = geo["LaDInDegrees"]
                latRef = geo["Latin2InDegrees"]
                print(lonCenter, latCenter, latRef)
                lon0 = geo["longitudeOfFirstGridPointInDegrees"]
                lat0 = geo["latitudeOfFirstGridPointInDegrees"]
                dx = geo["DxInMetres"]
                dy = geo["DyInMetres"]
                # Project the first grid point to get the grid origin in
                # projected coordinates, then build the axis vectors.
                g0 = ccrs.Geodetic()
                proj = ccrs.LambertConformal(central_longitude=lonCenter,
                                             central_latitude=latCenter,
                                             standard_parallels=[latRef])
                x0, y0 = proj.transform_point(lon0, lat0, g0)
                X = np.arange(x0, x0 + (nx * dx), dx)
                Y = np.arange(y0, y0 + (ny * dy), dy)
                print(values.shape, field.shape, x0, y0, X.shape, Y.shape)
                print(X[0] + 1022485)
                print("-1022485")
                print(Y[0] + 1129331)
                print("-1129331")
                # Geopotential -> geopotential height (divide by g).
                Z = np.transpose(field) / 9.81
                N = [10, 50, 100, 250, 500, 750, 1250, 2000]
                ax = plt.axes(projection=proj)
                plt.contourf(X, Y, Z, N, transform=proj)
                plt.show()
                codes_release(gid)  # BUGFIX: release before leaving the loop
                break
            # codes_set_key_vals.
            codes_write(gid, fout)
            codes_release(gid)
    fin.close()
    fout.close()
def read_field(self):
    """Scan self.fname for the message matching self.par/lev/typ/tri and
    return (lons, lats, X, Y, dt, field) for a Lambert grid.

    Exits the process when the file is missing; returns None implicitly
    when no matching Lambert message is found.
    """
    geography = [
        "bitmapPresent", "Nx", "Ny", "latitudeOfFirstGridPointInDegrees",
        "longitudeOfFirstGridPointInDegrees", "LoVInDegrees", "DxInMetres",
        "DyInMetres", "iScansNegatively", "jScansPositively",
        "jPointsAreConsecutive", "Latin1InDegrees", "LaDInDegrees",
        "Latin2InDegrees", "latitudeOfSouthernPoleInDegrees",
        "longitudeOfSouthernPoleInDegrees", "gridType"
    ]
    if self.fname == None or not os.path.isfile(self.fname):
        print("The file " + str(self.fname) + " does not exist!")
        sys.exit(1)
    print("Reading file: " + self.fname)
    # NOTE(review): GRIB is binary — this should probably be open(..., "rb")
    # on Python 3; confirm against the eccodes version in use.
    f = open(self.fname, "r")
    while 1:
        gid = ec.codes_grib_new_from_file(f)
        if gid is None:
            break
        par = ec.codes_get(gid, "indicatorOfParameter")
        lev = ec.codes_get(gid, "level")
        typ = ec.codes_get(gid, "indicatorOfTypeOfLevel")
        tri = ec.codes_get(gid, "timeRangeIndicator")
        #print("Search::", w_par, w_lev, w_typ, w_tri)
        if self.par == par and self.lev == lev and self.typ == typ and self.tri == tri:
            print("Found:", self.par, self.lev, self.typ, self.tri)
            geo = {}
            for key in geography:
                try:
                    geo.update({key: ec.codes_get(gid, key)})
                except ec.CodesInternalError as err:
                    print('Error with key="%s" : %s' % (key, err.msg))
            print(
                'There are %d values, average is %f, min is %f, max is %f'
                % (ec.codes_get_size(gid, 'values'),
                   ec.codes_get(gid, 'average'), ec.codes_get(
                       gid, 'min'), ec.codes_get(gid, 'max')))
            # Date/time: validityDate is yyyymmdd, validityTime is hhmm.
            d = ec.codes_get(gid, "validityDate")
            t = ec.codes_get(gid, "validityTime")
            # NOTE(review): when t < 100, h is 0 and 't % h' raises
            # ZeroDivisionError; also int(t)/100 is a float on Python 3 —
            # verify midnight/whole-hour messages are handled.
            h = int(t) / 100
            m = t % h
            s = (h * 3600) + (m * 60)
            date = datetime.strptime(str(d), "%Y%m%d")
            time = timedelta(seconds=s)
            dt = date + time
            # Missing values
            mv = None
            try:
                mv = ec.codes_get(gid, "missingValue")
            except:
                print("Field does not contanin missing values")
            if geo["gridType"].lower() == "lambert":
                values = ec.codes_get_values(gid)
                nx = geo["Nx"]
                ny = geo["Ny"]
                # Lambert conformal projection parameters from the message.
                lonCenter = geo["LoVInDegrees"]
                latCenter = geo["LaDInDegrees"]
                latRef = geo["Latin2InDegrees"]
                lon0 = geo["longitudeOfFirstGridPointInDegrees"]
                lat0 = geo["latitudeOfFirstGridPointInDegrees"]
                dx = geo["DxInMetres"]
                dy = geo["DyInMetres"]
                proj4_string = "+proj=lcc +lat_0=" + str(
                    latCenter) + " +lon_0=" + str(
                        lonCenter) + " +lat_1=" + str(
                            latRef) + " +lat_2=" + str(
                                latRef) + " +no_defs +units=m +R=6.371e+06"
                proj4 = Proj(proj4_string)
                # Projected coordinates of the first grid point = origin.
                x0, y0 = proj4(lon0, lat0)
                x0 = int(round(x0))
                y0 = int(round(y0))
                field = np.empty([nx, ny])
                lons = np.empty([nx, ny])
                lats = np.empty([nx, ny])
                X = np.arange(x0, x0 + (nx * dx), dx)
                Y = np.arange(y0, y0 + (ny * dy), dy)
                # Unflatten values and compute the lon/lat of every node.
                ii = 0
                for i in range(0, nx):
                    for j in range(0, ny):
                        field[i, j] = values[ii]
                        lons[i, j], lats[i, j] = proj4(X[i], Y[j],
                                                       inverse=True)
                        # print i,j,lons[i, j], lats[i, j]
                        ii = ii + 1
                if mv is not None:
                    field[field == mv] = np.nan
                ec.codes_release(gid)
                f.close()
                return (lons, lats, X, Y, dt, field)
            else:
                print(geo["gridType"] + " not implemented yet!")
        ec.codes_release(gid)
    f.close()
def get_values(self):
    """Return the message data values; when a bitmap is present, points
    equal to the missing-value sentinel are replaced by NaN."""
    result = eccodes.codes_get_values(self.handle)
    if self.get_long("bitmapPresent"):
        result[result == CodesHandle.MISSING_VALUE] = np.nan
    return result
def field(self, gribvar, time):
    """Find the message matching *gribvar* in self.fname and return
    (field, geo_out) where field is the (nx, ny) value array and geo_out
    a surfex.geo.ConfProj geometry; returns None when no message matches.
    Only the 'lambert' grid type is implemented.
    """
    if eccodes is None:
        raise Exception("eccodes not found. Needed for reading grib files")
    """ """
    geography = ["bitmapPresent",
                 "Nx",
                 "Ny",
                 "latitudeOfFirstGridPointInDegrees",
                 "longitudeOfFirstGridPointInDegrees",
                 "LoVInDegrees",
                 "DxInMetres",
                 "DyInMetres",
                 "iScansNegatively",
                 "jScansPositively",
                 "jPointsAreConsecutive",
                 "Latin1InDegrees",
                 "LaDInDegrees",
                 "Latin2InDegrees",
                 "latitudeOfSouthernPoleInDegrees",
                 "longitudeOfSouthernPoleInDegrees",
                 "gridType"
                 ]
    geo_out = None
    # NOTE(review): GRIB is binary — open(self.fname) should probably be
    # open(self.fname, "rb") on Python 3; confirm.
    fh = open(self.fname)
    while 1:
        gid = eccodes.codes_grib_new_from_file(fh)
        if gid is None:
            # End of file reached without a match.
            print("\nCould not find key")
            gribvar.print_keys()
            fh.close()
            return None
        else:
            # print("\n Next key")
            # print_grib_id(gid)
            if gribvar.matches(gid):
                # print("Found key")
                # gribvar.print_keys()
                geo = {}
                for key in geography:
                    try:
                        geo.update({key: eccodes.codes_get(gid, key)})
                    except eccodes.CodesInternalError as err:
                        print('Error with key="%s" : %s' % (key, err.msg))
                # print('There are %d values, average is %f, min is %f, max is %f' % (
                #     codes_get_size(gid, 'values'),
                #     codes_get(gid, 'average'),
                #     codes_get(gid, 'min'),
                #     codes_get(gid, 'max')
                # ))
                if geo["gridType"].lower() == "lambert":
                    values = eccodes.codes_get_values(gid)
                    print(values)
                    nx = geo["Nx"]
                    ny = geo["Ny"]
                    # Lambert projection parameters from the message.
                    lon0 = geo["LoVInDegrees"]
                    lat0 = geo["LaDInDegrees"]
                    ll_lon = geo["longitudeOfFirstGridPointInDegrees"]
                    ll_lat = geo["latitudeOfFirstGridPointInDegrees"]
                    dx = geo["DxInMetres"]
                    dy = geo["DyInMetres"]
                    # TODO Check time consistency
                    print("Hopefullly valid for time ", time)
                    earth = 6.37122e+6
                    proj4 = "+proj=lcc +lat_0=" + str(lat0) + " +lon_0=" + str(lon0) + " +lat_1=" + \
                        str(lat0) + " +lat_2=" + str(lat0) + " +units=m +no_defs +R=" + str(earth)
                    proj = Proj(proj4)
                    # Lower-left corner in projected coordinates, then the
                    # grid centre, converted back to lon/lat for ConfProj.
                    x0, y0 = proj(ll_lon, ll_lat)
                    xc = x0 + 0.5 * (nx - 1) * dx
                    yc = y0 + 0.5 * (ny - 1) * dy
                    lonc, latc = proj(xc, yc, inverse=True)
                    field = np.reshape(values, [nx, ny], order="F")
                    if geo_out is None:
                        domain = {
                            "nam_conf_proj": {
                                "xlon0": lon0,
                                "xlat0": lat0
                            },
                            "nam_conf_proj_grid": {
                                "xloncen": lonc,
                                "xlatcen": latc,
                                "nimax": nx,
                                "njmax": ny,
                                "xdx": dx,
                                "xdy": dy,
                                "ilone": 0,
                                "ilate": 0
                            }
                        }
                        geo_out = surfex.geo.ConfProj(domain)
                else:
                    raise NotImplementedError(geo["gridType"] + " not implemented yet!")
                eccodes.codes_release(gid)
                fh.close()
                # print lons
                # print lats
                if geo_out is None:
                    raise Exception("No geometry is found in file")
                return field, geo_out
            # Message did not match: release it and keep scanning.
            eccodes.codes_release(gid)
def sendGribData(self, *, requestHandle: dreg.RequestHandle = None, userDataReqs=None):
    """Stream the fields of self.filename_ as data patches.

    First pass: collect, per timestamp and field name, the sorted list of
    top levels. Second pass: re-read the file and push each matching
    message as a SinglePatch with its level index within that list.

    Raises RuntimeError when not all topics are registered and either the
    request handle or the topic list is missing.
    """
    if not self.registerAll_ and (not requestHandle or not userDataReqs):
        raise RuntimeError(
            "If not all topics are registered, we need to pass a request handle and list of topics"
        )
    luserDataReqs = userDataReqs
    # timestamp: {fieldname: []}
    fieldsmetadata = {}
    with ecc.GribFile(self.filename_) as grib:
        # Warning do not use/print/etc len(grib), for strange reasons it will always return the same msg
        for i in range(len(grib)):
            msg = ecc.GribMessage(grib)
            fieldname = parseGrib.getGribFieldname(
                table2Version=msg["table2Version"],
                indicatorOfParameter=msg["indicatorOfParameter"],
                indicatorOfTypeOfLevel=msg["indicatorOfTypeOfLevel"],
                typeOfLevel=msg["typeOfLevel"],
                timeRangeIndicator=msg["timeRangeIndicator"])
            # fieldname2 = self.getGribFieldname(msg)
            if not fieldname:
                print(
                    'WARNING: found a grib field with no match in table : ',
                    msg['cfVarName'], msg['table2Version'],
                    msg['indicatorOfParameter'],
                    msg['indicatorOfTypeOfLevel'])
                continue
            if fieldname in [x.name for x in luserDataReqs] or self.registerAll_:
                timestamp = self.getTimestamp(msg)
                toplevel = msg["topLevel"]
                # Keep the per-(timestamp, field) level list sorted.
                levels = fieldsmetadata.setdefault(timestamp, {}).setdefault(
                    fieldname, [])
                bisect.insort(levels, toplevel)
    with ecc.GribFile(self.filename_) as grib:
        # Warning do not use/print/etc len(grib), for strange reasons it will always return the same msg
        for i in range(len(grib)):
            msg = ecc.GribMessage(grib)
            fieldname = parseGrib.getGribFieldname(
                table2Version=msg["table2Version"],
                indicatorOfParameter=msg["indicatorOfParameter"],
                indicatorOfTypeOfLevel=msg["indicatorOfTypeOfLevel"],
                typeOfLevel=msg["typeOfLevel"],
                timeRangeIndicator=msg["timeRangeIndicator"])
            # fieldname2 = self.getGribFieldname(msg)
            if not fieldname:
                print(
                    'WARNING: found a grib field with no match in table : ',
                    msg['cfVarName'], msg['table2Version'],
                    msg['indicatorOfParameter'],
                    msg['indicatorOfTypeOfLevel'])
                continue
            if self.registerAll_:
                # Only subscribe if the field was not registered yet
                requestHandle = dreg.DataRegistry.subscribeIfNotExists(
                    self, fieldname)
            assert requestHandle
            if fieldname in [x.name for x in luserDataReqs] or self.registerAll_:
                timestamp = self.getTimestamp(msg)
                levels = fieldsmetadata[timestamp][fieldname]
                requestHandle.timestamp_ = timestamp
                ni = msg['Ni']
                nj = msg['Nj']
                lord = 'F'
                if not msg['jPointsAreConsecutive'] == 0:
                    lord = 'C'
                # NOTE(review): `lord` is computed but the reshape below
                # hard-codes order='F' — looks like order=lord was
                # intended; confirm before changing.
                arr = np.reshape(ecc.codes_get_values(msg.gid), (ni, nj),
                                 order='F').astype(np.float32)
                lev = msg["topLevel"]
                level_index = levels.index(lev)
                msgkey = data.MsgKey(1, fieldname, 1, 0, timestamp, 0, 0,
                                     level_index, ni, nj, len(levels), ni,
                                     nj, msg["longitudeOfFirstGridPoint"],
                                     msg["longitudeOfLastGridPoint"],
                                     msg["latitudeOfFirstGridPoint"],
                                     msg["latitudeOfLastGridPoint"])
                self.insertDataPatch(
                    requestHandle, fieldname,
                    fieldop.SinglePatch(0, 0, ni, nj, level_index, arr),
                    msgkey)
def values(self) -> numpy.array:
    """Decoded data values of this message as a numpy array."""
    vals = eccodes.codes_get_values(self.gid)
    return vals