def repack(input_file, outfile, packing_type):
    """Repack every GRIB message of *input_file* with *packing_type*.

    The repacked messages are written to *outfile*, which is overwritten.
    After writing, the result is re-read to confirm the packing type really
    changed (grib_api can silently keep the old packing).

    Parameters
    ----------
    input_file: str
        path of the GRIB file to read.
    outfile: str
        path of the GRIB file to write.
    packing_type: str
        value for the GRIB "packingType" key (e.g. "grid_second_order").

    Raises
    ------
    EncodingError
        if the re-read file does not carry the requested packing type.
    """
    # GRIB is binary: both files must be opened in binary mode.  Opening the
    # output once in "wb" replaces the original per-message "w"/"a" reopen dance.
    with open(input_file, "rb") as infile, open(outfile, "wb") as output:
        i = 1
        while True:
            in_gid = codes_grib_new_from_file(infile)
            if in_gid is None:
                break  # end of input file
            info("Repacking GRIB #{}".format(i))
            payload = codes_get_values(in_gid)
            clone_id = codes_clone(in_gid)
            try:
                codes_set(clone_id, "packingType", packing_type)
                codes_set_values(clone_id, payload)
                codes_write(clone_id, output)
            finally:
                # release handles even if an eccodes call fails mid-message
                codes_release(clone_id)
                codes_release(in_gid)
            i += 1
    # output is closed here, safe to re-read for verification
    if not confirm_packing_type(outfile, packing_type):
        raise EncodingError("Reencoding silently failed.")
def from_message(cls, message, **kwargs):
    """Build a new instance around a clone of *message*'s GRIB handle.

    Cloning decouples the new object from the lifetime of *message*;
    extra keyword arguments are forwarded to the constructor.
    """
    return cls(codes_id=eccodes.codes_clone(message.codes_id), **kwargs)
def load_messages_from_file(
        file_path: typing.Union[str, Path],
        parameter: typing.Union[str, typing.Dict],
        level_type: typing.Union[str, typing.Dict, typing.List, None] = None,
        level: typing.Union[int, float, typing.List, None] = None,
        **kwargs,
) -> typing.Optional[typing.List]:
    """
    Load multiple messages from a file.

    This function scans all messages in a GRIB 2 file and returns every
    message that fits the conditions.  Each returned handle is a clone and
    must be released by the caller with ``eccodes.codes_release()``.

    Parameters
    ----------
    file_path: str or Path
    parameter: str or typing.Dict
        see ``load_message_from_file``, required option.
    level_type: str or typing.Dict or typing.List or None
        level type.

        - string, same as ``load_message_from_file``
        - typing.List, level type should be in the list.
        - None, don't check level type.
    level: int or float or typing.List or None
        level value.

        - string, same as ``load_message_from_file``
        - typing.Dict, same as ``load_message_from_file``
        - typing.List, level value should be in the list.
        - None, don't check level value. For example, load all messages of
          some typeOfLevel.
    kwargs: dict
        other parameters

    Returns
    -------
    typing.List or None:
        a list of message handles, or None if no message is found.
    """
    # NOTE: the annotations previously used "str or Path", which Python
    # evaluates to just "str" — typing.Union expresses the intended union.
    fixed_level_type, _ = _fix_level(level_type, None)
    messages = []
    with open(file_path, "rb") as f:
        while True:
            message_id = eccodes.codes_grib_new_from_file(f)
            if message_id is None:
                break  # end of file
            # short-circuit "and" keeps the original check order:
            # parameter, then level type, then level value.
            if not (
                    _check_parameter(message_id, parameter)
                    and _check_level_type(message_id, fixed_level_type)
                    and _check_level_value(message_id, level)
            ):
                eccodes.codes_release(message_id)
                continue
            # clone so the handle outlives the open file
            new_message_id = eccodes.codes_clone(message_id)
            eccodes.codes_release(message_id)
            messages.append(new_message_id)
    return messages if messages else None
def load_message_from_file(
        file_path: typing.Union[str, Path],
        parameter: typing.Union[str, typing.Dict, None] = None,
        level_type: typing.Union[str, typing.Dict, None] = None,
        level: typing.Union[int, float, None] = None,
        **kwargs,
) -> typing.Optional[int]:
    """
    Load the **first** message from GRIB 2 file using eccodes-python library.

    Returned message is a copied one of original message and file is closed
    before return.  And the returned message should be released by user using
    ``eccodes.codes_release()``.

    Parameters
    ----------
    file_path: str or Path
        GRIB 2 file path.
    parameter: str or typing.Dict or None
        short name of the field or a dictionary including some GRIB keys:

        - discipline
        - parameterCategory
        - parameterNumber
    level_type: str or typing.Dict or None
        level type.
    level: int or float or None
        level value.
    kwargs: dict
        ignored

    Returns
    -------
    int or None
        GRIB handler (int) if found or None if not found.

    Examples
    --------
    Load 850hPa temperature from GRAPES GFS and get values from GRIB message.

    >>> t = load_message_from_file(
    ...     file_path="/g1/COMMONDATA/OPER/NWPC/GRAPES_GFS_GMF/Prod-grib/2020031721/ORIG/gmf.gra.2020031800105.grb2",
    ...     parameter="t",
    ...     level_type="isobaricInhPa",
    ...     level=850,
    ... )
    >>> data = eccodes.codes_get_double_array(t, "values")
    >>> data = data.reshape([720, 1440])
    >>> data
    array([[249.19234375, 249.16234375, 249.16234375, ..., 249.15234375,
            249.19234375, 249.14234375],
           ...,
           [235.66234375, 235.86234375, 235.82234375, ..., 235.85234375,
            235.68234375, 235.70234375]])
    """
    # NOTE: the annotations previously used "str or Path", which Python
    # evaluates to just "str" — typing.Union expresses the intended union.
    fixed_level_type, _ = _fix_level(level_type, None)
    with open(file_path, "rb") as f:
        while True:
            message_id = eccodes.codes_grib_new_from_file(f)
            if message_id is None:
                # end of file reached without a match
                return None
            if not _check_message(message_id, parameter, fixed_level_type, level):
                eccodes.codes_release(message_id)
                continue
            # clone the message so the handle survives closing the file
            new_message_id = eccodes.codes_clone(message_id)
            eccodes.codes_release(message_id)
            return new_message_id
        # (the loop above always returns; a trailing "return None" was dead code)
def clone(self):
    """Return a new ``CodesHandle`` wrapping a duplicate of this handle."""
    duplicated = eccodes.codes_clone(self.handle)
    # the clone is not backed by a file or path, hence the two None fields
    return CodesHandle(duplicated, None, None)
def from_message(cls, message, **kwargs):
    # type: (Message, T.Any) -> Message
    """Create an instance from a clone of *message*'s underlying GRIB handle.

    Extra keyword arguments are passed through to the constructor.
    """
    cloned_id = eccodes.codes_clone(message.codes_id)
    return cls(codes_id=cloned_id, **kwargs)
def main():
    """Interpolate ERA5 data to pressure levels and write GRIB output.

    For every 3-hour step between the requested start and end dates, surface
    data are copied with ``grib_copy``, then WMO-tropopause fields and the
    Z/T/RH/O3 columns are encoded from GRIB templates and appended to the
    per-date output file.

    Raises
    ------
    RuntimeError
        if the hostname is not recognised (no data directory available).
    """
    # Setting the paths according to where it runs (gort or ciclad)
    if 'gort' == socket.gethostname():
        data_dir = '/dkol/data'
    elif 'ciclad' in socket.gethostname():
        data_dir = '/data/legras/flexpart_in/STC/ERA5'
    else:
        # Fail fast: previously this only printed a warning and data_dir
        # stayed undefined, causing a NameError a few lines later.
        raise RuntimeError('unknown hostname for this program')
    SAFNWP_dir = join(data_dir, 'SAFNWP')
    MAINOUT_dir = join(SAFNWP_dir, 'HVR-LHR')
    SurfData_dir = join(SAFNWP_dir, 'LNSP-LHR')
    # Parsing the arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-y", "--year", type=int, help="year")
    parser.add_argument("-m1", "--month1", type=int,
                        choices=1 + np.arange(12), help="start month")
    parser.add_argument("-d1", "--day1", type=int,
                        choices=1 + np.arange(31), help="start day")
    parser.add_argument("-m2", "--month2", type=int,
                        choices=1 + np.arange(12), help="end month")
    parser.add_argument("-d2", "--day2", type=int,
                        choices=1 + np.arange(31), help="end day")
    # Defaults used when an option is not given on the command line
    year = 2017
    month1 = 8
    day1 = 23
    month2 = 8
    day2 = 24
    print('parsing arguments')
    args = parser.parse_args()
    if args.year is not None:
        year = args.year
    if args.month1 is not None:
        month1 = args.month1
    if args.month2 is not None:
        month2 = args.month2
    if args.day1 is not None:
        day1 = args.day1
    if args.day2 is not None:
        day2 = args.day2
    # Time range of the processing loop
    date1 = datetime(year, month1, day1, 0)
    date2 = datetime(year, month2, day2, 0)
    # Getting the GRIB templates (context managers guarantee the files close)
    gid = {}
    # Surface template
    with open('SurfData_template', 'rb') as ftpt:
        gid['surf'] = ec.codes_grib_new_from_file(ftpt)
    # Column templates: four consecutive messages in the same file
    with open('ENP_template', 'rb') as ftpt:
        for var in ['Z', 'T', 'RH', 'O3']:
            gid[var] = ec.codes_grib_new_from_file(ftpt)
    # paramId / description / unit for the WMO tropopause fields
    # (invariant — hoisted out of the time loop)
    dicwmo = {
        'pwmo': [82, 'WMO tropopause pressure', 'hPa'],
        'Twmo': [81, 'WMO tropopause temperature', 'T'],
        'zwmo': [83, 'WMO tropopause altitude', 'm**2 s**-2']
    }
    # Template grid size is also loop-invariant
    nx = ec.codes_get(gid['surf'], 'Ni')
    ny = ec.codes_get(gid['surf'], 'Nj')
    # Time loop
    date = date1
    while date < date2:
        # Building the interpolated data
        dat4 = Pinterpol(date)
        # Transform the geopotential into geopotential altitude
        # (best effort: skip when Z is absent; narrowed from a bare except)
        try:
            dat4.var['Z'] *= cst.g
        except Exception:
            pass
        # Calculate the relative humidity
        dat4.var['RH'] = 100 * np.array(pressPa)[:, None, None] * eq(
            dat4.var['Q']) / ew(dat4.var['T'])
        # Defining output file
        file_out = join(MAINOUT_dir, date.strftime('%Y/%m'),
                        date.strftime('ENP%y%m%d%H'))
        # Defining file hosting the surface data
        file_surf = join(SurfData_dir, date.strftime('%Y'),
                         date.strftime('LNSP%y%m%d'))
        # Copying surface data to the output file.
        # subprocess.call does NOT raise on a non-zero exit status, so the
        # original try/except never triggered the fallback; test the return
        # code instead and retry with the '.grb'-suffixed file name.
        status = call([
            'grib_copy', '-w', 'hour=' + str(date.hour), file_surf, file_out
        ])
        if status != 0:
            call([
                'grib_copy', '-w', 'hour=' + str(date.hour),
                file_surf + '.grb', file_out
            ])
        # Open the output file in append mode
        with open(file_out, 'ab') as fout:
            # Add first the tropopause data
            dat4.d2d['pwmo'] /= 100    # Pa -> hPa
            dat4.d2d['zwmo'] *= cst.g  # altitude -> geopotential
            for var in ['Twmo', 'pwmo', 'zwmo']:
                clone_id = ec.codes_clone(gid['surf'])
                ec.codes_set(clone_id, 'paramId', dicwmo[var][0])
                ec.codes_set(clone_id, 'dataDate',
                             10000 * date.year + 100 * date.month + date.day)
                ec.codes_set(clone_id, 'hour', date.hour)
                # [::-1, :] flips latitudes to match the template ordering
                ec.codes_set_values(
                    clone_id, np.reshape(dat4.d2d[var][::-1, :], nx * ny))
                ec.codes_write(clone_id, fout)
                ec.codes_release(clone_id)
            # Add now the data on pressure levels
            for ll in range(len(pressures)):
                for var in ['Z', 'T', 'RH', 'O3']:
                    clone_id = ec.codes_clone(gid[var])
                    nxv = ec.codes_get(gid[var], 'Ni')
                    nyv = ec.codes_get(gid[var], 'Nj')
                    ec.codes_set(clone_id, 'lev', pressures[ll])
                    ec.codes_set(clone_id, 'dataDate',
                                 10000 * date.year + 100 * date.month
                                 + date.day)
                    ec.codes_set(clone_id, 'hour', date.hour)
                    ec.codes_set_values(
                        clone_id,
                        np.reshape(dat4.var[var][ll, ::-1, :], nxv * nyv))
                    ec.codes_write(clone_id, fout)
                    ec.codes_release(clone_id)
        print('processed ', date)
        date += timedelta(hours=3)