def scan_grib(self, **kwargs):
    gribs = []
    v_selected = kwargs['shortName']
    v_pert = kwargs.get('perturbationNumber', -1)
    if not utils.is_container(v_selected):
        v_selected = [v_selected]
    if self._grbindx:
        for v in v_selected:
            codes_index_select(self._grbindx, 'shortName', str(v))
            if v_pert != -1:
                codes_index_select(self._grbindx, 'perturbationNumber', int(v_pert))
            while 1:
                gid = codes_new_from_index(self._grbindx)
                if gid is None:
                    break
                if GRIBReader._find(gid, **kwargs):
                    gribs.append(gid)
                else:
                    # release unused grib
                    codes_release(gid)
    elif self._file_handler:
        while 1:
            gid = codes_new_from_file(self._file_handler, product_kind=CODES_PRODUCT_GRIB)
            if gid is None:
                break
            if GRIBReader._find(gid, **kwargs):
                gribs.append(gid)
            else:
                # release unused grib
                codes_release(gid)
    return gribs

def get_surface_pressure(idx):
    '''Get the surface pressure for date-time-step'''
    codes_index_select(idx, 'level', 1)
    codes_index_select(idx, 'shortName', 'lnsp')
    gid = codes_new_from_index(idx)
    if gid is None:
        raise WrongStepError()
    if codes_get(gid, 'gridType', str) == 'sh':
        print('%s [ERROR] fields must be gridded, not spectral' % sys.argv[0],
              file=sys.stderr)
        sys.exit(1)
    # surface pressure
    sfc_p = np.exp(codes_get_values(gid))
    codes_release(gid)
    return sfc_p

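# get_surface_pressure raises WrongStepError when no lnsp field is found for
# the selected step, and main() below catches it. The exception class itself
# is not shown in these snippets; a minimal definition (assumed) would be:
class WrongStepError(Exception):
    '''Exception raised when the requested step is not present in the index.'''
    pass
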
def test_grib_index_new_from_file(tmpdir):
    fpath = get_sample_fullpath("GRIB1.tmpl")
    if fpath is None:
        return
    index_keys = ["shortName", "level", "number", "step", "referenceValue"]
    iid = eccodes.codes_index_new_from_file(fpath, index_keys)
    index_file = str(tmpdir.join("temp.grib.index"))
    eccodes.codes_index_write(iid, index_file)

    key = "level"
    assert eccodes.codes_index_get_size(iid, key) == 1

    # Cannot get the native type of a key from an index
    # so right now the default is str.
    assert eccodes.codes_index_get(iid, key) == ("500", )
    assert eccodes.codes_index_get(iid, key, int) == (500, )
    assert eccodes.codes_index_get_long(iid, key) == (500, )

    key = "referenceValue"
    refVal = 47485.4
    assert eccodes.codes_index_get_double(iid, key) == (refVal, )
    assert eccodes.codes_index_get(iid, key, float) == (refVal, )

    eccodes.codes_index_select(iid, "level", 500)
    eccodes.codes_index_select(iid, "shortName", "z")
    eccodes.codes_index_select(iid, "number", 0)
    eccodes.codes_index_select(iid, "step", 0)
    eccodes.codes_index_select(iid, "referenceValue", refVal)
    gid = eccodes.codes_new_from_index(iid)

    assert eccodes.codes_get(gid, "edition") == 1
    assert eccodes.codes_get(gid, "totalLength") == 107
    eccodes.codes_release(gid)
    eccodes.codes_index_release(iid)

    iid2 = eccodes.codes_index_read(index_file)
    assert eccodes.codes_index_get(iid2, "shortName") == ("z", )
    eccodes.codes_index_release(iid2)

def cli(file_path):
    index_id = eccodes.codes_index_new_from_file(str(file_path), ["paramId"])
    eccodes.codes_index_add_file(index_id, str(file_path))

    param_size = eccodes.codes_index_get_size(index_id, "paramId")
    print("param id count:", param_size)

    param_id_list = eccodes.codes_index_get(index_id, "paramId")
    print("param id list:", param_id_list)

    eccodes.codes_index_select(index_id, "paramId", '131')
    handle = eccodes.codes_new_from_index(index_id)
    while handle is not None:
        short_name = eccodes.codes_get(handle, "shortName")
        date = eccodes.codes_get(handle, "dataDate")
        type_of_level = eccodes.codes_get(handle, "typeOfLevel")
        level = eccodes.codes_get(handle, "level")
        print(short_name, date, type_of_level, level)
        eccodes.codes_release(handle)
        handle = eccodes.codes_new_from_index(index_id)

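# Hypothetical entry point for the cli() example above; how the original wires
# up the command line (click, argparse, ...) is not shown, so this is just one
# possible way to call it:
if __name__ == "__main__":
    import sys
    cli(sys.argv[1])
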
def has_geopotential(self):
    has_geo = False
    from pyg2p.main.config import GeopotentialsConfiguration
    v_selected = GeopotentialsConfiguration.short_names
    if self._grbindx:
        for v in v_selected:
            codes_index_select(self._grbindx, 'shortName', str(v))
            while 1:
                gid = codes_new_from_index(self._grbindx)
                if gid is None:
                    break
                has_geo = True
                codes_release(gid)
    elif self._file_handler:
        while 1:
            gid = codes_new_from_file(self._file_handler, product_kind=CODES_PRODUCT_GRIB)
            if gid is None:
                break
            has_geo = True
            codes_release(gid)
    return has_geo

def main():
    '''Main function'''
    args = parse_args()

    print('Arguments: %s' % ", ".join(
        ['%s: %s' % (k, v) for k, v in vars(args).items()]))

    fout = open(args.output, 'wb')
    index_keys = ['date', 'time', 'shortName', 'level', 'step']

    idx = codes_index_new_from_file(args.z_lnsp, index_keys)
    codes_index_add_file(idx, args.t_q)
    if 'u_v' in args:
        codes_index_add_file(idx, args.u_v)

    # iterate date
    for date in codes_index_get(idx, 'date'):
        codes_index_select(idx, 'date', date)
        # iterate time
        for time in codes_index_get(idx, 'time'):
            codes_index_select(idx, 'time', time)
            values = get_initial_values(idx, keep_sample=True)
            if 'height' in args:
                values['height'] = args.height
                values['gh'] = args.height * R_G + values['z']
            if 'levelist' in args:
                values['levelist'] = args.levelist

            # iterate step for all fields but geopotential z,
            # which is always step 0 (analysis)
            for step in codes_index_get(idx, 'step'):
                codes_index_select(idx, 'step', step)
                # surface pressure
                try:
                    values['sp'] = get_surface_pressure(idx)
                    production_step(idx, values, fout)
                except WrongStepError:
                    if step != '0':
                        raise

            try:
                codes_release(values['sample'])
            except KeyError:
                pass

    codes_index_release(idx)
    fout.close()

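# main() relies on a parse_args helper that is not shown in these snippets.
# Below is a sketch built from the attributes main() reads (z_lnsp, t_q, u_v,
# output, height, levelist); the option names, defaults and help texts are
# assumptions, not the original tool's interface. default=argparse.SUPPRESS
# keeps an optional argument out of the namespace when it is not given, which
# is why main() can test for example `'u_v' in args`.
import argparse


def parse_args():
    '''Parse the command-line arguments (illustrative sketch).'''
    parser = argparse.ArgumentParser(
        description='Compute geopotential on model levels from z/lnsp, t/q '
                    '(and optionally u/v) GRIB files')
    parser.add_argument('z_lnsp', help='GRIB file with z and lnsp on level 1')
    parser.add_argument('t_q', help='GRIB file with t and q on model levels')
    parser.add_argument('--u_v', default=argparse.SUPPRESS,
                        help='optional GRIB file with u and v on model levels')
    parser.add_argument('-o', '--output', default='z_out.grib',
                        help='name of the output GRIB file')
    parser.add_argument('--height', type=float, default=argparse.SUPPRESS,
                        help='optional height used to compute gh')
    parser.add_argument('--levelist', default=argparse.SUPPRESS,
                        help='optional list of levels to process')
    return parser.parse_args()
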
def get_initial_values(idx, keep_sample=False):
    '''Get the values of surface z, pv and number of levels'''
    codes_index_select(idx, 'level', 1)
    codes_index_select(idx, 'step', 0)
    codes_index_select(idx, 'shortName', 'z')
    gid = codes_new_from_index(idx)

    values = {}
    # surface geopotential
    values['z'] = codes_get_values(gid)
    values['pv'] = codes_get_array(gid, 'pv')
    values['nlevels'] = codes_get(gid, 'NV', int) // 2 - 1
    check_max_level(idx, values)
    if keep_sample:
        values['sample'] = gid
    else:
        codes_release(gid)
    return values

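# check_max_level is called above but not defined in these snippets. A sketch
# of one plausible implementation (an assumption, not the original code): it
# compares the level count implied by the 'pv' array with the highest level
# actually present in the index, warns on a mismatch and adjusts 'nlevels'.
def check_max_level(idx, values):
    '''Warn if the index does not contain all expected model levels.'''
    # highest model level available in the index
    max_level = max(codes_index_get(idx, 'level', int))
    if max_level != values['nlevels']:
        print('%s [WARN] total levels should be: %d but it is %d' %
              (sys.argv[0], values['nlevels'], max_level),
              file=sys.stderr)
        values['nlevels'] = max_level
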
def compute_z_level(idx, lev, values, z_h):
    '''Compute z at half & full level for the given level, based on t/q/sp'''
    # select the levelist and retrieve the values of t and q
    # t_level: values for t
    # q_level: values for q
    codes_index_select(idx, 'level', lev)
    codes_index_select(idx, 'shortName', 't')
    gid = codes_new_from_index(idx)
    t_level = codes_get_values(gid)
    codes_release(gid)
    codes_index_select(idx, 'shortName', 'q')
    gid = codes_new_from_index(idx)
    q_level = codes_get_values(gid)
    codes_release(gid)

    # compute moist temperature
    t_level = t_level * (1. + 0.609133 * q_level)

    # compute the pressures (on half-levels)
    ph_lev, ph_levplusone = get_ph_levs(values, lev)

    if lev == 1:
        dlog_p = np.log(ph_levplusone / 0.1)
        alpha = np.log(2)
    else:
        dlog_p = np.log(ph_levplusone / ph_lev)
        alpha = 1. - ((ph_lev / (ph_levplusone - ph_lev)) * dlog_p)

    t_level = t_level * R_D

    # z_f is the geopotential of this full level
    # integrate from previous (lower) half-level z_h to the full level
    z_f = z_h + (t_level * alpha)

    # z_h is the geopotential of 'half-levels'
    # integrate z_h to next half level
    z_h = z_h + (t_level * dlog_p)

    return z_h, z_f

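# compute_z_level uses a get_ph_levs helper that is not shown here. A sketch,
# assuming the standard hybrid-coordinate formula p_half(k) = A(k) + B(k)*sp,
# with the A/B coefficients taken from the 'pv' array split in
# get_initial_values (nlevels = NV // 2 - 1):
def get_ph_levs(values, level):
    '''Return the half-level pressure at the given level and the one below.'''
    a_coef = values['pv'][0:values['nlevels'] + 1]
    b_coef = values['pv'][values['nlevels'] + 1:]
    ph_lev = a_coef[level - 1] + (b_coef[level - 1] * values['sp'])
    ph_levplusone = a_coef[level] + (b_coef[level] * values['sp'])
    return ph_lev, ph_levplusone
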
def update_nwp(params):
    LOG.info("METNO update nwp")

    tempfile.tempdir = params['options']['nwp_outdir']

    ecmwf_path = params['options']['ecmwf_path']
    if not os.path.exists(ecmwf_path):
        ecmwf_path = ecmwf_path.replace("storeB", "storeA")
        LOG.warning(
            "Need to replace storeB with storeA for ecmwf_path: {}".format(str(ecmwf_path)))

    filelist = glob(os.path.join(ecmwf_path, params['options']['ecmwf_prefix'] + "*"))
    if len(filelist) == 0:
        LOG.info("Found no input files! dir = " +
                 str(os.path.join(ecmwf_path, params['options']['ecmwf_prefix'] + "*")))
        return

    from trollsift import Parser, compose

    filelist.sort()
    for filename in filelist:
        if params['options']['ecmwf_file_name_sift'] is not None:
            try:
                parser = Parser(params['options']['ecmwf_file_name_sift'])
            except NoOptionError as noe:
                LOG.error("NoOptionError {}".format(noe))
                continue
            if not parser.validate(os.path.basename(filename)):
                LOG.error("Parser validate on filename: {} failed.".format(filename))
                continue
            res = parser.parse("{}".format(os.path.basename(filename)))

            time_now = datetime.utcnow()
            if 'analysis_time' in res:
                if res['analysis_time'].year == 1900:
                    # This is tricky. The filename is missing the year.
                    # Need to guess the year from a combination of the current year,
                    # the current month and the analysis month taken from the filename.
                    # If the current month is 1 (January) and the analysis month is 12,
                    # we have passed New Year and the NWP analysis time is in the previous year.
                    if time_now.month == 1 and res['analysis_time'].month == 12:
                        analysis_year = time_now.year - 1
                    else:
                        analysis_year = time_now.year

                    res['analysis_time'] = res['analysis_time'].replace(year=analysis_year)
            else:
                LOG.error("Cannot parse analysis_time in file name. "
                          "Check config and filename timestamp")

            if 'forecast_time' in res:
                if res['forecast_time'].year == 1900:
                    # See above for explanation
                    if res['analysis_time'].month == 12 and res['forecast_time'].month == 1:
                        forecast_year = res['analysis_time'].year + 1
                    else:
                        forecast_year = res['analysis_time'].year

                    res['forecast_time'] = res['forecast_time'].replace(year=forecast_year)
            else:
                LOG.error("Cannot parse forecast_time in file name. "
                          "Check config and filename timestamp")

            forecast_time = res['forecast_time']
            analysis_time = res['analysis_time']
            step_delta = forecast_time - analysis_time
            step = "{:03d}H{:02d}M".format(
                int(step_delta.days * 24 + step_delta.seconds / 3600), 0)
        else:
            LOG.error("No sift pattern given. Cannot parse input NWP files")

        if analysis_time < params['starttime']:
            # LOG.debug("Skip analysis time {} older than search time {}".format(
            #     analysis_time, params['starttime']))
            continue

        if int(step[:3]) not in params['nlengths']:
            # LOG.debug("Skip step {}, not in {}".format(int(step[:3]), params['nlengths']))
            continue

        output_parameters = {}
        output_parameters['analysis_time'] = analysis_time
        output_parameters['step_hour'] = int(step_delta.days * 24 + step_delta.seconds / 3600)
        output_parameters['step_min'] = 0
        try:
            if not os.path.exists(params['options']['nwp_outdir']):
                os.makedirs(params['options']['nwp_outdir'])
        except OSError as e:
            LOG.error("Failed to create directory: %s", e)
        result_file = ""
        try:
            result_file = os.path.join(
                params['options']['nwp_outdir'],
                compose(params['options']['nwp_output'], output_parameters))
            _result_file = os.path.join(
                params['options']['nwp_outdir'],
                compose("." + params['options']['nwp_output'], output_parameters))
            _result_file_lock = os.path.join(
                params['options']['nwp_outdir'],
                compose("." + params['options']['nwp_output'] + ".lock", output_parameters))
        except Exception as e:
            LOG.error("Joining outdir with output for nwp failed with: {}".format(e))

        LOG.info("Result file: {}".format(result_file))
        if os.path.exists(result_file):
            LOG.info("File: " + str(result_file) + " already there...")
            continue

        import fcntl
        import errno
        import time
        rfl = open(_result_file_lock, 'w+')
        # do some locking
        while True:
            try:
                fcntl.flock(rfl, fcntl.LOCK_EX | fcntl.LOCK_NB)
                LOG.debug("Got lock for NWP outfile: {}".format(result_file))
                break
            except IOError as e:
                if e.errno != errno.EAGAIN:
                    raise
                else:
                    LOG.debug("Waiting for lock ... {}".format(result_file))
                    time.sleep(1)

        if os.path.exists(result_file):
            LOG.info("File: " + str(result_file) + " already there...")
            # Need to release the lock
            fcntl.flock(rfl, fcntl.LOCK_UN)
            rfl.close()
            continue

        fout = open(_result_file, 'wb')
        try:
            # Do the static fields
            # Note: the field is not in the filename variable, but in a configured
            # filename for static fields
            static_filename = params['options']['ecmwf_static_surface']
            if not os.path.exists(static_filename):
                static_filename = static_filename.replace("storeB", "storeA")
                LOG.warning("Need to replace storeB with storeA")

            index_vals = []
            index_keys = ['paramId', 'level']
            LOG.debug("Start building index")
            LOG.debug("Handling file: %s", filename)
            iid = ecc.codes_index_new_from_file(filename, index_keys)
            filename_n1s = filename.replace('N2D', 'N1S')
            LOG.debug("Add to index %s", filename_n1s)
            ecc.codes_index_add_file(iid, filename_n1s)
            LOG.debug("Add to index %s", static_filename)
            ecc.codes_index_add_file(iid, static_filename)
            LOG.debug("Done index")

            for key in index_keys:
                key_vals = ecc.codes_index_get(iid, key)
                key_vals = tuple(x for x in key_vals if x != 'undef')
                index_vals.append(key_vals)

            for prod in product(*index_vals):
                for i in range(len(index_keys)):
                    ecc.codes_index_select(iid, index_keys[i], prod[i])

                while 1:
                    gid = ecc.codes_new_from_index(iid)
                    if gid is None:
                        break

                    param = ecc.codes_get(gid, index_keys[0])
                    parameters = [172, 129, 235, 167, 168, 137, 130, 131, 132, 133, 134, 157]
                    if param in parameters:
                        LOG.debug("Doing param: %d", param)
                        copy_needed_field(gid, fout)

                    ecc.codes_release(gid)

            ecc.codes_index_release(iid)
            fout.close()
            os.rename(_result_file, result_file)
        except WrongLengthError as wle:
            LOG.error("Something wrong with the data: %s", wle)
            raise

        # In the end release the lock
        fcntl.flock(rfl, fcntl.LOCK_UN)
        rfl.close()

        os.remove(_result_file_lock)
    return
