def _find(gid, **kwargs):
    """Return True if GRIB message *gid* matches every keyword filter.

    Each keyword value may be:
      - a plain value: matches when grib_get(gid, key) == value
      - a container:   matches when grib_get(gid, key) is in the container
      - a callable:    matches when value(grib_get(gid, key)) is truthy

    A key not defined on the message fails the match immediately.
    """
    # .items() instead of the Python-2-only .iteritems(): identical
    # iteration semantics here, and also valid on Python 3.
    for key, matcher in kwargs.items():
        if not grib_is_defined(gid, key):
            return False
        iscontainer = utils.is_container(matcher)
        iscallable = utils.is_callable(matcher)
        # Hoist the message read: the original called grib_get() up to
        # three times per key for the same value.
        actual = grib_get(gid, key)
        if iscontainer and actual in matcher:
            continue
        if iscallable and matcher(actual):
            continue
        if not iscontainer and not iscallable and actual == matcher:
            continue
        return False
    return True
def get_grib_metadata(filename, shortname, level=None):
    """Yield (gid, endStep) for each GRIB message matching shortname/level.

    Iterates the GRIB messages in *filename* and yields the message handle
    plus its "endStep" key for every message whose "cfVarName" equals
    *shortname* (and whose "level" equals *level*, when level is given).

    Side effects: updates the module-level globals ``units``,
    ``scaling_offset`` and ``scaling_factor`` according to the units of the
    matching messages (existing callers read these globals, so this
    interface is preserved).

    NOTE(review): the scaling globals are only *updated* when a conversion
    rule fires; values from a previous call may persist otherwise.
    """
    grib_get_or_none = lambda gid, key: \
        gribapi.grib_get(gid, key) if gribapi.grib_is_defined(gid, key) else None
    # GRIB is a binary format: open in binary mode (identical on POSIX,
    # required for correctness on Windows / Python 3).
    with open(filename, 'rb') as fp:
        # Iterate over the GRIB messages in the file.
        while True:
            gid = gribapi.grib_new_from_file(fp)
            # when None, there are no more messages
            if gid is None:
                break
            # grib message should have these metadata
            var_name = grib_get_or_none(gid, "cfVarName")
            if var_name != shortname or \
                    (level is not None and grib_get_or_none(gid, "level") != level):
                # Release handles we will not yield: the original leaked
                # every skipped message's ecCodes handle.
                gribapi.grib_release(gid)
                continue
            global units
            units = grib_get_or_none(gid, "units")
            # custom scaling options
            global scaling_offset
            global scaling_factor
            if units == 'K':
                # Kelvin -> Celsius
                scaling_offset = -273.15
                units = '°C'
            # converting total cloud cover for cosmo in okta
            if units == '%' and var_name == 'tcc':
                scaling_factor = 0.08
                units = 'okta'
            # converting total cloud cover for ifs-ecmwf in okta
            if units == '(0 - 1)' and var_name == 'tcc':
                scaling_factor = 8.0
                units = 'okta'
            # converting precipitations in mm
            if units == 'm' and var_name != 'hzerocl':
                scaling_factor = 0.001
                units = 'mm'
            if units == 'pa':
                # NOTE(review): GRIB pressure units are normally 'Pa'
                # (capital P); this branch may never fire -- confirm
                # against real data before relying on it.
                scaling_factor = 0.01
                units = 'hPa'
            yield gid, grib_get_or_none(gid, "endStep")
def __init__(self, gid):
    """Capture the grid-describing keys of GRIB message *gid*.

    Reads every (key, type) pair declared in ``self.keys`` that is defined
    on the message, records which ``check_for_missing_keys`` entries are
    flagged as missing, and derives the grid identity. Latitudes,
    longitudes and resolution details are computed lazily later and start
    out as None.
    """
    self._gid = gid
    geo = {}
    for key_, type_ in self.keys:
        if gribapi.grib_is_defined(gid, key_):
            # Dispatch to the typed accessor, e.g. gribapi.grib_get_long.
            getter = getattr(gribapi, 'grib_get_{}'.format(type_))
            geo[key_] = getter(gid, key_)
    self._geo_keys = geo
    missing = {}
    for key_ in self.check_for_missing_keys:
        if gribapi.grib_is_missing(gid, key_):
            missing[key_] = 'MISSING'
    self._missing_keys = missing
    self._grid_type = self._geo_keys.get('gridType')
    self._points_meridian = self._geo_keys.get('Nj')
    self._missing_value = self._geo_keys.get('missingValue')
    self._grid_id = self._build_id()
    # lazy computation
    self._lats = None
    self._longs = None
    self._grid_details_2nd = None
    self._change_resolution_step = None
def read_lsm(res_num, input_path_oifs, output_path_oifs, exp_name_oifs, num_fields):
    '''Read the OpenIFS GRIB input file into a list of value arrays.

    Returns (gribfield, lsm_id, slt_id, cl_id, gid) where gribfield holds
    the decoded values of each field, the *_id entries are the indices of
    the 'lsm', 'slt' and 'cl' fields, and gid is the list of GRIB handles.

    Raises ValueError when a required key is undefined or one of the
    required fields is absent from the file.
    '''
    input_file_oifs = input_path_oifs + 'ICMGG' + exp_name_oifs + 'INIT'
    print(' Opening GRIB input file: %s ' % (input_file_oifs, ))
    gid = [None] * num_fields
    gribfield = [None] * num_fields
    # Initialise so a missing field gives a clear error below instead of
    # the original UnboundLocalError at the return statement.
    lsm_id = slt_id = cl_id = None
    keys = ['N', 'shortName']
    # GRIB data is binary and only read here: 'rb' replaces the original
    # 'r+' (read/write text) mode.
    with open(input_file_oifs, 'rb') as f:
        for i in range(num_fields):
            gid[i] = gribapi.grib_new_from_file(f)
            if gid[i] is None:
                break
            for key in keys:
                if not gribapi.grib_is_defined(gid[i], key):
                    raise ValueError("Key '%s' was not defined" % key)
                print('%s=%s' % (key, gribapi.grib_get(gid[i], key)))
            shortName = gribapi.grib_get(gid[i], 'shortName')
            if shortName == 'lsm':
                lsm_id = i
            elif shortName == 'slt':
                slt_id = i
            elif shortName == 'cl':
                cl_id = i
            gribfield[i] = gribapi.grib_get_values(gid[i])
    missing = [name for name, idx in
               (('lsm', lsm_id), ('slt', slt_id), ('cl', cl_id))
               if idx is None]
    if missing:
        raise ValueError('Required field(s) not found in %s: %s'
                         % (input_file_oifs, ', '.join(missing)))
    return (gribfield, lsm_id, slt_id, cl_id, gid)
def get_phenomena(self):
    """Build the list of distinct phenomenon records found in the file.

    Scans every GRIB message in self.file_path, collecting a fixed set of
    descriptive keys; duplicate attributes and duplicate phenomena are
    stored only once.

    Returns the list of phenomenon dicts, or None on any error
    (best-effort behaviour kept from the original).
    """
    phen_list = []
    phenomenon = {
        "id": "",
        "attribute_count": "",
        "attributes": []
    }
    phen_keys = [
        "paramId",
        "cfNameECMF",
        "cfName",
        "cfVarName",
        "units",
        "nameECMF",
        "name"
    ]
    phen_attr = {
        "name": "",
        "value": ""
    }
    fd = None
    try:
        fd = open(self.file_path)
        while True:
            gid = gapi.grib_new_from_file(fd)
            if gid is None:
                break
            phen_attr_list = []
            attr_count = 0
            for key in phen_keys:
                if not gapi.grib_is_defined(gid, key):
                    continue
                value = str(gapi.grib_get(gid, key))
                if len(key) < util.MAX_ATTR_LENGTH \
                        and len(value) < util.MAX_ATTR_LENGTH \
                        and util.is_valid_phen_attr(value):
                    phen_attr["name"] = str(key.strip())
                    phen_attr["value"] = str(unicode(value).strip())
                    # copy() because phen_attr is reused for every key
                    if phen_attr not in phen_attr_list:
                        phen_attr_list.append(phen_attr.copy())
                    attr_count = attr_count + 1
            if len(phen_attr_list) > 0:
                new_phenomenon = phenomenon.copy()
                new_phenomenon["attributes"] = phen_attr_list
                new_phenomenon["attribute_count"] = attr_count
                if new_phenomenon not in phen_list:
                    phen_list.append(new_phenomenon)
            gapi.grib_release(gid)
        return phen_list
    except Exception:
        # best-effort API: callers treat None as "no metadata available"
        return None
    finally:
        # Fix: the original leaked the file handle on any exception
        # (fd.close() was only reached on the success path).
        if fd is not None:
            fd.close()
def get_metadata_level3(self):
    """Extract phenomena plus spatial/temporal coverage from the file.

    Returns (phen_list, geospatial_dict, temporal_dict) when coordinate
    and date keys were present, (phen_list,) when they were not, and None
    on any error (best-effort behaviour kept from the original).

    The file typically contains many records reporting the same spatial
    and temporal information; only complete distinct phenomenon records
    are stored, and the envelope is aggregated across all records.
    """
    phen_list = []
    phenomenon = {
        "id": "",
        "attribute_count": "",
        "attributes": []
    }
    phen_attr = {
        "name": "",
        "value": ""
    }
    lat_f_l = []
    lon_f_l = []
    lat_l_l = []
    lon_l_l = []
    date_d_l = []
    date_t_l = []
    phen_keys = [
        "paramId",
        "cfNameECMF",
        "cfName",
        "cfVarName",
        "units",
        "nameECMF",
        "name",
        "Ni",
        "Nj",
        "latitudeOfFirstGridPointInDegrees",
        "longitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "dataDate",
        "dataTime"
    ]
    fd = None
    try:
        fd = open(self.file_path)
        while True:
            gid = gapi.grib_new_from_file(fd)
            if gid is None:
                break
            phen_attr_list = []
            attr_count = 0
            for key in phen_keys:
                if not gapi.grib_is_defined(gid, key):
                    continue
                value = str(gapi.grib_get(gid, key))
                # Spatial/temporal keys feed the coverage envelope; all
                # the other keys become phenomenon attributes.
                if key == "latitudeOfFirstGridPointInDegrees":
                    lat_f_l.append(value)
                elif key == "longitudeOfFirstGridPointInDegrees":
                    lon_f_l.append(value)
                elif key == "latitudeOfLastGridPointInDegrees":
                    lat_l_l.append(value)
                elif key == "longitudeOfLastGridPointInDegrees":
                    lon_l_l.append(value)
                elif key == "dataDate":
                    date_d_l.append(value)
                elif key == "dataTime":
                    date_t_l.append(value)
                else:
                    if len(key) < util.MAX_ATTR_LENGTH \
                            and len(value) < util.MAX_ATTR_LENGTH \
                            and util.is_valid_phen_attr(value):
                        phen_attr["name"] = str(key.strip())
                        phen_attr["value"] = str(unicode(value).strip())
                        # copy() because phen_attr is reused for every key
                        if phen_attr not in phen_attr_list:
                            phen_attr_list.append(phen_attr.copy())
                        attr_count = attr_count + 1
            if len(phen_attr_list) > 0:
                new_phenomenon = phenomenon.copy()
                new_phenomenon["attributes"] = phen_attr_list
                new_phenomenon["attribute_count"] = attr_count
                if new_phenomenon not in phen_list:
                    phen_list.append(new_phenomenon)
            gapi.grib_release(gid)
        if lat_f_l and lon_f_l and lat_l_l and lon_l_l \
                and date_d_l and date_t_l:
            geospatial_dict = {}
            geospatial_dict["type"] = "envelope"
            temporal_dict = {}
            # Fix: the original took min()/max() of *strings*, which
            # compares lexicographically (e.g. "9.0" > "10.0"); compare
            # numerically while keeping the string values.
            lat_f = min(lat_f_l, key=float)
            lon_f = min(lon_f_l, key=float)
            lat_l = max(lat_l_l, key=float)
            lon_l = max(lon_l_l, key=float)
            # Dates/times are fixed-width numeric strings, so the original
            # lexicographic min is equivalent here and is kept.
            date_d = min(date_d_l)
            date_t = min(date_t_l)
            # Map 0..360 longitudes into the -180..180 range.
            if float(lon_l) > 180:
                lon_l = (float(lon_l) - 180) - 180
            geospatial_dict["coordinates"] = [
                [round(float(lon_f), 3), round(float(lat_f), 3)],
                [round(float(lon_l), 3), round(float(lat_l), 3)]
            ]
            temporal_dict["start_time"] = date_d
            temporal_dict["end_time"] = date_t
            return (phen_list, geospatial_dict, temporal_dict)
        else:
            return (phen_list,)
    except Exception:
        # best-effort API: any failure yields None
        return None
    finally:
        # Fix: the original leaked the file handle on any exception
        # (fd.close() was only reached on the success path).
        if fd is not None:
            fd.close()