def get_phenomena(self):

    phenomenon = {
        "id": "",
        "attribute_count": "",
        "attributes": []
    }

    phen_attr = {
        "name": "",
        "value": ""
    }

    try:
        self.handler_id = "pp handler level 2."
        pp_file_content = cdms.open(self.file_path)
        var_ids = pp_file_content.listvariables()

        # Filter long values and overwrite duplicates.
        phen_list = []
        for var_id in var_ids:
            metadata_dict = pp_file_content[var_id].attributes
            phen_attr_list = []
            attr_count = 0
            for key in metadata_dict.keys():
                value = str(metadata_dict[key])

                if len(key) < util.MAX_ATTR_LENGTH \
                        and len(value) < util.MAX_ATTR_LENGTH \
                        and util.is_valid_phen_attr(value):
                    phen_attr["name"] = str(key.strip())
                    phen_attr["value"] = str(unicode(value).strip())
                    # Copy, because phen_attr is reused on every iteration.
                    phen_attr_list.append(phen_attr.copy())
                    attr_count += 1

            # Build the phenomenon from the collected attributes.
            if len(phen_attr_list) > 0:
                new_phenomenon = phenomenon.copy()
                new_phenomenon["attributes"] = phen_attr_list
                new_phenomenon["attribute_count"] = attr_count

                # Append only if this phenomenon is not already listed.
                if new_phenomenon not in phen_list:
                    phen_list.append(new_phenomenon)

        pp_file_content.close()
        return (phen_list,)
    except Exception:
        return None
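# All of the handlers in this module lean on a small util module
# (util.MAX_ATTR_LENGTH, util.is_valid_phen_attr) that is not shown here.
# Below is a minimal sketch of what those helpers might look like; the
# length cap and the validity heuristics are assumptions, not the
# project's actual implementation.
MAX_ATTR_LENGTH = 256  # assumed cap on attribute name/value length

def is_valid_phen_attr(value):
    # Reject values unlikely to be useful as search facets: None, empty
    # strings, and strings containing control characters (assumed rules).
    if value is None:
        return False
    value = str(value)
    if not value.strip():
        return False
    if any(ord(ch) < 32 and ch not in "\t\n\r" for ch in value):
        return False
    return True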
def get_phenomena(self, netcdf):
    """
    Construct a list of phenomena from the variables in a NetCDF file.
    :returns: List of phenomenon dictionaries.
    """
    phen_list = []
    phenomenon = {
        "id": "",
        "attribute_count": "",
        "attributes": []
    }

    phen_attr = {
        "name": "",
        "value": ""
    }

    # For every phenomenon (variable) in the file.
    for v_name, v_data in netcdf.variables.iteritems():
        phen_attr_list = []

        # For every attribute of the phenomenon.
        attr_count = 0
        for key, value in v_data.__dict__.iteritems():
            if not self.is_valid_parameter(key, value) \
                    or not util.is_valid_phen_attr(value):
                continue

            phen_attr["name"] = str(key.strip())
            phen_attr["value"] = str(unicode(value).strip())
            phen_attr_list.append(phen_attr.copy())
            attr_count += 1

        # Record the variable name itself as a "var_id" attribute.
        phen_attr["name"] = "var_id"
        phen_attr["value"] = str(v_name)
        phen_attr_list.append(phen_attr.copy())
        attr_count += 1

        if len(phen_attr_list) > 0:
            new_phenomenon = phenomenon.copy()
            new_phenomenon["attributes"] = phen_attr_list
            new_phenomenon["attribute_count"] = attr_count

            phen_list.append(new_phenomenon)

    return phen_list
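# A standalone sketch of the same per-variable walk using the netCDF4
# library directly (the handler above receives an already-open dataset).
# The function name and path are hypothetical; the output shape mirrors
# the handler's phenomenon dictionaries.
import netCDF4

def list_netcdf_phenomena(path):
    dataset = netCDF4.Dataset(path)
    try:
        phen_list = []
        for v_name, v_data in dataset.variables.items():
            attrs = [{"name": k, "value": str(v_data.getncattr(k))}
                     for k in v_data.ncattrs()]
            attrs.append({"name": "var_id", "value": str(v_name)})
            phen_list.append({"id": "",
                              "attribute_count": len(attrs),
                              "attributes": attrs})
        return phen_list
    finally:
        dataset.close()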
def phenomena(self):
    try:
        na_fhandle = nappy.openNAFile(self.file_path)

        variables = {}
        for var in na_fhandle.getVariables():
            if util.is_valid_phen_attr(var[1]):
                variables.update({
                    var[0]: {
                        "name": var[0],
                        "units": var[1]
                    }
                })

        variables = [util.Parameter(k, other_params=var)
                     for (k, var) in variables.iteritems()]
        return variables
    except Exception:
        return None
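# Hedged usage sketch: as the handler above assumes, nappy's
# getVariables() yields tuples whose first two elements are the variable
# name and units. The file path is hypothetical.
import nappy

na = nappy.openNAFile("/path/to/file.na")
for var in na.getVariables():
    print("%s [%s]" % (var[0], var[1]))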
def get_phenomena(self):

    phen_list = []
    phenomenon = {
        "id": "",
        "attribute_count": "",
        "attributes": []
    }

    phen_keys = [
        "paramId",
        "cfNameECMF",
        "cfName",
        "cfVarName",
        "units",
        "nameECMF",
        "name"
    ]

    phen_attr = {
        "name": "",
        "value": ""
    }

    try:
        fd = open(self.file_path)

        while True:
            gid = gapi.grib_new_from_file(fd)
            if gid is None:
                break

            phen_attr_list = []
            attr_count = 0
            for key in phen_keys:
                if not gapi.grib_is_defined(gid, key):
                    continue

                value = str(gapi.grib_get(gid, key))

                if len(key) < util.MAX_ATTR_LENGTH \
                        and len(value) < util.MAX_ATTR_LENGTH \
                        and util.is_valid_phen_attr(value):
                    phen_attr["name"] = str(key.strip())
                    phen_attr["value"] = str(unicode(value).strip())
                    # Count only attributes that are actually stored.
                    if phen_attr not in phen_attr_list:
                        phen_attr_list.append(phen_attr.copy())
                        attr_count += 1

            if len(phen_attr_list) > 0:
                new_phenomenon = phenomenon.copy()
                new_phenomenon["attributes"] = phen_attr_list
                new_phenomenon["attribute_count"] = attr_count

                if new_phenomenon not in phen_list:
                    phen_list.append(new_phenomenon)

            gapi.grib_release(gid)

        fd.close()
        return phen_list
    except Exception:
        return None
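# A standalone sketch of the GRIB message-iteration pattern used above,
# with the gribapi bindings imported as gapi to match the handler. The
# function name, path, and key list are placeholders.
import gribapi as gapi

def grib_key_values(path, keys):
    records = []
    with open(path) as fd:
        while True:
            gid = gapi.grib_new_from_file(fd)
            if gid is None:
                break
            # Read only the keys each message actually defines.
            records.append(dict((k, gapi.grib_get(gid, k))
                                for k in keys if gapi.grib_is_defined(gid, k)))
            gapi.grib_release(gid)
    return records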
def get_metadata_level3(self):

    phen_list = []
    phenomenon = {
        "id": "",
        "attribute_count": "",
        "attributes": []
    }

    phen_attr = {
        "name": "",
        "value": ""
    }

    lat_f_l = []
    lon_f_l = []
    lat_l_l = []
    lon_l_l = []
    date_d_l = []
    date_t_l = []

    phen_keys = [
        "paramId",
        "cfNameECMF",
        "cfName",
        "cfVarName",
        "units",
        "nameECMF",
        "name",
        "Ni",
        "Nj",
        "latitudeOfFirstGridPointInDegrees",
        "longitudeOfFirstGridPointInDegrees",
        "latitudeOfLastGridPointInDegrees",
        "longitudeOfLastGridPointInDegrees",
        "dataDate",
        "dataTime"
    ]

    try:
        fd = open(self.file_path)

        while True:
            gid = gapi.grib_new_from_file(fd)
            if gid is None:
                break

            phen_attr_list = []
            attr_count = 0
            for key in phen_keys:
                if not gapi.grib_is_defined(gid, key):
                    continue

                value = str(gapi.grib_get(gid, key))

                # The file contains many records, but they all report the
                # same spatial and temporal information. Only complete,
                # distinct records are stored, i.e. those that contain the
                # full list of parameters and are unique. If every record
                # had different spatial or temporal values, the index
                # would need to change, because there is currently only
                # one geo_shape field.
                if key == "latitudeOfFirstGridPointInDegrees":
                    lat_f_l.append(value)
                elif key == "longitudeOfFirstGridPointInDegrees":
                    lon_f_l.append(value)
                elif key == "latitudeOfLastGridPointInDegrees":
                    lat_l_l.append(value)
                elif key == "longitudeOfLastGridPointInDegrees":
                    lon_l_l.append(value)
                elif key == "dataDate":
                    date_d_l.append(value)
                elif key == "dataTime":
                    date_t_l.append(value)
                else:
                    if len(key) < util.MAX_ATTR_LENGTH \
                            and len(value) < util.MAX_ATTR_LENGTH \
                            and util.is_valid_phen_attr(value):
                        phen_attr["name"] = str(key.strip())
                        phen_attr["value"] = str(unicode(value).strip())
                        # Count only attributes that are actually stored.
                        if phen_attr not in phen_attr_list:
                            phen_attr_list.append(phen_attr.copy())
                            attr_count += 1

            if len(phen_attr_list) > 0:
                new_phenomenon = phenomenon.copy()
                new_phenomenon["attributes"] = phen_attr_list
                new_phenomenon["attribute_count"] = attr_count

                if new_phenomenon not in phen_list:
                    phen_list.append(new_phenomenon)

            gapi.grib_release(gid)

        fd.close()

        if len(lat_f_l) > 0 \
                and len(lon_f_l) > 0 \
                and len(lat_l_l) > 0 \
                and len(lon_l_l) > 0 \
                and len(date_d_l) > 0 \
                and len(date_t_l) > 0:
            geospatial_dict = {}
            geospatial_dict["type"] = "envelope"

            temporal_dict = {}

            lat_f = min(lat_f_l)
            lon_f = min(lon_f_l)
            lat_l = max(lat_l_l)
            lon_l = max(lon_l_l)
            date_d = min(date_d_l)
            date_t = min(date_t_l)

            # Wrap longitudes above 180 into the (-180, 180] range.
            if float(lon_l) > 180:
                lon_l = (float(lon_l) - 180) - 180

            geospatial_dict["coordinates"] = [
                [round(float(lon_f), 3), round(float(lat_f), 3)],
                [round(float(lon_l), 3), round(float(lat_l), 3)]
            ]

            temporal_dict["start_time"] = date_d
            temporal_dict["end_time"] = date_t

            return (phen_list, geospatial_dict, temporal_dict)
        else:
            return (phen_list,)
    except Exception:
        return None
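# The wrap used above, (lon - 180) - 180, is just lon - 360: it maps
# longitudes in (180, 360] onto (-180, 0]. A quick self-check:
def wrap_lon(lon):
    return lon - 360.0 if lon > 180.0 else lon

assert wrap_lon(350.0) == -10.0
assert wrap_lon(360.0) == 0.0
assert wrap_lon(90.0) == 90.0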
def get_metadata_pp_level3(self):

    phenomenon = {
        "id": "",
        "attribute_count": "",
        "attributes": []
    }

    phen_attr = {
        "name": "",
        "value": ""
    }

    spatial = None

    # Get basic file info.
    file_info = self.get_metadata_generic_level1()

    if file_info is not None:
        try:
            self.handler_id = "pp handler level 3."

            lat_l = []
            lat_u = []
            lon_l = []
            lon_u = []
            start_time_l = []
            end_time_l = []

            pp_file_content = cdms.open(self.file_path)
            var_ids = pp_file_content.listvariables()

            phen_list = []
            for var_id in var_ids:
                metadata_dict = pp_file_content[var_id].attributes
                phen_attr_list = []
                attr_count = 0
                for key in metadata_dict.keys():
                    value = str(metadata_dict[key])

                    if len(key) < util.MAX_ATTR_LENGTH \
                            and len(value) < util.MAX_ATTR_LENGTH \
                            and util.is_valid_phen_attr(value):
                        phen_attr["name"] = str(key.strip())
                        phen_attr["value"] = str(unicode(value).strip())
                        phen_attr_list.append(phen_attr.copy())
                        attr_count += 1

                # Dict of phenomenon attributes.
                if len(phen_attr_list) > 0:
                    new_phenomenon = phenomenon.copy()
                    new_phenomenon["attributes"] = phen_attr_list
                    new_phenomenon["attribute_count"] = attr_count

                    phen_list.append(new_phenomenon)

                try:
                    spatial = self.getBoundingBox(var_id, pp_file_content)
                    temporal = self.getTemporalDomain(var_id, pp_file_content)

                    # Geospatial data.
                    lon_l.append(spatial[0])
                    lat_l.append(spatial[1])
                    lon_u.append(spatial[2])
                    lat_u.append(spatial[3])

                    # Temporal data.
                    start_time_l.append(temporal[0])
                    end_time_l.append(temporal[1])
                except Exception:
                    continue

            if len(lat_l) > 0 \
                    and len(lon_l) > 0 \
                    and len(lat_u) > 0 \
                    and len(lon_u) > 0:
                min_lon_l = self.normalize_lon(min(lon_l))
                min_lat_l = self.normalize_lat(min(lat_l))
                max_lon_u = self.normalize_lon(max(lon_u))
                max_lat_u = self.normalize_lat(max(lat_u))

                spatial = {"coordinates":
                           {"type": "envelope",
                            "coordinates": [[round(min_lon_l, 3), round(min_lat_l, 3)],
                                            [round(max_lon_u, 3), round(max_lat_u, 3)]]}}

            # Validate the dates before recording the temporal extent.
            if len(start_time_l) > 0 \
                    and len(end_time_l) > 0:
                min_time = min(start_time_l)
                max_time = max(end_time_l)
                if util.is_date_valid(min_time.split("T")[0]) \
                        and util.is_date_valid(max_time.split("T")[0]):
                    file_info[0]["info"]["temporal"] = {"start_time": min_time,
                                                        "end_time": max_time}

            pp_file_content.close()
            return file_info + (phen_list, spatial,)
        except Exception:
            return file_info
    else:
        return None
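# normalize_lon and normalize_lat are referenced above but not shown; a
# plausible sketch, offered as an assumption rather than the project's
# actual implementation:
def normalize_lon(lon):
    # Fold any longitude into (-180, 180].
    lon = float(lon) % 360.0
    return lon - 360.0 if lon > 180.0 else lon

def normalize_lat(lat):
    # Clamp latitude to the valid [-90, 90] range.
    return max(-90.0, min(90.0, float(lat)))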