def read_amsr_hdf4(filename):
    """Read an AMSR-E HDF4 file into an AmsrObject.

    Reads the 2-D SDS fields ``Longitude``, ``Latitude`` and
    ``High_res_cloud`` (the latter multiplied by its 'Scale' attribute
    and stored as ``lwp_mm``), plus the 1-D Vdata field ``Time``
    (stored as ``sec1993`` — presumably seconds since 1993; TODO confirm
    against the AMSR-E product spec).

    Parameters
    ----------
    filename : str
        Path to the AMSR-E HDF4 file.

    Returns
    -------
    AmsrObject
        With ``all_arrays`` keys: longitude, latitude, lwp_mm, sec1993.
    """
    from pyhdf.SD import SD, SDC
    from pyhdf.HDF import HDF, HC
    import pyhdf.VS

    retv = AmsrObject()

    # --- 2-D scientific datasets (SD interface) ---
    sd_file = SD(filename, SDC.READ)
    try:
        for sds in ["Longitude", "Latitude", "High_res_cloud"]:
            data = sd_file.select(sds).get()
            if sds in ["Longitude", "Latitude"]:
                retv.all_arrays[sds.lower()] = data.ravel()
            elif sds in ["High_res_cloud"]:
                # Apply the dataset's scale factor to obtain liquid water
                # path in mm.
                lwp_gain = sd_file.select(sds).attributes()['Scale']
                retv.all_arrays["lwp_mm"] = data.ravel() * lwp_gain
    finally:
        # FIX: the SD handle was never closed in the original.
        sd_file.end()

    # --- 1-D compound/Vdata fields (VS interface) ---
    # FIX: open the HDF (Vdata) interface with HC.READ; the original
    # passed SDC.READ (same numeric value, but HC is the documented
    # constant for this API and was already imported).
    h4file = HDF(filename, HC.READ)
    try:
        vs = h4file.vstart()
        for item in vs.vdatainfo():
            name = item[0]
            # Only "Time" is used — skip everything else to save RAM.
            if name in ["Time"]:
                data_handle = vs.attach(name)
                retv.all_arrays["sec1993"] = np.array(data_handle[:])
                data_handle.detach()
    finally:
        h4file.close()
    return retv
# NOTE(review): this is a second, duplicate definition of read_amsr_hdf4 —
# at import time it silently replaces the earlier one. Keep only one.
def read_amsr_hdf4(filename):
    """Read an AMSR-E HDF4 file into an AmsrObject.

    Extracts the SDS fields ``Longitude``/``Latitude`` (raveled), the
    scaled ``High_res_cloud`` field (stored as ``lwp_mm``) and the
    ``Time`` Vdata (stored as ``sec1993``).

    Parameters
    ----------
    filename : str
        Path to the AMSR-E HDF4 file.

    Returns
    -------
    AmsrObject
        With ``all_arrays`` keys: longitude, latitude, lwp_mm, sec1993.
    """
    from pyhdf.SD import SD, SDC
    # FIX: HC was commented out but is needed to open the HDF/VS interface
    # with its own constant (the original passed SDC.READ to HDF()).
    from pyhdf.HDF import HDF, HC
    import pyhdf.VS

    retv = AmsrObject()

    sd_file = SD(filename, SDC.READ)
    try:
        for sds in ["Longitude", "Latitude", "High_res_cloud"]:
            data = sd_file.select(sds).get()
            if sds in ["Longitude", "Latitude"]:
                retv.all_arrays[sds.lower()] = data.ravel()
            elif sds in ["High_res_cloud"]:
                # Scale raw counts into liquid water path (mm).
                lwp_gain = sd_file.select(sds).attributes()['Scale']
                retv.all_arrays["lwp_mm"] = data.ravel() * lwp_gain
    finally:
        # FIX: close the SD handle (was leaked in the original).
        sd_file.end()

    h4file = HDF(filename, HC.READ)
    try:
        vs = h4file.vstart()
        for item in vs.vdatainfo():  # 1-D compound/Vdata fields
            name = item[0]
            if name in ["Time"]:
                data_handle = vs.attach(name)
                retv.all_arrays["sec1993"] = np.array(data_handle[:])
                data_handle.detach()
    finally:
        h4file.close()
    return retv
def read_cloudsat_hdf4(filename):
    """Read a CloudSat HDF4 granule into a CloudsatObject.

    Reads every 2-D SDS field and every 1-D Vdata field whose
    converted name (via ``clsat_name_conversion``) matches a slot in
    ``retv.all_arrays``, then derives ``sec_1970`` (UTC seconds since
    1970) from the TAI profile times.

    Parameters
    ----------
    filename : str
        Path to the CloudSat HDF4 granule.

    Returns
    -------
    CloudsatObject

    Raises
    ------
    MatchupError
        If a Vdata field carries a non-default scale factor or offset
        (scaling support is intentionally not implemented).
    """
    from pyhdf.SD import SD, SDC
    from pyhdf.HDF import HDF, HC
    import pyhdf.VS

    def convert_data(data):
        # Squeeze degenerate (n, 1) / (1, n) 2-D arrays down to 1-D.
        if len(data.shape) == 2:
            if data.shape[1] == 1:
                return data[:, 0]
            elif data.shape[0] == 1:
                return data[0, :]
        return data

    retv = CloudsatObject()

    # --- 2-D scientific datasets (SD interface) ---
    sd_file = SD(filename, SDC.READ)
    try:
        for sds in sd_file.datasets().keys():
            data = sd_file.select(sds).get()
            am_name = clsat_name_conversion(sds, retv)
            if am_name in retv.all_arrays.keys():
                retv.all_arrays[am_name] = convert_data(data)
    finally:
        # FIX: the SD handle was never closed in the original.
        sd_file.end()

    # --- 1-D compound/Vdata fields (VS interface) ---
    # FIX: open with HC.READ (the documented constant for the HDF/VS
    # interface); the original passed SDC.READ.
    h4file = HDF(filename, HC.READ)
    try:
        vs = h4file.vstart()
        for item in vs.vdatainfo():
            name = item[0]
            data_handle = vs.attach(name)
            data = np.array(data_handle[:])
            factor = data_handle.findattr('factor')
            offset = data_handle.findattr('offset')
            am_name = clsat_name_conversion(name, retv)
            if am_name in retv.all_arrays.keys():
                # To save RAM and disk only read what we use!
                # FIX: np.float was removed in NumPy >= 1.24 — use the
                # builtin float. Also handle the case where only ONE of
                # factor/offset is present (the original called .get()
                # on None and crashed).
                factor_val = 1.0 if factor is None else float(factor.get())
                offset_val = 0.0 if offset is None else float(offset.get())
                if factor_val == 1.0 and offset_val == 0.0:
                    retv.all_arrays[am_name] = convert_data(data)
                else:
                    # Scaling deliberately unimplemented; fail loudly
                    # rather than return wrong values.
                    raise MatchupError("Not default offset and factor. Fix code")
            data_handle.detach()
    finally:
        h4file.close()

    # Convert from TAI time (seconds since 1993-01-01) to UTC seconds
    # since 1970, using the local epoch offset.
    dsec = time.mktime((1993, 1, 1, 0, 0, 0, 0, 0, 0)) - time.timezone
    retv.sec_1970 = retv.Profile_time.ravel() + retv.TAI_start + dsec
    return retv