def read_data(self): # open a new netCDF file for reading. ncfile = NetCDFFile(self.file,'r') dimNames = ncfile.dimensions.keys() variableNames = ncfile.variables.keys() #print dimNames #print variableNames # Get the geolocation data intime = ncfile.variables['initial_time0_hours'] inlatitude = ncfile.variables['g0_lat_2'] inlongitude = ncfile.variables['g0_lon_3'] self.intime_size = N.size(intime) self.inlat_size = N.size(inlatitude) self.inlon_size = N.size(inlongitude) #print 'original time size = ', self.intime_size #print 'original latitude size = ', self.inlat_size #print 'original longitude size =', self.inlon_size # Here we have to 'expand out' lat, lon and time so that the # middle end is permitted to function as it does for irregular # grids. This involves quite a bit of repititive values and # increased memory consumption. new_var_time = N.array([]) new_var_lat = N.array([]) new_var_lon = N.array([]) coloc_param_size = self.intime_size * self.inlat_size * self.inlon_size new_var_time.resize(coloc_param_size) new_var_lat.resize(coloc_param_size) new_var_lon.resize(coloc_param_size) #print 'new time size = ', new_var_time.size #print 'new latitude size = ', new_var_lat.size #print 'new longitude size =', new_var_lon.size # Now that we have both the original representation of the geolocation params # and a properly-sized location to put their expanded representations - do the # expansion i = j = k = m = 0 for i in range(0, self.intime_size): # Convert hours since 01/01/1900 to unix time tk = UT.hours_since_19th_century_to_unix_time(intime[i]) for j in range(0, self.inlat_size): lat_tk = inlatitude[j] for k in range(0, self.inlon_size): lon_tk = inlongitude[k] new_var_time[m] = tk new_var_lat[m] = lat_tk new_var_lon[m] = lon_tk #print m, ": ", tk, ", ", lat_tk, ", ", lon_tk # Show leading edge of expanded colocation params #if(m < 242): #print m, ": ", tk, ", ", lat_tk, ", ", lon_tk m += 1 self.time = new_var_time self.latitude = new_var_lat ### convert to 
(-180,180) so it's consistent with CloudSat ### self.longitude = new_var_lon self.longitude = N.where(new_var_lon > 180.0, new_var_lon - 360.0, new_var_lon) # End of colocation parameter expansion for regular grids level_name = ['lv_ISBL1'] for i in range(len(level_name)): local_level = ncfile.variables[level_name[i]] level_data = local_level.getValue() attList = dir(local_level) attribute = {} for j in attList: if(j!='assignValue' and j!='getValue' and j!='typecode'): attValue = getattr(local_level, j) attribute[j] = attValue # add dimension name to the attribute attribute['dimension1']='isobaric_level' # Store (attribute, data) in the level dictionary self.levels[level_name[i]]=(attribute, level_data) #print 'new time size = ', N.size(self.time) #print 'new latitude size = ', N.size(self.latitude) #print 'new longitude size =', N.size(self.longitude) for i in range(len(variableNames)): if(variableNames[i]!='initial_time0_encoded' and variableNames[i]!='initial_time0_hours' and variableNames[i]!='lv_ISBL1' and variableNames[i]!='g0_lat_2' and variableNames[i]!='g0_lon_3'): local_var = ncfile.variables[variableNames[i]] # get data of the variable local_data = N.array(local_var.getValue()) # get attributes of the variable attList = dir(local_var) attribute = {} for j in attList: if(j!='assignValue' and j!='getValue' and j!='typecode'): attValue = getattr(local_var, j) attribute[j]=attValue # add dimension 1 name to attribute attribute['dimension1'] = 'isobaric_level' # Fill in UT.NAN where data is the value defined by attribute '_FillValue' if(N.size(local_data.shape) == 3): fill_value = N.where(local_data == attribute['_FillValue']) local_data[fill_value] = UT.NAN # remove unnecessary attribute since we already applied it del attribute['_FillValue'] # Now we reshaped have to reshape to 1D for the middle end oneD_data = N.reshape(local_data, (local_data.shape[0]*local_data.shape[1]*local_data.shape[2])) # store (attribute, data) in the data dictionary 
self.data[variableNames[i]]=(attribute, oneD_data) #print "attribute = ", attribute #print "data = ", oneD_data # Fill in UT.NAN where data is the value defined by attribute '_FillValue' elif(N.size(local_data.shape) == 4): fill_value = N.where(local_data == attribute['_FillValue']) local_data[fill_value] = UT.NAN # remove unnecessary attribute since we already applied it del attribute['_FillValue'] # Now we have to swap the axis of array to make the p-level to be the last axis local_data = N.swapaxes(local_data, 1, 2) # swap the axes of p-level and latitude local_data = N.swapaxes(local_data, 2, 3) # swap the axes of p-level and longitude # Now the array has the axis in this order (time, lat, lon, p-level) # Now we have to reshape the four-dimensional array to 2-dimensional array for middle end print local_data.shape twoD_data = N.reshape(local_data, (local_data.shape[0]*local_data.shape[1]*local_data.shape[2],local_data.shape[3])) # store (attribute, data) in the data dictionary self.data[variableNames[i]]=(attribute, twoD_data) #print "attribute = ", attribute #print "data = ", oneD_data else: print 'Warning: Unsupported shape detected for ', variableNames[i], ' ... skipping' ncfile.close()
def read_data(self):
    """Read a regular-grid, single-level netCDF file (self.file).

    Populates:
      self.time, self.latitude, self.longitude -- 1-D arrays of length
          n_time * n_lat * n_lon; the geolocation is 'expanded out' so the
          middle end can treat this regular grid like an irregular one
          (repetitive values, increased memory consumption).
      self.data -- {var_name: (attribute_dict, oneD_data)}

    NOTE(review): every non-coordinate variable is assumed to be 3-D
    (time, lat, lon); a different rank makes the final reshape raise.
    """
    # Open the netCDF file for reading.
    ncfile = NetCDFFile(self.file, 'r')
    variableNames = ncfile.variables.keys()

    # Get the geolocation coordinate variables.
    intime = ncfile.variables['initial_time0_hours']
    inlatitude = ncfile.variables['g0_lat_1']
    inlongitude = ncfile.variables['g0_lon_2']
    self.intime_size = N.size(intime)
    self.inlat_size = N.size(inlatitude)
    self.inlon_size = N.size(inlongitude)

    # Pre-size the expanded colocation arrays (float64).
    coloc_param_size = self.intime_size * self.inlat_size * self.inlon_size
    new_var_time = N.zeros(coloc_param_size, 'd')
    new_var_lat = N.zeros(coloc_param_size, 'd')
    new_var_lon = N.zeros(coloc_param_size, 'd')

    # Expand (time, lat, lon) into three flat, aligned arrays.  The nesting
    # order (time outermost, lon innermost) must match the reshape order of
    # the data variables below.
    m = 0
    for i in range(self.intime_size):
        # Convert hours since 01/01/1900 to unix time once per time step.
        tk = UT.hours_since_19th_century_to_unix_time(intime[i])
        for j in range(self.inlat_size):
            lat_tk = inlatitude[j]
            for k in range(self.inlon_size):
                new_var_time[m] = tk
                new_var_lat[m] = lat_tk
                new_var_lon[m] = inlongitude[k]
                m += 1
    self.time = new_var_time
    self.latitude = new_var_lat
    # Convert longitude to (-180, 180] so it's consistent with CloudSat.
    self.longitude = N.where(new_var_lon > 180.0, new_var_lon - 360.0,
                             new_var_lon)
    # End of colocation parameter expansion for regular grids.

    # Read every remaining variable, replace fill values with NAN and
    # flatten to 1-D for the middle end.
    for var_name in variableNames:
        if (var_name == 'initial_time0_encoded'
                or var_name == 'initial_time0_hours'
                or var_name == 'g0_lat_1'
                or var_name == 'g0_lon_2'):
            continue  # coordinate variables already handled above
        local_var = ncfile.variables[var_name]
        # Get the data of the variable.
        local_data = N.array(local_var.getValue())
        # Get the attributes of the variable, skipping the netCDF variable
        # methods so only real attributes are kept.
        attribute = {}
        for att in dir(local_var):
            if att != 'assignValue' and att != 'getValue' and att != 'typecode':
                attribute[att] = getattr(local_var, att)
        # Fill in UT.NAN where data equals '_FillValue'.  Guard the lookup:
        # not every variable carries a '_FillValue' attribute.
        if '_FillValue' in attribute:
            fill_value = N.where(local_data == attribute['_FillValue'])
            local_data[fill_value] = UT.NAN
            # Remove the attribute since we already applied it.
            del attribute['_FillValue']
        # Reshape (time, lat, lon) to 1-D for the middle end.
        oneD_data = N.reshape(local_data,
                              local_data.shape[0] * local_data.shape[1]
                              * local_data.shape[2])
        self.data[var_name] = (attribute, oneD_data)

    ncfile.close()