def _get_bt_dataset(self, data, calibration_index, wave_number):
    """Get the dataset as brightness temperature.

    Apparently we don't use these calibration factors for Rad -> BT::

        coeffs = self._get_coefficients(ds_info['calibration_key'], calibration_index)
        # coefficients are per-scan, we need to repeat the values for a
        # clean alignment
        coeffs = np.repeat(coeffs, data.shape[0] // coeffs.shape[1], axis=1)
        coeffs = coeffs.rename({
            coeffs.dims[0]: 'coefficients', coeffs.dims[1]: 'y'
        })  # match data dims
        data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 + coeffs[3] * data**3

    """
    # pass the dask array
    bt_data = rad2temp(wave_number, data.data * 1e-5)  # brightness temperature
    if isinstance(bt_data, np.ndarray):
        # old versions of pyspectral produce numpy arrays
        data.data = da.from_array(bt_data, chunks=data.data.chunks)
    else:
        # new versions of pyspectral can do dask arrays
        data.data = bt_data
    # additional corrections from the file
    corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][calibration_index])
    corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][calibration_index])
    if corr_coeff_a != 0:
        data = (data - corr_coeff_b) / corr_coeff_a
    # Some BT bands seem to have 0 in the first 10 columns
    # and it is an invalid Kelvin measurement, so let's mask
    data = data.where(data != 0)
    return data
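
# A minimal, standalone sketch of the Rad -> BT scaling used above, assuming
# ``rad2temp`` is pyspectral's ``blackbody_wn_rad2temp`` (wavenumber in m^-1,
# per-wavenumber radiance in SI units). The helper name, default A/B
# coefficients, and the sample values are illustrative, not part of the
# original reader; the real A/B values come from TBB_Trans_Coefficient_A/B.
import numpy as np
from pyspectral.blackbody import blackbody_wn_rad2temp


def file_radiance_to_bt(radiance, wavelength_um, corr_a=1.0, corr_b=0.0):
    """Radiance in mW/(m^2 sr cm^-1) at a nominal wavelength (um) to BT in K."""
    wave_number = 1.0 / (wavelength_um * 1e-6)                 # um -> m^-1
    bt = blackbody_wn_rad2temp(wave_number, radiance * 1e-5)   # 1e-5: mW per cm^-1 -> W per m^-1
    if corr_a != 0:                                            # same A/B correction as above
        bt = (bt - corr_b) / corr_a
    return bt


# e.g. ~90 mW/(m^2 sr cm^-1) at 10.8 um corresponds to a BT of roughly 285 K
bt_example = file_radiance_to_bt(np.array([90.0]), 10.8)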
def get_dataset(self, dataset_id, ds_info):
    file_key = self.geolocation_prefix + ds_info.get('file_key', dataset_id.name)
    if self.platform_id == 'FY3B':
        file_key = file_key.replace('Data/', '')
    data = self[file_key]
    band_index = ds_info.get('band_index')
    if band_index is not None:
        data = data[band_index]
        data = data.where((data >= self[file_key + '/attr/valid_range'][0]) &
                          (data <= self[file_key + '/attr/valid_range'][1]))
        if 'E' in dataset_id.name:
            slope = self[self.l1b_prefix +
                         'Emissive_Radiance_Scales'].data[:, band_index][:, np.newaxis]
            intercept = self[self.l1b_prefix +
                             'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis]
            # Converts cm^-1 (wavenumbers) and (mW/m^2)/(sr/cm^-1) (radiance data)
            # to SI units m^-1, mW*m^-3*sr^-1.
            wave_number = self['/attr/' + self.wave_number][band_index] * 100
            bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5)
            if isinstance(bt_data, np.ndarray):
                # old versions of pyspectral produce numpy arrays
                data.data = da.from_array(bt_data, chunks=data.data.chunks)
            else:
                # new versions of pyspectral can do dask arrays
                data.data = bt_data
        elif 'R' in dataset_id.name:
            slope = self['/attr/RefSB_Cal_Coefficients'][0::2]
            intercept = self['/attr/RefSB_Cal_Coefficients'][1::2]
            data = data * slope[band_index] + intercept[band_index]
    else:
        data = data.where((data >= self[file_key + '/attr/valid_range'][0]) &
                          (data <= self[file_key + '/attr/valid_range'][1]))
        data = self[file_key + '/attr/Intercept'] + self[file_key + '/attr/Slope'] * data

    new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))}
    data = data.rename(new_dims)
    data.attrs.update({'platform_name': self['/attr/Satellite Name'],
                       'sensor': self['/attr/Sensor Identification Code']})
    data.attrs.update(ds_info)
    units = self.get(file_key + '/attr/units')
    if units is not None and str(units).lower() != 'none':
        data.attrs.update({'units': self.get(file_key + '/attr/units')})
    elif data.attrs.get('calibration') == 'reflectance':
        data.attrs.update({'units': '%'})
    else:
        data.attrs.update({'units': '1'})
    return data
def get_dataset(self, dataset_id, ds_info):
    """Create DataArray from file content for `dataset_id`."""
    file_key = self.geolocation_prefix + ds_info.get('file_key', dataset_id['name'])
    if self.platform_id == 'FY3B':
        file_key = file_key.replace('Data/', '')
    data = self[file_key]
    band_index = ds_info.get('band_index')
    valid_range = data.attrs.pop('valid_range', None)
    if isinstance(valid_range, np.ndarray):
        valid_range = valid_range.tolist()
    if band_index is not None:
        data = data[band_index]
        if valid_range:
            data = data.where((data >= valid_range[0]) &
                              (data <= valid_range[1]))
        if 'Emissive' in file_key:
            slope = self._correct_slope(self[self.l1b_prefix + 'Emissive_Radiance_Scales']
                                        .data[:, band_index][:, np.newaxis])
            intercept = self[self.l1b_prefix +
                             'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis]
            # Converts cm^-1 (wavenumbers) and (mW/m^2)/(sr/cm^-1) (radiance data)
            # to SI units m^-1, mW*m^-3*sr^-1.
            wave_number = self['/attr/' + self.wave_number][band_index] * 100
            bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5)
            if isinstance(bt_data, np.ndarray):
                # old versions of pyspectral produce numpy arrays
                data.data = da.from_array(bt_data, chunks=data.data.chunks)
            else:
                # new versions of pyspectral can do dask arrays
                data.data = bt_data
        elif 'RefSB' in file_key:
            if self.platform_id == 'FY3B':
                coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1)
            else:
                coeffs = self['/attr/RefSB_Cal_Coefficients']
            slope = self._correct_slope(coeffs[0::2])
            intercept = coeffs[1::2]
            data = data * slope[band_index] + intercept[band_index]
    else:
        slope = self._correct_slope(self[file_key + '/attr/Slope'])
        intercept = self[file_key + '/attr/Intercept']
        if valid_range:
            data = data.where((data >= valid_range[0]) &
                              (data <= valid_range[1]))
        data = data * slope + intercept

    new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))}
    data = data.rename(new_dims)
    # use lowercase sensor name to be consistent with the rest of satpy
    data.attrs.update({'platform_name': self['/attr/Satellite Name'],
                       'sensor': self['/attr/Sensor Identification Code'].lower()})
    data.attrs.update(ds_info)
    units = self.get(file_key + '/attr/units')
    if units is not None and str(units).lower() != 'none':
        data.attrs.update({'units': self.get(file_key + '/attr/units')})
    elif data.attrs.get('calibration') == 'reflectance':
        data.attrs.update({'units': '%'})
    else:
        data.attrs.update({'units': '1'})
    return data
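
# The ``_correct_slope`` helper used above is defined elsewhere in the reader;
# based purely on how it is called here, a plausible sketch is a guard that
# replaces zero slopes (which would wipe out whole scans) with 1. This is an
# assumption for illustration, not the verbatim satpy implementation.
import numpy as np


def _correct_slope_sketch(slope):
    """Replace invalid zero slopes with 1 so the data passes through unscaled."""
    return np.where(slope == 0, 1, slope)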
def radiance2tb(rad, wavelength):
    """Get the Tb from the radiance using the Planck function.

    rad: Radiance in SI units
    wavelength: Wavelength in SI units (meter)
    """
    from pyspectral.blackbody import blackbody_rad2temp as rad2temp
    return rad2temp(wavelength, rad)
def radiance2tb(self, rad, wavelength, **kwargs):
    """Get the Tb from the radiance using the Planck function, and optionally the relative spectral response function.

    rad: Radiance in SI units
    wavelength: Wavelength in SI units (meter)
    """
    from pyspectral.blackbody import blackbody_rad2temp as rad2temp
    return rad2temp(wavelength, rad)
def radiance2tb(rad, wavelength):
    """
    Get the Tb from the radiance using the Planck function

    rad: Radiance in SI units
    wavelength: Wavelength in SI units (meter)
    """
    from pyspectral.blackbody import blackbody_rad2temp as rad2temp
    return rad2temp(wavelength, rad)
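
# Hedged usage sketch for the wavelength-based helpers above: pyspectral's
# blackbody_rad2temp takes the wavelength in meters and the radiance in SI
# units, W/(m^2 sr m). The numbers below are illustrative only.
import numpy as np
from pyspectral.blackbody import blackbody_rad2temp

wavelength = 10.8e-6                           # 10.8 um IR window channel, in meters
radiance = np.array([7.6e6])                   # W/(m^2 sr m)
tb = blackbody_rad2temp(wavelength, radiance)  # -> roughly 285 K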
def _calibrate_emissive(self, data, band_index):
    slope = self._correct_slope(
        self[self.l1b_prefix + 'Emissive_Radiance_Scales'].data[:, band_index][:, np.newaxis])
    intercept = self[self.l1b_prefix +
                     'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis]
    # Converts cm^-1 (wavenumbers) and (mW/m^2)/(sr/cm^-1) (radiance data)
    # to SI units m^-1, mW*m^-3*sr^-1.
    wave_number = self['/attr/' + self.wave_number][band_index] * 100
    bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5)
    if isinstance(bt_data, np.ndarray):
        # old versions of pyspectral produce numpy arrays
        data.data = da.from_array(bt_data, chunks=data.data.chunks)
    else:
        # new versions of pyspectral can do dask arrays
        data.data = bt_data
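
# Hedged illustration of the per-scan-line slope/intercept handling above:
# Emissive_Radiance_Scales/Offsets hold one value per line and band, so
# selecting a band and adding a trailing axis gives a (lines, 1) column that
# broadcasts across the pixel dimension of the (lines, pixels) counts.
# Shapes and numbers are made up for illustration.
import numpy as np

counts = np.arange(12.0).reshape(3, 4)          # (lines, pixels)
scales = np.array([[0.010, 0.020, 0.030],
                   [0.011, 0.021, 0.031],
                   [0.012, 0.022, 0.032]])      # (lines, bands)
band_index = 1
slope = scales[:, band_index][:, np.newaxis]    # (lines, 1)
radiance = counts * slope                       # broadcasts to (lines, pixels)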
def get_dataset(self, dataset_id, ds_info):
    file_key = self.geolocation_prefix + ds_info.get('file_key', dataset_id.name)
    if self.platform_id == 'FY3B':
        file_key = file_key.replace('Data/', '')
    data = self.get(file_key)
    if data is None:
        logging.error('File key "{0}" could not be found in file {1}'.format(file_key, self.filename))
    band_index = ds_info.get('band_index')
    if band_index is not None:
        data = data[band_index]
        data = data.where((data >= self[file_key + '/attr/valid_range'][0]) &
                          (data <= self[file_key + '/attr/valid_range'][1]))
        if 'E' in dataset_id.name:
            slope = self[self.l1b_prefix +
                         'Emissive_Radiance_Scales'].data[:, band_index][:, np.newaxis]
            intercept = self[self.l1b_prefix +
                             'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis]
            radiance_data = rad2temp(self['/attr/' + self.wave_number][band_index] * 100,
                                     (data * slope + intercept) * 1e-5)
            data = xr.DataArray(da.from_array(radiance_data, data.chunks),
                                coords=data.coords, dims=data.dims,
                                name=data.name, attrs=data.attrs)
        elif 'R' in dataset_id.name:
            slope = self['/attr/RefSB_Cal_Coefficients'][0::2]
            intercept = self['/attr/RefSB_Cal_Coefficients'][1::2]
            data = data * slope[band_index] + intercept[band_index]
    else:
        data = data.where((data >= self[file_key + '/attr/valid_range'][0]) &
                          (data <= self[file_key + '/attr/valid_range'][1]))
        data = self[file_key + '/attr/Intercept'] + self[file_key + '/attr/Slope'] * data

    new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))}
    data = data.rename(new_dims)
    data.attrs.update({'platform_name': self['/attr/Satellite Name'],
                       'sensor': self['/attr/Sensor Identification Code']})
    data.attrs.update(ds_info)
    units = self.get(file_key + '/attr/units')
    if units is not None and str(units).lower() != 'none':
        data.attrs.update({'units': self.get(file_key + '/attr/units')})
    elif data.attrs.get('calibration') == 'reflectance':
        data.attrs.update({'units': '%'})
    else:
        data.attrs.update({'units': '1'})
    return data
def _calibrate_ir(self, dataset_id, data):
    """Calibrate radiance data to BTs using either pyspectral or in-file coefficients."""
    if self.calib_mode == 'PYSPECTRAL':
        # depends on the radiance calibration above
        # Convert um to m^-1 (SI units for pyspectral)
        wn = 1 / (dataset_id.wavelength[1] / 1e6)
        # Convert cm^-1 (wavenumbers) and (mW/m^2)/(sr/cm^-1) (radiance data)
        # to SI units m^-1, mW*m^-3*sr^-1.
        bt_data = rad2temp(wn, data.data * 1e-5)
        if isinstance(bt_data, np.ndarray):
            # old versions of pyspectral produce numpy arrays
            data.data = da.from_array(bt_data, chunks=data.data.chunks)
        else:
            # new versions of pyspectral can do dask arrays
            data.data = bt_data
    else:
        # IR coefficients from the file
        # Channel specific
        c0 = self.nc.attrs['Teff_to_Tbb_c0']
        c1 = self.nc.attrs['Teff_to_Tbb_c1']
        c2 = self.nc.attrs['Teff_to_Tbb_c2']
        # These should be fixed, but load anyway
        cval = self.nc.attrs['light_speed']
        kval = self.nc.attrs['Boltzmann_constant_k']
        hval = self.nc.attrs['Plank_constant_h']
        # Compute wavenumber as cm-1
        wn = (10000 / dataset_id.wavelength[1]) * 100
        # Convert radiance to effective brightness temperature
        e1 = (2 * hval * cval * cval) * np.power(wn, 3)
        e2 = (data.data * 1e-5)
        t_eff = ((hval * cval / kval) * wn) / np.log((e1 / e2) + 1)
        # Now convert to actual brightness temperature
        bt_data = c0 + c1 * t_eff + c2 * t_eff * t_eff
        data.data = bt_data
    return data
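
# Hedged sketch of the "in-file coefficients" branch above: invert the Planck
# function for per-wavenumber radiance to get the effective temperature, then
# apply the per-channel quadratic Teff -> Tbb correction. The constants and the
# example c0/c1/c2 values are illustrative; the reader takes them from the file.
import numpy as np

H = 6.62607015e-34   # Planck constant [J s]
C = 2.99792458e8     # speed of light [m/s]
K = 1.380649e-23     # Boltzmann constant [J/K]


def effective_temperature(rad_wn, wavenumber_m):
    """Invert Planck's law for radiance per wavenumber in W/(m^2 sr m^-1)."""
    e1 = 2 * H * C * C * wavenumber_m**3
    return (H * C / K) * wavenumber_m / np.log(e1 / rad_wn + 1)


# round-trip check: Planck radiance at 285 K comes back as ~285 K
wn = 1.0 / 10.8e-6                                            # 10.8 um as m^-1
planck = 2 * H * C * C * wn**3 / np.expm1(H * C * wn / (K * 285.0))
t_eff = effective_temperature(planck, wn)                     # ~285.0
c0, c1, c2 = 0.0, 1.0, 0.0                                    # illustrative only
bt = c0 + c1 * t_eff + c2 * t_eff**2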
def get_dataset(self, dataset_id, ds_info):
    """Load data variable and metadata and calibrate if needed."""
    file_key = ds_info.get('file_key', dataset_id['name'])
    band_index = ds_info.get('band_index')
    data = self[file_key]
    if band_index is not None:
        data = data[band_index]
    if data.ndim >= 2:
        data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'})
    attrs = data.attrs.copy()  # avoid contaminating other band loading
    attrs.update(ds_info)
    if 'rows_per_scan' in self.filetype_info:
        attrs.setdefault('rows_per_scan', self.filetype_info['rows_per_scan'])

    fill_value = attrs.pop('FillValue', np.nan)  # covered by valid_range
    valid_range = attrs.pop('valid_range', None)
    if dataset_id.get('calibration') == 'counts':
        # preserve integer type of counts if possible
        attrs['_FillValue'] = fill_value
        new_fill = fill_value
    else:
        new_fill = np.nan
    if valid_range is not None:
        # Due to a bug in the valid_range upper limit of the 10.8um (band 24)
        # and 12.0um (band 25) channels in the HDF data, this is hardcoded here.
        if dataset_id['name'] in ['24', '25'] and valid_range[1] == 4095:
            valid_range[1] = 25000
        # typically bad_values == 65535, saturated == 65534
        # dead detector == 65533
        data = data.where((data >= valid_range[0]) &
                          (data <= valid_range[1]), new_fill)

    slope = attrs.pop('Slope', None)
    intercept = attrs.pop('Intercept', None)
    if slope is not None and dataset_id.get('calibration') != 'counts':
        if band_index is not None:
            slope = slope[band_index]
            intercept = intercept[band_index]
        data = data * slope + intercept

    if dataset_id.get('calibration') == "reflectance":
        # some bands have 0 counts for the first N columns and
        # seem to be invalid data points
        data = data.where(data != 0)
        coeffs = self._get_coefficients(ds_info['calibration_key'],
                                        ds_info['calibration_index'])
        data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2
    elif dataset_id.get('calibration') == "brightness_temperature":
        cal_index = ds_info['calibration_index']
        # Apparently we don't use these calibration factors for Rad -> BT
        # coeffs = self._get_coefficients(ds_info['calibration_key'], cal_index)
        # # coefficients are per-scan, we need to repeat the values for a
        # # clean alignment
        # coeffs = np.repeat(coeffs, data.shape[0] // coeffs.shape[1], axis=1)
        # coeffs = coeffs.rename({
        #     coeffs.dims[0]: 'coefficients', coeffs.dims[1]: 'y'
        # })  # match data dims
        # data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 + coeffs[3] * data**3

        # Convert the band wavelength (um) to a wavenumber in m^-1 and scale the
        # radiance data from (mW/m^2)/(sr/cm^-1) to SI units for pyspectral.
        wave_number = 1. / (dataset_id['wavelength'][1] / 1e6)
        # pass the dask array
        bt_data = rad2temp(wave_number, data.data * 1e-5)  # brightness temperature
        if isinstance(bt_data, np.ndarray):
            # old versions of pyspectral produce numpy arrays
            data.data = da.from_array(bt_data, chunks=data.data.chunks)
        else:
            # new versions of pyspectral can do dask arrays
            data.data = bt_data
        # additional corrections from the file
        corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][cal_index])
        corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][cal_index])
        if corr_coeff_a != 0:
            data = (data - corr_coeff_b) / corr_coeff_a
        # Some BT bands seem to have 0 in the first 10 columns
        # and it is an invalid Kelvin measurement, so let's mask
        data = data.where(data != 0)

    data.attrs = attrs
    # convert bytes to str
    for key, val in attrs.items():
        # python 3 only
        if bytes is not str and isinstance(val, bytes):
            data.attrs[key] = val.decode('utf8')
    data.attrs.update({
        'platform_name': self['/attr/Satellite Name'],
        'sensor': self.sensor_name,
    })
    return data
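
# How a handler like this is typically driven end-to-end, assuming it is wired
# into satpy's ``mersi2_l1b`` reader; the file path is a placeholder and the
# band name '24' (10.8 um) is taken from the hardcoded valid_range fix above.
from satpy import Scene

scn = Scene(reader='mersi2_l1b', filenames=['/path/to/mersi2_l1b_file.HDF'])
scn.load(['24'])        # IR band; default calibration yields brightness temperature
bt = scn['24']          # dask-backed xarray.DataArray produced by get_dataset above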
def load_virr(satscene, options):
    """Read the VIRR hdf5 file."""
    if "filename" not in options:
        raise IOError("No 1km virr filename given, cannot load")

    values = {"orbit": satscene.orbit,
              "satname": satscene.satname,
              "instrument": satscene.instrument_name,
              "satellite": satscene.fullname
              }
    filename = \
        os.path.join(satscene.time_slot.strftime(options["dir"]) % values,
                     satscene.time_slot.strftime(options["filename"]) % values)
    LOGGER.debug("Filename= %s", filename)

    datasets = ['EV_Emissive', 'EV_RefSB']

    calibrate = options['calibrate']
    LOGGER.debug("Calibrate = " + str(calibrate))

    h5f = h5py.File(filename, 'r')

    # Get geolocation information
    lons = h5f['Longitude'][:]
    lats = h5f['Latitude'][:]
    # Mask out unrealistic values:
    mask = np.logical_or(lats > 90., lons > 90.)
    lons = np.ma.masked_array(lons, mask=mask)
    lats = np.ma.masked_array(lats, mask=mask)

    sunz = h5f['SolarZenith'][:]
    slope = h5f['SolarZenith'].attrs['Slope'][0]
    intercept = h5f['SolarZenith'].attrs['Intercept'][0]
    sunz = sunz * slope + intercept
    sunz = np.where(np.greater(sunz, 85.0), 85.0, sunz)

    # Get the calibration information
    # Emissive radiance coefficients:
    emis_offs = h5f['Emissive_Radiance_Offsets'][:]
    emis_scales = h5f['Emissive_Radiance_Scales'][:]
    # Central wave number (unit = cm-1) for the three IR bands
    # It is ordered according to decreasing wave number (increasing wavelength):
    # 3.7 micron, 10.8 micron, 12 micron
    emiss_centroid_wn = h5f.attrs['Emmisive_Centroid_Wave_Number']
    # VIS/NIR calibration stuff:
    refsb_cal_coeff = h5f.attrs['RefSB_Cal_Coefficients']
    visnir_scales = refsb_cal_coeff[0::2]
    visnir_offs = refsb_cal_coeff[1::2]
    refsb_effective_wl = h5f.attrs['RefSB_Effective_Wavelength']

    # Read the band data:
    for dset in datasets:
        band_data = h5f[dset]
        valid_range = band_data.attrs['valid_range']
        LOGGER.debug("valid-range = " + str(valid_range))
        fillvalue = band_data.attrs['_FillValue']
        band_names = band_data.attrs['band_name'].split(',')
        slope = band_data.attrs['Slope']
        intercept = band_data.attrs['Intercept']
        units = band_data.attrs['units']
        long_name = band_data.attrs['long_name']

        LOGGER.debug('band names = ' + str(band_names))

        for (i, band) in enumerate(band_names):
            if band not in satscene.channels_to_load:
                continue

            LOGGER.debug("Reading channel %s, i=%d", band, i)
            data = band_data[i]

            bandmask = np.logical_or(np.less(data, valid_range[0]),
                                     np.greater(data, valid_range[1]))

            if calibrate:
                if dset in ['EV_Emissive']:
                    data = (np.array([emis_offs[:, i]]).transpose() +
                            data * np.array([emis_scales[:, i]]).transpose())
                    # Radiance to Tb conversion.
                    # Pyspectral wants SI units, but the radiance data are in
                    # mW/m^2/sr/cm^-1 and the wavenumbers are in cm^-1.
                    # Therefore multiply the wavenumbers by 100 and the radiances by 10^-5.
                    data = rad2temp(emiss_centroid_wn[i] * 100., data * 1e-5)
                    LOGGER.debug("IR data calibrated")
                if dset in ['EV_RefSB']:
                    data = (visnir_offs[i] +
                            data * visnir_scales[i]) / np.cos(np.deg2rad(sunz))

            satscene[band] = np.ma.masked_array(data, mask=bandmask, copy=False)

    from pyresample import geometry
    satscene.area = geometry.SwathDefinition(lons=lons, lats=lats)

    h5f.close()
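
# Small standalone sketch of the VIS/NIR branch above: counts are calibrated
# with per-band offset/scale and then normalized by the cosine of the solar
# zenith angle, which the loader clamps at 85 degrees earlier on so the
# division stays well behaved near the terminator. Values are illustrative.
import numpy as np

counts = np.array([[520., 610.], [480., 700.]])
visnir_scale, visnir_off = 0.1, -4.0
sunz = np.minimum(np.array([[30., 60.], [84., 89.]]), 85.0)   # degrees, clamped
refl = (visnir_off + counts * visnir_scale) / np.cos(np.deg2rad(sunz))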