def test_get_sample_from_bil_info(self):
    """Resampling with precomputed bilinear coefficients yields the expected samples."""
    vert, horiz, valid_idxs, neigh_arr = bil.get_bil_info(
        self.swath_def, self.target_def, 50e5, neighbours=32, nprocs=1)

    # Values from data1 and data2 should be carried through unchanged
    sampled = bil.get_sample_from_bil_info(self.data1.ravel(), vert, horiz,
                                           valid_idxs, neigh_arr)
    self.assertEqual(sampled[5], 1.)
    sampled = bil.get_sample_from_bil_info(self.data2.ravel(), vert, horiz,
                                           valid_idxs, neigh_arr)
    self.assertEqual(sampled[5], 2.)

    # Passing output_shape reshapes the result to the target grid
    reshaped = bil.get_sample_from_bil_info(self.data2.ravel(), vert, horiz,
                                            valid_idxs, neigh_arr,
                                            output_shape=self.target_def.shape)
    shape = reshaped.shape
    self.assertEqual(shape[0], self.target_def.shape[0])
    self.assertEqual(shape[1], self.target_def.shape[1])

    # Rounding that happens for certain values: four pixels fall outside
    # of the data and must come back as NaN
    rounded = bil.get_sample_from_bil_info(self.data3.ravel(), vert, horiz,
                                           valid_idxs, neigh_arr,
                                           output_shape=self.target_def.shape)
    self.assertEqual(np.isnan(rounded).sum(), 4)
def compute(self, data, fill_value=None, **kwargs):
    """Resample the given data using bilinear interpolation"""
    del kwargs
    target_shape = self.target_geo_def.shape
    cache = self.cache

    if data.ndim == 3:
        # Resample each band separately and stack along the last axis
        n_bands = data.shape[-1]
        res = np.zeros(list(target_shape) + [n_bands], dtype=data.dtype)
        for band in range(n_bands):
            res[:, :, band] = get_sample_from_bil_info(
                data[:, :, band].ravel(),
                cache['bilinear_t'],
                cache['bilinear_s'],
                cache['input_idxs'],
                cache['idx_arr'],
                output_shape=target_shape)
    else:
        res = get_sample_from_bil_info(
            data.ravel(),
            cache['bilinear_t'],
            cache['bilinear_s'],
            cache['input_idxs'],
            cache['idx_arr'],
            output_shape=target_shape)

    # Invalid (NaN/inf) pixels are returned masked
    return np.ma.masked_invalid(res)
def compute(self, data, fill_value=None, **kwargs):
    """Resample the given data using bilinear interpolation"""
    del kwargs
    target_shape = self.target_geo_def.shape

    def _interp(flat_channel):
        # Interpolate one flattened channel onto the target grid using
        # the precomputed bilinear parameters held in self.cache
        return get_sample_from_bil_info(flat_channel,
                                        self.cache['bilinear_t'],
                                        self.cache['bilinear_s'],
                                        self.cache['input_idxs'],
                                        self.cache['idx_arr'],
                                        output_shape=target_shape)

    if data.ndim == 3:
        # Multi-band input: resample band by band into a stacked output
        output_shape = list(target_shape)
        output_shape.append(data.shape[-1])
        res = np.zeros(output_shape, dtype=data.dtype)
        for band in range(data.shape[-1]):
            res[:, :, band] = _interp(data[:, :, band].ravel())
    else:
        res = _interp(data.ravel())

    # Mask invalid (NaN/inf) pixels in the result
    res = np.ma.masked_invalid(res)
    return res
def _bin_file(self, src_range, target_def):
    """Resample one slice of the input file onto *target_def*.

    Parameters
    ----------
    src_range : sequence of two ints
        Start/stop indices selecting the slice of the file data to bin.
    target_def : pyresample geometry definition
        Target grid the data is interpolated onto.

    Returns
    -------
    xarray.Dataset
        Dataset with one variable per binned key, plus ``datetime_start``
        and ``datetime_length`` coordinates for the slice.
    """
    source_def = geometry.SwathDefinition(
        lons=self._file_data['longitude'][src_range[0]:src_range[1]],
        lats=self._file_data['latitude'][src_range[0]:src_range[1]])

    nan_slice = False
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore')
            radius = self._file_def.get('INFO', {}).get('binRadius', 5e4)
            (t_params, s_params, input_idxs, idx_ref) = bilinear.get_bil_info(
                source_def, target_def, radius=radius)
    except (IndexError, ValueError):
        # No data from this file falls within the slice
        nan_slice = True

    # create target file data
    target_file = xarray.Dataset()

    # rebin any data needing rebinned.
    all_keys = [key for key in self._file_data if key in self._to_bin]
    for key in all_keys:
        if nan_slice:
            binned_data = numpy.full(
                target_def.shape,
                numpy.nan,
                dtype=float,
            )
        else:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                key_data = self._file_data[key][
                    src_range[0]:src_range[1]].data
                binned_data = bilinear.get_sample_from_bil_info(
                    key_data, t_params, s_params, input_idxs, idx_ref,
                    output_shape=target_def.shape)
        dim = ('x', 'y')
        if len(binned_data.shape) == 3:
            # BUG FIX: ('corners') is just a parenthesized string, so the
            # original `dim += ('corners')` raised TypeError (tuple + str).
            # A one-element tuple is needed to extend the dimension names.
            dim += ('corners',)
        target_file[key] = (dim, binned_data)

    # Figure the start and duration of this file.
    # Don't assume sorted, though probably is.
    file_timerange = self._file_data['datetime_start'][
        src_range[0]:src_range[1]]
    start_datetime = file_timerange.min()
    stop_datetime = file_timerange.max()
    target_file.coords['datetime_start'] = start_datetime
    target_file.coords['datetime_length'] = stop_datetime - start_datetime
    return target_file
def test_get_sample_from_bil_info(self):
    """Resampled values match the source data at a known output index."""
    coeffs = bil.get_bil_info(self.swath_def, self.target_def)
    t__, s__, input_idxs, idx_arr = coeffs

    # data1 and data2 should be sampled through unchanged at index 5
    out1 = bil.get_sample_from_bil_info(self.data1.ravel(), t__, s__,
                                        input_idxs, idx_arr)
    self.assertEqual(out1[5], 1.)
    out2 = bil.get_sample_from_bil_info(self.data2.ravel(), t__, s__,
                                        input_idxs, idx_arr)
    self.assertEqual(out2[5], 2.)

    # Supplying output_shape reshapes the result to the target grid
    shaped = bil.get_sample_from_bil_info(self.data2.ravel(), t__, s__,
                                          input_idxs, idx_arr,
                                          output_shape=self.target_def.shape)
    result_shape = shaped.shape
    self.assertEqual(result_shape[0], self.target_def.shape[0])
    self.assertEqual(result_shape[1], self.target_def.shape[1])
def test_get_sample_from_bil_info(self):
    """Test resampling using resampling indices."""
    from pyresample.bilinear import get_bil_info, get_sample_from_bil_info
    t__, s__, input_idxs, idx_arr = get_bil_info(
        self.source_def, self.target_def, 50e5, neighbours=32, nprocs=1)

    # Known source values are carried through unchanged at index 5
    for source, expected in ((self.data1, 1.), (self.data2, 2.)):
        interp = get_sample_from_bil_info(source.ravel(), t__, s__,
                                          input_idxs, idx_arr)
        self.assertEqual(interp.ravel()[5], expected)

    # Supplying output_shape reshapes the result to the target grid
    shaped = get_sample_from_bil_info(self.data2.ravel(), t__, s__,
                                      input_idxs, idx_arr,
                                      output_shape=self.target_def.shape)
    shaped = shaped.shape
    self.assertEqual(shaped[0], self.target_def.shape[0])
    self.assertEqual(shaped[1], self.target_def.shape[1])

    # Rounding that happens for certain values: four pixels end up
    # outside of the data and must be NaN
    rounded = get_sample_from_bil_info(self.data3.ravel(), t__, s__,
                                       input_idxs, idx_arr,
                                       output_shape=self.target_def.shape)
    self.assertEqual(np.isnan(rounded).sum(), 4)

    # A masked array on input must come back as a plain Numpy array
    masked_input = np.ma.masked_all(self.data1.shape)
    plain = get_sample_from_bil_info(masked_input.ravel(), t__, s__,
                                     input_idxs, idx_arr)
    assert not hasattr(plain, 'mask')
def _project_array_bilinear(self, data):
    """Project array *data* using bilinear interpolation"""
    # Lazily copy the precomputed bilinear parameters from the file
    # cache into the in-memory cache on first use
    if 'bilinear_t' not in self._cache:
        for key in ('bilinear_t', 'bilinear_s', 'input_idxs', 'idx_arr'):
            self._cache[key] = self._file_cache[key]

    resampled = get_sample_from_bil_info(data.ravel(),
                                         self._cache['bilinear_t'],
                                         self._cache['bilinear_s'],
                                         self._cache['input_idxs'],
                                         self._cache['idx_arr'],
                                         output_shape=self.out_area.shape)
    # Mask invalid (NaN/inf) pixels before handing the result back
    return np.ma.masked_invalid(resampled)
def project_array(self, data):
    """Project an array *data* along the given Projector object.

    Dispatches on ``self.mode`` ("nearest", "quick", "ewa" or "bilinear"),
    lazily populating the in-memory cache from the file cache on first use.

    Raises
    ------
    ValueError
        If ``self.mode`` is not one of the supported modes (previously an
        unknown mode fell through and raised a confusing ``NameError``).
    """
    if self.mode == "nearest":
        # `x not in d` instead of `not x in d` (idiomatic, same behavior)
        if 'valid_index' not in self._cache:
            self._cache['valid_index'] = self._file_cache['valid_index']
            self._cache['valid_output_index'] = \
                self._file_cache['valid_output_index']
            self._cache['index_array'] = self._file_cache['index_array']

        valid_index, valid_output_index, index_array = \
            (self._cache['valid_index'],
             self._cache['valid_output_index'],
             self._cache['index_array'])

        res = kd_tree.get_sample_from_neighbour_info('nn',
                                                     self.out_area.shape,
                                                     data,
                                                     valid_index,
                                                     valid_output_index,
                                                     index_array,
                                                     fill_value=None)

    elif self.mode == "quick":
        if 'row_idx' not in self._cache:
            self._cache['row_idx'] = self._file_cache['row_idx']
            self._cache['col_idx'] = self._file_cache['col_idx']
        row_idx, col_idx = self._cache['row_idx'], self._cache['col_idx']
        img = image.ImageContainer(data, self.in_area, fill_value=None)
        res = np.ma.array(img.get_array_from_linesample(row_idx, col_idx),
                          dtype=data.dtype)

    elif self.mode == "ewa":
        from pyresample.ewa import fornav
        # TODO: should be user configurable?
        rows_per_scan = None
        if 'ewa_cols' not in self._cache:
            self._cache['ewa_cols'] = self._file_cache['ewa_cols']
            self._cache['ewa_rows'] = self._file_cache['ewa_rows']
        num_valid_points, res = fornav(self._cache['ewa_cols'],
                                       self._cache['ewa_rows'],
                                       self.out_area, data,
                                       rows_per_scan=rows_per_scan)

    elif self.mode == "bilinear":
        if 'bilinear_t' not in self._cache:
            self._cache['bilinear_t'] = self._file_cache['bilinear_t']
            self._cache['bilinear_s'] = self._file_cache['bilinear_s']
            self._cache['input_idxs'] = self._file_cache['input_idxs']
            self._cache['idx_arr'] = self._file_cache['idx_arr']

        res = get_sample_from_bil_info(data.ravel(),
                                       self._cache['bilinear_t'],
                                       self._cache['bilinear_s'],
                                       self._cache['input_idxs'],
                                       self._cache['idx_arr'],
                                       output_shape=self.out_area.shape)
        res = np.ma.masked_invalid(res)

    else:
        # Fail fast with a clear message instead of the NameError the
        # original code produced when `res` was never assigned.
        raise ValueError("Unknown resampling mode: %s" % self.mode)

    return res
def read_seviri_channel(channel_list, time, subdomain=(), regrid=False,
                        # NOTE(review): this default is evaluated once at import
                        # time and shared across calls — fine as long as
                        # AreaDefinition is never mutated; confirm.
                        my_area=geometry.AreaDefinition(
                            'Tropical Africa', 'Tropical Africa',
                            'Hatano Equal Area',
                            {'proj': 'hatano', 'lon_0': 15.0},
                            1732, 1510,
                            (-4330000., -3775000., 4330000., 3775000.)),
                        interp_coeffs=(), reflectance_correct=False):
    '''Read SEVIRI data for given channels and time

    Includes functionality to subsample or regrid. Requires satpy.
    Assumes SEVIRI files are located in sev_data_dir1 set above, with
    directory structure sev_data_dir1/Year/YearMonthDay/Hour/

    Args:
        channel_list (list): list of channels to read, see file_dict for
            possible values
        time (datetime): SEVIRI file date and time, every 00, 15, 30 or 45
            minutes exactly
        subdomain (tuple, optional): If not empty and regrid is False, then
            tuple values are (West boundary, South boundary, East boundary,
            North boundary). Defaults to empty tuple.
        regrid (bool, optional): If True, then data is regridded onto grid
            defined by my_area. Defaults to False.
        my_area (AreaDefinition, optional): pyresample.geometry.AreaDefinition
            Only used if regrid=True. Defaults to a Hatano equal area
            projection ~4.5 km resolution extending from ~33W to ~63E and
            ~29S to ~29N.
        interp_coeffs (tuple, optional): Interpolation coefficients that may
            be used for bilinear interpolation onto new grid. Facilitates use
            of same coefficients when regridding operation is repeated in
            multiple calls to read_seviri_channel. Defaults to empty tuple.
        reflectance_correct (bool, optional): Correct visible reflectances
            for variation in solar zenith angle and earth-sun distances.
            Defaults to False.

    Returns:
        data (dict): Dictionary containing following entries:
            lons (ndarray, shape(nlat,nlon)): Array of longitude values
            lats (ndarray, shape(nlat,nlon)): Array of latitude values
            interp_coeffs (tuple): If regrid is True, then the interpolation
                coefficients are returned in this variable to speed up
                future regridding
            channel (ndarray, shape(nlat,nlon)): Dictionary contains separate
                entry for each channel in channel_list
    '''
    filenames = []
    sat_names = ['MSG4', 'MSG3', 'MSG2', 'MSG1']
    sat_ind = -1
    if time in unavailable_times:
        raise UnavailableFileError(
            "SEVIRI observations for " + time.strftime("%Y/%m/%d_%H%M")
            + " are not available")

    # Sometimes have data from multiple instruments (e.g. 20160504_1045 has
    # MSG3 and MSG1); checking sat_names in order ensures the most recent
    # instrument is prioritised. (Original used bitwise `&`; `and` is the
    # correct boolean operator and short-circuits.)
    while len(filenames) == 0 and sat_ind < len(sat_names) - 1:
        sat_ind += 1
        # PRO and EPI files are necessary in all scenarios
        filenames = (glob.glob(sev_data_dir1 + time.strftime("%Y/%Y%m%d/%H/*")
                               + sat_names[sat_ind]
                               + time.strftime("*EPI*%Y%m%d%H%M-*"))
                     + glob.glob(sev_data_dir1 + time.strftime("%Y/%Y%m%d/%H/*")
                                 + sat_names[sat_ind]
                                 + time.strftime("*PRO*%Y%m%d%H%M-*")))
        sev_dir = (sev_data_dir1 + time.strftime("%Y/%Y%m%d/%H/*")
                   + sat_names[sat_ind])

    if len(filenames) == 0:
        # Try alternative directory for SEVIRI data.
        filenames = (glob.glob(sev_data_dir2 + time.strftime("%Y/%Y%m%d/%H/*")
                               + time.strftime("*EPI*%Y%m%d%H%M-*"))
                     + glob.glob(sev_data_dir2 + time.strftime("%Y/%Y%m%d/%H/*")
                                 + time.strftime("*PRO*%Y%m%d%H%M-*")))
        sev_dir = sev_data_dir2 + time.strftime("%Y/%Y%m%d/%H/*")

    if len(filenames) == 0:
        # BUG FIX: original used a Python 2 `print` statement, which is a
        # syntax error under Python 3; also the raised message string was
        # broken across lines.
        print('searched pattern=',
              sev_data_dir2 + time.strftime("%Y/%Y%m%d/%H/*")
              + sat_names[sat_ind] + time.strftime("*EPI*%Y%m%d%H%M-*"))
        raise MissingFileError(
            "SEVIRI observations for " + time.strftime("%Y/%m/%d_%H%M")
            + " are missing. Please check if they can be downloaded and if"
            " not, add to the list of unavailable times.")
    else:
        # Add the segment files required for each requested channel
        for channel in channel_list:
            filenames = filenames + glob.glob(
                sev_dir + '*' + file_dict[channel]
                + time.strftime("*%Y%m%d%H%M-*"))

    scene = satpy.Scene(reader="seviri_l1b_hrit", filenames=filenames)
    data = {}
    scene.load(channel_list)

    if reflectance_correct:
        lons, lats = scene[channel_list[0]].area.get_lonlats()
        if 0.8 in channel_list:
            scene[0.8] = reflectance_correction(scene[0.8], lons, lats)
        if 0.6 in channel_list:
            scene[0.6] = reflectance_correction(scene[0.6], lons, lats)

    if regrid:  # idiomatic truth test instead of `regrid != False`
        lons, lats = my_area.get_lonlats()
        # Reuse caller-supplied interpolation coefficients when available
        if len(interp_coeffs) == 0:
            interp_coeffs = bilinear.get_bil_info(
                scene[channel_list[0]].area, my_area, radius=50e3, nprocs=1)
        data.update({'interp_coeffs': interp_coeffs})
        for channel in channel_list:
            data.update({str(channel): bilinear.get_sample_from_bil_info(
                scene[channel].values.ravel(),
                interp_coeffs[0], interp_coeffs[1],
                interp_coeffs[2], interp_coeffs[3],
                output_shape=my_area.shape)})
    else:
        if len(subdomain) > 0:
            scene = scene.crop(ll_bbox=subdomain)
        lons, lats = scene[channel_list[0]].area.get_lonlats()
        # Need to invert y-axis to get longitudes increasing.
        lons = lons[:, ::-1]
        lats = lats[:, ::-1]
        for channel in channel_list:
            data.update({str(channel): scene[channel].values[:, ::-1]})

    data.update({'lons': lons, 'lats': lats, 'interp_coeffs': interp_coeffs})

    # Compressed files are decompressed to TMPDIR. Now tidy up.
    delete_list = glob.glob(my_tmpdir + time.strftime("*%Y%m%d%H%M-*"))
    for d in delete_list:
        os.remove(d)
    return data