def test_get_bil_info(self):
    """Check bilinear coefficients with and without data reduction."""

    def _verify(t_coeffs, s_coeffs):
        for idx, (t_val, s_val) in enumerate(zip(t_coeffs, s_coeffs)):
            if idx == 5:
                # One pixel is checked against its exact expected value
                self.assertAlmostEqual(t_val, 0.730659147133, 5)
                self.assertAlmostEqual(s_val, 0.310314173004, 5)
            elif idx in (12, 13, 14, 15):
                # These pixels fall outside the target area
                self.assertTrue(np.isnan(t_val))
                self.assertTrue(np.isnan(s_val))
            else:
                # Every other coefficient must stay within [0.0, 1.0]
                self.assertTrue(0.0 <= t_val <= 1.0)
                self.assertTrue(0.0 <= s_val <= 1.0)

    # The coefficients must validate both with and without data reduction
    for reduce_data in (False, True):
        t__, s__, input_idxs, idx_arr = bil.get_bil_info(
            self.swath_def, self.target_def, 50e5, neighbours=32,
            nprocs=1, reduce_data=reduce_data)
        _verify(t__, s__)
def test_get_bil_info(self):
    """Bilinear indices must be valid whether or not data are reduced."""
    outside = {12, 13, 14, 15}

    def _assert_valid(t_arr, s_arr):
        for i in range(len(t_arr)):
            if i == 5:
                # Exact value verified for a single pixel
                self.assertAlmostEqual(t_arr[i], 0.730659147133, 5)
                self.assertAlmostEqual(s_arr[i], 0.310314173004, 5)
                continue
            if i in outside:
                # Pixels outside the area are NaN
                self.assertTrue(np.isnan(t_arr[i]))
                self.assertTrue(np.isnan(s_arr[i]))
                continue
            # Remaining coefficients are bounded by [0.0, 1.0]
            self.assertTrue(t_arr[i] >= 0.0)
            self.assertTrue(s_arr[i] >= 0.0)
            self.assertTrue(t_arr[i] <= 1.0)
            self.assertTrue(s_arr[i] <= 1.0)

    res = bil.get_bil_info(self.swath_def, self.target_def, 50e5,
                           neighbours=32, nprocs=1, reduce_data=False)
    _assert_valid(res[0], res[1])
    res = bil.get_bil_info(self.swath_def, self.target_def, 50e5,
                           neighbours=32, nprocs=1, reduce_data=True)
    _assert_valid(res[0], res[1])
def _bin_file(self, src_range, target_def):
    """Bilinearly rebin one slice of the input file onto ``target_def``.

    Args:
        src_range: ``(start, stop)`` index pair selecting the slice of
            ``self._file_data`` to resample.
        target_def: pyresample geometry definition of the output grid.

    Returns:
        ``xarray.Dataset`` with one rebinned variable per key listed in
        ``self._to_bin``, plus ``datetime_start``/``datetime_length``
        coordinates describing the time span of the slice.
    """
    source_def = geometry.SwathDefinition(
        lons=self._file_data['longitude'][src_range[0]:src_range[1]],
        lats=self._file_data['latitude'][src_range[0]:src_range[1]])
    nan_slice = False
    try:
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore')
            radius = self._file_def.get('INFO', {}).get('binRadius', 5e4)
            (t_params, s_params,
             input_idxs, idx_ref) = bilinear.get_bil_info(source_def,
                                                          target_def,
                                                          radius=radius)
            # valid_input_index, valid_output_index, index_array, distance_array = \
            #     kd_tree.get_neighbour_info(source_def, target_def, 5000,
            #                                neighbours=1)
    except (IndexError, ValueError):
        # No data from this file falls within the slice
        nan_slice = True

    # create target file data
    target_file = xarray.Dataset()

    # rebin any data needing rebinned.
    all_keys = [key for key in self._file_data if key in self._to_bin]
    for key in all_keys:
        if nan_slice:
            # Nothing falls inside the target grid: fill with NaN
            binned_data = numpy.full(
                target_def.shape,
                numpy.nan,
                dtype=float,
            )
        else:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                key_data = self._file_data[key][
                    src_range[0]:src_range[1]].data
                binned_data = bilinear.get_sample_from_bil_info(
                    key_data, t_params, s_params, input_idxs, idx_ref,
                    output_shape=target_def.shape)
        dim = ('x', 'y')
        if len(binned_data.shape) == 3:
            # BUG FIX: was ``dim += ('corners')`` — ``('corners')`` is a
            # plain string, so tuple += string raised TypeError for 3-D
            # data.  A one-element tuple needs the trailing comma.
            dim += ('corners',)
        target_file[key] = (dim, binned_data)

    # Figure the start and duration of this file
    # Don't assume sorted, though probably is
    file_timerange = self._file_data['datetime_start'][
        src_range[0]:src_range[1]]
    start_datetime = file_timerange.min()
    stop_datetime = file_timerange.max()
    target_file.coords['datetime_start'] = start_datetime
    target_file.coords['datetime_length'] = stop_datetime - start_datetime

    return target_file
def test_get_sample_from_bil_info(self):
    """Resample constant fields and verify values, shapes and NaN count."""
    t__, s__, input_idxs, idx_arr = bil.get_bil_info(
        self.swath_def, self.target_def, 50e5, neighbours=32, nprocs=1)
    # Each constant source field keeps its value at pixel 5
    for source, expected in ((self.data1, 1.), (self.data2, 2.)):
        sampled = bil.get_sample_from_bil_info(source.ravel(), t__, s__,
                                               input_idxs, idx_arr)
        self.assertEqual(sampled[5], expected)
    # The output can be reshaped to the target definition shape
    reshaped = bil.get_sample_from_bil_info(
        self.data2.ravel(), t__, s__, input_idxs, idx_arr,
        output_shape=self.target_def.shape)
    self.assertEqual(reshaped.shape[0], self.target_def.shape[0])
    self.assertEqual(reshaped.shape[1], self.target_def.shape[1])
    # Rounding that happens for certain values
    rounded = bil.get_sample_from_bil_info(
        self.data3.ravel(), t__, s__, input_idxs, idx_arr,
        output_shape=self.target_def.shape)
    # Four pixels are outside of the data
    self.assertEqual(np.isnan(rounded).sum(), 4)
def test_get_sample_from_bil_info(self):
    """Check sampling, reshaping and NaN handling of bilinear resampling."""
    bil_params = bil.get_bil_info(self.swath_def, self.target_def, 50e5,
                                  neighbours=32, nprocs=1)
    t_vals, s_vals, valid_idx, idx_ref = bil_params
    # Sample from data1: constant field value survives at pixel 5
    res1 = bil.get_sample_from_bil_info(self.data1.ravel(), t_vals, s_vals,
                                        valid_idx, idx_ref)
    self.assertEqual(res1[5], 1.)
    # Sample from data2
    res2 = bil.get_sample_from_bil_info(self.data2.ravel(), t_vals, s_vals,
                                        valid_idx, idx_ref)
    self.assertEqual(res2[5], 2.)
    # Reshaping to the target area shape
    shaped = bil.get_sample_from_bil_info(self.data2.ravel(), t_vals,
                                          s_vals, valid_idx, idx_ref,
                                          output_shape=self.target_def.shape)
    self.assertEqual(shaped.shape[0], self.target_def.shape[0])
    self.assertEqual(shaped.shape[1], self.target_def.shape[1])
    # Rounding that happens for certain values
    rounded = bil.get_sample_from_bil_info(self.data3.ravel(), t_vals,
                                           s_vals, valid_idx, idx_ref,
                                           output_shape=self.target_def.shape)
    # Four target pixels fall outside of the source data
    self.assertEqual(np.isnan(rounded).sum(), 4)
def test_get_bil_info(self):
    """Only the sixth output pixel should receive valid coefficients."""
    t__, s__, input_idxs, idx_arr = bil.get_bil_info(
        self.swath_def, self.target_def)
    for idx, (t_val, s_val) in enumerate(zip(t__, s__)):
        if idx == 5:
            # The single valid pixel has known coefficient values
            self.assertAlmostEqual(t_val, 0.684850870155, 5)
            self.assertAlmostEqual(s_val, 0.775433912393, 5)
        else:
            # All other pixels are NaN
            self.assertTrue(np.isnan(t_val))
            self.assertTrue(np.isnan(s_val))
def calc_bilinear_params(in_area, out_area, radius, nprocs=1):
    """Calculate projection parameters for bilinear interpolation"""
    t_coeffs, s_coeffs, valid_input, index_array = get_bil_info(
        in_area, out_area, radius, neighbours=32,
        nprocs=nprocs, masked=False)
    # Collect the coefficients into the cache layout used elsewhere
    return {'bilinear_s': s_coeffs,
            'bilinear_t': t_coeffs,
            'input_idxs': valid_input,
            'idx_arr': index_array}
def calc_bilinear_params(in_area, out_area, radius, nprocs=1):
    """Calculate projection parameters for bilinear interpolation"""
    params = get_bil_info(in_area, out_area, radius, neighbours=32,
                          nprocs=nprocs, masked=False)
    # get_bil_info returns (t, s, input_idxs, idx_arr) in this order
    cache = dict(zip(('bilinear_t', 'bilinear_s', 'input_idxs', 'idx_arr'),
                     params))
    return cache
def precompute(self, mask=None, radius_of_influence=50000, reduce_data=True,
               nprocs=1, segments=None, cache_dir=False, **kwargs):
    """Create bilinear coefficients and store them for later use.

    Note: The `mask` keyword should be provided if geolocation may be
        valid where data points are invalid. This defaults to the `mask`
        attribute of the `data` numpy masked array passed to the
        `resample` method.
    """
    del kwargs
    masked_source = mask_source_lonlats(self.source_geo_def, mask)
    bil_hash = self.get_hash(source_geo_def=masked_source,
                             radius_of_influence=radius_of_influence,
                             mode="bilinear")
    filename = self._create_cache_filename(cache_dir, bil_hash)
    # Populates self.cache when a cached parameter set exists
    self._read_params_from_cache(cache_dir, bil_hash, filename)

    if self.cache is not None:
        LOG.debug("Loaded bilinear parameters")
        return self.cache

    LOG.debug("Computing bilinear parameters")
    t_coeffs, s_coeffs, valid_input, index_array = get_bil_info(
        masked_source, self.target_geo_def, radius_of_influence,
        neighbours=32, nprocs=nprocs, masked=False)
    self.cache = {'bilinear_s': s_coeffs,
                  'bilinear_t': t_coeffs,
                  'input_idxs': valid_input,
                  'idx_arr': index_array}
    self._update_caches(bil_hash, cache_dir, filename)
    return self.cache
def test_get_bil_info(self):
    """Test calculation of bilinear resampling indices."""
    from pyresample.bilinear import get_bil_info

    def _validate(t_coeffs, s_coeffs):
        outside = (12, 13, 14, 15)
        for idx in range(len(t_coeffs)):
            if idx == 5:
                # Exact value is verified for a single pixel
                self.assertAlmostEqual(t_coeffs[idx], 0.730659147133, 5)
                self.assertAlmostEqual(s_coeffs[idx], 0.310314173004, 5)
            elif idx in outside:
                # Pixels outside the target area are NaN
                self.assertTrue(np.isnan(t_coeffs[idx]))
                self.assertTrue(np.isnan(s_coeffs[idx]))
            else:
                # Remaining coefficients stay within [0.0, 1.0]
                self.assertTrue(0.0 <= t_coeffs[idx] <= 1.0)
                self.assertTrue(0.0 <= s_coeffs[idx] <= 1.0)

    # Serial run without data reduction
    t__, s__, _, _ = get_bil_info(self.source_def, self.target_def, 50e5,
                                  neighbours=32, nprocs=1,
                                  reduce_data=False)
    _validate(t__, s__)
    # Parallel run with data reduction
    t__, s__, _, _ = get_bil_info(self.source_def, self.target_def, 50e5,
                                  neighbours=32, nprocs=2,
                                  reduce_data=True)
    _validate(t__, s__)
def test_get_sample_from_bil_info(self):
    """Resampling with precomputed indices gives the expected values."""
    coeffs = bil.get_bil_info(self.swath_def, self.target_def)
    t__, s__, input_idxs, idx_arr = coeffs
    # Pixel 5 keeps the constant value of each source field
    for field, value in ((self.data1, 1.), (self.data2, 2.)):
        out = bil.get_sample_from_bil_info(field.ravel(), t__, s__,
                                           input_idxs, idx_arr)
        self.assertEqual(out[5], value)
    # Reshaping to the target area shape
    out = bil.get_sample_from_bil_info(self.data2.ravel(), t__, s__,
                                       input_idxs, idx_arr,
                                       output_shape=self.target_def.shape)
    self.assertEqual(out.shape[0], self.target_def.shape[0])
    self.assertEqual(out.shape[1], self.target_def.shape[1])
def test_get_sample_from_bil_info(self):
    """Test resampling using resampling indices."""
    from pyresample.bilinear import get_bil_info, get_sample_from_bil_info
    t_par, s_par, valid_in, idx_ref = get_bil_info(
        self.source_def, self.target_def, 50e5, neighbours=32, nprocs=1)
    # Constant source fields keep their value at pixel 5
    res = get_sample_from_bil_info(self.data1.ravel(), t_par, s_par,
                                   valid_in, idx_ref)
    self.assertEqual(res.ravel()[5], 1.)
    res = get_sample_from_bil_info(self.data2.ravel(), t_par, s_par,
                                   valid_in, idx_ref)
    self.assertEqual(res.ravel()[5], 2.)
    # Output can be reshaped to the target definition
    shaped = get_sample_from_bil_info(self.data2.ravel(), t_par, s_par,
                                      valid_in, idx_ref,
                                      output_shape=self.target_def.shape)
    self.assertEqual(shaped.shape[0], self.target_def.shape[0])
    self.assertEqual(shaped.shape[1], self.target_def.shape[1])
    # Rounding that happens for certain values; four pixels fall
    # outside of the data
    rounded = get_sample_from_bil_info(self.data3.ravel(), t_par, s_par,
                                       valid_in, idx_ref,
                                       output_shape=self.target_def.shape)
    self.assertEqual(np.isnan(rounded).sum(), 4)
    # Masked array as input, result should be plain Numpy array
    masked = np.ma.masked_all(self.data1.shape)
    res = get_sample_from_bil_info(masked.ravel(), t_par, s_par,
                                   valid_in, idx_ref)
    assert not hasattr(res, 'mask')
def precompute(self, mask=None, radius_of_influence=50000, reduce_data=True,
               nprocs=1, segments=None, cache_dir=False, **kwargs):
    """Create bilinear coefficients and store them for later use.

    Note: The `mask` keyword should be provided if geolocation may be
        valid where data points are invalid. This defaults to the `mask`
        attribute of the `data` numpy masked array passed to the
        `resample` method.
    """
    del kwargs
    src_def = mask_source_lonlats(self.source_geo_def, mask)
    bil_hash = self.get_hash(source_geo_def=src_def,
                             radius_of_influence=radius_of_influence,
                             mode="bilinear")
    filename = self._create_cache_filename(cache_dir, bil_hash)
    # Try loading a previously computed parameter set first
    self._read_params_from_cache(cache_dir, bil_hash, filename)
    if self.cache is None:
        LOG.debug("Computing bilinear parameters")
        params = get_bil_info(src_def, self.target_geo_def,
                              radius_of_influence, neighbours=32,
                              nprocs=nprocs, masked=False)
        # get_bil_info returns (t, s, input_idxs, idx_arr) in this order
        keys = ('bilinear_t', 'bilinear_s', 'input_idxs', 'idx_arr')
        self.cache = dict(zip(keys, params))
        self._update_caches(bil_hash, cache_dir, filename)
    else:
        LOG.debug("Loaded bilinear parameters")
    return self.cache
def __init__(self, in_area, out_area, in_latlons=None, mode=None,
             radius=10000, nprocs=1):
    """Set up a projector from *in_area* to *out_area*.

    *in_area*/*out_area* may be area-definition names, area objects, or
    (for the input) a pair of lon/lat arrays via *in_latlons*.  *mode*
    is one of 'quick', 'nearest', 'ewa', 'bilinear' or None (auto).
    Precomputed projection arrays are cached to an ``.npz`` file under
    the configured projections directory and reloaded when available.
    """
    if (mode is not None and
            mode not in ["quick", "nearest", "ewa", "bilinear"]):
        raise ValueError("Projector mode must be one of 'nearest', "
                         "'quick', 'ewa', 'bilinear'")

    self.area_file = get_area_file()
    self.in_area = None
    self.out_area = None
    self._cache = None
    self._filename = None
    self.mode = "quick"
    self.radius = radius
    self.conf = ConfigParser.ConfigParser()
    self.conf.read(os.path.join(CONFIG_PATH, "mpop.cfg"))

    # TODO:
    # - Rework so that in_area and out_area can be lonlats.
    # - Add a recompute flag ?

    # Setting up the input area
    try:
        self.in_area = get_area_def(in_area)
        in_id = in_area
    except (utils.AreaNotFound, AttributeError):
        try:
            in_id = in_area.area_id
            self.in_area = in_area
        except AttributeError:
            try:
                # TODO: Note that latlons are in order (lons, lats)
                self.in_area = geometry.SwathDefinition(
                    lons=in_latlons[0], lats=in_latlons[1])
                in_id = in_area
            except TypeError:
                raise utils.AreaNotFound("Input area " + str(in_area) +
                                         " must be defined in " +
                                         self.area_file +
                                         ", be an area object"
                                         " or longitudes/latitudes must be "
                                         "provided.")

    # Setting up the output area
    try:
        self.out_area = get_area_def(out_area)
        out_id = out_area
    except (utils.AreaNotFound, AttributeError):
        try:
            out_id = out_area.area_id
            self.out_area = out_area
        except AttributeError:
            raise utils.AreaNotFound("Output area " + str(out_area) +
                                     " must be defined in " +
                                     self.area_file + " or "
                                     "be an area object.")

    # if self.in_area == self.out_area:
    #     return

    # choosing the right mode if necessary
    if mode is None:
        try:
            dicts = in_area.proj_dict, out_area.proj_dict
            del dicts
            self.mode = "quick"
        except AttributeError:
            self.mode = "nearest"
    else:
        self.mode = mode

    filename = (in_id + "2" + out_id + "_" +
                str(_get_area_hash(self.in_area)) + "to" +
                str(_get_area_hash(self.out_area)) + "_" +
                self.mode + ".npz")

    projections_directory = "/var/tmp"
    try:
        projections_directory = self.conf.get("projector",
                                              "projections_directory")
    except ConfigParser.NoSectionError:
        pass

    self._filename = os.path.join(projections_directory, filename)

    try:
        self._cache = {}
        self._file_cache = np.load(self._filename)
    except Exception:
        # BUG FIX: this was a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt.  Any failure to load the cached
        # file simply triggers recomputation below.
        logger.info("Computing projection from %s to %s...",
                    in_id, out_id)

        if self.mode == "nearest":
            valid_index, valid_output_index, index_array, distance_array = \
                kd_tree.get_neighbour_info(self.in_area,
                                           self.out_area,
                                           self.radius,
                                           neighbours=1,
                                           nprocs=nprocs)
            del distance_array
            self._cache = {}
            self._cache['valid_index'] = valid_index
            self._cache['valid_output_index'] = valid_output_index
            self._cache['index_array'] = index_array

        elif self.mode == "quick":
            ridx, cidx = \
                utils.generate_quick_linesample_arrays(self.in_area,
                                                       self.out_area)
            self._cache = {}
            self._cache['row_idx'] = ridx
            self._cache['col_idx'] = cidx

        elif self.mode == "ewa":
            from pyresample.ewa import ll2cr
            swath_points_in_grid, cols, rows = ll2cr(self.in_area,
                                                     self.out_area)
            self._cache = {}
            # self._cache['ewa_swath_points_in_grid'] = \
            #     swath_points_in_grid
            self._cache['ewa_cols'] = cols
            self._cache['ewa_rows'] = rows

        elif self.mode == "bilinear":
            bilinear_t, bilinear_s, input_idxs, idx_arr = \
                get_bil_info(self.in_area, self.out_area,
                             self.radius, neighbours=32,
                             nprocs=nprocs, masked=False)
            self._cache = {}
            self._cache['bilinear_s'] = bilinear_s
            self._cache['bilinear_t'] = bilinear_t
            self._cache['input_idxs'] = input_idxs
            self._cache['idx_arr'] = idx_arr
def read_seviri_channel(channel_list, time, subdomain=(), regrid=False,
                        my_area=geometry.AreaDefinition(
                            'Tropical Africa', 'Tropical Africa',
                            'Hatano Equal Area',
                            {'proj': 'hatano', 'lon_0': 15.0},
                            1732, 1510,
                            (-4330000., -3775000., 4330000., 3775000.)),
                        interp_coeffs=(), reflectance_correct=False):
    '''Read SEVIRI data for given channels and time

    Includes functionality to subsample or regrid. Requires satpy.
    Assumes SEVIRI files are located in sev_data_dir1 set above, with
    directory structure sev_data_dir1/Year/YearMonthDay/Hour/

    Args:
        channel_list (list): list of channels to read, see file_dict for
            possible values
        time (datetime): SEVIRI file date and time, every 00, 15, 30 or 45
            minutes exactly
        subdomain (tuple, optional): If not empty and regrid is False, then
            tuple values are (West boundary, South boundary,
            East boundary, North boundary). Defaults to empty tuple.
        regrid (bool, optional): If True, then data is regridded onto the
            grid defined by my_area. Defaults to False.
        my_area (AreaDefinition, optional): pyresample.geometry.AreaDefinition
            Only used if regrid=True. Defaults to a Hatano equal area
            projection ~4.5 km resolution extending from ~33W to ~63E and
            ~29S to ~29N.
        interp_coeffs (tuple, optional): Interpolation coefficients that may
            be used for bilinear interpolation onto new grid. Facilitates
            use of the same coefficients when the regridding operation is
            repeated in multiple calls to read_seviri_channel.
            Defaults to empty tuple.
        reflectance_correct (bool, optional): Correct visible reflectances
            for variation in solar zenith angle and earth-sun distance.
            Defaults to False.

    Returns:
        data (dict): Dictionary containing following entries:
            lons (ndarray, shape(nlat,nlon)): Array of longitude values
            lats (ndarray, shape(nlat,nlon)): Array of latitude values
            interp_coeffs (tuple): If regrid is True, then the interpolation
                coefficients are returned in this variable to speed up
                future regridding
            channel (ndarray, shape(nlat,nlon)): Dictionary contains a
                separate entry for each channel in channel_list
    '''
    filenames = []
    sat_names = ['MSG4', 'MSG3', 'MSG2', 'MSG1']
    sat_ind = -1
    if time in unavailable_times:
        raise UnavailableFileError("SEVIRI observations for "
                                   + time.strftime("%Y/%m/%d_%H%M")
                                   + " are not available")
    # Sometimes have data from multiple instruments (e.g. 20160504_1045 has
    # MSG3 and MSG1); this ensures the most recent is prioritised.
    # BUG FIX: was bitwise ``&`` on booleans; ``and`` is the intended
    # short-circuiting logical operator.
    while len(filenames) == 0 and sat_ind < len(sat_names) - 1:
        sat_ind += 1
        # PRO and EPI files necessary in all scenarios
        filenames = (glob.glob(sev_data_dir1
                               + time.strftime("%Y/%Y%m%d/%H/*")
                               + sat_names[sat_ind]
                               + time.strftime("*EPI*%Y%m%d%H%M-*"))
                     + glob.glob(sev_data_dir1
                                 + time.strftime("%Y/%Y%m%d/%H/*")
                                 + sat_names[sat_ind]
                                 + time.strftime("*PRO*%Y%m%d%H%M-*")))
        sev_dir = (sev_data_dir1 + time.strftime("%Y/%Y%m%d/%H/*")
                   + sat_names[sat_ind])
    if len(filenames) == 0:
        # Try alternative directory for SEVIRI data.
        filenames = (glob.glob(sev_data_dir2
                               + time.strftime("%Y/%Y%m%d/%H/*")
                               + time.strftime("*EPI*%Y%m%d%H%M-*"))
                     + glob.glob(sev_data_dir2
                                 + time.strftime("%Y/%Y%m%d/%H/*")
                                 + time.strftime("*PRO*%Y%m%d%H%M-*")))
        sev_dir = sev_data_dir2 + time.strftime("%Y/%Y%m%d/%H/*")
    if len(filenames) == 0:
        # BUG FIX: this was a Python 2 ``print`` statement, which is a
        # syntax error under Python 3.  A single pre-built string keeps
        # the output identical under both interpreters.
        search_pattern = (sev_data_dir2 + time.strftime("%Y/%Y%m%d/%H/*")
                          + sat_names[sat_ind]
                          + time.strftime("*EPI*%Y%m%d%H%M-*"))
        print('sev_data_dir2+time.strftime("%Y/%Y%m%d/%H/*")'
              '+sat_names[sat_ind]+time.strftime("*EPI*%Y%m%d%H%M-*")= '
              + search_pattern)
        raise MissingFileError(
            "SEVIRI observations for " + time.strftime("%Y/%m/%d_%H%M")
            + " are missing. Please check if they can be downloaded and "
            "if not, add to the list of unavailable times.")
    else:
        # add channels required
        for channel in channel_list:
            filenames = filenames + glob.glob(
                sev_dir + '*' + file_dict[channel]
                + time.strftime("*%Y%m%d%H%M-*"))
    scene = satpy.Scene(reader="seviri_l1b_hrit", filenames=filenames)
    data = {}
    scene.load(channel_list)
    if reflectance_correct:
        lons, lats = scene[channel_list[0]].area.get_lonlats()
        if 0.8 in channel_list:
            scene[0.8] = reflectance_correction(scene[0.8], lons, lats)
        if 0.6 in channel_list:
            scene[0.6] = reflectance_correction(scene[0.6], lons, lats)
    if regrid:  # idiomatic truthiness test (was ``regrid != False``)
        lons, lats = my_area.get_lonlats()
        if len(interp_coeffs) == 0:
            interp_coeffs = bilinear.get_bil_info(
                scene[channel_list[0]].area, my_area, radius=50e3, nprocs=1)
            data.update({'interp_coeffs': interp_coeffs})
        for channel in channel_list:
            data.update({str(channel): bilinear.get_sample_from_bil_info(
                scene[channel].values.ravel(), interp_coeffs[0],
                interp_coeffs[1], interp_coeffs[2], interp_coeffs[3],
                output_shape=my_area.shape)})
    else:
        if len(subdomain) > 0:
            scene = scene.crop(ll_bbox=subdomain)
        lons, lats = scene[channel_list[0]].area.get_lonlats()
        # Need to invert y-axis to get longitudes increasing.
        lons = lons[:, ::-1]
        lats = lats[:, ::-1]
        for channel in channel_list:
            data.update({str(channel): scene[channel].values[:, ::-1]})
    data.update({'lons': lons, 'lats': lats,
                 'interp_coeffs': interp_coeffs})
    # Compressed files are decompressed to TMPDIR. Now tidy up
    delete_list = glob.glob(my_tmpdir + time.strftime("*%Y%m%d%H%M-*"))
    for d in delete_list:
        os.remove(d)
    return data