class ReaderWrapper(roles.FrontendRole):
    """Wrap a satpy ``Scene`` behind the frontend interface.

    Subclasses set the class attributes below to describe the reader they
    wrap; this base class handles file discovery, Scene creation, and
    product loading.
    """

    FILE_EXTENSIONS = []
    DEFAULT_READER_NAME = None
    DEFAULT_DATASETS = []
    # This is temporary until a better solution is found for loading
    # start/end time on init
    PRIMARY_FILE_TYPE = None

    def __init__(self, **kwargs):
        self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
        super(ReaderWrapper, self).__init__(**kwargs)
        pathnames = self.find_files_with_extensions()
        # Remove keyword arguments that Satpy readers won't understand;
        # leaving them in `kwargs` would break Scene creation because the
        # whole dict is forwarded as reader_kwargs below.
        for key in ('search_paths', 'keep_intermediate',
                    'overwrite_existing', 'exit_on_error'):
            kwargs.pop(key, None)
        # Create a satpy Scene object
        self.scene = Scene(reader=self.reader, filenames=pathnames,
                           reader_kwargs=kwargs)
        self._begin_time = self.scene.start_time
        self._end_time = self.scene.end_time
        self.wishlist = set()

    @property
    def begin_time(self):
        """Start time of the wrapped Scene's data."""
        return self._begin_time

    @property
    def end_time(self):
        """End time of the wrapped Scene's data."""
        return self._end_time

    @property
    def available_product_names(self):
        """Names of datasets/composites that can be created from the files found."""
        return self.scene.available_dataset_names(reader_name=self.reader, composites=True)

    @property
    def all_product_names(self):
        """Names of all datasets/composites known to the reader."""
        return self.scene.all_dataset_names(reader_name=self.reader, composites=True)

    @property
    def default_products(self):
        """Products loaded when the caller does not specify any."""
        return self.DEFAULT_DATASETS

    def filter(self, scene):
        """Hook for subclasses to filter the loaded scene; no-op by default."""
        pass

    def create_scene(self, products=None, **kwargs):
        """Load `products` into the wrapped Scene and return it.

        :param products: product names to load; defaults to
            :attr:`default_products` when None
        :returns: the satpy Scene with the requested products loaded
        """
        LOG.debug("Loading scene data...")
        # If the user didn't provide the products they want, figure out
        # which ones we can create
        if products is None:
            LOG.debug("No products specified to frontend, will try to load logical defaults products")
            products = self.default_products

        # Frontend-only keywords must not reach Scene.load; use a default so
        # a missing key doesn't raise KeyError (pop() without a default did)
        kwargs.pop("overwrite_existing", None)
        kwargs.pop("exit_on_error", None)
        kwargs.pop("keep_intermediate", None)
        self.scene.load(products, **kwargs)
        self.wishlist = self.scene.wishlist

        # Apply Filters
        self.filter(self.scene)

        # Delete the satpy scene so memory is cleared out if it isn't used
        # by the caller
        scene = self.scene
        self.scene = None
        return scene
def plot_coastlines_on_map(composite, files, photo_extent, points, result_path, dpi=800):
    """Plot `composite` loaded from `files` on a Mercator map with coastlines.

    :param composite: name of the satpy composite/dataset to load and show
    :param files: input file paths handed to the satpy Scene
    :param photo_extent: map extent passed to ``Axes.set_extent``
    :param points: pair of sequences ``(lats, lons)`` drawn as red stars
    :param result_path: path the rendered figure is saved to
    :param dpi: resolution of the saved figure
    """
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    from satpy.scene import Scene

    scn = Scene(filenames=files)
    scn.load([composite])
    new_scn = scn
    crs = new_scn[composite].attrs['area'].to_cartopy_crs()

    ax1 = plt.axes(projection=ccrs.Mercator())
    ax1.set_extent(photo_extent)
    ax1.coastlines(color='r')
    plt.plot()

    # Mark each requested (lat, lon) location
    for i, (lat, lon) in enumerate(zip(points[0], points[1])):
        plt.plot(lon, lat, 'r*', ms=15, transform=ccrs.Geodetic())

    # BUG FIX: display the dataset that was actually loaded instead of the
    # hard-coded 'VIS006' channel, which raised KeyError for any other
    # composite (matches the sibling draw_polygons_on_map).
    plt.imshow(new_scn[composite], transform=crs, extent=crs.bounds,
               origin='upper', cmap='gray')
    plt.savefig(result_path, dpi=dpi)
    return ()
def test_getitem_modifiers(self):
    """Test __getitem__ with names and modifiers."""
    from satpy import Scene, Dataset, DatasetID
    import numpy as np

    # Name lookup should return the least modified matching item
    scene = Scene()
    scene['1'] = ds1_m0 = Dataset(np.arange(5))
    scene[DatasetID(name='1', modifiers=('mod1',))] = ds1_m1 = Dataset(np.arange(5))
    self.assertIs(scene['1'], ds1_m0)
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(len(list(scene.keys())), 2)

    scene = Scene()
    scene['1'] = ds1_m0 = Dataset(np.arange(5))
    scene[DatasetID(name='1', modifiers=('mod1',))] = ds1_m1 = Dataset(np.arange(5))
    scene[DatasetID(name='1', modifiers=('mod1', 'mod2'))] = ds1_m2 = Dataset(np.arange(5))
    self.assertIs(scene['1'], ds1_m0)
    self.assertEqual(len(list(scene.keys())), 3)

    # With no unmodified version present, the least modified one wins
    scene = Scene()
    scene[DatasetID(name='1', modifiers=('mod1', 'mod2'))] = ds1_m2 = Dataset(np.arange(5))
    scene[DatasetID(name='1', modifiers=('mod1',))] = ds1_m1 = Dataset(np.arange(5))
    self.assertIs(scene['1'], ds1_m1)
    self.assertIs(scene[DatasetID('1', modifiers=('mod1', 'mod2'))], ds1_m2)
    self.assertRaises(KeyError, scene.__getitem__,
                      DatasetID(name='1', modifiers=tuple()))
    self.assertEqual(len(list(scene.keys())), 2)
def step_impl(context):
    """Build a VIIRS scene and record which datasets it can provide."""
    from datetime import datetime

    from satpy.scene import Scene

    os.chdir("/tmp/")
    scene = Scene(platform_name="Suomi-NPP",
                  sensor="viirs",
                  start_time=datetime(2015, 3, 11, 11, 20),
                  end_time=datetime(2015, 3, 11, 11, 26))
    context.available_dataset_ids = scene.available_datasets()
def __init__(self, **kwargs):
    """Create the wrapped satpy Scene from files found on disk."""
    self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
    super(ReaderWrapper, self).__init__(**kwargs)
    filenames = self.find_files_with_extensions()
    # Build the underlying satpy Scene for the discovered files
    self.scene = Scene(reader=self.reader, filenames=filenames)
    self._begin_time = self.scene.start_time
    self._end_time = self.scene.end_time
def step_impl(context):
    """Build a VIIRS scene and record the available dataset IDs."""
    from datetime import datetime

    from satpy.scene import Scene

    os.chdir("/tmp/")
    scene = Scene(platform_name="Suomi-NPP",
                  sensor="viirs",
                  start_time=datetime(2015, 3, 11, 11, 20),
                  end_time=datetime(2015, 3, 11, 11, 26))
    context.available_dataset_ids = scene.available_dataset_ids()
def step_impl(context):
    """Build a VIIRS scene, load the M02 band, and stash it on the context."""
    from datetime import datetime

    from satpy.scene import Scene

    os.chdir("/tmp/")
    scene = Scene(platform_name="Suomi-NPP",
                  sensor="viirs",
                  start_time=datetime(2015, 3, 11, 11, 20),
                  end_time=datetime(2015, 3, 11, 11, 26))
    scene.load(["M02"])
    context.scene = scene
def crop(self, st, et, delta):
    """Crop ENTLN flash data into `delta`-minute windows between `st` and `et`
    and split each window into IC (in-cloud) and CG (cloud-to-ground) flashes.

    NOTE(review): relies on module-level globals (entln_path, only_cg,
    iccg_ratio, ic_de, cg_de) -- confirm they are defined before calling.
    """
    # Crop data every 'delta' and split into IC and CG
    scn = Scene(glob.glob(entln_path + 'LtgFlashPortions' + st.strftime('%Y%m%d') + '.csv'), reader='entln')
    vname = 'timestamp'  # any name in data is OK, because we just bin the counts
    scn.load([vname])
    # ---- loop through hour and delta interval ----- #
    for h in range(st.hour, et.hour):
        for m in range(0, 60, delta):
            # 1. -----Crop by delta----- #
            # flash times as second-resolution numpy datetimes
            timestamp = scn[vname].timestamp.values.astype('datetime64[s]')
            if m + delta < 60:
                cond = (timestamp >= st.replace(hour=h, minute=m)) & (
                    timestamp < st.replace(hour=h, minute=m + delta))
            else:
                # window would cross the top of the hour; clip at h+1:00
                cond = (timestamp >= st.replace(hour=h, minute=m)) & (
                    timestamp < st.replace(hour=h + 1, minute=0))
            # 2. -----Crop by type ----- #
            # type == 1 marks IC flashes; everything else is treated as CG
            self.ic = copy.deepcopy(scn)
            self.cg = copy.deepcopy(scn)
            cond_cg = (scn[vname].type != 1) & (cond)
            cond_ic = (scn[vname].type == 1) & (cond)
            self.cg[vname] = self.cg[vname][cond_cg]
            # if we only use CG data, IC is equal to CG here
            # and the constant ratio: IC/CG = iccg_ratio is used later
            if only_cg:
                self.ic[vname] = self.ic[vname][cond_cg]
            else:
                self.ic[vname] = self.ic[vname][cond_ic]
            # Correct attrs: rebuild swath geometry from the filtered coords
            area_ic = SwathDefinition(lons=self.ic[vname].coords['longitude'], \
                                      lats=self.ic[vname].coords['latitude'] )
            area_cg = SwathDefinition(lons=self.cg[vname].coords['longitude'], \
                                      lats=self.cg[vname].coords['latitude'] )
            self.correct_attrs(self.ic, area_ic, vname)
            self.correct_attrs(self.cg, area_cg, vname)
            # 3. -----Crop by WRF_grid ----- #
            self.resample_WRF()
            # total lightning = IC + CG scaled by detection efficiencies
            if only_cg:
                self.tl = (self.ic[vname] * iccg_ratio + self.cg[vname]) / cg_de
            else:
                self.tl = self.ic[vname] / ic_de + self.cg[vname] / cg_de
            self.save(vname, h, m)
def __init__(self, **kwargs):
    """Create the wrapped satpy Scene, forwarding only reader-safe kwargs."""
    self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
    super(ReaderWrapper, self).__init__(**kwargs)
    filenames = self.find_files_with_extensions()

    # Strip frontend-only keywords before handing the rest to the reader
    for frontend_key in ('search_paths', 'keep_intermediate',
                         'overwrite_existing', 'exit_on_error'):
        kwargs.pop(frontend_key, None)

    # Build the underlying satpy Scene
    self.scene = Scene(reader=self.reader, filenames=filenames,
                       reader_kwargs=kwargs)
    self._begin_time = self.scene.start_time
    self._end_time = self.scene.end_time
    self.wishlist = set()
class TestBaseWriter(unittest.TestCase):
    """Test the base writer class."""

    def setUp(self):
        """Create a temporary output directory and a one-dataset scene."""
        import tempfile
        from datetime import datetime
        from satpy.scene import Scene
        import dask.array as da

        data = xr.DataArray(da.zeros((100, 200), chunks=50),
                            dims=('y', 'x'),
                            attrs={'name': 'test',
                                   'start_time': datetime(2018, 1, 1, 0, 0, 0)})
        self.scn = Scene()
        self.scn['test'] = data
        # Temp dir
        self.base_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Remove the temporary directory created for a test"""
        try:
            shutil.rmtree(self.base_dir, ignore_errors=True)
        except OSError:
            pass

    def test_save_dataset_static_filename(self):
        """Test saving a dataset with a static filename specified."""
        self.scn.save_datasets(base_dir=self.base_dir, filename='geotiff.tif')
        expected = os.path.join(self.base_dir, 'geotiff.tif')
        self.assertTrue(os.path.isfile(expected))

    def test_save_dataset_dynamic_filename(self):
        """Test saving a dataset with a format filename specified."""
        fmt_fn = 'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif'
        exp_fn = 'geotiff_test_20180101_000000.tif'
        self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, exp_fn)))

    def test_save_dataset_dynamic_filename_with_dir(self):
        """Test saving a dataset with a format filename that includes a directory."""
        fmt_fn = os.path.join('{start_time:%Y%m%d}',
                              'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif')
        exp_fn = os.path.join('20180101', 'geotiff_test_20180101_000000.tif')
        self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, exp_fn)))

        # change the filename pattern but keep the same directory
        fmt_fn2 = os.path.join('{start_time:%Y%m%d}',
                               'geotiff_{name}_{start_time:%Y%m%d_%H}.tif')
        exp_fn2 = os.path.join('20180101', 'geotiff_test_20180101_00.tif')
        self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn2)
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, exp_fn2)))
        # the original file should still exist
        self.assertTrue(os.path.isfile(os.path.join(self.base_dir, exp_fn)))
def _create_scene(self):
    """Create a scene object from available data.

    Returns
    -------
    satpy.scene.Scene
        Initialized scene object
    """
    data = self.message.data
    # Pad the search window so files right on the orbit boundary are found
    filter_parameters = {
        "start_time": data["start_time"] - ORBIT_SLACK,
        "end_time": data["end_time"] + ORBIT_SLACK,
        "platform_name": data["platform_name"],
    }
    filenames = find_files_and_readers(
        base_dir="/viirs/sdr",
        reader="viirs_sdr",
        filter_parameters=filter_parameters,
    )
    try:
        scene = Scene(filenames=filenames, reader="viirs_sdr")
    except ValueError:
        logger.exception("Loading files didn't go well: %s", filenames)
        # Bare raise preserves the original traceback; `raise e` would
        # re-anchor it at this line
        raise
    return scene
def from_files(cls, files_to_sort, reader=None, ensure_all_readers=False, **kwargs):
    """Create multiple Scene objects from multiple files.

    Args:
        files_to_sort (Collection[str]): files to read
        reader (str or Collection[str]): reader or readers to use
        ensure_all_readers (bool): If True, limit to scenes where all
            readers have at least one file.  If False (default), include
            all scenes where at least one reader has at least one file.

    This uses the :func:`satpy.readers.group_files` function to group
    files.  See this function for more details on additional possible
    keyword arguments.  In particular, it is strongly recommended to pass
    `"group_keys"` when using multiple instruments.

    .. versionadded:: 0.12

    """
    from satpy.readers import group_files

    groups = group_files(files_to_sort, reader=reader, **kwargs)
    if ensure_all_readers:
        # Drop any group where some reader matched no files at all
        groups = [group for group in groups if all(group.values())]
    return cls(Scene(filenames=group) for group in groups)
def test_setitem(self):
    """Assigning a dataset registers it in both datasets and wishlist."""
    import numpy as np

    from satpy import Scene, Dataset

    scene = Scene()
    scene["1"] = ds1 = Dataset(np.arange(5))
    self.assertSetEqual(set(scene.datasets.keys()), {ds1.id})
    self.assertSetEqual(set(scene.wishlist), {ds1.id})
def _create_scene(file_format, filenames, calib_coefs):
    """Build a Scene for `filenames`, injecting external calibration coefficients."""
    reader_kwargs = {
        'calib_mode': CALIB_MODE,
        'ext_calib_coefs': calib_coefs,
    }
    return Scene(reader=file_format,
                 filenames=filenames,
                 reader_kwargs=reader_kwargs)
def blend(self, blend_function=stack):
    """Blend the datasets into one scene.

    Reduce the :class:`MultiScene` to a single :class:`~satpy.scene.Scene`.
    Every dataset id shared by the scenes is passed to *blend_function*,
    which receives a list of datasets (:class:`xarray.DataArray` objects)
    and must return a single dataset (:class:`xarray.DataArray` object);
    the result is assigned to the blended scene.  Blending functions
    provided in this module are :func:`stack` (the default) and
    :func:`timeseries`, but the Python built-in function :func:`sum` also
    works and may be appropriate for some types of data.

    .. note::

        Blending is not currently optimized for generator-based MultiScene.

    """
    blended = Scene()
    for ds_id in self.shared_dataset_ids:
        contributions = [scn[ds_id] for scn in self.scenes if ds_id in scn]
        blended[ds_id] = blend_function(contributions)
    return blended
def _create_scene(file_format, filenames, calib_coefs):
    """Build a Scene for `filenames` using the configured calibration mode."""
    reader_kwargs = {
        'calib_mode': CalibrationData.SATPY_CALIB_MODE.value,
        'ext_calib_coefs': calib_coefs,
    }
    return Scene(reader=file_format,
                 filenames=filenames,
                 reader_kwargs=reader_kwargs)
def _get_test_dataset_calibration_one_dataset(self, bands=1):
    """Helper function to create a single test dataset."""
    import dask.array as da
    import xarray as xr
    from datetime import datetime
    from pyresample.geometry import AreaDefinition
    from pyresample.utils import proj4_str_to_dict
    from satpy import DatasetID
    from satpy.scene import Scene

    # Polar-stereographic test area
    area_def = AreaDefinition(
        'test',
        'test',
        'test',
        proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 '
                          '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'),
        100,
        200,
        (-1000., -1500., 1000., 1500.),
    )
    prereqs = [DatasetID(name='4', calibration='brightness_temperature')]

    scene = Scene()
    scene["4"] = xr.DataArray(
        da.zeros((100, 200), chunks=50),
        dims=('y', 'x'),
        attrs={'calibration': 'brightness_temperature'})
    data = scene['4']
    # Collect the calibration of every dataset currently in the scene
    calibration = [p.attrs['calibration'] for p in scene]

    new_attrs = {
        'name': 'datasets',
        'start_time': datetime.utcnow(),
        'platform_name': "TEST_PLATFORM_NAME",
        'sensor': 'test-sensor',
        'area': area_def,
        'prerequisites': prereqs,
        'metadata_requirements': {
            'order': ['4'],
            'config': {
                '4': {
                    'alias': 'BT',
                    'calibration': 'brightness_temperature',
                    'min-val': '-150',
                    'max-val': '50'
                },
            },
            'translate': {
                '4': '4',
            },
            'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff'
        }
    }
    return xr.DataArray(data=data.data,
                        attrs=new_attrs,
                        dims=data.dims,
                        coords=data.coords)
def __init__(self, **kwargs):
    """Create the wrapped satpy Scene from files found on disk.

    Only reader-safe keyword arguments are forwarded to the Scene.
    """
    self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
    super(ReaderWrapper, self).__init__(**kwargs)
    pathnames = self.find_files_with_extensions()
    # Remove keyword arguments that Satpy readers won't understand; the
    # remaining dict is forwarded wholesale as reader_kwargs below, and
    # unknown keys would break reader creation
    for key in ('search_paths', 'keep_intermediate', 'overwrite_existing',
                'exit_on_error'):
        kwargs.pop(key, None)
    # Create a satpy Scene object
    self.scene = Scene(reader=self.reader, filenames=pathnames,
                       reader_kwargs=kwargs)
    self._begin_time = self.scene.start_time
    self._end_time = self.scene.end_time
def test_available_composites_no_datasets(self):
    """An empty scene reports no available composites."""
    from satpy import Scene

    scene = Scene()
    self.assertListEqual(
        scene.available_composite_ids(available_datasets=[]), [])
    # no sensors are loaded so we shouldn't get any comps either
    self.assertListEqual(
        scene.available_composite_names(available_datasets=[]), [])
def test_iter(self):
    """Iterating a scene yields the Dataset objects it holds."""
    import numpy as np

    from satpy import Scene, Dataset

    scene = Scene()
    for name in ("1", "2", "3"):
        scene[name] = Dataset(np.arange(5))
    for item in scene:
        self.assertIsInstance(item, Dataset)
def blend(self, blend_function=stack):
    """Blend the datasets into one scene."""
    blended = Scene()
    for ds_id in self.shared_dataset_ids:
        contributions = [scn[ds_id] for scn in self.scenes if ds_id in scn]
        blended[ds_id] = blend_function(contributions)
    return blended
def test_contains(self):
    """Membership works by name and by wavelength within the dataset range."""
    import numpy as np

    from satpy import Scene, Dataset

    scene = Scene()
    scene["1"] = Dataset(np.arange(5), wavelength=(0.1, 0.2, 0.3))
    self.assertIn('1', scene)
    self.assertIn(0.15, scene)
    self.assertNotIn('2', scene)
    self.assertNotIn(0.31, scene)
def setUp(self):
    """Create temporary directory to save files to and a mock scene."""
    import tempfile
    from datetime import datetime
    from satpy.scene import Scene

    data = xr.DataArray(da.zeros((100, 200), chunks=50),
                        dims=('y', 'x'),
                        attrs={'name': 'test',
                               'start_time': datetime(2018, 1, 1, 0, 0, 0)})
    self.scn = Scene()
    self.scn['test'] = data
    # Temp dir
    self.base_dir = tempfile.mkdtemp()
def test_available_dataset_names_no_readers(self):
    """With no readers: unknown reader name raises, name lists are empty."""
    from satpy import Scene

    scene = Scene()
    self.assertRaises(KeyError, scene.available_dataset_names,
                      reader_name='fake')
    self.assertListEqual(scene.available_dataset_names(), [])
    # no sensors are loaded so we shouldn't get any comps either
    self.assertListEqual(scene.available_dataset_names(composites=True), [])
def test_iter(self):
    """Iterating a scene yields the Projectable objects it holds."""
    import numpy as np

    from satpy import Scene, Projectable

    scene = Scene()
    for name in ("1", "2", "3"):
        scene[name] = Projectable(np.arange(5))
    for item in scene:
        self.assertIsInstance(item, Projectable)
def draw_polygons_on_map(polygons, lines, points, composite, files, photo_extent
                         , result_path, projection='Stereographic', dpi=200):
    """Render `composite` on a map and overlay polygons, lines and points.

    :param polygons: sequence of (lats, lons) pairs drawn as filled shapes
    :param lines: sequence of (lats, lons) pairs drawn as marker chains
    :param points: sequence of (lat, lon) pairs drawn as single markers
    :param composite: satpy composite/dataset name to load and show
    :param files: input file paths handed to the satpy Scene
    :param photo_extent: extent for the axes, or 'global' for the whole globe
    :param result_path: path the rendered figure is saved to
    :param projection: name of a cartopy CRS class to project onto
    :param dpi: resolution of the saved figure
    """
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    from satpy.scene import Scene

    polygon_colors = ['g', 'b', 'y', 'm', 'k', 'c', 'w']
    line_colors = ['m', 'k', 'c', 'w']
    point_colors = ['y', 'y', 'y']

    scn = Scene(filenames=files)
    scn.load([composite])
    new_scn = scn
    crs = new_scn[composite].attrs['area'].to_cartopy_crs()

    proj = getattr(ccrs, projection)()
    ax0 = plt.axes(projection=proj)
    if photo_extent == 'global':
        ax0.set_global()
    else:
        ax0.set_extent(photo_extent, crs=ccrs.PlateCarree())
    ax0.gridlines()
    ax0.coastlines(color='r')
    plt.plot()

    # Filled polygons
    for idx, polygon in enumerate(polygons):
        poly_lats, poly_lons = polygon[0], polygon[1]
        for la, lo in zip(poly_lats, poly_lons):
            plt.fill(lo, la, transform=ccrs.PlateCarree(),
                     color=polygon_colors[idx])

    # Lines drawn as chains of markers
    for idx, line in enumerate(lines):
        line_lats, line_lons = line[0], line[1]
        for la, lo in zip(line_lats, line_lons):
            plt.plot(lo, la, 'ok', markersize=4, transform=ccrs.PlateCarree(),
                     color=line_colors[idx])

    # Individual points
    for idx, point in enumerate(points):
        la, lo = point[0], point[1]
        print(la, lo)
        plt.plot(lo, la, 'ok', markersize=4, transform=ccrs.PlateCarree(),
                 color=point_colors[idx])

    plt.imshow(new_scn[composite], transform=crs, extent=crs.bounds,
               origin='upper', cmap='gray')
    plt.savefig(result_path, dpi=dpi)
    return ()
def setUp(self):
    """Set up tests."""
    import tempfile
    from datetime import datetime
    from satpy.scene import Scene
    import dask.array as da

    data = xr.DataArray(da.zeros((100, 200), chunks=50),
                        dims=('y', 'x'),
                        attrs={'name': 'test',
                               'start_time': datetime(2018, 1, 1, 0, 0, 0)})
    self.scn = Scene()
    self.scn['test'] = data
    # Temp dir
    self.base_dir = tempfile.mkdtemp()
def test_getitem(self):
    """Test __getitem__ with names only"""
    import numpy as np

    from satpy import Scene, Dataset

    scene = Scene()
    expected = {}
    for name in ('1', '2', '3'):
        scene[name] = expected[name] = Dataset(np.arange(5))
    for name in ('1', '2', '3'):
        self.assertIs(scene[name], expected[name])
    self.assertRaises(KeyError, scene.__getitem__, '4')
def step_impl(context):
    """
    :type context: behave.runner.Context
    """
    from datetime import datetime

    from satpy.dataset import Dataset
    from satpy.scene import Scene

    scene = Scene(platform_name="Suomi-NPP",
                  sensor="viirs",
                  start_time=datetime(2015, 3, 11, 11, 20),
                  end_time=datetime(2015, 3, 11, 11, 26))
    # Attach a small hand-made dataset for the following steps to use
    scene["MyDataset"] = Dataset([[1, 2], [3, 4]])
    context.scene = scene
def setup_method(self):
    """Set up tests."""
    import tempfile
    from datetime import datetime
    from satpy.scene import Scene

    # One dataset with a single sensor, one with a set of sensors
    single_sensor = xr.DataArray(da.zeros((100, 200), chunks=50),
                                 dims=('y', 'x'),
                                 attrs={'name': 'test',
                                        'start_time': datetime(2018, 1, 1, 0, 0, 0),
                                        'sensor': 'fake_sensor'})
    multi_sensor = single_sensor.copy()
    multi_sensor.attrs['sensor'] = {'fake_sensor1', 'fake_sensor2'}
    self.scn = Scene()
    self.scn['test'] = single_sensor
    self.scn['test2'] = multi_sensor
    # Temp dir
    self.base_dir = tempfile.mkdtemp()
def test_delitem(self):
    """Deleting by name or wavelength removes datasets and wishlist entries."""
    from satpy import Scene, Dataset
    import numpy as np

    scene = Scene()
    scene["1"] = ds1 = Dataset(np.arange(5), wavelength=(0.1, 0.2, 0.3))
    scene["2"] = ds2 = Dataset(np.arange(5), wavelength=(0.4, 0.5, 0.6))
    scene["3"] = ds3 = Dataset(np.arange(5), wavelength=(0.7, 0.8, 0.9))
    del scene['1']
    del scene['3']
    # deleting by a wavelength inside dataset 2's range removes it too
    del scene[0.45]
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual(len(scene.wishlist), 0)
    self.assertEqual(len(scene.datasets.keys()), 0)
    self.assertRaises(KeyError, scene.__delitem__, 0.2)
def test_geotiff_scene_nan(self):
    """Test reading TIFF images originally containing NaN values via satpy.Scene()."""
    from satpy import Scene

    # With a fill value, the NaN pixels come back as zeros
    fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif')
    scene = Scene(reader='generic_image', filenames=[fname])
    scene.load(['image'])
    self.assertEqual(scene['image'].shape, (1, self.y_size, self.x_size))
    self.assertEqual(np.sum(scene['image'].data[0][:10, :10].compute()), 0)

    # Without a fill value, the NaN pixels stay NaN
    fname = os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif')
    scene = Scene(reader='generic_image', filenames=[fname])
    scene.load(['image'])
    self.assertEqual(scene['image'].shape, (1, self.y_size, self.x_size))
    self.assertTrue(
        np.all(np.isnan(scene['image'].data[0][:10, :10].compute())))
def from_files(cls, files_to_sort, reader=None, **kwargs):
    """Create multiple Scene objects from multiple files.

    This uses the :func:`satpy.readers.group_files` function to group
    files.  See this function for more details on possible keyword
    arguments.

    .. versionadded:: 0.12

    """
    from satpy.readers import group_files

    groups = group_files(files_to_sort, reader=reader, **kwargs)
    return cls(Scene(filenames=group) for group in groups)
def scene_examples():
    """Example: build VIIRS SDR and Meteosat HRIT scenes and load channels."""
    from datetime import datetime

    from satpy.scene import Scene

    # Suomi-NPP VIIRS direct-readout pass
    viirs_scene = Scene(
        platform_name="SNPP",
        sensor="viirs",
        start_time=datetime(2015, 4, 20, 12, 3),
        end_time=datetime(2015, 4, 20, 12, 10),
        base_dir="/home/a000680/data/polar_in/direct_readout/npp/lvl1/npp_20150420_1202_18019",
        reader="viirs_sdr"
    )
    viirs_scene.load(['M05', 'M08', 'M15'])

    # Meteosat-10 SEVIRI HRIT data, channels selected by wavelength
    seviri_scene = Scene(
        sensor="seviri",
        base_dir="/home/a000680/data/hrit/20150420",
        reader="hrit_msg"
    )
    seviri_scene.load([0.6, 0.8, 11.0])
    return
# along with this program. If not, see <http://www.gnu.org/licenses/>. """Code example with OSISAF SST with satpy """ from satpy.scene import Scene #from satpy.utils import debug_on # debug_on() if __name__ == '__main__': scn = Scene( sensor='viirs', satid='NPP', filenames=[ "/home/a000680/data/osisaf/S-OSI_-FRA_-NPP_-NARSST_FIELD-201609081300Z.nc"], reader='ghrsst_osisaf' ) scn.load(['sea_surface_temperature']) lcd = scn.resample('euro4', radius_of_influence=2000) sstdata = lcd['sea_surface_temperature'][:] import numpy as np arr = np.ma.where(np.less_equal(sstdata, 0), 0, sstdata - 273.15) # Convert sst to numbers between 0 and 28, corresponding to the lut: data = np.ma.where(np.less(arr, 0), 28, 28.0 - arr) data = np.ma.where(np.greater(arr, 23.0), 4, data).round().astype('uint8')
# Script: render an MSG "natural" composite over the Alps, burn in coastlines
# and borders, then annotate the image with a title bar.
# NOTE(review): the script appears to continue beyond this excerpt
# (textSizeName is computed but not yet used here).
from glob import glob
from satpy.scene import Scene
from satpy.utils import debug_on
from pycoast import ContourWriterAGG
import aggdraw
import PIL
from PIL import Image, ImageFont, ImageDraw
from mpop.projector import get_area_def

debug_on()
fname="msg4-alps-snow.png"
my_area="europe_center"

# Load data by filenames
files = glob("data/H-*")
scn = Scene(reader="hrit_msg", filenames=files)
scn.load(["natural"])
lscn = scn.resample(my_area)
# Save RGB geotiff
lscn.save_dataset("natural", filename=fname)

# Burn coastlines and country borders into the saved image
cw = ContourWriterAGG('/opt/pytroll/shapes')
europe = get_area_def(my_area)
cw.add_coastlines_to_file(fname, europe, resolution='l', level=1,
                          outline=(255, 255, 255))
cw.add_borders_to_file(fname, europe, outline=(255, 255, 255),resolution='i')

# Annotate: orange bar across the top of the image for a title
img = Image.open(fname)
draw = ImageDraw.Draw(img)
print(img.size)
draw.rectangle([(0, 0), (img.size[0], 25)], fill=(255,165,0,200))
font = ImageFont.truetype("/usr/openv/java/jre/lib/fonts/LucidaTypewriterBold.ttf", 18)
textSizeName = draw.textsize("Meteosat 11", font=font)
#metopa.retrieve(metopa_url, "metopa.txt") #metopb = urllib.URLopener() #metopb.retrieve(metopb_url, "metopb.txt") cw = ContourWriterAGG('/opt/pytroll/shapes') europe = load_area(os.environ['PPP_CONFIG_DIR']+"/areas.def", 'ccs4large') world = load_area(os.environ['PPP_CONFIG_DIR']+"/areas.def", 'world_plat_1350_675') for sat in ["M01", "M02", "M03"]: fnames = glob(dataDir+"AVHR_xxx_*"+sat+"*") if not fnames: continue glbl = Scene(reader="avhrr_eps_l1b", filenames=fnames) if sat == "M01": satname="B" if sat == "M02": satname="A" if sat == "M03": satname="C" #glbl.load(['true_color_raw', 'night_fog']) glbl.load(['natural_color', 'night_fog']) delta_time = unix_time_sec(glbl.end_time) - unix_time_sec(glbl.start_time) sat_pos_time = glbl.start_time + timedelta(seconds=delta_time) st = sat_pos_time.strftime('%y%m%d%H%M') #orb = Orbital("Metop-A", tle_file="metopa.txt") #orb = Orbital("Metop-"+satname) #dtobj = datetime(int(sat_pos_time.strftime('%Y')), # int(sat_pos_time.strftime('%m')),