Example #1
    def test_getitem_modifiers(self):
        """Test __getitem__ with names and modifiers"""
        from satpy import Scene, Dataset, DatasetID
        import numpy as np

        # Return least modified item
        scene = Scene()
        scene['1'] = ds1_m0 = Dataset(np.arange(5))
        scene[DatasetID(name='1', modifiers=('mod1',))] = ds1_m1 = Dataset(np.arange(5))
        self.assertIs(scene['1'], ds1_m0)
        self.assertEqual(len(list(scene.keys())), 2)

        scene = Scene()
        scene['1'] = ds1_m0 = Dataset(np.arange(5))
        scene[DatasetID(name='1', modifiers=('mod1',))] = ds1_m1 = Dataset(np.arange(5))
        scene[DatasetID(name='1', modifiers=('mod1', 'mod2'))] = ds1_m2 = Dataset(np.arange(5))
        self.assertIs(scene['1'], ds1_m0)
        self.assertEqual(len(list(scene.keys())), 3)

        scene = Scene()
        scene[DatasetID(name='1', modifiers=('mod1', 'mod2'))] = ds1_m2 = Dataset(np.arange(5))
        scene[DatasetID(name='1', modifiers=('mod1',))] = ds1_m1 = Dataset(np.arange(5))
        self.assertIs(scene['1'], ds1_m1)
        self.assertIs(scene[DatasetID('1', modifiers=('mod1', 'mod2'))], ds1_m2)
        self.assertRaises(KeyError, scene.__getitem__, DatasetID(name='1', modifiers=tuple()))
        self.assertEqual(len(list(scene.keys())), 2)
Example #2
    def test_geotiff_scene_nan(self):
        """Test reading TIFF images originally containing NaN values via satpy.Scene()."""
        from satpy import Scene

        fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
        self.assertEqual(np.sum(scn['image'].data[0][:10, :10].compute()), 0)

        fname = os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
        self.assertTrue(
            np.all(np.isnan(scn['image'].data[0][:10, :10].compute())))
Example #3
def _create_scene(file_format, filenames, calib_coefs):
    return Scene(reader=file_format,
                 filenames=filenames,
                 reader_kwargs={
                     'calib_mode': CALIB_MODE,
                     'ext_calib_coefs': calib_coefs
                 })
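A hedged usage sketch for the helper above: the reader name, file pattern, calibration mode, and coefficient values are illustrative assumptions (the original only shows that calib_mode and ext_calib_coefs are forwarded through reader_kwargs), and CALIB_MODE is assumed to be a module-level constant.

import glob

CALIB_MODE = 'GSICS'  # assumed module-level constant consumed by _create_scene
# Illustrative external coefficients; the per-channel gain/offset layout is an assumption.
coefs = {'IR_108': {'gain': 0.2156, 'offset': -10.4}}
scn = _create_scene('seviri_l1b_native', glob.glob('/data/MSG4-SEVI-MSG15-*.nat'), coefs)
scn.load(['IR_108'])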
Example #4
    def _create_scene(self):
        """Create a scene object from available data.

        Returns
        -------
        satpy.scene.Scene
            Initialized scene object
        """
        data = self.message.data
        filter_parameters = {
            "start_time": data["start_time"] - ORBIT_SLACK,
            "end_time": data["end_time"] + ORBIT_SLACK,
            "platform_name": data["platform_name"],
        }
        filenames = find_files_and_readers(
            base_dir="/viirs/sdr",
            reader="viirs_sdr",
            filter_parameters=filter_parameters,
        )
        try:
            scene = Scene(filenames=filenames, reader="viirs_sdr")
        except ValueError:
            logger.exception("Loading files didn't go well: %s", filenames)
            raise

        return scene
Example #5
def _create_scene(file_format, filenames, calib_coefs):
    return Scene(reader=file_format,
                 filenames=filenames,
                 reader_kwargs={
                     'calib_mode': CalibrationData.SATPY_CALIB_MODE.value,
                     'ext_calib_coefs': calib_coefs
                 })
Example #6
def process_one_scene(scene_files,
                      out_path,
                      use_iband_res=False,
                      engine='h5netcdf',
                      all_channels=False,
                      pps_channels=False,
                      orbit_n=0):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(reader='viirs_sdr', filenames=scene_files)

    MY_MBAND = MBAND_DEFAULT
    MY_IBAND_I = IBAND_DEFAULT_I
    MY_IBAND_M = IBAND_DEFAULT_M

    if all_channels:
        MY_MBAND = MBANDS
        MY_IBAND_I = IBANDS
        MY_IBAND_M = MBANDS
    if pps_channels:
        MY_MBAND = MBAND_PPS
        MY_IBAND_I = IBAND_PPS_I
        MY_IBAND_M = IBAND_PPS_M

    if use_iband_res:
        scn_.load(MY_IBAND_I + ANGLE_NAMES + ['i_latitude', 'i_longitude'],
                  resolution=371)
        scn_.load(MY_IBAND_M, resolution=742)
        scn_ = scn_.resample(resampler='native')
    else:
        scn_.load(MY_MBAND + ANGLE_NAMES + ['m_latitude', 'm_longitude'],
                  resolution=742)

    # one ir channel
    irch = scn_['M15']

    # Set header and band attributes
    set_header_and_band_attrs(scn_, orbit_n=orbit_n)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    filename = compose_filename(scn_, out_path, instrument='viirs', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_,
                                                     band=irch,
                                                     sensor='viirs'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_viirs(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))
    return filename
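A possible invocation of process_one_scene, sketched under assumptions: the SDR directory, file pattern, output path, and orbit number are placeholders, and the glob is expected to pick up both the band and geolocation HDF5 files of one granule.

import glob

# Hypothetical call: gather one VIIRS granule's SDR files and write the
# PPS level-1c NetCDF file, returning its path.
scene_files = glob.glob('/data/viirs/sdr/*_npp_d20150311_t1120*.h5')
l1c_file = process_one_scene(scene_files,
                             out_path='/data/pps/import/PPS_data/source',
                             use_iband_res=False,
                             orbit_n=12345)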
Example #7
def plot_coastlines_on_map(composite, files, photo_extent, points, result_path, dpi=800):
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    from satpy.scene import Scene
    # fig = plt.figure(figsize=(16,12))
    # col = ['r', 'g', 'b', 'y', 'm', 'k', 'c', 'w']

    scn = Scene(filenames=files)
    scn.load([composite])

    new_scn = scn
    crs = new_scn[composite].attrs['area'].to_cartopy_crs()
    ax1 = plt.axes(projection=ccrs.Mercator())
    ax1.set_extent(photo_extent)
    #ax1.drawcountries()
    #ax1.drawstates()
    #ax1.gridlines()
    # ax.coastlines(resolution='50m', color='red')
    ax1.coastlines(color='r')
    plt.plot()
    # ax.gridlines()
    # ax.set_global()
    for i, (lat, lon) in enumerate(zip(points[0], points[1])):
        plt.plot(lon, lat, 'r*', ms=15, transform=ccrs.Geodetic())
    # plt.plot(400, 2000, 'ok', markersize=400, color=col[i],projection=crs)

    #   fig.suptitle(description, fontsize=20, fontweight='bold')
    # ax.scatter(10,40,latlon=True,color='blue')
    plt.imshow(new_scn[composite], transform=crs, extent=crs.bounds, origin='upper', cmap='gray')
    # cbar = plt.colorbar()
    # cbar.set_label("Kelvin")
    plt.savefig(result_path, dpi=dpi)
    # plt.show()
    # plt.imsave(result_path, dpi=dpi)
    return ()
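A hedged call sketch for the plotting helper above; the SEVIRI HRIT file glob, extent, and point coordinates are placeholders, and the composite is assumed to be the plain 'VIS006' channel since the image is rendered in grayscale.

import glob

# Hypothetical usage: mark two (lat, lon) points on a VIS006 image.
files = glob.glob('/data/hrit/H-000-MSG4__-MSG4________-*')
plot_coastlines_on_map('VIS006', files,
                       photo_extent=[5, 15, 45, 55],         # lon_min, lon_max, lat_min, lat_max
                       points=([48.1, 50.0], [11.6, 14.4]),  # (latitudes, longitudes)
                       result_path='/tmp/vis006_coastlines.png')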
Example #8
    def test_setitem(self):
        from satpy import Scene, Dataset
        import numpy as np
        scene = Scene()
        scene["1"] = ds1 = Dataset(np.arange(5))
        self.assertSetEqual(set(scene.datasets.keys()), {ds1.id})
        self.assertSetEqual(set(scene.wishlist), {ds1.id})
Example #9
    def from_files(cls, files_to_sort, reader=None,
                   ensure_all_readers=False, **kwargs):
        """Create multiple Scene objects from multiple files.

        Args:
            files_to_sort (Collection[str]): files to read
            reader (str or Collection[str]): reader or readers to use
            ensure_all_readers (bool): If True, limit to scenes where all
                readers have at least one file.  If False (default), include
                all scenes where at least one reader has at least one file.

        This uses the :func:`satpy.readers.group_files` function to group
        files. See this function for more details on additional possible
        keyword arguments.  In particular, it is strongly recommended to pass
        `"group_keys"` when using multiple instruments.

        .. versionadded:: 0.12

        """
        from satpy.readers import group_files
        file_groups = group_files(files_to_sort, reader=reader, **kwargs)
        if ensure_all_readers:
            file_groups = [fg for fg in file_groups if all(fg.values())]
        scenes = (Scene(filenames=fg) for fg in file_groups)
        return cls(scenes)
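As a usage sketch (assuming the classmethod above lives on MultiScene; the file globs, reader names, and grouping key below are illustrative and are simply forwarded to satpy.readers.group_files):

from glob import glob
from satpy.multiscene import MultiScene

# Group ABI L1b and GLM L2 files into time-matched scenes and keep only the
# groups in which both readers found at least one file.
mscn = MultiScene.from_files(
    glob('/data/abi/OR_ABI-L1b-RadF*.nc') + glob('/data/glm/OR_GLM-L2-GLMF*.nc'),
    reader=['abi_l1b', 'glm_l2'],
    ensure_all_readers=True,
    group_keys=('start_time',))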
Example #10
    def blend(self, blend_function=stack):
        """Blend the datasets into one scene.

        Reduce the :class:`MultiScene` to a single :class:`~satpy.scene.Scene`.  Datasets
        occurring in each scene will be passed to a blending
        function, which shall take as input a list of datasets
        (:class:`xarray.DataArray` objects) and shall return a single
        dataset (:class:`xarray.DataArray` object).  The blend method
        then assigns those datasets to the blended scene.

        Blending functions provided in this module are :func:`stack`
        (the default) and :func:`timeseries`, but the Python built-in
        function :func:`sum` also works and may be appropriate for
        some types of data.

        .. note::

            Blending is not currently optimized for generator-based
            MultiScene.

        """
        new_scn = Scene()
        common_datasets = self.shared_dataset_ids
        for ds_id in common_datasets:
            datasets = [scn[ds_id] for scn in self.scenes if ds_id in scn]
            new_scn[ds_id] = blend_function(datasets)

        return new_scn
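A short hedged example of using blend, assuming mscn is an already-loaded MultiScene and that the timeseries function mentioned in the docstring is importable from satpy.multiscene:

from satpy.multiscene import timeseries

# Default blend: overlay the datasets with the stack function from this module.
blended_scn = mscn.blend()
# Alternative: concatenate the datasets along the time dimension.
time_series_scn = mscn.blend(blend_function=timeseries)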
Example #11
    def _get_test_dataset_calibration_one_dataset(self, bands=1):
        """Helper function to create a single test dataset."""
        import xarray as xr
        import dask.array as da
        from datetime import datetime
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        from satpy import DatasetID
        from satpy.scene import Scene
        area_def = AreaDefinition(
            'test',
            'test',
            'test',
            proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 '
                              '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'),
            100,
            200,
            (-1000., -1500., 1000., 1500.),
        )

        d = [DatasetID(name='4', calibration='brightness_temperature')]
        scene = Scene()
        scene["4"] = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'calibration': 'brightness_temperature'})

        data = scene['4']
        calibration = []
        for p in scene:
            calibration.append(p.attrs['calibration'])
        new_attrs = {
            'name': 'datasets',
            'start_time': datetime.utcnow(),
            'platform_name': "TEST_PLATFORM_NAME",
            'sensor': 'test-sensor',
            'area': area_def,
            'prerequisites': d,
            'metadata_requirements': {
                'order': ['4'],
                'config': {
                    '4': {
                        'alias': 'BT',
                        'calibration': 'brightness_temperature',
                        'min-val': '-150',
                        'max-val': '50'
                    },
                },
                'translate': {
                    '4': '4',
                },
                'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff'
            }
        }
        ds1 = xr.DataArray(data=data.data,
                           attrs=new_attrs,
                           dims=data.dims,
                           coords=data.coords)
        return ds1
Example #12
    def test_available_composites_no_datasets(self):
        from satpy import Scene
        scene = Scene()
        id_list = scene.available_composite_ids(available_datasets=[])
        self.assertListEqual(id_list, [])
        # no sensors are loaded so we shouldn't get any comps either
        id_list = scene.available_composite_names(available_datasets=[])
        self.assertListEqual(id_list, [])
Example #13
    def __init__(self, **kwargs):
        self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
        super(ReaderWrapper, self).__init__(**kwargs)
        pathnames = self.find_files_with_extensions()
        # Create a satpy Scene object
        self.scene = Scene(reader=self.reader, filenames=pathnames)
        self._begin_time = self.scene.start_time
        self._end_time = self.scene.end_time
Example #14
    def test_contains(self):
        from satpy import Scene, Dataset
        import numpy as np
        scene = Scene()
        scene["1"] = ds1 = Dataset(np.arange(5), wavelength=(0.1, 0.2, 0.3))
        self.assertTrue('1' in scene)
        self.assertTrue(0.15 in scene)
        self.assertFalse('2' in scene)
        self.assertFalse(0.31 in scene)
Example #15
    def test_iter(self):
        from satpy import Scene, Projectable
        import numpy as np
        scene = Scene()
        scene["1"] = Projectable(np.arange(5))
        scene["2"] = Projectable(np.arange(5))
        scene["3"] = Projectable(np.arange(5))
        for x in scene:
            self.assertIsInstance(x, Projectable)
Example #16
def step_impl(context):
    from satpy.scene import Scene
    from datetime import datetime
    os.chdir("/tmp/")
    scn = Scene(platform_name="Suomi-NPP",
                sensor="viirs",
                start_time=datetime(2015, 3, 11, 11, 20),
                end_time=datetime(2015, 3, 11, 11, 26))
    context.available_dataset_ids = scn.available_dataset_ids()
Example #17
    def test_available_dataset_names_no_readers(self):
        from satpy import Scene
        scene = Scene()
        self.assertRaises(KeyError, scene.available_dataset_names, reader_name='fake')
        name_list = scene.available_dataset_names()
        self.assertListEqual(name_list, [])
        # no sensors are loaded so we shouldn't get any comps either
        name_list = scene.available_dataset_names(composites=True)
        self.assertListEqual(name_list, [])
Example #18
    def test_iter(self):
        from satpy import Scene, Dataset
        import numpy as np
        scene = Scene()
        scene["1"] = Dataset(np.arange(5))
        scene["2"] = Dataset(np.arange(5))
        scene["3"] = Dataset(np.arange(5))
        for x in scene:
            self.assertIsInstance(x, Dataset)
Example #19
    def blend(self, blend_function=stack):
        """Blend the datasets into one scene."""
        new_scn = Scene()
        common_datasets = self.shared_dataset_ids
        for ds_id in common_datasets:
            datasets = [scn[ds_id] for scn in self.scenes if ds_id in scn]
            new_scn[ds_id] = blend_function(datasets)

        return new_scn
Example #20
def step_impl(context):
    from satpy.scene import Scene
    from datetime import datetime
    os.chdir("/tmp/")
    scn = Scene(platform_name="Suomi-NPP",
                sensor="viirs",
                start_time=datetime(2015, 3, 11, 11, 20),
                end_time=datetime(2015, 3, 11, 11, 26))
    scn.load(["M02"])
    context.scene = scn
Example #21
    def test_png_scene(self):
        """Test reading PNG images via satpy.Scene()."""
        from satpy import Scene

        fname = os.path.join(self.base_dir, 'test_l.png')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
        self.assertEqual(scn.attrs['sensor'], set(['images']))
        self.assertEqual(scn.attrs['start_time'], None)
        self.assertEqual(scn.attrs['end_time'], None)

        fname = os.path.join(self.base_dir, '20180101_0000_test_la.png')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        data = da.compute(scn['image'].data)
        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
        self.assertEqual(scn.attrs['sensor'], set(['images']))
        self.assertEqual(scn.attrs['start_time'], self.date)
        self.assertEqual(scn.attrs['end_time'], self.date)
        self.assertEqual(np.sum(np.isnan(data)), 100)
Example #22
def step_impl(context):
    """
    :type context: behave.runner.Context
    """
    from satpy.scene import Scene
    from datetime import datetime
    from satpy.dataset import Dataset
    scn = Scene(platform_name="Suomi-NPP", sensor="viirs",
                start_time=datetime(2015, 3, 11, 11, 20),
                end_time=datetime(2015, 3, 11, 11, 26))
    scn["MyDataset"] = Dataset([[1, 2], [3, 4]])
    context.scene = scn
Example #23
    def test_geotiff_scene(self):
        """Test reading PNG images via satpy.Scene()."""
        from satpy import Scene

        fname = os.path.join(self.base_dir, '20180101_0000_test_rgb.tif')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size))
        self.assertEqual(scn.attrs['sensor'], set(['images']))
        self.assertEqual(scn.attrs['start_time'], self.date)
        self.assertEqual(scn.attrs['end_time'], self.date)
        self.assertEqual(scn['image'].area, self.area_def)

        fname = os.path.join(self.base_dir, 'test_rgba.tif')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size))
        self.assertEqual(scn.attrs['sensor'], set(['images']))
        self.assertEqual(scn.attrs['start_time'], None)
        self.assertEqual(scn.attrs['end_time'], None)
        self.assertEqual(scn['image'].area, self.area_def)
Example #24
    def test_getitem(self):
        """Test __getitem__ with names only"""
        from satpy import Scene, Dataset
        import numpy as np
        scene = Scene()
        scene["1"] = ds1 = Dataset(np.arange(5))
        scene["2"] = ds2 = Dataset(np.arange(5))
        scene["3"] = ds3 = Dataset(np.arange(5))
        self.assertIs(scene['1'], ds1)
        self.assertIs(scene['2'], ds2)
        self.assertIs(scene['3'], ds3)
        self.assertRaises(KeyError, scene.__getitem__, '4')
Example #25
    def crop(self, st, et, delta):
        # Crop data every 'delta' and split into IC and CG
        scn = Scene(glob.glob(entln_path + 'LtgFlashPortions' +
                              st.strftime('%Y%m%d') + '.csv'),
                    reader='entln')
        vname = 'timestamp'  # any name in data is OK, because we just bin the counts
        scn.load([vname])

        # ---- loop through hour and delta interval ----- #
        for h in range(st.hour, et.hour):
            for m in range(0, 60, delta):
                # 1. -----Crop by delta----- #
                timestamp = scn[vname].timestamp.values.astype('datetime64[s]')
                if m + delta < 60:
                    cond = (timestamp >= st.replace(hour=h, minute=m)) & (
                        timestamp < st.replace(hour=h, minute=m + delta))
                else:
                    cond = (timestamp >= st.replace(hour=h, minute=m)) & (
                        timestamp < st.replace(hour=h + 1, minute=0))

                # 2. -----Crop by type ----- #
                self.ic = copy.deepcopy(scn)
                self.cg = copy.deepcopy(scn)
                cond_cg = (scn[vname].type != 1) & (cond)
                cond_ic = (scn[vname].type == 1) & (cond)

                self.cg[vname] = self.cg[vname][cond_cg]
                # if we only use CG data, IC is equal to CG here
                #   and the constant ratio: IC/CG = iccg_ratio is used later
                if only_cg:
                    self.ic[vname] = self.ic[vname][cond_cg]
                else:
                    self.ic[vname] = self.ic[vname][cond_ic]

                # Correct attrs
                area_ic = SwathDefinition(lons=self.ic[vname].coords['longitude'], \
                                          lats=self.ic[vname].coords['latitude']
                                          )
                area_cg = SwathDefinition(lons=self.cg[vname].coords['longitude'], \
                                          lats=self.cg[vname].coords['latitude']
                                          )
                self.correct_attrs(self.ic, area_ic, vname)
                self.correct_attrs(self.cg, area_cg, vname)

                # 3. -----Crop by WRF_grid ----- #
                self.resample_WRF()
                if only_cg:
                    self.tl = (self.ic[vname] * iccg_ratio +
                               self.cg[vname]) / cg_de
                else:
                    self.tl = self.ic[vname] / ic_de + self.cg[vname] / cg_de
                self.save(vname, h, m)
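A hedged invocation sketch for the crop method above; the instance name, date, and interval are placeholders, and entln_path, only_cg, iccg_ratio, ic_de and cg_de are assumed to be module-level settings in the original code.

from datetime import datetime

# Hypothetical: split six hours of ENTLN flash-portion data into 10-minute
# IC/CG bins resampled to the WRF grid.
st = datetime(2020, 7, 1, 0, 0)
et = datetime(2020, 7, 1, 6, 0)
processor.crop(st, et, delta=10)  # `processor` is an instance of the (unnamed) host class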
Example #26
    def test_delitem(self):
        from satpy import Scene, Dataset
        import numpy as np
        scene = Scene()
        scene["1"] = ds1 = Dataset(np.arange(5), wavelength=(0.1, 0.2, 0.3))
        scene["2"] = ds2 = Dataset(np.arange(5), wavelength=(0.4, 0.5, 0.6))
        scene["3"] = ds3 = Dataset(np.arange(5), wavelength=(0.7, 0.8, 0.9))
        del scene['1']
        del scene['3']
        del scene[0.45]
        self.assertEqual(len(scene.wishlist), 0)
        self.assertEqual(len(scene.datasets.keys()), 0)
        self.assertRaises(KeyError, scene.__delitem__, 0.2)
Example #27
def scene_examples():
    from datetime import datetime
    from satpy.scene import Scene

    scn = Scene(
        platform_name="SNPP",
        sensor="viirs",
        start_time=datetime(2015, 4, 20, 12, 3),
        end_time=datetime(2015, 4, 20, 12, 10),
        base_dir="/home/a000680/data/polar_in/direct_readout/npp/lvl1/npp_20150420_1202_18019",
        reader="viirs_sdr"
    )

    scn.load(['M05', 'M08', 'M15'])

    met10scn = Scene(
        sensor="seviri",
        base_dir="/home/a000680/data/hrit/20150420",
        reader="hrit_msg"
    )
    met10scn.load([0.6, 0.8, 11.0])
    return
Example #28
    def from_files(cls, files_to_sort, reader=None, **kwargs):
        """Create multiple Scene objects from multiple files.

        This uses the :func:`satpy.readers.group_files` function to group
        files. See this function for more details on possible keyword
        arguments.

        .. versionadded:: 0.12

        """
        from satpy.readers import group_files
        file_groups = group_files(files_to_sort, reader=reader, **kwargs)
        scenes = (Scene(filenames=fg) for fg in file_groups)
        return cls(scenes)
Example #29
    def test_create_reader_instances_with_reader(self):
        from satpy.scene import Scene
        reader = "foo"
        filenames = ["1", "2", "3"]
        sensors = None
        with mock.patch('satpy.scene.Scene._compute_metadata_from_readers'):
            with mock.patch('satpy.scene.ReaderFinder') as findermock:
                scene = Scene(reader=reader, filenames=filenames)
                findermock.assert_called_once_with(
                    ppp_config_dir=mock.ANY, base_dir=None, area=None,
                    end_time=None, start_time=None)
                findermock.return_value.assert_called_once_with(
                    reader=reader, sensor=sensors, filenames=filenames)
Example #30
    def __init__(self, **kwargs):
        self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
        super(ReaderWrapper, self).__init__(**kwargs)
        pathnames = self.find_files_with_extensions()
        # Remove keyword arguments that Satpy won't understand
        for key in ('search_paths', 'keep_intermediate', 'overwrite_existing',
                    'exit_on_error'):
            kwargs.pop(key, None)
        # Create a satpy Scene object
        self.scene = Scene(reader=self.reader,
                           filenames=pathnames,
                           reader_kwargs=kwargs)
        self._begin_time = self.scene.start_time
        self._end_time = self.scene.end_time
        self.wishlist = set()