コード例 #1
0
ファイル: db.py プロジェクト: gerritholl/fogtools
 def load(self, timestamp):
     """Return a Scene for *timestamp* with all available datasets loaded.

     Makes sure the required files are present, builds a Scene from the
     complete file set, and loads every dataset the reader offers.
     """
     self.ensure(timestamp)
     logger.debug(f"Loading {self!s}")
     files = self.find(timestamp, complete=True)
     scene = satpy.Scene(filenames=files, reader=self.reader)
     scene.load(scene.available_dataset_names())
     return scene
コード例 #2
0
ファイル: reader.py プロジェクト: sfinkens/pygac-fdr
def read_gac(filename, reader_kwargs=None):
    """Read and calibrate AVHRR GAC level 1b data using satpy.

    Args:
        filename (str): AVHRR GAC level 1b file
        reader_kwargs (dict): Keyword arguments to be passed to the reader.
    Returns:
        The loaded data in a satpy.Scene object.
    """
    scene = satpy.Scene(filenames=[filename],
                        reader="avhrr_l1b_gaclac",
                        reader_kwargs=reader_kwargs)
    scene.load(BANDS)
    scene.load(AUX_DATA)

    # Add additional metadata
    basename = os.path.basename(filename)
    fname_info = trollsift.parse(GAC_FORMAT, basename)
    # The filename carries only the last two digits of the end orbit
    # number; reconstruct the full number from the start orbit.
    orbit_number_start = fname_info["orbit_number"]
    orbit_number_end = (orbit_number_start // 100 * 100 +
                        fname_info["end_orbit_last_digits"])
    # Bug fix: if the last two digits rolled over a hundred boundary
    # (e.g. start orbit 12399, end digits 01 -> end orbit 12401), the
    # naive reconstruction yields an end orbit *before* the start orbit;
    # advance it by one hundred in that case.
    if orbit_number_end < orbit_number_start:
        orbit_number_end += 100
    scene.attrs.update({
        "gac_filename": basename,
        "orbit_number_start": orbit_number_start,
        "orbit_number_end": orbit_number_end,
        "ground_station": fname_info["station"],
    })

    return scene
コード例 #3
0
ファイル: db.py プロジェクト: gerritholl/fogtools
 def load(self, timestamp):
     """Get scene containing relevant ABI channels
     """
     self.ensure(timestamp)
     selection = self.find(timestamp, complete=True)
     # Only files matching the timestamp are wanted; more may have been
     # downloaded (for the benefit of NWCSAF).  Matching by pattern is
     # awkward: strftime always emits a 6-digit microsecond field while
     # ABI filenames carry only one decisecond digit (PUG L1B, Volume 3,
     # p. 291 / PDF p. 326); strptime — which Satpy apparently uses — is
     # not affected.
     logger.debug("Loading ABI from local disk")
     filenames = {str(path) for path in selection}
     sc = satpy.Scene(filenames=filenames, reader="abi_l1b")
     wanted = abi.nwcsaf_abi_channels | abi.fogpy_abi_channels
     sc.load([f"C{ch:>02d}" for ch in wanted])
     return sc
コード例 #4
0
ファイル: core.py プロジェクト: gerritholl/fogtools
def get_fog(sensor_reader, sensor_files, cloud_reader, cloud_files, area,
            blend_background):
    """Get daytime fog blend for sensor.

    Derive the daytime fog product for one satellite scene.

    Args:
        sensor_reader (str): Sensor/reader for which to derive fog; must be
            supported by fogpy ("seviri_l1b_hrit" or "abi_l1b").
        sensor_files (List[str]): Filenames with the satellite data.
        cloud_reader (str): Reader providing cloud products, "nwcsaf-geo"
            or "cmsaf-claas2_l2_nc".
        cloud_files (List[str]): Filenames with cloud microphysics data.
        area (str): AreaDefinition name (defined in satpy/PPP_CONFIG_DIR,
            fcitools, or fogtools) onto which fog is calculated.
        blend_background (str): Satpy composite used as background onto
            which the fog mask is blended with :func:`blend_fog`.

    Returns:
        Scene: Scene reprojected onto ``area`` containing the composite
        and all its dependencies.
    """
    scene = satpy.Scene(filenames={
        sensor_reader: sensor_files,
        cloud_reader: cloud_files,
    })

    # Channels needed by fogpy, per sensor reader.
    channels_for = {
        "seviri_l1b_hrit": ["IR_108", "IR_087", "IR_016", "VIS006",
                            "IR_120", "VIS008", "IR_039"],
        "abi_l1b": ["C02", "C03", "C05", "C07", "C11", "C14", "C15"],
    }
    # Cloud microphysics dataset names, per cloud reader.
    cmic_for = {
        "nwcsaf-geo": ["cmic_reff", "cmic_lwp", "cmic_cot"],
        "cmsaf-claas2_l2_nc": ["reff", "cwp", "cot"],
    }

    sensor = sensor_reader.split("_")[0]
    sattools.ptc.add_all_pkg_comps_mods(scene, ["satpy", "fogpy"],
                                        sensors=[sensor])
    areas = {}
    for pkg in ["satpy", "fcitools", "fogtools"]:
        try:
            areas.update(sattools.ptc.get_all_areas([pkg]))
        except ModuleNotFoundError:
            # Optional packages; collect areas from whichever are present.
            pass
    wanted = channels_for[sensor_reader] + cmic_for[cloud_reader] + ["overview"]
    scene.load(wanted, unload=False)
    local = scene.resample(areas[area], unload=False)
    local.load(["fls_day", "fls_day_extra"], unload=False)

    return local
コード例 #5
0
def fakescene():
    """Return a fake scene with mocked areas."""
    # The areas' get_xy_from_lonlat is mocked: it is an external
    # dependency assumed correct, and mocking keeps the unit test simple.
    st_tm = datetime.datetime(1899, 12, 31, 23, 55)
    sc = satpy.Scene()

    def _dataset(name, dims, shape, with_area=True, start=st_tm):
        # Build one fake dataset; each gets its own array and (optionally)
        # its own mocked area.
        attrs = {"name": name, "start_time": start}
        if with_area:
            attrs["area"] = unittest.mock.MagicMock()
        return xarray.DataArray(
                numpy.arange(25).reshape(shape),
                dims=dims,
                attrs=attrs)

    sc["raspberry"] = _dataset("raspberry", ("x", "y"), (5, 5))
    sc["cloudberry"] = _dataset(
            "cloudberry", ("x", "y"), (5, 5),
            start=st_tm + datetime.timedelta(microseconds=1))
    sc["cloudberry_pal"] = _dataset(
            "cloudberry_pal", ("color_a", "color_b"), (5, 5),
            with_area=False)
    sc["maroshki"] = _dataset("maroshki", ("xx", "yy"), (5, 5))
    sc["banana"] = _dataset("banana", ("t", "x", "y"), (1, 5, 5))

    def _xy(cols, rows):
        # Masked-array pair as returned by get_xy_from_lonlat.
        return (numpy.ma.masked_array(numpy.array(cols), [False, False]),
                numpy.ma.masked_array(numpy.array(rows), [False, False]))

    sc["raspberry"].attrs["area"].get_xy_from_lonlat.return_value = \
        _xy([1, 1], [1, 2])
    for nm in ("banana", "cloudberry"):
        sc[nm].attrs["area"].get_xy_from_lonlat.return_value = \
            _xy([2, 2], [2, 3])
    return sc
コード例 #6
0
def fakescene(fakearea):
    """Return a fake scene with real areas."""
    # Four identically-shaped datasets, all sharing the same (real) area.
    sc = satpy.Scene()
    for name in {"raspberry", "blueberry", "maroshki", "strawberry"}:
        sc[name] = xarray.DataArray(
                numpy.arange(25).reshape(5, 5),
                dims=("x", "y"),
                attrs={"area": fakearea})
    return sc
コード例 #7
0
ファイル: test_writer.py プロジェクト: sfinkens/pygac-fdr
 def scene(self, scene_dataset_attrs, scene_lonlats):
     """Build a fake AVHRR-1 scene with channel 4, lon/lat and quality flags.

     All datasets share the same per-scanline acquisition time coordinate.
     """
     acq_time = np.array([0, 1], dtype="datetime64[s]")
     lons, lats = scene_lonlats
     coords = {"acq_time": ("y", acq_time)}
     scene = satpy.Scene()
     scene.attrs = {"sensor": "avhrr-1"}
     ch4_id = make_dataid(
         name="4",
         resolution=1234.0,
         wavelength="10.8um",
         modifiers=(),
         calibration="brightness_temperature",
     )
     scene[ch4_id] = xr.DataArray(
         [[1, 2], [3, 4]],
         dims=("y", "x"),
         coords=coords,
         attrs=scene_dataset_attrs,
     )
     # Latitude/longitude datasets differ only in name and data.
     for name, values in (("latitude", lats), ("longitude", lons)):
         did = make_dataid(name=name, resolution=1234.0, modifiers=())
         scene[did] = xr.DataArray(
             values,
             dims=("y", "x"),
             coords=coords,
         )
     qual_flags_id = make_dataid(name="qual_flags",
                                 resolution=1234.0,
                                 modifiers=())
     scene[qual_flags_id] = xr.DataArray(
         [[0, 1, 0], [0, 0, 1]],
         dims=("y", "num_flags"),
         coords=coords,
     )
     return scene
コード例 #8
0
def _mk_fakescene_realarea(fakearea, st_tm, *names):
    """Return a fake scene with real fake areas.

    Args:
        fakearea: area definition assigned to every dataset.
        st_tm: start time assigned to every dataset.
        *names: dataset names to create.

    Returns:
        satpy.Scene with one dask-backed 5x5 DataArray per name.
    """
    import dask.array as da
    sc = satpy.Scene()
    for name in names:
        # Fix: the original assigned each dataset twice with identical
        # content; the redundant duplicate assignment was removed.
        sc[name] = xarray.DataArray(
                da.arange(25).reshape(5, 5),
                dims=("x", "y"),
                attrs={"area": fakearea,
                       "start_time": st_tm,
                       "name": name})
    return sc
コード例 #9
0
def _get_measurements_from_hrit(hrit_files, year, month, day, slot):
    """Read a SEVIRI HRIT slot and compute the solar zenith angle for RGB."""
    channels = ['VIS006', 'VIS008', 'IR_016', 'IR_108']
    scene = satpy.Scene(reader="seviri_l1b_hrit", filenames=hrit_files)
    scene.load(channels)

    # Slot is "HHMM"; combine with the date into the nominal UTC time.
    utc_time = datetime(int(year), int(month), int(day),
                        int(slot[:2]), int(slot[2:]))
    lon, lat = scene['VIS006'].attrs['area'].get_lonlats()
    sunzen = astronomy.sun_zenith_angle(utc_time, lon, lat)

    data = {channel: scene[channel].values for channel in channels}
    data['sunzen'] = sunzen
    data['lon'] = lon
    data['lat'] = lat

    print('  READ HRIT DATA')

    return data, hrit_files
コード例 #10
0
ファイル: vis.py プロジェクト: gerritholl/fcitools
def show_testdata_from_dir(files,
                           composites,
                           channels,
                           regions,
                           d_out,
                           fn_out,
                           path_to_coastlines=None,
                           label="",
                           show_only_coastlines=False):
    """Visualise a directory of EUM FCI test data

    From a directory containing EUMETSAT FCI test data, visualise composites
    and channels for the given regions/areas, possibly adding coastlines.

    Args:
        files (List[pathlib.Path]):
            Paths to files

        composites (List[str]):
            List of composites to be generated

        channels (List[str]):
            List of channels (datasets) to be generated

        regions (List[str]):
            List of AreaDefinition objects these shall be generated for.
            The special region 'native' means no reprojection is applied.

        d_out (pathlib.Path):
            Path to directory where output files shall be written.

        fn_out (Optional[str]):
            Pattern of filename in output directory.  Using Python's string
            formatting syntax, the fields ``area`` and ``dataset`` will be
            replaced by the region/area and the composite/channel.

        path_to_coastlines (Optional[str]):
            If given, directory to use for coastlines.

        label (Optional[str]):
            Additional label to substitute into fn_out.

        show_only_coastlines (Optional[bool]):
            If true, prepare images showing only coastlines.  Needs at least
            one channel to be loaded.  Backgrounds will be white, black, and
            transparent.

    Returns:
        List of filenames written
    """
    written = []
    sc = satpy.Scene(sensor="fci", filenames=files, reader=["fci_l1c_fdhsi"])
    overlay = None
    if path_to_coastlines is not None:
        overlay = {"coast_dir": path_to_coastlines, "color": "red"}
    sc.load(channels)
    sc.load(composites)
    if show_only_coastlines:
        # Use the first loaded dataset as template for the plain backgrounds.
        template = sc[(channels + composites)[0]]
        sc["black"] = xarray.zeros_like(template)
        sc["white"] = xarray.ones_like(template)
        sc["nans"] = xarray.full_like(template, numpy.nan)
    for region in regions:
        resampled = sc.resample(region)
        area_name = getattr(region, "area_id", "native")
        for did in resampled.keys():
            fn = pathlib.Path(d_out) / fn_out.format(
                    area=area_name, dataset=did["name"], label=label)
            resampled.save_dataset(did, filename=str(fn), overlay=overlay)
            written.append(fn)
    return written
コード例 #11
0
def read_seviri_channel(channel_list, time, subdomain=(), regrid=False,
                        my_area=geometry.AreaDefinition(
                            'Tropical Africa', 'Tropical Africa',
                            'Hatano Equal Area',
                            {'proj': 'hatano', 'lon_0': 15.0},
                            1732, 1510,
                            (-4330000., -3775000., 4330000., 3775000.)),
                        interp_coeffs=(), reflectance_correct=False):
    '''Read SEVIRI data for given channels and time

    Includes functionality to subsample or regrid. Requires satpy.
    Assumes SEVIRI files are located in sev_data_dir1 set above, with
    directory structure sev_data_dir1/Year/YearMonthDay/Hour/

    Args:
        channel_list (list): list of channels to read, see file_dict for
                             possible values
        time (datetime): SEVIRI file date and time, every 00, 15, 30 or
                         45 minutes exactly
        subdomain (tuple, optional): If not empty and regrid is False, then
                                     tuple values are (West boundary,
                                     South boundary, East boundary,
                                     North boundary). Defaults to empty tuple.
        regrid (bool, optional): If True, then data is regridded onto grid
                                 defined by my_area. Defaults to False.
        my_area (AreaDefinition, optional): pyresample.geometry.AreaDefinition
                                            Only used if regrid=True.
                                            Defaults to a Hatano equal area
                                            projection ~4.5 km resolution
                                            extending from ~33W to ~63E and
                                            ~29S to ~29N.
                                            NOTE: the default is evaluated
                                            once at import time; it is never
                                            mutated here, so this is safe.
        interp_coeffs (tuple, optional): Interpolation coefficients that may be
                                         used for bilinear interpolation onto
                                         the new grid. Facilitates reuse of the
                                         same coefficients when the regridding
                                         operation is repeated in multiple
                                         calls to read_seviri_channel.
                                         Defaults to empty tuple.
        reflectance_correct (bool, optional): Correct visible reflectances for
                                              variation in solar zenith angle
                                              and earth-sun distance.
                                              Defaults to False.

    Returns:
        data (dict): Dictionary containing following entries:
                     lons (ndarray, shape(nlat,nlon)): Array of longitude values
                     lats (ndarray, shape(nlat,nlon)): Array of latitude values
                     interp_coeffs (tuple): If regrid is True, the
                                            interpolation coefficients are
                                            returned to speed up future
                                            regridding
                     channel (ndarray, shape(nlat,nlon)): One entry per
                                                          channel in
                                                          channel_list

    Raises:
        UnavailableFileError: if the requested time is known unavailable.
        MissingFileError: if no matching files are found on disk.
    '''
    filenames = []
    sat_names = ['MSG4', 'MSG3', 'MSG2', 'MSG1']
    sat_ind = -1
    if time in unavailable_times:
        raise UnavailableFileError("SEVIRI observations for "+time.strftime("%Y/%m/%d_%H%M")+" are not available")
    # Data may exist from multiple instruments for the same slot (e.g.
    # 20160504_1045 has MSG3 and MSG1); trying sat_names in order ensures
    # the most recent satellite is prioritised.
    # Fix: was a bitwise `&` between booleans; logical `and` is intended
    # (and short-circuits).
    while len(filenames) == 0 and sat_ind < len(sat_names) - 1:
        sat_ind += 1
        # PRO and EPI files are necessary in all scenarios.
        filenames = (glob.glob(sev_data_dir1+time.strftime("%Y/%Y%m%d/%H/*")+sat_names[sat_ind]+time.strftime("*EPI*%Y%m%d%H%M-*"))
                     + glob.glob(sev_data_dir1+time.strftime("%Y/%Y%m%d/%H/*")+sat_names[sat_ind]+time.strftime("*PRO*%Y%m%d%H%M-*")))
        sev_dir = sev_data_dir1+time.strftime("%Y/%Y%m%d/%H/*")+sat_names[sat_ind]
    if len(filenames) == 0:
        # Try alternative directory for SEVIRI data.
        filenames = (glob.glob(sev_data_dir2+time.strftime("%Y/%Y%m%d/%H/*")+time.strftime("*EPI*%Y%m%d%H%M-*"))
                     + glob.glob(sev_data_dir2+time.strftime("%Y/%Y%m%d/%H/*")+time.strftime("*PRO*%Y%m%d%H%M-*")))
        sev_dir = sev_data_dir2+time.strftime("%Y/%Y%m%d/%H/*")
    if len(filenames) == 0:
        # Fix: this was a Python 2 `print` statement, which is a
        # SyntaxError under Python 3; converted to the print() function.
        print('sev_data_dir2+time.strftime("%Y/%Y%m%d/%H/*")+sat_names[sat_ind]+time.strftime("*EPI*%Y%m%d%H%M-*")=',
              sev_data_dir2+time.strftime("%Y/%Y%m%d/%H/*")+sat_names[sat_ind]+time.strftime("*EPI*%Y%m%d%H%M-*"))
        raise MissingFileError("SEVIRI observations for "+time.strftime("%Y/%m/%d_%H%M")+" are missing. Please check if they can be downloaded and if not, add to the list of unavailable times.")
    else:
        for channel in channel_list:
            # Add the segment files for each requested channel.
            filenames = filenames + glob.glob(sev_dir+'*'+file_dict[channel]+time.strftime("*%Y%m%d%H%M-*"))
        scene = satpy.Scene(reader="seviri_l1b_hrit", filenames=filenames)
        data = {}
        scene.load(channel_list)
        if reflectance_correct:
            lons, lats = scene[channel_list[0]].area.get_lonlats()
            if 0.8 in channel_list:
                scene[0.8] = reflectance_correction(scene[0.8], lons, lats)
            if 0.6 in channel_list:
                scene[0.6] = reflectance_correction(scene[0.6], lons, lats)
        if regrid != False:
            lons, lats = my_area.get_lonlats()
            if len(interp_coeffs) == 0:
                interp_coeffs = bilinear.get_bil_info(scene[channel_list[0]].area, my_area, radius=50e3, nprocs=1)
                data.update({'interp_coeffs': interp_coeffs})
            for channel in channel_list:
                data.update({str(channel): bilinear.get_sample_from_bil_info(
                    scene[channel].values.ravel(), interp_coeffs[0],
                    interp_coeffs[1], interp_coeffs[2], interp_coeffs[3],
                    output_shape=my_area.shape)})
        else:
            if len(subdomain) > 0:
                scene = scene.crop(ll_bbox=subdomain)
            lons, lats = scene[channel_list[0]].area.get_lonlats()
            # Invert the y-axis so that longitudes increase along the axis.
            lons = lons[:, ::-1]
            lats = lats[:, ::-1]
            for channel in channel_list:
                data.update({str(channel): scene[channel].values[:, ::-1]})
        data.update({'lons': lons, 'lats': lats, 'interp_coeffs': interp_coeffs})
        # Compressed files are decompressed to TMPDIR. Now tidy up.
        delete_list = glob.glob(my_tmpdir+time.strftime("*%Y%m%d%H%M-*"))
        for d in delete_list:
            os.remove(d)
        return data
コード例 #12
0
ファイル: main.py プロジェクト: python-kurs/exercise-6-IvoJJ
from pathlib import Path
import satpy
from pyresample.geometry import AreaDefinition
from utils import createResArea

##----

# Prepare input/output directories (created if absent).
input_dir = Path("data")
input_dir.mkdir(parents=True, exist_ok=True)
plot_dir = Path("plots")
plot_dir.mkdir(parents=True, exist_ok=True)

# 1. Read the Scene that you downloaded from the data directory using SatPy. [2P]

# find_files_and_readers scans input_dir and returns a reader->files mapping
# that Scene accepts directly.
files = satpy.find_files_and_readers(base_dir=input_dir)
scene = satpy.Scene(files)

# 2. Load the composites "natural_color" and "convection" [2P]

scene.load(["natural_color", "convection"])

# 3. Resample the fulldisk to the Dem. Rep. Kongo and its neighbours [4P]
#    by defining your own area in Lambert Azimuthal Equal Area.
#    Use the following settings:
#      - lat and lon of origin: -3/23
#      - width and height of the resulting domain: 500px
#      - projection x/y coordinates of lower left: -15E5
#      - projection x/y coordinates of upper right: 15E5

# resample to defined area (using function from utils)
コード例 #13
0
ファイル: utils.py プロジェクト: duncanwp/satpy
def make_fake_scene(content_dict, daskify=False, area=True, common_attrs=None):
    """Create a fake Scene.

    Create a fake Scene object from fake data.  Data are provided in the
    ``content_dict`` argument.  In ``content_dict``, keys should be strings
    or DatasetID/DataID, and values may be either ``numpy.ndarray`` or
    ``xarray.DataArray``, in either case with exactly two dimensions.
    Each ``numpy.ndarray`` is converted into an ``xarray.DataArray`` and
    all are assigned as datasets to a Scene object.  A fake AreaDefinition
    is assigned for each array unless disabled with ``area=False``; when
    areas are generated automatically, arrays with the same shape get
    equal areas.

    This function is exclusively intended for testing purposes.

    If regular ndarrays are passed and ``daskify`` is True, DataArrays are
    created as dask arrays.  If False (default), regular DataArrays are
    created.  The flag has no effect on ``xarray.DataArray`` input.

    Args:
        content_dict (Mapping): Mapping where keys correspond to objects
            accepted by ``Scene.__setitem__``, i.e. strings or DatasetID,
            and values may be either ``numpy.ndarray`` or
            ``xarray.DataArray``.
        daskify (bool): optional, use dask when converting
            ``numpy.ndarray`` to ``xarray.DataArray``.  No effect when the
            values in ``content_dict`` are already ``xarray.DataArray``.
        area (bool or BaseDefinition): Can be ``True``, ``False``, or an
            instance of ``pyresample.geometry.BaseDefinition`` such as
            ``AreaDefinition`` or ``SwathDefinition``.  If ``True`` (the
            default), automatically generate areas.  If ``False``, values
            will not have assigned areas.  If a
            ``pyresample.geometry.BaseDefinition`` instance, it is used for
            all generated fake datasets.  Warning: passing an area as a
            string (``area="germ"``) is not supported.
        common_attrs (Mapping): optional, additional attributes added to
            every dataset in the scene.

    Returns:
        Scene object with datasets corresponding to content_dict.
    """
    import pyresample
    import satpy
    import xarray as xr
    if daskify:
        import dask.array
    if common_attrs is None:
        common_attrs = {}
    sc = satpy.Scene()
    for did, arr in content_dict.items():
        attrs = common_attrs.copy()
        if isinstance(area, pyresample.geometry.BaseDefinition):
            attrs["area"] = area
        elif area:
            # Fake equirectangular area sized to match this array.
            attrs["area"] = pyresample.create_area_def(
                "test-area",
                {"proj": "eqc",
                 "lat_ts": 0,
                 "lat_0": 0,
                 "lon_0": 0,
                 "x_0": 0,
                 "y_0": 0,
                 "ellps": "sphere",
                 "units": "m",
                 "no_defs": None,
                 "type": "crs"},
                units="m",
                shape=arr.shape,
                resolution=1000,
                center=(0, 0))
        if isinstance(arr, xr.DataArray):
            # Copy so the caller's attributes are not modified.
            new = arr.copy()
            new.attrs.update(attrs)
            sc[did] = new
        else:
            data = dask.array.from_array(arr) if daskify else arr
            sc[did] = xr.DataArray(data, dims=("y", "x"), attrs=attrs)
    return sc
コード例 #14
0
def fake_multiscene_empty():
    """Fake multiscene with empty scenes."""
    empty_scenes = []
    for _ in range(3):
        empty_scenes.append(satpy.Scene())
    return satpy.MultiScene(empty_scenes)
コード例 #15
0
# Exercise 6
from pathlib import Path
import satpy
from pyresample.geometry import AreaDefinition
from utils import createResArea

# Prepare input/output directories (created if absent).
input_dir = Path("data")
input_dir.mkdir(parents=True, exist_ok=True)
plot_dir = Path("plots")
plot_dir.mkdir(parents=True, exist_ok=True)
# 1. Read the Scene that you downloaded from the data directory using SatPy. [2P]
# find_files_and_readers returns a reader->files mapping accepted by Scene.
files = satpy.find_files_and_readers(base_dir=input_dir)
scn = satpy.Scene(files)

# 2. Load the composites "natural_color" and "convection" [2P]
scn.load(["natural_color", "convection"])

# 3. Resample the fulldisk to the Dem. Rep. Kongo and its neighbours [4P]
#    by defining your own area in Lambert Azimuthal Equal Area.
#    Use the following settings:
#      - lat and lon of origin: -3/23
#      - width and height of the resulting domain: 500px
#      - projection x/y coordinates of lower left: -15E5
#      - projection x/y coordinates of upper right: 15E5

# resample to defined area (using function from utils)
def_area = createResArea(
    area_ID="Kongo",
    description=
    "The Kongo and neighbouring countries in Lambert Equal Area projection",
    proj_id="LambertEqualArea",
コード例 #16
0
    # NOTE(review): fragment of a larger function (its def is not visible
    # here); `ms` is presumably a satpy.MultiScene — confirm in context.
    scenes = []
    glm = {}        # GLM flash_extent_density arrays, accumulated per DataID
    abi_cont = {}   # ABI datasets of the current output scene, per DataID
    for old in ms.scenes:
        for did in sorted(old.keys()):
            # Partition datasets by sensor; only GLM and ABI are supported.
            if (sens := old[did].attrs["sensor"]) == "glm":
                if did["name"] == "flash_extent_density":
                    if did not in glm:
                        glm[did] = []
                    glm[did].append(old[did])
                else:
                    raise ValueError("For GLM I can only handle "
                                     f"flash_extent_density, not {did!s}")
            elif sens == "abi":
                abi_cont[did] = old[did]
            else:
                raise ValueError("I can only handle GLM and ABI, but I got "
                                 f"{sens!s}")
        # gone through all in the scene now...
        # if I had new ABI, then make new scene collecting GLM...
        if abi_cont:
            sc = satpy.Scene()
            for (did, val) in abi_cont.items():
                sc[did] = val
            for (did, vals) in glm.items():
                # Average the accumulated GLM arrays along a temporary axis.
                sc[did] = xarray.concat(vals, "dummy").mean("dummy")
            scenes.append(sc)
            glm.clear()
            abi_cont.clear()
    return satpy.MultiScene(scenes)
コード例 #17
0
import satpy
import matplotlib.pyplot as plt
import warnings

warnings.filterwarnings("ignore")

# Read MSG's NetCDF file
scn = satpy.Scene(
    reader='seviri_l1b_nc',
    filenames=[
        'W_XX-EUMETSAT-Darmstadt,VIS+IR+HRV+IMAGERY,MSG1+SEVIRI_C_EUMG_20060402120009.nc'
    ])

# Get info on attributes (bare expression; notebook-style inspection)
scn.attrs

# Get info on channels
scn.all_dataset_names()

# Load specific channels:
scn.load(["VIS006", 'VIS008', 'IR_016', 'WV_073', 'WV_062', "IR_097"])

scn.keys(
)  # VIS006, VIS008 and IR_016 are automatically calibrated to reflectance by satpy
コード例 #18
0
# Exercise 6
from pathlib import Path

# Output goes to ./results, created below.
main_dir = Path("./")
output_dir = main_dir / "results"
import utils as uts
# 1. Read the Scene that you downloaded from the data directory using SatPy. [2P]
import satpy as satpy

files = satpy.find_files_and_readers(base_dir="./data", reader="seviri_l1b_nc")
scn = satpy.Scene(filenames=files)  # save it as a satpy scene
scn.available_composite_names()  # check which composites are available
# 2. Load the composites "natural_color" and "convection" [2P]
scn.load(["natural_color"])  # can be skipped thanks to function printSatImg
scn.load(["convection"])  # can be skipped thanks to function printSatImg
# 3. Resample the fulldisk to the Dem. Rep. Kongo and its neighbours [4P]
#    by defining your own area in Lambert Azimuthal Equal Area.
#    Use the following settings:
#      - lat and lon of origin: -3/23
#      - width and height of the resulting domain: 500px
#      - projection x/y coordinates of lower left: -15E5
#      - projection x/y coordinates of upper right: 15E5
# see function printSatImg in Task 4
# 4. Save both loaded composites of the resampled Scene as simple png images. [2P]
uts.dirCreate(output_dir)
uts.printSatImg(scn=scn,
                scene="natural_color",
                proj="laea",
                lat_or=-3,
                lon_or=23,
                width=500,
コード例 #19
0
# Exercise 6
from pathlib import Path
import satpy

from pyresample.geometry import AreaDefinition

#input_dir  = Path("data")

# 1. Read the Scene that you downloaded from the data directory using SatPy. [2P]

# A mapping of reader name to file list is another accepted Scene input.
dateien = ["../data/kongo.nc"]
files = {'seviri_l1b_nc': dateien}
scn = satpy.Scene(filenames=files)

# 2. Load the composites "natural_color" and "convection" [2P]

scn.load(["natural_color", "convection"])

# 3. Resample the fulldisk to the Dem. Rep. Kongo and its neighbours [4P]
#    by defining your own area in Lambert Azimuthal Equal Area.
#    Use the following settings:
#      - lat and lon of origin: -3/23
#      - width and height of the resulting domain: 500px
#      - projection x/y coordinates of lower left: -15E5
#      - projection x/y coordinates of upper right: 15E5

# Parameters for the AreaDefinition built below.
area_id = 'Kongo'
description = 'Kongo Lambert Azimuthal Equal Area projection'
proj_id = 'Kongo'
proj_dict = {'proj': 'laea', 'lat_0': -3, 'lon_0': 23}
width = 500
コード例 #20
0
def show(files,
         channels,
         composites,
         regions,
         d_out,
         fn_out,
         reader=None,
         path_to_coastlines=None,
         label="",
         show_only_coastlines=False):
    """Visualise satellite data with pytroll.

    From a set of files containing satellite data, visualise channels and
    composites for the given regions/areas, possibly adding coastlines.

    Args:
        files (List[pathlib.Path]):
            Paths to files

        composites (List[str]):
            List of composites to be generated

        channels (List[str]):
            List of channels (datasets) to be generated

        regions (List[str]):
            List of AreaDefinition strings or objects these shall be generated
            for.
            The special region 'native' means no reprojection is applied.

        d_out (pathlib.Path):
            Path to directory where output files shall be written.

        fn_out (str):
            Pattern of filename in output directory.  Using Python's string
            formatting syntax, the fields ``area`` and ``dataset`` will be
            replaced by the region/area and the composite/channel.

        reader (Optional[str]):
            What reader.  If none, let satpy figure it out.

        path_to_coastlines (Optional[str]):
            If given, directory to use for coastlines.

        label (Optional[str]):
            Additional label to substitute into fn_out.

        show_only_coastlines (Optional[str or area]):
            If set, prepare images showing only coastlines.  May be
            set to either a channel name or composite for which the area will
            be taken for these images, or to an areadefinition that will be
            used.

    Returns:
        Set of paths written
    """
    L = set()
    sc = satpy.Scene(filenames=[str(f) for f in files], reader=reader)
    if path_to_coastlines is None:
        overlay = None
    else:
        overlay = {"coast_dir": path_to_coastlines, "color": "yellow"}
    sc.load(channels)
    sc.load(composites)
    if show_only_coastlines:
        # show_only_coastlines may be a dataset name (use its area) or an
        # area definition itself; a failed Scene lookup means the latter.
        try:
            da = sc[show_only_coastlines]
        except (KeyError, ValueError, TypeError):
            ar = show_only_coastlines
        else:
            ar = da.attrs["area"]
        # All-zero background; `atr` is reused below for the other
        # backgrounds (rest of function not shown here).
        sc["black"] = xarray.DataArray(numpy.zeros(shape=ar.shape),
                                       attrs=(atr := {
                                           "area": ar
                                       }))