Example #1
class ReaderWrapper(roles.FrontendRole):
    FILE_EXTENSIONS = []
    DEFAULT_READER_NAME = None
    DEFAULT_DATASETS = []
    # This is temporary until a better solution is found for loading start/end time on init
    PRIMARY_FILE_TYPE = None

    def __init__(self, **kwargs):
        self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
        super(ReaderWrapper, self).__init__(**kwargs)
        pathnames = self.find_files_with_extensions()
        # Create a satpy Scene object
        self.scene = Scene(reader=self.reader, filenames=pathnames, reader_kwargs=kwargs)
        self._begin_time = self.scene.start_time
        self._end_time = self.scene.end_time
        self.wishlist = set()

    @property
    def begin_time(self):
        return self._begin_time

    @property
    def end_time(self):
        return self._end_time

    @property
    def available_product_names(self):
        return self.scene.available_dataset_names(reader_name=self.reader, composites=True)

    @property
    def all_product_names(self):
        return self.scene.all_dataset_names(reader_name=self.reader, composites=True)

    @property
    def default_products(self):
        return self.DEFAULT_DATASETS

    def filter(self, scene):
        pass

    def create_scene(self, products=None, **kwargs):
        LOG.debug("Loading scene data...")
        # If the user didn't provide the products they want, figure out which ones we can create
        if products is None:
            LOG.debug("No products specified to frontend, will try to load logical defaults products")
            products = self.default_products

        kwargs.pop("overwrite_existing")
        kwargs.pop("exit_on_error")
        kwargs.pop("keep_intermediate")
        self.scene.load(products, **kwargs)
        self.wishlist = self.scene.wishlist

        # Apply Filters
        self.filter(self.scene)

        # Delete the satpy scene so memory is cleared out if it isn't used by the caller
        scene = self.scene
        self.scene = None
        return scene
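The wrapper above is meant to be subclassed once per satpy reader. A minimal sketch of such a subclass, assuming hypothetical values; the file extensions, reader name, and default dataset list below are illustrative placeholders, not real defaults:

class VIIRSSDRWrapper(ReaderWrapper):
    """Hypothetical frontend wiring ReaderWrapper to satpy's 'viirs_sdr' reader."""
    FILE_EXTENSIONS = [".h5"]          # placeholder extension filter
    DEFAULT_READER_NAME = "viirs_sdr"  # satpy reader this frontend wraps
    DEFAULT_DATASETS = ["I01", "M15"]  # illustrative default products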
Example #2
def plot_coastlines_on_map(composite, files, photo_extent, points, result_path, dpi=800):
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    from satpy.scene import Scene

    scn = Scene(filenames=files)
    scn.load([composite])

    crs = scn[composite].attrs['area'].to_cartopy_crs()
    ax1 = plt.axes(projection=ccrs.Mercator())
    ax1.set_extent(photo_extent)
    ax1.coastlines(color='r')

    # Mark the requested points; points is a pair of (lats, lons) sequences
    for lat, lon in zip(points[0], points[1]):
        plt.plot(lon, lat, 'r*', ms=15, transform=ccrs.Geodetic())

    # Show the loaded composite (the original hard-coded 'VIS006' here, which
    # only works when composite == 'VIS006')
    plt.imshow(scn[composite], transform=crs, extent=crs.bounds, origin='upper', cmap='gray')
    plt.savefig(result_path, dpi=dpi)
Example #3
def step_impl(context):
    import os
    from satpy.scene import Scene
    from datetime import datetime
    os.chdir("/tmp/")
    scn = Scene(platform_name="Suomi-NPP", sensor="viirs",
                start_time=datetime(2015, 3, 11, 11, 20),
                end_time=datetime(2015, 3, 11, 11, 26))
    scn.load(["M02"])
    context.scene = scn
Example #4
def step_impl(context):
    import os
    from satpy.scene import Scene
    from datetime import datetime
    os.chdir("/tmp/")
    scn = Scene(platform_name="Suomi-NPP",
                sensor="viirs",
                start_time=datetime(2015, 3, 11, 11, 20),
                end_time=datetime(2015, 3, 11, 11, 26))
    scn.load(["M02"])
    context.scene = scn
Example #5
    def crop(self, st, et, delta):
        # Crop data every 'delta' and split into IC and CG
        scn = Scene(glob.glob(entln_path + 'LtgFlashPortions' +
                              st.strftime('%Y%m%d') + '.csv'),
                    reader='entln')
        vname = 'timestamp'  # any name in data is OK, because we just bin the counts
        scn.load([vname])

        # ---- loop through hour and delta interval ----- #
        for h in range(st.hour, et.hour):
            for m in range(0, 60, delta):
                # 1. -----Crop by delta----- #
                timestamp = scn[vname].timestamp.values.astype('datetime64[s]')
                if m + delta < 60:
                    cond = (timestamp >= st.replace(hour=h, minute=m)) & (
                        timestamp < st.replace(hour=h, minute=m + delta))
                else:
                    cond = (timestamp >= st.replace(hour=h, minute=m)) & (
                        timestamp < st.replace(hour=h + 1, minute=0))

                # 2. -----Crop by type ----- #
                self.ic = copy.deepcopy(scn)
                self.cg = copy.deepcopy(scn)
                cond_cg = (scn[vname].type != 1) & (cond)
                cond_ic = (scn[vname].type == 1) & (cond)

                self.cg[vname] = self.cg[vname][cond_cg]
                # if we only use CG data, IC is equal to CG here
                #   and the constant ratio IC/CG = iccg_ratio is used later
                if only_cg:
                    self.ic[vname] = self.ic[vname][cond_cg]
                else:
                    self.ic[vname] = self.ic[vname][cond_ic]

                # Correct attrs
                area_ic = SwathDefinition(lons=self.ic[vname].coords['longitude'], \
                                          lats=self.ic[vname].coords['latitude']
                                          )
                area_cg = SwathDefinition(lons=self.cg[vname].coords['longitude'], \
                                          lats=self.cg[vname].coords['latitude']
                                          )
                self.correct_attrs(self.ic, area_ic, vname)
                self.correct_attrs(self.cg, area_cg, vname)

                # 3. -----Crop by WRF_grid ----- #
                self.resample_WRF()
                if only_cg:
                    self.tl = (self.ic[vname] * iccg_ratio +
                               self.cg[vname]) / cg_de
                else:
                    self.tl = self.ic[vname] / ic_de + self.cg[vname] / cg_de
                self.save(vname, h, m)
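The crop() method references module-level names that are not defined in the snippet (entln_path, only_cg, iccg_ratio, ic_de, cg_de). A minimal sketch of the configuration it assumes; every value below is a placeholder, not the author's setting:

# Hypothetical module-level configuration assumed by crop(); values are placeholders.
entln_path = '/data/entln/'  # directory holding LtgFlashPortionsYYYYMMDD.csv files
only_cg = False              # True when only CG (cloud-to-ground) flashes are available
iccg_ratio = 3.0             # assumed IC/CG ratio, used only when only_cg is True
ic_de = 0.4                  # IC detection efficiency
cg_de = 0.9                  # CG detection efficiency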
Example #6
def process_one_scene(scene_files,
                      out_path,
                      engine='h5netcdf',
                      all_channels=False,
                      pps_channels=False,
                      orbit_n=0):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(reader='modis_l1b', filenames=scene_files)

    MY_BANDNAMES = BANDNAMES_DEFAULT
    if all_channels:
        MY_BANDNAMES = BANDNAMES
    if pps_channels:
        MY_BANDNAMES = BANDNAMES_PPS

    scn_.load(MY_BANDNAMES + ['latitude', 'longitude'] + ANGLE_NAMES,
              resolution=1000)
    # one ir channel
    irch = scn_['31']

    # Set header and band attributes
    set_header_and_band_attrs(scn_, orbit_n=orbit_n)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    # Apply sunz correction
    apply_sunz_correction(scn_, REFL_BANDS)

    filename = compose_filename(scn_, out_path, instrument='modis', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_,
                                                     band=irch,
                                                     sensor='modis'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_modis(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))
    return filename
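A hedged usage sketch for the MODIS variant above; the granule path and output directory are placeholders, and depending on the reader setup, matching MOD03 geolocation files may need to be included in scene_files as well:

from glob import glob
l1c_file = process_one_scene(
    scene_files=glob('/data/modis/MOD021KM.A2020001.1025.*.hdf'),
    out_path='/tmp',
    all_channels=True)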
Example #7
    def test_geotiff_scene_nan(self):
        """Test reading TIFF images originally containing NaN values via satpy.Scene()."""
        from satpy import Scene

        fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
        self.assertEqual(np.sum(scn['image'].data[0][:10, :10].compute()), 0)

        fname = os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
        self.assertTrue(
            np.all(np.isnan(scn['image'].data[0][:10, :10].compute())))
Example #8
def process_one_scene(scene_files, out_path):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(reader='vii_l1b_nc', filenames=scene_files)
    scn_.load(BANDNAMES + ANGLE_NAMES + ['lat_pixels', 'lon_pixels'])

    # Transpose data to get scanlines as row dimension
    for key in BANDNAMES + ANGLE_NAMES + ['lat_pixels', 'lon_pixels']:
        if 'num_pixels' in scn_[key].dims:
            # satpy <= 0.26.0
            scn_[key] = scn_[key].transpose('num_lines', 'num_pixels')
        elif scn_[key].dims[0] == 'x':
            # first dim should be y
            scn_[key] = scn_[key].transpose('y', 'x')

    # one ir channel
    irch = scn_['vii_10690']

    # Set header and band attributes
    set_header_and_band_attrs(scn_)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Adjust lons to valid range:
    adjust_lons_to_valid_range(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    # Apply sunz correction
    # apply_sunz_correction(scn_, REFL_BANDS)

    filename = compose_filename(scn_, out_path, instrument='metimage', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch, sensor='metimage'),
                       engine='h5netcdf',
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_metimage(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time()-tic))
Example #9
def draw_polygons_on_map(polygons, lines, points, composite, files, photo_extent, result_path, projection='Stereographic', dpi=200):
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    from satpy.scene import Scene

    col = ['g', 'b', 'y', 'm', 'k', 'c', 'w']
    col2 = ['m', 'k', 'c', 'w']
    col3 = ['y', 'y', 'y']

    scn = Scene(filenames=files)
    scn.load([composite])
    crs = scn[composite].attrs['area'].to_cartopy_crs()
    proj = getattr(ccrs, projection)()
    ax0 = plt.axes(projection=proj)
    if photo_extent == 'global':
        ax0.set_global()
    else:
        ax0.set_extent(photo_extent, crs=ccrs.PlateCarree())
    ax0.gridlines()
    ax0.coastlines(color='r')

    # Plot polygons; each polygon is a (lats, lons) pair of coordinate sequences
    for i, polygon in enumerate(polygons):
        poly_lats, poly_lons = polygon[0], polygon[1]
        for la, lo in zip(poly_lats, poly_lons):
            plt.fill(lo, la, transform=ccrs.PlateCarree(), color=col[i])

    # Plot lines
    for i, line in enumerate(lines):
        line_lats, line_lons = line[0], line[1]
        for la, lo in zip(line_lats, line_lons):
            plt.plot(lo, la, 'ok', markersize=4, transform=ccrs.PlateCarree(), color=col2[i])

    # Plot points
    for i, point in enumerate(points):
        la, lo = point[0], point[1]
        plt.plot(lo, la, 'ok', markersize=4, transform=ccrs.PlateCarree(), color=col3[i])

    plt.imshow(scn[composite], transform=crs, extent=crs.bounds, origin='upper', cmap='gray')
    plt.savefig(result_path, dpi=dpi)
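A hedged usage sketch; the coordinates, composite name, and file pattern are placeholders:

from glob import glob
draw_polygons_on_map(
    polygons=[([[60.0, 61.0, 61.0, 60.0]], [[10.0, 10.0, 12.0, 12.0]])],
    lines=[([[58.0, 59.0]], [[8.0, 9.0]])],
    points=[(59.5, 10.5)],
    composite='natural_color',
    files=glob('/data/seviri/H-000-MSG1*'),
    photo_extent=[5, 15, 55, 65],
    result_path='polygons.png')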
Example #10
def process_one_scene(scene_files, out_path, engine='h5netcdf', orbit_n=0):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    if 'AVHR_xxx' in scene_files[0]:
        avhrr_reader = 'avhrr_l1b_eps'
        angles = ANGLE_NAMES_EPS
    else:
        avhrr_reader = 'avhrr_l1b_aapp'
        angles = ANGLE_NAMES_AAPP
    scn_ = Scene(
        reader=avhrr_reader,
        filenames=scene_files)
    scn_.load(BANDNAMES + ['latitude', 'longitude'] + angles)
    # one ir channel
    irch = scn_['4']

    # Check if we have old hrpt format with data only every 20th line
    check_broken_data(scn_)

    # Set header and band attributes
    set_header_and_band_attrs(scn_, orbit_n=orbit_n)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    # Apply sunz correction
    apply_sunz_correction(scn_, REFL_BANDS)

    filename = compose_filename(scn_, out_path, instrument='avhrr', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch, sensor='avhrr'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_avhrr(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time()-tic))
    return filename
Example #11
def process_one_scene(scene_files, out_path, engine='h5netcdf',
                      all_channels=False, pps_channels=False):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(
        reader='slstr_l1b',
        filenames=scene_files)

    MY_BANDNAMES = BANDNAMES_DEFAULT
    if all_channels:
        MY_BANDNAMES = BANDNAMES
    if pps_channels:
        MY_BANDNAMES = BANDNAMES_PPS

    scn_.load(MY_BANDNAMES + ['latitude', 'longitude'] + ANGLE_NAMES)

    # Everything should be on the same grid, to be saved as PPS level1c
    scn_ = scn_.resample(resampler="native")

    # one ir channel
    irch = scn_['S8']

    # Set header and band attributes
    set_header_and_band_attrs(scn_)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    filename = compose_filename(scn_, out_path, instrument='slstr', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_, band=irch, sensor='slstr'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_slstr(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time()-tic))
Example #12
def create_image(path: tp.Optional[str] = None) -> tp.Tuple[Scene, tp.Optional[str]]:
    """
    Create an image from the given Sentinel-2 satellite data
    :param path: Path to the raw satellite data
    :return: (Scene, str), Scene of the satellite image, full path to the .tif created
    """
    files = find_files_and_readers(base_dir=path, reader='msi_safe')

    _scn = Scene(filenames=files)
    _scn.load(['true_color'])

    filename = None
    if path is not None:
        filename = os.path.join(path, 'RGB.tif')
        if not os.path.exists(filename):
            _scn.save_dataset('true_color',
                              filename,
                              writer='simple_image',
                              fill_value=0)
    return _scn, filename
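A hedged usage sketch; the SAFE directory path is a placeholder:

scn, tif_path = create_image('/data/S2A_MSIL1C_20200101T103421.SAFE')
if tif_path is not None:
    print('True-color image written to', tif_path)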
Example #13
def process_one_scene(scene_files, out_path, engine='h5netcdf'):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(reader='mersi2_l1b', filenames=scene_files)

    scn_.load(BANDNAMES + ['latitude', 'longitude'] + ANGLE_NAMES,
              resolution=1000)

    # Remove bad data at first and last column
    remove_broken_data(scn_)

    # one ir channel
    irch = scn_['24']

    # Set header and band attributes
    set_header_and_band_attrs(scn_)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)
    for angle in ['sunzenith', 'satzenith', 'azimuthdiff']:
        scn_[angle].attrs['file_key'] = ANGLE_ATTRIBUTES['mersi2_file_key'][
            angle]

    filename = compose_filename(scn_, out_path, instrument='mersi2', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_,
                                                     band=irch,
                                                     sensor='mersi-2'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_mersi2(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))
    return filename
Example #14
    def test_geotiff_scene(self):
        """Test reading PNG images via satpy.Scene()."""
        from satpy import Scene

        fname = os.path.join(self.base_dir, '20180101_0000_test_rgb.tif')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size))
        self.assertEqual(scn.attrs['sensor'], set(['images']))
        self.assertEqual(scn.attrs['start_time'], self.date)
        self.assertEqual(scn.attrs['end_time'], self.date)
        self.assertEqual(scn['image'].area, self.area_def)

        fname = os.path.join(self.base_dir, 'test_rgba.tif')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size))
        self.assertEqual(scn.attrs['sensor'], set(['images']))
        self.assertEqual(scn.attrs['start_time'], None)
        self.assertEqual(scn.attrs['end_time'], None)
        self.assertEqual(scn['image'].area, self.area_def)
Example #15
    def test_png_scene(self):
        """Test reading PNG images via satpy.Scene()."""
        from satpy import Scene

        fname = os.path.join(self.base_dir, 'test_l.png')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
        self.assertEqual(scn.attrs['sensor'], set(['images']))
        self.assertEqual(scn.attrs['start_time'], None)
        self.assertEqual(scn.attrs['end_time'], None)

        fname = os.path.join(self.base_dir, '20180101_0000_test_la.png')
        scn = Scene(reader='generic_image', filenames=[fname])
        scn.load(['image'])
        data = da.compute(scn['image'].data)
        self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
        self.assertEqual(scn.attrs['sensor'], set(['images']))
        self.assertEqual(scn.attrs['start_time'], self.date)
        self.assertEqual(scn.attrs['end_time'], self.date)
        self.assertEqual(np.sum(np.isnan(data)), 100)
Example #16
def process_one_scene(scene_files,
                      out_path,
                      use_iband_res=False,
                      engine='h5netcdf',
                      all_channels=False,
                      pps_channels=False,
                      orbit_n=0):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(reader='viirs_sdr', filenames=scene_files)

    MY_MBAND = MBAND_DEFAULT
    MY_IBAND_I = IBAND_DEFAULT_I
    MY_IBAND_M = IBAND_DEFAULT_M

    if all_channels:
        MY_MBAND = MBANDS
        MY_IBAND_I = IBANDS
        MY_IBAND_M = MBANDS
    if pps_channels:
        MY_MBAND = MBAND_PPS
        MY_IBAND_I = IBAND_PPS_I
        MY_IBAND_M = IBAND_PPS_M

    if use_iband_res:
        scn_.load(MY_IBAND_I + ANGLE_NAMES + ['i_latitude', 'i_longitude'],
                  resolution=371)
        scn_.load(MY_IBAND_M, resolution=742)
        scn_ = scn_.resample(resampler='native')
    else:
        scn_.load(MY_MBAND + ANGLE_NAMES + ['m_latitude', 'm_longitude'],
                  resolution=742)

    # one ir channel
    irch = scn_['M15']

    # Set header and band attributes
    set_header_and_band_attrs(scn_, orbit_n=orbit_n)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_, delete_azimuth=True)
    update_angle_attributes(scn_, irch)

    filename = compose_filename(scn_, out_path, instrument='viirs', band=irch)
    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=get_header_attrs(scn_,
                                                     band=irch,
                                                     sensor='viirs'),
                       engine=engine,
                       include_lonlats=False,
                       flatten_attrs=True,
                       encoding=get_encoding_viirs(scn_))
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))
    return filename
Example #17
def scene_examples():
    from datetime import datetime
    from satpy.scene import Scene

    scn = Scene(
        platform_name="SNPP",
        sensor="viirs",
        start_time=datetime(2015, 4, 20, 12, 3),
        end_time=datetime(2015, 4, 20, 12, 10),
        base_dir="/home/a000680/data/polar_in/direct_readout/npp/lvl1/npp_20150420_1202_18019",
        reader="viirs_sdr"
    )

    scn.load(['M05', 'M08', 'M15'])

    met10scn = Scene(
        sensor="seviri",
        base_dir="/home/a000680/data/hrit/20150420",
        reader="hrit_msg"
    )
    met10scn.load([0.6, 0.8, 11.0])
    return
Example #18
def load_and_calibrate(filenames, apply_sun_earth_distance_correction):
    """Load and calibrate data.

    Uses inter-calibration coefficients from Meirink et al.

    Args:
        filenames: List of data files
        apply_sun_earth_distance_correction: If True, apply sun-earth-distance
            correction to visible channels.

    Returns:
        Satpy scene holding calibrated channels
    """
    # Parse filenames
    parser = SEVIRIFilenameParser()
    file_format, info = parser.parse(os.path.basename(filenames[0]))

    # Get calibration coefficients (time-dependent)
    coefs = get_calibration_for_time(platform=info['platform_shortname'],
                                     time=info['start_time'])

    # Load and calibrate data
    scn_ = Scene(reader=file_format,
                 filenames=filenames,
                 reader_kwargs={
                     'calib_mode': CALIB_MODE,
                     'ext_calib_coefs': coefs
                 })
    if not scn_.attrs['sensor'] == {'seviri'}:
        raise ValueError('Not SEVIRI data')
    scn_.load(BANDNAMES)
    if not apply_sun_earth_distance_correction:
        remove_sun_earth_distance_correction(scn_)

    return scn_
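A hedged usage sketch; the HRIT file pattern is a placeholder:

from glob import glob
scn = load_and_calibrate(
    filenames=glob('/data/seviri/H-000-MSG4*-201801011200-*'),
    apply_sun_earth_distance_correction=False)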
Example #19
def load_s5p(date_in, s5p_nc_dir, tm5_prof):
    """Load S5P data."""
    # get the s5p data by the datetime
    f_s5p_pattern = os.path.join(s5p_nc_dir, f'*{date_in.strftime("%Y%m%d")}*')
    f_s5p = glob.glob(f_s5p_pattern)
    logging.info(' ' * 4 + f'Reading {f_s5p} ...')
    s5p = Scene(f_s5p, reader='tropomi_l2')

    vnames = [
        'nitrogendioxide_slant_column_density',
        'nitrogendioxide_stratospheric_column',
        'nitrogendioxide_tropospheric_column',
        'nitrogendioxide_ghost_column',
        'assembled_lat_bounds',
        'assembled_lon_bounds',
        'latitude',
        'longitude',
        'latitude_bounds',
        'longitude_bounds',
        'surface_albedo_nitrogendioxide_window',
        'surface_pressure',
        'cloud_pressure_crb',
        'cloud_albedo_crb',
        'cloud_fraction_crb_nitrogendioxide_window',
        'cloud_radiance_fraction_nitrogendioxide_window',
        'solar_azimuth_angle',
        'viewing_azimuth_angle',
        'solar_zenith_angle',
        'viewing_zenith_angle',
        'tm5_constant_a',
        'tm5_constant_b',
        'tm5_tropopause_layer_index',
        'qa_value',
        'no2_scd_flag',  # only available for processed cloudy data
        'time_utc',
        'air_mass_factor_troposphere',
        'air_mass_factor_clear',
        'air_mass_factor_cloudy',
        'amf_geo',
        'air_mass_factor_stratosphere'
    ]

    if tm5_prof:
        # Read the TM5-MP a-priori profile
        vnames.extend(['no2_vmr', 'temperature'])

    logging.debug(' ' * 4 + f'Reading vnames: {vnames}')
    s5p.load(vnames)
    # another option: load all available variables
    # s5p.load(s5p.all_dataset_names())

    # using pandas to convert string to timestamp, then to datetime without tz.
    mean_t = pd.to_datetime(s5p['time_utc'].values).mean() \
        .to_pydatetime().replace(tzinfo=None)

    # set global attrs
    s5p.attrs['s5p_filename'] = os.path.basename(f_s5p[0])

    # calculate pressure levels
    a = s5p['tm5_constant_a']
    b = s5p['tm5_constant_b']
    psfc = s5p['surface_pressure']

    low_p = (a[:, 0] + b[:, 0] * psfc) / 1e2
    high_p = (a[:, 1] + b[:, 1] * psfc) / 1e2

    s5p['p'] = xr.concat([low_p, high_p.isel(layer=-1)], dim='layer')
    s5p['p'] = s5p['p'].rename('tm5_pressure')
    s5p['p'].attrs['units'] = 'hPa'

    # read lut
    lut_pattern = os.path.join(s5p_nc_dir, 'S5P_OPER_LUT_NO2AMF*')
    lut = xr.open_mfdataset(lut_pattern, combine='by_coords')

    logging.info(' ' * 8 + 'Finish reading')

    return s5p, vnames, lut, mean_t
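A hedged usage sketch; the date and directory are placeholders, and s5p_nc_dir is assumed to hold both the TROPOMI L2 granule and the S5P_OPER_LUT_NO2AMF* file:

from datetime import datetime
s5p, vnames, lut, mean_t = load_s5p(datetime(2020, 1, 1), '/data/s5p/', tm5_prof=True)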
Example #20
print(filename)
day_filenames = glob(filename)
cmpst_add = ''

# Use satpy (Scene) to open the file
# ----------------------------------
scn = Scene(reader='modis_l1b', filenames=day_filenames)

# Load true-color data
scn.load(['true_color'])

scn.save_dataset('true_color', 'test_image_true2.png')
scn.show('true_color')

plt.show()
Example #21
    def read(self,
             fname,
             datavars,
             gvars,
             metadata,
             chans=None,
             sector_definition=None):

        # Use filename field for filename_datetime if it is available.
        dfn = DataFileName(os.path.basename(glob(os.path.join(fname, '*'))[0]))
        if dfn:
            sdfn = dfn.create_standard()
            metadata['top']['filename_datetime'] = sdfn.datetime

        metadata['top']['start_datetime'] = sdfn.datetime
        metadata['top']['end_datetime'] = sdfn.datetime
        metadata['top']['dataprovider'] = 'nesdisstar'
        metadata['top']['platform_name'] = sdfn.satname
        metadata['top']['source_name'] = 'seviri'
        # MUST be set on readers that sector at read time.
        # Affects how reading/processing is done in driver.py
        metadata['top']['sector_definition'] = sector_definition
        metadata['top']['SECTOR_ON_READ'] = True

        si = SatSensorInfo(metadata['top']['platform_name'],
                           metadata['top']['source_name'])
        if not si:
            from ..scifileexceptions import SciFileError
            raise SciFileError(
                'Unrecognized platform and source name combination: ' +
                metadata['top']['platform_name'] + ' ' +
                metadata['top']['source_name'])

        # chans == [] specifies we don't want to read ANY data, just metadata.
        # chans == None specifies that we are not specifying a channel list,
        #               and thus want ALL channels.
        if chans == []:
            # If NO CHANNELS were specifically requested, just return at this
            # point with the metadata fields populated. A dummy SciFile dataset
            # will be created with only metadata. This is for checking what
            # platform/source combination we are using, etc.
            return

        outdir = os.path.join(gpaths['LOCALSCRATCH'],
                              os.path.dirname(sdfn.name))
        self.decompress_msg(fname, outdir, chans)
        try:
            global_data = Scene(platform_name="Meteosat-8",
                                sensor="seviri",
                                reader="hrit_msg",
                                start_time=sdfn.datetime,
                                base_dir=outdir)
        except TypeError:
            global_data = Scene(
                filenames=glob(os.path.join(outdir, '*')),
                reader="hrit_msg",
                filter_parameters={'start_time': sdfn.datetime})
        metadata['top']['start_datetime'] = global_data.start_time
        metadata['top']['end_datetime'] = global_data.end_time

        # Loop through each dataset name found in the dataset_info property above.
        for dsname in self.dataset_info.keys():
            # Loop through the variables found in the current dataset
            # The dataset_info dictionary maps the geoips varname to the
            # varname found in the original datafile
            for geoipsvarname, spvarname in self.dataset_info[dsname].items():
                # If we requested specific channels, and the current channel
                # is not in the list, skip this variable.
                if chans and geoipsvarname not in chans:
                    continue
                # Read the current channel data into the datavars dictionary
                log.info('    Initializing ' + dsname + ' channel "' +
                         spvarname + '" from file into SciFile channel: "' +
                         geoipsvarname + '"...')
                global_data.load([spvarname])
                # Read spvarname from the original datafile into datavars[dsname][geoipsvarname]
        ad = sector_definition.area_definition
        log.info('    Sectoring data to ' + ad.name + ' ...')
        sectored_data = global_data.resample(ad)
        for spvarname in sectored_data.datasets.keys():
            for dsname in self.dataset_info.keys():
                for geoipsvarname in self.dataset_info[dsname].keys():
                    if self.dataset_info[dsname][
                            geoipsvarname] == spvarname.name:
                        if 'Longitude' not in gvars[dsname].keys():
                            log.info('    Saving Longitude to gvars')
                            gvars[dsname]['Longitude'] = np.ma.array(
                                ad.get_lonlats()[0])
                        if 'Latitude' not in gvars[dsname].keys():
                            log.info('    Saving Latitude to gvars')
                            gvars[dsname]['Latitude'] = np.ma.array(
                                ad.get_lonlats()[1])
                        if 'SunZenith' not in gvars[dsname].keys():
                            from geoips.scifile.solar_angle_calc import satnav
                            log.info(
                                '        Using satnav, can only calculate Sun Zenith angles'
                            )
                            gvars[dsname]['SunZenith'] = satnav(
                                'SunZenith', metadata['top']['start_datetime'],
                                gvars[dsname]['Longitude'],
                                gvars[dsname]['Latitude'])
                        self.set_variable_metadata(metadata, dsname,
                                                   geoipsvarname)
                        try:
                            datavars[dsname][geoipsvarname] =\
                             np.ma.array(sectored_data.datasets[spvarname.name].data,
                             mask=sectored_data.datasets[spvarname.name].mask)
                            log.warning('Sectored variable %s ' %
                                        (spvarname.name))
                        except AttributeError:
                            log.warning(
                                'Variable %s does not contain a mask, masking invalid values! Might take longer'
                                % (spvarname.name))
                            datavars[dsname][geoipsvarname] =\
                                np.ma.masked_invalid(sectored_data.datasets[spvarname.name].data)
Example #22
    'proj': 'lcc',
    'lon_0': -95.,
    'lat_0': 25.,
    'lat_1': 25.,
    'lat_2': 25.
})
new_scn = global_scene.resample(my_area)
new_scn.save_dataset(10.5)

global_scene['I05'].area
global_scene.load(['I05'])

rs_scn = global_scene.resample("euro4")
rs_scn.save_dataset(10.5)

from pyresample.geometry import AreaDefinition

my_area = AreaDefinition("nebraska")

import numpy as np
import os
import mpop
from mpop.satellites import PolarFactory
from datetime import datetime
import glob
Example #23
def msg1Proc1_5(dateSnap, avail_times, fldrs):
    """
    What does this definition do?
    This script processes the raw MSG-1 Level 1.5 data to produce radiance/reflectance image
    files in netCDF-4, GeoTIFF & PNG file formats.

    :param dateSnap:
    :param avail_times:  A single string NOT an array
    :param fldrs:
    :return:
    """
    #- Start coding
    # import necessary modules
    import os, sys, glob
    from satpy.utils import debug_on
    from satpy.scene import Scene
    from datetime import datetime
    from myDefinitions import nc_write_sat_level_1_5, embellish, imResize

    # Start the logic
    debug_on()
    print("\n \t \t \t STARTING THE msg1Proc1_5 run @ time: %s \t \t \t \n \n" % str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    #  Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, GSHHS_ROOT = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        try:
            # Start for-loop-1
            print("..Started processing for time: %s" % tt)
            searchStr = datDir + 'H-000-MSG1*' + dateSnap + tt + '-*'
            files = glob.glob(searchStr)
            #  for testing
            print(">>>>>>>>>> For Testing <<<<<<<<<<")
            print("datDir is set to %s: " % datDir)
            print("Search string is %s" % searchStr)
            print(files)

            # Start reading filename in satpy
            scn = Scene(filenames=files, reader='hrit_msg')

            # Get the dataset names in the scene
            allChnls = scn.all_dataset_names()
            allChnls.remove('HRV')          # due to higher resolution

            # Save the individual channels (except HRV) as separate gray-scale GeoTIFF files..
            for ii in allChnls:
                try:
                    print("Working on channel: %s" % ii)
                    scn.load(str(ii).split())
                    indImg = scn.resample('IndiaSC')

                    # Save as netCDF data
                    outImgStr1 = outDir + 'ind_MSG1-Band_' + ii + '_' + dateSnap + '_' + tt + '.nc'
                    nc_write_sat_level_1_5(indImg, outImgStr1, ii)

                    # Save as Full Resolution GeoTIFF files
                    outImgStr2 = geoTdir + 'ind_' + ii + '_' + dateSnap + '_' + tt + '.tiff'
                    indImg.save_dataset(ii, filename=outImgStr2, writer='geotiff')
                    # Add graphics
                    # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, ii, dateSnap, tt)
                    # img2.save(outImgStr2)

                    # Save the data as resized png files
                    outImgStr3 = webDir + 'ind_' + ii + '_' + dateSnap + '_' + tt + '.png'
                    indImg.save_dataset(ii, filename=outImgStr3, writer="simple_image")
                    outImgStr3 = imResize(outImgStr3)
                    # Add graphics
                    img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, ii, dateSnap, tt)
                    img3.save(outImgStr3)

                    # unload the read channel data
                    scn.unload(str(ii).split())
                    print("Finished processing for channel: %s " % ii)
                except Exception:
                    print("Something went wrong with this Channel: %s" % ii)
                    continue
                # end try-except block
            #end for-loop
            print("Finished processing for time-stamp: %s" % tt)
        except Exception:
            print("Something went wrong with this time: %s" % tt)
            continue
Example #24
def msg1NDVI(dateSnap, avail_times, fldrs):
    """
    What does this function do?
    This definition/function is meant for computing NDVI from SEVIRI data

    Ref: https://nbviewer.jupyter.org/github/pytroll/pytroll-examples/blob/master/satpy/hrit_msg_tutorial.ipynb

    :param dateSnap:
    :param avail_times:
    :param fldrs:
    :return: NDVI
    """

    # Start the logic
    import os, sys, glob
    from satpy.utils import debug_on
    from satpy.scene import Scene
    from satpy.dataset import combine_metadata
    from datetime import datetime
    from myDefinitions import nc_write_sat_level_2, embellish, imResize

    debug_on()

    print("\n \t \t \t STARTING THE msg1NDVI run @ time: %s \t \t \t \n \n" % str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    #  Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, GSHHS_ROOT = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        # Start for-loop-1
        print("..Started processing for time: %s" % tt)
        files = glob.glob(datDir + 'H-000-MSG1*' + dateSnap + tt + '-*')
        print(">>>>>>>>>>> Testing 123: <<<<<<<<<<<<<<<\n")
        print(files)

        # Start reading filename in satpy
        scn = Scene(filenames=files, reader='hrit_msg')

        #  start the NDVI computation
        scn.load([0.6, 0.8])  # VIS006 and VIS008, selected by wavelength
        ndvi = (scn[0.8] - scn[0.6]) / (scn[0.8] + scn[0.6])
        ndvi.attrs = combine_metadata(scn[0.6], scn[0.8])
        scn['ndvi'] = ndvi

        composite = 'ndvi'
        prodStr = 'ndvi'
        capStr = 'NDVI'

        # resample the data to Indian region
        indScn = scn.resample('IndiaSC')

        #  save the data
        # # Save as netCDF data ---- TO BE IMPLEMENTED ----
        outImgStr1 = outDir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.nc'
        nc_write_sat_level_2(indScn, outImgStr1, prodStr)

        # Save as Full Resolution GeoTIFF files
        outImgStr2 = geoTdir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.tiff'
        indScn.save_dataset(composite, filename=outImgStr2, writer='geotiff')
        # Add graphics
        # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, capStr, dateSnap, tt)
        # img2.save(outImgStr2)

        # Save the data as resized png files
        outImgStr3 = webDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'
        indScn.save_dataset(composite, filename=outImgStr3, writer="simple_image")
        outImgStr3 = imResize(outImgStr3)
        # Add graphics
        img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, capStr, dateSnap, tt)
        img3.save(outImgStr3)
        print("msg1NDVI() says: Finished with processing of time-slot - %s - at: %s " % (tt, str(datetime.now())))
Example #25
class ReaderWrapper(roles.FrontendRole):
    FILE_EXTENSIONS = []
    DEFAULT_READER_NAME = None
    DEFAULT_DATASETS = []
    # This is temporary until a better solution is found for loading start/end time on init
    PRIMARY_FILE_TYPE = None

    def __init__(self, **kwargs):
        self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
        super(ReaderWrapper, self).__init__(**kwargs)
        pathnames = self.find_files_with_extensions()
        # Create a satpy Scene object
        self.scene = Scene(reader=self.reader, filenames=pathnames)
        self._begin_time = self.scene.start_time
        self._end_time = self.scene.end_time

    @property
    def begin_time(self):
        return self._begin_time

    @property
    def end_time(self):
        return self._end_time

    @property
    def available_product_names(self):
        return self.scene.available_dataset_names(reader_name=self.reader,
                                                  composites=True)

    @property
    def all_product_names(self):
        return self.scene.all_dataset_names(reader_name=self.reader,
                                            composites=True)

    @property
    def default_products(self):
        return self.DEFAULT_DATASETS

    def filter(self, scene):
        pass

    def create_scene(self, products=None, **kwargs):
        LOG.debug("Loading scene data...")
        # If the user didn't provide the products they want, figure out which ones we can create
        if products is None:
            LOG.debug(
                "No products specified to frontend, will try to load logical default products"
            )
            products = self.default_products

        kwargs.pop("overwrite_existing")
        kwargs.pop("exit_on_error")
        kwargs.pop("keep_intermediate")
        self.scene.load(products, **kwargs)
        self.wishlist = self.scene.wishlist

        # Apply Filters
        self.filter(self.scene)

        # Delete the satpy scene so memory is cleared out if it isn't used by the caller
        scene = self.scene
        self.scene = None
        return scene
Example #26
def process_one_file(gac_file, out_path='.'):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    image_num = 0  # name of first dataset is image0
    # platform_shortname = p__.parse(
    #     os.path.basename(tslot_files[0]))['platform_shortname']
    # start_time = p__.parse(
    #     os.path.basename(tslot_files[0]))['start_time']
    # platform_name = PLATFORM_SHORTNAMES[platform_shortname]
    # #Load channel data for one scene and set some attributes
    # coefs = get_calibration_for_time(platform=platform_shortname,
    #                                  time=start_time)

    scn_ = Scene(reader='avhrr_l1b_gaclac', filenames=[gac_file])

    sensor = 'avhrr'  # bind before the check so it is defined when used later
    if 'avhrr-3' in scn_.attrs['sensor']:
        scn_.load(BANDNAMES + [
            'latitude', 'longitude', 'sensor_zenith_angle',
            'solar_zenith_angle', 'sun_sensor_azimuth_difference_angle'
        ])
    for band in BANDNAMES:
        try:
            idtag = PPS_TAGNAMES.get(band, band)
            scn_[band].attrs['id_tag'] = idtag
            scn_[band].attrs['description'] = 'AVHRR ' + str(band)
            scn_[band].attrs['sun_earth_distance_correction_applied'] = 'False'
            scn_[band].attrs['sun_earth_distance_correction_factor'] = 1.0
            scn_[band].attrs['sun_zenith_angle_correction_applied'] = 'False'
            scn_[band].attrs['name'] = "image{:d}".format(image_num)
            scn_[band].attrs['coordinates'] = 'lon lat'
            del scn_[band].attrs['area']
            image_num += 1
        except KeyError:
            continue

    # Set some header attributes:
    scn_.attrs['instrument'] = sensor.upper()
    scn_.attrs['source'] = "gac2pps.py"
    nowutc = datetime.utcnow()
    scn_.attrs['date_created'] = nowutc.strftime("%Y-%m-%dT%H:%M:%SZ")

    # Find lat/lon data
    irch = scn_['4']
    scn_.attrs['platform'] = irch.attrs['platform_name']
    scn_.attrs['platform_name'] = irch.attrs['platform_name']
    scn_.attrs['orbit_number'] = '{:05d}'.format(irch.attrs['orbit_number'])
    scn_.attrs['orbit'] = scn_.attrs['orbit_number']
    # lons = lons.where(lons <= 360, -999.0)
    # lons = lons.where(lons >= -360, 999.0)
    # lats = lats.where(lats <= 90, -999.0)
    # lats = lats.where(lats >= -90, 999.0)

    scn_['lat'] = scn_['latitude']
    del scn_['latitude']
    scn_['lat'].attrs['long_name'] = 'latitude coordinate'
    del scn_['lat'].coords['acq_time']

    scn_['lon'] = scn_['longitude']
    del scn_['longitude']
    scn_['lon'].attrs['long_name'] = 'longitude coordinate'
    del scn_['lon'].coords['acq_time']

    angle_names = []
    scn_['sunzenith'] = scn_['solar_zenith_angle']
    del scn_['solar_zenith_angle']
    scn_['sunzenith'].attrs['id_tag'] = 'sunzenith'
    scn_['sunzenith'].attrs['long_name'] = 'sun zenith angle'
    scn_['sunzenith'].attrs['valid_range'] = [0, 18000]
    scn_['sunzenith'].attrs['name'] = "image{:d}".format(image_num)
    angle_names.append("image{:d}".format(image_num))
    scn_['sunzenith'].attrs['coordinates'] = 'lon lat'
    del scn_['sunzenith'].attrs['area']
    scn_['sunzenith'].coords['time'] = irch.attrs['start_time']
    del scn_['sunzenith'].coords['acq_time']
    image_num += 1

    # satzenith
    scn_['satzenith'] = scn_['sensor_zenith_angle']
    del scn_['sensor_zenith_angle']
    scn_['satzenith'].attrs['id_tag'] = 'satzenith'
    scn_['satzenith'].attrs['long_name'] = 'satellite zenith angle'
    scn_['satzenith'].attrs['valid_range'] = [0, 9000]
    scn_['satzenith'].attrs['name'] = "image{:d}".format(image_num)
    angle_names.append("image{:d}".format(image_num))
    scn_['satzenith'].attrs['coordinates'] = 'lon lat'
    del scn_['satzenith'].attrs['area']
    scn_['satzenith'].coords['time'] = irch.attrs['start_time']
    del scn_['satzenith'].coords['acq_time']
    image_num += 1

    # azidiff
    scn_['azimuthdiff'] = abs(scn_['sun_sensor_azimuth_difference_angle'])
    scn_['azimuthdiff'].attrs = scn_[
        'sun_sensor_azimuth_difference_angle'].attrs
    del scn_['sun_sensor_azimuth_difference_angle']
    scn_['azimuthdiff'].attrs['id_tag'] = 'azimuthdiff'
    # scn_['azimuthdiff'].attrs['standard_name'] = (
    #     'angle_of_rotation_from_solar_azimuth_to_platform_azimuth')
    scn_['azimuthdiff'].attrs[
        'long_name'] = 'absolute azimuth difference angle'
    scn_['azimuthdiff'].attrs['valid_range'] = [0, 18000]
    scn_['azimuthdiff'].attrs['name'] = "image{:d}".format(image_num)
    angle_names.append("image{:d}".format(image_num))
    scn_['azimuthdiff'].attrs['coordinates'] = 'lon lat'
    del scn_['azimuthdiff'].attrs['area']
    scn_['azimuthdiff'].coords['time'] = irch.attrs['start_time']
    del scn_['azimuthdiff'].coords['acq_time']
    image_num += 1

    # Get filename
    start_time = irch.attrs['start_time']
    end_time = irch.attrs['end_time']
    platform_name = irch.attrs['platform_name']
    orbit_number = int(scn_.attrs['orbit_number'])
    filename = os.path.join(
        out_path, "S_NWC_avhrr_{:s}_{:05d}_{:s}Z_{:s}Z.nc".format(
            platform_name.lower().replace('-', ''), orbit_number,
            start_time.strftime('%Y%m%dT%H%M%S%f')[:-5],
            end_time.strftime('%Y%m%dT%H%M%S%f')[:-5]))

    for dataset in scn_.keys():
        if hasattr(scn_[dataset], 'attrs'):
            if hasattr(scn_[dataset].attrs, 'modifiers'):
                scn_[dataset].attrs['modifiers'] = 0.0

    # Encoding for channels
    save_info = {}
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        try:
            name = scn_[band].attrs['name']
        except KeyError:
            logger.debug("No band named %s", band)
            continue
        # Add time coordinate. To make cfwriter aware that we want 3D data.
        scn_[band].coords['time'] = irch.attrs['start_time']
        del scn_[band].coords['acq_time']

        if 'tb' in idtag:
            save_info[name] = {
                'dtype': 'int16',
                'scale_factor': 0.01,
                '_FillValue': -32767,
                'zlib': True,
                'complevel': 4,
                'add_offset': 273.15
            }
        else:
            save_info[name] = {
                'dtype': 'int16',
                'scale_factor': 0.01,
                'zlib': True,
                'complevel': 4,
                '_FillValue': -32767,
                'add_offset': 0.0
            }
    # Encoding for angles and lat/lon
    for name in angle_names:
        save_info[name] = {
            'dtype': 'int16',
            'scale_factor': 0.01,
            'zlib': True,
            'complevel': 4,
            '_FillValue': -32767,
            'add_offset': 0.0
        }
    for name in ['lon', 'lat']:
        save_info[name] = {
            'dtype': 'float32',
            'zlib': True,
            'complevel': 4,
            '_FillValue': -999.0
        }
    header_attrs = scn_.attrs.copy()
    header_attrs['start_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['start_time'].timetuple())
    header_attrs['end_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['end_time'].timetuple())
    header_attrs['sensor'] = sensor.lower()

    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        try:
            to_pop = []
            for attr in scn_[band].attrs.keys():
                if hasattr(scn_[band].attrs[attr], 'keys'):
                    print("found dict", attr)
                    to_pop.append(attr)
            for attr in to_pop:
                attr_dict = scn_[band].attrs[attr]
                scn_[band].attrs.pop(attr)
                for key in attr_dict.keys():
                    scn_[band].attrs[attr + str(key)] = attr_dict[key]
        except KeyError:
            continue

    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=header_attrs,
                       engine='netcdf4',
                       encoding=save_info)
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))
Example #27
from satpy.scene import Scene
#from satpy.utils import debug_on
# debug_on()

if __name__ == '__main__':

    scn = Scene(
        sensor='viirs',
        satid='NPP',
        filenames=[
            "/home/a000680/data/osisaf/S-OSI_-FRA_-NPP_-NARSST_FIELD-201609081300Z.nc"
        ],
        reader='ghrsst_osisaf')

    scn.load(['sea_surface_temperature'])
    lcd = scn.resample('euro4', radius_of_influence=2000)

    sstdata = lcd['sea_surface_temperature'][:]
    import numpy as np
    arr = np.ma.where(np.less_equal(sstdata, 0), 0, sstdata - 273.15)

    # Convert sst to numbers between 0 and 28, corresponding to the lut:
    data = np.ma.where(np.less(arr, 0), 28, 28.0 - arr)
    data = np.ma.where(np.greater(arr, 23.0), 4, data).round().astype('uint8')

    from trollimage.image import Image
    from satpy.imageo import palettes
    palette = palettes.sstlut_osisaf_metno()

    img = Image(data, mode='P', palette=palette)
Example #28
def process_one_file(eumgacfdr_file,
                     out_path='.',
                     reader_kwargs=None,
                     start_line=None,
                     end_line=None,
                     engine='h5netcdf',
                     remove_broken=True):
    """Make level 1c files in PPS-format."""
    tic = time.time()
    scn_ = Scene(reader='avhrr_l1c_eum_gac_fdr_nc', filenames=[eumgacfdr_file])

    scn_.load(BANDNAMES)
    scn_.load([
        'latitude', 'longitude', 'qual_flags', 'equator_crossing_time',
        'equator_crossing_longitude', 'acq_time'
    ] + ANGLENAMES)

    # Only load these if we do not crop data
    if start_line is None and end_line is None:
        scn_.load(['overlap_free_end', 'overlap_free_start', 'midnight_line'])

    # Needs to be done before everything else to avoid problems with attributes.
    if remove_broken:
        logger.info("Setting low quality data (qual_flags) to nodata.")
        remove_broken_data(scn_)

    # Crop after all renaming of variables are done
    # Problems to rename if cropping is done first.
    set_exact_time_and_crop(scn_, start_line, end_line, time_key='acq_time')
    # One ir channel; re-fetch after cropping to get updated start/end times
    irch = scn_['brightness_temperature_channel_4']

    # Set header and band attributes
    set_header_and_band_attrs(scn_)

    # Rename longitude, latitude to lon, lat.
    rename_latitude_longitude(scn_)

    # Convert angles to PPS
    convert_angles(scn_)
    update_angle_attributes(scn_, irch)  # Standard name etc

    # Handle gac specific datasets qual_flags and scanline_timestamps
    update_ancilliary_datasets(scn_)

    filename = compose_filename(scn_, out_path, instrument='avhrr', band=irch)
    encoding = get_encoding_gac(scn_)
    scn_.save_datasets(
        writer='cf',
        filename=filename,
        header_attrs=get_header_attrs(scn_, band=irch, sensor='avhrr'),
        engine=engine,
        flatten_attrs=True,
        include_lonlats=False,  # Included anyway as they are datasets in scn_
        pretty=True,
        encoding=encoding)

    logger.info("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))
    return filename
Example #29
def process_one_scan(tslot_files,
                     out_path,
                     process_buggy_satellite_zenith_angles=False):
    """ Make level 1c files in PPS-format """
    tic = time.time()
    image_num = 0  # name of first dataset is image0
    #if len(tslot_files) != 8 * len(BANDNAMES) + 2:
    #    raise Exception("Some data is missing")
    platform_shortname = p__.parse(os.path.basename(
        tslot_files[0]))['platform_shortname']
    start_time = p__.parse(os.path.basename(tslot_files[0]))['start_time']
    platform_name = PLATFORM_SHORTNAMES[platform_shortname]
    #Load channel data for one scene and set some attributes
    coefs = get_calibration_for_time(platform=platform_shortname,
                                     time=start_time)

    scn_ = Scene(reader='seviri_l1b_hrit',
                 filenames=tslot_files,
                 reader_kwargs={
                     'calib_mode': CALIB_MODE,
                     'ext_calib_coefs': coefs
                 })
    scn_.attrs['platform_name'] = platform_name

    # SEVIRI data only
    sensor = 'seviri'  # bind before the check so it is defined when used later
    if scn_.attrs['sensor'] == {'seviri'}:
        scn_.load(BANDNAMES)
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        scn_[band].attrs['id_tag'] = idtag
        scn_[band].attrs['description'] = 'SEVIRI ' + str(band)
        scn_[band].attrs['sun_earth_distance_correction_applied'] = 'False'
        scn_[band].attrs['sun_earth_distance_correction_factor'] = 1.0
        scn_[band].attrs['sun_zenith_angle_correction_applied'] = 'False'
        scn_[band].attrs['name'] = "image{:d}".format(image_num)
        scn_[band].attrs['coordinates'] = 'lon lat'
        image_num += 1

    #correct area
    area_corr = pyresample.geometry.AreaDefinition(
        'seviri-corrected', 'Corrected SEVIRI L1.5 grid (since Dec 2017)',
        'geosmsg', {
            'a': 6378169.00,
            'b': 6356583.80,
            'h': 35785831.0,
            'lon_0': 0.0,
            'proj': 'geos',
            'units': 'm'
        }, 3712, 3712, (5567248.28340708, 5570248.686685662,
                        -5570248.686685662, -5567248.28340708))
    if not scn_['IR_108'].attrs['georef_offset_corrected']:
        scn_ = scn_.resample(area_corr)
        print(scn_['IR_108'].attrs['georef_offset_corrected'])

    # Set some header attributes:
    scn_.attrs['platform'] = platform_name
    scn_.attrs['instrument'] = sensor.upper()
    scn_.attrs['source'] = "seviri2pps.py"
    scn_.attrs['orbit_number'] = "99999"
    #scn_.attrs['orbit'] = "99999"
    nowutc = datetime.utcnow()
    scn_.attrs['date_created'] = nowutc.strftime("%Y-%m-%dT%H:%M:%SZ")
    #Find lat/lon data
    irch = scn_['IR_108']
    lons, lats = irch.attrs['area'].get_lonlats()
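    # Pixels outside the Earth disc come back from get_lonlats() as inf or
    # very large values, so anything out of range is flagged with the
    # no-data value -999.0 below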
    lons[lons > 360] = -999.0
    lons[lons < -360] = -999.0
    lats[lats > 360] = -999.0
    lats[lats < -360] = -999.0

    #Find angles data
    sunalt, suna = get_alt_az(irch.attrs['start_time'],
                              *irch.attrs['area'].get_lonlats())
    suna = np.rad2deg(suna)
    sunz = sun_zenith_angle(irch.attrs['start_time'],
                            *irch.attrs['area'].get_lonlats())
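    # Note: get_alt_az() returns radians (hence the rad2deg conversion
    # above), while sun_zenith_angle() already returns degrees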

    # if:
    #   Buggy data is requested => buggy data is prepared!
    # elif:
    #   1) get_observer_look() gives the wrong answer for a satellite
    #      altitude in m, AND
    #   2) get_observer_look() gives the correct answer for a satellite
    #      altitude in km, AND
    #   3) the satellite altitude is given in m:
    #   => the satellite altitude needs to be converted to km.
    # else:
    #   => there have been updates to Satpy/pyorbital and this script
    #      needs to be modified.
    if process_buggy_satellite_zenith_angles:
        print("Making buggy satellite zenith angles on purpose!")
        sata, satel = get_observer_look(
            irch.attrs['orbital_parameters']['satellite_actual_longitude'],
            irch.attrs['orbital_parameters']['satellite_actual_latitude'],
            irch.attrs['orbital_parameters']['satellite_actual_altitude'],
            irch.attrs['start_time'], lons, lats, 0)
    elif (get_observer_look(0, 0, 36000 * 1000, datetime.utcnow(),
                            np.array([16]), np.array([58]), np.array(
                                [0]))[1] > 30
          and get_observer_look(0, 0, 36000, datetime.utcnow(), np.array([16]),
                                np.array([58]), np.array([0]))[1] < 23
          and irch.attrs['orbital_parameters']['satellite_actual_altitude'] >
          38000):
        sata, satel = get_observer_look(
            irch.attrs['orbital_parameters']['satellite_actual_longitude'],
            irch.attrs['orbital_parameters']['satellite_actual_latitude'],
            0.001 *
            irch.attrs['orbital_parameters']['satellite_actual_altitude'],
            irch.attrs['start_time'], lons, lats, 0)
    else:
        raise UnexpectedSatpyVersion(
            "You might have a newer version of satpy/pyorbital that "
            "handles units. In that case the m => km conversion might "
            "be unneeded and wrong.")

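    # Zenith angle is the complement of the elevation angle returned by
    # get_observer_look(); make_azidiff_angle() apparently folds the
    # sun/satellite azimuth difference into [0, 180] with -32767 as fill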
    satz = 90 - satel
    azidiff = make_azidiff_angle(sata, suna, -32767)
    # Add lat/lon and angle datasets to the scene object
    my_coords = scn_['IR_108'].coords
    my_coords['time'] = irch.attrs['start_time']
    scn_['lat'] = xr.DataArray(da.from_array(lats, chunks=(53, 3712)),
                               dims=['y', 'x'],
                               coords={
                                   'y': scn_['IR_108']['y'],
                                   'x': scn_['IR_108']['x']
                               })
    scn_['lat'].attrs['long_name'] = 'latitude coordinate'
    scn_['lat'].attrs['standard_name'] = 'latitude'
    scn_['lat'].attrs['units'] = 'degrees_north'
    scn_['lat'].attrs['start_time'] = irch.attrs['start_time']
    scn_['lat'].attrs['end_time'] = irch.attrs['end_time']
    scn_['lon'] = xr.DataArray(da.from_array(lons, chunks=(53, 3712)),
                               dims=['y', 'x'],
                               coords={
                                   'y': scn_['IR_108']['y'],
                                   'x': scn_['IR_108']['x']
                               })
    scn_['lon'].attrs['long_name'] = 'longitude coordinate'
    scn_['lon'].attrs['standard_name'] = 'longitude'
    scn_['lon'].attrs['units'] = 'degrees_east'
    scn_['lon'].attrs['start_time'] = irch.attrs['start_time']
    scn_['lon'].attrs['end_time'] = irch.attrs['end_time']
    #sunzenith
    scn_['sunzenith'] = xr.DataArray(da.from_array(sunz[:, :],
                                                   chunks=(53, 3712)),
                                     dims=['y', 'x'],
                                     coords=my_coords)
    scn_['sunzenith'].attrs['id_tag'] = 'sunzenith'
    scn_['sunzenith'].attrs['long_name'] = 'sun zenith angle'
    scn_['sunzenith'].attrs['standard_name'] = 'solar_zenith_angle'
    scn_['sunzenith'].attrs['valid_range'] = [0, 18000]
    scn_['sunzenith'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    #satzenith
    scn_['satzenith'] = xr.DataArray(da.from_array(satz[:, :],
                                                   chunks=(53, 3712)),
                                     dims=['y', 'x'],
                                     coords=my_coords)
    scn_['satzenith'].attrs['id_tag'] = 'satzenith'
    scn_['satzenith'].attrs['long_name'] = 'satellite zenith angle'
    scn_['satzenith'].attrs['standard_name'] = 'platform_zenith_angle'
    scn_['satzenith'].attrs['valid_range'] = [0, 9000]
    scn_['satzenith'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    #azidiff
    scn_['azimuthdiff'] = xr.DataArray(da.from_array(azidiff[:, :],
                                                     chunks=(53, 3712)),
                                       dims=['y', 'x'],
                                       coords=my_coords)
    scn_['azimuthdiff'].attrs['id_tag'] = 'azimuthdiff'
    #scn_['azimuthdiff'].attrs['standard_name'] = (
    #    'angle_of_rotation_from_solar_azimuth_to_platform_azimuth')
    scn_['azimuthdiff'].attrs['long_name'] = 'absolute azimuth difference angle'
    scn_['azimuthdiff'].attrs['valid_range'] = [0, 18000]
    scn_['azimuthdiff'].attrs['name'] = "image{:d}".format(image_num)
    image_num += 1
    for angle in ['azimuthdiff', 'satzenith', 'sunzenith']:
        scn_[angle].attrs['units'] = 'degree'
        for attr in irch.attrs.keys():
            if attr in [
                    "start_time", "end_time", "navigation",
                    "georef_offset_corrected", "projection"
            ]:
                scn_[angle].attrs[attr] = irch.attrs[attr]

    #Get filename
    start_time = scn_['IR_108'].attrs['start_time']
    end_time = scn_['IR_108'].attrs['end_time']
    filename = os.path.join(
        out_path, "S_NWC_seviri_{:s}_{:s}_{:s}Z_{:s}Z.nc".format(
            platform_name.lower().replace('-', ''), "99999",
            start_time.strftime('%Y%m%dT%H%M%S%f')[:-5],
            end_time.strftime('%Y%m%dT%H%M%S%f')[:-5]))

    #Encoding for channels
    save_info = {}
    for band in BANDNAMES:
        idtag = PPS_TAGNAMES[band]
        name = scn_[band].attrs['name']
        scn_[band].attrs.pop('area', None)
        # Add time coordinate. To make cfwriter aware that we want 3D data.
        my_coords = scn_[band].coords
        my_coords['time'] = irch.attrs['start_time']

        if 'tb' in idtag:
            save_info[name] = {
                'dtype': 'int16',
                'scale_factor': 0.01,
                '_FillValue': -32767,
                'zlib': True,
                'complevel': 4,
                'add_offset': 273.15
            }
        else:
            save_info[name] = {
                'dtype': 'int16',
                'scale_factor': 0.01,
                'zlib': True,
                'complevel': 4,
                '_FillValue': -32767,
                'add_offset': 0.0
            }
    #Encoding for angles and lat/lon
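    # Assuming BANDNAMES lists the 11 non-HRV SEVIRI channels (image0..10),
    # image11..image13 are the sunzenith, satzenith and azimuthdiff datasets
    # named via image_num above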
    for name in ['image11', 'image12', 'image13']:
        save_info[name] = {
            'dtype': 'int16',
            'scale_factor': 0.01,
            'zlib': True,
            'complevel': 4,
            '_FillValue': -32767,
            'add_offset': 0.0
        }

    for name in ['lon', 'lat']:
        save_info[name] = {
            'dtype': 'float32',
            'zlib': True,
            'complevel': 4,
            '_FillValue': -999.0
        }
    header_attrs = scn_.attrs.copy()
    header_attrs['start_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['start_time'].timetuple())
    header_attrs['end_time'] = time.strftime(
        "%Y-%m-%d %H:%M:%S", irch.attrs['end_time'].timetuple())
    header_attrs['sensor'] = sensor.lower()
    header_attrs.pop('platform_name', None)

    scn_.save_datasets(writer='cf',
                       filename=filename,
                       header_attrs=header_attrs,
                       engine='netcdf4',
                       encoding=save_info,
                       include_lonlats=False,
                       pretty=True,
                       flatten_attrs=True,
                       exclude_attrs=['raw_metadata'])
    print("Saved file {:s} after {:3.1f} seconds".format(
        os.path.basename(filename),
        time.time() - tic))  #About 40 seconds
    return filename
Example #30
from glob import glob
from satpy.scene import Scene
from satpy.utils import debug_on
from pycoast import ContourWriterAGG
import aggdraw
import PIL
from PIL import Image, ImageFont, ImageDraw
from mpop.projector import get_area_def

debug_on()
fname = "msg4-alps-snow.png"
my_area = "europe_center"
# Load data by filenames
files = glob("data/H-*")
scn = Scene(reader="hrit_msg", filenames=files)
scn.load(["natural"])
lscn = scn.resample(my_area)
# Save RGB geotiff
lscn.save_dataset("natural", filename=fname)

cw = ContourWriterAGG('/opt/pytroll/shapes')
europe = get_area_def(my_area)
cw.add_coastlines_to_file(fname, europe, resolution='l', level=1, outline=(255, 255, 255))
cw.add_borders_to_file(fname, europe, outline=(255, 255, 255), resolution='i')

img = Image.open(fname)
draw = ImageDraw.Draw(img)
print(img.size)
draw.rectangle([(0, 0), (img.size[0], 25)], fill=(255,165,0,200))
font = ImageFont.truetype("/usr/openv/java/jre/lib/fonts/LucidaTypewriterBold.ttf", 18)
textSizeName = draw.textsize("Meteosat 11", font=font)
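# The scraped snippet is truncated here; a plausible continuation (an
# assumption, not part of the original) would draw the label into the orange
# banner and overwrite the PNG:
# draw.text((5, 3), "Meteosat 11", font=font, fill='black')
# img.save(fname)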
Example #31
def msg1RGBProc(dateSnap, avail_times, fldrs):
    """
    What does this definition do?
    This script processes the raw MSG-1 data into RGB Data Products in netCDF-4, geoTIFF &
    png file formats

    :param dateSnap:
    :param avail_times: A single string NOT an array
    :param fldrs:
    :return:
    """
    import os, sys, glob
    from satpy.utils import debug_on
    from satpy.scene import Scene
    from datetime import datetime
    from myDefinitions import nc_write_sat_level_1_5, embellish, imResize

    # Start the logic
    debug_on()
    print("\n \t \t \t STARTING THE msg1RGBProc run @ time: %s \t \t \t \n \n" % str(datetime.now()))
    print("\n.Processing Date set is: %s" % dateSnap)

    #  Test whether all data folders are appropriately set or not.
    basDir, datDir, outDir, logDir, webDir, geoTdir, GSHHS_ROOT = fldrs
    print("\n.Base directory is set to: %s" % basDir)
    print("\n.Data directory is set to %s" % datDir)
    print("\n.NetCDF output directory is set to: %s" % outDir)
    print("\n.Log directory is set to: %s" % logDir)
    print("\n.Web directory is set to: %s" % webDir)
    print("\n.GeoTiff directory is set to: %s" % geoTdir)

    avail_times = str(avail_times).split()
    for tt in avail_times:
        # Start for-loop-1
        print("..Started processing for time: %s" % tt)
        files = glob.glob(datDir + 'H-000-MSG1*' + dateSnap + tt + '-*')

        # Start reading filename in satpy
        scn = Scene(filenames=files, reader='hrit_msg')

        # Product tag and caption for each composite. The entries for
        # 'cloud_optical_thickness' and 'realistic_colors' are kept from the
        # original if/elif chain even though they are not processed below.
        composite_info = {
            'natural': ('NAT', 'Quasi True Colour'),
            'night_fog': ('NFog', 'Night Fog'),
            'convection': ('CON', 'Convection Activity'),  # Problematic
            'cloud_optical_thickness': ('COP', 'Cloud Optical Thickness'),  # Very problematic
            'realistic_colors': ('REAL', 'Realistic RGB Colors'),  # Problematic
            'ir_overview': ('IR', 'Infra-Red'),
            'cloud_top_temperature': ('CTT', 'Cloud Top Temperature'),  # Problematic
            'airmass': ('airM', 'Air Mass'),
            'dust': ('dust', 'DUST'),
            'cloud_top_height': ('CTH', 'Cloud Top Height'),
            'cloudtype': ('CType', 'Cloud Type'),
            'cloud_top_pressure': ('CTP', 'Cloud Top Pressure'),
            'cloud_top_phase': ('CTPh', 'Cloud Top Phase'),
            'cloudmask': ('CMask', 'Cloud Mask'),
        }

        # Loop over the available composites
        for composite in ['natural', 'ir_overview', 'night_fog', 'convection',
                          'dust', 'airmass', 'cloud_top_temperature',
                          'cloud_top_height', 'cloudtype', 'cloud_top_phase',
                          'cloud_top_pressure', 'cloudmask']:
            prodStr, capStr = composite_info[composite]

            try:
                # Load the scene
                scn.load([composite])

                # India Specific Scene
                indScn = scn.resample("IndiaSC")
                indScn.load([composite])

                # # Save as netCDF data ---- TO BE IMPLEMENTED ----
                # outImgStr1 = outDir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.nc'
                # # indImg.save_datasets(writer = 'cf', filename = outImgStr1)
                # nc_write_sat_level_1_5(indScn, outImgStr1, prodStr)

                # Save as Full Resolution GeoTIFF files
                outImgStr2 = geoTdir + 'ind_MSG-1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.tiff'
                indScn.save_dataset(composite, filename = outImgStr2, writer = 'geotiff')
                # Add graphics
                # img2 = embellish(basDir, GSHHS_ROOT, outImgStr2, capStr, dateSnap, tt)
                # img2.save(outImgStr2)

                # Save the data as resized png files
                outImgStr3 = webDir + 'ind_MSG1_RGB_' + prodStr + '_' + dateSnap + '_' + tt + '.png'
                indScn.save_dataset(composite, filename = outImgStr3, writer = "simple_image")
                outImgStr3 = imResize(outImgStr3)
                # Add graphics
                img3 = embellish(basDir, GSHHS_ROOT, outImgStr3, capStr, dateSnap, tt)
                img3.save(outImgStr3)

                # unload the read channel data
                scn.unload([composite])
                indScn.unload([composite])
                print("Finished processing for RGB Composite: %s " % composite)
            except Exception as err:
                print("Something went wrong with this RGB composite: %s (%s)" % (composite, err))
                continue
            # end try-except block
        # end for-loop

        # Record the finished time slots
        finTmStmps = [tt]
        print("\n.Recording finished time slots as: %s" % finTmStmps)
        finTmsFile = logDir + "finishedTimeSlots_" + dateSnap + ".txt"
        with open(finTmsFile, 'a+') as fp:
            for item in finTmStmps:
                fp.write("%s \n" % item)
    # end for-loop over avail_times
    print("msg1RGBProc() says: Finished with processing of time-slot - %s - at: %s " % (tt, str(datetime.now())))
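# A hedged usage sketch for the function above (all argument values are
# hypothetical):
# msg1RGBProc('20180612', '0530',
#             (basDir, datDir, outDir, logDir, webDir, geoTdir, GSHHS_ROOT))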
Example #32
class ReaderWrapper(roles.FrontendRole):
    FILE_EXTENSIONS = []
    DEFAULT_READER_NAME = None
    DEFAULT_DATASETS = []
    # This is temporary until a better solution is found for loading start/end time on init
    PRIMARY_FILE_TYPE = None
    GENERATE_COMPOSITES = False

    def __init__(self, **kwargs):
        self.reader = kwargs.pop("reader", self.DEFAULT_READER_NAME)
        super(ReaderWrapper, self).__init__(**kwargs)
        pathnames = self.find_files_with_extensions()
        # Remove keyword arguments that Satpy won't understand
        for key in ('search_paths', 'keep_intermediate', 'overwrite_existing',
                    'exit_on_error'):
            kwargs.pop(key, None)
        # Create a satpy Scene object
        self.scene = Scene(reader=self.reader,
                           filenames=pathnames,
                           reader_kwargs=kwargs)
        self._begin_time = self.scene.start_time
        self._end_time = self.scene.end_time
        self.wishlist = set()
        self.missing_datasets = set()

    @property
    def begin_time(self):
        return self._begin_time

    @property
    def end_time(self):
        return self._end_time

    @property
    def available_product_names(self):
        return self.scene.available_dataset_names(reader_name=self.reader,
                                                  composites=True)

    @property
    def all_product_names(self):
        return self.scene.all_dataset_names(reader_name=self.reader,
                                            composites=True)

    @property
    def default_products(self):
        return self.DEFAULT_DATASETS

    def filter(self, scene):
        pass

    def _purge_node(self, parent_node, missing_nodes):
        """We no longer need this Node, remove it and any children."""
        for child in parent_node.children:
            self._purge_node(child, missing_nodes)
        if parent_node.name is None:
            # root node
            return
        if (all(parent in missing_nodes or parent is None
                for parent in parent_node.parents)
                and parent_node.name in self.scene):
            # we aren't needed by anything
            # if not parent_node.parents and parent_node.name in self.scene:
            LOG.debug("Removing {} because it is no longer needed".format(
                parent_node.name))
            del self.scene[parent_node.name]

    def _update_filtered_dep_tree(self, parent_node):
        """Update Scene wishlist and needed datasets based on filtered datasets."""
        missing_deps = set()
        for child in parent_node.children:
            _req_deps = self._update_filtered_dep_tree(child)
            missing_deps.update(_req_deps)

        # we are the root node no need to do the rest of the checks
        if parent_node.name is None:
            # get rid of any dependencies that are no longer needed
            self._purge_node(self.scene.dep_tree, missing_deps)
            return None
        # if we are missing any of our required dependencies then we can't be made
        if missing_deps or (not parent_node.children
                            and parent_node.name not in self.scene):
            missing_deps.add(parent_node)
        if missing_deps:
            if parent_node.name in self.scene:
                LOG.debug(
                    "Removing {} because it is missing some dependencies".
                    format(parent_node.name))
                del self.scene[parent_node.name]
            elif parent_node.name in self.scene.wishlist:
                # it was asked for but hasn't been generated yet
                LOG.debug(
                    "Removing {} because it is missing some dependencies".
                    format(parent_node.name))
                self.scene.wishlist.remove(parent_node.name)
        return missing_deps

    def create_scene(self, products=None, **kwargs):
        LOG.debug("Loading scene data...")
        # If the user didn't provide the products they want, figure out which ones we can create
        if products is None:
            LOG.debug(
                "No products specified to frontend, will try to load logical defaults products"
            )
            products = self.default_products

        kwargs.pop("overwrite_existing")
        kwargs.pop("exit_on_error")
        kwargs.pop("keep_intermediate")
        self.scene.load(products, generate=self.GENERATE_COMPOSITES, **kwargs)

        # Apply Filters
        self.filter(self.scene)
        if not self.GENERATE_COMPOSITES:
            self._update_filtered_dep_tree(self.scene.dep_tree)
        self.wishlist = self.scene.wishlist
        self.missing_datasets = self.scene.missing_datasets

        # Delete the satpy scene so memory is cleared out if it isn't used by the caller
        scene = self.scene
        self.scene = None
        return scene
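# A minimal usage sketch for the wrapper above (reader name and paths are
# illustrative); create_scene() expects the flags it pops explicitly:
# frontend = ReaderWrapper(reader='abi_l1b', search_paths=['/data/goes'])
# scene = frontend.create_scene(products=['C01'],
#                               overwrite_existing=False,
#                               exit_on_error=True,
#                               keep_intermediate=False)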
Example #33

# Assumed imports for this fragment; `unix_time_sec` and `dataDir` are a
# helper and a data path defined elsewhere in the original script.
from glob import glob
from datetime import timedelta
from satpy.scene import Scene

for sat in ["M01", "M02", "M03"]:
 
    fnames = glob(dataDir+"AVHR_xxx_*"+sat+"*")
    if not fnames:
        continue
    glbl = Scene(reader="avhrr_eps_l1b", filenames=fnames)
    # Map the Metop short name to its letter designator
    satname = {"M01": "B", "M02": "A", "M03": "C"}[sat]
    #glbl.load(['true_color_raw', 'night_fog'])
    glbl.load(['natural_color', 'night_fog'])
    delta_time = unix_time_sec(glbl.end_time) - unix_time_sec(glbl.start_time)
    sat_pos_time = glbl.start_time + timedelta(seconds=delta_time)
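    # NOTE: start_time + (end - start) is simply end_time; the intent here
    # was probably the mid-pass time, i.e. delta_time / 2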
    st = sat_pos_time.strftime('%y%m%d%H%M')
    #orb = Orbital("Metop-A", tle_file="metopa.txt")
    #orb = Orbital("Metop-"+satname)
    #dtobj = datetime(int(sat_pos_time.strftime('%Y')),
    #             int(sat_pos_time.strftime('%m')),
    #             int(sat_pos_time.strftime('%d')),
    #             int(sat_pos_time.strftime('%H')),
    #             int(sat_pos_time.strftime('%M')),
    #             0)
    #print("---")
    #print(orb.get_lonlatalt(dtobj))
    #print("---")
Example #34
from datetime import datetime, timedelta
from satpy import Scene, find_files_and_readers

# NOTE (assumption): `channel_dict`, `calib_dict`, `label_dict` and `data_dir`
# are module-level lookup tables / paths defined elsewhere in the original
# script.
def read_GOES_satpy(date_str, channel, zoom=True):
    # Extract the channel wavelength using the input string
    # -----------------------------------------------------
    channel = channel_dict[str(channel)]['wavelength']

    # Determine the correct GOES files associated with the date
    # ---------------------------------------------------------
    dt_date_str = datetime.strptime(date_str,"%Y%m%d%H%M")
    dt_date_str_end = dt_date_str + timedelta(minutes = 10)

    # Use the Satpy find_files_and_readers to grab the files
    # ------------------------------------------------------
    files = find_files_and_readers(start_time = dt_date_str, \
        end_time = dt_date_str_end, base_dir = data_dir, reader = 'abi_l1b')

    print(files)

    # Extract the goes true-color plot limits
    # ----------------------------------------
    lat_lims = [29.5, 48.0]
    lon_lims = [-122.0, -100.5]

    # Use satpy (Scene) to open the file
    # ----------------------------------
    scn = Scene(reader = 'abi_l1b', filenames = files)

    # Load the desired channel data
    # -----------------------------
    scn.load([channel], calibration = [calib_dict[channel]])

    ## Set the map projection and center the data
    ## ------------------------------------------
    #my_area = scn[channel].attrs['area'].compute_optimal_bb_area({\
    #    'proj':'lcc', 'lon_0': lon_lims[0], 'lat_0': lat_lims[0], \
    #    'lat_1': lat_lims[0], 'lat_2': lat_lims[0]})
    #new_scn = scn.resample(my_area)

    ##!## Enhance the image for plotting
    ##!## ------------------------------
    ##!#var = get_enhanced_image(scn[channel]).data
    ##!#var = var.transpose('y','x','bands')

    # Zoom the image on the desired area
    # ----------------------------------
    if zoom:
        scn = scn.crop(ll_bbox = (lon_lims[0] + 0.65, lat_lims[0], \
            lon_lims[1] - 0.65, lat_lims[1]))


    # Extract the lats, lons, and data
    # -----------------------------------------------------
    lons, lats = scn[channel].attrs['area'].get_lonlats()
    var = scn[channel].data

    # Extract the map projection from the data for plotting
    # -----------------------------------------------------
    crs = scn[channel].attrs['area'].to_cartopy_crs()

    # Extract the appropriate units
    # -----------------------------
    units = label_dict[channel]
    #units = scn[channel].units
    plabel = calib_dict[channel].title() + ' [' + units + ']'

    return var, crs, lons, lats, lat_lims, lon_lims, plabel
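# Hedged usage sketch (date string and channel number are hypothetical, and
# `data_dir` must point at local ABI L1b files):
# var, crs, lons, lats, lat_lims, lon_lims, plabel = \
#     read_GOES_satpy('202107202100', 2)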
Example #35
from satpy.scene import Scene
#from satpy.utils import debug_on
# debug_on()

if __name__ == '__main__':

    scn = Scene(
        sensor='viirs',
        satid='NPP',
        filenames=[
            "/home/a000680/data/osisaf/S-OSI_-FRA_-NPP_-NARSST_FIELD-201609081300Z.nc"],
        reader='ghrsst_osisaf'
    )

    scn.load(['sea_surface_temperature'])
    lcd = scn.resample('euro4', radius_of_influence=2000)

    sstdata = lcd['sea_surface_temperature'][:]
    import numpy as np
    arr = np.ma.where(np.less_equal(sstdata, 0), 0, sstdata - 273.15)

    # Convert sst to numbers between 0 and 28, corresponding to the lut:
    data = np.ma.where(np.less(arr, 0), 28, 28.0 - arr)
    data = np.ma.where(np.greater(arr, 23.0), 4, data).round().astype('uint8')

    from trollimage.image import Image
    from satpy.imageo import palettes
    palette = palettes.sstlut_osisaf_metno()

    img = Image(data, mode='P', palette=palette)
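# The scraped snippet is truncated here; presumably it continues by saving
# or displaying the palettized image, e.g. img.save("osisaf_sst_euro4.png")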