Example #1
def main():
    from satpy import Scene
    from satpy.writers.scmi import add_backend_argument_groups as add_writer_argument_groups
    import argparse
    import logging
    parser = argparse.ArgumentParser(description="Convert GEOCAT Level 1 and 2 to AWIPS SCMI files")
    parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
                        help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
    parser.add_argument('-l', '--log', dest="log_fn", default=None,
                        help="specify the log filename")
    subgroups = add_scene_argument_groups(parser)
    subgroups += add_writer_argument_groups(parser)
    args = parser.parse_args()

    scene_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[0]._group_actions}
    load_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[1]._group_actions}
    writer_init_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[2]._group_actions}
    writer_call_args = {ga.dest: getattr(args, ga.dest) for ga in subgroups[3]._group_actions}

    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[min(3, args.verbosity)], filename=args.log_fn)

    scn = Scene(**scene_args)
    scn.load(load_args['datasets'])
    writer_args = {}
    writer_args.update(writer_init_args)
    writer_args.update(writer_call_args)
    scn.save_datasets(writer='scmi', **writer_args)
Example #2
def step_impl(context, composite):
    from satpy import Scene
    scn = Scene(reader=context.dformat,
                filenames=get_all_files(os.path.join(context.data_path, 'data'),
                                        '*'))
    scn.load([composite])
    context.scn = scn
    context.composite = composite
Example #3
def step_impl(context):
    from satpy import Scene, find_files_and_readers
    from datetime import datetime
    os.chdir("/tmp/")
    readers_files = find_files_and_readers(sensor='viirs',
                                           start_time=datetime(2015, 3, 11, 11, 20),
                                           end_time=datetime(2015, 3, 11, 11, 26))
    scn = Scene(filenames=readers_files)
    scn.load(["M02"])
    context.scene = scn
Example #4
def step_impl_user_loads_no_config(context):
    from satpy import Scene, find_files_and_readers
    from datetime import datetime
    os.chdir("/tmp/")
    readers_files = find_files_and_readers(
        sensor='viirs',
        start_time=datetime(2015, 3, 11, 11, 20),
        end_time=datetime(2015, 3, 11, 11, 26))
    scn = Scene(filenames=readers_files)
    scn.load(["M02"])
    context.scene = scn
Example #5
def test_1088(fake_open_dataset):
    """Check that copied arrays gets resampled."""
    from satpy import Scene
    fake_open_dataset.side_effect = generate_fake_abi_xr_dataset

    scene = Scene(abi_file_list, reader='abi_l1b')
    scene.load(['C04'], calibration='radiance')

    my_id = make_dataid(name='my_name', wavelength=(10, 11, 12))
    scene[my_id] = scene['C04'].copy()
    resampled = scene.resample('eurol')
    assert resampled[my_id].shape == (2048, 2560)
Example #6
 def test_load_250m_cloud_mask_dataset(self, input_files, exp_area):
     """Test loading 250m cloud mask."""
     scene = Scene(reader='modis_l2', filenames=input_files)
     dataset_name = 'cloud_mask'
     scene.load([dataset_name], resolution=250)
     cloud_mask_id = make_dataid(name=dataset_name, resolution=250)
     assert cloud_mask_id in scene
     cloud_mask = scene[cloud_mask_id]
     assert cloud_mask.shape == _shape_for_resolution(250)
     # mask variables should be integers
     assert np.issubdtype(cloud_mask.dtype, np.integer)
     assert cloud_mask.attrs.get('_FillValue') is not None
     _check_shared_metadata(cloud_mask, expect_area=exp_area)
Example #7
def calc_historical_rho(datetime, wndw):
    '''
    Return ground and cloud reflectance matrices for the datetime entered by
    the user. The first for loop iterates over the time instances, which are
    defined by tr_window_length. Although currently fixed, tr_window_length
    could be made a function argument.
    The function stacks all the preceding satellite images and calculates the
    5th and 95th percentiles for rho_ground and rho_cloud, respectively.
    Arguments:
    - datetime: pandas datetime Timestamp.
    - wndw: scalar with the training window length (for validation purposes)
    Output:
    - Returns numpy arrays of rho_ground and rho_cloud.
    '''
    dirs = create_historical_filenames(datetime, wndw)
    if dirs is None:  # Check whether azimuth is lower than 85, skips if not.
        return
    else:
        sat_imgs = []
        for filename in dirs:  # Loop over the trailing window tr_window_length.
            filename = glob(
                filename + '*.nat'
            )  # Expand the directory pattern into the matching .nat file(s).
            if not filename:  # If filename is empty, continue to the next iteration.
                continue
            else:
                global_scene = Scene(reader="seviri_l1b_native",
                                     filenames=filename)
                # Load the HRV channel:
                global_scene.load(['HRV'])
                # Resample:
                #local_scene = global_scene.resample("scan1",radius_of_influence=50e3,resampler='nearest',neighbours=16) # nearest='bilinear',cache_dir=REFLECTANCE_DATA_PATH
                # radius_of_influence: maximum distance to search for a neighbour for each point in the target grid
                #hrv = local_scene['HRV'] # xarray.DataArray
                hrv = global_scene['HRV']  # xarray.DataArray
                if hrv.values.shape == (
                        1587, 3840
                ):  # Check only for the original HRV images! Some original HRV images are of different size so check this:
                    # Add time dimension and coordinate so I can easily slice the resulting xArray:
                    hrv = hrv.assign_coords(time=hrv.attrs['end_time'])
                    hrv = hrv.expand_dims('time')
                    sat_imgs.append(hrv)
        # Concatenate the list above on the 'time' dimension
        combined = xr.concat(sat_imgs, dim='time')
        # Extract the reflectances (this takes a while)
        reflectance = combined.values
        # Calculate the lowest 5% for each pixel
        rho_ground = np.percentile(reflectance, 5, 0)
        # Calculate the highest 95% (95th percentile) for each pixel (added 2020-09-14)
        rho_cloud = np.percentile(reflectance, 95, 0)
        return (rho_ground, rho_cloud)
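
A minimal usage sketch for calc_historical_rho (not from the original source): it assumes a pandas date range drives the loop and a ten-step training window; both are placeholders to adapt.

import pandas as pd

for ts in pd.date_range('2020-06-01', '2020-06-02', freq='H'):
    result = calc_historical_rho(ts, wndw=10)
    if result is None:
        continue  # solar azimuth check failed for this time slot
    rho_ground, rho_cloud = result
    # rho_ground / rho_cloud are 2-D arrays (5th / 95th percentile reflectance)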
Example #8
def main():
    from satpy import Scene
    from satpy.writers.scmi import add_backend_argument_groups as add_writer_argument_groups
    import argparse
    import logging
    parser = argparse.ArgumentParser(
        description="Convert GEOCAT Level 1 and 2 to AWIPS SCMI files")
    parser.add_argument(
        '-v',
        '--verbose',
        dest='verbosity',
        action="count",
        default=0,
        help=
        'each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)'
    )
    parser.add_argument('-l',
                        '--log',
                        dest="log_fn",
                        default=None,
                        help="specify the log filename")
    subgroups = add_scene_argument_groups(parser)
    subgroups += add_writer_argument_groups(parser)
    args = parser.parse_args()

    scene_args = {
        ga.dest: getattr(args, ga.dest)
        for ga in subgroups[0]._group_actions
    }
    load_args = {
        ga.dest: getattr(args, ga.dest)
        for ga in subgroups[1]._group_actions
    }
    writer_init_args = {
        ga.dest: getattr(args, ga.dest)
        for ga in subgroups[2]._group_actions
    }
    writer_call_args = {
        ga.dest: getattr(args, ga.dest)
        for ga in subgroups[3]._group_actions
    }

    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[min(3, args.verbosity)],
                        filename=args.log_fn)

    scn = Scene(**scene_args)
    scn.load(load_args['datasets'])
    writer_args = {}
    writer_args.update(writer_init_args)
    writer_args.update(writer_call_args)
    scn.save_datasets(writer='scmi', **writer_args)
Example #9
 def test_load_l2_dataset(self, input_files, loadables, exp_resolution,
                          exp_area, exp_value):
     """Load and check an L2 variable."""
     scene = Scene(reader='modis_l2', filenames=input_files)
     scene.load(loadables)
     for ds_name in loadables:
         assert ds_name in scene
         data_arr = scene[ds_name]
         assert isinstance(data_arr.data, da.Array)
         data_arr = data_arr.compute()
         assert data_arr.values[0, 0] == exp_value
         assert data_arr.shape == _shape_for_resolution(exp_resolution)
         assert data_arr.attrs.get('resolution') == exp_resolution
         _check_shared_metadata(data_arr, expect_area=exp_area)
Example #10
def preproc_data(infile):
    """Read MODIS data and find good pixels."""
    # Load relevant bands
    scn = Scene([infile], reader='modis_l1b')
    scn.load(['31'], resolution=1000)

    # Get lat / lon grid to ensure we focus on tropics
    in_lons, in_lats = scn['31'].area.get_lonlats()
    in_lats = in_lats.compute()
    in_lons = in_lons.compute()

    in_lats = in_lats.ravel()
    in_lons = in_lons.ravel()

    lats2 = np.abs(in_lats)

    if np.nanmin(lats2) < 33.5:
        scn_data = scn['31'].values
        scn_data = scn_data.ravel()
        # Only look at points in tropics
        gd_pts = (lats2 < 33.5).nonzero()
        scn_data = scn_data[gd_pts]
        in_lons = in_lons[gd_pts]
        in_lats = in_lats[gd_pts]
        lats2 = lats2[gd_pts]

        # Sometimes there's bad data, <155K BT
        gd_pts = (scn_data > 155).nonzero()
        scn_data = scn_data[gd_pts]
        in_lons = in_lons[gd_pts]
        in_lats = in_lats[gd_pts]
        lats2 = lats2[gd_pts]

        # Now find points colder than 205K for stats
        gd_pts = (scn_data < 205).nonzero()
        scn_data = scn_data[gd_pts]
        in_lons = in_lons[gd_pts]
        in_lats = in_lats[gd_pts]
        lats2 = lats2[gd_pts]
        if len(gd_pts[0]) < 1:
            return None, None, None
        if np.nanmin(lats2) > 33.5:
            return None, None, None
    else:
        # Everything is high latitude, return nothing
        print(" -   All data-points outside latitude range, skipping.")
        return None, None, None
    # Return data
    return in_lats, in_lons, scn_data
Example #11
 def test_load_longitude_latitude(self):
     """Test that longitude and latitude datasets are loaded correctly."""
     from satpy import DatasetID
     scene = Scene(reader='modis_l2', filenames=[self.file_name])
     for dataset_name in ['longitude', 'latitude']:
         # Default resolution should be the interpolated 1km
         scene.load([dataset_name])
         longitude_1km_id = DatasetID(name=dataset_name, resolution=1000)
         longitude_1km = scene[longitude_1km_id]
         self.assertEqual(longitude_1km.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
         # Specify original 5km scale
         longitude_5km = scene.load([dataset_name], resolution=5000)
         longitude_5km_id = DatasetID(name=dataset_name, resolution=5000)
         longitude_5km = scene[longitude_5km_id]
         self.assertEqual(longitude_5km.shape, TEST_DATA[dataset_name.capitalize()]['data'].shape)
Example #12
    def test_mcmip_get_dataset(self, xr_):
        """Test getting channel from MCMIP file."""
        from datetime import datetime

        from pyresample.geometry import AreaDefinition

        from satpy import Scene
        from satpy.dataset.dataid import WavelengthRange
        xr_.open_dataset.return_value = _create_mcmip_dataset()

        fn = "OR_ABI-L2-MCMIPF-M6_G16_s20192600241149_e20192600243534_c20192600245360.nc"
        scn = Scene(reader='abi_l2_nc', filenames=[fn])
        scn.load(['C14'])

        exp_data = np.array([[2 * 0.3052037, np.nan],
                             [32768 * 0.3052037, 32767 * 0.3052037]])

        exp_attrs = {
            'instrument_ID': None,
            'modifiers': (),
            'name': 'C14',
            'orbital_slot': None,
            'reader': 'abi_l2_nc',
            'platform_name': 'GOES-16',
            'platform_shortname': 'G16',
            'production_site': None,
            'satellite_altitude': 35786020.,
            'satellite_latitude': 0.0,
            'satellite_longitude': -89.5,
            'scan_mode': 'M6',
            'scene_id': None,
            'sensor': 'abi',
            'timeline_ID': None,
            'start_time': datetime(2017, 9, 20, 17, 30, 40, 800000),
            'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000),
            'calibration': 'brightness_temperature',
            'ancillary_variables': [],
            'wavelength': WavelengthRange(10.8, 11.2, 11.6, unit='µm'),
            'units': 'm'
        }

        res = scn['C14']
        np.testing.assert_allclose(res.data, exp_data, equal_nan=True)
        assert isinstance(res.attrs['area'], AreaDefinition)
        # don't complicate the comparison below
        for key in ('area', '_satpy_id'):
            del res.attrs[key]
        assert dict(res.attrs) == exp_attrs
Example #13
def main():
    failures = open(r'D:\failures.txt', 'w')
    # Get List of downloaded files
    file_list = os.listdir(data_dir)
    print('Roughly {} data samples found'.format(len(file_list)/3))
    for file in file_list:
        # Only run loop when you have a geo file
        if not file.startswith('VNP03'):
            continue
        # Get the band file
        if os.path.exists(os.path.join(pp_dir, file[:-3] + '.png')):
            continue
        geo_path = os.path.join(data_dir, file)
        subs = file[:4] + '2' + file[5:24]
        band_path = [i for i in file_list if subs in i]
        if len(band_path) != 1:
            print('failure {} {}'.format(file, band_path))
            break
        band_path = os.path.join(data_dir, band_path[0])
        # Get comparable GOES file
        v_time = datetime.strptime(file[10:22], '%Y%j.%H%M')
        g_subs = 'C07_G17_s{}'.format(file[10:17])
        g_files = [i for i in file_list if g_subs in i]
        goes_file = None
        for g_file in g_files:
            g_time = datetime.strptime(g_file[27:38], '%Y%j%H%M')
            tm_delta = v_time - g_time
            if abs(tm_delta.total_seconds()) < 4*60:
                goes_file = g_file
        if not goes_file:
            print('No Goes File for {}'.format(file))
            failures.write("No Match found for {}\n".format(file))
            continue      
        # Load SatPy Scenes
        viirs_files = [band_path, geo_path]
        goes_files = [os.path.join(data_dir, goes_file)]
        viirs_scene = Scene(reader = v_reader, filenames = viirs_files)
        goes_scene = Scene(reader = g_reader, filenames = goes_files)
        viirs_scene.load(['I04'])
        goes_scene.load(['C07'])
        
        # Resample and Save PNGs
        print(file)
        rs = viirs_scene.resample(viirs_scene['I04'].attrs['area'], resampler = 'nearest')
        rs.save_dataset('I04', os.path.join(pp_dir, file[:-3] + '.png'))
        rs_g = goes_scene.resample(viirs_scene['I04'].attrs['area'], resampler = 'nearest')
        rs_g.save_dataset('C07', os.path.join(pp_dir, goes_file[:-3] + '.png'))     
    failures.close()
Example #14
def _get_fake_g16_abi_c01_scene(mocker):
    attrs = {
        'name': 'C01',
        'wavelength': (1.0, 2.0, 3.0),
        'area': AreaDefinition(
            'test', 'test', 'test',
            {
                'proj': 'geos',
                'sweep': 'x',
                'lon_0': -75,
                'h': 35786023,
                'ellps': 'GRS80',
                'units': 'm',
            }, 5, 5,
            (-5434894.885056, -5434894.885056, 5434894.885056, 5434894.885056)
        ),
        'start_time': datetime(2018, 9, 10, 17, 0, 31, 100000),
        'end_time': datetime(2018, 9, 10, 17, 11, 7, 800000),
        'standard_name': 'toa_bidirectional_reflectance',
        'sensor': 'abi',
        'platform_name': 'GOES-16',
        'platform_shortname': 'G16',
    }
    data_arr = xr.DataArray(da.from_array(np.empty((5, 5), dtype=np.float64), chunks='auto'),
                            attrs=attrs)
    scn = Scene()
    scn['C01'] = data_arr
    scn.load = mocker.MagicMock()  # don't do anything on load
    return scn
Example #15
def read_sat_data(fname, channels, reader):
    """Read satellite data"""
    if not isinstance(fname, (list, set, tuple)):
        fname = [fname, ]
    glbl = Scene(filenames=fname, reader=reader)
    glbl.load(channels)
    # Compute the dask arrays
    for chan in channels:
        logging.info("Loading %s", chan)
        try:
            glbl[chan] = glbl[chan].compute()
        except KeyError:
            logging.error("Channel %s not available", chan)
            return None
    glbl.attrs["proc_time"] = dt.datetime.utcnow()

    return glbl
Example #16
 def test_orbital_parameters(self):
     """Test that the orbital parameters in attributes are handled correctly."""
     filename = 'testingcfwriter4{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
         datetime.utcnow().strftime('%Y%j%H%M%S'))
     try:
         self.scene.save_datasets(writer='cf',
                                  filename=filename,
                                  header_attrs={'instrument': 'avhrr'})
         scn_ = Scene(reader='satpy_cf_nc', filenames=[filename])
         scn_.load(['image0'])
         orig_attrs = self.scene['image0'].attrs['orbital_parameters']
         new_attrs = scn_['image0'].attrs['orbital_parameters']
         assert isinstance(new_attrs, dict)
         for key in orig_attrs:
             assert orig_attrs[key] == new_attrs[key]
     finally:
         with suppress(PermissionError):
             os.remove(filename)
Example #17
def process_set(grouped_files, curr_idx, total_groups):
    """process_ALLis a list of parsed filenames (DNB, Mband, ABI, Cband)
    Given these files, use Scene to load the appropriate channels.
    Then resample (colocate) to make the channels match up.
    Then save these colocated channels.
    Crop the NaN edges, tag with meta information (which files were used as input),
    And finally save the numpy arrays (so we don't need to recompute next time)"""
    log.info(
        f'{rgb(255,0,0)}Processing{reset} timestep {bold}{curr_idx + 1}/{total_groups}{reset}'
    )
    dt = grouped_files['viirs'][0]["datetime"]

    viirsfiles = [f["path"] for f in grouped_files['viirs']]
    abifiles = [f["path"] for f in grouped_files['abi']]

    master_scene = Scene(filenames={
        'viirs_sdr': viirsfiles,
        'abi_l1b': abifiles
    })
    master_scene.load(VIIRS_channels + ABI_channels + lat_long_both)

    #load and pair  the reflectance
    reflectfile = grouped_files['reflectance']['path']
    Reflectance = xarray.open_dataset(reflectfile)
    swath_def = SwathDefinition(Reflectance['longitude'],
                                Reflectance['latitude'])
    sm_refl = Reflectance['SM_Reflectance']
    sm_refl.attrs['area'] = swath_def
    #bring reflectance back to the satpy "Scene"
    master_scene['SM_Reflectance'] = sm_refl
    resample_scn = master_scene.resample(master_scene['DNB'].attrs['area'],
                                         resampler='nearest')

    log.info(f'Cropping nan edges of {blue}{dt}{reset}')
    t = time.time()
    data = crop.crop_nan_edges(resample_scn, crop_channels, all_channels)
    log.debug(
        f'Cropping nan edges took {rgb(255,0,0)}{time.time() - t:.2f}{reset} seconds'
    )

    data['channels'] = list(data)
    data['filenames'] = viirsfiles + abifiles + [reflectfile]
    data["datetime"] = dt
    return data
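
A hedged follow-up sketch for process_set showing one way the returned dictionary could be written to disk, matching the "save the numpy arrays" step in the docstring; the filename pattern and the use of np.savez_compressed are assumptions, not the original code.

import numpy as np

data = process_set(grouped_files, curr_idx, total_groups)
out_name = data['datetime'].strftime('colocated_%Y%m%d_%H%M%S.npz')
# one compressed archive holding every colocated channel
np.savez_compressed(out_name, **{ch: np.asarray(data[ch]) for ch in data['channels']})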
Example #18
def read_image(filepath):
    """Read the image from *filepath* and return it as PIL Image object."""
    scn = Scene(reader='generic_image', filenames=[
        filepath,
    ])
    scn.load(['image'])
    y_size = scn['image']['y'].size
    x_size = scn['image']['x'].size
    arr = np.array(scn['image'])
    alpha = np.isnan(arr[0, :, :])
    arr = arr.astype(np.uint8)
    if np.any(alpha):
        alpha = 255 * np.invert(alpha).astype(np.uint8)
        alpha = alpha.reshape(1, y_size, x_size)
        arr = np.vstack((arr, alpha))
    arr = np.squeeze(np.rollaxis(arr, 0, 3))
    img = Image.fromarray(arr)

    return img
Example #19
def get_cloud_mask(l1_filename, cloud_mask_dir):
    """ return a 2d mask, with cloudy pixels marked as 1, non-cloudy pixels marked as 0 """

    basename = os.path.split(l1_filename)

    cloud_mask_filename = glob.glob(
        os.path.join(cloud_mask_dir,
                     'MYD35*' + l1_filename.split('.A')[1][:12] + '*'))[0]

    # satpy returns (0=Cloudy, 1=Uncertain, 2=Probably Clear, 3=Confident Clear)
    swath = Scene(reader='modis_l2', filenames=[cloud_mask_filename])
    swath.load(['cloud_mask'], resolution=1000)

    cloud_mask = np.array(swath['cloud_mask'].load())[:MAX_HEIGHT, :MAX_WIDTH]

    cloud_mask = (cloud_mask == 0)
    cloud_mask = cloud_mask.astype(np.intp)

    return cloud_mask
Example #20
 def test_load_category_dataset(self, input_files, loadables,
                                request_resolution, exp_resolution,
                                exp_area):
     """Test loading category products."""
     scene = Scene(reader='modis_l2', filenames=input_files)
     kwargs = {
         "resolution": request_resolution
     } if request_resolution is not None else {}
     scene.load(loadables, **kwargs)
     for ds_name in loadables:
         cat_id = make_dataid(name=ds_name, resolution=exp_resolution)
         assert cat_id in scene
         cat_data_arr = scene[cat_id]
         assert cat_data_arr.shape == _shape_for_resolution(exp_resolution)
         assert cat_data_arr.attrs.get('resolution') == exp_resolution
         # mask variables should be integers
         assert np.issubdtype(cat_data_arr.dtype, np.integer)
         assert cat_data_arr.attrs.get('_FillValue') is not None
         _check_shared_metadata(cat_data_arr, expect_area=exp_area)
Example #21
def get_swath(radiance_filename, myd03_dir):
    """
    :param radiance_filename: MYD02 filename
    :param myd03_dir: root directory of MYD03 geolocational files
    :return swath: numpy.ndarray of size (15, HEIGHT, WIDTH)
    Uses the satpy Scene reader with the modis_l1b files. Issues reading files might be due to pyhdf not being
    installed - otherwise try pip install satpy[modis_l1b]
    Creates a scene with the MYD02 and MYD03 files, and extracts them as multi-channel arrays. The lat and long
    are appended as additional channels.
    """

    # bands selected from MODIS
    composite = [
        '1', '2', '29', '33', '34', '35', '36', '26', '27', '20', '21', '22',
        '23'
    ]

    # find a corresponding geolocational (MYD03) file for the provided radiance (MYD02) file
    geoloc_filename = find_matching_geoloc_file(radiance_filename, myd03_dir)

    # load the global scene using satpy
    global_scene = Scene(reader='modis_l1b',
                         filenames=[radiance_filename, geoloc_filename])

    # load composite, resolution of 1km
    global_scene.load(composite, resolution=1000)

    # load latitudes and longitudes, resolution 1km
    global_scene.load(['latitude', 'longitude'], resolution=1000)
    latitude = np.array(global_scene['latitude'].load())
    longitude = np.array(global_scene['longitude'].load())

    swath = []

    for comp in composite:
        temp = np.array(global_scene[comp].load())
        swath.append(temp[:MAX_HEIGHT, :MAX_WIDTH])

    swath.append(latitude[:MAX_HEIGHT, :MAX_WIDTH])
    swath.append(longitude[:MAX_HEIGHT, :MAX_WIDTH])

    return np.array(swath, dtype=np.float16)
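
Illustrative call of get_swath (the file names are placeholders; MAX_HEIGHT and MAX_WIDTH come from the surrounding module):

swath = get_swath('MYD021KM.A2008003.1855.hdf', '/data/MYD03/')
# 13 MODIS bands plus latitude and longitude stacked on the first axis
assert swath.shape[0] == 15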
Example #22
 def test_scene_dataset_values(self):
     """Test loading data."""
     from satpy import Scene
     fname = os.path.join(self.base_dir, FILENAME)
     scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname])
     for name in scn.available_dataset_names():
         scn.load([name])
         loaded_values = scn[name].values
         fill_value = scn[name].attrs['fill_value']
         # replace nans in data loaded from file with the fill value defined in the .yaml
         # to make them comparable
         loaded_values_nan_filled = np.nan_to_num(loaded_values,
                                                  nan=fill_value)
         key = scn[name].attrs['key']
         original_values = MSG[key]
         # this makes each assertion below a separate test from unittest's point of view
         # (note: if all subtests pass, they will count as one test)
         with self.subTest(msg="Test failed for dataset: " + name):
             self.assertTrue(
                 np.allclose(original_values, loaded_values_nan_filled))
Example #23
    def test_scene_loading(self, calibrate, *mocks):
        """Test masking of space pixels."""
        from satpy import Scene
        nrows = 25
        ncols = 100
        calibrate.return_value = np.ones((nrows, ncols))
        with _fake_hsd_handler() as fh:
            with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler') as fh_cls:
                fh_cls.return_value = fh
                fh.filename_info['total_segments'] = 1
                fh.filename_info['segment'] = 1
                fh.data_info['number_of_columns'] = ncols
                fh.data_info['number_of_lines'] = nrows
                scn = Scene(reader='ahi_hsd', filenames=['HS_H08_20210225_0700_B07_FLDK_R20_S0110.DAT'])
                scn.load(['B07'])
                im = scn['B07']

                # Make sure space masking worked
                mask = im.to_masked_array().mask
                ref_mask = np.logical_not(get_geostationary_mask(fh.area).compute())
                np.testing.assert_equal(mask, ref_mask)
Example #24
    def read(self, filename, fields=None, **kwargs):
        scene = Scene(reader=self.satpy_reader, filenames=[filename.path])

        # If the user has not passed any fields to us, we load all by default.
        if fields is None:
            fields = scene.available_dataset_ids()

        # Load all selected fields
        scene.load(fields, **kwargs)

        if isinstance(fields[0], str):
            data_arrays = {field: scene.get(field) for field in fields}
        else:
            data_arrays = {field.name: scene.get(field) for field in fields}

        for name, array in data_arrays.items():
            array.name = name

        dataset = xr.merge(data_arrays.values())

        return dataset
Example #25
 def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps,
                       apply_quality_flags):
     """Test that we can load 'chlor_a'."""
     reader_kwargs = {"apply_quality_flags": apply_quality_flags}
     scene = Scene(reader='seadas_l2',
                   filenames=input_files,
                   reader_kwargs=reader_kwargs)
     scene.load(['chlor_a'])
     data_arr = scene['chlor_a']
     assert data_arr.attrs['platform_name'] == exp_plat
     assert data_arr.attrs['sensor'] == exp_sensor
     assert data_arr.attrs['units'] == 'mg m^-3'
     assert data_arr.dtype.type == np.float32
     assert isinstance(data_arr.attrs["area"], SwathDefinition)
     assert data_arr.attrs["rows_per_scan"] == exp_rps
     data = data_arr.data.compute()
     if apply_quality_flags:
         assert np.isnan(data[2, 2])
         assert np.count_nonzero(np.isnan(data)) == 1
     else:
         assert np.count_nonzero(np.isnan(data)) == 0
Example #26
def simple_export(hrit_files, time, dpi, photo_path, load_photo=['VIS006']):
    from satpy import Scene
    from satpy import find_files_and_readers
    from datetime import datetime
    import matplotlib as mpl
    mpl.rcParams['figure.dpi'] = dpi
    # load_photo = 'VIS006'

    first = 0
    last = len(time) - 1
    # print(len(time),last)

    yearF, monthF, dayF, hourF, minuteF, secondF = time[first].tt_calendar()
    yearL, monthL, dayL, hourL, minuteL, secondL = time[last].tt_calendar()
    # FIXME (not working yet): add a minute in case there is only one point on the map
    if len(time) == 1:
        time[last].tt = time[last].tt + 1 / 3600
        yearL, monthL, dayL, hourL, minuteL, secondL = time[last].tt_calendar()
        # print("It works")

    # print(yearF, monthF, dayF, hourF, minuteF, secondF )
    # print(yearL, monthL, dayL, hourL, minuteL, secondL)
    # time[0].tt_calendar()[0]
    files = find_files_and_readers(base_dir=hrit_files,
                                   start_time=datetime(yearF, monthF, dayF,
                                                       hourF, minuteF),
                                   end_time=datetime(yearL, monthL, dayL,
                                                     hourL, minuteL),
                                   reader='seviri_l1b_hrit')
    scn = Scene(filenames=files)

    scn.load(load_photo)
    file = photo_path + 'globe_' + load_photo[
        0] + '_{date:%Y-%m-%d_%H_%M_%S}.png'.format(date=scn.start_time)
    scn.save_dataset(load_photo[0],
                     writer='simple_image',
                     filename=file,
                     num_threads=8)

    return ()
Example #27
def load_sat_polar(in_reader, globber, chans, latpos, lonpos, deller):
    """Load data for a given sensor, find BT at particular lat/lon."""
    storm_bbox = [
        lonpos - deller, latpos - deller, lonpos + deller, latpos + deller
    ]

    files = glob(globber)
    if len(files) < 1:
        return None
    scn = Scene(files, reader=in_reader)
    scn.load(chans)

    lons = None
    lats = None
    output_dict = {}
    for chan in chans:
        try:
            if lons is None or lats is None:
                lons, lats = scn[chan].attrs['area'].get_lonlats()
                try:
                    lats = lats.compute()
                    lons = lons.compute()
                except:
                    pass
            btdata = np.array(scn[chan])
            pts = (lons < storm_bbox[0]).nonzero()
            btdata[pts] = np.nan
            pts = (lats < storm_bbox[1]).nonzero()
            btdata[pts] = np.nan
            pts = (lons > storm_bbox[2]).nonzero()
            btdata[pts] = np.nan
            pts = (lats > storm_bbox[3]).nonzero()
            btdata[pts] = np.nan
            output_dict[chan] = [
                np.nanmin(btdata), scn[chan].wavelength.min,
                scn[chan].wavelength.central, scn[chan].wavelength.max
            ]
        except KeyError:
            output_dict[chan] = [None, None, None, None]
    return output_dict
Example #28
def load_himawari(indir, in_time, comp_type, timedelt, mode):
    """
    Load a Himawari/AHI scene as given by img_time.

    img_time should be the *start* time for the scan, as the ending time
    will be automatically defined from this using timedelt

    The image will be loaded with Satpy, return value is a cartopy object

    Arguments:
        indir - directory holding the Himawari data in HSD (unzipped) format
        img_time - a datetime indicating the scene start time in UTC
        comp_type - the Satpy composite to create (true_color, B03, etc)
        timedelt - the scanning time delta (10 min for full disk AHI)
        mode - scanning mode (FD = Full disk, MESO = Mesoscale sector)
    Returns:
        sat_data - the satellite data object, unresampled
    """
    if mode == 'MESO':
        tmp_t = in_time
        minu = tmp_t.minute
        minu = minu - (minu % 10)

        tmp_t = tmp_t.replace(minute=minu)
        tmp_t = tmp_t.replace(second=0)
        dt = (in_time - tmp_t).total_seconds() / 60.
        src_str = '*_R30' + str(int(dt/timedelt) + 1) + '*'
        dtstr = tmp_t.strftime("%Y%m%d_%H%M")
        files = glob(indir + '*' + dtstr + src_str + '.DAT')
        files.sort()
    else:
        files = ffar(start_time=in_time,
                     end_time=in_time + timedelta(minutes=timedelt-1),
                     base_dir=indir,
                     reader='ahi_hsd')

    scn = Scene(reader='ahi_hsd', filenames=files)
    scn.load([comp_type], pad_data=False)

    return scn
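
Illustrative call of load_himawari (not part of the original): load a 10-minute full-disk scan and then resample, since the docstring notes the returned Scene is unresampled; resampling natively onto the coarsest band grid is just one reasonable choice.

from datetime import datetime

scn = load_himawari('/data/himawari8/', datetime(2021, 2, 25, 7, 0),
                    'true_color', timedelt=10, mode='FD')
local_scn = scn.resample(scn.min_area(), resampler='native')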
Example #29
def process_trio(trio, curr_idx, len_trio):
    """trio is a list of three parsed filenames (see the function parse_filename below).
    Given these three files, use Scene to load the appropriate channels.
    Then resample (colocate) to make the channels match up.
    Then save these colocated channels.
    Crop the NaN edges, tag with meta information (which files were used as input),
    And finally save the numpy arrays (so we don't need to recompute next time)"""
    dt = trio[0]["datetime"]
    log.info(
        f'{rgb(255,0,0)}Processing{reset} timestep {bold}{curr_idx + 1}/{len_trio}{reset} {blue}{dt}{reset}  '
    )

    #load the sat data
    scn = Scene(
        reader='viirs_sdr',
        filenames=[f['path'] for f in trio if f['filename'].endswith(".h5")])
    scn.load(viirs_channels + lat_long_both)
    #load and pair  the reflectance
    Reflectance = xarray.open_dataset(find_ncfile(trio)['path'])
    swath_def = SwathDefinition(Reflectance['longitude'],
                                Reflectance['latitude'])
    sm_refl = Reflectance['SM_Reflectance']
    sm_refl.attrs['area'] = swath_def
    #bring reflectance back to the satpy "Scene"
    scn['SM_reflectance'] = sm_refl

    log.info(f'Resampling {blue}{dt}{reset}')
    resample_scn = scn.resample(scn['DNB'].attrs['area'], resampler='nearest')

    log.info(f'Cropping nan edges of {blue}{dt}{reset}')
    t = time.time()
    data = crop.crop_nan_edges(resample_scn, all_channels)
    log.debug(
        f'Cropping nan edges took {rgb(255,0,0)}{time.time() - t:.2f}{reset} seconds'
    )

    data['channels'] = list(data)
    data['filenames'] = [f['filename'] for f in trio]
    data["datetime"] = dt
    return data
Example #30
def test_satpy_importer_contour_0_360(tmpdir, monkeypatch, mocker):
    """Test import of grib contour data using Satpy."""
    db_sess = mocker.MagicMock()
    attrs = {
        'name': 'gh',
        'level': 125,
        'area': AreaDefinition(
            'test', 'test', 'test',
            {
                'proj': 'eqc',
                'lon_0': 0,
                'pm': 180,
                'R': 6371229,
            }, 240, 120,
            (-20015806.220738243, -10007903.110369122, 20015806.220738243, 10007903.110369122)
        ),
        'start_time': datetime(2018, 9, 10, 17, 0, 31, 100000),
        'end_time': datetime(2018, 9, 10, 17, 11, 7, 800000),
        'model_time': datetime(2018, 9, 10, 17, 11, 7, 800000),
        'standard_name': 'geopotential_height',
    }
    data_arr = xr.DataArray(da.from_array(np.random.random((120, 240)).astype(np.float64), chunks='auto'),
                            attrs=attrs)
    scn = Scene()
    scn['gh'] = data_arr
    scn.load = mocker.MagicMock()  # don't do anything on load

    imp = SatpyImporter(['/test/file.nc'], tmpdir, db_sess,
                        scene=scn,
                        reader='grib',
                        dataset_ids=[make_dataid(name='gh', level=125)])
    imp.merge_resources()
    assert imp.num_products == 1
    products = list(imp.merge_products())
    assert len(products) == 1
    assert products[0].info[Info.STANDARD_NAME] == 'geopotential_height'
    assert products[0].info[Info.KIND] == Kind.CONTOUR

    query_mock = mocker.MagicMock(name='query')
    filter1_mock = mocker.MagicMock(name='filter1')
    filter2_mock = mocker.MagicMock(name='filter2')
    db_sess.query.return_value = query_mock
    query_mock.filter.return_value = filter1_mock
    filter1_mock.filter.return_value = filter2_mock
    filter2_mock.all.return_value = products
    import_gen = imp.begin_import_products()
    content_progresses = list(import_gen)
    # image and contour content
    assert len(content_progresses) == 2
    # make sure data was swapped to -180/180 space
    assert (content_progresses[0].data[:, :120] == data_arr.data[:, 120:].astype(np.float32)).all()
    assert (content_progresses[0].data[:, 120:] == data_arr.data[:, :120].astype(np.float32)).all()
Example #31
    def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file):
        """Test loading visible band."""
        scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file,
                      reader_kwargs={"mask_saturated": mask_saturated})
        dataset_name = '2'
        scene.load([dataset_name])
        dataset = scene[dataset_name]
        assert dataset.shape == _shape_for_resolution(1000)
        assert dataset.attrs['resolution'] == 1000
        _check_shared_metadata(dataset)

        # check saturation fill values
        data = dataset.values
        assert np.isnan(data[-1, -1])  # normal fill value
        if mask_saturated:
            assert np.isnan(data[-1, -2])  # saturation
            assert np.isnan(data[-1, -3])  # can't aggregate
        else:
            # test data factor/offset are 1/0
            # albedos are converted to %
            assert data[-1, -2] >= 32767 * 100.0  # saturation
            assert data[-1, -3] >= 32767 * 100.0  # can't aggregate
Example #32
def get_swath_rgb(radiance_filename, myd03_dir, composite='true_color'):
    """
    :param radiance_filename: MYD02 filename
    :param myd03_dir: root directory of MYD03 geolocational files
    :return visible RGB channels: numpy.ndarray of size (3, 2030, 1354) 
    Uses the satpy Scene reader with the modis_l1b files. Issues reading files might be due to pyhdf not being
    installed - otherwise try pip install satpy[modis_l1b]
    Creates a scene with the MYD02 file, and extracts the RGB channels from the 1, 4, 3 visible MODIS bands.
    """

    # find a corresponding geolocational (MYD03) file for the provided radiance (MYD02) file
    geoloc_filename = find_matching_geoloc_file(radiance_filename, myd03_dir)

    global_scene = Scene(reader='modis_l1b',
                         filenames=[radiance_filename, geoloc_filename])

    # load it in, make sure resolution is 1000 to match our other datasets
    global_scene.load([composite], resolution=1000)

    rgb = np.array(global_scene[composite])[:, :MAX_HEIGHT, :MAX_WIDTH]

    return rgb
Example #33
def importCM(data, myd03_p, cor_inds):
    tnames = []
    m03_hdf = SD(myd03_p, SDC.READ)
    d = m03_hdf.Input_Files

    sts = [i + 6 for i in range(len(d)) if d.startswith('MYD03.A', i)]
    eds = [i - 18 for i in range(len(d)) if d.startswith('.hdf', i)]
    eds.pop(0)

    for i in range(len(sts)):
        tnames.append(d[sts[i]:eds[i]])
    ifi = m03_hdf.select('Input_File_Index')
    ifp = m03_hdf.select('Input_Pixel_Index')
    ifi = ifi[:]
    ifp = ifp[:, :]
    print(ifp[0], ifp[1][0])
    cm_results = [-9999 for i in range(len(data))]
    current_file = ''
    files_used = []
    for index, d in enumerate(data):
        if not d == -9999:
            if not tnames[ifi[index]] in current_file:
                current_file = myd35_dict.get(tnames[ifi[index]])
                print(current_file)
                files_used.append(current_file)
                swath = Scene(reader='modis_l2',
                              filenames=[myd35_path + current_file])
                swath.load(['cloud_mask'], resolution=1000)
                cm = swath['cloud_mask'].values
            cm_results[index] = cm[ifp[index][0]][ifp[index][1]]  # pylint: disable=E1136
        #cm_results.append()
    # print(len(cm_results))
    # print(len(ifi), len(ifp))
    fillVal(ifi, cor_inds, len(ifi))
    fillVal(ifp, cor_inds, len(ifp))
    # print(len(ifi), len(ifp))
    #print(tnames)
    return cm_results, files_used
Example #34
hourS = "%02d" % hour
minS = "%02d" % min

filenames = glob("/var/tmp/cll/data/H-*MSG4*"+yearS+monthS+dayS+hourS+minS+"*__")
global_scene = Scene(reader="hrit_msg", filenames=filenames)

# first try; kept here only for reference
# global_scene.load(["HRV", "IR_108"])
# local_scene = global_scene.resample("ccs4")
# lonlats = local_scene["HRV"].area.get_lonlats()
# sza = sun_zenith_angle(local_scene.start_time, lonlats[0], lonlats[1])
# ds = DataArray(sza, dims=['y','x'])
# local_scene['sza'] = ds
# end of the first try, stuff below here is working again

global_scene.load(["ir108", "hrv", "IR_108", "hrv_with_ir"])
local_scene = global_scene.resample("ccs4")
local_scene.load(["hrv_with_ir", "IR_108"])

swiss = load_area("/opt/users/cll/cllwork/etc_work/areas.def", "ccs4")
tmpFileA = "/tmp/welcome.png"
tmpFileB = "/tmp/welcome-ir.png"
outputFile = "/var/tmp/cll/out/PY_visir-ch_"+yearS+monthS+dayS+hourS+minS+".png" 
bgFile = "/opt/users/cll/cllwork/ccs4.png"
local_scene.save_dataset("hrv_with_ir", tmpFileA)
local_scene.save_dataset("ir108", tmpFileB)

background = Image.open(bgFile)
foreground = Image.open(tmpFileA)
background = background.convert("RGBA")
foreground.putalpha(foreground.convert('L'))
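
The snippet ends here; a possible continuation (an assumption, not from the original) would composite the HRV foreground onto the background map and write the output file prepared above, provided both images share the ccs4 grid size:

combined = Image.alpha_composite(background, foreground)
combined.save(outputFile)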
Example #35
def main(argv=sys.argv[1:]):
    global LOG
    from satpy import Scene
    from satpy.resample import get_area_def
    from satpy.writers import compute_writer_results
    from dask.diagnostics import ProgressBar
    from polar2grid.core.script_utils import (
        setup_logging, rename_log_file, create_exc_handler)
    import argparse
    prog = os.getenv('PROG_NAME', sys.argv[0])
    # "usage: " will be printed at the top of this:
    usage = """
    %(prog)s -h
see available products:
    %(prog)s -r <reader> -w <writer> --list-products -f file1 [file2 ...]
basic processing:
    %(prog)s -r <reader> -w <writer> [options] -f file1 [file2 ...]
basic processing with limited products:
    %(prog)s -r <reader> -w <writer> [options] -p prod1 prod2 -f file1 [file2 ...]
"""
    parser = argparse.ArgumentParser(prog=prog, usage=usage,
                                     description="Load, composite, resample, and save datasets.")
    parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0,
                        help='each occurrence increases verbosity 1 level through ERROR-WARNING-INFO-DEBUG (default INFO)')
    parser.add_argument('-l', '--log', dest="log_fn", default=None,
                        help="specify the log filename")
    parser.add_argument('--progress', action='store_true',
                        help="show processing progress bar (not recommended for logged output)")
    parser.add_argument('--num-workers', type=int, default=4,
                        help="specify number of worker threads to use (default: 4)")
    parser.add_argument('--match-resolution', dest='preserve_resolution', action='store_false',
                        help="When using the 'native' resampler for composites, don't save data "
                             "at its native resolution, use the resolution used to create the "
                             "composite.")
    parser.add_argument('-w', '--writers', nargs='+',
                        help='writers to save datasets with')
    parser.add_argument("--list-products", dest="list_products", action="store_true",
                        help="List available reader products and exit")
    subgroups = add_scene_argument_groups(parser)
    subgroups += add_resample_argument_groups(parser)

    argv_without_help = [x for x in argv if x not in ["-h", "--help"]]
    args, remaining_args = parser.parse_known_args(argv_without_help)

    # get the logger if we know the readers and writers that will be used
    if args.reader is not None and args.writers is not None:
        glue_name = args.reader + "_" + "-".join(args.writers or [])
        LOG = logging.getLogger(glue_name)
    # add writer arguments
    if args.writers is not None:
        for writer in (args.writers or []):
            parser_func = WRITER_PARSER_FUNCTIONS.get(writer)
            if parser_func is None:
                continue
            subgroups += parser_func(parser)
    args = parser.parse_args(argv)

    if args.reader is None:
        parser.print_usage()
        parser.exit(1, "\nERROR: Reader must be provided (-r flag).\n"
                       "Supported readers:\n\t{}\n".format('\n\t'.join(['abi_l1b', 'ahi_hsd', 'hrit_ahi'])))
    if args.writers is None:
        parser.print_usage()
        parser.exit(1, "\nERROR: Writer must be provided (-w flag) with one or more writer.\n"
                       "Supported writers:\n\t{}\n".format('\n\t'.join(['geotiff'])))

    def _args_to_dict(group_actions):
        return {ga.dest: getattr(args, ga.dest) for ga in group_actions if hasattr(args, ga.dest)}
    scene_args = _args_to_dict(subgroups[0]._group_actions)
    load_args = _args_to_dict(subgroups[1]._group_actions)
    resample_args = _args_to_dict(subgroups[2]._group_actions)
    writer_args = {}
    for idx, writer in enumerate(args.writers):
        sgrp1, sgrp2 = subgroups[3 + idx * 2: 5 + idx * 2]
        wargs = _args_to_dict(sgrp1._group_actions)
        if sgrp2 is not None:
            wargs.update(_args_to_dict(sgrp2._group_actions))
        writer_args[writer] = wargs
        # get default output filename
        if 'filename' in wargs and wargs['filename'] is None:
            wargs['filename'] = get_default_output_filename(args.reader, writer)

    if not args.filenames:
        parser.print_usage()
        parser.exit(1, "\nERROR: No data files provided (-f flag)\n")

    # Prepare logging
    rename_log = False
    if args.log_fn is None:
        rename_log = True
        args.log_fn = glue_name + "_fail.log"
    levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
    setup_logging(console_level=levels[min(3, args.verbosity)], log_filename=args.log_fn)
    logging.getLogger('rasterio').setLevel(levels[min(2, args.verbosity)])
    sys.excepthook = create_exc_handler(LOG.name)
    if levels[min(3, args.verbosity)] > logging.DEBUG:
        import warnings
        warnings.filterwarnings("ignore")
    LOG.debug("Starting script with arguments: %s", " ".join(sys.argv))

    # Set up dask and the number of workers
    if args.num_workers:
        from multiprocessing.pool import ThreadPool
        dask.config.set(pool=ThreadPool(args.num_workers))

    # Parse provided files and search for files if provided directories
    scene_args['filenames'] = get_input_files(scene_args['filenames'])
    # Create a Scene, analyze the provided files
    LOG.info("Sorting and reading input files...")
    try:
        scn = Scene(**scene_args)
    except ValueError as e:
        LOG.error("{} | Enable debug message (-vvv) or see log file for details.".format(str(e)))
        LOG.debug("Further error information: ", exc_info=True)
        return -1
    except OSError:
        LOG.error("Could not open files. Enable debug message (-vvv) or see log file for details.")
        LOG.debug("Further error information: ", exc_info=True)
        return -1

    if args.list_products:
        print("\n".join(sorted(scn.available_dataset_names(composites=True))))
        return 0

    # Rename the log file
    if rename_log:
        rename_log_file(glue_name + scn.attrs['start_time'].strftime("_%Y%m%d_%H%M%S.log"))

    # Load the actual data arrays and metadata (lazy loaded as dask arrays)
    if load_args['products'] is None:
        try:
            reader_mod = importlib.import_module('polar2grid.readers.' + scene_args['reader'])
            load_args['products'] = reader_mod.DEFAULT_PRODUCTS
            LOG.info("Using default product list: {}".format(load_args['products']))
        except (ImportError, AttributeError):
            LOG.error("No default products list set, please specify with `--products`.")
            return -1

    LOG.info("Loading product metadata from files...")
    scn.load(load_args['products'])

    resample_kwargs = resample_args.copy()
    areas_to_resample = resample_kwargs.pop('grids')
    grid_configs = resample_kwargs.pop('grid_configs')
    resampler = resample_kwargs.pop('resampler')

    if areas_to_resample is None and resampler in [None, 'native']:
        # no areas specified
        areas_to_resample = ['MAX']
    elif areas_to_resample is None:
        raise ValueError("Resampling method specified (--method) without any destination grid/area (-g flag).")
    elif not areas_to_resample:
        # they don't want any resampling (they used '-g' with no args)
        areas_to_resample = [None]

    has_custom_grid = any(g not in ['MIN', 'MAX', None] for g in areas_to_resample)
    if has_custom_grid and resampler == 'native':
        LOG.error("Resampling method 'native' can only be used with 'MIN' or 'MAX' grids "
                  "(use 'nearest' method instead).")
        return -1

    p2g_grid_configs = [x for x in grid_configs if x.endswith('.conf')]
    pyresample_area_configs = [x for x in grid_configs if not x.endswith('.conf')]
    if not grid_configs or p2g_grid_configs:
        # if we were given p2g grid configs or we weren't given any to choose from
        from polar2grid.grids import GridManager
        grid_manager = GridManager(*p2g_grid_configs)
    else:
        grid_manager = {}

    if pyresample_area_configs:
        from pyresample.utils import parse_area_file
        custom_areas = parse_area_file(pyresample_area_configs)
        custom_areas = {x.area_id: x for x in custom_areas}
    else:
        custom_areas = {}

    ll_bbox = resample_kwargs.pop('ll_bbox')
    if ll_bbox:
        scn = scn.crop(ll_bbox=ll_bbox)

    wishlist = scn.wishlist.copy()
    preserve_resolution = get_preserve_resolution(args, resampler, areas_to_resample)
    if preserve_resolution:
        preserved_products = set(wishlist) & set(scn.datasets.keys())
        resampled_products = set(wishlist) - preserved_products

        # original native scene
        to_save = write_scene(scn, args.writers, writer_args, preserved_products)
    else:
        preserved_products = set()
        resampled_products = set(wishlist)
        to_save = []

    LOG.debug("Products to preserve resolution for: {}".format(preserved_products))
    LOG.debug("Products to use new resolution for: {}".format(resampled_products))
    for area_name in areas_to_resample:
        if area_name is None:
            # no resampling
            area_def = None
        elif area_name == 'MAX':
            area_def = scn.max_area()
        elif area_name == 'MIN':
            area_def = scn.min_area()
        elif area_name in custom_areas:
            area_def = custom_areas[area_name]
        elif area_name in grid_manager:
            from pyresample.geometry import DynamicAreaDefinition
            p2g_def = grid_manager[area_name]
            area_def = p2g_def.to_satpy_area()
            if isinstance(area_def, DynamicAreaDefinition) and p2g_def['cell_width'] is not None:
                area_def = area_def.freeze(scn.max_area(),
                                           resolution=(abs(p2g_def['cell_width']), abs(p2g_def['cell_height'])))
        else:
            area_def = get_area_def(area_name)

        if resampler is None and area_def is not None:
            rs = 'native' if area_name in ['MIN', 'MAX'] else 'nearest'
            LOG.debug("Setting default resampling to '{}' for grid '{}'".format(rs, area_name))
        else:
            rs = resampler

        if area_def is not None:
            LOG.info("Resampling data to '%s'", area_name)
            new_scn = scn.resample(area_def, resampler=rs, **resample_kwargs)
        elif not preserve_resolution:
            # the user didn't want to resample to any areas
            # the user also requested that we don't preserve resolution
            # which means we have to save this Scene's datasets
            # because they won't be saved
            new_scn = scn

        to_save = write_scene(new_scn, args.writers, writer_args, resampled_products, to_save=to_save)

    if args.progress:
        pbar = ProgressBar()
        pbar.register()

    LOG.info("Computing products and saving data to writers...")
    compute_writer_results(to_save)
    LOG.info("SUCCESS")
    return 0
Example #36
parser.add_argument('--sat_id', dest='sat_id', action="store", help="Satellite ID", default="8888")
parser.add_argument('--data_cat', dest='data_cat', action="store", help="Category of data (one of GORN, GPRN, P**N)", default="GORN")
parser.add_argument('--area', dest='areadef', action="store", help="Area name, the definition must exist in your areas configuration file", default="nrEURO1km_NPOL_COALeqc")
parser.add_argument('--ph_unit', dest='ph_unit', action="store", help="Physical unit", default="CELSIUS")
parser.add_argument('--data_src', dest='data_src', action="store", help="Data source", default="EUMETCAST")
args = parser.parse_args()

if args.input_dir is not None:
    os.chdir(args.input_dir)

cfg = vars(args)
if args.cfg is not None:
    with open(args.cfg, 'r') as ymlfile:
        cfg = yaml.safe_load(ymlfile)

narea = get_area_def(args.areadef)
global_data = Scene(sensor="images", reader="generic_image", area=narea)
global_data.load(['image'])

global_data['image'].info['area'] = narea
fname = global_data['image'].info['filename']
ofname = fname[:-3] + "tif"

#global_data.save_dataset('image', filename="out.png", writer="simple_image")
global_data.save_dataset('image', filename=ofname, writer="ninjotiff",
                      sat_id=cfg['sat_id'],
                      chan_id=cfg['chan_id'],
                      data_cat=cfg['data_cat'],
                      data_source=cfg['data_src'],
                      physic_unit=cfg['ph_unit'])
Example #37
def hex_to_rgb(value):
    value = value.lstrip('#')
    lv = len(value)
    return [int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)]


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print("Usage: " + sys.argv[0] + " MAIA_file ")
        sys.exit()

    fnmaia = sys.argv[1]
    maia_scene = Scene(reader='maia', filenames=[fnmaia])
    print(maia_scene.available_dataset_ids())
    maia_scene.load(["CloudType", "ct", "cma", "cma_conf",
                     'opaq_cloud', "CloudTopPres",
                     "CloudTopTemp", "Alt_surface"])

    # CloudType is a bit field containing the actual "ct" with values
    # from 0 to 20 which can be interpreted according to the cpool colormap

    # "ct" can be display in black and white:
    maia_scene.show("ct")

    # but it is better to palettize the image:
    # step 1: creation of the palette
    mycolors = []
    for i in range(21):
        mycolors.append(hex_to_rgb(cpool[i]))
    arr = np.array(mycolors)
    np.save("/tmp/binary_maia_ct_colormap.npy", arr)
Example #38
import os
from satpy import Scene
from datetime import datetime
from satpy.utils import debug_on
import pyninjotiff
from glob import glob
from pyresample.utils import load_area
debug_on()


chn = "airmass"
ninjoRegion = load_area("areas.def", "nrEURO3km")

filenames = glob("data/*__")
global_scene = Scene(reader="hrit_msg", filenames=filenames)
global_scene.load([chn])
local_scene = global_scene.resample(ninjoRegion)
local_scene.save_dataset(chn, filename="airmass.tif", writer='ninjotiff',
                      sat_id=6300014,
                      chan_id=6500015,
                      data_cat='GPRN',
                      data_source='EUMCAST',
                      nbits=8)