Example #1
    def _get_xy_arrays(self):
        gd = self.area_definition
        ts = self.tile_shape
        tc = self.tile_count
        # Since our tiles may go over the edge of the original "grid" we
        # need to make sure we calculate X/Y to the edge of all of the tiles
        imaginary_data_size = (ts[0] * tc[0], ts[1] * tc[1])
        ps_x = gd.pixel_size_x
        ps_y = gd.pixel_size_y
        # tiles start from upper-left
        new_extents = (
            gd.area_extent[0],
            gd.area_extent[1] - ps_y * (imaginary_data_size[1] - gd.y_size),
            gd.area_extent[2] + ps_x * (imaginary_data_size[0] - gd.x_size),
            gd.area_extent[3])
        imaginary_grid_def = AreaDefinition(
            gd.area_id,
            gd.name,
            gd.proj_id,
            gd.proj_dict,
            imaginary_data_size[1],
            imaginary_data_size[0],
            new_extents,
        )

        x, y = imaginary_grid_def.get_proj_coords()
        x = x[0].squeeze()  # all rows should have the same coordinates
        y = y[:, 0].squeeze()  # all columns should have the same coordinates
        # scale the X and Y arrays to fit in the file as 16-bit integers
        # AWIPS requires the integer values to count up from 0 (0, 1, 2, ...)
        # The max value of a signed 16-bit integer is 32767, giving
        # 32768 non-negative values including 0.
        if x.shape[0] > 2**15:
            # awips uses 0, 1, 2, 3 so we can't use the negative end of the variable space
            raise ValueError("X variable too large for AWIPS-version of 16-bit integer space")
        if y.shape[0] > 2**15:
            # awips uses 0, 1, 2, 3 so we can't use the negative end of the variable space
            raise ValueError("Y variable too large for AWIPS-version of 16-bit integer space")
        # The NetCDF library no longer handles plain numpy arrays nicely
        # and has been masking values that it shouldn't, so wrap explicitly
        return np.ma.masked_array(x), np.ma.masked_array(y)
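The comments above describe scaling X/Y into 16-bit integer space, but the packing itself happens elsewhere in the writer. A minimal sketch of what CF-style packing of an evenly spaced coordinate array could look like; the helper name and packing convention are illustrative assumptions, not from the original:

import numpy as np

def pack_coord_16bit(coord):
    # Hypothetical helper: pack an evenly spaced 1-D coordinate array into
    # 16-bit integers plus CF-style scale_factor/add_offset attributes.
    # AWIPS counts up from 0, so only the non-negative half of the signed
    # 16-bit range (0..32767) is usable; the size checks above guarantee that.
    add_offset = float(coord[0])
    scale_factor = float(coord[-1] - coord[0]) / (coord.size - 1)
    packed = np.round((coord - add_offset) / scale_factor).astype(np.int16)
    return packed, scale_factor, add_offset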
Example #2
    def test_hncc_dnb(self):
        """Test the 'hncc_dnb' compositor."""
        import xarray as xr
        import dask.array as da
        import numpy as np
        from satpy.composites.viirs import NCCZinke
        from pyresample.geometry import AreaDefinition
        rows = 5
        cols = 10
        area = AreaDefinition('test', 'test', 'test', {
            'proj': 'eqc',
            'lon_0': 0.0,
            'lat_0': 0.0
        }, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17))

        comp = NCCZinke('hncc_dnb',
                        prerequisites=('dnb', ),
                        standard_name='toa_outgoing_radiance_per_'
                        'unit_wavelength')
        dnb = np.zeros((rows, cols)) + 0.25
        dnb[3, :] += 0.25
        dnb[4:, :] += 0.5
        dnb = da.from_array(dnb, chunks=25)
        c01 = xr.DataArray(dnb,
                           dims=('y', 'x'),
                           attrs={
                               'name': 'DNB',
                               'area': area
                           })
        sza = np.zeros((rows, cols)) + 70.0
        sza[:, 3] += 20.0
        sza[:, 4:] += 45.0
        sza = da.from_array(sza, chunks=25)
        c02 = xr.DataArray(sza,
                           dims=('y', 'x'),
                           attrs={
                               'name': 'solar_zenith_angle',
                               'area': area
                           })
        lza = np.zeros((rows, cols)) + 70.0
        lza[:, 3] += 20.0
        lza[:, 4:] += 45.0
        lza = da.from_array(lza, chunks=25)
        c03 = xr.DataArray(lza,
                           dims=('y', 'x'),
                           attrs={
                               'name': 'lunar_zenith_angle',
                               'area': area
                           })
        mif = xr.DataArray(da.zeros((5, ), chunks=5) + 0.1,
                           dims=('y', ),
                           attrs={
                               'name': 'moon_illumination_fraction',
                               'area': area
                           })
        res = comp((c01, c02, c03, mif))
        self.assertIsInstance(res, xr.DataArray)
        self.assertIsInstance(res.data, da.Array)
        self.assertEqual(res.attrs['name'], 'hncc_dnb')
        self.assertEqual(res.attrs['standard_name'], 'ncc_radiance')
        data = res.compute()
        unique = np.unique(data)
        np.testing.assert_allclose(unique, [
            3.48479712e-04, 6.96955799e-04, 1.04543189e-03, 4.75394738e-03,
            9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03,
            4.50001560e+03
        ])
Example #3
def _sunz_bigger_area_def():
    """Get area that is twice the size of 'sunz_area_def'."""
    bigger_area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 4,
                                 4, (-2000, -2000, 2000, 2000))
    return bigger_area
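A hypothetical usage of this fixture; the test name is illustrative, and AreaDefinition.shape is (height, width):

def test_sunz_bigger_area_shape():
    # Illustrative test using the fixture above.
    area = _sunz_bigger_area_def()
    assert area.shape == (4, 4)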
Example #4
def run(input_dir, output_dir):
    DATA_DIR_2 = os.path.join(input_dir, "ch2")
    DATA_DIR_6 = os.path.join(input_dir, "ch6")
    DATA_DIR_7 = os.path.join(input_dir, "ch7")
    DATA_DIR_14 = os.path.join(input_dir, "ch14")

    # Get contents of data dir for ch 7
    data_list_7 = os.listdir(DATA_DIR_7)
    if ".DS_Store" in data_list_7:
        data_list_7.remove(".DS_Store")  # For mac users
    data_list_7 = sorted(data_list_7)

    # Get contents of data dir for ch14
    data_list_14 = os.listdir(DATA_DIR_14)
    if ".DS_Store" in data_list_14:
        data_list_14.remove(".DS_Store")  # For mac users
    data_list_14 = sorted(data_list_14)

    # Get contents of data dir for ch 2
    data_list_2 = os.listdir(DATA_DIR_2)
    if ".DS_Store" in data_list_2:
        data_list_2.remove(".DS_Store")  # For mac users
    data_list_2 = sorted(data_list_2)

    # Get contents of data dir for ch 6
    data_list_6 = os.listdir(DATA_DIR_6)
    if ".DS_Store" in data_list_6:
        data_list_6.remove(".DS_Store")  # For mac users
    data_list_6 = sorted(data_list_6)

    # Load ch7 for projection constants
    first_ds_name = data_list_7[0]
    first_ds_path = os.path.join(DATA_DIR_7, first_ds_name)
    first_ds = GOES.open_dataset(first_ds_path)
    var_ch02, lons, lats = first_ds.image("Rad",
                                          domain=[LLLon, URLon, LLLat, URLat])
    var_ch02, lons, lats = var_ch02.data, lons.data, lats.data
    HEIGHT = var_ch02.shape[0]
    WIDTH = var_ch02.shape[1]

    # Setup projection constants used throughout the script.
    tiff_path = os.path.join(TIFF_DIR, "0.tif")
    p_crs = CRS.from_epsg(3857)
    p_latlon = CRS.from_proj4("+proj=latlon")
    crs_transform = Transformer.from_crs(p_latlon, p_crs)
    ll_x, ll_y = crs_transform.transform(LLLon, LLLat)
    ur_x, ur_y = crs_transform.transform(URLon, URLat)
    area_extent = (ll_x, ll_y, ur_x, ur_y)
    ul_x = ll_x  # Upper-left corner takes x from the lower-left corner
    ul_y = ur_y  # ... and y from the upper-right corner
    area_id = "California Coast"
    description = "See area ID"
    proj_id = "Mercator"
    pixel_size_x = (ur_x - ll_x) / (WIDTH - 1)
    pixel_size_y = (ur_y - ll_y) / (HEIGHT - 1)
    new_affine = Affine(pixel_size_x, 0.0, ul_x, 0.0, -pixel_size_y, ul_y)
    area_def = AreaDefinition(area_id, description, proj_id, p_crs, WIDTH,
                              HEIGHT, area_extent)
    fill_value = np.nan

    # Load ch7 for land masking
    first_ds_name = data_list_7[0]
    first_ds_path = os.path.join(DATA_DIR_7, first_ds_name)
    first_ds = GOES.open_dataset(first_ds_path)
    var_ch07, lons, lats = first_ds.image("Rad",
                                          domain=[LLLon, URLon, LLLat, URLat])
    var_ch07, lons, lats = var_ch07.data, lons.data, lats.data
    swath_def = SwathDefinition(lons, lats)
    first_ds = None  # Free the memory from these big datasets
    var_ch07 = kd_tree.resample_nearest(swath_def,
                                        var_ch07.ravel(),
                                        area_def,
                                        radius_of_influence=5000,
                                        nprocs=2,
                                        fill_value=fill_value)

    ###### New land masking system #######################
    with rasterio.open(
            tiff_path,
            "w",
            driver="GTiff",
            height=HEIGHT,
            width=WIDTH,
            count=1,  # single band
            dtype=var_ch07.dtype,
            crs=p_crs,
            transform=new_affine,
            nodata=fill_value,
    ) as dst:
        dst.write(np.reshape(var_ch07, (1, HEIGHT, WIDTH)))

    src = rasterio.open(tiff_path, mode='r+')
    geodf = geopandas.read_file(LAND_POLYGON_SHAPE)
    land_masking, other_affine = mask.mask(src,
                                           geodf[['geometry']].values.flatten(),
                                           invert=True,
                                           filled=False)
    land_masking = np.ma.getmask(land_masking)
    land_masking = np.reshape(land_masking, (HEIGHT, WIDTH))
    src.close()  # Free memory
    src = None
    geodf = None
    ############################################################

    # Init multi-tracker
    trackers = MultiTrackerImproved(cv2.TrackerCSRT_create)

    image_list = []
    # BTD_list = []
    refl_ch2_list = []
    refl_ch6_list = []

    i = 0
    for ds_name_7 in data_list_7:
        ds_name_14 = data_list_14[i]
        ds_name_2 = data_list_2[i]
        ds_name_6 = data_list_6[i]
        ds_path_7 = os.path.join(DATA_DIR_7, ds_name_7)
        ds_path_14 = os.path.join(DATA_DIR_14, ds_name_14)
        ds_path_2 = os.path.join(DATA_DIR_2, ds_name_2)
        ds_path_6 = os.path.join(DATA_DIR_6, ds_name_6)

        # Load channel 2
        ds_2 = GOES.open_dataset(ds_path_2)
        var_ch02, lons, lats = ds_2.image("Rad",
                                          domain=[LLLon, URLon, LLLat, URLat])
        var_ch02, lons, lats = var_ch02.data, lons.data, lats.data
        swath_def = SwathDefinition(lons, lats)
        var_ch02 = kd_tree.resample_nearest(swath_def,
                                            var_ch02.ravel(),
                                            area_def,
                                            radius_of_influence=5000,
                                            nprocs=2,
                                            fill_value=fill_value)

        # Load channel 2 reflectivity
        ds_2 = GOES.open_dataset(ds_path_2)
        refl_var_ch02, lons, lats = ds_2.image(
            "Rad", up_level=True, domain=[LLLon, URLon, LLLat, URLat])
        refl_var_ch02 = refl_var_ch02.refl_fact_to_refl(lons, lats).data
        swath_def = SwathDefinition(lons.data, lats.data)
        refl_var_ch02 = kd_tree.resample_nearest(swath_def,
                                                 refl_var_ch02.ravel(),
                                                 area_def,
                                                 radius_of_influence=5000,
                                                 nprocs=2,
                                                 fill_value=fill_value)

        # Load channel 6 reflectivity
        ds_6 = GOES.open_dataset(ds_path_6)
        refl_var_ch06, lons, lats = ds_6.image(
            "Rad", up_level=True, domain=[LLLon, URLon, LLLat, URLat])
        refl_var_ch06 = refl_var_ch06.refl_fact_to_refl(lons, lats).data
        swath_def = SwathDefinition(lons.data, lats.data)
        refl_var_ch06 = kd_tree.resample_nearest(swath_def,
                                                 refl_var_ch06.ravel(),
                                                 area_def,
                                                 radius_of_influence=5000,
                                                 nprocs=2,
                                                 fill_value=fill_value)

        # Load channel 7
        ds_7 = GOES.open_dataset(ds_path_7)
        var_ch07, lons, lats = ds_7.image("Rad",
                                          domain=[LLLon, URLon, LLLat, URLat])
        var_ch07, lons, lats = var_ch07.data, lons.data, lats.data
        swath_def = SwathDefinition(lons, lats)
        var_ch07 = kd_tree.resample_nearest(swath_def,
                                            var_ch07.ravel(),
                                            area_def,
                                            radius_of_influence=5000,
                                            nprocs=2,
                                            fill_value=fill_value)

        # Load channel 14
        ds_14 = GOES.open_dataset(ds_path_14)
        var_ch14, lons, lats = ds_14.image("Rad",
                                           domain=[LLLon, URLon, LLLat, URLat])
        var_ch14, lons, lats = var_ch14.data, lons.data, lats.data
        swath_def = SwathDefinition(lons, lats)
        var_ch14 = kd_tree.resample_nearest(swath_def,
                                            var_ch14.ravel(),
                                            area_def,
                                            radius_of_influence=5000,
                                            nprocs=2,
                                            fill_value=fill_value)

        # Make BTD
        var = calc_BTD.main_func(var_ch14, var_ch07, 14, 7)

        # Skip day if it has bad data
        if np.isnan(var).any():
            i = i + 1
            continue

        # Make a copy of the BTD for use as a background in cv2 image output
        # Maps the BTD values to the range [0, 1]
        # BTD = copy.deepcopy(var)
        BTD_img = copy.deepcopy(var)
        min_BTD = np.nanmin(BTD_img)
        if min_BTD < 0:
            BTD_img = BTD_img + np.abs(min_BTD)
        max_BTD = np.nanmax(BTD_img)
        BTD_img = BTD_img / max_BTD
        # BTD_img = cv2.cvtColor(BTD_img*255, cv2.COLOR_GRAY2BGR)
        # BTD_img_trackers = copy.deepcopy(BTD_img) # Next two lines are for new BTD data for trackers
        # BTD_img_trackers = np.array(BTD_img_trackers).astype('uint8') # Since it seems the trackers need images of type uint8

        # Filter out the land
        var[land_masking] = np.nan

        # Create mask array for the highest clouds
        high_cloud_mask = calc_BTD.bt_ch14_temp_conv(
            var_ch14) < 5  # TODO: Make this more robust

        #### Use reflectivity of channel 2 and BT of channel 14 to filter out open ocean data ###########
        BT = calc_BTD.bt_ch14_temp_conv(var_ch14)

        BT = BT[np.logical_and(
            np.logical_not(land_masking), np.logical_not(high_cloud_mask)
        )]  # Keep only open-water, low-cloud pixels; golden arches works best over water
        var_ch02 = var_ch02[np.logical_and(
            np.logical_not(land_masking), np.logical_not(high_cloud_mask)
        )]  # Apply the same land/high-cloud mask to the ch2 data

        BT_and_CH02 = np.vstack((BT, var_ch02)).T
        BT_and_CH02_sample, _ = train_test_split(BT_and_CH02, train_size=10000)

        clusterer = DBSCAN(eps=1.5,
                           min_samples=100)  # Found through extensive testing
        classifier = DecisionTreeClassifier()
        inductive_cluster = InductiveClusterer(
            clusterer, classifier).fit(BT_and_CH02_sample)
        IC_labels = inductive_cluster.predict(BT_and_CH02) + 1

        all_labels = np.unique(IC_labels)
        min_refl = np.inf
        open_ocean_label = 0
        for j in all_labels:
            labeled_refl_array = var_ch02[IC_labels == j]
            mean_refl = np.nanmean(labeled_refl_array)
            if mean_refl < min_refl:
                open_ocean_label = j
                min_refl = mean_refl
        golden_arch_mask_ocean = IC_labels == open_ocean_label

        golden_arch_mask = np.zeros(var.shape, dtype=bool)
        golden_arch_mask[np.logical_and(
            np.logical_not(land_masking),
            np.logical_not(high_cloud_mask))] = golden_arch_mask_ocean

        var = np.where(golden_arch_mask, np.nan, var)
        ###############################################################################################

        # Filter out the cold, high-altitude clouds
        var = np.where(high_cloud_mask, np.nan, var)

        var = feature.canny(var,
                            sigma=2.2,
                            low_threshold=0,
                            high_threshold=1.2)
        var = np.where(np.isnan(var), 0, var)  # NaN != NaN, so test with np.isnan

        ## Skimage hough line transform #################################
        var = np.array(var).astype('uint8')
        img = cv2.cvtColor(var * 255, cv2.COLOR_GRAY2BGR)

        # Previous parameters were threshold=0, minLineLength=30, maxLineGap=1
        threshold = 0
        minLineLength = 30
        maxLineGap = 2
        theta = np.linspace(-np.pi, np.pi, 1000)

        lines = transform.probabilistic_hough_line(var,
                                                   threshold=threshold,
                                                   line_length=minLineLength,
                                                   line_gap=maxLineGap,
                                                   theta=theta)
        #############################################################

        #### TRACKER #################
        trackers.update(img, i)

        if lines is not None:
            for line in lines:
                p0, p1 = line
                x1 = p0[0]
                y1 = p0[1]
                x2 = p1[0]
                y2 = p1[1]

                min_x = np.minimum(x1, x2)
                min_y = np.minimum(y1, y2)
                max_x = np.maximum(x1, x2)
                max_y = np.maximum(y1, y2)

                rect = (min_x - 2, min_y - 2, max_x - min_x + 4,
                        max_y - min_y + 4
                        )  #TODO: Maybe expand the size of the boxes a bit?
                trackers.add_tracker(img, rect, len(data_list_7))
        ###############################

        image_list.append(BTD_img)
        # BTD_list.append(BTD)
        refl_ch2_list.append(refl_var_ch02)
        refl_ch6_list.append(refl_var_ch06)

        print("Image " + str(i) + " Calculated")
        i = i + 1

    # TODO: Remove BTD_list in all areas if I am not using it for real final pngs
    for i in range(len(image_list)):
        label_name = "labels"
        data_name = "data"
        filename = str(i) + ".tif"
        data_file_path = os.path.join(output_dir, data_name, filename)
        label_file_path = os.path.join(output_dir, label_name, filename)
        boxes = trackers.get_boxes(i)

        BTD_img = image_list[i]
        # BTD = BTD_list[i]
        refl_var_ch02 = refl_ch2_list[i]
        refl_var_ch06 = refl_ch6_list[i]

        # Make box plots for trackers
        # Also make and highlight the labels
        labels = np.zeros([BTD_img.shape[0], BTD_img.shape[1]],
                          dtype=np.float32)
        for box in boxes:
            (x, y, w, h) = [int(v) for v in box]

            # Keep only boxes that lie fully inside the image bounds
            if (w > 0 and h > 0 and x >= 0 and y >= 0
                    and y + h <= BTD_img.shape[0]
                    and x + w <= BTD_img.shape[1]):
                ch2_slice = refl_var_ch02[y:y + h, x:x + w]
                ch6_slice = refl_var_ch06[y:y + h, x:x + w]

                labels_slice = labels[y:y + h, x:x + w]
                labels_slice = np.where(
                    np.logical_and(ch6_slice >= 0.28, ch2_slice >= 0.3), 1.0,
                    labels_slice)
                labels[y:y + h, x:x + w] = labels_slice  # Add red for labels

        with rasterio.open(
                data_file_path,
                "w",
                driver="GTiff",
                height=HEIGHT,
                width=WIDTH,
                count=1,  # single band
                dtype=BTD_img.dtype,
                crs=p_crs,
                transform=new_affine,
                nodata=fill_value,
        ) as dst:
            dst.write(np.reshape(BTD_img, (1, HEIGHT, WIDTH)))

        with rasterio.open(
                label_file_path,
                "w",
                driver="GTiff",
                height=HEIGHT,
                width=WIDTH,
                count=1,  # single band
                dtype=labels.dtype,
                crs=p_crs,
                transform=new_affine,
                nodata=fill_value,
        ) as dst:
            dst.write(np.reshape(labels, (1, HEIGHT, WIDTH)))

        # BTD_img = cv2.addWeighted(BTD_img, 1.0, labels, 0.5, 0)

        # cv2.imwrite(file_path, BTD_img)

        print("Image " + str(i) + " Complete")
Example #5
    def test_multivar_numbered_tiles_glm(self, sector):
        """Test creating a tiles with multiple variables."""
        import xarray as xr
        from satpy.writers.awips_tiled import AWIPSTiledWriter
        from xarray import DataArray
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
        area_def = AreaDefinition(
            'test',
            'test',
            'test',
            proj4_str_to_dict(
                '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
                '+lat_0=25 +lat_1=25 +units=m +no_defs'),
            100,
            200,
            (-1000., -1500., 1000., 1500.),
        )
        now = datetime(2018, 1, 1, 12, 0, 0)
        end_time = now + timedelta(minutes=20)
        ds1 = DataArray(da.from_array(np.linspace(0.,
                                                  1.,
                                                  20000,
                                                  dtype=np.float32).reshape(
                                                      (200, 100)),
                                      chunks=50),
                        attrs=dict(name='total_energy',
                                   platform_name='GOES-17',
                                   sensor='SENSOR',
                                   units='1',
                                   area=area_def,
                                   start_time=now,
                                   end_time=end_time,
                                   scan_mode='M3',
                                   scene_abbr=sector,
                                   platform_shortname="G17"))
        ds2 = ds1.copy()
        ds2.attrs.update({
            'name': 'flash_extent_density',
        })
        ds3 = ds1.copy()
        ds3.attrs.update({
            'name': 'average_flash_area',
        })
        dqf = ds1.copy()
        dqf = (dqf * 255).astype(np.uint8)
        dqf.attrs = ds1.attrs.copy()
        dqf.attrs.update({
            'name': 'DQF',
            '_FillValue': 1,
        })

        w.save_datasets([ds1, ds2, ds3, dqf],
                        sector_id='TEST',
                        source_name="TESTS",
                        tile_count=(3, 3),
                        template='glm_l2_rad{}'.format(sector.lower()))
        all_files = glob(os.path.join(self.base_dir, '*_GLM*.nc'))
        assert len(all_files) == 9
        for fn in all_files:
            ds = xr.open_dataset(fn, mask_and_scale=False)
            check_required_common_attributes(ds)
            if sector == 'C':
                assert ds.attrs['time_coverage_end'] == end_time.strftime(
                    '%Y-%m-%dT%H:%M:%S.%fZ')
            else:  # 'F'
                assert ds.attrs['time_coverage_end'] == end_time.strftime(
                    '%Y-%m-%dT%H:%M:%SZ')
Example #6
    def setUp(self):
        """Create temporary images to test on."""
        import tempfile
        from datetime import datetime

        from pyresample.geometry import AreaDefinition

        from satpy.scene import Scene

        self.date = datetime(2018, 1, 1)

        # Create area definition
        pcs_id = 'ETRS89 / LAEA Europe'
        proj4_dict = "EPSG:3035"
        self.x_size = 100
        self.y_size = 100
        area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222)
        self.area_def = AreaDefinition('geotiff_area', pcs_id, pcs_id,
                                       proj4_dict, self.x_size, self.y_size,
                                       area_extent)

        # Create datasets for L, LA, RGB and RGBA mode images
        r__ = da.random.randint(0,
                                256,
                                size=(self.y_size, self.x_size),
                                chunks=(50, 50)).astype(np.uint8)
        g__ = da.random.randint(0,
                                256,
                                size=(self.y_size, self.x_size),
                                chunks=(50, 50)).astype(np.uint8)
        b__ = da.random.randint(0,
                                256,
                                size=(self.y_size, self.x_size),
                                chunks=(50, 50)).astype(np.uint8)
        a__ = 255 * np.ones((self.y_size, self.x_size), dtype=np.uint8)
        a__[:10, :10] = 0
        a__ = da.from_array(a__, chunks=(50, 50))

        r_nan__ = np.random.uniform(0., 1., size=(self.y_size, self.x_size))
        r_nan__[:10, :10] = np.nan
        r_nan__ = da.from_array(r_nan__, chunks=(50, 50))

        ds_l = xr.DataArray(da.stack([r__]),
                            dims=('bands', 'y', 'x'),
                            attrs={
                                'name': 'test_l',
                                'start_time': self.date
                            })
        ds_l['bands'] = ['L']
        ds_la = xr.DataArray(da.stack([r__, a__]),
                             dims=('bands', 'y', 'x'),
                             attrs={
                                 'name': 'test_la',
                                 'start_time': self.date
                             })
        ds_la['bands'] = ['L', 'A']
        ds_rgb = xr.DataArray(da.stack([r__, g__, b__]),
                              dims=('bands', 'y', 'x'),
                              attrs={
                                  'name': 'test_rgb',
                                  'start_time': self.date
                              })
        ds_rgb['bands'] = ['R', 'G', 'B']
        ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]),
                               dims=('bands', 'y', 'x'),
                               attrs={
                                   'name': 'test_rgba',
                                   'start_time': self.date
                               })
        ds_rgba['bands'] = ['R', 'G', 'B', 'A']

        ds_l_nan = xr.DataArray(da.stack([r_nan__]),
                                dims=('bands', 'y', 'x'),
                                attrs={
                                    'name': 'test_l_nan',
                                    'start_time': self.date
                                })
        ds_l_nan['bands'] = ['L']

        # Temp dir for the saved images
        self.base_dir = tempfile.mkdtemp()

        # Put the datasets to Scene for easy saving
        scn = Scene()
        scn['l'] = ds_l
        scn['l'].attrs['area'] = self.area_def
        scn['la'] = ds_la
        scn['la'].attrs['area'] = self.area_def
        scn['rgb'] = ds_rgb
        scn['rgb'].attrs['area'] = self.area_def
        scn['rgba'] = ds_rgba
        scn['rgba'].attrs['area'] = self.area_def
        scn['l_nan'] = ds_l_nan
        scn['l_nan'].attrs['area'] = self.area_def

        # Save the images.  Two images in PNG and two in GeoTIFF
        scn.save_dataset('l',
                         os.path.join(self.base_dir, 'test_l.png'),
                         writer='simple_image')
        scn.save_dataset('la',
                         os.path.join(self.base_dir,
                                      '20180101_0000_test_la.png'),
                         writer='simple_image')
        scn.save_dataset('rgb',
                         os.path.join(self.base_dir,
                                      '20180101_0000_test_rgb.tif'),
                         writer='geotiff')
        scn.save_dataset('rgba',
                         os.path.join(self.base_dir, 'test_rgba.tif'),
                         writer='geotiff')
        scn.save_dataset('l_nan',
                         os.path.join(self.base_dir,
                                      'test_l_nan_fillvalue.tif'),
                         writer='geotiff',
                         fill_value=0)
        scn.save_dataset('l_nan',
                         os.path.join(self.base_dir,
                                      'test_l_nan_nofillvalue.tif'),
                         writer='geotiff')

        self.scn = scn
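The matching tearDown is not shown in this excerpt; a plausible counterpart (hypothetical, not from the original) would remove the temporary directory:

    def tearDown(self):
        """Remove the temporary images created in setUp."""
        # Hypothetical cleanup counterpart; the original tearDown is not shown.
        import shutil
        shutil.rmtree(self.base_dir, ignore_errors=True)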
Example #7
tiff_path = os.path.join(TIFF_DIR, "0.tif")
p_crs = CRS.from_epsg(3857)
p_latlon = CRS.from_proj4("+proj=latlon")
crs_transform = Transformer.from_crs(p_latlon, p_crs)
ll_x, ll_y = crs_transform.transform(LLLon, LLLat)
ur_x, ur_y = crs_transform.transform(URLon, URLat)
area_extent = (ll_x, ll_y, ur_x, ur_y)
ul_x = ll_x # Upper-left corner takes x from the lower-left corner
ul_y = ur_y # ... and y from the upper-right corner
area_id = "California Coast"
description = "See area ID"
proj_id = "Mercator"
pixel_size_x = (ur_x - ll_x)/(WIDTH - 1)
pixel_size_y = (ur_y - ll_y)/(HEIGHT - 1)
new_affine = Affine(pixel_size_x, 0.0, ul_x, 0.0, -pixel_size_y, ul_y)
area_def = AreaDefinition(area_id, description, proj_id, p_crs,
                            WIDTH, HEIGHT, area_extent)
fill_value = np.nan

# Load ch7 for land masking
first_ds_name = data_list_7[0]
first_ds_path = os.path.join(DATA_DIR_7, first_ds_name)
first_ds = GOES.open_dataset(first_ds_path)
var_ch07, lons, lats = first_ds.image("Rad", domain=[LLLon, URLon, LLLat, URLat])
var_ch07, lons, lats = var_ch07.data, lons.data, lats.data
swath_def = SwathDefinition(lons, lats)
first_ds = None # Free the memory from these big datasets
var_ch07 = kd_tree.resample_nearest(
    swath_def,
    var_ch07.ravel(),
    area_def,
    radius_of_influence=5000,
    nprocs=2,
    fill_value=fill_value)
Example #8
    def setUp(self):
        """Create test data."""
        from pyresample.geometry import AreaDefinition
        area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 2,
                              (-2000, -2000, 2000, 2000))
        attrs = {
            'area': area,
            'start_time': datetime(2018, 1, 1, 18),
            'modifiers': tuple(),
            'resolution': 1000,
            'name': 'test_vis'
        }
        ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64),
                           attrs=attrs,
                           dims=('y', 'x'),
                           coords={
                               'y': [0, 1],
                               'x': [0, 1]
                           })
        self.ds1 = ds1
        ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2,
                           attrs=attrs,
                           dims=('y', 'x'),
                           coords={
                               'y': [0, 1],
                               'x': [0, 1]
                           })
        ds2.attrs['name'] += '2'
        self.ds2 = ds2
        ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3,
                           attrs=attrs,
                           dims=('y', 'x'),
                           coords={
                               'y': [0, 1],
                               'x': [0, 1]
                           })
        ds3.attrs['name'] += '3'
        self.ds3 = ds3
        ds4 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 4,
                           attrs=attrs,
                           dims=('y', 'x'),
                           coords={
                               'y': [0, 1],
                               'x': [0, 1]
                           })
        ds4.attrs['name'] += '4'
        ds4.attrs['resolution'] = 500
        self.ds4 = ds4

        # high res version
        ds4 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4,
                           attrs=attrs.copy(),
                           dims=('y', 'x'),
                           coords={
                               'y': [0, 1, 2, 3],
                               'x': [0, 1, 2, 3]
                           })
        ds4.attrs['name'] += '4'
        ds4.attrs['resolution'] = 500
        ds4.attrs['rows_per_scan'] = 1
        ds4.attrs['area'] = AreaDefinition('test', 'test', 'test',
                                           {'proj': 'merc'}, 4, 4,
                                           (-2000, -2000, 2000, 2000))
        self.ds4_big = ds4
Example #9
scenes_mon2 = [Scene(reader="seviri_l1b_nc", filenames=[f]) for f in data_mon2]
mscn_mon2 = MultiScene(scenes_mon2)

#show all available spectral bands
scenes_mon1[1].all_dataset_names()

#https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-2-msi/level-1c/cloud-masks
#https://gisgeography.com/spectral-signature/

#load the 13.4 um IR channel (IR_134)
mscn_mon1.load(["IR_134"])
mscn_mon2.load(["IR_134"])

#Area definition for the Democratic Republic of the Congo
area_def_kongo = AreaDefinition("Kongo",
                                "A Lambert Azimuthal Equal Area projection of Kongo",
                                "Projection of Kongo", {"proj":"laea", "lat_0":2.5, "lon_0":6},
                                1000, 1000, (4E5, -17E5, 30E5, 8E5))


mscn1_kongo = mscn_mon1.resample(area_def_kongo)
mscn2_kongo = mscn_mon2.resample(area_def_kongo)

blended_scene1 = mscn1_kongo.blend()
blended_scene2 = mscn2_kongo.blend()

#blended_scene1["IR_134"].plot(x="x", y="y")
#blended_scene2["IR_134"].plot(x="x", y="y")

output_sc1 = output_dir / "month1_plot.png"
output_sc2 = output_dir / "month2_plot.png"
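The two output paths are defined but never used in this excerpt; a plausible continuation, assuming the blended scenes expose satpy's Scene.save_dataset:

#save the blended composites (hypothetical continuation)
blended_scene1.save_dataset("IR_134", filename=str(output_sc1))
blended_scene2.save_dataset("IR_134", filename=str(output_sc2))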
Example #10
class TestGradientResampler(unittest.TestCase):
    """Test case for the gradient resampling."""
    def setUp(self):
        """Set up the test case."""
        from pyresample.gradient import GradientSearchResampler
        self.src_area = AreaDefinition('dst', 'dst area', None, {
            'ellps': 'WGS84',
            'h': '35785831',
            'proj': 'geos'
        }, 100, 100, (5550000.0, 5550000.0, -5550000.0, -5550000.0))
        self.src_swath = SwathDefinition(*self.src_area.get_lonlats())
        self.dst_area = AreaDefinition('nrMET3km', 'nrMET3km', None, {
            'proj': 'eqc',
            'lon_0': 0.0,
            'a': R
        }, 360, 180, (-np.pi * R, -np.pi / 2 * R, np.pi * R, np.pi / 2 * R))

        self.resampler = GradientSearchResampler(self.src_area, self.dst_area)
        self.swath_resampler = GradientSearchResampler(self.src_swath,
                                                       self.dst_area)

    @mock.patch('pyresample.gradient.parallel_gradient_search')
    def test_coords_initialization(self, pgsoic):
        """Check that the coordinates get initialized correctly."""
        data = xr.DataArray(da.ones((100, 100), dtype=np.float64),
                            dims=['y', 'x'])
        pgsoic.return_value = da.ones(self.dst_area.shape)
        self.resampler.compute(data, meth='bil')
        cdst_x = self.resampler.dst_x.compute()
        cdst_y = self.resampler.dst_y.compute()
        assert np.isinf(cdst_x[0, 0])
        assert np.isinf(cdst_y[0, 0])
        assert cdst_y[90, 180] == -55285.59156767167
        assert cdst_x[90, 180] == 55656.13605425304
        assert self.resampler.use_input_coords
        pgsoic.assert_called_once_with(data.data[:, :],
                                       self.resampler.src_x,
                                       self.resampler.src_y,
                                       self.resampler.dst_x,
                                       self.resampler.dst_y,
                                       meth='bil')

    def test_resample_area_to_area_2d(self):
        """Resample area to area, 2d."""
        data = xr.DataArray(da.ones((100, 100), dtype=np.float64),
                            dims=['y', 'x'])
        res = self.resampler.compute(
            data, meth='bil').compute(scheduler='single-threaded')
        assert res.shape == self.dst_area.shape
        assert not np.all(np.isnan(res))

    def test_resample_area_to_area_3d(self):
        """Resample area to area, 3d."""
        data = xr.DataArray(da.ones((3, 100, 100), dtype=np.float64) *
                            np.array([1, 2, 3])[:, np.newaxis, np.newaxis],
                            dims=['bands', 'y', 'x'])
        res = self.resampler.compute(
            data, meth='bil').compute(scheduler='single-threaded')
        assert res.shape == (3,) + self.dst_area.shape
        assert not np.all(np.isnan(res))

    def test_resample_swath_to_area_2d(self):
        """Resample swath to area, 2d."""
        data = xr.DataArray(da.ones((100, 100), dtype=np.float64),
                            dims=['y', 'x'])
        res = self.swath_resampler.compute(
            data, meth='bil').compute(scheduler='single-threaded')
        assert res.shape == self.dst_area.shape
        assert not np.all(np.isnan(res))

    def test_resample_swath_to_area_3d(self):
        """Resample area to area, 3d."""
        data = xr.DataArray(da.ones((3, 100, 100), dtype=np.float64) *
                            np.array([1, 2, 3])[:, np.newaxis, np.newaxis],
                            dims=['bands', 'y', 'x'])
        res = self.swath_resampler.compute(
            data, meth='bil').compute(scheduler='single-threaded')
        assert res.shape == (3,) + self.dst_area.shape
        assert not np.all(np.isnan(res))
Example #11
def load(satscene, calibrate=True, area_extent=None, **kwargs):
    """Load MSG SEVIRI data from hdf5 format.
    """

    # Read config file content
    conf = ConfigParser()
    conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
    values = {
        "orbit": satscene.orbit,
        "satname": satscene.satname,
        "number": satscene.number,
        "instrument": satscene.instrument_name,
        "satellite": satscene.fullname
    }

    LOG.info("assume seviri-level4")
    print "... assume seviri-level4"

    satscene.add_to_history("hdf5 data read by mpop/msg_seviri_hdf.py")

    if "reader_level" in kwargs.keys():
        reader_level = kwargs["reader_level"]
    else:
        reader_level = "seviri-level4"

    if "RSS" in kwargs.keys():
        if kwargs["RSS"]:
            dt_end = 4
        else:
            dt_end = 12
    else:
        from my_msg_module import check_RSS
        RSS = check_RSS(satscene.sat_nr(), satscene.time_slot)
        if RSS is None:
            print("*** Error in mpop/satin/msg_seviri_hdf.py")
            print("    satellite MSG", satscene.sat_nr(), "is not active yet")
            quit()
        else:
            if RSS:
                dt_end = 4
            else:
                dt_end = 12

    print "... hdf file name is specified by observation end time"
    print "    assume ", dt_end, " min between start and end time of observation"

    # end of scan: dt_end minutes after the start time
    end_time = satscene.time_slot + datetime.timedelta(minutes=dt_end)

    filename = os.path.join(
        end_time.strftime(conf.get(reader_level, "dir", raw=True)),
        end_time.strftime(conf.get(reader_level, "filename", raw=True)) %
        values)

    print "... search for file: ", filename
    filenames = glob(str(filename))
    if len(filenames) == 0:
        print "*** Error, no file found"
        return  # just return without exit the program
    elif len(filenames) > 1:
        print "*** Warning, more than 1 datafile found: ", filenames
    filename = filenames[0]
    print("... read data from %s" % str(filename))

    # read data from hdf5 file
    data_folder = 'U-MARF/MSG/Level1.5/'

    # Load data from hdf file
    with h5py.File(filename, 'r') as hf:

        subset_info = hf.get(data_folder + 'METADATA/SUBSET')
        for i in range(subset_info.len()):
            #print subset_info[i]['EntryName'], subset_info[i]['Value']
            if subset_info[i][
                    'EntryName'] == "VIS_IRSouthLineSelectedRectangle":
                VIS_IRSouthLine = int(subset_info[i]['Value'])
            if subset_info[i][
                    'EntryName'] == "VIS_IRNorthLineSelectedRectangle":
                VIS_IRNorthLine = int(subset_info[i]['Value'])
            if subset_info[i][
                    'EntryName'] == "VIS_IREastColumnSelectedRectangle":
                VIS_IREastColumn = int(subset_info[i]['Value'])
            if subset_info[i][
                    'EntryName'] == "VIS_IRWestColumnSelectedRectangle":
                VIS_IRWestColumn = int(subset_info[i]['Value'])
            if subset_info[i][
                    'EntryName'] == "HRVLowerNorthLineSelectedRectangle":
                HRVLowerNorthLine = int(subset_info[i]['Value'])
            if subset_info[i][
                    'EntryName'] == "HRVLowerSouthLineSelectedRectangle":
                HRVLowerSouthLine = int(subset_info[i]['Value'])
            if subset_info[i][
                    'EntryName'] == "HRVLowerEastColumnSelectedRectangle":
                HRVLowerEastColumn = int(subset_info[i]['Value'])
            if subset_info[i][
                    'EntryName'] == "HRVLowerWestColumnSelectedRectangle":
                HRVLowerWestColumn = int(subset_info[i]['Value'])
            if subset_info[i][
                    'EntryName'] == "HRVUpperSouthLineSelectedRectangle":
                HRVUpperSouthLine = int(subset_info[i]['Value'])  # 0
            if subset_info[i][
                    'EntryName'] == "HRVUpperNorthLineSelectedRectangle":
                HRVUpperNorthLine = int(subset_info[i]['Value'])  # 0
            if subset_info[i][
                    'EntryName'] == "HRVUpperEastColumnSelectedRectangle":
                HRVUpperEastColumn = int(subset_info[i]['Value'])  # 0
            if subset_info[i][
                    'EntryName'] == "HRVUpperWestColumnSelectedRectangle":
                HRVUpperWestColumn = int(subset_info[i]['Value'])  # 0

        sat_status = hf.get(
            data_folder +
            'METADATA/HEADER/SatelliteStatus/SatelliteStatus_DESCR')
        for i in range(sat_status.len()):  # iterate the status table, not SUBSET
            if sat_status[i][
                    'EntryName'] == "SatelliteDefinition-NominalLongitude":
                sat_lon = sat_status[i]['Value']
                break

        #print 'VIS_IRSouthLine', VIS_IRSouthLine
        #print 'VIS_IRNorthLine', VIS_IRNorthLine
        #print 'VIS_IREastColumn', VIS_IREastColumn
        #print 'VIS_IRWestColumn', VIS_IRWestColumn
        #print 'sat_longitude', sat_lon, type(sat_lon), 'GEOS<'+'{:+06.1f}'.format(sat_lon)+'>'

        if 1 == 0:  # disabled branch, kept for reference
            # works only if all pixels are on the disk
            from msg_pixcoord2area import msg_pixcoord2area
            print "VIS_IRNorthLine, VIS_IRWestColumn, VIS_IRSouthLine, VIS_IREastColumn: ", VIS_IRNorthLine, VIS_IRWestColumn, VIS_IRSouthLine, VIS_IREastColumn
            area_def = msg_pixcoord2area(VIS_IRNorthLine, VIS_IRWestColumn,
                                         VIS_IRSouthLine, VIS_IREastColumn,
                                         "vis", sat_lon)
        else:
            # works also for pixels outside of the disk
            pname = 'GEOS<' + '{:+06.1f}'.format(
                sat_lon) + '>'  # "GEOS<+009.5>"
            proj = {
                'proj': 'geos',
                'a': '6378169.0',
                'b': '6356583.8',
                'h': '35785831.0',
                'lon_0': str(sat_lon)
            }
            aex = (-5570248.4773392612, -5567248.074173444, 5567248.074173444,
                   5570248.4773392612)

            # define full disk projection
            from pyresample.geometry import AreaDefinition
            full_disk_def = AreaDefinition('full_disk', 'full_disk', pname,
                                           proj, 3712, 3712, aex)

            # define name and calculate area for sub-demain
            area_name = 'MSG_' + '{:04d}'.format(
                VIS_IRNorthLine) + '_' + '{:04d}'.format(
                    VIS_IRWestColumn) + '_' + '{:04d}'.format(
                        VIS_IRSouthLine) + '_' + '{:04d}'.format(
                            VIS_IREastColumn)
            aex = full_disk_def.get_area_extent_for_subset(
                3712 - VIS_IRSouthLine, 3712 - VIS_IRWestColumn,
                3712 - VIS_IRNorthLine, 3712 - VIS_IREastColumn)

            area_def = AreaDefinition(
                area_name, area_name, pname, proj,
                (VIS_IRWestColumn - VIS_IREastColumn) + 1,
                (VIS_IRNorthLine - VIS_IRSouthLine) + 1, aex)

        #print area_def
        #print "REGION:", area_def.area_id, "{"
        #print "\tNAME:\t", area_def.name
        #print "\tPCS_ID:\t", area_def.proj_id
        #print ("\tPCS_DEF:\tproj="+area_def.proj_dict['proj']+", lon_0=" + area_def.proj_dict['lon_0'] + ", a="+area_def.proj_dict['a']+", b="+area_def.proj_dict['b']+", h="+area_def.proj_dict['h'])
        #print "\tXSIZE:\t", area_def.x_size
        #print "\tYSIZE:\t", area_def.y_size
        #print "\tAREA_EXTENT:\t", area_def.area_extent
        #print "};"

        # copy area to satscene
        satscene.area = area_def

        # write information used by mipp.xrit.MSG._Calibrator in a fake header file
        hdr = dict()

        # satellite ID number
        hdr["SatelliteDefinition"] = dict()
        hdr["SatelliteDefinition"]["SatelliteId"] = SatelliteIds[str(
            satscene.sat_nr())]

        # processing
        hdr["Level 1_5 ImageProduction"] = dict()
        hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"] = np_array(
            [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2], int)

        # calibration factors
        Level15ImageCalibration = hf.get(
            data_folder +
            'METADATA/HEADER/RadiometricProcessing/Level15ImageCalibration_ARRAY'
        )
        hdr["Level1_5ImageCalibration"] = dict()

        for chn_name in channel_numbers.keys():
            chn_nb = channel_numbers[chn_name] - 1
            hdr["Level1_5ImageCalibration"][chn_nb] = dict()
            #print chn_name, chn_nb, Level15ImageCalibration[chn_nb]['Cal_Slope'], Level15ImageCalibration[chn_nb]['Cal_Offset']
            hdr["Level1_5ImageCalibration"][chn_nb][
                'Cal_Slope'] = Level15ImageCalibration[chn_nb]['Cal_Slope']
            hdr["Level1_5ImageCalibration"][chn_nb][
                'Cal_Offset'] = Level15ImageCalibration[chn_nb]['Cal_Offset']

        # loop over channels to load
        for chn_name in satscene.channels_to_load:

            dataset_name = data_folder + 'DATA/' + dict_channel[
                chn_name] + '/IMAGE_DATA'
            if dataset_name in hf:
                data_tmp = hf.get(data_folder + 'DATA/' +
                                  dict_channel[chn_name] + '/IMAGE_DATA')

                LOG.info('hdr["SatelliteDefinition"]["SatelliteId"]: ' +
                         str(hdr["SatelliteDefinition"]["SatelliteId"]))
                #LOG.info('hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"]', hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"])
                chn_nb = channel_numbers[chn_name] - 1
                LOG.info(
                    'hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Slope"]:  ' +
                    str(hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Slope"]))
                LOG.info(
                    'hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Offset"]: ' +
                    str(hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Offset"]))

                if calibrate:
                    #Calibrator = _Calibrator(hdr, chn_name)
                    bits_per_pixel = 10  ### !!! I have no idea if this is correct !!!
                    Calibrator = _Calibrator(
                        hdr, chn_name,
                        bits_per_pixel)  ## changed call in mipp/xrit/MSG.py
                    data, calibration_unit = Calibrator(data_tmp, calibrate=1)
                else:
                    data = data_tmp
                    calibration_unit = "counts"

                LOG.info(chn_name + " min/max: " + str(data.min()) + "," +
                         str(data.max()) + " " + calibration_unit)

                satscene[chn_name] = ma.asarray(data)

                satscene[chn_name].info['units'] = calibration_unit
                satscene[chn_name].info['satname'] = satscene.satname
                satscene[chn_name].info['satnumber'] = satscene.number
                satscene[chn_name].info[
                    'instrument_name'] = satscene.instrument_name
                satscene[chn_name].info['time'] = satscene.time_slot
                satscene[chn_name].info['is_calibrated'] = True

            else:
                print "*** Warning, no data for channel " + chn_name + " in file " + filename
                data = np_nan
                calibration_unit = ""
                LOG.info("*** Warning, no data for channel " + chn_name +
                         " in file " + filename)
Example #12
class Test(unittest.TestCase):

    adef = AreaDefinition(
        'eurol', 'description', '', {
            'ellps': 'WGS84',
            'lat_0': '90.0',
            'lat_ts': '60.0',
            'lon_0': '0.0',
            'proj': 'stere'
        }, 2560, 2048, (-3780000.0, -7644000.0, 3900000.0, -1500000.0))

    chunks = 2
    lons = da.from_array(np.array([[25., 25.], [25., 25.]]), chunks=chunks)
    lats = da.from_array(np.array([[60., 60.00001], [60.2, 60.3]]),
                         chunks=chunks)

    def setUp(self):
        self.resampler = bucket.BucketResampler(self.adef, self.lons,
                                                self.lats)

    @patch('pyresample.bucket.Proj')
    @patch('pyresample.bucket.BucketResampler._get_indices')
    def test_init(self, get_indices, prj):
        resampler = bucket.BucketResampler(self.adef, self.lons, self.lats)
        get_indices.assert_called_once()
        prj.assert_called_once_with(self.adef.proj_dict)
        self.assertTrue(hasattr(resampler, 'target_area'))
        self.assertTrue(hasattr(resampler, 'source_lons'))
        self.assertTrue(hasattr(resampler, 'source_lats'))
        self.assertTrue(hasattr(resampler, 'x_idxs'))
        self.assertTrue(hasattr(resampler, 'y_idxs'))
        self.assertTrue(hasattr(resampler, 'idxs'))
        self.assertTrue(hasattr(resampler, 'get_sum'))
        self.assertTrue(hasattr(resampler, 'get_count'))
        self.assertTrue(hasattr(resampler, 'get_average'))
        self.assertTrue(hasattr(resampler, 'get_fractions'))
        self.assertIsNone(resampler.counts)

    def test_round_to_resolution(self):
        """Test rounding to given resolution"""
        # Scalar, integer resolution
        self.assertEqual(bucket.round_to_resolution(5.5, 2.), 6)
        # Scalar, non-integer resolution
        self.assertEqual(bucket.round_to_resolution(5.5, 1.7), 5.1)
        # List
        self.assertTrue(
            np.all(
                bucket.round_to_resolution([4.2, 5.6], 2) ==
                np.array([4., 6.])))
        # Numpy array
        self.assertTrue(
            np.all(
                bucket.round_to_resolution(np.array([4.2, 5.6]), 2) ==
                np.array([4., 6.])))
        # Dask array
        self.assertTrue(
            np.all(
                bucket.round_to_resolution(da.array([4.2, 5.6]), 2) ==
                np.array([4., 6.])))

    def test_get_proj_coordinates(self):
        """Test calculation of projection coordinates."""
        prj = MagicMock()
        prj.return_value = ([3.1, 3.1, 3.1], [4.8, 4.8, 4.8])
        lons = [1., 1., 1.]
        lats = [2., 2., 2.]
        x_res, y_res = 0.5, 0.5
        self.resampler.prj = prj
        result = self.resampler._get_proj_coordinates(lons, lats, x_res, y_res)
        prj.assert_called_once_with(lons, lats)
        self.assertTrue(isinstance(result, np.ndarray))
        self.assertEqual(result.shape, (2, 3))
        self.assertTrue(
            np.all(result == np.array([[3., 3., 3.], [5., 5., 5.]])))

    def test_get_bucket_indices(self):
        """Test calculation of array indices."""
        # Ensure nothing is calculated
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            self.resampler._get_indices()
        x_idxs, y_idxs = da.compute(self.resampler.x_idxs,
                                    self.resampler.y_idxs)
        self.assertTrue(np.all(x_idxs == np.array([1709, 1709, 1706, 1705])))
        self.assertTrue(np.all(y_idxs == np.array([465, 465, 458, 455])))

    def test_get_sum(self):
        """Test drop-in-a-bucket sum."""
        data = da.from_array(np.array([[2., 2.], [2., 2.]]),
                             chunks=self.chunks)
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_sum(data)

        result = result.compute()
        # One bin gets two hits of 2.0, so the max sum is 4.0
        self.assertTrue(np.max(result) == 4.)
        # Two bins with the same value
        self.assertEqual(np.sum(result == 2.), 2)
        # One bin with double the value
        self.assertEqual(np.sum(result == 4.), 1)
        self.assertEqual(result.shape, self.adef.shape)

        # Test that also Xarray.DataArrays work
        data = xr.DataArray(data)
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_sum(data)
        # One bin gets two hits of 2.0, so the max sum is 4.0
        self.assertTrue(np.max(result) == 4.)
        # Two bins with the same value
        self.assertEqual(np.sum(result == 2.), 2)
        # One bin with double the value
        self.assertEqual(np.sum(result == 4.), 1)
        self.assertEqual(result.shape, self.adef.shape)

        # Test masking all-NaN bins
        data = da.from_array(np.array([[np.nan, np.nan], [np.nan, np.nan]]),
                             chunks=self.chunks)
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_sum(data, mask_all_nan=True)
        self.assertTrue(np.all(np.isnan(result)))
        # By default all-NaN bins have a value of 0.0
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_sum(data)
        self.assertEqual(np.nanmax(result), 0.0)

    def test_get_count(self):
        """Test drop-in-a-bucket sum."""
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_count()
        result = result.compute()
        self.assertTrue(np.max(result) == 2)
        self.assertEqual(np.sum(result == 1), 2)
        self.assertEqual(np.sum(result == 2), 1)
        self.assertTrue(self.resampler.counts is not None)

    def test_get_average(self):
        """Test averaging bucket resampling."""
        data = da.from_array(np.array([[2., 4.], [3., np.nan]]),
                             chunks=self.chunks)
        # Without pre-calculated indices
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_average(data)
        result = result.compute()
        self.assertEqual(np.nanmax(result), 3.)
        self.assertTrue(np.any(np.isnan(result)))
        # Use a fill value other than np.nan
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_average(data, fill_value=-1)
        result = result.compute()
        self.assertEqual(np.max(result), 3.)
        self.assertEqual(np.min(result), -1)
        self.assertFalse(np.any(np.isnan(result)))

        # Test masking all-NaN bins
        data = da.from_array(np.array([[np.nan, np.nan], [np.nan, np.nan]]),
                             chunks=self.chunks)
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_average(data, mask_all_nan=True)
        self.assertTrue(np.all(np.isnan(result)))
        # By default all-NaN bins have a value of NaN
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_average(data)
        self.assertTrue(np.all(np.isnan(result)))

    def test_resample_bucket_fractions(self):
        """Test fraction calculations for categorical data."""
        data = da.from_array(np.array([[2, 4], [2, 2]]), chunks=self.chunks)
        categories = [1, 2, 3, 4]
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_fractions(data, categories=categories)
        self.assertEqual(set(categories), set(result.keys()))
        res = result[1].compute()
        self.assertTrue(np.nanmax(res) == 0.)
        res = result[2].compute()
        self.assertTrue(np.nanmax(res) == 1.)
        self.assertTrue(np.nanmin(res) == 0.5)
        res = result[3].compute()
        self.assertTrue(np.nanmax(res) == 0.)
        res = result[4].compute()
        self.assertTrue(np.nanmax(res) == 0.5)
        self.assertTrue(np.nanmin(res) == 0.)
        # There should be NaN values
        self.assertTrue(np.any(np.isnan(res)))

        # Use a fill value
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_fractions(data,
                                                  categories=categories,
                                                  fill_value=-1)

        # There should not be any NaN values
        for i in categories:
            res = result[i].compute()
            self.assertFalse(np.any(np.isnan(res)))
            self.assertTrue(np.min(res) == -1)

        # No categories given, need to compute the data once to get
        # the categories
        with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
            result = self.resampler.get_fractions(data, categories=None)
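The tests above and below repeatedly wrap resampler calls in dask.config.set(scheduler=CustomScheduler(max_computes=N)) to fail loudly if dask computes eagerly. A minimal sketch of such a compute-counting scheduler, assuming it mirrors the helper in pyresample's test utilities, could look like this:

import dask


class CustomScheduler:
    """Dask scheduler that raises if more than ``max_computes`` computes occur."""

    def __init__(self, max_computes=1):
        self.max_computes = max_computes
        self.total_computes = 0

    def __call__(self, dsk, keys, **kwargs):
        # dask invokes this once per compute(); count and enforce the budget
        self.total_computes += 1
        if self.total_computes > self.max_computes:
            raise RuntimeError("Too many dask computations: {}".format(
                self.total_computes))
        return dask.get(dsk, keys, **kwargs)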
Example #13
class Test(unittest.TestCase):
    """Test bucket resampler."""

    adef = AreaDefinition(
        'eurol', 'description', '', {
            'ellps': 'WGS84',
            'lat_0': '90.0',
            'lat_ts': '60.0',
            'lon_0': '0.0',
            'proj': 'stere'
        }, 2560, 2048, (-3780000.0, -7644000.0, 3900000.0, -1500000.0))
    chunks = 2
    lons = da.from_array(np.array([[25., 25.], [25., 25.]]), chunks=chunks)
    lats = da.from_array(np.array([[60., 60.00001], [60.2, 60.3]]),
                         chunks=chunks)

    def setUp(self):
        self.resampler = bucket.BucketResampler(self.adef, self.lons,
                                                self.lats)

    @patch('pyresample.bucket.Proj')
    @patch('pyresample.bucket.BucketResampler._get_indices')
    def test_init(self, get_indices, prj):
        resampler = bucket.BucketResampler(self.adef, self.lons, self.lats)
        get_indices.assert_called_once()
        prj.assert_called_once_with(self.adef.proj_dict)
        self.assertTrue(hasattr(resampler, 'target_area'))
        self.assertTrue(hasattr(resampler, 'source_lons'))
        self.assertTrue(hasattr(resampler, 'source_lats'))
        self.assertTrue(hasattr(resampler, 'x_idxs'))
        self.assertTrue(hasattr(resampler, 'y_idxs'))
        self.assertTrue(hasattr(resampler, 'idxs'))
        self.assertTrue(hasattr(resampler, 'get_sum'))
        self.assertTrue(hasattr(resampler, 'get_count'))
        self.assertTrue(hasattr(resampler, 'get_min'))
        self.assertTrue(hasattr(resampler, 'get_max'))
        self.assertTrue(hasattr(resampler, 'get_abs_max'))
        self.assertTrue(hasattr(resampler, 'get_average'))
        self.assertTrue(hasattr(resampler, 'get_fractions'))
        self.assertIsNone(resampler.counts)

    def test_round_to_resolution(self):
        """Test rounding to given resolution."""
        # Scalar, integer resolution
        self.assertEqual(bucket.round_to_resolution(5.5, 2.), 6)
        # Scalar, non-integer resolution
        self.assertEqual(bucket.round_to_resolution(5.5, 1.7), 5.1)
        # List
        self.assertTrue(
            np.all(bucket.round_to_resolution([4.2, 5.6], 2) ==
                   np.array([4., 6.])))
        # Numpy array
        self.assertTrue(
            np.all(bucket.round_to_resolution(np.array([4.2, 5.6]), 2) ==
                   np.array([4., 6.])))
        # Dask array
        self.assertTrue(
            np.all(bucket.round_to_resolution(da.array([4.2, 5.6]), 2) ==
                   np.array([4., 6.])))

    def test_get_proj_coordinates(self):
        """Test calculation of projection coordinates."""
        prj = MagicMock()
        prj.return_value = ([3.1, 3.1, 3.1], [4.8, 4.8, 4.8])
        lons = [1., 1., 1.]
        lats = [2., 2., 2.]
        self.resampler.prj = prj
        result = self.resampler._get_proj_coordinates(lons, lats)
        prj.assert_called_once_with(lons, lats)
        self.assertTrue(isinstance(result, np.ndarray))
        np.testing.assert_equal(result,
                                np.array([[3.1, 3.1, 3.1], [4.8, 4.8, 4.8]]))

    def test_get_bucket_indices(self):
        """Test calculation of array indices."""
        # Ensure nothing is calculated
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            self.resampler._get_indices()
        x_idxs, y_idxs = da.compute(self.resampler.x_idxs,
                                    self.resampler.y_idxs)
        np.testing.assert_equal(x_idxs, np.array([1710, 1710, 1707, 1705]))
        np.testing.assert_equal(y_idxs, np.array([465, 465, 459, 455]))

        # Additional small test case
        adef = create_area_def(area_id='test',
                               projection={'proj': 'latlong'},
                               width=2,
                               height=2,
                               center=(0, 0),
                               resolution=10)
        lons = da.from_array(np.array(
            [-10.0, -9.9, -0.1, 0, 0.1, 9.9, 10.0, -10.1, 0]),
                             chunks=2)
        lats = da.from_array(np.array(
            [-10.0, -9.9, -0.1, 0, 0.1, 9.9, 10.0, 0, 10.1]),
                             chunks=2)
        resampler = bucket.BucketResampler(source_lats=lats,
                                           source_lons=lons,
                                           target_area=adef)
        resampler._get_indices()
        np.testing.assert_equal(resampler.x_idxs,
                                np.array([-1, 0, 0, 1, 1, 1, -1, -1, -1]))
        np.testing.assert_equal(resampler.y_idxs,
                                np.array([-1, 1, 1, 1, 0, 0, -1, -1, -1]))

    def _get_sum_result(self, data, **kwargs):
        """Compute the bucket average with kwargs and check that no dask computation is performed."""
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_sum(data, **kwargs)
        return result.compute()

    def test_get_sum_valid_data(self):
        """Test drop-in-a-bucket sum for valid data input."""
        data = da.from_array(np.array([[2., 3.], [7., 16.]]),
                             chunks=self.chunks)

        result = self._get_sum_result(data)

        # first two values are in same bin
        self.assertEqual(np.count_nonzero(result == 5), 1)
        # others are in separate bins
        self.assertEqual(np.count_nonzero(result == 7), 1)
        self.assertEqual(np.count_nonzero(result == 16), 1)

        self.assertEqual(result.shape, self.adef.shape)

        # Test that also xarray.DataArrays work (same output)
        data = xr.DataArray(data)
        np.testing.assert_array_equal(result, self._get_sum_result(data))

    def test_get_sum_nan_data_skipna_false(self):
        """Test drop-in-a-bucket sum for data input with nan and skipna False."""
        data = da.from_array(np.array([[2., np.nan], [5., np.nan]]),
                             chunks=self.chunks)

        result = self._get_sum_result(data, skipna=False)
        # 2 + nan is nan, all-nan bin is nan
        self.assertEqual(np.count_nonzero(np.isnan(result)), 2)
        # rest is 0
        self.assertEqual(np.nanmin(result), 0)

    def test_get_sum_nan_data_skipna_true(self):
        """Test drop-in-a-bucket sum for data input with nan and skipna True."""
        data = da.from_array(np.array([[2., np.nan], [5., np.nan]]),
                             chunks=self.chunks)

        result = self._get_sum_result(data, skipna=True)
        # 2 + nan is 2
        self.assertEqual(np.count_nonzero(result == 2.), 1)
        # all-nan and rest is 0
        self.assertEqual(np.count_nonzero(np.isnan(result)), 0)
        self.assertEqual(np.nanmin(result), 0)

    def test_get_count(self):
        """Test drop-in-a-bucket sum."""
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_count()
        result = result.compute()
        self.assertTrue(np.max(result) == 2)
        self.assertEqual(np.sum(result == 1), 2)
        self.assertEqual(np.sum(result == 2), 1)
        self.assertTrue(self.resampler.counts is not None)

    def _get_min_result(self, data, **kwargs):
        """Compute the bucket average with kwargs and check that no dask computation is performed."""
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_min(data, **kwargs)
        return result.compute()

    def test_get_min(self):
        """Test min bucket resampling."""
        data = da.from_array(np.array([[2, 11], [5, np.nan]]),
                             chunks=self.chunks)
        result = self._get_min_result(data)
        # test multiple entries minimum
        self.assertEqual(np.count_nonzero(result == 2), 1)
        # test single entry minimum
        self.assertEqual(np.count_nonzero(result == 5), 1)
        # test that minimum of bucket with only nan is nan, and empty buckets are nan
        self.assertEqual(np.count_nonzero(~np.isnan(result)), 2)

    def _get_max_result(self, data, **kwargs):
        """Compute the bucket max with kwargs and check that no dask computation is performed."""
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_max(data, **kwargs)
        return result.compute()

    def test_get_max(self):
        """Test max bucket resampling."""
        data = da.from_array(np.array([[2, 11], [5, np.nan]]),
                             chunks=self.chunks)
        result = self._get_max_result(data)
        # test multiple entries maximum
        self.assertEqual(np.count_nonzero(result == 11), 1)
        # test single entry maximum
        self.assertEqual(np.count_nonzero(result == 5), 1)
        # test that maximum of bucket with only nan is nan, and empty buckets are nan
        self.assertEqual(np.count_nonzero(~np.isnan(result)), 2)

    def test_get_abs_max(self):
        """Test abs max bucket resampling."""
        data = da.from_array(np.array([[2, -11], [5, np.nan]]),
                             chunks=self.chunks)
        result = self._get_abs_max_result(data)
        # test multiple entries absolute maximum
        self.assertEqual(np.count_nonzero(result == -11), 1)
        # test single entry maximum
        self.assertEqual(np.count_nonzero(result == 5), 1)
        # test that absolute maximum of bucket with only nan is nan, and empty buckets are nan
        self.assertEqual(np.count_nonzero(~np.isnan(result)), 2)

    def _get_abs_max_result(self, data, **kwargs):
        """Compute the bucket abs max with kwargs and check that no dask computation is performed."""
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_abs_max(data, **kwargs)
        return result.compute()

    def _get_average_result(self, data, **kwargs):
        """Compute the bucket average with kwargs and check that no dask computation is performed."""
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_average(data, **kwargs)
        return result.compute()

    def test_get_average_basic(self):
        """Test averaging bucket resampling."""
        data = da.from_array(np.array([[2, 11], [5, np.nan]]),
                             chunks=self.chunks)
        result = self._get_average_result(data)
        # test multiple entries average
        self.assertEqual(np.count_nonzero(result == 6.5), 1)
        # test single entry average
        self.assertEqual(np.count_nonzero(result == 5), 1)
        # test that average of bucket with only nan is nan, and empty buckets are nan
        self.assertEqual(np.count_nonzero(~np.isnan(result)), 2)

    def test_get_average_with_fillvalue_for_output(self):
        """Test averaging bucket resampling with defined fill_value for output."""
        data = da.from_array(np.array([[2, 11], [5, np.nan]]),
                             chunks=self.chunks)
        # test fill_value other than np.nan
        result = self._get_average_result(data, fill_value=-1)
        # check that all empty buckets are fill_value
        self.assertEqual(np.count_nonzero(result != -1), 2)

    def test_get_average_skipna_true(self):
        """Test averaging bucket resampling with skipna True."""
        # test skipna
        data = da.from_array(np.array([[2, np.nan], [np.nan, np.nan]]),
                             chunks=self.chunks)
        result = self._get_average_result(data, skipna=True)
        # test that average of 2 and np.nan is 2 for skipna=True
        self.assertEqual(np.count_nonzero(result == 2), 1)

    def test_get_average_skipna_false(self):
        """Test averaging bucket resampling with skipna False."""
        data = da.from_array(np.array([[2, np.nan], [np.nan, np.nan]]),
                             chunks=self.chunks)
        result = self._get_average_result(data, skipna=False)
        # test that average of 2 and np.nan is nan for skipna=False
        self.assertTrue(np.all(np.isnan(result)))

    def test_get_average_only_nan_input(self):
        """Test averaging bucket resampling with only NaN as input."""
        data = da.from_array(np.array([[np.nan, np.nan], [np.nan, np.nan]]),
                             chunks=self.chunks)
        result = self._get_average_result(data, skipna=True)
        # test that average of np.nan and np.nan is np.nan for both skipna
        self.assertTrue(np.all(np.isnan(result)))
        np.testing.assert_array_equal(
            result, self._get_average_result(data, skipna=False))

    def test_get_average_with_fill_value_in_input(self):
        """Test averaging bucket resampling with fill_value in input and skipna True."""
        # test that fill_value in input is recognised as missing value
        data = da.from_array(np.array([[2, -1], [-1, np.nan]]),
                             chunks=self.chunks)
        result = self._get_average_result(data, fill_value=-1, skipna=True)
        # test that average of 2 and -1 (missing value) is 2
        self.assertEqual(np.count_nonzero(result == 2), 1)
        # test than all other buckets are -1
        self.assertEqual(np.count_nonzero(result != -1), 1)

    def test_resample_bucket_fractions(self):
        """Test fraction calculations for categorical data."""
        data = da.from_array(np.array([[2, 4], [2, 2]]), chunks=self.chunks)
        categories = [1, 2, 3, 4]
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_fractions(data, categories=categories)
        self.assertEqual(set(categories), set(result.keys()))
        res = result[1].compute()
        self.assertTrue(np.nanmax(res) == 0.)
        res = result[2].compute()
        self.assertTrue(np.nanmax(res) == 1.)
        self.assertTrue(np.nanmin(res) == 0.5)
        res = result[3].compute()
        self.assertTrue(np.nanmax(res) == 0.)
        res = result[4].compute()
        self.assertTrue(np.nanmax(res) == 0.5)
        self.assertTrue(np.nanmin(res) == 0.)
        # There should be NaN values
        self.assertTrue(np.any(np.isnan(res)))

        # Use a fill value
        with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
            result = self.resampler.get_fractions(data,
                                                  categories=categories,
                                                  fill_value=-1)

        # There should not be any NaN values
        for i in categories:
            res = result[i].compute()
            self.assertFalse(np.any(np.isnan(res)))
            self.assertTrue(np.min(res) == -1)

        # No categories given, need to compute the data once to get
        # the categories
        with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
            _ = self.resampler.get_fractions(data, categories=None)
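Condensed from the tests above, a typical end-to-end use of BucketResampler looks like the sketch below. The grid, lon/lat values and chunking are illustrative only, and the skipna/fill_value keywords assume the newer API exercised in this example:

import dask.array as da
import numpy as np
from pyresample import create_area_def
from pyresample.bucket import BucketResampler

# Hypothetical 2x2 target grid spanning 20 degrees around (0, 0)
target = create_area_def('grid', {'proj': 'latlong'},
                         width=2, height=2, center=(0, 0), resolution=10)
lons = da.from_array(np.array([[0., 1.], [2., 3.]]), chunks=2)
lats = da.from_array(np.array([[0., 1.], [2., 3.]]), chunks=2)
data = da.from_array(np.array([[2., 4.], [3., np.nan]]), chunks=2)

resampler = BucketResampler(target, lons, lats)
counts = resampler.get_count().compute()               # hits per bin
sums = resampler.get_sum(data, skipna=True).compute()  # NaN-aware bin sums
means = resampler.get_average(data, fill_value=-1).compute()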
Example #14
class TestGradientResampler(unittest.TestCase):
    """Test case for the gradient resampling."""
    def setUp(self):
        """Set up the test case."""
        from pyresample.gradient import GradientSearchResampler
        self.src_area = AreaDefinition('src', 'src area', None, {
            'ellps': 'WGS84',
            'h': '35785831',
            'proj': 'geos'
        }, 100, 100, (5550000.0, 5550000.0, -5550000.0, -5550000.0))
        self.src_swath = SwathDefinition(*self.src_area.get_lonlats())
        self.dst_area = AreaDefinition(
            'euro40', 'euro40', None, {
                'proj': 'stere',
                'lon_0': 14.0,
                'lat_0': 90.0,
                'lat_ts': 60.0,
                'ellps': 'bessel'
            }, 102, 102, (-2717181.7304994687, -5571048.14031214,
                          1378818.2695005313, -1475048.1403121399))

        self.resampler = GradientSearchResampler(self.src_area, self.dst_area)
        self.swath_resampler = GradientSearchResampler(self.src_swath,
                                                       self.dst_area)

    def test_get_projection_coordinates_area_to_area(self):
        """Check that the coordinates are initialized, for area -> area."""
        assert self.resampler.prj is None
        self.resampler._get_projection_coordinates((10, 10))
        cdst_x = self.resampler.dst_x.compute()
        cdst_y = self.resampler.dst_y.compute()
        assert np.allclose(np.min(cdst_x), -2022632.1675016289)
        assert np.allclose(np.max(cdst_x), 2196052.591296284)
        assert np.allclose(np.min(cdst_y), 3517933.413092212)
        assert np.allclose(np.max(cdst_y), 5387038.893400168)
        assert self.resampler.use_input_coords
        assert self.resampler.prj is not None

    def test_get_projection_coordinates_swath_to_area(self):
        """Check that the coordinates are initialized, for swath -> area."""
        assert self.swath_resampler.prj is None
        self.swath_resampler._get_projection_coordinates((10, 10))
        cdst_x = self.swath_resampler.dst_x.compute()
        cdst_y = self.swath_resampler.dst_y.compute()
        assert np.allclose(np.min(cdst_x), -2697103.29912692)
        assert np.allclose(np.max(cdst_x), 1358739.8381279823)
        assert np.allclose(np.min(cdst_y), -5550969.708939591)
        assert np.allclose(np.max(cdst_y), -1495126.5716846888)
        assert self.swath_resampler.use_input_coords is False
        assert self.swath_resampler.prj is not None

    def test_get_gradients(self):
        """Test that coordinate gradients are computed correctly."""
        self.resampler._get_projection_coordinates((10, 10))
        assert self.resampler.src_gradient_xl is None
        self.resampler._get_gradients()
        assert self.resampler.src_gradient_xl.compute().max() == 0.0
        assert self.resampler.src_gradient_xp.compute().max() == -111000.0
        assert self.resampler.src_gradient_yl.compute().max() == 111000.0
        assert self.resampler.src_gradient_yp.compute().max() == 0.0

    def test_get_chunk_mappings(self):
        """Test that chunk overlap, and source and target slices are correct."""
        chunks = (10, 10)
        num_chunks = np.prod(chunks)
        self.resampler._get_projection_coordinates(chunks)
        self.resampler._get_gradients()
        assert self.resampler.coverage_status is None
        self.resampler.get_chunk_mappings()
        # 8 source chunks overlap the target area
        covered_src_chunks = np.array([38, 39, 48, 49, 58, 59, 68, 69])
        res = np.where(self.resampler.coverage_status)[0]
        assert np.all(res == covered_src_chunks)
        # All *num_chunks* should have values in the lists
        assert len(self.resampler.coverage_status) == num_chunks
        assert len(self.resampler.src_slices) == num_chunks
        assert len(self.resampler.dst_slices) == num_chunks
        assert len(self.resampler.dst_mosaic_locations) == num_chunks
        # There's only one output chunk, and the covered source chunks
        # should have destination locations of (0, 0)
        res = np.array(self.resampler.dst_mosaic_locations)[covered_src_chunks]
        assert all([all(loc == (0, 0)) for loc in list(res)])

    def test_get_src_poly_area(self):
        """Test defining source chunk polygon for AreaDefinition."""
        chunks = (10, 10)
        self.resampler._get_projection_coordinates(chunks)
        self.resampler._get_gradients()
        poly = self.resampler._get_src_poly(0, 40, 0, 40)
        assert np.allclose(poly.area, 12365358458842.43)

    def test_get_src_poly_swath(self):
        """Test defining source chunk polygon for SwathDefinition."""
        chunks = (10, 10)
        self.swath_resampler._get_projection_coordinates(chunks)
        self.swath_resampler._get_gradients()
        # Swath area defs can't be sliced, so False is returned
        poly = self.swath_resampler._get_src_poly(0, 40, 0, 40)
        assert poly is False

    @mock.patch('pyresample.gradient.get_polygon')
    def test_get_dst_poly(self, get_polygon):
        """Test defining destination chunk polygon."""
        chunks = (10, 10)
        self.resampler._get_projection_coordinates(chunks)
        self.resampler._get_gradients()
        # First call should make a call to get_polygon()
        self.resampler._get_dst_poly('idx1', 0, 10, 0, 10)
        assert get_polygon.call_count == 1
        assert 'idx1' in self.resampler.dst_polys
        # The second call to the same index should come from cache
        self.resampler._get_dst_poly('idx1', 0, 10, 0, 10)
        assert get_polygon.call_count == 1

        # Swath defs raise AttributeError, and False is returned
        get_polygon.side_effect = AttributeError
        self.resampler._get_dst_poly('idx2', 0, 10, 0, 10)
        assert self.resampler.dst_polys['idx2'] is False

    def test_filter_data(self):
        """Test filtering chunks that do not overlap."""
        chunks = (10, 10)
        self.resampler._get_projection_coordinates(chunks)
        self.resampler._get_gradients()
        self.resampler.get_chunk_mappings()

        # Basic filtering.  There should be 8 dask arrays that each
        # have a shape of (10, 10)
        res = self.resampler._filter_data(self.resampler.src_x)
        valid = [itm for itm in res if itm is not None]
        assert len(valid) == 8
        shapes = [arr.shape for arr in valid]
        for shp in shapes:
            assert shp == (10, 10)

        # Destination x/y coordinate array filtering.  Again, 8 dask
        # arrays each with shape (102, 102)
        res = self.resampler._filter_data(self.resampler.dst_x, is_src=False)
        valid = [itm for itm in res if itm is not None]
        assert len(valid) == 8
        shapes = [arr.shape for arr in valid]
        for shp in shapes:
            assert shp == (102, 102)

        # Add a dimension to the given dataset
        data = da.random.random(self.src_area.shape)
        res = self.resampler._filter_data(data, add_dim=True)
        valid = [itm for itm in res if itm is not None]
        assert len(valid) == 8
        shapes = [arr.shape for arr in valid]
        for shp in shapes:
            assert shp == (1, 10, 10)

        # 1D and 3+D should raise NotImplementedError
        data = da.random.random((3, ))
        with self.assertRaises(NotImplementedError):
            self.resampler._filter_data(data, add_dim=True)
        data = da.random.random((3, 3, 3, 3))
        with self.assertRaises(NotImplementedError):
            self.resampler._filter_data(data, add_dim=True)

    def test_resample_area_to_area_2d(self):
        """Resample area to area, 2d."""
        data = xr.DataArray(da.ones(self.src_area.shape, dtype=np.float64),
                            dims=['y', 'x'])
        res = self.resampler.compute(
            data, method='bil').compute(scheduler='single-threaded')
        assert res.shape == self.dst_area.shape
        assert np.allclose(res, 1)

    def test_resample_area_to_area_2d_fill_value(self):
        """Resample area to area, 2d, use fill value."""
        data = xr.DataArray(da.full(self.src_area.shape,
                                    np.nan,
                                    dtype=np.float64),
                            dims=['y', 'x'])
        res = self.resampler.compute(
            data, method='bil',
            fill_value=2.0).compute(scheduler='single-threaded')
        assert res.shape == self.dst_area.shape
        assert np.allclose(res, 2.0)

    def test_resample_area_to_area_3d(self):
        """Resample area to area, 3d."""
        data = xr.DataArray(da.ones(
            (3, ) + self.src_area.shape, dtype=np.float64) *
                            np.array([1, 2, 3])[:, np.newaxis, np.newaxis],
                            dims=['bands', 'y', 'x'])
        res = self.resampler.compute(
            data, method='bil').compute(scheduler='single-threaded')
        assert res.shape == (3, ) + self.dst_area.shape
        assert np.allclose(res[0, :, :], 1.0)
        assert np.allclose(res[1, :, :], 2.0)
        assert np.allclose(res[2, :, :], 3.0)

    def test_resample_area_to_area_3d_single_channel(self):
        """Resample area to area, 3d with only a single band."""
        data = xr.DataArray(da.ones((1, ) + self.src_area.shape,
                                    dtype=np.float64),
                            dims=['bands', 'y', 'x'])
        res = self.resampler.compute(
            data, method='bil').compute(scheduler='single-threaded')
        assert res.shape == (1, ) + self.dst_area.shape
        assert np.allclose(res[0, :, :], 1.0)

    def test_resample_swath_to_area_2d(self):
        """Resample swath to area, 2d."""
        data = xr.DataArray(da.ones(self.src_swath.shape, dtype=np.float64),
                            dims=['y', 'x'])
        res = self.swath_resampler.compute(
            data, method='bil').compute(scheduler='single-threaded')
        assert res.shape == self.dst_area.shape
        assert not np.all(np.isnan(res))

    def test_resample_swath_to_area_3d(self):
        """Resample area to area, 3d."""
        data = xr.DataArray(da.ones(
            (3, ) + self.src_swath.shape, dtype=np.float64) *
                            np.array([1, 2, 3])[:, np.newaxis, np.newaxis],
                            dims=['bands', 'y', 'x'])
        res = self.swath_resampler.compute(
            data, method='bil').compute(scheduler='single-threaded')
        assert res.shape == (3, ) + self.dst_area.shape
        for i in range(res.shape[0]):
            arr = np.ravel(res[i, :, :])
            assert np.allclose(arr[np.isfinite(arr)], float(i + 1))
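The gradient tests boil down to a simple pattern: build a GradientSearchResampler from source and target geometries, then call compute() with an xarray.DataArray carrying 'y'/'x' (and optionally 'bands') dimensions. A minimal sketch reusing the areas from setUp above:

import dask.array as da
import numpy as np
import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.gradient import GradientSearchResampler

src_area = AreaDefinition('src', 'src area', None,
                          {'ellps': 'WGS84', 'h': '35785831', 'proj': 'geos'},
                          100, 100,
                          (5550000.0, 5550000.0, -5550000.0, -5550000.0))
dst_area = AreaDefinition('euro40', 'euro40', None,
                          {'proj': 'stere', 'lon_0': 14.0, 'lat_0': 90.0,
                           'lat_ts': 60.0, 'ellps': 'bessel'},
                          102, 102,
                          (-2717181.7304994687, -5571048.14031214,
                           1378818.2695005313, -1475048.1403121399))

resampler = GradientSearchResampler(src_area, dst_area)
data = xr.DataArray(da.ones(src_area.shape, dtype=np.float64),
                    dims=['y', 'x'])
res = resampler.compute(data, method='bil').compute()  # bilinear; shape == dst_area.shape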
Example #15
    25.,
    'lat_2':
    25.
})
new_scn = global_scene.resample(my_area)
new_scn.save_dataset(10.5)

global_scene['I05'].area
global_scene.load(['I05'])

rs_scn = global_scene.resample("euro4")
rs_scn.save_dataset(10.5)

from pyresample.geometry import AreaDefinition

my_area = AreaDefinition("nebraska")

import numpy as np
import os
import mpop
from mpop.satellites import PolarFactory
from datetime import datetime
import glob

input_folder = r"K:\Project_OpenET\PV_VIIRS_Test_Run\Input\VIIRS_RAW"
output_folder = r"K:\Project_OpenET\PV_VIIRS_Test_Run\Input\VIIRS_UTM"
os.chdir(input_folder)
re = glob.glob("GITCO_*.h5")

for filename in re[:]:
Example #16
    48.28641356, 49.55596283, 45.21769275, 43.95449327, 30.04053601,
    22.33028017, 13.90584249, -5.59290326, -7.75625031
],
                 dtype='float64')

LATS3 = np.array([
    66.94713585, 67.07854554, 66.53108388, 65.27837805, 63.50223596,
    58.33858588, 57.71210872, 55.14964148, 55.72506407, 60.40889798,
    61.99561474, 63.11425455, 63.67173255, 63.56939058
],
                 dtype='float64')

AREA_DEF_EURON1 = AreaDefinition('euron1', 'Northern Europe - 1km', '', {
    'proj': 'stere',
    'ellps': 'WGS84',
    'lat_0': 90.0,
    'lon_0': 0.0,
    'lat_ts': 60.0
}, 3072, 3072, (-1000000.0, -4500000.0, 2072000.0, -1428000.0))


def assertNumpyArraysEqual(self, other):
    if self.shape != other.shape:
        raise AssertionError("Shapes don't match")
    if not np.allclose(self, other):
        raise AssertionError("Elements don't match!")


def get_n20_orbital():
    """Return the orbital instance for a given set of TLEs for NOAA-20.
    From 16 October 2018.
Example #17
def show_sat_perspective3(hrit_files,
                          central_lat,
                          central_lon,
                          elevation,
                          time,
                          save_path,
                          fov,
                          shape,
                          proj,
                          projection_parameters,
                          composite=None,
                          fov_deg=True):
    """Shows in Jupyter Notebook results of pictures seen from sat
    Parameters
        Array of saved on disc files.
        :param save_path:
        :param composite:
        :param time:
        :param elevation:
        :param central_lon:
        :param central_lat:
        :param hrit_files:

    """
    # TO DO: Add local earth radius
    from satpy.scene import Scene
    import math
    from pyresample.geometry import AreaDefinition, SwathDefinition
    from pyresample import create_area_def

    if composite is None:
        composite = 'realistic_colors'

    if fov_deg:
        fov = [fov[0] * math.pi / 180, fov[1] * math.pi / 180]

    # lla = mat.find_sourounding_list(earth_rads, latitudes, longitudes, elevations, fov)

    area_defs = []
    for i in range(0, len(central_lon)):
        altitude = elevation[i]
        rad = satellite_info(6371228, elevation[i], fov[0], fov[1])[5] / 2
        lat_0, lon_0 = central_lat[i], central_lon[i]
        lat_1, lat_2 = central_lat[i], central_lon[i]
        center = (central_lat[i], central_lon[i])
        radius = (rad, rad)
        area_id = 'wrf_circle'
        proj_dict = {'proj': proj, 'lat_0': lat_0, 'lon_0': lon_0,
                     'lat_1': lat_1, 'lat_2': lat_2,
                     'a': 6370000, 'b': 6370000, 'h': altitude,
                     'azi': projection_parameters[0],
                     'tilt': projection_parameters[1]}
        area_defs.append(
            AreaDefinition.from_circle(area_id,
                                       proj_dict,
                                       center,
                                       radius=radius,
                                       shape=shape))
        # area_def.append(AreaDefinition.create_area_def(area_id, proj_dict, center, radius=radius, shape=shape))

    files = return_files(time, hrit_files)

    scn = Scene(filenames=files)
    scn.load([composite])
    new_scn = scn

    for i, area_def in enumerate(area_defs):
        local_scn = scn.resample(area_def, radius_of_influence=50000)
        local_scn.show(composite)
        path = save_path + proj + str(projection_parameters[0]) + '_' + str(
            projection_parameters[1]) + '_' + str(shape[0]) + '_' + str(
                composite) + '_' + '_{date:%Y-%m-%d_%H_%M_%S}'.format(
                    date=scn.start_time) + '/' + str(i) + '.png'
        local_scn.save_dataset(composite,
                               path,
                               writer='simple_image',
                               num_threads=8)

    # if save_path:
    #     if (isinstance(load_photo[0], float)):
    #         photo_type = str(load_photo[0])
    #     else:
    #         photo_type = load_photo[0]
    #     name = photo_path + photo_type + pro_name + '_{date:%Y-%m-%d_%H_%M_%S}.png'.format(date=scn.start_time)
    #     plt.savefig(name, dpi=dpi)
    # if not save_path:
    #     plt.show()

    return ()
Example #18
def get_test_data(input_shape=(100, 50),
                  output_shape=(200, 100),
                  output_proj=None,
                  input_dims=('y', 'x')):
    """Get common data objects used in testing.

    Returns: tuple with the following elements
        input_data_on_area: DataArray with dimensions as if it is a gridded
            dataset.
        input_area_def: AreaDefinition of the above DataArray
        input_data_on_swath: DataArray with dimensions as if it is a swath.
        input_swath: SwathDefinition of the above DataArray
        target_area_def: AreaDefinition to be used as a target for resampling

    """
    from xarray import DataArray
    import dask.array as da
    from pyresample.geometry import AreaDefinition, SwathDefinition
    from pyresample.utils import proj4_str_to_dict
    ds1 = DataArray(da.zeros(input_shape, chunks=85),
                    dims=input_dims,
                    attrs={
                        'name': 'test_data_name',
                        'test': 'test'
                    })
    if input_dims and 'y' in input_dims:
        ds1 = ds1.assign_coords(y=da.arange(input_shape[-2], chunks=85))
    if input_dims and 'x' in input_dims:
        ds1 = ds1.assign_coords(x=da.arange(input_shape[-1], chunks=85))
    if input_dims and 'bands' in input_dims:
        ds1 = ds1.assign_coords(bands=list('RGBA'[:ds1.sizes['bands']]))

    input_proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 '
                      '+b=6356752.31414 +sweep=x +units=m +no_defs')
    source = AreaDefinition(
        'test_target',
        'test_target',
        'test_target',
        proj4_str_to_dict(input_proj_str),
        input_shape[1],  # width
        input_shape[0],  # height
        (-1000., -1500., 1000., 1500.))
    ds1.attrs['area'] = source
    if CRS is not None:
        crs = CRS.from_string(input_proj_str)
        ds1 = ds1.assign_coords(crs=crs)

    ds2 = ds1.copy()
    input_area_shape = tuple(ds1.sizes[dim] for dim in ds1.dims
                             if dim in ['y', 'x'])
    geo_dims = ('y', 'x') if input_dims else None
    lons = da.random.random(input_area_shape, chunks=50)
    lats = da.random.random(input_area_shape, chunks=50)
    swath_def = SwathDefinition(DataArray(lons, dims=geo_dims),
                                DataArray(lats, dims=geo_dims))
    ds2.attrs['area'] = swath_def
    if CRS is not None:
        crs = CRS.from_string('+proj=latlong +datum=WGS84 +ellps=WGS84')
        ds2 = ds2.assign_coords(crs=crs)

    # set up target definition
    output_proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 '
                       '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs')
    output_proj_str = output_proj or output_proj_str
    target = AreaDefinition(
        'test_target',
        'test_target',
        'test_target',
        proj4_str_to_dict(output_proj_str),
        output_shape[1],  # width
        output_shape[0],  # height
        (-1000., -1500., 1000., 1500.),
    )
    return ds1, source, ds2, swath_def, target
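A short sketch of how the helper above might be consumed in a test; the unpacking order follows the docstring, and the asserted shapes are simply the defaults:

# Hypothetical consumer of get_test_data() as defined above
(input_data_on_area, input_area_def, input_data_on_swath,
 input_swath, target_area_def) = get_test_data()

assert input_data_on_area.shape == (100, 50)
assert input_data_on_area.attrs['area'] is input_area_def
assert input_data_on_swath.attrs['area'] is input_swath
assert target_area_def.shape == (200, 100)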
Example #19
import numpy as np

from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
from posttroll import message
from posttroll.ns import NameServer
import pytest

satpy = pytest.importorskip("satpy")  # noqa
import pytroll_collectors.global_mosaic as gm

THIS_DIR = os.path.dirname(os.path.abspath(__file__))

ADEF = AreaDefinition("EPSG4326", "EPSG:4326", "EPSG:4326",
                      proj4_str_to_dict("init=EPSG:4326"), 200, 100,
                      (-180., -90., 180., 90.))


class TestWorldCompositeDaemon(unittest.TestCase):
    """Test the world composite daemon."""

    adef = ADEF

    tslot = dt.datetime(2016, 10, 12, 12, 0)
    # Images from individual satellites
    sat_fnames = [
        os.path.join(THIS_DIR, "data", fname) for fname in [
            "20161012_1200_GOES-15_EPSG4326_wv.png",
            "20161012_1200_GOES-13_EPSG4326_wv.png",
            "20161012_1200_Meteosat-10_EPSG4326_wv.png",
Example #20
    def test_area_def_coordinates(self):
        """Test coordinates being added with an AreaDefinition."""
        import numpy as np
        import dask.array as da
        import xarray as xr
        from pyresample.geometry import AreaDefinition
        from satpy.resample import add_crs_xy_coords
        area_def = AreaDefinition('test', 'test', 'test', {
            'proj': 'lcc',
            'lat_1': 25,
            'lat_0': 25
        }, 100, 200, [-100, -100, 100, 100])
        data_arr = xr.DataArray(
            da.zeros((200, 100), chunks=50),
            attrs={'area': area_def},
            dims=('y', 'x'),
        )
        new_data_arr = add_crs_xy_coords(data_arr, area_def)
        self.assertIn('y', new_data_arr.coords)
        self.assertIn('x', new_data_arr.coords)

        if CRS is not None:
            self.assertIn('units', new_data_arr.coords['y'].attrs)
            self.assertEqual(new_data_arr.coords['y'].attrs['units'], 'meter')
            self.assertIn('units', new_data_arr.coords['x'].attrs)
            self.assertEqual(new_data_arr.coords['x'].attrs['units'], 'meter')
            self.assertIn('crs', new_data_arr.coords)
            self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS)

        # already has coords
        data_arr = xr.DataArray(da.zeros((200, 100), chunks=50),
                                attrs={'area': area_def},
                                dims=('y', 'x'),
                                coords={
                                    'y': np.arange(2, 202),
                                    'x': np.arange(100)
                                })
        new_data_arr = add_crs_xy_coords(data_arr, area_def)
        self.assertIn('y', new_data_arr.coords)
        self.assertNotIn('units', new_data_arr.coords['y'].attrs)
        self.assertIn('x', new_data_arr.coords)
        self.assertNotIn('units', new_data_arr.coords['x'].attrs)
        np.testing.assert_equal(new_data_arr.coords['y'], np.arange(2, 202))

        if CRS is not None:
            self.assertIn('crs', new_data_arr.coords)
            self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS)

        # lat/lon area
        area_def = AreaDefinition('test', 'test', 'test', {'proj': 'latlong'},
                                  100, 200, [-100, -100, 100, 100])
        data_arr = xr.DataArray(
            da.zeros((200, 100), chunks=50),
            attrs={'area': area_def},
            dims=('y', 'x'),
        )
        new_data_arr = add_crs_xy_coords(data_arr, area_def)
        self.assertIn('y', new_data_arr.coords)
        self.assertIn('x', new_data_arr.coords)

        if CRS is not None:
            self.assertIn('units', new_data_arr.coords['y'].attrs)
            self.assertEqual(new_data_arr.coords['y'].attrs['units'],
                             'degrees_north')
            self.assertIn('units', new_data_arr.coords['x'].attrs)
            self.assertEqual(new_data_arr.coords['x'].attrs['units'],
                             'degrees_east')
            self.assertIn('crs', new_data_arr.coords)
            self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS)
Example #21
def load(satscene, calibrate=True, area_extent=None, **kwargs):
    """Load MSG SEVIRI data from hdf5 format.
    """

    # Read config file content
    conf = ConfigParser()
    conf.read(os.path.join(CONFIG_PATH, satscene.fullname + ".cfg"))
    values = {"orbit": satscene.orbit,
    "satname": satscene.satname,
    "number": satscene.number,
    "instrument": satscene.instrument_name,
    "satellite": satscene.fullname
    }

    LOG.info("assume seviri-level4")
    print "... assume seviri-level4"

    satscene.add_to_history("hdf5 data read by mpop/msg_seviri_hdf.py")


    if "reader_level" in kwargs.keys():
        reader_level = kwargs["reader_level"]
    else:
        reader_level = "seviri-level4"

    if "RSS" in kwargs.keys():
        if kwargs["RSS"]:
            dt_end =  4
        else:
            dt_end = 12
    else:
        from my_msg_module import check_RSS
        RSS = check_RSS(satscene.sat_nr(), satscene.time_slot)
        if RSS == None:
            print "*** Error in mpop/satin/msg_seviri_hdf.py"
            print "    satellite MSG", satscene.sat_nr() ," is not active yet"
            quit()
        else:
            if RSS:
                dt_end =  4
            else:
                dt_end = 12

    print "... hdf file name is specified by observation end time"
    print "    assume ", dt_end, " min between start and end time of observation"

    # end of scan time 4 min after start 
    end_time = satscene.time_slot + datetime.timedelta(minutes=dt_end)

    filename = os.path.join( end_time.strftime(conf.get(reader_level, "dir", raw=True)),
                             end_time.strftime(conf.get(reader_level, "filename", raw=True)) % values )
    
    print "... search for file: ", filename
    filenames=glob(str(filename))
    if len(filenames) == 0:
        print "*** Error, no file found"
        return # just return without exit the program 
    elif len(filenames) > 1:
        print "*** Warning, more than 1 datafile found: ", filenames 
    filename = filenames[0]
    print("... read data from %s" % str(filename))

    # read data from hdf5 file 
    data_folder='U-MARF/MSG/Level1.5/'

    # Load data from hdf file
    with h5py.File(filename,'r') as hf:

        subset_info=hf.get(data_folder+'METADATA/SUBSET')
        for i in range(subset_info.len()):
            #print subset_info[i]['EntryName'], subset_info[i]['Value']
            if subset_info[i]['EntryName'] == "VIS_IRSouthLineSelectedRectangle":
                VIS_IRSouthLine = int(subset_info[i]['Value'])
            if subset_info[i]['EntryName'] == "VIS_IRNorthLineSelectedRectangle":
                VIS_IRNorthLine = int(subset_info[i]['Value'])
            if subset_info[i]['EntryName'] == "VIS_IREastColumnSelectedRectangle":
                VIS_IREastColumn = int(subset_info[i]['Value'])
            if subset_info[i]['EntryName'] == "VIS_IRWestColumnSelectedRectangle":
                VIS_IRWestColumn = int(subset_info[i]['Value'])
            if subset_info[i]['EntryName'] == "HRVLowerNorthLineSelectedRectangle":
                HRVLowerNorthLine = int(subset_info[i]['Value'])
            if subset_info[i]['EntryName'] == "HRVLowerSouthLineSelectedRectangle":
                HRVLowerSouthLine = int(subset_info[i]['Value'])
            if subset_info[i]['EntryName'] == "HRVLowerEastColumnSelectedRectangle":
                HRVLowerEastColumn = int(subset_info[i]['Value'])
            if subset_info[i]['EntryName'] == "HRVLowerWestColumnSelectedRectangle":
                HRVLowerWestColumn = int(subset_info[i]['Value'])
            if subset_info[i]['EntryName'] == "HRVUpperSouthLineSelectedRectangle":
                HRVUpperSouthLine = int(subset_info[i]['Value'])  # 0
            if subset_info[i]['EntryName'] == "HRVUpperNorthLineSelectedRectangle":
                HRVUpperNorthLine = int(subset_info[i]['Value'])  # 0
            if subset_info[i]['EntryName'] == "HRVUpperEastColumnSelectedRectangle":
                HRVUpperEastColumn = int(subset_info[i]['Value']) # 0
            if subset_info[i]['EntryName'] == "HRVUpperWestColumnSelectedRectangle":
                HRVUpperWestColumn = int(subset_info[i]['Value']) # 0

        sat_status = hf.get(data_folder+'METADATA/HEADER/SatelliteStatus/SatelliteStatus_DESCR')
        for i in range(sat_status.len()):
            if sat_status[i]['EntryName']=="SatelliteDefinition-NominalLongitude":
                sat_lon = sat_status[i]['Value']
                break

        #print 'VIS_IRSouthLine', VIS_IRSouthLine
        #print 'VIS_IRNorthLine', VIS_IRNorthLine
        #print 'VIS_IREastColumn', VIS_IREastColumn
        #print 'VIS_IRWestColumn', VIS_IRWestColumn
        #print 'sat_longitude', sat_lon, type(sat_lon), 'GEOS<'+'{:+06.1f}'.format(sat_lon)+'>' 

        if False:
            # works only if all pixels are on the disk
            from msg_pixcoord2area import msg_pixcoord2area
            print("VIS_IRNorthLine, VIS_IRWestColumn, VIS_IRSouthLine, VIS_IREastColumn:",
                  VIS_IRNorthLine, VIS_IRWestColumn, VIS_IRSouthLine, VIS_IREastColumn)
            area_def = msg_pixcoord2area(VIS_IRNorthLine, VIS_IRWestColumn,
                                         VIS_IRSouthLine, VIS_IREastColumn,
                                         "vis", sat_lon)
        else:
            # works also for pixels outside of the disk
            pname = 'GEOS<' + '{:+06.1f}'.format(sat_lon) + '>'  # e.g. "GEOS<+009.5>"
            proj = {'proj': 'geos', 'a': '6378169.0', 'b': '6356583.8', 'h': '35785831.0', 'lon_0': str(sat_lon)}
            aex = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612)

            # define full disk projection 
            from pyresample.geometry import AreaDefinition
            full_disk_def = AreaDefinition('full_disk',
                                           'full_disk',
                                           pname,
                                           proj,
                                           3712,
                                           3712,
                                           aex )

            # define name and calculate area for sub-domain
            area_name = ('MSG_' + '{:04d}'.format(VIS_IRNorthLine) + '_' +
                         '{:04d}'.format(VIS_IRWestColumn) + '_' +
                         '{:04d}'.format(VIS_IRSouthLine) + '_' +
                         '{:04d}'.format(VIS_IREastColumn))
            aex = full_disk_def.get_area_extent_for_subset(3712 - VIS_IRSouthLine,
                                                           3712 - VIS_IRWestColumn,
                                                           3712 - VIS_IRNorthLine,
                                                           3712 - VIS_IREastColumn)

            area_def = AreaDefinition(area_name,
                                      area_name,
                                      pname,
                                      proj,
                                      (VIS_IRWestColumn-VIS_IREastColumn)+1,
                                      (VIS_IRNorthLine-VIS_IRSouthLine)+1,
                                      aex )

        #print area_def
        #print "REGION:", area_def.area_id, "{"
        #print "\tNAME:\t", area_def.name
        #print "\tPCS_ID:\t", area_def.proj_id
        #print ("\tPCS_DEF:\tproj="+area_def.proj_dict['proj']+", lon_0=" + area_def.proj_dict['lon_0'] + ", a="+area_def.proj_dict['a']+", b="+area_def.proj_dict['b']+", h="+area_def.proj_dict['h'])
        #print "\tXSIZE:\t", area_def.x_size
        #print "\tYSIZE:\t", area_def.y_size
        #print "\tAREA_EXTENT:\t", area_def.area_extent
        #print "};"

        # copy area to satscene 
        satscene.area = area_def

        # write information used by mipp.xrit.MSG._Calibrator in a fake header file
        hdr = dict()

        # satellite ID number 
        hdr["SatelliteDefinition"] = dict()
        hdr["SatelliteDefinition"]["SatelliteId"] = SatelliteIds[str(satscene.sat_nr())]
        
        # processing 
        hdr["Level 1_5 ImageProduction"] = dict()
        hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"] = np_array([2,2,2,2,2,2,2,2,2,2,2,2], int)
        
        # calibration factors  
        Level15ImageCalibration = hf.get(data_folder+'METADATA/HEADER/RadiometricProcessing/Level15ImageCalibration_ARRAY')
        hdr["Level1_5ImageCalibration"] = dict()

        for chn_name in channel_numbers.keys():
            chn_nb = channel_numbers[chn_name]-1
            hdr["Level1_5ImageCalibration"][chn_nb] = dict()
            #print chn_name, chn_nb, Level15ImageCalibration[chn_nb]['Cal_Slope'], Level15ImageCalibration[chn_nb]['Cal_Offset']
            hdr["Level1_5ImageCalibration"][chn_nb]['Cal_Slope']  = Level15ImageCalibration[chn_nb]['Cal_Slope']
            hdr["Level1_5ImageCalibration"][chn_nb]['Cal_Offset'] = Level15ImageCalibration[chn_nb]['Cal_Offset']

        # loop over channels to load 
        for chn_name in satscene.channels_to_load:

            dataset_name = data_folder+'DATA/'+dict_channel[chn_name]+'/IMAGE_DATA'
            if dataset_name in hf:
                data_tmp = hf.get(data_folder+'DATA/'+dict_channel[chn_name]+'/IMAGE_DATA')

                LOG.info('hdr["SatelliteDefinition"]["SatelliteId"]: '+str(hdr["SatelliteDefinition"]["SatelliteId"]))
                #LOG.info('hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"]', hdr["Level 1_5 ImageProduction"]["PlannedChanProcessing"])
                chn_nb = channel_numbers[chn_name]-1
                LOG.info('hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Slope"]:  '+str(hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Slope"]))
                LOG.info('hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Offset"]: '+str(hdr["Level1_5ImageCalibration"][chn_nb]["Cal_Offset"]))

                if calibrate:
                    #Calibrator = _Calibrator(hdr, chn_name)
                    bits_per_pixel = 10   ### !!! I have no idea if this is correct !!!
                    Calibrator = _Calibrator(hdr, chn_name, bits_per_pixel) ## changed call in mipp/xrit/MSG.py
                    data, calibration_unit = Calibrator (data_tmp, calibrate=1)
                else:
                    data = data_tmp
                    calibration_unit = "counts"

                LOG.info(chn_name+ " min/max: "+str(data.min())+","+str(data.max())+" "+calibration_unit )

                satscene[chn_name] = ma.asarray(data)

                satscene[chn_name].info['units'] = calibration_unit
                satscene[chn_name].info['satname'] = satscene.satname
                satscene[chn_name].info['satnumber'] = satscene.number
                satscene[chn_name].info['instrument_name'] = satscene.instrument_name
                satscene[chn_name].info['time'] = satscene.time_slot
                satscene[chn_name].info['is_calibrated'] = True

            else:
                print("*** Warning, no data for channel " + chn_name + " in file " + filename)
                data = np_nan
                calibration_unit = ""
                LOG.info("*** Warning, no data for channel "+ chn_name+" in file "+filename)
Example #22
        VNP02 = Dataset(viirs_folder + fn1)
        available_bands = [
            key for key in VNP02['observation_data'].variables.keys()
            if len(key) == 3
        ] + [
            'satellite_azimuth_angle', 'satellite_zenith_angle',
            'solar_azimuth_angle', 'solar_zenith_angle'
        ]

        # load available bands in scene
        scn = Scene(filenames=[viirs_folder + fn1, viirs_folder + fn2],
                    reader='viirs_l1b')  # load VNP02 and VNP03 files together
        scn.load(available_bands + ['dust'])
        dst_area = AreaDefinition('crop_area', 'crop_area', 'crop_latlong',
                                  {'proj': 'latlong'},
                                  int((maxlon - minlon) / 0.0075),
                                  int((maxlat - minlat) / 0.0075),
                                  [minlon, minlat, maxlon, maxlat])
        local_scn = scn.resample(dst_area)

        if (local_scn[available_bands[0]].shape[0] > matrix_size) and (
                local_scn[available_bands[0]].shape[1] > matrix_size):
            if not path.exists(full_composite + fname[3:] + '_dust.png'):
                local_scn.save_dataset(
                    'dust', full_composite + fname[3:] + '_dust.png')
            composite_image = mpimg.imread(full_composite + fname[3:] +
                                           '_dust.png')

            try:
                scn.load(['true_color_raw'])
                local_scn = scn.resample(dst_area)
Example #23
    def test_lettered_tiles_update_existing(self):
        """Test updating lettered tiles with additional data."""
        import shutil
        import xarray as xr
        from satpy.writers.awips_tiled import AWIPSTiledWriter
        from xarray import DataArray
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        import dask
        first_base_dir = os.path.join(self.base_dir, 'first')
        w = AWIPSTiledWriter(base_dir=first_base_dir, compress=True)
        area_def = AreaDefinition(
            'test',
            'test',
            'test',
            proj4_str_to_dict(
                '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
                '+lat_0=25 +lat_1=25 +units=m +no_defs'),
            1000,
            2000,
            (-1000000., -1500000., 1000000., 1500000.),
        )
        now = datetime(2018, 1, 1, 12, 0, 0)
        data = np.linspace(0., 1., 2000000, dtype=np.float32).reshape(
            (2000, 1000))
        # pixels to be filled in later
        data[:, -200:] = np.nan
        ds = DataArray(da.from_array(data, chunks=500),
                       attrs=dict(name='test_ds',
                                  platform_name='PLAT',
                                  sensor='SENSOR',
                                  units='1',
                                  area=area_def,
                                  start_time=now,
                                  end_time=now + timedelta(minutes=20)))
        # tile_count should be ignored since we specified lettered_grid
        w.save_datasets([ds],
                        sector_id='LCC',
                        source_name="TESTS",
                        tile_count=(3, 3),
                        lettered_grid=True)
        all_files = sorted(glob(os.path.join(first_base_dir, 'TESTS_AII*.nc')))
        assert len(all_files) == 16
        first_files = []
        second_base_dir = os.path.join(self.base_dir, 'second')
        os.makedirs(second_base_dir)
        for fn in all_files:
            new_fn = fn.replace(first_base_dir, second_base_dir)
            shutil.copy(fn, new_fn)
            first_files.append(new_fn)

        # Second writing/updating
        # Area is about 100 pixels to the right
        area_def2 = AreaDefinition(
            'test',
            'test',
            'test',
            proj4_str_to_dict(
                '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
                '+lat_0=25 +lat_1=25 +units=m +no_defs'),
            1000,
            2000,
            (-800000., -1500000., 1200000., 1500000.),
        )
        data2 = np.linspace(0., 1., 2000000, dtype=np.float32).reshape(
            (2000, 1000))
        # a gap at the beginning where old values remain
        data2[:, :200] = np.nan
        # a gap at the end where old values remain
        data2[:, -400:-300] = np.nan
        ds2 = DataArray(da.from_array(data2, chunks=500),
                        attrs=dict(name='test_ds',
                                   platform_name='PLAT',
                                   sensor='SENSOR',
                                   units='1',
                                   area=area_def2,
                                   start_time=now,
                                   end_time=now + timedelta(minutes=20)))
        w = AWIPSTiledWriter(base_dir=second_base_dir, compress=True)
        # HACK: The _copy_to_existing function hangs when opening the output
        #   file multiple times...sometimes. If we limit dask to one worker
        #   it seems to work fine.
        with dask.config.set(num_workers=1):
            w.save_datasets([ds2],
                            sector_id='LCC',
                            source_name="TESTS",
                            tile_count=(3, 3),
                            lettered_grid=True)
        all_files = glob(os.path.join(second_base_dir, 'TESTS_AII*.nc'))
        # 16 original tiles + 4 new tiles
        assert len(all_files) == 20

        # these tiles should be the right-most edge of the first image
        first_right_edge_files = [
            x for x in first_files
            if 'P02' in x or 'P04' in x or 'V02' in x or 'V04' in x
        ]
        for new_file in first_right_edge_files:
            orig_file = new_file.replace(second_base_dir, first_base_dir)
            orig_nc = xr.open_dataset(orig_file)
            orig_data = orig_nc['data'].values
            if not np.isnan(orig_data).any():
                # we only care about the tiles that had NaNs originally
                continue

            new_nc = xr.open_dataset(new_file)
            new_data = new_nc['data'].values
            # there should be at least some areas of the file
            # that old data was present and hasn't been replaced
            np.testing.assert_allclose(orig_data[:, :20], new_data[:, :20])
            # it isn't exactly 200 because the tiles aren't aligned with the
            # data (the left-most tile doesn't have data until some columns
            # in), but it should be at least that many columns
            assert np.isnan(orig_data[:, 200:]).all()
            assert not np.isnan(new_data[:, 200:]).all()
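
The update behavior these assertions exercise boils down to "new data wins wherever it is valid, old data survives where the new data is NaN". A minimal standalone sketch of that merge rule, with illustrative names rather than the writer's internals:

# Hedged sketch: NaN-aware tile update, analogous to what the assertions
# above expect from AWIPSTiledWriter when re-writing existing lettered
# tiles. `merge_tiles`, `old_tile`, and `new_tile` are hypothetical names.
import numpy as np

def merge_tiles(old_tile, new_tile):
    """Keep old values where the new data is NaN, else take new data."""
    return np.where(np.isnan(new_tile), old_tile, new_tile)

old_tile = np.full((4, 4), 1.0)
new_tile = np.full((4, 4), 2.0)
new_tile[:, :2] = np.nan  # a gap where the old values should remain
merged = merge_tiles(old_tile, new_tile)
assert (merged[:, :2] == 1.0).all() and (merged[:, 2:] == 2.0).all()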
Example #24
0
# 3. Resample the full disk to the Democratic Republic of the Congo and
#    its neighbours [4P]
#    by defining your own area in Lambert Azimuthal Equal Area.
#    Use the following settings:
#      - lat and lon of origin: -3/23
#      - width and height of the resulting domain: 500px
#      - projection x/y coordinates of lower left: -15E5
#      - projection x/y coordinates of upper right: 15E5

area_id = 'Kongo'
description = 'Kongo Lambert Azimuthal Equal Area projection'
proj_id = 'Kongo'
proj_dict = {'proj': 'laea', 'lat_0': -3, 'lon_0': 23}
width = 500
height = 500
llx = -15E5
lly = -15E5
urx = 15E5
ury = 15E5
area_extent = (llx, lly, urx, ury)
area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width,
                          height, area_extent)
local_scn = scn.resample(area_def)

# 4. Save both loaded composites of the resampled Scene as simple png images. [2P]

local_scn.save_datasets(writer='simple_image',
                        datasets=["natural_color", "convection"],
                        filename='{name}.png',
                        base_dir='../out')
Example #25
0
def _prepare_cf_goes():
    import numpy as np
    import xarray as xr

    from pyresample.geometry import AreaDefinition
    area_id = 'GOES-East'
    description = '2km at nadir'
    proj_id = 'abi_fixed_grid'
    h = 35786023
    projection = {
        'ellps': 'GRS80',
        'h': h,
        'lon_0': '-75',
        'no_defs': 'None',
        'proj': 'geos',
        'sweep': 'x',
        'type': 'crs',
        'units': 'm',
        'x_0': '0',
        'y_0': '0'
    }
    width = 2500
    height = 1500
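    # Note: the metre extents below are divided by the satellite height h,
    # turning them into scan angles in radians to match the 'radians'
    # units set on the x/y coordinates further down.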
    area_extent = (-3627271.2913 / h, 1583173.6575 / h, 1382771.9287 / h,
                   4589199.5895 / h)
    goes_area = AreaDefinition(area_id, description, proj_id, projection,
                               width, height, area_extent)
    x = np.linspace(goes_area.area_extent[0], goes_area.area_extent[2],
                    goes_area.shape[1])
    y = np.linspace(goes_area.area_extent[3], goes_area.area_extent[1],
                    goes_area.shape[0])
    ds = xr.Dataset(
        {
            'C13': (('y', 'x'), np.ma.masked_all((height, width)), {
                'grid_mapping': 'GOES-East'
            })
        },
        coords={
            'y': y,
            'x': x
        })

    ds['x'].attrs['units'] = 'radians'
    ds['x'].attrs['standard_name'] = 'projection_x_coordinate'
    ds['y'].attrs['units'] = 'radians'
    ds['y'].attrs['standard_name'] = 'projection_y_coordinate'

    ds['GOES-East'] = 0
    ds['GOES-East'].attrs['grid_mapping_name'] = 'geostationary'
    ds['GOES-East'].attrs['false_easting'] = 0.0
    ds['GOES-East'].attrs['false_northing'] = 0.0
    ds['GOES-East'].attrs['semi_major_axis'] = 6378137.0
    ds['GOES-East'].attrs['semi_minor_axis'] = 6356752.31414
    ds['GOES-East'].attrs['geographic_crs_name'] = 'unknown'
    ds['GOES-East'].attrs['horizontal_datum_name'] = 'unknown'
    ds['GOES-East'].attrs['inverse_flattening'] = 298.257222096042
    ds['GOES-East'].attrs['latitude_of_projection_origin'] = 0.0
    ds['GOES-East'].attrs['long_name'] = 'GOES-East'
    ds['GOES-East'].attrs['longitude_of_prime_meridian'] = 0.0
    ds['GOES-East'].attrs['longitude_of_projection_origin'] = -75.0
    ds['GOES-East'].attrs['perspective_point_height'] = 35786023.0
    ds['GOES-East'].attrs['prime_meridian_name'] = 'Greenwich'
    ds['GOES-East'].attrs['projected_crs_name'] = 'unknown'
    ds['GOES-East'].attrs['reference_ellipsoid_name'] = 'GRS 1980'
    ds['GOES-East'].attrs['sweep_angle_axis'] = 'x'

    return ds
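
For context, a dataset shaped like this round-trips through NetCDF, and its grid mapping can be recovered as an AreaDefinition. A minimal sketch, assuming a pyresample version that provides pyresample.utils.load_cf_area (the file path is illustrative):

# Hedged usage sketch for the fixture above; the path is made up.
import os
import tempfile

from pyresample.utils import load_cf_area

ds = _prepare_cf_goes()
path = os.path.join(tempfile.mkdtemp(), 'goes_cf.nc')
ds.to_netcdf(path)
# Recover the geostationary area from the CF grid_mapping metadata.
area, cf_info = load_cf_area(path, variable='C13')
print(area.shape)  # expected: (1500, 2500)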
Example #26
0
    def _get_test_dataset_calibration(self, bands=6):
        """Helper function to create a single test dataset."""
        import xarray as xr
        import dask.array as da
        from datetime import datetime
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        from satpy import DatasetID
        from satpy.scene import Scene
        area_def = AreaDefinition(
            'test',
            'test',
            'test',
            proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 '
                              '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'),
            100,
            200,
            (-1000., -1500., 1000., 1500.),
        )

        d = [
            DatasetID(name='1', calibration='reflectance'),
            DatasetID(name='2', calibration='reflectance'),
            DatasetID(name='3', calibration='brightness_temperature'),
            DatasetID(name='4', calibration='brightness_temperature'),
            DatasetID(name='5', calibration='brightness_temperature'),
            DatasetID(name='6', calibration='reflectance')
        ]
        scene = Scene()
        scene["1"] = xr.DataArray(da.zeros((100, 200), chunks=50),
                                  dims=('y', 'x'),
                                  attrs={'calibration': 'reflectance'})
        scene["2"] = xr.DataArray(da.zeros((100, 200), chunks=50),
                                  dims=('y', 'x'),
                                  attrs={'calibration': 'reflectance'})
        scene["3"] = xr.DataArray(da.zeros((100, 200), chunks=50),
                                  dims=('y', 'x'),
                                  attrs={'calibration': 'brightness_temperature'})
        scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50),
                                  dims=('y', 'x'),
                                  attrs={'calibration': 'brightness_temperature'})
        scene["5"] = xr.DataArray(da.zeros((100, 200), chunks=50),
                                  dims=('y', 'x'),
                                  attrs={'calibration': 'brightness_temperature'})
        scene["6"] = xr.DataArray(da.zeros((100, 200), chunks=50),
                                  dims=('y', 'x'),
                                  attrs={'calibration': 'reflectance'})

        data = xr.concat(scene, 'bands', coords='minimal')
        bands = []
        calibration = []
        for p in scene:
            calibration.append(p.attrs['calibration'])
            bands.append(p.attrs['name'])
        data['bands'] = list(bands)
        new_attrs = {'name': 'datasets',
                     'start_time': datetime.utcnow(),
                     'platform_name': "TEST_PLATFORM_NAME",
                     'sensor': 'test-sensor',
                     'area': area_def,
                     'prerequisites': d,
                     'metadata_requirements': {
                         'order': ['1', '2', '3', '4', '5', '6'],
                         'config': {
                             '1': {'alias': '1-VIS0.63',
                                   'calibration': 'reflectance',
                                   'min-val': '0',
                                   'max-val': '100'},
                             '2': {'alias': '2-VIS0.86',
                                   'calibration': 'reflectance',
                                   'min-val': '0',
                                   'max-val': '100'},
                             '3': {'alias': '3(3B)-IR3.7',
                                   'calibration': 'brightness_temperature',
                                   'min-val': '-150',
                                   'max-val': '50'},
                             '4': {'alias': '4-IR10.8',
                                   'calibration': 'brightness_temperature',
                                   'min-val': '-150',
                                   'max-val': '50'},
                             '5': {'alias': '5-IR11.5',
                                   'calibration': 'brightness_temperature',
                                   'min-val': '-150',
                                   'max-val': '50'},
                             '6': {'alias': '6(3A)-VIS1.6',
                                   'calibration': 'reflectance',
                                   'min-val': '0',
                                   'max-val': '100'}
                         },
                         'translate': {'1': '1',
                                       '2': '2',
                                       '3': '3',
                                       '4': '4',
                                       '5': '5',
                                       '6': '6'
                                       },
                         'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff'
                     }
                     }
        ds1 = xr.DataArray(data=data.data, attrs=new_attrs,
                           dims=data.dims, coords=data.coords)
        return ds1
Example #27
0
def _sunz_area_def():
    """Get fake area for testing sunz generation."""
    area = AreaDefinition('test', 'test', 'test', {'proj': 'merc'}, 2, 2,
                          (-2000, -2000, 2000, 2000))
    return area
Example #28
0
    def _get_test_datasets(self):
        """Helper function to create a datasets list."""
        import xarray as xr
        import dask.array as da
        from datetime import datetime
        from pyresample.geometry import AreaDefinition
        from pyresample.utils import proj4_str_to_dict
        area_def = AreaDefinition(
            'test',
            'test',
            'test',
            proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 '
                              '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'),
            100,
            200,
            (-1000., -1500., 1000., 1500.),
        )

        ds1 = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': '1',
                   'start_time': datetime.utcnow(),
                   'platform_name': "TEST_PLATFORM_NAME",
                   'sensor': 'TEST_SENSOR_NAME',
                   'area': area_def,
                   'prerequisites': ['1'],
                   'calibration': 'reflectance',
                   'metadata_requirements': {
                       'order': ['1'],
                       'config': {
                           '1': {'alias': '1-VIS0.63',
                                 'calibration': 'reflectance',
                                 'min-val': '0',
                                 'max-val': '100'},
                       },
                       'translate': {'1': '1',
                                     },
                       'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff'
                   }}
        )
        ds2 = xr.DataArray(
            da.zeros((100, 200), chunks=50),
            dims=('y', 'x'),
            attrs={'name': '4',
                   'start_time': datetime.utcnow(),
                   'platform_name': "TEST_PLATFORM_NAME",
                   'sensor': 'TEST_SENSOR_NAME',
                   'area': area_def,
                   'prerequisites': ['4'],
                   'calibration': 'brightness_temperature',
                   'metadata_requirements': {
                       'order': ['4'],
                       'config': {
                           '4': {'alias': '4-IR10.8',
                                 'calibration': 'brightness_temperature',
                                 'min-val': '-150',
                                 'max-val': '50'},
                       },
                       'translate': {'4': '4',
                                     },
                       'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'}
                   }
        )
        return [ds1, ds2]
Example #29
0
    def test_erf_dnb(self):
        """Test the 'dynamic_dnb' or ERF DNB compositor."""
        import xarray as xr
        import dask.array as da
        import numpy as np
        from satpy.composites.viirs import ERFDNB
        from pyresample.geometry import AreaDefinition
        rows = 5
        cols = 10
        area = AreaDefinition('test', 'test', 'test', {
            'proj': 'eqc',
            'lon_0': 0.0,
            'lat_0': 0.0
        }, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17))

        comp = ERFDNB('dynamic_dnb',
                      prerequisites=('dnb', ),
                      standard_name='toa_outgoing_radiance_per_'
                      'unit_wavelength')
        dnb = np.zeros((rows, cols)) + 0.25
        dnb[3, :] += 0.25
        dnb[4:, :] += 0.5
        dnb = da.from_array(dnb, chunks=25)
        c01 = xr.DataArray(dnb,
                           dims=('y', 'x'),
                           attrs={
                               'name': 'DNB',
                               'area': area
                           })
        sza = np.zeros((rows, cols)) + 70.0
        sza[:, 3] += 20.0
        sza[:, 4:] += 45.0
        sza = da.from_array(sza, chunks=25)
        c02 = xr.DataArray(sza,
                           dims=('y', 'x'),
                           attrs={
                               'name': 'solar_zenith_angle',
                               'area': area
                           })
        lza = np.zeros((rows, cols)) + 70.0
        lza[:, 3] += 20.0
        lza[:, 4:] += 45.0
        lza = da.from_array(lza, chunks=25)
        c03 = xr.DataArray(lza,
                           dims=('y', 'x'),
                           attrs={
                               'name': 'lunar_zenith_angle',
                               'area': area
                           })
        mif = xr.DataArray(da.zeros((5, ), chunks=5) + 0.1,
                           dims=('y', ),
                           attrs={
                               'name': 'moon_illumination_fraction',
                               'area': area
                           })
        res = comp((c01, c02, c03, mif))
        self.assertIsInstance(res, xr.DataArray)
        self.assertIsInstance(res.data, da.Array)
        self.assertEqual(res.attrs['name'], 'dynamic_dnb')
        self.assertEqual(res.attrs['standard_name'], 'equalized_radiance')
        data = res.compute()
        unique = np.unique(data)
        np.testing.assert_allclose(unique, [
            0.00000000e+00, 1.00446703e-01, 1.64116082e-01, 2.09233451e-01,
            1.43916324e+02, 2.03528498e+02, 2.49270516e+02
        ])
Example #30
0
def calculate_and_project(hrit_files,
                          sat_positions,
                          time,
                          save_path,
                          fov,
                          shape,
                          proj,
                          nadir_proj=True,
                          composite=None,
                          fov_deg=True,
                          save_data_path=None,
                          save_photos=True):
    """Shows in Jupyter Notebook results of pictures seen from sat
    Parameters
        Array of saved on disc files.
        :param save_path:
        :param composite:
        :param time:
        :param elevation:
        :param central_lon:
        :param central_lat:
        :param hrit_files:

    """
    # TODO: add local earth radius
    from satpy.scene import Scene
    from wutsat.fun import mat_fun
    import math
    from pyresample.geometry import AreaDefinition
    import os

    if composite is None:
        composite = 'realistic_colors'

    if fov_deg:
        fov = [math.radians(fov[0]), math.radians(fov[1])]

    if nadir_proj:
        nadir_proj = [0, 0]

    central_lat, central_lon, elevation = mat_fun.find_sourounding_list(
        earth_radius=sat_positions[3],
        lat=sat_positions[0],
        lon=sat_positions[1],
        alt=sat_positions[2],
        fov=fov)

    area_def = []
    for i in range(0, len(central_lon)):
        altitude = elevation[i]
        rad = satellite_info(6371228, elevation[i], fov[0], fov[1])[5] / 2
        lat_0, lon_0 = central_lat[i], central_lon[i]
        lat_1, lat_2 = central_lat[i], central_lon[i]
        center = (central_lat[i], central_lon[i])
        radius = (rad, rad)
        area_id = 'wrf_circle'
        proj_dict = {'proj': proj, 'lat_0': lat_0, 'lon_0': lon_0,
                     'lat_1': lat_1, 'lat_2': lat_2,
                     'a': 6370000, 'b': 6370000, 'h': altitude,
                     'azi': nadir_proj[0], 'tilt': nadir_proj[1]}
        area_def.append(
            AreaDefinition.from_circle(area_id,
                                       proj_dict,
                                       center,
                                       radius=radius,
                                       shape=shape))

    files = return_files(time, hrit_files)

    if save_photos:
        scn = Scene(filenames=files)
        scn.load([composite])
        for i, area in enumerate(area_def, start=0):
            local_scn = scn.resample(area, radius_of_influence=50000)
            local_scn.show(composite)
            path = save_path + '/' + str(i) + '.png'
            local_scn.save_dataset(composite,
                                   path,
                                   writer='simple_image',
                                   num_threads=8)
    sat_data = [area_def, files, [central_lat, central_lon, elevation]]
    if save_data_path:
        mat_fun.rwdata(save_data_path, 'sat_data.pkl', 'w', sat_data)

    return ()
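
Because the positional packing of sat_positions is easy to get wrong, here is a hedged invocation sketch; every value is illustrative, my_hrit_files and my_time are placeholders for real inputs, and 'tpers' is only a guess at a projection that accepts the azi/tilt parameters built above:

# Hypothetical call; all values are made up for illustration.
# sat_positions packs (lat, lon, altitude, earth_radius), matching how
# the function indexes it internally.
calculate_and_project(hrit_files=my_hrit_files,
                      sat_positions=(52.2, 21.0, 420000.0, 6371228.0),
                      time=my_time,  # whatever return_files() expects
                      save_path='./out',
                      fov=(10.0, 10.0),  # degrees, since fov_deg=True
                      shape=(500, 500),
                      proj='tpers',
                      composite='realistic_colors')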
Example #31
0
sza_vis_exp = xr.DataArray(np.array(
    [[45., 67.5, 90., np.nan], [22.5, 45., 67.5, np.nan],
     [0., 22.5, 45., np.nan], [np.nan, np.nan, np.nan, np.nan]],
    dtype=np.float32),
                           dims=('y', 'x'),
                           attrs=attrs_exp)
sza_ir_wv_exp = xr.DataArray(np.array([[45, 90], [0, 45]], dtype=np.float32),
                             dims=('y', 'x'),
                             attrs=attrs_exp)
area_vis_exp = AreaDefinition(
    area_id='geos_mviri_4x4',
    proj_id='geos_mviri_4x4',
    description='MVIRI Geostationary Projection',
    projection={
        'proj': 'geos',
        'lon_0': 57.0,
        'h': ALTITUDE,
        'a': EQUATOR_RADIUS,
        'b': POLE_RADIUS
    },
    width=4,
    height=4,
    area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392])
area_ir_wv_exp = area_vis_exp.copy(area_id='geos_mviri_2x2',
                                   proj_id='geos_mviri_2x2',
                                   width=2,
                                   height=2)


@pytest.fixture(name='fake_dataset')
def fixture_fake_dataset():
    """Create fake dataset."""
Example #32
0
def show_sat_perspective2(hrit_files,
                          central_lat,
                          central_lon,
                          elevation,
                          time,
                          save_path,
                          fov,
                          shape,
                          composite=None,
                          fov_deg=True):
    """Shows in Jupyter Notebook results of pictures seen from sat
    Parameters
        Array of saved on disc files.
        :param save_path:
        :param composite:
        :param time:
        :param elevation:
        :param central_lon:
        :param central_lat:
        :param hrit_files:

    """
    # TODO: add local earth radius
    import datetime as dt
    from satpy.scene import Scene
    import math
    from satpy.resample import get_area_def
    from datetime import datetime
    import matplotlib.pyplot as plt
    import cartopy.crs as ccrs
    import cartopy
    import cartopy.feature as cfeature
    from skyfield.api import Topos, load
    import pyproj
    import numpy as np
    from astropy import units as u
    from astropy.coordinates import Angle
    from pyresample.geometry import AreaDefinition, SwathDefinition, create_area_def

    if composite is None:
        composite = 'realistic_colors'

    if fov_deg:
        fov = [math.radians(fov[0]), math.radians(fov[1])]

    area_def = []
    for i in range(0, len(central_lon)):
        rad = satellite_info(6371228, elevation[i], fov[0], fov[1])[5] / 2
        lat_0, lon_0 = central_lat[i], central_lon[i]
        lat_1, lat_2 = central_lat[i], central_lon[i]
        center = (central_lat[i], central_lon[i])
        radius = (rad, rad)
        area_id = 'wrf_circle'
        proj_dict = {'proj': 'lcc', 'lat_0': lat_0, 'lon_0': lon_0,
                     'lat_1': lat_1, 'lat_2': lat_2,
                     'a': 6370000, 'b': 6370000}
        area_def.append(
            AreaDefinition.from_circle(area_id,
                                       proj_dict,
                                       center,
                                       radius,
                                       shape=shape))

    files = return_files(time, hrit_files)

    scn = Scene(filenames=files)
    scn.load([composite])

    for i, area in enumerate(area_def, start=0):
        local_scn = scn.resample(area, radius_of_influence=50000)
        local_scn.show(composite)
        path = save_path + str(shape[0]) + '_' + str(composite) + '_' + str(
            i) + '_{date:%Y-%m-%d_%H_%M_%S}.png'.format(date=scn.start_time)
        local_scn.save_dataset(composite,
                               path,
                               writer='simple_image',
                               num_threads=8)

    return ()