Example #1
    def test_attach_table_attributes(self):
        """
        Test the attach_table_attributes function.
        """
        attrs = {'CLASS': 'TABLE',
                 'VERSION': '0.2',
                 'TITLE': 'Table',
                 'FIELD_0_NAME': 'float_data',
                 'FIELD_1_NAME': 'integer_data'}

        fname = 'test_attach_table_attributes.h5'
        with h5py.File(fname, "w", **self.memory_kwargs) as fid:
            dset = fid.create_dataset('data', data=self.table_data)
            hdf5.attach_table_attributes(dset, attrs=attrs)
            test = {k: v for k, v in dset.attrs.items()}
            self.assertDictEqual(test, attrs)
Example #2
    def test_attach_table_attributes(self):
        """
        Test the attach_table_attributes function.
        """
        attrs = {
            "CLASS": "TABLE",
            "VERSION": "0.2",
            "TITLE": "Table",
            "FIELD_0_NAME": "float_data",
            "FIELD_1_NAME": "integer_data",
        }

        fname = "test_attach_table_attributes.h5"
        with h5py.File(fname, "w", **self.memory_kwargs) as fid:
            dset = fid.create_dataset("data", data=self.table_data)
            hdf5.attach_table_attributes(dset, attrs=attrs)
            test = {k: v for k, v in dset.attrs.items()}
            self.assertDictEqual(test, attrs)
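
Both tests above only check that the dataset ends up carrying CLASS/VERSION/TITLE metadata, one FIELD_<i>_NAME entry per column of the compound dtype, and any user-supplied attributes. The helper below is a hypothetical stand-in (the name `attach_table_attributes_sketch` and the behaviour are assumptions derived from the assertions, not wagl's implementation):

import numpy as np
import h5py


def attach_table_attributes_sketch(dset, title="Table", attrs=None):
    """
    Hypothetical stand-in for wagl's hdf5.attach_table_attributes,
    based only on what the tests above assert.
    """
    dset.attrs["CLASS"] = "TABLE"
    dset.attrs["VERSION"] = "0.2"
    dset.attrs["TITLE"] = title

    # one FIELD_<i>_NAME attribute per column of the structured array
    for i, name in enumerate(dset.dtype.names or ()):
        dset.attrs["FIELD_{}_NAME".format(i)] = name

    # finally merge in any caller-supplied attributes
    for key, value in (attrs or {}).items():
        dset.attrs[key] = value


# toy usage mirroring the tests: an in-memory file and a two-column table
table = np.zeros(3, dtype=[("float_data", "float64"), ("integer_data", "int64")])
with h5py.File("sketch.h5", "w", driver="core", backing_store=False) as fid:
    dset = fid.create_dataset("data", data=table)
    attach_table_attributes_sketch(dset)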
Example #3
def _store_parameter_settings(fid, spheriod, orbital_elements, satellite_model,
                              satellite_track, params):
    """
    An internal function for storing the parameter settings for the
    calculate_angles workflow.
    """
    group = fid.create_group('PARAMETERS')

    # generic parameters
    dname = DatasetName.GENERIC.value
    write_scalar('GENERIC PARAMETERS', dname, group, params)

    # spheroid
    desc = "The spheroid used in the satellite and solar angles calculation."
    attrs = {'description': desc}
    dname = DatasetName.SPHEROID.value
    sph_dset = group.create_dataset(dname, data=spheriod)
    attach_table_attributes(sph_dset, title='Spheroid', attrs=attrs)

    # orbital elements
    desc = ("The satellite orbital parameters used in the satellite and "
            "solar angles calculation.")
    attrs = {'description': desc}
    dname = DatasetName.ORBITAL_ELEMENTS.value
    orb_dset = group.create_dataset(dname, data=orbital_elements)
    attach_table_attributes(orb_dset, title='Orbital Elements', attrs=attrs)

    # satellite model
    desc = ("The satellite model used in the satellite and solar angles "
            "calculation.")
    attrs = {'description': desc}
    dname = DatasetName.SATELLITE_MODEL.value
    sat_dset = group.create_dataset(dname, data=satellite_model)
    attach_table_attributes(sat_dset, title='Satellite Model', attrs=attrs)

    # satellite track
    desc = ("The satellite track information used in the satellite and solar "
            "angles calculation.")
    attrs = {'description': desc}
    dname = DatasetName.SATELLITE_TRACK.value
    track_dset = group.create_dataset(dname, data=satellite_track)
    attach_table_attributes(track_dset, title='Satellite Track', attrs=attrs)
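
Each block in `_store_parameter_settings` repeats the same steps: build a description, create a dataset inside the PARAMETERS group, and attach table attributes. A minimal standalone sketch of that pattern using plain h5py and a hypothetical `_write_table` helper (the import path is an assumption; wagl's `write_scalar` and `DatasetName` enum are not reproduced here):

import numpy as np
import h5py
from wagl.hdf5 import attach_table_attributes  # assumed import location


def _write_table(group, name, data, title, description):
    """Create a table dataset and attach its descriptive attributes."""
    dset = group.create_dataset(name, data=data)
    attach_table_attributes(dset, title=title, attrs={"description": description})
    return dset


# toy usage: a PARAMETERS group holding a single-row spheroid table
spheroid = np.zeros(1, dtype=[("semi_major_axis", "float64"),
                              ("inverse_flattening", "float64")])
with h5py.File("parameters.h5", "w", driver="core", backing_store=False) as fid:
    group = fid.create_group("PARAMETERS")
    _write_table(group, "SPHEROID", spheroid, "Spheroid",
                 "The spheroid used in the satellite and solar angles calculation.")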
Example #4
def create_centreline_dataset(geobox, x, n, out_group):
    """
    Creates the centre line dataset.

    :param geobox:
        An instance of a GriddedGeoBox object.

    :param x:
        A 1D np array of type int with length equal to
        `geobox.shape[0]`.
        Details the column number starting at 0.

    :param n:
        A 1D np array of type int with the same shape as x.
        Details the number of track points found in each line, which
        determines whether or not the coordinate is averaged.

    :param out_group:
        A writeable HDF5 `Group` object.

    :return:
        None, results are written into the H5Group defined by
        out_group.
    """
    # centreline
    # if more than one pixel in a line was a track point the coordinates
    # are averaged
    wh = n > 1.5
    x[wh] = x[wh] / n[wh]

    # check whether there is no centre pixel in the line; it is assumed that
    # at least one of the adjacent lines has a pixel
    wh = n < 0.5
    temp = x[0:2].copy()
    x[wh] = np.roll(x, 1)[wh]
    # account for first element potentially being changed with the
    # last element
    if wh[0]:
        x[0] = temp[1]

    # convert X centre points to integers (basically array co-ordinates)
    # and correct for FORTRAN offset
    x = np.rint(x) - 1

    rows, _ = geobox.shape
    y = np.arange(rows)

    dtype = np.dtype([
        ("row_index", "int64"),
        ("col_index", "int64"),
        ("n_pixels", "float"),
        ("latitude", "float64"),
        ("longitude", "float64"),
    ])
    data = np.zeros(rows, dtype=dtype)
    lon, lat = convert_to_lonlat(geobox, x, y)

    data["row_index"] = y
    data["col_index"] = x
    data["n_pixels"] = n
    data["latitude"] = lat
    data["longitude"] = lon

    kwargs = H5CompressionFilter.LZF.config().dataset_compression_kwargs()
    dname = DatasetName.CENTRELINE.value
    cent_dset = out_group.create_dataset(dname, data=data, **kwargs)
    desc = ("Contains the array, latitude and longitude coordinates of the "
            "satellite track path.")
    attrs = {"description": desc, "array_coordinate_offset": 0}
    attach_table_attributes(cent_dset, title="Centreline", attrs=attrs)
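
The first half of `create_centreline_dataset` is plain NumPy bookkeeping: average the columns where more than one track point fell on a line, borrow a neighbouring line's column where none did, then convert to zero-based array coordinates. A small self-contained sketch of just that bookkeeping on made-up toy values:

import numpy as np

# toy inputs: summed column positions and per-line track-point counts
x = np.array([0.0, 10.0, 23.0, 11.0, 12.0])   # column sums per line
n = np.array([0.0, 1.0, 2.0, 1.0, 1.0])       # track points per line

# average the lines that held more than one track point
wh = n > 1.5
x[wh] = x[wh] / n[wh]

# lines with no track point borrow the previous line's column
wh = n < 0.5
temp = x[0:2].copy()
x[wh] = np.roll(x, 1)[wh]
# np.roll wraps, so a missing first line would otherwise pick up the last line;
# use the original second line instead
if wh[0]:
    x[0] = temp[1]

# convert to zero-based (FORTRAN -> C) array coordinates
x = np.rint(x) - 1
print(x)   # -> [ 9.  9. 11. 10. 11.]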
Example #5
def create_boxline(acquisition,
                   view_angle_dataset,
                   centreline_dataset,
                   out_group,
                   max_angle=9.0):
    """
    Creates the boxline (satellite track bi-section) dataset.

    :param acquisition:
        An instance of an `Acquisition` object.

    :param view_angle_dataset:
        A `NumPy` array, or `NumPy`-like dataset, that supports
        indexing and returns a `NumPy` array of satellite view
        angles when indexed/sliced.

    :param centreline_dataset:
        The dataset created by the create_centreline function.

    :param out_group:
        A writeable HDF5 `Group` object.

    :param max_angle:
        The maximum viewing angle. Default is 9.0 degrees.

    :return:
        None, results are written into the H5Group defined by
        out_group.
    """
    geobox = acquisition.gridded_geo_box()
    rows, _ = view_angle_dataset.shape

    # calculate the column start and end indices
    # (for filtering out pixels of the orthorectified array where no
    # observations are expected because the sensor look-angle would be too
    # peripheral.)
    # TODO: similar filtering for pixels where the line acquisition time would
    # be outside of the scene acquisition window.
    istart, iend = swathe_edges(max_angle, view_angle_dataset)

    row_index = np.arange(rows)
    col_index = centreline_dataset["col_index"][:]
    npoints = centreline_dataset["n_pixels"][:]

    intersection, _, bisection = track_bisection(acquisition, npoints,
                                                 col_index[0], col_index[-1])

    # record curves for parcellation (of the raster into interpolation cells)
    boxline_dtype = np.dtype([
        ("row_index", "int64"),
        ("bisection_index", "int64"),
        ("npoints", "int64"),
        ("start_index", "int64"),
        ("end_index", "int64"),
        ("bisection_longitude", "float64"),
        ("bisection_latitude", "float64"),
        ("start_longitude", "float64"),
        ("start_latitude", "float64"),
        ("end_longitude", "float64"),
        ("end_latitude", "float64"),
    ])
    boxline = np.empty(rows, dtype=boxline_dtype)
    boxline["row_index"] = row_index
    boxline["npoints"] = npoints
    boxline["start_index"] = istart
    boxline["end_index"] = iend

    # if not a full intersection, grab the bisection index
    if intersection != TrackIntersection.FULL:
        boxline["bisection_index"] = bisection
    else:
        boxline["bisection_index"] = col_index

    # lon/lat conversions
    lon, lat = convert_to_lonlat(geobox, boxline["bisection_index"], row_index)
    boxline["bisection_longitude"] = lon
    boxline["bisection_latitude"] = lat

    lon, lat = convert_to_lonlat(geobox, istart, row_index)
    boxline["start_longitude"] = lon
    boxline["start_latitude"] = lat

    lon, lat = convert_to_lonlat(geobox, iend, row_index)
    boxline["end_longitude"] = lon
    boxline["end_latitude"] = lat

    kwargs = H5CompressionFilter.LZF.config().dataset_compression_kwargs()
    desc = "Contains the bi-section, column start and column end array " "coordinates."
    attrs = {
        "description": desc,
        "array_coordinate_offset": 0,
        "track_intersection": intersection.name,
    }
    dname = DatasetName.BOXLINE.value
    box_dset = out_group.create_dataset(dname, data=boxline, **kwargs)
    attach_table_attributes(box_dset, title="Boxline", attrs=attrs)
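
`swathe_edges` is not shown here; from the comment above it finds, per row, the first and last column whose satellite view angle lies within `max_angle`. A plausible sketch under that assumption (hypothetical name and return convention, not the wagl implementation):

import numpy as np


def swathe_edges_sketch(max_angle, view_angles):
    """
    Hypothetical equivalent of swathe_edges: for every row, return the
    first (istart) and one-past-last (iend) column indices whose absolute
    view angle does not exceed max_angle.
    """
    rows, _ = view_angles.shape
    istart = np.zeros(rows, dtype="int64")
    iend = np.zeros(rows, dtype="int64")

    valid = np.abs(view_angles) <= max_angle
    for row in range(rows):
        idx = np.flatnonzero(valid[row])
        if idx.size:
            istart[row] = idx[0]
            iend[row] = idx[-1] + 1
    return istart, iend


# toy usage: a 2 x 6 grid of view angles and a 9 degree threshold
angles = np.array([[12.0, 10.0, 8.0, 3.0, 8.5, 11.0],
                   [11.0, 8.0, 2.0, 2.0, 9.5, 12.0]])
print(swathe_edges_sketch(9.0, angles))   # -> (array([2, 1]), array([5, 4]))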
Example #6
def collect_ancillary(
    container,
    satellite_solar_group,
    nbar_paths,
    sbt_path=None,
    invariant_fname=None,
    vertices=(3, 3),
    out_group=None,
    compression=H5CompressionFilter.LZF,
    filter_opts=None,
):
    """
    Collects the ancillary required for NBAR and optionally SBT.
    This could be better handled by using the `opendatacube` project
    for ancillary retrieval, rather than passing directories and
    grepping filenames.

    :param container:
        An instance of an `AcquisitionsContainer` object.
        The container should consist of a single granule (or None)
        only; use the `AcquisitionsContainer.get_granule` method prior
        to calling this function.

    :param satellite_solar_group:
        The root HDF5 `Group` that contains the boxline dataset
        specified by the pathname given by:

        * DatasetName.BOXLINE

    :param nbar_paths:
        A `dict` containing the ancillary pathnames required for
        retrieving the NBAR ancillary data. Required keys:

        * aerosol_data
        * water_vapour_data
        * ozone_path
        * dem_path
        * brdf_dict

    :param sbt_path:
        A `str` containing the base directory pointing to the
        ancillary products required for the SBT workflow.

    :param invariant_fname:
        A `str` containing the file path name to the invariant
        geopotential image file.

    :param vertices:
        An integer 2-tuple indicating the number of rows and columns
        of sample-locations ("coordinator") to produce.
        The vertex columns should be an odd number.
        Default is (3, 3).

    :param out_group:
        If set to None (default) then the results will be returned
        as an in-memory hdf5 file, i.e. the `core` driver. Otherwise,
        a writeable HDF5 `Group` object.

    :param compression:
        The compression filter to use.
        Default is H5CompressionFilter.LZF

    :param filter_opts:
        A dict of key value pairs available to the given configuration
        instance of H5CompressionFilter. For example
        H5CompressionFilter.LZF has the keywords *chunks* and *shuffle*
        available.
        Default is None, which will use the default settings for the
        chosen H5CompressionFilter instance.

    :return:
        An opened, in-memory `h5py.File` object (the `core` driver) if
        `out_group` is None; otherwise None, with the results written
        into the supplied `out_group`.
    """
    # Initialise the output files
    if out_group is None:
        fid = h5py.File("ancillary.h5",
                        "w",
                        driver="core",
                        backing_store=False)
    else:
        fid = out_group

    if filter_opts is None:
        filter_opts = {}

    kwargs = compression.config(**filter_opts).dataset_compression_kwargs()
    group = fid.create_group(GroupName.ANCILLARY_GROUP.value)

    acquisition = container.get_highest_resolution()[0][0]

    boxline_dataset = satellite_solar_group[DatasetName.BOXLINE.value][:]
    coordinator = create_vertices(acquisition, boxline_dataset, vertices)
    lonlats = zip(coordinator["longitude"], coordinator["latitude"])

    desc = ("Contains the row and column array coordinates used for the "
            "atmospheric calculations.")
    attrs = {"description": desc, "array_coordinate_offset": 0}
    kwargs = compression.config(**filter_opts).dataset_compression_kwargs()
    dset_name = DatasetName.COORDINATOR.value
    coord_dset = group.create_dataset(dset_name, data=coordinator, **kwargs)
    attach_table_attributes(coord_dset, title="Coordinator", attrs=attrs)

    if sbt_path:
        collect_sbt_ancillary(
            acquisition,
            lonlats,
            sbt_path,
            invariant_fname,
            out_group=group,
            compression=compression,
            filter_opts=filter_opts,
        )

    collect_nbar_ancillary(
        container,
        out_group=group,
        compression=compression,
        filter_opts=filter_opts,
        **nbar_paths,
    )

    if out_group is None:
        return fid
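
The `out_group` handling at the top and bottom of `collect_ancillary` is a recurring pattern in these examples: with no `out_group`, results live in an in-memory HDF5 file (the `core` driver with no backing store) that is handed back to the caller; otherwise everything is written into the supplied group and nothing is returned. A stripped-down sketch of that pattern in isolation (generic names, for illustration only):

import h5py


def write_results(data, out_group=None):
    """Write `data` either into a caller-supplied group or an in-memory file."""
    if out_group is None:
        # no destination given: build a throw-away, in-memory HDF5 file
        fid = h5py.File("results.h5", "w", driver="core", backing_store=False)
    else:
        fid = out_group

    fid.create_dataset("data", data=data)

    # only hand back a file object the caller does not already own
    if out_group is None:
        return fid


# in-memory usage: the returned file behaves like any other h5py.File
fid = write_results([1, 2, 3])
print(fid["data"][:])   # -> [1 2 3]
fid.close()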