Example 1
import click
import rasterio
from click import secho

from eodatasets3 import wagl
from eodatasets3.ui import PathPath

DEFAULT_MATURITY = wagl.ProductMaturity.stable


@click.command(help=__doc__)
@click.option(
    "--level1",
    help="Optional path to the input level1 metadata doc "
    "(otherwise it will be loaded from the level1 path in the HDF5)",
    required=False,
    type=PathPath(exists=True, readable=True, dir_okay=False, file_okay=True),
)
@click.option(
    "--output",
    help="Put the output package into this directory",
    required=True,
    type=PathPath(exists=True, writable=True, dir_okay=True, file_okay=False),
)
@click.option(
    "-p",
    "--product",
    "products",
    help="Package only the given products (can specify multiple times)",
    type=click.Choice(wagl.POSSIBLE_PRODUCTS, case_sensitive=False),
    multiple=True,
)
Example 2
    """
    items = []

    def _find(name, obj):
        if obj.attrs.get("CLASS") == dataset_class:
            items.append(name)

    h5_obj.visititems(_find)
    return items


RES_GROUP_PATH = re.compile(r"(.*/RES-GROUP-\d+)/")


@click.command(help=__doc__)
@click.argument("input", type=PathPath(dir_okay=False, readable=True))
@click.option("--factor", type=int, default=100)
@click.option("--anti-alias/--no-anti-alias", is_flag=True, default=False)
def downsample(input: Path, factor: int, anti_alias: bool):
    # Fail early if h5repack cli command is not available.
    from sh import h5repack, gdal_translate

    granule_name = find_a_granule_name(input)
    fmask_image = input.with_name(f"{granule_name}.fmask.img")

    nbar_size = None
    with h5py.File(input) as f:
        image_paths = find_h5_paths(f, "IMAGE")

        for i, image_path in enumerate(image_paths):
            old_image: Optional[h5py.Dataset] = f[image_path]
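A quick way to exercise find_h5_paths is against a throwaway HDF5 file; the file name and group layout below are invented purely for illustration:

import h5py
import numpy as np

with h5py.File("scratch.h5", "w") as f:
    ds = f.create_dataset("RES-GROUP-0/NBAR/BAND-1", data=np.zeros((4, 4)))
    ds.attrs["CLASS"] = "IMAGE"  # the attribute the visititems callback matches on
    print(find_h5_paths(f, "IMAGE"))  # ['RES-GROUP-0/NBAR/BAND-1']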
Example 3
        blockxsize=block_size_x,
        blockysize=block_size_y,
        tiled=True,
    )

    with output_fp.open(**profile) as output_dataset:
        output_dataset.write(array, 1)
        # Copy gdal metadata
        output_dataset.update_tags(**input_image.tags())
        output_dataset.update_tags(1, **input_image.tags(1))


@click.command(help=__doc__)
@click.option(
    "--output-base",
    type=PathPath(file_okay=False, writable=True),
    help="The base output directory "
    "(default to same dir as input if --clean-inputs).",
)
@click.option("--zlevel",
              type=click.IntRange(0, 9),
              default=5,
              help="Deflate compression level.")
@click.option("--block-size",
              type=int,
              default=512,
              help="Compression block size (both x and y)")
@click.option(
    "--clean-inputs/--no-clean-inputs",
    default=False,
    help="Delete originals after repackaging",
Example 4
            #     relative_to_dataset_location=True,
            # )
            path_file = os.path.join(ds_path, file_location)
            p.write_measurement(band_aliases[usgs_band_id], path_file)

        p.add_accessory_file("metadata:landsat_mtl", Path(mtl_filename))

        return p.done()


@click.command(help=__doc__)
@click.option(
    "--output-base",
    help="Write output into this directory instead of with the dataset",
    required=True,
    type=PathPath(exists=True, writable=True, dir_okay=True, file_okay=False),
)
@click.option(
    "--producer",
    help="Organisation that produced the data: probably either 'ga.gov.au' or 'usgs.gov'.",
    required=False,
    default="usgs.gov",
)
@click.argument(
    "datasets", type=PathPath(exists=True, readable=True, writable=False), nargs=-1
)
@click.option(
    "--newer-than",
    type=serialise.ClickDatetime(),
    default=None,
    help="Only prepare files newer than this date",
Example 5
                    "non_epsg",
                    f"Prefer an EPSG code to a WKT when possible. (Can change CRS to 'epsg:{wkt_crs.to_epsg()}')",
                )


def _has_some_geo(dataset):
    return dataset.geometry is not None or dataset.grids or dataset.crs


@click.command(help=__doc__ + """
Paths can be both product and dataset
documents, but each product must come before
its datasets to be matched against it.
""")
@click.version_option()
@click.argument("paths", nargs=-1, type=PathPath(exists=True, readable=True))
@click.option(
    "--warnings-as-errors",
    "-W",
    "strict_warnings",
    is_flag=True,
    help="Fail if any warnings are produced",
)
@click.option(
    "--thorough",
    is_flag=True,
    help="Attempt to read the data/measurements, and check their properties match",
)
@click.option(
    "--expect-extra-measurements/--warn-extra-measurements",
Example 6
    def stats(self) -> Dict:
        """Get stats about the lookup table"""
        self.open()
        res = self._db.execute("""
            select
                count(*) as total,
                count(distinct region_code) as unique_regions
                --- count(distinct (lat1, lon1, lat2, lon2)) as unique_areas
            from regions
        """).fetchone()

        return res


@click.group("s2_regions", help=__doc__)
@click.option("--db", default=DEFAULT_DB, type=PathPath())
@click.pass_context
def cli(ctx, db: Path):
    ctx.obj = RegionLookup(db)


@cli.command("create", help="Recreate the database")
@click.option("--scan-path", default=None, type=PathPath(exists=True))
@click.option("-f", "paths_file", default=None, type=PathPath(exists=True))
@pass_obj
def cli_create(db: RegionLookup, scan_path: Path, paths_file: Path):
    if scan_path is None and paths_file is None:
        echo("Nothing specified. Scanning default NCI location.")
        scan_path = NCI_L1C_LOCATION

    if scan_path is not None:
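A minimal sketch of driving the s2_regions group in-process with click's test runner; the database path is made up, and with no --scan-path or -f the create command falls back to the default NCI location as shown above:

from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(cli, ["--db", "/tmp/regions.db", "create"])
print(result.exit_code, result.output)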
Example 7
                        name=SENTINEL_MSI_BAND_ALIASES[path.stem.replace("B", "")],
                    )

            p.add_accessory_file("metadata:product_info", product_info_path)
            p.add_accessory_file("metadata:sinergise_metadata",
                                 metadata_xml_path)
            return p.done()
    else:
        raise NotImplementedError("Unknown input file type?")


@click.command(help=__doc__)
@click.option(
    "--dataset",
    type=PathPath(),
    required=True,
    help="Path to ESA zipped dataset or Sinergise dataset directory",
)
@click.option(
    "--dataset-document",
    type=PathPath(),
    required=True,
    help="Location to output the L1C dataset document (yaml)",
)
def main(
    dataset: Path,
    dataset_document: Path,
):

    uuid, path = prepare_and_write(
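The band-alias lookup near the top of this example turns a granule image filename into a measurement name. A standalone illustration, using a hypothetical subset of the alias table:

from pathlib import Path

# Hypothetical subset of SENTINEL_MSI_BAND_ALIASES, for illustration only.
BAND_ALIASES = {"02": "blue", "03": "green", "04": "red"}

path = Path("IMG_DATA/B02.jp2")
print(BAND_ALIASES[path.stem.replace("B", "")])  # -> "blue"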
Example 8
from pathlib import Path
from typing import Iterable

import click

from eodatasets3 import serialise
from eodatasets3.model import DatasetDoc
from eodatasets3.ui import PathPath


@click.command(help=__doc__)
@click.option("-v", "--verbose", is_flag=True)
@click.option("--stac-base-url", "-u", help="Base URL of the STAC file")
@click.option("--explorer-base-url", "-e", help="Base URL of the ODC Explorer")
@click.option(
    "--validate/--no-validate",
    default=False,
    help="Validate output STAC Item against online schemas",
)
@click.argument(
    "odc_metadata_files",
    type=PathPath(exists=True, readable=True, writable=False),
    nargs=-1,
)
def run(
    verbose: bool,
    odc_metadata_files: Iterable[Path],
    stac_base_url: str,
    explorer_base_url: str,
    validate: bool,
):
    for input_metadata in odc_metadata_files:
        dataset = serialise.from_path(input_metadata)

        name = input_metadata.stem.replace(".odc-metadata", "")
        output_path = input_metadata.with_name(f"{name}.stac-item.json")
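The filename handling at the end of this example derives the STAC item path from the ODC metadata path; shown here on a made-up file name:

from pathlib import Path

input_metadata = Path("/data/ga_ls8c_ard_3-0-0_090084_2020-01-01_final.odc-metadata.yaml")
name = input_metadata.stem.replace(".odc-metadata", "")
print(input_metadata.with_name(f"{name}.stac-item.json"))
# /data/ga_ls8c_ard_3-0-0_090084_2020-01-01_final.stac-item.json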