Esempio n. 1
0
    def process(self, input_fc: FeatureCollection) -> FeatureCollection:
        """
        Run raster sharpening on every dataset described by the input
        feature collection and build a matching output collection.

        Args:
            input_fc: A GeoJSON FeatureCollection describing all input datasets

        Returns:
            A GeoJSON FeatureCollection describing all output datasets
        """
        logger.debug("Using sharpening strength: %s", self.strength)

        ensure_data_directories_exist()

        out_features: List[Feature] = []
        for feature in input_fc.features:
            names_and_paths = get_in_out_feature_names_and_paths(feature)
            in_name, out_name, in_path, out_path = names_and_paths

            logger.debug("Input file: %s", in_name)
            logger.debug("Output file: %s", out_name)

            # Actual raster processing; reads in_path, writes out_path.
            self.sharpen_raster(in_path, out_path)

            sharpened = Feature(
                geometry=feature["geometry"], bbox=feature["bbox"])
            sharpened["properties"] = self.get_metadata(feature)
            set_data_path(sharpened, out_name)
            out_features.append(sharpened)

            logger.debug("File %s was sharpened.", out_name)
        logger.debug("DONE!")
        return FeatureCollection(out_features)
Esempio n. 2
0
def fixture_superresolution_clip():
    """Build a Superresolution instance configured for a clipped test AOI."""
    ensure_data_directories_exist()
    clip_bbox = "50.550671,26.15174,50.596161,26.19195"
    return Superresolution("a.SAFE", clip_bbox, True, "/tmp/output")
Esempio n. 3
0
def safe_file():
    """
    Create a dummy .SAFE file and the dummy outputs that would exist
    after applying pre-processing steps with SNAP.

    :return: A DummySafeFile bundling every created path plus the test
        feature collection loaded from mock_data/data.json.
    """
    # pylint: disable=too-many-locals
    ensure_data_directories_exist()

    # Set up the whole dummy input
    input_path = Path("/tmp/input")
    safe_path = input_path / "0a99c5a1-75c0-4a0d-a7dc-c2a551936be4"
    if safe_path.exists():
        shutil.rmtree(str(safe_path))
    safe_path.mkdir()

    safe_file_path = (
        safe_path / "S1B_IW_GRDH_1SDV_"
        "20190220T050359_20190220T050424_015025_01C12F_4EA4.SAFE")
    safe_file_path.mkdir()

    # Directory containing this test module; pathlib replaces the
    # os.getcwd()/os.path.dirname/realpath dance.
    location = Path(__file__).resolve().parent

    with open(location / "mock_data/data.json", "rb") as f_p:
        test_featurecollection = geojson.load(f_p)
    test_feature = test_featurecollection.features[0]

    # An empty manifest is enough for the code under test.
    manifest_path = safe_file_path / "manifest.safe"
    manifest_path.write_text("")

    measurement_file_path = safe_file_path / "measurement"
    measurement_file_path.mkdir()

    vh_file = (measurement_file_path / "s1b-iw-grd-vh-"
               "20190220t050359-20190220t050424-015025-01c12f-002.tiff")
    vv_file = (measurement_file_path / "s1b-iw-grd-vv-"
               "20190220t050359-20190220t050424-015025-01c12f-001.tiff")

    make_dummy_raster_file(vh_file)
    make_dummy_raster_file(vv_file)

    test_safe_file = DummySafeFile(
        safe_path,
        safe_file_path,
        manifest_path,
        measurement_file_path,
        vh_file,
        vv_file,
        test_featurecollection,
        test_feature,
    )

    # Intermediate per-polarisation rasters as SNAP would leave them
    # next to the input, before being moved to the output directory.
    output_file_vv_before_move = Path(f"/tmp/input/{safe_file_path.stem}_vv.tif")
    make_dummy_raster_file(output_file_vv_before_move)

    output_file_vh_before_move = Path(f"/tmp/input/{safe_file_path.stem}_vh.tif")
    make_dummy_raster_file(output_file_vh_before_move)

    out_path = Path("/tmp/output/0a99c5a1-75c0-4a0d-a7dc-c2a551936be4")
    if out_path.exists():
        shutil.rmtree(str(out_path))
    out_path.mkdir()
    output_file_vv = out_path / "vv.tif"
    make_dummy_raster_file(output_file_vv)

    output_file_vh = out_path / "vh.tif"
    make_dummy_raster_file(output_file_vh)

    return test_safe_file
Esempio n. 4
0
def safe_files():
    """
    Create two dummy .SAFE files and the dummy outputs that would exist
    after applying pre-processing steps with SNAP.

    :return: A DummySafeFile for the LAST feature processed (all files
        for every feature are created on disk regardless).
    """
    # pylint: disable=too-many-locals
    ensure_data_directories_exist()

    location = os.path.realpath(
        os.path.join(os.getcwd(), os.path.dirname(__file__)))

    with open(os.path.join(location, "mock_data/two_data.json"),
              "rb") as f_p:
        test_featurecollection = geojson.load(f_p)

    # Set up the whole dummy input
    input_path = Path("/tmp/input")

    for feature in test_featurecollection.features:
        uid = feature.id
        s1_id = feature.properties["identification"]["externalId"] + ".SAFE"

        safe_path = input_path / uid
        if safe_path.exists():
            shutil.rmtree(str(safe_path))
        safe_path.mkdir()

        safe_file_path = safe_path / s1_id
        safe_file_path.mkdir()

        # Empty manifest is sufficient for the code under test.
        manifest_path = safe_file_path / "manifest.safe"
        manifest_path.write_text("")

        measurement_file_path = safe_file_path / "measurement"
        measurement_file_path.mkdir()

        # Measurement names reuse the scene id, lower-cased and
        # dash-separated, with the mission prefix (17 chars) stripped.
        scene_part = s1_id.lower().replace("_", "-")[17:]
        vh_file = measurement_file_path / Path(
            "s1b-iw-grd-vh-" "%s-002.tiff" % scene_part)
        vv_file = measurement_file_path / Path(
            "s1b-iw-grd-vv-" "%s-001.tiff" % scene_part)

        make_dummy_raster_file(vh_file)
        make_dummy_raster_file(vv_file)

        test_fc = DummySafeFile(
            safe_path,
            safe_file_path,
            manifest_path,
            measurement_file_path,
            vh_file,
            vv_file,
            test_featurecollection,
            feature,
        )

        # Intermediate rasters as SNAP would leave them beside the input.
        vv_before_move = Path("/tmp/input/%s_%s.tif" %
                              (safe_file_path.stem, "vv"))
        make_dummy_raster_file(vv_before_move)

        vh_before_move = Path("/tmp/input/%s_%s.tif" %
                              (safe_file_path.stem, "vh"))
        make_dummy_raster_file(vh_before_move)

        out_path = Path("/tmp/output/%s" % uid)
        if out_path.exists():
            shutil.rmtree(str(out_path))
        out_path.mkdir()
        make_dummy_raster_file(out_path / "vv.tif")
        make_dummy_raster_file(out_path / "vh.tif")

    return test_fc
Esempio n. 5
0
def fixture():
    """Make sure the data directories exist before the test runs."""
    ensure_data_directories_exist()
Esempio n. 6
0
    def run():
        """
        Download the file referenced by the job query from S3 into
        /tmp/output and save a GeoJSON FeatureCollection describing it.

        When the S3 object does not exist (404), the error is logged and
        an empty FeatureCollection is saved; any other S3 client error is
        re-raised.
        """
        query: STACQuery = load_query()
        ensure_data_directories_exist()
        query.set_param_if_not_exists(
            "zoom_level", AWSAspectum.DEFAULT_ZOOM_LEVEL
        )
        output_features: List[Feature] = []

        feature_id: str = str(uuid.uuid4())
        out_path = f'/tmp/output/{feature_id}.tif'

        logger.debug(f"File output will be {out_path}")

        # Build the AOI polygon from the query bbox corners.
        poly = Polygon([
            [query.bbox[0], query.bbox[1]],
            [query.bbox[2], query.bbox[1]],
            [query.bbox[2], query.bbox[3]],
            [query.bbox[0], query.bbox[3]]
        ])
        # Round-trip through JSON to get plain dict/list geometry.
        geom = json.loads(json.dumps(mapping(poly)))

        feature = Feature(
            id=feature_id,
            bbox=query.bbox,
            geometry=geom,
            properties={
                'up42.data.aoiclipped': f'{feature_id}.tif'
            }
        )

        s3 = boto3.client(
            's3',
            aws_access_key_id=AWSAspectum.AWS_ACCESS_KEY,
            aws_secret_access_key=AWSAspectum.AWS_SECRET_ACCESS_KEY,
            region_name=AWSAspectum.AWS_REGION,
        )

        try:
            response = s3.head_object(
                Bucket=AWSAspectum.BUCKET_NAME,
                Key=query.file_path
            )
            logger.debug(
                f'[FILE SIZE ON S3] - '
                f'{AWSAspectum.__convert_size(response["ContentLength"])}'
            )
            with open(out_path, 'wb') as f:
                s3.download_fileobj(
                    AWSAspectum.BUCKET_NAME, query.file_path, f)
            output_features.append(feature)
            # BUG FIX: this size log used to run unconditionally after the
            # try/except, so a 404 (file never downloaded) made
            # os.path.getsize raise FileNotFoundError and masked the real
            # missing-object error. It now only runs after a successful
            # download.
            logger.debug(
                f'[FILE SIZE AFTER DOWNLOAD] - '
                f'{AWSAspectum.__convert_size(os.path.getsize(out_path))}'
            )
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "404":
                logger.error("The object does not exist.")
            else:
                raise

        result = FeatureCollection(output_features)

        logger.debug("Saving %s result features", len(result.get("features")))
        save_metadata(result)