def test_s1_polarisations(self):
    product_dv = product_name.S1Product(
        "S1B_IW_SLC__1SDV_20170502T231339_20170502T231407_005426_009835_C052")
    product_sv = product_name.S1Product(
        "S1B_IW_SLC__1SSV_20170502T231339_20170502T231407_005426_009835_C052")

    self.assertEqual(['vh', 'vv'], product_dv.polarisations())
    self.assertEqual(['vv'], product_sv.polarisations())
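
# The mapping asserted above can be read off the two-letter polarisation code at
# the end of the product type ("SLC__1SDV" -> "DV", "SLC__1SSV" -> "SV"). A
# minimal sketch of that lookup follows; the helper name, the DH/SH rows, and
# the exact return order are assumptions, not the module's real implementation.
_POLARISATION_CODES = {
    "DV": ["vh", "vv"],   # dual polarisation, VV + VH
    "SV": ["vv"],         # single polarisation, VV
    "DH": ["hh", "hv"],   # dual polarisation, HH + HV (assumed by analogy)
    "SH": ["hh"],         # single polarisation, HH (assumed by analogy)
}


def polarisations_sketch(product_type):
    """Return the polarisations implied by a product type such as 'SLC__1SDV'."""
    return _POLARISATION_CODES.get(product_type[-2:], [])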

def test_empty(self):
    expected = product_name.S1Product("")
    expected.product_name = ""
    expected.satellite = ""
    expected.SAR_mode = ""
    expected.product_type = ""
    expected.start_date = ""
    expected.start_time = ""
    expected.stop_date = ""
    expected.stop_time = ""
    expected.orbit = ""
    expected.image = ""

    self.assertEqual(product_name.S1Product(""), expected)

def test_mangled(self):
    expected = product_name.S1Product("")
    expected.product_name = "fgsfdgsdg"
    expected.satellite = "fgs"
    expected.SAR_mode = "dg"
    expected.product_type = "dg"
    expected.start_date = ""
    expected.start_time = ""
    expected.stop_date = ""
    expected.stop_time = ""
    expected.orbit = ""
    expected.image = ""

    res = product_name.S1Product("fgsfdgsdg")
    self.assertEqual(res, expected)

def test_create_s1_swath_dict(self):
    product = product_name.S1Product(
        "S1B_IW_SLC__1SDV_20170502T231339_20170502T231407_005426_009835_C052")

    result = product_name.create_s1_swath_dict("input", 2, 2,
                                               "/data1/pipeline", product,
                                               "vv", "Orb", "dim")
    self.assertEqual(
        {
            "input2":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw1_vv.dim",
            "input4":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw2_vv.dim",
            "input6":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw3_vv.dim",
        }, result)

    result = product_name.create_s1_swath_dict("input", 1, 2,
                                               "/data1/pipeline", product,
                                               "vh", "Orb", "dim")
    self.assertEqual(
        {
            "input1":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw1_vh.dim",
            "input3":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw2_vh.dim",
            "input5":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw3_vh.dim",
        }, result)

def test_create_s1_swath_dict_list_of_polarisations(self):
    product = product_name.S1Product(
        "S1B_IW_SLC__1SDV_20170502T231339_20170502T231407_005426_009835_C052")
    result = product_name.create_s1_swath_dict(
        "input",
        1,
        2,  # in this mode it shouldn't make any difference
        "/data1/pipeline",
        product,
        ["vh", "vv"],
        "Orb",
        "dim")
    self.assertEqual(
        {
            "input1":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw1_vh.dim",
            "input2":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw1_vv.dim",
            "input3":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw2_vh.dim",
            "input4":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw2_vv.dim",
            "input5":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw3_vh.dim",
            "input6":
            f"/data1/pipeline{os.sep}S1_20170502T231339_Orb_iw3_vv.dim",
        }, result)
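
# A minimal sketch of a function that would satisfy the two swath-dict tests
# above. Parameter names, the hard-coded "S1" prefix (which may really come from
# the product's satellite field), and the helper name are assumptions; the real
# product_name.create_s1_swath_dict may be implemented differently.
def create_s1_swath_dict_sketch(key_prefix, start, step, working_dir, product,
                                polarisation, suffix, extension):
    result = {}
    base = f"{working_dir}{os.sep}S1_{product.start_date}T{product.start_time}"
    if isinstance(polarisation, list):
        # List mode: keys are numbered sequentially while interleaving swath and
        # polarisation; start/step are ignored (see the test comment).
        index = 1
        for swath in (1, 2, 3):
            for pol in polarisation:
                result[f"{key_prefix}{index}"] = (
                    f"{base}_{suffix}_iw{swath}_{pol}.{extension}")
                index += 1
    else:
        # Single-polarisation mode: keys are start, start + step, start + 2*step.
        for i, swath in enumerate((1, 2, 3)):
            result[f"{key_prefix}{start + i * step}"] = (
                f"{base}_{suffix}_iw{swath}_{polarisation}.{extension}")
    return result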

def read_product_list(_file):
    _result = []
    with open(_file, 'r') as f:
        for line in f:
            trimmed = line.strip()
            # Validate the stripped line so blank and whitespace-only lines are skipped.
            if trimmed and product_name.validate(trimmed):
                _result.append(product_name.S1Product(trimmed))
    return _result
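
# Hypothetical usage of read_product_list; the file path below is an example,
# not a real pipeline location:
#
#     products = read_product_list("/data1/pipeline/product_list.txt")
#     logging.info(f"Loaded {len(products)} products")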

def test_valid(self):
    expected = product_name.S1Product("")
    expected.product_name = "S1B_IW_SLC__1SDV_20170502T231339_20170502T231407_005426_009835_C052"
    expected.satellite = "S1B"
    expected.SAR_mode = "IW"
    expected.product_type = "SLC__1SDV"
    expected.start_date = "20170502"
    expected.start_time = "231339"
    expected.stop_date = "20170502"
    expected.stop_time = "231407"
    expected.orbit = "005426"
    expected.image = "009835"

    self.assertEqual(
        product_name.S1Product(
            "S1B_IW_SLC__1SDV_20170502T231339_20170502T231407_005426_009835_C052"
        ), expected)

def test_create_result_name(self):
    self.assertEqual(
        f"/foo/bar{os.sep}S1_20170502T231339_test_vh.tif",
        product_name.create_result_name(
            "/foo/bar",
            product_name.S1Product(
                "S1B_IW_SLC__1SDV_20170502T231339_20170502T231407_005426_009835_C052"
            ), "vh", "test", "tif"),
    )
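
# The naming convention exercised above, restated as a minimal sketch. The
# parameter names and the literal "S1" prefix are assumptions based solely on
# the assertion; the real product_name.create_result_name may differ.
def create_result_name_sketch(output_dir, product, polarisation, suffix,
                              extension):
    return (f"{output_dir}{os.sep}S1_{product.start_date}T{product.start_time}"
            f"_{suffix}_{polarisation}.{extension}")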

def test_create_dim_name(self):
    # first and last are the same product here, so the expected dim name
    # repeats the same start timestamp twice.
    first_product = product_name.S1Product(
        "S1B_IW_SLC__1SDV_20170502T231339_20170502T231407_005426_009835_C052"
    )
    last_product = product_name.S1Product(
        "S1B_IW_SLC__1SDV_20170502T231339_20170502T231407_005426_009835_C052"
    )

    _config.set("Dirs", "working", "/foo/bar")

    coherence_chain = CoherenceChain("test_input", "test_output",
                                     first_product, last_product, True,
                                     True)

    result = coherence_chain._create_dim_name("vh", "test")

    self.assertEqual(
        f"/foo/bar{os.sep}S1_20170502T231339_20170502T231339_test_vh.dim",
        result)

def validate_input(path, first, last):
    """
    Check that the input images are in the expected format and contain some data.

    All checks are performed even if an earlier check fails, so that the user is
    told everything that is wrong with the images in one pass.

    :param path: path to the folder containing the input zip files
    :param first: name of the first product
    :param last: name of the last product
    :return: True if both names point to valid input products.
    """
    product_first = product_name.S1Product(first)
    product_last = product_name.S1Product(last)

    path_first = product_name.zip_manifest_path(path, product_first)
    path_last = product_name.zip_manifest_path(path, product_last)
    ok = True

    # Check they are the same satellite
    if product_first.satellite != product_last.satellite:
        logging.error("The two images are from different satellites.")
        ok = False

    # Check the acquisition dates: a valid pair is between 12 and 24 days apart.
    days_apart = (product_last.start_timestamp() -
                  product_first.start_timestamp()).days
    if days_apart < 12 or days_apart > 24:
        logging.error(
            "The two images are less than 12 days apart or more than 24 days apart."
        )
        ok = False

    # Check orbit numbers are from the same relative orbit
    if product_first.relative_orbit() != product_last.relative_orbit():
        logging.error("The two images are from different relative orbits.")
        ok = False

    # set custom logging here to info because gdal and rasterio are very chatty at debug level
    with rasterio.Env(), \
            rasterio.open(path_first) as src_first, \
            rasterio.open(path_last) as src_last:
        ok_first, bounds_first = validate_single_file(src_first, product_first)
        if not ok_first:
            logging.error("First image was invalid.")
            ok = False

        ok_last, bounds_last = validate_single_file(src_last, product_last)
        if not ok_last:
            logging.error("Last image was invalid.")
            ok = False

        # Now check that the images overlap.
        # Sum up the bounds from the sub images to get the actual bounds.
        if not data_validation.overlap(bounds_first, bounds_last):
            logging.error(f"Images do not overlap.")
            logging.error(
                f"first wkt: {data_validation.bounding_box_to_wkt(bounds_first)}"
            )
            logging.error(
                f"last wkt: {data_validation.bounding_box_to_wkt(bounds_last)}"
            )
            ok = False
        else:
            # TODO: validate that they overlap by at least 50%
            pass
    return ok
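
# The acceptance window enforced in validate_input, restated as a standalone
# predicate for clarity. The inclusive bounds are an assumption read from the
# error message ("less than 12 ... or more than 24"); this helper is purely
# illustrative and is not called by the pipeline.
def _acceptable_day_gap(days):
    # e.g. _acceptable_day_gap(12) -> True, _acceptable_day_gap(36) -> False
    return 12 <= days <= 24
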
            result.append(lambda: ard.delete_dim(
                self._create_dim_name(
                    _polarisation,
                    "Orb_stack_Ifg_Deb_mrg_DInSAR_Flt_TC",
                )))
        if self.gzip:
            result.append(lambda: ard.gzip_file(
                create_result_name(
                    self.output_dir, self.products, _polarisation,
                    "Orb_stack_Ifg_Deb_mrg_DInSAR_Flt_TC", ".tif"),
                create_result_name(
                    self.output_dir, self.products, _polarisation,
                    "Orb_stack_Ifg_Deb_mrg_DInSAR_Flt_TC", ".tif.gz"),
            ))

            if self.clean:
                result.append(lambda: ard.delete_file(
                    create_result_name(
                        self.output_dir, self.products, _polarisation,
                        "Orb_stack_Ifg_Deb_mrg_DInSAR_Flt_TC", ".tif")))
        return result


if __name__ == "__main__":
    args = parse_args()
    first_product = product_name.S1Product(args.first)
    last_product = product_name.S1Product(args.last)
    coherence = CoherenceChain(args.input, args.output, first_product,
                               last_product, args.gzip, args.clean)
    ard.process_chain(coherence.build_chain(), coherence.name())
    s3_client = s3_utils.S3Utils()

    with open(args.targets) as f:
        count = 0
        for line in f:
            first, last = split_product_line(line)
            if not first or not last:
                continue

            if not product_name.validate(first) or not product_name.validate(last):
                logging.error(f"Could not validate {first} or {last} as a product name. Skipping")
                continue

            count = count + 1
            logging.info(f"Processing {first}, {last}")
            first_product = product_name.S1Product(first)
            last_product = product_name.S1Product(last)

            # TODO: make this sort itself out so that if first and last are backwards it flips them round.
            try:
                download_product(s3_client, args, download_dir, first_product)
                download_product(s3_client, args, download_dir, last_product)
            except botocore.exceptions.ClientError as e:
                logging.error(f"could not fetch products from s3 {e}")
                continue

            if not validate_coherence_input.validate_input(download_dir, first, last):
                logging.info(f"inputs {first} {last} did not pass validation. Skipping")
                continue

        chain = [
            lambda: ard.convert_to_tif(stage2_output_vh, tif_vh),
            lambda: ard.convert_to_tif(stage2_output_vv, tif_vv),
            # Clean up the stage2 dim files if requested
            lambda: ard.delete_dim(stage2_output_vh) if self.clean else None,
            lambda: ard.delete_dim(stage2_output_vv) if self.clean else None,
            # Gzip the results if requested
            lambda: ard.gzip_file(tif_vh, gzip_vh) if self.gzip else None,
            lambda: ard.gzip_file(tif_vv, gzip_vv) if self.gzip else None,
            # Clean up the tif files that have been compressed if requested.
            lambda: ard.delete_file(tif_vh)
            if self.gzip and self.clean else None,
            lambda: ard.delete_file(tif_vv)
            if self.gzip and self.clean else None,
        ]

        return chain

    def _output(self, extension):
        return product_name.create_polarisation_names(
            self.output_dir, self.product, "Orb_Cal_Deb_ML_Spk_TC_dB",
            extension)


if __name__ == "__main__":
    args = parse_args()
    product = product_name.S1Product(args.product)
    intensity = IntensityChain(args.input, args.output, product, args.gzip,
                               args.clean)
    process = intensity.build_chain()
    ard.process_chain(process, intensity.name())