Code example #1
def _test_attributes(test_data, safe_path):
    """Compare dictionary attributes with given SAFE file."""
    with s2reader.open(safe_path) as safe:
        assert safe is not None
        assert safe.product_start_time == test_data["product_start_time"]
        assert safe.product_stop_time == test_data["product_stop_time"]
        assert safe.generation_time == test_data["generation_time"]
        assert len(safe.granules) == test_data["num_of_granules"]
        assert safe.footprint.is_valid
        assert safe.processing_level == test_data["processing_level"]
        assert safe.product_type == test_data["product_type"]
        assert safe.spacecraft_name == test_data["spacecraft_name"]
        assert safe.sensing_orbit_number == test_data["sensing_orbit_number"]
        assert safe.sensing_orbit_direction == test_data[
            "sensing_orbit_direction"]
        for granule_path in safe.granule_paths("02"):
            assert isinstance(granule_path, str)
        for granule in safe.granules:
            assert granule.srid.startswith("EPSG")
            assert isinstance(granule.metadata_path, str)
            if granule.pvi_path:
                assert isinstance(granule.pvi_path, str)
            assert isinstance(granule.cloud_percent, float)
            assert granule.footprint.is_valid
            assert granule.cloudmask.is_valid
            if not granule.cloudmask.is_empty:
                assert granule.cloudmask.intersects(granule.footprint)
            assert granule.nodata_mask.is_valid
            if not granule.nodata_mask.is_empty:
                assert granule.nodata_mask.intersects(granule.footprint)
            assert isinstance(granule.band_path(2), str)
            assert isinstance(granule.band_path("02", for_gdal=True), str)
Code example #2
    def test_path(self, path):
        if not HAVE_S2READER:
            return False

        try:
            with s2reader.open(path):
                pass
            return True
        except IOError:
            return False
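
HAVE_S2READER is referenced above but never defined in the snippet. A minimal sketch of the optional-import guard it presumably corresponds to (the guard itself is an assumption; only the flag name comes from the snippet):

# Presumed optional-import guard (assumption; not part of the original snippet).
try:
    import s2reader
    HAVE_S2READER = True
except ImportError:
    HAVE_S2READER = False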
Code example #3
def update_original_package_location(folder):
    ps = PostgresStorage()
    safe_pkgs = os.listdir(folder)
    print(safe_pkgs)
    print("searching SAFE packages from the '" + folder + "' directory")
    for f in safe_pkgs:
        print("------> " + f)
        if f.endswith(".SAFE"):
            with s2reader.open(os.path.join(folder, f)) as safe_pkg:
                for g in safe_pkg.granules:
                    ps.update_original_package_location(f, g.granule_identifier)
Code example #4
File: test.py  Project: ungarj/s2reader
def main(args):
    scriptdir = os.path.dirname(os.path.realpath(__file__))
    example = "data/S2A_OPER_PRD_MSIL1C_PDMC_20160905T104813_R002_V20160905T005712_20160905T010424.SAFE"
    example_safe = os.path.join(scriptdir, example)

    with s2reader.open(example_safe) as testfile:
        print(testfile)
        print(testfile.product_start_time)
        print(testfile.product_stop_time)
        print(testfile.generation_time)
        print(testfile.footprint)
Code example #5
def _test_attributes(test_data, safe_path):
    """Compare dictionary attributes with given SAFE file."""
    with s2reader.open(safe_path) as safe:
        assert safe is not None
        assert safe.product_start_time == test_data["product_start_time"]
        assert safe.product_stop_time == test_data["product_stop_time"]
        assert safe.generation_time == test_data["generation_time"]
        assert len(safe.granules) == test_data["num_of_granules"]
        assert safe.footprint.is_valid
        assert safe.processing_level == test_data["processing_level"]
        assert safe.product_type == test_data["product_type"]
        assert safe.spacecraft_name == test_data["spacecraft_name"]
        assert safe.sensing_orbit_number == test_data["sensing_orbit_number"]
        assert safe.sensing_orbit_direction == test_data["sensing_orbit_direction"]
        for granule_path in safe.granule_paths("02"):
            assert isinstance(granule_path, str)
        for granule in safe.granules:
            assert granule.srid.startswith("EPSG")
            assert isinstance(granule.metadata_path, str)
            if granule.pvi_path:
                assert isinstance(granule.pvi_path, str)
            if granule.tci_path:
                assert isinstance(granule.tci_path, str)
            assert isinstance(granule.cloud_percent, float)
            assert granule.footprint.is_valid
            assert granule.cloudmask.is_valid
            if not granule.cloudmask.is_empty:
                assert granule.cloudmask.intersects(granule.footprint)
            assert granule.nodata_mask.is_valid
            if not granule.nodata_mask.is_empty:
                assert granule.nodata_mask.intersects(granule.footprint)
            assert isinstance(granule.band_path(2), str)
            for bid in BAND_IDS:
                abs_path = granule.band_path(bid, absolute=True)
                assert os.path.isabs(abs_path)
                rel_path = granule.band_path(bid, absolute=False)
                abs_gdal_path = granule.band_path(
                    bid, absolute=True, for_gdal=True
                )
                rel_gdal_path = granule.band_path(
                    bid, absolute=False, for_gdal=True
                )
                if safe.is_zip:
                    assert abs_gdal_path.startswith("/vsizip/")
                    assert rel_gdal_path.startswith("/vsizip/")
                    assert rel_path in safe._zipfile.namelist()
                else:
                    assert os.path.isfile(rel_path)
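
BAND_IDS is used above without being defined in the snippet. A sketch of an equivalent definition, assuming it matches the 13 Sentinel-2 band identifiers, with band 8A between 08 and 09, as the zip(range(1, 14), BAND_IDS) pairing in example #8 implies:

# Assumed equivalent of the BAND_IDS constant used above: the 13
# Sentinel-2 band identifiers in sensor order (assumption).
BAND_IDS = [
    "01", "02", "03", "04", "05", "06", "07",
    "08", "8A", "09", "10", "11", "12"
]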
Code example #6
def main(args=None):
    """Print metadata as JSON strings."""
    if args is None:
        args = sys.argv[1:]
    parser = argparse.ArgumentParser()
    parser.add_argument("safe_file", type=str, nargs='+')
    parser.add_argument("--granules", action="store_true")
    parsed = parser.parse_args(args)

    pp = pprint.PrettyPrinter()
    for safe_file in parsed.safe_file:
        with s2reader.open(safe_file) as safe_dataset:
            if parsed.granules:
                pp.pprint(
                    dict(
                        safe_file=safe_file,
                        granules=[
                            dict(
                                granule_identifier=granule.granule_identifier,
                                footprint=str(granule.footprint),
                                srid=granule.srid,
                                # cloudmask_polys=str(granule.cloudmask),
                                # nodata_mask=str(granule.nodata_mask),
                                cloud_percent=granule.cloud_percent
                                )
                            for granule in safe_dataset.granules
                            ]
                        )
                    )
            else:
                pp.pprint(
                    dict(
                        safe_file=safe_file,
                        product_start_time=safe_dataset.product_start_time,
                        product_stop_time=safe_dataset.product_stop_time,
                        generation_time=safe_dataset.generation_time,
                        footprint=str(safe_dataset.footprint),
                        granules=len(safe_dataset.granules),
                        granules_srids=list(set([
                            granule.srid
                            for granule in safe_dataset.granules
                            ]))
                        )
                    )
            print "\n"
Code example #7
def main(args=None):
    """Generate EO O&M XML metadata."""
    if args is None:
        args = sys.argv[1:]
    parser = argparse.ArgumentParser()
    parser.add_argument("filename", nargs=1)
    parser.add_argument(
        "--granule-id",
        dest="granule_id",
        help=("Optional. Specify a granule to export metadata from."))
    parser.add_argument(
        "--single-granule",
        dest="single_granule",
        action="store_true",
        default=False,
        help=(
            "When only one granule is contained in the package, include product "
            "metadata from this one granule. Fails when more than one granule "
            "is contained."))
    parser.add_argument(
        "--out-file",
        "-f",
        dest="out_file",
        help=(
            "Specify an output file to write the metadata to. By default, the "
            "XML is printed on stdout."))
    parser.add_argument(
        "--resolution",
        "-r",
        dest="resolution",
        default="10",
        help=(
            "Only produce metadata for bands of this resolution (in meters). "
            "Default is 10."))

    parsed = parser.parse_args(args)

    try:
        safe_pkg = s2reader.open(parsed.filename[0])
    except IOError as e:
        parser.error('Could not open SAFE package. Error was "%s"' % e)
Code example #8
    def __init__(self, input_params, **kwargs):
        """Initialize."""
        self.path = input_params["path"]
        self.pyramid = input_params["pyramid"]
        self.pixelbuffer = input_params["pixelbuffer"]
        self.crs = self.pyramid.crs
        self.srid = self.pyramid.srid
        with s2reader.open(self.path) as s2dataset:
            self.s2metadata = {
                "path": s2dataset.path,
                "footprint": s2dataset.footprint,
                "granules": [
                    {
                        "id": granule.granule_identifier,
                        "datastrip_id": granule.datastrip_identifier,
                        "srid": granule.srid,
                        "footprint": reproject_geometry(
                            granule.footprint,
                            src_crs=CRS.from_epsg(4326),
                            dst_crs=self.crs),
                        "nodatamask": granule.nodata_mask,
                        "cloudmask": granule.cloudmask,
                        "band_path": {
                            index: granule.band_path(
                                _id, for_gdal=True, absolute=True)
                            for index, _id in zip(range(1, 14), BAND_IDS)
                        }
                    }
                    for granule in s2dataset.granules
                ]
            }
Code example #9
File: ssat2_metadata.py  Project: Schpidi/evo-odas
def main(args):
    if len(args) > 1:
        raise Error("too many parameters!")
    print "+++++ Sentinel2 User Product filename: '" + args[0] + "'"

    storage = PostgresStorage()
    tr = TemplatesResolver()

    with s2reader.open(args[0]) as safe_pkg:
        #mu.print_metadata(safe_pkg)
        for granule in safe_pkg.granules:
            print "--- Processing granule: '" + granule.granule_identifier + "'"
            if (storage.check_granule_identifier(granule.granule_identifier)):
                print "WARNING: Granule '" + granule.granule_identifier + "' already exist, skipping it..."
                continue
            (search_params, other_metadata,
             product_abstract_metadata) = pgmap.collect_sentinel2_metadata(
                 safe_pkg, granule)
            htmlAbstract = tr.generate_product_abstract(
                product_abstract_metadata)
            xml_doc = tr.generate_sentinel2_product_metadata(
                du.join(search_params, other_metadata))
            try:
                search_params['htmlDescription'] = htmlAbstract
                id = storage.persist_product_search_params(
                    du.wrap_keys_among_brackets(search_params), "SENTINEL2")
                storage.persist_thumb(mu.create_thumb(granule.pvi_path), id)
            except LookupError:
                print "ERROR: No related collection found!"
                break
            storage.persist_product_metadata(xml_doc, id)
            ogc_bbox = storage.get_product_OGC_BBOX(granule.granule_identifier)
            storage.persist_ogc_links(
                pgmap.create_ogc_links_dict(
                    tr.generate_ogc_links(
                        pgmap.ogc_links_href_dict(ogc_bbox, id))), id)
Code example #10
    def execute(self, context):
        products = list()
        ids = []

        if self.input_product is not None:
            log.info("Processing single product: " + self.input_product)
            products.append(self.input_product)
        elif self.get_inputs_from is not None:
            log.info("Getting inputs from: " + self.get_inputs_from)
            inputs = context['task_instance'].xcom_pull(
                task_ids=self.get_inputs_from, key=XCOM_RETURN_KEY)
            products.extend(inputs)
        else:
            self.downloaded_products = context['task_instance'].xcom_pull(
                'dhus_download_task', key='downloaded_products')
            if self.downloaded_products:
                products = list(self.downloaded_products.keys())
                log.info(self.downloaded_products)
                for p in self.downloaded_products:
                    ids.append(self.downloaded_products[p]["id"])
                print("downloaded products keys :",
                      list(self.downloaded_products.keys())[0])

        if products is None or len(products) == 0:
            log.info("Nothing to process.")
            return

        thumbnail_paths = list()
        for product in products:
            log.info("Processing {}".format(product))
            with s2reader.open(product) as safe_product:
                for granule in safe_product.granules:
                    try:
                        zipf = zipfile.ZipFile(product, 'r')
                        # ZipFile.read() takes only the member name; the
                        # original passed 'r' as if it were a mode argument.
                        imgdata = zipf.read(granule.pvi_path)
                        img = Blob(imgdata)
                        img = Image(img)
                        img.scale(self.thumb_size_x + 'x' + self.thumb_size_y)
                        img.quality(80)
                        thumbnail_name = product.strip(
                            ".zip") + "/thumbnail.jpg"
                        if os.path.isdir(product.strip(".zip")):
                            product_rmdir_cmd = "rm -r {} ".format(
                                product.strip(".zip"))
                            product_rmdir_BO = BashOperator(
                                task_id="product_rmdir_{}".format(
                                    product.split("/")[-1].strip(".zip")),
                                bash_command=product_rmdir_cmd)
                            product_rmdir_BO.execute(context)
                        product_mkdir_cmd = "mkdir {} ".format(
                            product.strip(".zip"))
                        product_mkdir_BO = BashOperator(
                            task_id="product_mkdir_{}".format(
                                product.split("/")[-1].strip(".zip")),
                            bash_command=product_mkdir_cmd)
                        product_mkdir_BO.execute(context)
                        if self.output_dir is not None:
                            thumbnail_name = os.path.join(
                                self.output_dir, "thumbnail.jpeg")
                            log.info("Writing thumbnail to {}".format(
                                thumbnail_name))
                            img.write(thumbnail_name)
                        else:
                            img.write(str(thumbnail_name))
                        thumbnail_paths.append(thumbnail_name)
                        # XCOM expects a single file so we push it here:
                        context['task_instance'].xcom_push(
                            key='thumbnail_jpeg_abs_path',
                            value=str(thumbnail_name))
                        context['task_instance'].xcom_push(key='ids',
                                                           value=ids)
                        break
                    except BaseException as e:
                        log.error(
                            "Unable to extract thumbnail from {}: {}".format(
                                product, e))
        return thumbnail_paths
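
The snippet above relies on several names it never imports. A sketch of the presumed imports, assuming the Blob/Image calls refer to pgmagick and BashOperator to a pre-2.0 Airflow layout (both inferred from the API shapes used; log and XCOM_RETURN_KEY are assumed to be defined elsewhere in the module):

# Presumed imports (assumptions inferred from the APIs used above).
import os
import zipfile

import s2reader
from airflow.operators.bash_operator import BashOperator
from pgmagick import Blob, Image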
Code example #11
    def execute(self, context):
        if self.get_inputs_from is not None:
            log.info("Getting inputs from: {}".format(self.get_inputs_from))
            self.downloaded_products, self.archived_products = context[
                'task_instance'].xcom_pull(task_ids=self.get_inputs_from,
                                           key=XCOM_RETURN_KEY)
        else:
            log.info("Getting inputs from: dhus_download_task")
            self.downloaded_products = context['task_instance'].xcom_pull(
                'dhus_download_task', key='downloaded_products')
        if self.downloaded_products is None:
            log.info("Nothing to process.")
            return

        for product in self.downloaded_products.keys():
            log.info("Processing: {}".format(product))
            with s2reader.open(product) as s2_product:
                coords = []
                links = []
                metadata = s2_product._product_metadata
                granule = s2_product.granules[0]
                granule_metadata = granule._metadata
                product_footprint = [
                    [[m.replace(" ", ",")]
                     for m in str(s2_product.footprint).replace(", ", ",").
                     partition('((')[-1].rpartition('))')[0].split(",")]
                ]
                for item in product_footprint[0]:
                    [x_coordinate, y_coordinate] = item[0].split(",")
                    coords.append([float(x_coordinate), float(y_coordinate)])
                final_metadata_dict = {
                    "type": "Feature",
                    "geometry": {
                        "type": "Polygon",
                        "coordinates": [coords]
                    },
                    "properties": {
                        "eop:identifier":
                            s2_product.manifest_safe_path.rsplit('.SAFE', 1)[0],
                        "timeStart": s2_product.product_start_time,
                        "timeEnd": s2_product.product_stop_time,
                        "originalPackageLocation": os.path.join(
                            self.original_package_download_base_url,
                            os.path.basename(self.archived_products.pop(0))),
                        "thumbnailURL": None,
                        "quicklookURL": None,
                        "eop:parentIdentifier": "SENTINEL2",
                        "eop:productionStatus": None,
                        "eop:acquisitionType": None,
                        "eop:orbitNumber": s2_product.sensing_orbit_number,
                        "eop:orbitDirection": s2_product.sensing_orbit_direction,
                        "eop:track": None,
                        "eop:frame": None,
                        "eop:swathIdentifier": metadata.find(
                            './/Product_Info/Datatake').attrib['datatakeIdentifier'],
                        "opt:cloudCover": int(float(
                            metadata.findtext(".//Cloud_Coverage_Assessment"))),
                        "opt:snowCover": None,
                        "eop:productQualityStatus": None,
                        "eop:productQualityDegradationStatus": None,
                        "eop:processorName": None,
                        "eop:processingCenter": None,
                        "eop:creationDate": None,
                        "eop:modificationDate": None,
                        "eop:processingDate": None,
                        "eop:sensorMode": None,
                        "eop:archivingCenter":
                            granule_metadata.findtext('.//ARCHIVING_CENTRE'),
                        "eop:processingMode": None,
                        "eop:availabilityTime": s2_product.generation_time,
                        "eop:acquisitionStation": None,
                        "eop:acquisitionSubtype": None,
                        "eop:startTimeFromAscendingNode": None,
                        "eop:completionTimeFromAscendingNode": None,
                        "eop:illuminationAzimuthAngle":
                            metadata.findtext('.//Mean_Sun_Angle/AZIMUTH_ANGLE'),
                        "eop:illuminationZenithAngle":
                            metadata.findtext('.//Mean_Sun_Angle/ZENITH_ANGLE'),
                        "eop:illuminationElevationAngle": None,
                        "eop:resolution": None
                    }
                }
                for i in self.bands_res.values():
                    features_list = []
                    granule_counter = 1
                    for granule in s2_product.granules:
                        granule_coords = []
                        granule_coordinates = [[
                            [m.replace(" ", ",")]
                            for m in str(granule.footprint).replace(", ", ",").
                            partition('((')[-1].rpartition('))')[0].split(",")
                        ]]

                        for item in granule_coordinates[0]:
                            [granule_x_coordinate,
                             granule_y_coordinate] = item[0].split(",")
                            granule_coords.append([
                                float(granule_x_coordinate),
                                float(granule_y_coordinate)
                            ])
                        zipped_product = zipfile.ZipFile(product)
                        for file_name in zipped_product.namelist():
                            if (file_name.endswith('.jp2')
                                    and not file_name.endswith('PVI.jp2')):
                                features_list.append({
                                    "type": "Feature",
                                    "geometry": {
                                        "type": "Polygon",
                                        "coordinates": [granule_coords]
                                    },
                                    "properties": {
                                        "location": os.path.join(
                                            self.remote_dir,
                                            granule.granule_path.rsplit("/")[-1],
                                            "IMG_DATA",
                                            file_name.rsplit("/")[-1]),
                                        "band": self.bands_dict[
                                            file_name.rsplit("/")[-1].rsplit(".")[0][-3:]]
                                    },
                                    "id": "GRANULE.{}".format(granule_counter)
                                })
                                granule_counter += 1
            final_granules_dict = {
                "type": "FeatureCollection",
                "features": features_list
            }

            timeStart = final_metadata_dict["properties"]["timeStart"]
            timeEnd = final_metadata_dict["properties"]["timeEnd"]
            # create description.html and dump it to file
            log.info("Creating description.html")
            tr = TemplatesResolver()
            htmlAbstract = tr.generate_product_abstract({
                "timeStart": timeStart,
                "timeEnd": timeEnd,
                "originalPackageLocation":
                    final_metadata_dict["properties"]["originalPackageLocation"]
            })
            log.debug(pprint.pformat(htmlAbstract))
            final_metadata_dict['htmlDescription'] = htmlAbstract

            with open(product.strip(".zip") + '/description.html',
                      'w') as product_outfile:
                product_outfile.write(htmlAbstract)
            # Note here that the SRID is a property of the granule not the product
            final_metadata_dict["properties"]["crs"] = granule.srid
            with open(product.strip(".zip") + '/product.json',
                      'w') as product_outfile:
                json.dump(final_metadata_dict, product_outfile, indent=4)
            with open(product.strip(".zip") + '/granules.json',
                      'w') as granules_outfile:
                json.dump(final_granules_dict, granules_outfile, indent=4)

            product_identifier = s2_product.manifest_safe_path.rsplit(
                '.SAFE', 1)[0]
            bbox = get_bbox_from_granules_coordinates(granule_coordinates)

            ows_links_dict = create_owslinks_dict(
                product_identifier=product_identifier,
                timestart=timeStart,
                timeend=timeEnd,
                granule_bbox=bbox,
                gs_workspace=self.gs_workspace,
                gs_wms_layer=self.gs_wms_layer,
                gs_wms_width=self.gs_wms_width,
                gs_wms_height=self.gs_wms_height,
                gs_wms_format=self.gs_wms_format,
                gs_wms_version=self.gs_wms_version,
                gs_wfs_featuretype=self.gs_wfs_featuretype,
                gs_wfs_format=self.gs_wfs_format,
                gs_wfs_version=self.gs_wfs_version,
                gs_wcs_coverage_id=self.gs_wcs_coverage_id,
                gs_wcs_scale_i=self.gs_wcs_scale_i,
                gs_wcs_scale_j=self.gs_wcs_scale_j,
                gs_wcs_format=self.gs_wcs_format,
                gs_wcs_version=self.gs_wcs_version,
            )

            log.info("ows links: {}".format(pprint.pformat(ows_links_dict)))

            with open(product.strip(".zip") + '/owsLinks.json',
                      'w') as owslinks_outfile:
                json.dump(ows_links_dict, owslinks_outfile, indent=4)

        self.custom_archived = []
        for archive_line in self.downloaded_products.keys():
            jp2_files_paths = []
            archive_path = archive_line
            archived_product = zipfile.ZipFile(archive_line, 'r')
            for file_name in archived_product.namelist():
                if file_name.endswith(
                        '.jp2') and not file_name.endswith('PVI.jp2'):
                    archived_product.extract(file_name,
                                             archive_path.strip(".zip"))
                    jp2_files_paths.append(
                        os.path.join(archive_path.strip(".zip"), file_name))
                    parent_dir = os.path.dirname(jp2_files_paths[0])
                if file_name.endswith('MTD_TL.xml'):
                    archived_product.extract(file_name,
                                             archive_path.strip(".zip"))
                    mtd_tl_xml = os.path.join(archive_path.strip(".zip"),
                                              file_name)
            tree = ET.parse(mtd_tl_xml)
            root = tree.getroot()
            geometric_info = root.find(
                root.tag.split('}', 1)[0] + "}Geometric_Info")
            tile_geocoding = geometric_info.find("Tile_Geocoding")
            wld_files = []
            prj_files = []
            for jp2_file in jp2_files_paths:
                wld_name = os.path.splitext(jp2_file)[0]
                gdalinfo_cmd = "gdalinfo {} > {}".format(
                    jp2_file, wld_name + ".prj")
                gdalinfo_BO = BashOperator(
                    task_id="bash_operator_gdalinfo_{}".format(wld_name[-3:]),
                    bash_command=gdalinfo_cmd)
                gdalinfo_BO.execute(context)
                sed_cmd = "sed -i -e '1,4d;29,$d' {}".format(wld_name + ".prj")
                sed_BO = BashOperator(task_id="bash_operator_sed_{}".format(
                    wld_name[-3:]),
                                      bash_command=sed_cmd)
                sed_BO.execute(context)
                prj_files.append(wld_name + ".prj")
                wld_file = open(wld_name + ".wld", "w")
                wld_files.append(wld_name + ".wld")
                for key, value in self.bands_res.items():
                    if wld_name[-3:] in value:
                        element = key
                geo_position = tile_geocoding.find(
                    './/Geoposition[@resolution="{}"]'.format(element))
                wld_file.write(
                    geo_position.find("XDIM").text + "\n" + "0" + "\n" + "0" +
                    "\n")
                wld_file.write(geo_position.find("YDIM").text + "\n")
                wld_file.write(geo_position.find("ULX").text + "\n")
                wld_file.write(geo_position.find("ULY").text + "\n")
            parent_dir = os.path.dirname(jp2_files_paths[0])
            self.custom_archived.append(os.path.dirname(parent_dir))
            log.info(os.path.dirname(parent_dir))
        log.info(self.custom_archived)
        context['task_instance'].xcom_push(key='downloaded_products',
                                           value=self.downloaded_products)
        context['task_instance'].xcom_push(
            key='downloaded_products_with_wldprj',
            value=' '.join(self.custom_archived))
        return self.custom_archived
Code example #12
def test_product_abstract_generation(pkg_path):
    tr = TemplatesResolver()
    with s2reader.open(pkg_path) as safe_pkg:
        for granule in safe_pkg.granules:
            (search_params, other_metadata, product_abstract_metadata) = s2.collect_sentinel2_metadata(safe_pkg, granule)
            print(tr.generate_product_abstract(product_abstract_metadata))
Code example #13
def test_metadata_read(pkg_path):
    with s2reader.open(pkg_path) as safe_pkg:
        mu.print_metadata(safe_pkg)
Code example #14
File: io_funcs.py  Project: ungarj/mapchete
def file_bbox(
    input_file,
    tile_pyramid
):
    """Return the bounding box of a raster or vector file in a given CRS."""
    out_crs = tile_pyramid.crs
    # Read raster data with rasterio, vector data with fiona.
    file_ext = os.path.splitext(input_file)[1][1:]
    is_vector_file = file_ext in ["shp", "geojson"]

    if is_vector_file:
        with fiona.open(input_file) as inp:
            inp_crs = CRS(inp.crs)
            bounds = inp.bounds
    else:
        if file_ext in ["SAFE", "zip", "ZIP"]:
            with s2reader.open(input_file) as s2dataset:
                inp_crs = CRS.from_epsg(4326)
                if inp_crs != out_crs:
                    bounds = reproject_geometry(
                        s2dataset.footprint,
                        src_crs=inp_crs,
                        dst_crs=out_crs
                        ).bounds
                    inp_crs = out_crs
                else:
                    bounds = s2dataset.footprint.bounds
        else:
            with rasterio.open(input_file) as inp:
                inp_crs = inp.crs
                try:
                    assert inp_crs.is_valid
                except AssertionError:
                    raise IOError("CRS could not be read from %s" % input_file)
                bounds = (
                    inp.bounds.left, inp.bounds.bottom, inp.bounds.right,
                    inp.bounds.top)

    out_bbox = bbox = box(*bounds)
    # If source and target CRSes differ, segmentize and reproject
    if inp_crs != out_crs:
        if not is_vector_file:
            # Densify the bbox outline so the reprojected geometry follows
            # curved edges instead of only the four corner points.
            segmentize = _get_segmentize_value(input_file, tile_pyramid)
            ogr_bbox = ogr.CreateGeometryFromWkb(bbox.wkb)
            ogr_bbox.Segmentize(segmentize)
            bbox = loads(ogr_bbox.ExportToWkt())
        out_bbox = reproject_geometry(
            bbox,
            src_crs=inp_crs,
            dst_crs=out_crs
            )
    else:
        out_bbox = bbox

    # Validate and, if necessary, try to fix output geometry.
    try:
        assert out_bbox.is_valid
    except AssertionError:
        try:
            cleaned = out_bbox.buffer(0)
            assert cleaned.is_valid
        except Exception as e:
            raise TypeError("invalid file bbox geometry: %s" % e)
        out_bbox = cleaned
    return out_bbox
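
A hypothetical call, assuming a tilematrix-style TilePyramid that exposes a crs attribute; the file name is illustrative:

# Hypothetical usage; TilePyramid("geodetic") follows the tilematrix API
# (an assumption) and "example.tif" stands in for a real raster file.
from tilematrix import TilePyramid

bbox = file_bbox("example.tif", TilePyramid("geodetic"))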
Code example #15
    def read_path(self, path):
        values = {}
        with s2reader.open(path) as ds:
            metadata = ds._product_metadata
            granule = ds.granules[0]
            granule_metadata = granule._metadata

            values['identifier'] = metadata.findtext('.//PRODUCT_URI')

            values['begin_time'] = ds.product_start_time
            values['end_time'] = ds.product_stop_time
            values['footprint'] = ds.footprint.wkt

            values['masks'] = [
                ('clouds', self._read_mask(granule, 'MSK_CLOUDS')),
                ('nodata', self._read_mask(granule, 'MSK_NODATA')),
            ]

            def tci_path(granule):
                tci_paths = [
                    path for path in granule.dataset._product_metadata.xpath(
                        ".//Granule[@granuleIdentifier='%s']/IMAGE_FILE/text()"
                        % granule.granule_identifier) if path.endswith('TCI')
                ]
                try:
                    return os.path.join(ds._zip_root if ds.is_zip else ds.path,
                                        tci_paths[0]) + '.jp2'
                except IndexError:
                    raise IOError("TCI path does not exist")

            values['browses'] = [(None, tci_path(granule))]

            # TODO: extended metadata

            # values['parent_identifier']
            # values['production_status']
            # values['acquisition_type']
            values['orbit_number'] = ds.sensing_orbit_number
            values['orbit_direction'] = ds.sensing_orbit_direction
            # values['track']
            # values['frame']
            values['swath_identifier'] = metadata.find(
                './/Product_Info/Datatake').attrib['datatakeIdentifier']
            values['product_version'] = metadata.findtext(
                './/Product_Info/PROCESSING_BASELINE')
            # values['product_quality_status']
            # values['product_quality_degradation_tag']
            # values['processor_name']
            # values['processing_center']
            # values['creation_date']
            # values['modification_date']
            values['processing_date'] = ds.generation_time
            # values['sensor_mode']
            values['archiving_center'] = granule_metadata.findtext(
                './/ARCHIVING_CENTRE')
            # values['processing_mode']

            values['availability_time'] = ds.generation_time
            # values['acquisition_station']
            # values['acquisition_sub_type']
            # values['start_time_from_ascending_node']
            # values['completion_time_from_ascending_node']
            values['illumination_azimuth_angle'] = metadata.findtext(
                './/Mean_Sun_Angle/AZIMUTH_ANGLE')
            values['illumination_zenith_angle'] = metadata.findtext(
                './/Mean_Sun_Angle/ZENITH_ANGLE')
            # values['illumination_elevation_angle']
            # values['polarisation_mode']
            # values['polarization_channels']
            # values['antenna_look_direction']
            # values['minimum_incidence_angle']
            # values['maximum_incidence_angle']

            # values['doppler_frequency']
            # values['incidence_angle_variation']

            values['cloud_cover'] = metadata.findtext(
                ".//Cloud_Coverage_Assessment")
            # values['snow_cover']
            # values['lowest_location']
            # values['highest_location']

        return values