Example #1
import datetime

import matplotlib.pyplot as plt

from eodag.api.core import EODataAccessGateway


def main():
    base_dir = '/home/sobloo/hack-tbd/original-sobloo'
    conf_file = base_dir + "/eodagconf.yml"
    dag = EODataAccessGateway(user_conf_file_path=conf_file)
    product_type = 'S2_MSI_L1C'


    # Hungary
    longitude_center = 18.282810
    latitude_center = 46.127194
#    # Sweden
#    longitude_center = 18.330000
#    latitude_center = 59.400000
    degrees_from_center = 0.0035
    extent = make_map_rectangle(longitude_center=longitude_center,
                                latitude_center=latitude_center,
                                degrees_from_center=degrees_from_center)

    dag.set_preferred_provider(provider='airbus-ds')
    #prodTypeList = dag.list_product_types('airbus-ds')
    #print(prodTypeList)

    products = dag.search(product_type,
                          startTimeFromAscendingNode='2016-01-17',
                          completionTimeFromAscendingNode='2018-09-20',
                          geometry=extent,
                          cloudCover=1)
    #products = dag.search(product_type)
    for i, product in enumerate(products):
        try:
            print('{} : {}'.format(i, product))
            # One corner of the product footprint's exterior ring
            xx, yy = product.as_dict()['geometry']['coordinates'][0][4]

            #longmin, latmin, longmax, latmax = subselect(longitude_center=xx, latitude_center=yy, degrees_from_center=degrees_from_center)
            longmin, latmin, longmax, latmax = no_subselect(extent=extent)



            # Red (B04) and near-infrared (B08) bands over the selected extent
            VIR = product.get_data(crs='epsg:4326', resolution=0.0001, band='B04',
                                   extent=(longmin, latmin, longmax, latmax))
            NIR = product.get_data(crs='epsg:4326', resolution=0.0001, band='B08',
                                   extent=(longmin, latmin, longmax, latmax))
            # NDVI = (NIR - Red) / (NIR + Red); '* 1.' forces float division
            NDVI = (NIR - VIR * 1.) / (NIR + VIR)

            plt.imshow(NDVI, cmap='RdYlGn', aspect='auto')
            hms = datetime.datetime.now().strftime('%H%M%S')
            plt.savefig('{}/img/ndvi-{}.png'.format(base_dir, hms))
        except Exception as e:
            print('Exception: {}'.format(e))
            continue
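The snippet leans on two helpers that are not shown here. A minimal sketch of what they plausibly look like, assuming make_map_rectangle builds the bounding-box dict that eodag's search() accepts as a geometry and no_subselect merely unpacks it:

def make_map_rectangle(longitude_center, latitude_center, degrees_from_center):
    # Hypothetical stand-in: a square bounding box around the center point,
    # in the dict form eodag accepts as a search geometry.
    return {
        'lonmin': longitude_center - degrees_from_center,
        'latmin': latitude_center - degrees_from_center,
        'lonmax': longitude_center + degrees_from_center,
        'latmax': latitude_center + degrees_from_center,
    }

def no_subselect(extent):
    # Hypothetical stand-in: return the full extent unchanged.
    return extent['lonmin'], extent['latmin'], extent['lonmax'], extent['latmax']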
Example #2
def list_pt(ctx, **kwargs):
    """Print the list of supported product types"""
    setup_logging(verbose=ctx.obj["verbosity"])
    dag = EODataAccessGateway()
    provider = kwargs.pop("provider")
    text_wrapper = textwrap.TextWrapper()
    guessed_product_types = []
    try:
        guessed_product_types = dag.guess_product_type(
            platformSerialIdentifier=kwargs.get("platformserialidentifier"),
            processingLevel=kwargs.get("processinglevel"),
            sensorType=kwargs.get("sensortype"),
            **kwargs
        )
    except NoMatchingProductType:
        if any(
            kwargs[arg]
            for arg in [
                "instrument",
                "platform",
                "platformserialidentifier",
                "processinglevel",
                "sensortype",
            ]
        ):
            click.echo("No product type match the following criteria you provided:")
            click.echo(
                "\n".join(
                    "-{param}={value}".format(**locals())
                    for param, value in kwargs.items()
                    if value is not None
                )
            )
            sys.exit(1)
    try:
        if guessed_product_types:
            product_types = [
                pt
                for pt in dag.list_product_types(provider=provider)
                if pt["ID"] in guessed_product_types
            ]
        else:
            product_types = dag.list_product_types(provider=provider)
        click.echo("Listing available product types:")
        for product_type in product_types:
            click.echo("\n* {}: ".format(product_type["ID"]))
            for prop, value in product_type.items():
                if prop != "ID":
                    text_wrapper.initial_indent = "    - {}: ".format(prop)
                    text_wrapper.subsequent_indent = " " * len(
                        text_wrapper.initial_indent
                    )
                    if value is not None:
                        click.echo(text_wrapper.fill(value))
    except UnsupportedProvider:
        click.echo("Unsupported provider. You may have a typo")
        click.echo(
            "Available providers: {}".format(", ".join(dag.available_providers()))
        )
        sys.exit(1)
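For comparison, the same lookup can be sketched directly against the Python API; the provider name ("peps") and the metadata value below are only examples:

from eodag.api.core import EODataAccessGateway

dag = EODataAccessGateway()
# Narrow the catalogue by metadata, then keep only the matching product types.
guessed = dag.guess_product_type(sensorType="OPTICAL")
for pt in dag.list_product_types(provider="peps"):
    if pt["ID"] in guessed:
        print(pt["ID"])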
Example #3
def download(ctx, **kwargs):
    """Download a bunch of products from a serialized search result"""
    search_result_path = kwargs.pop("search_results")
    if not search_result_path:
        with click.Context(download) as ctx:
            click.echo("Nothing to do (no search results file provided)")
            click.echo(download.get_help(ctx))
        sys.exit(1)
    kwargs["verbose"] = ctx.obj["verbosity"]
    setup_logging(**kwargs)
    conf_file = kwargs.pop("conf")
    if conf_file:
        conf_file = click.format_filename(conf_file)

    satim_api = EODataAccessGateway(user_conf_file_path=conf_file)
    search_results = satim_api.deserialize(search_result_path)
    # register downloader
    for idx, product in enumerate(search_results):
        if product.downloader is None:
            auth = product.downloader_auth
            if auth is None:
                auth = satim_api._plugins_manager.get_auth_plugin(
                    product.provider)
            search_results[idx].register_downloader(
                satim_api._plugins_manager.get_download_plugin(product), auth)

    downloaded_files = satim_api.download_all(search_results)
    if downloaded_files:
        for downloaded_file in downloaded_files:
            if downloaded_file is None:
                click.echo(
                    "A file may have been downloaded but we cannot locate it")
            else:
                click.echo("Downloaded {}".format(downloaded_file))
    else:
        click.echo(
            "Error during download, a file may have been downloaded but we cannot locate it"
        )
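The search-results file this command consumes is produced by EODataAccessGateway.serialize. A minimal round trip under that assumption (the paths and product type are illustrative):

from eodag.api.core import EODataAccessGateway

dag = EODataAccessGateway()
results, total = dag.search(productType='S2_MSI_L1C')
path = dag.serialize(results, filename='search_results.geojson')

# Later, possibly via the command above: restore the results and download them.
restored = dag.deserialize(path)
downloaded_files = dag.download_all(restored)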
Example #4
def params_mapping_to_csv(
    ogc_doc_url=OPENSEARCH_DOC_URL, csv_file_path=DEFAULT_CSV_FILE_PATH
):
    """Get providers metadata mapping, with corresponding description from OGC
        documentation and writes it to a csv file

        :param ogc_doc_url: (Optional) URL to OGC OpenSearch documentation
        :type ogc_doc_url: str
        :param csv_file_path: (Optional) path to csv output file
        :type csv_file_path: str
    """
    dag = EODataAccessGateway()

    page = requests.get(ogc_doc_url)
    tree = html.fromstring(page.content.decode("utf8"))

    # list of lists of all parameters per provider
    params_list_of_lists = [
        list(dag.providers_config[p].search.__dict__["metadata_mapping"].keys())
        for p in dag.providers_config.keys()
        if hasattr(dag.providers_config[p], "search")
    ]

    # union of params_list_of_lists
    global_keys = sorted(set().union(*params_list_of_lists))

    # csv fieldnames
    fieldnames = ["param", "open-search", "class", "description", "type"] + sorted(
        [provider + "_mapping" for provider in dag.providers_config.keys()]
        + [provider + "_query" for provider in dag.providers_config.keys()]
    )
    # py2 compatibility
    if sys.version_info[0] < 3:
        fieldnames = [x.encode("utf8") for x in fieldnames]

    # write to csv
    with open(csv_file_path, "w") as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()

        for param in global_keys:
            params_row = {
                "param": param,
                "open-search": "",
                "class": "",
                "description": "",
                "type": "",
            }
            # find the HTML table cells whose paragraph text equals param
            param_node_list = tree.xpath(
                '/html/body/main/section/table/tr/td[1]/p[text()="%s"]' % param
            )

            for param_node in param_node_list:
                params_row["open-search"] = "yes"

                # table must have 3 columns, and 'Definition' as 2nd header
                if (
                    len(param_node.xpath("../../../thead/tr/th")) == 3
                    and "Definition"
                    in param_node.xpath("../../../thead/tr/th[2]/text()")[0]
                ):

                    params_row["class"] = param_node.xpath("../../../caption/text()")[
                        1
                    ].strip(": ")

                    # description formatting
                    params_row["description"] = param_node.xpath(
                        "../../td[2]/p/text()"
                    )[0].replace("\n", " ")
                    # collapse runs of whitespace into single spaces
                    params_row["description"] = " ".join(
                        params_row["description"].split()
                    )

                    params_row["type"] = param_node.xpath("../../td[3]/p/text()")[0]
                    break

            # write metadata mapping
            for provider in dag.providers_config.keys():
                if hasattr(dag.providers_config[provider], "search"):
                    mapping_dict = dag.providers_config[provider].search.__dict__[
                        "metadata_mapping"
                    ]
                    if param in mapping_dict.keys():
                        if isinstance(mapping_dict[param], list):
                            params_row[provider + "_query"] = mapping_dict[param][0]
                            params_row[provider + "_mapping"] = mapping_dict[param][1]
                        else:
                            params_row[provider + "_mapping"] = mapping_dict[param]

            # py2 compatibility
            if sys.version_info[0] < 3:
                params_row = {
                    k.encode("utf8"): v.encode("utf8") for k, v in params_row.items()
                }
            writer.writerow(params_row)

    logger.info(csv_file_path + " written")
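With its defaults the function fetches the OGC page referenced by the module-level OPENSEARCH_DOC_URL and writes the CSV to DEFAULT_CSV_FILE_PATH; both arguments can be overridden (the values below are illustrative):

# Use the module-level defaults.
params_mapping_to_csv()

# Or supply explicit inputs (illustrative values).
params_mapping_to_csv(
    ogc_doc_url="http://docs.opengeospatial.org/is/13-026r8/13-026r8.html",
    csv_file_path="params_mapping.csv",
)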
Example #5
def search_crunch(ctx, **kwargs):
    """Search product types and optionnaly apply crunchers to search results"""
    # Process inputs for search
    product_type = kwargs.pop("producttype")
    instrument = kwargs.pop("instrument")
    platform = kwargs.pop("platform")
    platform_identifier = kwargs.pop("platformserialidentifier")
    processing_level = kwargs.pop("processinglevel")
    sensor_type = kwargs.pop("sensortype")
    id_ = kwargs.pop("id")
    custom = kwargs.pop("query")
    if not any([
            product_type,
            instrument,
            platform,
            platform_identifier,
            processing_level,
            sensor_type,
            id_,
    ]):
        with click.Context(search_crunch) as ctx:
            print("Give me some work to do. See below for how to do that:",
                  end="\n\n")
            click.echo(search_crunch.get_help(ctx))
        sys.exit(-1)

    kwargs["verbose"] = ctx.obj["verbosity"]
    setup_logging(**kwargs)

    if kwargs["box"] != (None, ) * 4:
        rect = kwargs.pop("box")
        footprint = {
            "lonmin": rect[0],
            "latmin": rect[1],
            "lonmax": rect[2],
            "latmax": rect[3],
        }
    else:
        footprint = kwargs.pop("geom")

    start_date = kwargs.pop("start")
    stop_date = kwargs.pop("end")
    criteria = {
        "geometry": footprint,
        "startTimeFromAscendingNode": None,
        "completionTimeFromAscendingNode": None,
        "cloudCover": kwargs.pop("cloudcover"),
        "productType": product_type,
        "instrument": instrument,
        "platform": platform,
        "platformSerialIdentifier": platform_identifier,
        "processingLevel": processing_level,
        "sensorType": sensor_type,
        "id": id_,
    }
    if custom:
        custom_dict = parse_qs(custom)
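        # parse_qs always returns list values, e.g.
        # parse_qs("cloudCover=10") == {"cloudCover": ["10"]}, so
        # single-item lists are unwrapped below into plain strings.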
        for k, v in custom_dict.items():
            if isinstance(v, list) and len(v) == 1:
                criteria[k] = v[0]
            else:
                criteria[k] = v
    if start_date:
        criteria["startTimeFromAscendingNode"] = start_date.isoformat()
    if stop_date:
        criteria["completionTimeFromAscendingNode"] = stop_date.isoformat()
    conf_file = kwargs.pop("conf")
    if conf_file:
        conf_file = click.format_filename(conf_file)
    locs_file = kwargs.pop("locs")
    if locs_file:
        locs_file = click.format_filename(locs_file)

    # Process inputs for crunch
    cruncher_names = set(kwargs.pop("cruncher") or [])
    cruncher_args = kwargs.pop("cruncher_args")
    cruncher_args_dict = {}
    if cruncher_args:
        for cruncher, argname, argval in cruncher_args:
            cruncher_args_dict.setdefault(cruncher,
                                          {}).setdefault(argname, argval)

    items_per_page = kwargs.pop("items")
    page = kwargs.pop("page") or 1

    gateway = EODataAccessGateway(user_conf_file_path=conf_file,
                                  locations_conf_path=locs_file)

    # Search
    results, total = gateway.search(page=page,
                                    items_per_page=items_per_page,
                                    **criteria)
    click.echo("Found a total number of {} products".format(total))
    click.echo("Returned {} products".format(len(results)))

    # Crunch !
    crunch_args = {
        cruncher_name: cruncher_args_dict.get(cruncher_name, {})
        for cruncher_name in cruncher_names
    }
    if crunch_args:
        results = gateway.crunch(results,
                                 search_criteria=criteria,
                                 **crunch_args)

    storage_filepath = kwargs.pop("storage")
    if not storage_filepath.endswith(".geojson"):
        storage_filepath += ".geojson"
    result_storage = gateway.serialize(results, filename=storage_filepath)
    click.echo("Results stored at '{}'".format(result_storage))
from eodag.api.core import EODataAccessGateway
from eodag.utils.logging import setup_logging
from eodag.utils import ProgressCallback
import os
import json
import pprint
import tqdm
setup_logging(verbose=1)
WORKSPACE = "eodag_workspace"
DESCRIPTORS_PATH = r"products/stuttgart21.json"
PROVIDER = "sobloo"

dag = EODataAccessGateway('eodag.yml')
dag.set_preferred_provider(PROVIDER)


def loadDescriptorJson(path):
    """Load the search descriptor definitions from a JSON file."""
    with open(path, "r") as json_file:
        data = json.load(json_file)
    return data


def createDescriptors(data):
    """Build one search descriptor per product type, copying every shared
    property from the JSON data into each descriptor."""
    descriptorDict = {}
    for type_name in data["PRODUCT_TYPES"]:
        descriptorDict[type_name] = {"productType": data["PRODUCT_TYPES"][type_name]}
        for prop in data:
            if prop != "PRODUCT_TYPES":
                descriptorDict[type_name][prop] = data[prop]
    return descriptorDict
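createDescriptors fans the shared properties out to one search descriptor per product type. A hypothetical stuttgart21.json and the resulting dict, to make the expected shape concrete:

# Hypothetical contents of products/stuttgart21.json:
# {
#     "PRODUCT_TYPES": {"optical": "S2_MSI_L1C", "radar": "S1_SAR_GRD"},
#     "geom": "POINT (9.18 48.78)",
#     "start": "2019-01-01",
#     "end": "2019-12-31"
# }
data = loadDescriptorJson(DESCRIPTORS_PATH)
descriptors = createDescriptors(data)
# descriptors["optical"] == {"productType": "S2_MSI_L1C",
#                            "geom": "POINT (9.18 48.78)",
#                            "start": "2019-01-01", "end": "2019-12-31"}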