Пример #1
0
def create_snapshots():
    """
    Run requests against USGS API for use in tests.
    """

    api_key = api.login(os.environ['USGS_USERNAME'], os.environ['USGS_PASSWORD'])

    # Each entry pairs a snapshot filename with a deferred API call.
    # The lambdas keep the requests from firing until iterated, so the
    # calls still run one at a time, in the original order.
    snapshot_calls = [
        ('dataset-fields.json',
         lambda: api.dataset_fields("LANDSAT_8_C1", "EE", api_key=api_key)),
        ('datasets.json',
         lambda: api.datasets(None, "EE")),
        ('download.json',
         lambda: api.download("LANDSAT_8_C1", "EE", ["LC80810712017104LGN00"], product='STANDARD')),
        ('download-options.json',
         lambda: api.download_options("LANDSAT_8_C1", "EE", ["LC80810712017104LGN00"])),
        ('metadata.json',
         lambda: api.metadata("LANDSAT_8_C1", "EE", ["LC80810712017104LGN00"])),
        ('search.json',
         lambda: api.search("LANDSAT_8_C1", "EE", start_date='20170401', end_date='20170402', max_results=10)),
    ]

    for filename, call in snapshot_calls:
        write_response(call(), filename)

    api.logout(api_key)
Пример #2
0
def main(rawargs):
    """Command-line entry point (Python 2).

    Queries USGS for scene ids in the given date range, removes scenes
    already processed (and, with --queue, already queued), then either
    just lists the candidate ids, copies them to an external queue, or
    processes them in parallel / serially depending on the parsed flags.
    """
    args = get_parser().parse_args(rawargs)
    scene_ids = query_for_scenes(args.start_date, args.end_date,
                                 verbose=args.verbose,
                                 limit=args.limit)
    # Drop scenes recorded in the persisted "already processed" list.
    scene_list_file = pusher.get_past_list()
    scene_ids = remove_processed_ids(scene_ids, scene_list_file)

    if args.queue:
        scene_ids = remove_queued_ids(scene_ids)

    print '%d scenes identified for processing.' % len(scene_ids)
    sys.stdout.flush()

    # Dry run: neither --run-directly nor --queue was given, so just list
    # the candidate ids and exit without processing anything.
    if not args.run_directly and not args.queue:
        for i in scene_ids:
            print i
        sys.exit(1)

    # Dispatch according to the requested execution mode.
    if args.queue:
        copy_scene_set_external(args, scene_ids)
    elif args.parallel:
        process_scene_set_external(args, scene_ids, scene_list_file)
    else:
        process_scene_set_internal(args, scene_ids, scene_list_file)
    
    api.logout()
Пример #3
0
def cycle_token(username, password):
    """Refresh the USGS API token if it is close to expiry.

    Reads the token creation timestamp from the ``~/.usgs`` credential
    file and, when the token is older than roughly two hours (just under
    the USGS token lifetime, with a one-minute safety margin), logs out
    and logs back in to obtain a fresh token.

    :param username: USGS account username used for the re-login.
    :param password: USGS account password used for the re-login.
    """
    credential_filepath = os.path.join(os.path.expanduser("~"), ".usgs")
    with open(credential_filepath) as f:
        credentials = json.load(f)

    created = datetime.strptime(credentials['created'], "%Y-%m-%dT%H:%M:%S.%f")
    # BUG FIX: timedelta.seconds holds only the seconds *component*
    # (0-86399) and silently drops whole days, so a token older than a
    # day could appear fresh. total_seconds() gives true elapsed time.
    token_lifetime = (datetime.now() - created).total_seconds()
    approx_two_hours = 2 * 60 * 60 - 60
    click.echo('The token lifetime is {} seconds'.format(token_lifetime))
    if token_lifetime > approx_two_hours:
        api.logout()
        api.login(username, password)
Пример #4
0
def create_snapshots():
    """
    Run requests against USGS API for use in tests.
    """

    username = os.environ['USGS_USERNAME']
    password = os.environ['USGS_PASSWORD']
    api_key = api.login(username, password)

    # The same Landsat 8 scene is used for every scene-specific request.
    scene = ["LC80810712017104LGN00"]

    # Dataset Fields
    write_response(
        api.dataset_fields("LANDSAT_8_C1", "EE", api_key=api_key),
        'dataset-fields.json')

    # Datasets
    write_response(api.datasets(None, "EE"), 'datasets.json')

    # Download
    write_response(
        api.download("LANDSAT_8_C1", "EE", scene, product='STANDARD'),
        'download.json')

    # Download Options
    write_response(
        api.download_options("LANDSAT_8_C1", "EE", scene),
        'download-options.json')

    # Metadata
    write_response(api.metadata("LANDSAT_8_C1", "EE", scene), 'metadata.json')

    # Search
    write_response(
        api.search("LANDSAT_8_C1", "EE",
                   start_date='20170401', end_date='20170402',
                   max_results=10),
        'search.json')

    api.logout(api_key)
Пример #5
0
    def query(self, product_type=None, **kwargs):
        """Search for data on USGS catalogues

        Logs in with the configured credentials, runs ``api.search`` on the
        product type's dataset / catalog node, maps every raw result onto an
        ``EOProduct`` and returns ``(products, count)``.

        .. versionchanged::
            1.0

                * ``product_type`` is no longer mandatory
        """
        # The positional parameter is ignored; the effective product type is
        # always read from the "productType" keyword argument.
        product_type = kwargs.get("productType")
        if product_type is None:
            return [], 0
        api.login(
            self.config.credentials["username"],
            self.config.credentials["password"],
            save=True,
        )
        usgs_dataset = self.config.products[product_type]["dataset"]
        usgs_catalog_node = self.config.products[product_type]["catalog_node"]
        # Translate EODAG search criteria into usgs-library arguments.
        start_date = kwargs.pop("startTimeFromAscendingNode", None)
        end_date = kwargs.pop("completionTimeFromAscendingNode", None)
        footprint = kwargs.pop("geometry", None)

        # Configuration to generate the download url of search results
        result_summary_pattern = re.compile(
            r"^ID: .+, Acquisition Date: .+, Path: (?P<path>\d+), Row: (?P<row>\d+)$"  # noqa
        )
        # See https://pyformat.info/, on section "Padding and aligning strings" to
        # understand {path:0>3} and {row:0>3}.
        # It roughly means: 'if the string that will be passed as "path" has length < 3,
        # prepend as much "0"s as needed to reach length 3' and same for "row"
        dl_url_pattern = "{base_url}/L8/{path:0>3}/{row:0>3}/{entity}.tar.bz"

        final = []
        if footprint and len(footprint.keys()) == 4:  # a rectangle (or bbox)
            lower_left = {
                "longitude": footprint["lonmin"],
                "latitude": footprint["latmin"],
            }
            upper_right = {
                "longitude": footprint["lonmax"],
                "latitude": footprint["latmax"],
            }
        else:
            # No usable bbox: search without a spatial constraint.
            lower_left, upper_right = None, None
        try:
            results = api.search(
                usgs_dataset,
                usgs_catalog_node,
                start_date=start_date,
                end_date=end_date,
                ll=lower_left,
                ur=upper_right,
            )

            for result in results["data"]["results"]:
                # Opposite corners of the scene's footprint polygon.
                r_lower_left = result["spatialFootprint"]["coordinates"][0][0]
                r_upper_right = result["spatialFootprint"]["coordinates"][0][2]
                # NOTE(review): match() returns None when the summary string
                # does not fit the pattern, and .groupdict() would then raise
                # AttributeError — which is NOT caught by the USGSError
                # handler below. Confirm the summary format is guaranteed.
                summary_match = result_summary_pattern.match(
                    result["summary"]).groupdict()
                result["geometry"] = geometry.box(r_lower_left[0],
                                                  r_lower_left[1],
                                                  r_upper_right[0],
                                                  r_upper_right[1])

                # Same method as in base.py, Search.__init__()
                # Prepare the metadata mapping
                # Do a shallow copy, the structure is flat enough for this to be sufficient
                metas = DEFAULT_METADATA_MAPPING.copy()
                # Update the defaults with the mapping value. This will add any new key
                # added by the provider mapping that is not in the default metadata.
                # A deepcopy is done to prevent self.config.metadata_mapping from being modified when metas[metadata]
                # is a list and is modified
                metas.update(copy.deepcopy(self.config.metadata_mapping))
                metas = mtd_cfg_as_jsonpath(metas)

                result["productType"] = usgs_dataset

                product_properties = properties_from_json(result, metas)

                # NOTE(review): the "file" branch formats dl_url_pattern
                # without the path/row/entity placeholders, which would raise
                # KeyError at runtime — verify this branch is exercised.
                if getattr(self.config, "product_location_scheme",
                           "https") == "file":
                    product_properties["downloadLink"] = dl_url_pattern.format(
                        base_url="file://")
                else:
                    product_properties["downloadLink"] = dl_url_pattern.format(
                        base_url=self.config.google_base_url.rstrip("/"),
                        entity=result["entityId"],
                        **summary_match)

                final.append(
                    EOProduct(
                        productType=product_type,
                        provider=self.provider,
                        properties=product_properties,
                        geometry=footprint,
                    ))
        except USGSError as e:
            # A failing search is treated as "no products", not as an error.
            logger.debug(
                "Product type %s does not exist on catalogue %s",
                usgs_dataset,
                usgs_catalog_node,
            )
            logger.debug("Skipping error: %s", e)
        api.logout()
        return final, len(final)
Пример #6
0
    def query(self,
              product_type=None,
              items_per_page=None,
              page=None,
              count=True,
              **kwargs):
        """Search for data on USGS catalogues

        Logs in with the configured credentials, runs a paginated
        ``api.scene_search`` on the product type's dataset, maps raw results
        onto ``EOProduct`` instances and returns ``(products, total_count)``.

        .. versionchanged::
           2.2.0

                * Based on usgs library v0.3.0 which now uses M2M API. The library
                  is used for both search & download

        .. versionchanged::
            1.0

                * ``product_type`` is no longer mandatory
        """
        # The positional parameter is ignored; the effective product type is
        # always read from the "productType" keyword argument.
        product_type = kwargs.get("productType")
        if product_type is None:
            return [], 0
        try:
            api.login(
                self.config.credentials["username"],
                self.config.credentials["password"],
                save=True,
            )
        except USGSError:
            raise AuthenticationError(
                "Please check your USGS credentials.") from None

        # Fall back to the generic product type definition when the requested
        # one is not explicitly configured.
        product_type_def_params = self.config.products.get(
            product_type, self.config.products[GENERIC_PRODUCT_TYPE])
        usgs_dataset = format_dict_items(product_type_def_params,
                                         **kwargs)["dataset"]
        start_date = kwargs.pop("startTimeFromAscendingNode", None)
        end_date = kwargs.pop("completionTimeFromAscendingNode", None)
        geom = kwargs.pop("geometry", None)
        footprint = {}
        # A shapely-like geometry is reduced to its bounding box; anything
        # else (e.g. an already-built bbox dict, or None) is kept as-is.
        if hasattr(geom, "bounds"):
            (
                footprint["lonmin"],
                footprint["latmin"],
                footprint["lonmax"],
                footprint["latmax"],
            ) = geom.bounds
        else:
            footprint = geom

        final = []
        if footprint and len(footprint.keys()) == 4:  # a rectangle (or bbox)
            lower_left = {
                "longitude": footprint["lonmin"],
                "latitude": footprint["latmin"],
            }
            upper_right = {
                "longitude": footprint["lonmax"],
                "latitude": footprint["latmax"],
            }
        else:
            # No usable bbox: search without a spatial constraint.
            lower_left, upper_right = None, None
        try:
            # NOTE(review): assumes page and items_per_page are ints — a
            # None value would raise TypeError in starting_number; confirm
            # callers always supply pagination values.
            results = api.scene_search(
                usgs_dataset,
                start_date=start_date,
                end_date=end_date,
                ll=lower_left,
                ur=upper_right,
                max_results=items_per_page,
                starting_number=(1 + (page - 1) * items_per_page),
            )

            # Same method as in base.py, Search.__init__()
            # Prepare the metadata mapping
            # Do a shallow copy, the structure is flat enough for this to be sufficient
            metas = DEFAULT_METADATA_MAPPING.copy()
            # Update the defaults with the mapping value. This will add any new key
            # added by the provider mapping that is not in the default metadata.
            # A deepcopy is done to prevent self.config.metadata_mapping from being modified when metas[metadata]
            # is a list and is modified
            metas.update(copy.deepcopy(self.config.metadata_mapping))
            metas = mtd_cfg_as_jsonpath(metas)

            for result in results["data"]["results"]:

                result["productType"] = usgs_dataset

                product_properties = properties_from_json(result, metas)

                final.append(
                    EOProduct(
                        productType=product_type,
                        provider=self.provider,
                        properties=product_properties,
                        geometry=footprint,
                    ))
        except USGSError as e:
            # A failing search is treated as "no products", not as an error.
            logger.warning(
                "Product type %s does not exist on USGS EE catalog",
                usgs_dataset,
            )
            logger.warning("Skipping error: %s", e)
        api.logout()

        if final:
            # parse total_results from the raw response using the configured
            # JSONPath expression
            path_parsed = parse(
                self.config.pagination["total_items_nb_key_path"])
            total_results = path_parsed.find(results["data"])[0].value
        else:
            total_results = 0

        return final, total_results
Пример #7
0
    def download(self, product, auth=None, progress_callback=None, **kwargs):
        """Download data from USGS catalogues

        Authenticates against USGS, resolves the product's "dds" download
        ids, requests a download URL, streams the archive to disk with
        progress reporting, records the download, and returns the local
        path (finalized when the file is a valid tar archive).
        """
        fs_path, record_filename = self._prepare_download(
            product, outputs_extension=".tar.gz", **kwargs)
        # _prepare_download returns falsy values when the product is already
        # downloaded (or cannot be) — pass its answer straight through.
        if not fs_path or not record_filename:
            return fs_path

        # progress bar init
        if progress_callback is None:
            progress_callback = get_progress_callback()
        progress_callback.desc = product.properties.get("id", "")
        progress_callback.position = 1

        try:
            api.login(
                self.config.credentials["username"],
                self.config.credentials["password"],
                save=True,
            )
        except USGSError:
            raise AuthenticationError(
                "Please check your USGS credentials.") from None

        download_options = api.download_options(
            product.properties["productType"], product.properties["id"])

        # Keep only products served by the "dds" download system.
        try:
            product_ids = [
                p["id"] for p in download_options["data"]
                if p["downloadSystem"] == "dds"
            ]
        except KeyError as e:
            raise NotAvailableError("%s not found in %s's products" %
                                    (e, product.properties["id"]))

        if not product_ids:
            raise NotAvailableError("No USGS products found for %s" %
                                    product.properties["id"])

        # Ask USGS to prepare a download URL for each candidate product id.
        req_urls = []
        for product_id in product_ids:
            download_request = api.download_request(
                product.properties["productType"], product.properties["id"],
                product_id)
            try:
                req_urls.extend([
                    x["url"]
                    for x in download_request["data"]["preparingDownloads"]
                ])
            except KeyError as e:
                raise NotAvailableError("%s not found in %s download_request" %
                                        (e, product.properties["id"]))

        if len(req_urls) > 1:
            logger.warning(
                "%s usgs products found for %s. Only first will be downloaded"
                % (len(req_urls), product.properties["id"]))
        elif not req_urls:
            raise NotAvailableError("No usgs request url was found for %s" %
                                    product.properties["id"])

        # Stream the first (and usually only) URL to disk in 64 KiB chunks.
        req_url = req_urls[0]
        progress_callback.reset()
        with requests.get(
                req_url,
                stream=True,
        ) as stream:
            try:
                stream.raise_for_status()
            except HTTPError:
                import traceback as tb

                # HTTP failure is logged but not raised: the method still
                # records the download link below.
                logger.error(
                    "Error while getting resource :\n%s",
                    tb.format_exc(),
                )
            else:
                stream_size = int(stream.headers.get("content-length", 0))
                progress_callback.max_size = stream_size
                progress_callback.reset()
                with open(fs_path, "wb") as fhandle:
                    for chunk in stream.iter_content(chunk_size=64 * 1024):
                        if chunk:
                            fhandle.write(chunk)
                            progress_callback(len(chunk), stream_size)

        # Persist the download link so the product is seen as downloaded.
        with open(record_filename, "w") as fh:
            fh.write(product.properties["downloadLink"])
        logger.debug("Download recorded in %s", record_filename)

        api.logout()

        # Check that the downloaded file is really a tar file
        if not tarfile.is_tarfile(fs_path):
            logger.warning(
                "Downloaded product is not a tar File. Please check its file type before using it"
            )
            # Strip the misleading .tar.gz suffix and return the raw file.
            new_fs_path = fs_path[:fs_path.index(".tar.gz")]
            shutil.move(fs_path, new_fs_path)
            return new_fs_path
        return self._finalize(fs_path, outputs_extension=".tar.gz", **kwargs)
Пример #8
0
def test_logout():
    # Logging out must yield a truthy (successful) API response.
    response = api.logout()
    assert response
Пример #9
0
def logout():
    """Log out of the USGS API and print the server response."""
    response = api.logout()
    print(response)
Пример #10
0
def test_logout():
    """The USGS API logout call must report success."""
    result = api.logout()
    assert result
Пример #11
0
def logout():
    """Log out of the USGS API and echo the server response."""
    response = api.logout()
    click.echo(response)
Пример #12
0
Файл: cli.py Проект: mapbox/usgs
def logout():
    # Terminate the USGS session, then display the API's reply.
    reply = api.logout()
    click.echo(reply)
Пример #13
0
def logout():
    # End the USGS API session and show its response on stdout.
    result = api.logout()
    print(result)