コード例 #1
0
    def test_config_init_extra_values(self):
        """A config file containing extra, unknown keys should still load."""
        extra_config = Path(TEST_DIR, "test_data", "config_extra.yaml")
        downloader = l8_downloader.L8Downloader(path_to_config=extra_config)
        self.assertIsNotNone(downloader)
コード例 #2
0
def download_using_landsat_downloader(tile_name, entity_id, celery_task=None):
    """Download a single Landsat-8 STANDARD product via the USGS downloader.

    Args:
        tile_name: Name the downloaded file should be given locally.
        entity_id: USGS entity id (e.g. 'L1C_T12UVA_A006488_20180603T183037').
        celery_task: Optional bound Celery task; when provided, download
            progress percentages are pushed to its state meta.

    Returns:
        Whatever ``L8Downloader.download_product`` returns for the request.
    """
    downloader = l8_downloader.L8Downloader(CONFIG_FILE_PATH, verbose=False)

    # download_product requires a product dict, the minimum product dict has:
    # platform_name ['Landsat-8', 'Sentinel-2']
    # dataset_name ['LANDSAT_8', 'SENTINEL_2A']
    # name [name for the file downloaded to be called locally]
    # entity_id ['L1C_T12UVA_A006488_20180603T183037']
    # product_type = ['FR_BUND' archive, FR_THERM, FR_QB, FR_REFL images, STANDARD tar.gz] for Landsat8
    # product_type = ['FRB' .jpg, STANDARD .zip] for Sentinel 2

    product_dict = {
        "platform_name": "Landsat-8",
        "dataset_name": "LANDSAT_8_C1",
        "entity_id": entity_id,
        "name": tile_name,
    }

    def callback(progress_so_far, total_filesize, percentage_complete):
        # BUG FIX: celery_task defaults to None, but the original dereferenced
        # it unconditionally, raising AttributeError whenever this function was
        # called without a task. Report progress only when a task is present.
        if celery_task is not None:
            celery_task.update_state(state=states.STARTED,
                                     meta={"download": percentage_complete})

    product_type = "STANDARD"

    result = downloader.download_product(product_dict,
                                         product_type,
                                         directory=WORKING_FOLDER_PATH,
                                         callback=callback)

    return result
コード例 #3
0
def download_using_landsat_downloader(tile_name):
    """Search USGS for a Sentinel-2 tile by name and download every match.

    Returns the result of the last download, or None when nothing matched.
    """
    downloader = l8_downloader.L8Downloader(CONFIG_FILE_PATH, verbose=False)

    product_dict = {
        "platform_name": "Sentinel-2",
        "dataset_name": "SENTINEL_2A",
        "entity_id": tile_name,
        "name": tile_name,
    }

    query_dict = {"cloud_percent": 100}

    matches = downloader.search_for_products_by_name(
        product_dict["dataset_name"], [tile_name], query_dict
    )
    product_type = "STANDARD"

    module_logger.debug(matches)

    download_result = None
    if len(matches) > 0:
        for match in matches:
            download_result = downloader.download_product(
                match, product_type, directory=WORKING_FOLDER_PATH
            )

    module_logger.debug(download_result)

    return download_result
コード例 #4
0
    def test_get_dataset_metadata_info_sentinel2(self):
        """Field-id lookup for the Sentinel-2 dataset should return data."""
        config_path = Path(TEST_DIR, "test_data", "config.yaml")

        downloader_obj = l8_downloader.L8Downloader(config_path)

        results = downloader_obj.get_dataset_field_ids(self.SENTINEL2_DATASET_NAME)

        # BUG FIX: the original asserted a constant True, which can never
        # fail; assert on the actual API response instead.
        self.assertIsNotNone(results)
コード例 #5
0
    def test_alberta_ag_extent(self):
        """Polygon-to-tiles search over the Alberta ag extent finds 988 products."""
        downloader_obj = l8_downloader.L8Downloader(
            Path(TEST_DIR, "test_data", "config.yaml"))
        search_hits = downloader_obj.search_for_products_polygon_to_tiles(
            self.SENTINEL2_DATASET_NAME,
            self.test_footprint_2,
            self.QUERY_DICT_EXAMPLE,
            detailed=True,
        )
        self.assertEqual(988, len(search_hits))
コード例 #6
0
    def test_search_for_products_by_tile(self):
        """Tile-list search with the example query should yield 12 products."""
        config = Path(TEST_DIR, "test_data", "config.yaml")
        downloader = l8_downloader.L8Downloader(config)
        found = downloader.search_for_products_by_tile(
            self.SENTINEL2_DATASET_NAME,
            self.tile_list_small,
            self.QUERY_DICT_EXAMPLE,
            detailed=True,
        )
        print(found)
        self.assertEqual(len(found), 12)
コード例 #7
0
    def test_search_for_products_by_tile_detailed(self):
        """A single-tile detailed query should return exactly one product."""
        config = Path(TEST_DIR, "test_data", "config.yaml")
        downloader = l8_downloader.L8Downloader(config)
        found = downloader.search_for_products_by_tile(
            self.SENTINEL2_DATASET_NAME,
            self.tile_list_small,
            self.query_dict_example_single_tile,
            detailed=True,
        )
        print(found)
        self.assertEqual(len(found), 1)
コード例 #8
0
def check_for_l1c_tile_usgs(tile_name):
    """Look up a Sentinel-2 L1C tile by name on USGS EarthExplorer.

    Args:
        tile_name: Product name to search for.

    Returns:
        The first matching product, or None when nothing matches.
    """
    # def search_for_products_by_name(self, dataset_name, product_name_list, query_dict, detailed=False, just_entity_ids=False, write_to_csv=False, call_count=0):
    downloader = l8_downloader.L8Downloader(CONFIG_FILE_PATH)

    query_dict = {"cloud_percent": 100}

    results = downloader.search_for_products_by_name(
        "SENTINEL_2A", [tile_name], query_dict
    )
    module_logger.debug(results)
    # Idiomatic truthiness check replaces len(results) > 0 / explicit else None.
    return results[0] if results else None
コード例 #9
0
    def test_search_for_products_polygon_to_tiles_sentinel3(self):
        """Polygon-to-tiles search should return a result set.

        NOTE(review): despite the 'sentinel3' name this queries
        SENTINEL2_DATASET_NAME — confirm whether the method name or the
        dataset constant is the mistake before renaming anything.
        """
        config_path = Path(TEST_DIR, "test_data", "config.yaml")

        downloader_obj = l8_downloader.L8Downloader(config_path)

        results = downloader_obj.search_for_products_polygon_to_tiles(
            self.SENTINEL2_DATASET_NAME,
            self.test_footprint_1,
            self.QUERY_DICT_EXAMPLE,
            detailed=True,
        )

        print(len(results))
        # BUG FIX: the original asserted a constant True, which can never
        # fail; assert on the actual API response instead.
        self.assertIsNotNone(results)
コード例 #10
0
    def test_search_scene_metadata(self):
        """Detailed metadata lookup should return one record per entity id."""
        with open(self.path_to_intermediate_query_results_usgs_ee, "r") as fh:
            saved_query = json.load(fh)

        downloader_obj = l8_downloader.L8Downloader(
            Path(TEST_DIR, "test_data", "config.yaml"))

        entity_ids = [
            record["entityId"] for record in saved_query["data"]["results"]
        ]

        print(entity_ids)

        metadata = downloader_obj.search_scene_metadata(
            self.SENTINEL2_DATASET_NAME, entity_ids
        )

        print(metadata)
        self.assertEqual(len(metadata), 12)
コード例 #11
0
    def test_search_for_products_polygon_to_tiles_sentinel2(self):
        """A footprint search over the Lethbridge area should return products."""
        config_path = Path(TEST_DIR, "test_data", "config.yaml")

        downloader_obj = l8_downloader.L8Downloader(config_path)

        results = downloader_obj.search_for_products(
            self.SENTINEL2_DATASET_NAME,
            self.test_footprint_3_lethbridge,
            self.QUERY_DICT_EXAMPLE,
        )

        print(len(results))
        # BUG FIX: the original indexed results[0] (crashing on an empty
        # result) and then asserted a constant True. Make the non-empty
        # requirement an explicit assertion; dead commented-out code removed.
        self.assertGreater(len(results), 0)
        print(results[0])
        print(results)
        single_element_list = [results[0]]
        print(single_element_list)
コード例 #12
0
    def test_populate_result_list(self):
        """populate_result_list output should equal the saved reference JSON."""
        # Load previously captured raw query results as the input fixture.
        with open(self.path_to_intermediate_query_results_usgs_ee, "r") as json_file:
            json_results = json.load(json_file)

        print(json_results)
        config_path = Path(TEST_DIR, "test_data", "config.yaml")

        downloader_obj = l8_downloader.L8Downloader(config_path)

        cleaned_results = downloader_obj.populate_result_list(
            json_results,
            self.SENTINEL2_PLATFORM_NAME,
            self.SENTINEL2_DATASET_NAME,
            detailed=False,
        )

        # BUG FIX: the original initialized 'cleaned_results_compare' but
        # loaded the file into 'clean_results_compare' (typo) and compared
        # the misspelled name — use one name consistently.
        with open(self.path_to_cleaned_query_results_usgs_ee, "r") as outfile:
            cleaned_results_compare = json.load(outfile)

        self.assertEqual(cleaned_results_compare, cleaned_results)
コード例 #13
0
ファイル: views.py プロジェクト: wscullen/tile_viewer_api
def multi_upload(request):
    """Handle the multi-file upload form.

    On POST with 'myfiles': save each uploaded file (renaming on name
    collision), and if a .shp file was among them, compute its WKT footprint
    and the intersecting MGRS/WRS tile lists, run a Sentinel-2 tile query,
    and render the upload template with the results.

    NOTE(review): the query date range (June 2018) and dataset are hard-coded
    below — presumably prototype code; confirm before reuse.
    """
    if request.method == "POST" and request.FILES.getlist("myfiles"):
        files_urls = []

        # Path of the .shp file, if one was uploaded; drives the search branch.
        shapefile_uploaded = None

        # Suffix used to de-duplicate filenames that already exist in MEDIA_ROOT.
        random_rename_string = get_random_string(length=8)

        for afile in request.FILES.getlist("myfiles"):
            fs = FileSystemStorage()

            # check if a file with the same name already exists
            full_path = Path(settings.MEDIA_ROOT, afile.name)
            module_logger.debug(afile.name)
            if full_path.exists():
                filename = (Path(afile.name).stem + random_rename_string +
                            Path(afile.name).suffix)
            else:
                filename = afile.name

            filename = fs.save(filename, afile)
            uploaded_file_url = fs.url(filename)

            if Path(filename).suffix == ".shp":
                module_logger.debug(uploaded_file_url)
                module_logger.debug(filename)
                shapefile_uploaded = Path(settings.MEDIA_ROOT, filename)

                print(shapefile_uploaded)

            files_urls.append(uploaded_file_url)

        if shapefile_uploaded:
            # Derive the footprint and the tile grids it intersects.
            wkt_footprint = grid_intersect.get_wkt_from_shapefile(
                str(shapefile_uploaded))

            mgrs_list = grid_intersect.find_mgrs_intersection(wkt_footprint)
            wrs_list = grid_intersect.find_wrs_intersection(wkt_footprint)

            # config_path
            # landsat_downloader query here
            # search_for_products_by_tile
            # search_for_products_by_tile(self, dataset_name, tile_list, query_dict, just_entity_ids=False, write_to_csv=False, detailed=False):

            # Hard-coded demo window: June 2018.
            date_start = datetime.strptime("20180601", "%Y%m%d")
            date_end = datetime.strptime("20180630", "%Y%m%d")

            arg_list = {"date_start": date_start, "date_end": date_end}

            arg_list["cloud_percent"] = 100

            config_path = Path(
                Path(__file__).absolute().parent.parent,
                "landsat_downloader",
                "config.json",
            )
            module_logger.debug(config_path)
            downloader = l8_downloader.L8Downloader(config_path, verbose=False)
            search_results = downloader.search_for_products_by_tile(
                "SENTINEL_2A", mgrs_list, arg_list, detailed=True)

            module_logger.debug(search_results)

            # NOTE(review): search_results is logged but not passed to the
            # template — confirm whether that is intentional.
            return render(
                request,
                "s2d2_app/multi_file_upload.html",
                {
                    "uploaded_file_url_list": files_urls,
                    "wkt_footprint": wkt_footprint,
                    "mgrs_list": mgrs_list,
                    "wrs_list": wrs_list,
                },
            )

        else:
            return render(
                request,
                "s2d2_app/multi_file_upload.html",
                {"uploaded_file_url_list": files_urls},
            )
    return render(request, "s2d2_app/multi_file_upload.html")
コード例 #14
0
def download_l8_bulk_order(self, order_id):
    """Celery task body: download a bulk USGS order, extract each archive,
    and upload the resulting L2A products to S3.

    Progress for the download and upload phases is accumulated in Redis
    (keyed by the task id) and mirrored into the task's state meta so that
    clients can poll it.

    Args:
        self: The bound Celery task instance.
        order_id: Identifier of the USGS bulk order to download.

    Returns:
        A list of (phase, TaskStatus) tuples when the whole pipeline
        succeeded, or an empty list otherwise.

    Raises:
        TaskFailureException: If any phase reports a failed TaskStatus.
    """
    downloader = l8_downloader.L8Downloader(CONFIG_FILE_PATH, verbose=False)

    redis_instance.set(str(self.request.id) + "_download_progress", 0)

    def download_callback(ct, file_name, size, bytes_transferred):
        """Accumulate download progress in Redis and mirror it to task meta."""
        module_logger.info("Inside download callback")
        module_logger.info(str(ct.request.id))
        current_bytes = int(
            redis_instance.get(str(ct.request.id) + "_download_progress"))
        current_bytes += bytes_transferred
        percent_complete = float(round((current_bytes / size) * 100, 2))
        module_logger.info(bytes_transferred)
        module_logger.info(percent_complete)

        update_dict = {}

        module_logger.info(file_name)
        update_dict[file_name] = {
            "status": states.STARTED,
            "download": percent_complete,
            "download_file_size": size,
        }

        try:
            ct.update_state(task_id=str(ct.request.id),
                            state=states.STARTED,
                            meta=update_dict)
        except BaseException as e:
            module_logger.info("Task isnt ready for updates to meta yet...")
            module_logger.error(str(e))

        # Reset the accumulator when a file finishes so the next file in the
        # order starts its percentage from zero.
        if percent_complete > 99:
            redis_instance.set(str(ct.request.id) + "_download_progress", 0)
        else:
            redis_instance.set(
                str(ct.request.id) + "_download_progress", int(current_bytes))

    download_callback_bound = partial(download_callback, self)

    download_folder_path = Path(settings.BASE_DIR, "working_folder")

    try:
        download_result = downloader.download_order(
            order_id,
            directory=download_folder_path,
            callback=download_callback_bound)
    except BaseException as e:
        module_logger.debug("something went wrong while trying to download")
        download_result = TaskStatus(False, "Download step failed", str(e))

    # BUG FIX: these names were previously assigned only inside conditional
    # branches, so the code after the loop raised NameError whenever the
    # download failed, the order was empty, or extract/upload never ran.
    task_status_list = []
    upload_result = None
    extract_result = None
    result_list = []

    if download_result.status:

        for file_name in download_result.data:

            file_path = Path(WORKING_FOLDER_PATH, file_name)
            # TODO: Extract needs to extract to a folder with the product name instead of to the
            # root of the working folder
            extract_result = unarchive(file_path, WORKING_FOLDER_PATH)
            if extract_result.status:
                module_logger.debug(extract_result)
                l2a_path = find_l2a_path(extract_result.data)

                l2a_name = extract_result.message

                module_logger.warning(f"L2A Path: {l2a_path}")
                module_logger.info(f"L2A Name: {l2a_name}")

                full_path = l2a_path

                # Total bytes to upload: sum of all files for a directory
                # product, or a single stat for a flat file.
                if full_path.is_dir():
                    size = sum(f.stat().st_size for f in full_path.glob("**/*")
                               if f.is_file())
                else:
                    size = float(full_path.stat().st_size)

                redis_instance.set(
                    str(self.request.id) + "_upload_progress", 0)

                def upload_callback(ct, file_name, size, bytes_transferred):
                    """Accumulate upload progress in Redis and mirror it to task meta."""
                    module_logger.info("Inside upload callback")
                    module_logger.info(str(ct.request.id))
                    current_bytes = int(
                        redis_instance.get(
                            str(ct.request.id) + "_upload_progress"))
                    current_bytes += bytes_transferred
                    percent_complete = float(
                        round((current_bytes / size) * 100, 2))
                    module_logger.info(bytes_transferred)
                    module_logger.info(percent_complete)

                    update_dict = {}

                    module_logger.info(file_name)
                    update_dict[file_name] = {
                        "status": states.STARTED,
                        "upload": percent_complete,
                        "upload_file_size": size,
                    }

                    try:
                        ct.update_state(
                            task_id=str(ct.request.id),
                            state=states.STARTED,
                            meta=update_dict,
                        )
                    except BaseException as e:
                        module_logger.info(
                            "Task isnt ready for updates to meta yet...")
                        module_logger.error(str(e))

                    redis_instance.set(
                        str(ct.request.id) + "_upload_progress",
                        int(current_bytes))

                # partial pins self/l2a_name/size now, so the S3 helper only
                # needs to supply bytes_transferred.
                upload_callback_bound = partial(upload_callback, self,
                                                l2a_name, size)

                upload_result = s3_helper.upload_unarchived_product_to_s3_bucket(
                    l2a_path,
                    OUTPUT_BUCKET_NAME,
                    callback=upload_callback_bound)

                if upload_result.status:
                    # Download, extract and upload successful; need to update
                    # the individual jobs for each tile.
                    task_status_list.append(
                        TaskStatus(True, upload_result.message, None))
                else:
                    task_status_list.append(
                        TaskStatus(False,
                                   f"Upload step failed for {file_name}",
                                   None))

            else:
                task_status_list.append(
                    TaskStatus(False, f"Extract step failed for {file_name}",
                               None))
            # Allow check jobs to get task status successfully
            time.sleep(30)

    task_status_list.append(download_result)

    for task_status in task_status_list:
        if not task_status.status:
            module_logger.error("Failure occurred during bulk download phase")
            module_logger.error(task_status.message)
            raise TaskFailureException(task_status.message)

    if upload_result is not None and upload_result.status:
        module_logger.info("overall job done successfully")

        extract_result = TaskStatus(extract_result[0], str(extract_result[1]),
                                    str(extract_result[2]))
        # BUG FIX: the third field previously reused extract_result[2]
        # (copy/paste error); use the download result's own data field.
        download_result = TaskStatus(download_result[0],
                                     str(download_result[1]),
                                     str(download_result[2]))

        result_list = [
            ("download", download_result),
            ("extract", extract_result),
            ("upload", upload_result),
        ]

        module_logger.debug(result_list)

    clean_up_folder(WORKING_FOLDER_PATH)

    return result_list
コード例 #15
0
    def test_config_init_bad_config2(self):
        """A malformed config file must raise ConfigFileProblem."""
        bad_config = Path(TEST_DIR, "test_data", "config_bad2.yaml")
        with self.assertRaises(ConfigFileProblem):
            l8_downloader.L8Downloader(path_to_config=bad_config)
コード例 #16
0
    def test_config_init_no_config(self):
        """Constructing the downloader without any config must fail."""
        with self.assertRaises(FileNotFoundError):
            l8_downloader.L8Downloader()
コード例 #17
0
ファイル: views.py プロジェクト: wscullen/tile_viewer_api
    def post(self, request, format=None):
        """Handle an AOI (area of interest) submission.

        Expects 'shapefiles' (and optionally 'visualizationShapefiles') in
        request.FILES plus 'startDate', 'endDate' and 'platforms' fields in
        the POST data. Saves the shapefiles, derives the WKT footprint and
        the intersecting MGRS/WRS tile grids, queries the requested
        platforms ('sentinel2' via ESA scihub, 'landsat8' via USGS) and
        returns the combined results as JSON.
        """

        HOSTNAME = request.get_host()
        module_logger.debug("hello")
        module_logger.info(request.FILES)

        shapefiles = request.FILES.getlist("shapefiles")
        module_logger.debug(shapefiles)

        visualization_shapefiles = request.FILES.getlist(
            "visualizationShapefiles")
        module_logger.info(visualization_shapefiles)

        if request.FILES.getlist("shapefiles"):

            files_urls = []
            shapefile_uploaded = None
            random_rename_string = get_random_string(length=8)

            # Make sure that all the required shapefiles are there
            file_ext_name_list = [
                Path(f.name).suffix
                for f in request.FILES.getlist("shapefiles")
            ]
            shapefile_ext_list = [".shp", ".shx", ".dbf", ".prj"]
            missing_ext_list = []
            for ext in shapefile_ext_list:
                if ext not in file_ext_name_list:
                    module_logger.debug(f"missing {ext} file")
                    module_logger.debug(
                        f"files with these extensions found: {file_ext_name_list}"
                    )
                    missing_ext_list.append(ext)

            if missing_ext_list:
                return Response({
                    "error":
                    f'Missing required files for shapefile ({", ".join(missing_ext_list)})'
                })

            module_logger.debug(request.FILES.getlist("shapefiles"))

            # Save every uploaded part, renaming on filename collision.
            for afile in request.FILES.getlist("shapefiles"):

                module_logger.debug(afile.name)

                fs = FileSystemStorage()

                # check if a file with the same name already exists
                full_path = Path(settings.MEDIA_ROOT, afile.name)

                if full_path.exists():
                    filename = (Path(afile.name).stem + random_rename_string +
                                Path(afile.name).suffix)
                else:
                    filename = afile.name

                filename = fs.save(filename, afile)

                uploaded_file_url = fs.url(filename)

                if Path(filename).suffix == ".shp":
                    module_logger.debug(uploaded_file_url)
                    module_logger.debug(filename)
                    shapefile_uploaded = Path(settings.MEDIA_ROOT, filename)

                    module_logger.debug(shapefile_uploaded)

                files_urls.append(uploaded_file_url)

            if shapefile_uploaded:
                visualization_wkt_list = []

                # Handle visualization shapefile conversion
                if request.FILES.getlist("visualizationShapefiles"):
                    module_logger.info("visualization shapefiles uploaded")
                    shapefile_paths = parseVisualizationShapefiles(
                        request.FILES.getlist("visualizationShapefiles"))
                    module_logger.info("Shapefile paths:")
                    module_logger.info(shapefile_paths)

                    # shapefile_paths entries look like (name, path) —
                    # indexed positionally below.
                    if shapefile_paths:
                        for shapefile in shapefile_paths:
                            wkt = grid_intersect.get_wkt_from_shapefile(
                                str(shapefile[1]))
                            visualization_wkt_list.append({
                                "name": shapefile[0],
                                "wkt": wkt
                            })

                    module_logger.info(visualization_wkt_list)

                wkt_footprint = grid_intersect.get_wkt_from_shapefile(
                    str(shapefile_uploaded))

                module_logger.info("Finding MGRS intersection list...")
                mgrs_list = grid_intersect.find_mgrs_intersection(
                    wkt_footprint)
                module_logger.info("Finding WRS intersection list...")
                wrs_list = grid_intersect.find_wrs_intersection(wkt_footprint)

                # Build (tile-id, WKT) pairs for the overlay geojson layers.
                wrs_wkt_geometry = []
                module_logger.info(len(wrs_list))
                for wrs in wrs_list:
                    wkt = grid_intersect.get_wkt_for_wrs_tile(wrs)
                    module_logger.info(wkt)
                    wrs_wkt_geometry.append((wrs, wkt))

                module_logger.debug("WRS AND WKT")
                module_logger.debug(wrs_wkt_geometry)

                wrs_geojson = create_geojson_wrs_overlay(wrs_wkt_geometry)
                module_logger.debug(wrs_geojson)

                mgrs_wkt_geometry = []

                for mgrs in mgrs_list:
                    wkt = grid_intersect.get_wkt_for_mgrs_tile(mgrs)
                    mgrs_wkt_geometry.append((mgrs, wkt))

                module_logger.debug("MGRS AND WKT")
                module_logger.debug(mgrs_wkt_geometry)

                mgrs_geojson = create_geojson_mgrs_overlay(mgrs_wkt_geometry)
                module_logger.debug(mgrs_geojson)

                # config_path
                # landsat_downloader query here
                # search_for_products_by_tile
                # search_for_products_by_tile(self, dataset_name, tile_list, query_dict, just_entity_ids=False, write_to_csv=False, detailed=False):

                aoi_fields = request.data

                module_logger.debug(aoi_fields)
                # Expand the submitted YYYYMMDD dates to cover whole days.
                date_start = datetime.strptime(aoi_fields["startDate"],
                                               "%Y%m%d").replace(
                                                   hour=0,
                                                   minute=0,
                                                   second=0,
                                                   microsecond=000000)
                date_end = datetime.strptime(aoi_fields["endDate"],
                                             "%Y%m%d").replace(
                                                 hour=23,
                                                 minute=59,
                                                 second=59,
                                                 microsecond=999999)

                arg_list = {"date_start": date_start, "date_end": date_end}

                arg_list["cloud_percent"] = 100
                arg_list["collection_category"] = ["T1", "T2"]

                # {'fieldId': 20510, 'name': 'Collection Category', 'fieldLink': 'https://lta.cr.usgs.gov/DD/landsat_dictionary.html#collection_category', 'valueList': [{'value': None, 'name': 'All'}, {'value': 'T1', 'name': 'Tier 1'}, {'value': 'T2', 'name': 'Tier 2'}, {'value': 'RT', 'name': 'Real-Time'}]},

                module_logger.debug(arg_list)
                config_path = Path(settings.BASE_DIR, "config.yaml")
                module_logger.debug(config_path)

                search_results = {}
                platforms = aoi_fields["platforms"].split(",")
                module_logger.debug(platforms)
                module_logger.debug(wkt_footprint)

                for platform in platforms:
                    if platform == "sentinel2":

                        # Sentinel-2 products come from ESA scihub.
                        s2_dl = s2_downloader.S2Downloader(config_path)
                        s2_end_date = date_end + dt.timedelta(days=1)
                        module_logger.debug(mgrs_list)
                        s2_results = s2_dl.search_for_products_by_footprint(
                            wkt_footprint, (f'{date_start.isoformat()}Z',
                                            f'{s2_end_date.isoformat()}Z'),
                            product_type="L1C")
                        module_logger.debug(s2_results)
                        module_logger.debug(wkt_footprint)

                        module_logger.debug("scihub sentinel results ")

                        search_results[platform] = []
                        for key in s2_results.keys():
                            module_logger.debug(key)
                            product_dict = s2_results[key]
                            module_logger.debug(product_dict)
                            # Derive the MGRS tile id from the product title
                            # when scihub does not provide it directly.
                            if "tileid" not in product_dict.keys():
                                product_dict["tileid"] = product_dict[
                                    "title"].split("_")[5][1:]

                            wkt_string = str(product_dict["footprint"])
                            module_logger.debug(wkt_string)

                            data_footprint = wkt_loads(wkt_string)

                            module_logger.debug(data_footprint.geom_type)

                            if data_footprint.geom_type == "MultiPolygon":
                                # do multipolygon things.
                                actual_polygon = list(data_footprint)[0]
                            elif data_footprint.geom_type == "Polygon":
                                # do polygon things.
                                actual_polygon = data_footprint
                            else:
                                # raise IOError('Shape is not a polygon.')
                                raise IOError(
                                    "Invalid footprint geometry (Not a polygon or multipolygon)."
                                )

                            module_logger.debug(actual_polygon)
                            # check if the valid data footprint actually intersects our area of interest
                            data_intersect = spatial_utils.polygons_intersect(
                                wkt_footprint, str(actual_polygon))

                            module_logger.debug(data_intersect)

                            if data_intersect:
                                # Normalize the scihub record into the shape
                                # the front end expects.
                                product_dict[
                                    "footprint"] = grid_intersect.get_wkt_for_mgrs_tile(
                                        product_dict["tileid"])
                                module_logger.debug(product_dict)
                                product_dict["name"] = product_dict["title"]
                                product_dict[
                                    "acquisition_start"] = product_dict[
                                        "beginposition"]
                                product_dict["acquisition_end"] = product_dict[
                                    "endposition"]
                                # title_parts = product_dict['title'].split('_')
                                # product_dict['usgs_name'] = f'{title_parts[1][3:]}_{title_parts[5]}_A{str(product_dict["orbitnumber"]).zfill(6)}_{title_parts[2]}'
                                product_dict["espg_code"] = 4326
                                product_dict["cloud_percent"] = str(
                                    product_dict["cloudcoverpercentage"])
                                product_dict[
                                    "geojson"] = create_geojson_feature_esa(
                                        product_dict)

                                # Steps
                                # Download preview image to media folder
                                # update low res preview url for each tile.
                                module_logger.debug(
                                    "trying to download lowres preview url")
                                local_filename = download_file_esa(
                                    product_dict["link_icon"],
                                    product_dict["title"])
                                module_logger.debug(HOSTNAME)

                                if local_filename:
                                    module_logger.debug(
                                        f"http://{HOSTNAME}/media/lowres_previews/{local_filename}"
                                    )
                                    product_dict[
                                        "preview_url"] = f"http://{HOSTNAME}/media/lowres_previews/{local_filename}"

                                search_results[platform].append(product_dict)

                    if platform == "landsat8":

                        # Landsat-8 products come from the USGS API.
                        downloader = l8_downloader.L8Downloader(config_path,
                                                                verbose=False)

                        results = downloader.search_for_products(
                            "LANDSAT_8_C1",
                            wkt_footprint,
                            arg_list,
                            detailed=True,
                            realtime=False)

                        module_logger.info(len(results))

                        for tile in results:
                            # Steps
                            # Download preview image to media folder
                            # update low res preview url for each tile.
                            module_logger.debug(
                                "trying to download lowres preview url----")
                            local_filename = download_file(tile["preview_url"])
                            module_logger.debug(HOSTNAME)

                            module_logger.debug(tile)

                            tile["geojson"] = create_geojson_feature(tile)

                            if local_filename:
                                module_logger.debug(
                                    f"http://{HOSTNAME}/media/lowres_previews/{local_filename}"
                                )
                                tile[
                                    "preview_url"] = f"http://{HOSTNAME}/media/lowres_previews/{local_filename}"

                        search_results[platform] = results

                # Code below is a task for downloading and creating higher resolution previews for each tile (less than 50% cloud)
                # TODO: implement with celery task queue instead of django-workers (unreliable connection to postgres database)
                # for platform in search_results.keys():
                #     for result in search_results[platform]:
                #         print('DJANGO WORKERS TASK')
                #         print(result)
                #         result_serializable = {
                #             'platform_name': result['geojson']['properties']['platform_name'],
                #             'name': result['geojson']['properties']['name'],
                #             'dataset_name': result['geojson']['properties']['dataset_name'],
                #             'entity_id': result['geojson']['properties']['entity_id'],
                #             'api_source': result['geojson']['properties']['api_source'],
                #         }
                #         download_fullrespreview(result_serializable, result_serializable['api_source'])

                if search_results:
                    return Response({
                        "data": {
                            "id": str(uuid.uuid4()),
                            "uploaded_file_url_list": files_urls,
                            "wkt_footprint": wkt_footprint,
                            "wkt_vis_list": visualization_wkt_list,
                            "mgrs_list": mgrs_list,
                            "wrs_list": wrs_list,
                            "sensor_list": platforms,
                            "wrs_geojson": wrs_geojson,
                            "mgrs_geojson": mgrs_geojson,
                            "tile_results": search_results,
                        }
                    })
                else:
                    return Response({
                        "data": {
                            "id": str(uuid.uuid4()),
                            "uploaded_file_url_list": files_urls,
                            "wkt_footprint": wkt_footprint,
                            "wkt_vis_list": visualization_wkt_list,
                            "mgrs_list": mgrs_list,
                            "wrs_list": wrs_list,
                            "tile_results": [],
                        }
                    })
        else:
            return Response({"error": "Missing required shapefiles data"})
コード例 #18
0
    def test_config_init_missing_values(self):
        """A config missing required keys must raise ConfigValueMissing."""
        incomplete_config = Path(TEST_DIR, "test_data", "config_missing.yaml")
        with self.assertRaises(ConfigValueMissing):
            l8_downloader.L8Downloader(path_to_config=incomplete_config)
コード例 #19
0
def download_fullrespreview(tile_dict, api_source):
    """Download a full-resolution preview for a tile and emit web-friendly PNGs.

    Downloads the preview JPEG via the API-specific downloader, turns the
    near-black nodata border transparent, saves half- and quarter-size PNGs
    under MEDIA_ROOT/highres_previews, and deletes the original JPEG.

    Args:
        tile_dict: product dict; needs at least 'platform_name' (usgs_ee) or
            'entity_id' (esa_scihub).
        api_source: 'usgs_ee' or 'esa_scihub' — selects the downloader API.
            Any other value is silently a no-op (matches original behavior).

    Side effects: network download, writes "<stem>_half.png" and
    "<stem>_quar.png" to disk, removes the downloaded source image.
    """
    print("downloading the full res preview")

    highres_dir = Path(settings.MEDIA_ROOT, "highres_previews")
    print(tile_dict)

    if api_source == "usgs_ee":
        # Landsat-8 previews carry burned-in NASA/USGS logos that must be
        # masked before the transparency pass; Sentinel-2 ('FRB') do not.
        if tile_dict["platform_name"] == "Landsat-8":
            product_type = "FR_REFL"
        else:
            product_type = "FRB"

        l8_dl = l8_downloader.L8Downloader("", verbose=False)
        result = l8_dl.download_product(tile_dict, product_type, directory=highres_dir)
        print(result)

        # download_product returns a tuple; index 2 is the local file path
        # — TODO confirm against l8_downloader's return contract.
        file_name = result[2]
        _create_transparent_previews(
            file_name,
            highres_dir,
            mask_logos=(tile_dict["platform_name"] == "Landsat-8"),
        )

    elif api_source == "esa_scihub":
        s2_dl = s2_downloader.S2Downloader("")
        result = s2_dl.download_tci(tile_dict["entity_id"], highres_dir)

        file_name = result[2]
        _create_transparent_previews(file_name, highres_dir, mask_logos=False)


def _create_transparent_previews(file_name, highres_dir, mask_logos=False):
    """Make nodata transparent and save half/quarter-size PNG previews.

    Args:
        file_name: path to the downloaded preview image (JPEG).
        highres_dir: directory the resized PNGs are written to.
        mask_logos: when True, black out the approximate USGS (bottom-left)
            and NASA (bottom-right) logo regions so the transparency pass
            removes them along with the nodata border.

    Side effects: writes "<stem>_half.png" and "<stem>_quar.png" into
    highres_dir and removes the original file at file_name.
    """
    result_justfilename = Path(file_name).name
    print(result_justfilename)

    image = Image.open(file_name)
    image = image.convert("RGBA")
    width, height = image.size

    if mask_logos:
        # Empirically measured logo footprints (see original measurements:
        # NASA ~668x559 at 7253,7462; USGS ~1276x379 at 0,7671), padded.
        usgs_logo_width, usgs_logo_height = 1300, 400
        nasa_logo_width, nasa_logo_height = 900, 750

        black_box_nasa = Image.new(image.mode, (nasa_logo_width, nasa_logo_height), "#000")
        black_box_usgs = Image.new(image.mode, (usgs_logo_width, usgs_logo_height), "#000")

        image.paste(black_box_nasa, (width - nasa_logo_width, height - nasa_logo_height))
        image.paste(black_box_usgs, (0, height - usgs_logo_height))

    # Treat near-black pixels (all channels <= 20) as nodata -> transparent.
    new_data = [
        (0, 0, 0, 0) if (item[0] <= 20 and item[1] <= 20 and item[2] <= 20) else item
        for item in image.getdata()
    ]
    image.putdata(new_data)

    image_half = image.resize((math.floor(width / 2), math.floor(height / 2)))
    image_quarter = image.resize((math.floor(width / 4), math.floor(height / 4)))

    stem = Path(result_justfilename).stem
    image_half.save(Path(highres_dir, stem + "_half.png"))
    image_quarter.save(Path(highres_dir, stem + "_quar.png"))

    # Once the transparent PNGs are generated, remove the original JPEG.
    os.remove(file_name)