Example #1
    def __init__(self, path_to_config):

        self.config_path = path_to_config

        if os.path.exists(self.config_path):
            with open(self.config_path) as f:
                self.config = json.load(f)
        else:
            self.config = None
            raise FileNotFoundError(
                f"Config file not found at {self.config_path}"
            )

        # TODO Should change this to use Env vars
        self.esa_username = self.config['SENTINEL_USER']
        self.esa_password = self.config['SENTINEL_PASS']

        self.asf_username = self.config['ASF_USER']
        self.asf_password = self.config['ASF_PASS']

        self.primary_dl_src = self.config['S1']['DOWNLOAD']

        self.esa_downloader = esa_downloader.S2Downloader(self.config_path)

        if self.primary_dl_src == 'USGS_ASF':
            self.secondary_dl_src = 'ESA_SCIHUB'
        elif self.primary_dl_src == 'ESA_SCIHUB':
            self.secondary_dl_src = 'USGS_ASF'
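
The constructor documents the expected config only through the keys it reads. A minimal sketch of a matching JSON config file, assuming nothing beyond those keys (the real project config may contain more sections):

import json

# Hypothetical config containing only the keys read in __init__ above;
# the real config file may hold additional settings.
example_config = {
    "SENTINEL_USER": "esa_scihub_username",
    "SENTINEL_PASS": "esa_scihub_password",
    "ASF_USER": "asf_username",
    "ASF_PASS": "asf_password",
    "S1": {"DOWNLOAD": "USGS_ASF"},  # primary S1 source; or "ESA_SCIHUB"
}

with open("config.json", "w") as f:
    json.dump(example_config, f, indent=2)
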
Example #2
    def test_download_fullproduct_celery(self):
        s2_dl = s2_downloader.S2Downloader(path_to_config=Path(BASE_DIR, "config.yaml"))

        result = s2_dl.download_fullproduct_callback(
            "6574b5fa-3898-4c9e-9c36-028193764211",
            "S2A_MSIL1C_20190620T181921_N0207_R127_T12UXA_20190620T231306",
            Path(os.path.abspath(os.path.dirname(__file__)), "test_data"),
            lambda x, y, z: print(f"{x} - {y} - {z}"),
        )
        print(result)
        self.assertTrue(True)
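
The three positional arguments fed to the progress lambda are not named here; judging from the callback defined in Example #5, they are bytes downloaded so far, total file size, and percentage complete. A named callback (a sketch using those assumed meanings) reads more clearly than the lambda:

def print_progress(progress_so_far, total_filesize, percentage_complete):
    # Parameter names borrowed from the callback in Example #5; the exact
    # meaning of each value is an assumption, not documented here.
    print(f"{progress_so_far}/{total_filesize} bytes ({percentage_complete}%)")
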
Example #3
def check_for_l1c_tile_esa(tile_name):
    """Use sentinel_downloader to query for L2A version of this tile"""

    s2_dl = s2_downloader.S2Downloader(CONFIG_FILE_PATH)

    query_dict = {"producttype": "S2MSI1C"}

    search_result = s2_dl.search_for_products_by_name(
        "Sentinel-2", [tile_name], query_dict
    )

    module_logger.debug(search_result)
    if len(search_result) > 0:
        return search_result.popitem(last=False)[1]
    else:
        return None
Example #4
def check_for_l2a_tile_esa(tile_name):
    """Use sentinel_downloader to query for L2A version of this tile"""

    s2_dl = s2_downloader.S2Downloader(CONFIG_FILE_PATH)

    query_dict = {"producttype": "S2MSI2A"}

    tile_name = tile_name.replace("L1C", "L2A")
    tile_name_parts = tile_name.split("_")
    tile_name_parts[3] = "*"
    tile_name_parts[6] = "*"

    search_name = "_".join(tile_name_parts)
    search_result = s2_dl.search_for_products_by_name(
        "Sentinel-2", [search_name], query_dict
    )

    module_logger.debug(search_result)
    if len(search_result) > 0:
        return search_result.popitem(last=False)[1]
    else:
        return None
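
To make the wildcarding above concrete: replacing parts 3 and 6 of the product name with '*' drops the processing baseline and the generation timestamp, so any L2A reprocessing of the same acquisition will match. A standalone walk-through using the L1C product name from Example #2:

l1c_name = "S2A_MSIL1C_20190620T181921_N0207_R127_T12UXA_20190620T231306"

# Same transformation as check_for_l2a_tile_esa performs on tile_name.
parts = l1c_name.replace("L1C", "L2A").split("_")
parts[3] = "*"  # processing baseline (N0207)
parts[6] = "*"  # product generation timestamp
search_name = "_".join(parts)

print(search_name)
# -> S2A_MSIL2A_20190620T181921_*_R127_T12UXA_*
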
Example #5
def download_using_sentinel_downloader(tile_id, tile_name, celery_task=None):

    # Reference signature: S2Downloader.download_fullproduct(self, tile_id, tile_name, directory)
    s2_dl = s2_downloader.S2Downloader(CONFIG_FILE_PATH)

    if celery_task:

        def callback(progress_so_far, total_filesize, percentage_complete):
            module_logger.info("Celery task info in download callback:")
            module_logger.info(celery_task)
            module_logger.info(type(celery_task))
            module_logger.info(percentage_complete)

            # celery_task is the bound task instance; AsyncResult needs its id
            result = AsyncResult(celery_task.request.id)
            info = result.info
            state = result.state
            module_logger.info("Celery task info and state:")
            module_logger.info(info)
            module_logger.info(state)
            celery_task.update_state(
                state=states.STARTED, meta={"download": percentage_complete}
            )

        download_result = s2_dl.download_fullproduct_callback(
            tile_id, tile_name, WORKING_FOLDER_PATH, callback
        )

    else:

        download_result = s2_dl.download_fullproduct(
            tile_id, tile_name, WORKING_FOLDER_PATH
        )

    module_logger.debug(download_result)

    return download_result
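
For the celery_task branch to work, the caller has to pass in a bound task instance, since the callback calls celery_task.update_state(...). A sketch of such a caller, assuming a Celery app and task name that are not part of the original code:

from celery import Celery

app = Celery("downloader")  # hypothetical app; broker configuration omitted

@app.task(bind=True)
def download_tile_task(self, tile_id, tile_name):
    # With bind=True, 'self' is the running task instance, which is what
    # download_using_sentinel_downloader expects for celery_task.
    return download_using_sentinel_downloader(tile_id, tile_name, celery_task=self)
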
Example #6
def query_by_name(platform_name,
                  name_list,
                  arg_list,
                  date_string,
                  config_path=None):

    try:
        s2_dl = s2_downloader.S2Downloader(config_path)

        products = s2_dl.search_for_products_by_name(platform_name, name_list,
                                                     arg_list)

    except Exception as e:
        logger.debug('Error occurred while trying to query API: {}'.format(e))
        print('Sorry, something went wrong while trying to query the API')
        raise
    else:
        products_dict = {}
        if products:

            print(products)
            if platform_name == 'Sentinel-1':
                for key, value in products.items():
                    print(key)
                    print(value)
                    product_dict = {}
                    product_dict['entity_id'] = key

                    # S1 specific metadata
                    product_dict['sensor_mode'] = value[
                        'sensoroperationalmode']
                    product_dict['polarization_mode'] = value[
                        'polarisationmode']
                    product_dict['product_type'] = value['producttype']

                    product_dict['detailed_metadata'] = value
                    product_dict['api_source'] = 'esa_copernicus'
                    product_dict['download_source'] = None
                    product_dict['footprint'] = value['footprint']

                    product_dict['acquisition_start'] = value['beginposition']

                    product_dict['acquisition_end'] = value['endposition']

                    geom = ogr.CreateGeometryFromWkt(product_dict['footprint'])
                    # GetEnvelope() returns a tuple (minX, maxX, minY, maxY)
                    env = geom.GetEnvelope()

                    def envelope_to_wkt(env_tuple):
                        coord1 = str(env_tuple[0]) + \
                            ' ' + str(env_tuple[3])
                        coord2 = str(env_tuple[1]) + \
                            ' ' + str(env_tuple[3])
                        coord3 = str(env_tuple[1]) + \
                            ' ' + str(env_tuple[2])
                        coord4 = str(env_tuple[0]) + \
                            ' ' + str(env_tuple[2])

                        wkt_string = "POLYGON(({}, {}, {}, {}, {}))".format(
                            coord1, coord2, coord3, coord4, coord1)
                        return wkt_string

                    product_dict['mbr'] = envelope_to_wkt(env)

                    product_dict['dataset_name'] = 'S2MSI1C'
                    product_dict['name'] = value['title']
                    product_dict['sat_name'] = 'Sentinel-1A' if product_dict[
                        'name'][2] == 'A' else 'Sentinel-1B'
                    product_dict['vendor_name'] = value['identifier']
                    product_dict['uuid'] = key

                    product_dict['preview_url'] = value['link_icon']
                    product_dict['manual_product_url'] = value['link']
                    product_dict['manual_download_url'] = value[
                        'link_alternative']
                    product_dict['manual_bulkorder_url'] = None
                    # TODO: create a link to the metadata files using an HTTP GET request
                    product_dict['metadata_url'] = None
                    product_dict['last_modified'] = value['ingestiondate']
                    product_dict['bulk_inprogress'] = None
                    product_dict['summary'] = value['summary']

                    # TODO: write a conversion module for converting between pathrow and MGRS centroids (nearest neighbor or most coverage)
                    product_dict['pathrow'] = None

                    # TODO: calculate this value once the atmos and scene classes are done
                    product_dict['land_cloud_percent'] = None

                    product_dict['cloud_percent'] = None

                    product_dict['platform_name'] = value['platformname']
                    product_dict['instrument'] = value['instrumentshortname']

                    # TODO: Create a converter that converts PATH/ROW to MGRS and vice versa
                    # TODO: S1 does not come with a tile id, look up through shapefiles
                    product_dict['mgrs'] = None
                    product_dict['orbit'] = value['relativeorbitnumber']
                    product_dict['abs_orbit'] = value['orbitnumber']

                    products_dict[key] = product_dict

            elif platform_name == 'Sentinel-2':
                for key, value in products.items():
                    product_dict = {}
                    product_dict['entity_id'] = key

                    product_dict['detailed_metadata'] = value
                    product_dict['api_source'] = 'esa_copernicus'
                    product_dict['download_source'] = None
                    product_dict['footprint'] = value['footprint']

                    product_dict['acquisition_start'] = value['beginposition']

                    product_dict['acquisition_end'] = value['endposition']

                    geom = ogr.CreateGeometryFromWkt(product_dict['footprint'])
                    # GetEnvelope() returns a tuple (minX, maxX, minY, maxY)
                    env = geom.GetEnvelope()

                    def envelope_to_wkt(env_tuple):
                        coord1 = str(env_tuple[0]) + \
                            ' ' + str(env_tuple[3])
                        coord2 = str(env_tuple[1]) + \
                            ' ' + str(env_tuple[3])
                        coord3 = str(env_tuple[1]) + \
                            ' ' + str(env_tuple[2])
                        coord4 = str(env_tuple[0]) + \
                            ' ' + str(env_tuple[2])

                        wkt_string = "POLYGON(({}, {}, {}, {}, {}))".format(
                            coord1, coord2, coord3, coord4, coord1)
                        return wkt_string

                    product_dict['mbr'] = envelope_to_wkt(env)

                    product_dict['dataset_name'] = 'S2MSI1C'
                    product_dict['name'] = value['title']
                    product_dict['uuid'] = key

                    product_dict['size'] = value['size'][0:-3]

                    product_dict['preview_url'] = value['link_icon']
                    product_dict['manual_product_url'] = value['link']
                    product_dict['manual_download_url'] = value[
                        'link_alternative']
                    product_dict['manual_bulkorder_url'] = None
                    # TODO: create a link to the metadata files using an HTTP GET request
                    product_dict['metadata_url'] = None
                    product_dict['last_modified'] = value['ingestiondate']
                    product_dict['bulk_inprogress'] = None
                    product_dict['summary'] = value['summary']
                    product_dict['sat_name'] = value[
                        'platformserialidentifier']
                    product_dict['vendor_name'] = value['identifier']

                    # TODO: write a conversion module for converting between pathrow and MGRS centroids (nearest neighbor or most coverage)
                    product_dict['pathrow'] = None

                    # TODO: calculate this value once the atmos and scene classes are done
                    product_dict['land_cloud_percent'] = None

                    product_dict['cloud_percent'] = value[
                        'cloudcoverpercentage']

                    product_dict['platform_name'] = value['platformname']
                    product_dict['instrument'] = value['instrumentshortname']

                    # TODO: Create a converter that converts PATH/ROW to MGRS and vice versa
                    if 'tileid' in value.keys():
                        product_dict['mgrs'] = value['tileid']
                    else:
                        product_dict['mgrs'] = 'n/a'
                    product_dict['orbit'] = value['relativeorbitnumber']
                    product_dict['abs_orbit'] = value['orbitnumber']

                    products_dict[key] = product_dict

            return products_dict
        else:
            print('No product found.')
            return {}
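
A minimal usage sketch for query_by_name, reusing the product name from Example #2 and the producttype filter from Example #3; the config path is an assumption:

results = query_by_name(
    "Sentinel-2",
    ["S2A_MSIL1C_20190620T181921_N0207_R127_T12UXA_20190620T231306"],
    {"producttype": "S2MSI1C"},
    date_string=None,  # accepted but unused by the current implementation
    config_path="config.yaml",
)
for uuid_key, product in results.items():
    print(uuid_key, product["name"], product["acquisition_start"])
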
Example #7
def query_by_polygon(platform_name,
                     polygon_list,
                     arg_list,
                     date_string,
                     config_path=None):
    """ platform_name can be ['Sentinel-1', 'Sentinel-2', 'Landsat-8']
        polygon_list is a list of wkt polygons
        arg_list has
            all
            ['date_start', 'date_end', 'raw_coverage']
            Sentinel-1
            ['product_type', 'sensor_mode', 'resolution']
            Sentinel-2
            ['cloud_percent', 'coverage_minus_cloud']
            Landsat-8
            ['cloud_percent', 'coverage_minus_cloud']
    """

    products_dict = {}

    # args that apply to all products
    arg_dict = {
        'date':
        (arg_list['date_start'], arg_list['date_end'] + timedelta(days=1)),
        'platformname': platform_name,
    }

    if platform_name == 'Sentinel-1':

        if 'product_type' in arg_list:
            arg_dict['producttype'] = arg_list['product_type']

        if 'sensor_mode' in arg_list:
            arg_dict['sensoroperationalmode'] = arg_list['sensor_mode']

        if 'resolution' in arg_list:
            arg_dict['filename'] = 'S1?_??_???{}_*'.format(
                arg_list['resolution'])

        logger.info('Querying Copernicus API for S1 with args: %s' % arg_dict)
        print(arg_dict)
        for index, fp in enumerate(polygon_list):
            products = None

            try:
                s2_dl = s2_downloader.S2Downloader(config_path)

                products = s2_dl.search_for_products(platform_name, fp,
                                                     arg_dict)

            except Exception as e:
                logger.debug(
                    'Error occurred while trying to query API: {}'.format(e))
                print('Sorry, something went wrong while trying to query the API')
                raise
            else:
                if products:
                    for key, value in products.items():

                        product_dict = {}
                        product_dict['entity_id'] = key

                        # S1 specific metadata
                        product_dict['sensor_mode'] = value[
                            'sensoroperationalmode']
                        product_dict['polarization_mode'] = value[
                            'polarisationmode']
                        product_dict['product_type'] = value['producttype']

                        product_dict['detailed_metadata'] = value
                        product_dict['api_source'] = 'esa_copernicus'
                        product_dict['download_source'] = None
                        product_dict['footprint'] = value['footprint']

                        product_dict['acquisition_start'] = value[
                            'beginposition']

                        product_dict['acquisition_end'] = value['endposition']

                        geom = ogr.CreateGeometryFromWkt(
                            product_dict['footprint'])
                        # GetEnvelope() returns a tuple (minX, maxX, minY, maxY)
                        env = geom.GetEnvelope()

                        def envelope_to_wkt(env_tuple):
                            coord1 = str(env_tuple[0]) + \
                                ' ' + str(env_tuple[3])
                            coord2 = str(env_tuple[1]) + \
                                ' ' + str(env_tuple[3])
                            coord3 = str(env_tuple[1]) + \
                                ' ' + str(env_tuple[2])
                            coord4 = str(env_tuple[0]) + \
                                ' ' + str(env_tuple[2])

                            wkt_string = "POLYGON(({}, {}, {}, {}, {}))".format(
                                coord1, coord2, coord3, coord4, coord1)
                            return wkt_string

                        product_dict['mbr'] = envelope_to_wkt(env)

                        product_dict['dataset_name'] = 'S2MSI1C'
                        product_dict['name'] = value['title']
                        product_dict[
                            'sat_name'] = 'Sentinel-1A' if product_dict[
                                'name'][2] == 'A' else 'Sentinel-1B'
                        product_dict['vendor_name'] = value['identifier']
                        product_dict['uuid'] = key

                        product_dict['preview_url'] = value['link_icon']
                        product_dict['manual_product_url'] = value['link']
                        product_dict['manual_download_url'] = value[
                            'link_alternative']
                        product_dict['manual_bulkorder_url'] = None
                        # TODO: create a link to the metadata files using an HTTP GET request
                        product_dict['metadata_url'] = None
                        product_dict['last_modified'] = value['ingestiondate']
                        product_dict['bulk_inprogress'] = None
                        product_dict['summary'] = value['summary']

                        # TODO: write a conversion module for converting between pathrow and MGRS centroids (nearest neighbor or most coverage)
                        product_dict['pathrow'] = None

                        # TODO: calculate this value once the atmos and scene classes are done
                        product_dict['land_cloud_percent'] = None

                        product_dict['cloud_percent'] = None

                        product_dict['platform_name'] = value['platformname']
                        product_dict['instrument'] = value[
                            'instrumentshortname']

                        # TODO: Create a converter that converts PATH/ROW to MGRS and vice versa
                        # TODO: S1 does not come with a tile id, look up through shapefiles
                        product_dict['mgrs'] = None
                        product_dict['orbit'] = value['relativeorbitnumber']
                        product_dict['abs_orbit'] = value['orbitnumber']

                        products_dict[key] = product_dict

    elif platform_name == 'Sentinel-2':

        if 'cloud_percent' in arg_list:
            arg_dict['cloudcoverpercentage'] = (0, arg_list['cloud_percent'])

        print('Querying Copernicus API for S2 with args: %s' % arg_dict)

        for index, fp in enumerate(polygon_list):
            products = None
            print(fp)
            try:
                s2_dl = s2_downloader.S2Downloader(config_path)

                products = s2_dl.search_for_products(platform_name, fp,
                                                     arg_dict)
            except Exception as e:
                logger.debug(
                    'Error occurred while trying to query API: {}'.format(e))
                print('Sorry, something went wrong while trying to query the API')
                raise
            else:
                print('inside api wrapper')
                print(products)
                for p in products.items():
                    print(p)
                if products:
                    for key, value in products.items():
                        product_dict = {}
                        product_dict['entity_id'] = key

                        product_dict['detailed_metadata'] = value
                        product_dict['api_source'] = 'esa_copernicus'
                        product_dict['download_source'] = None
                        product_dict['footprint'] = value['footprint']

                        product_dict['acquisition_start'] = value[
                            'beginposition']

                        product_dict['acquisition_end'] = value['endposition']

                        geom = ogr.CreateGeometryFromWkt(
                            product_dict['footprint'])
                        # GetEnvelope() returns a tuple (minX, maxX, minY, maxY)
                        env = geom.GetEnvelope()

                        def envelope_to_wkt(env_tuple):
                            coord1 = str(env_tuple[0]) + \
                                ' ' + str(env_tuple[3])
                            coord2 = str(env_tuple[1]) + \
                                ' ' + str(env_tuple[3])
                            coord3 = str(env_tuple[1]) + \
                                ' ' + str(env_tuple[2])
                            coord4 = str(env_tuple[0]) + \
                                ' ' + str(env_tuple[2])

                            wkt_string = "POLYGON(({}, {}, {}, {}, {}))".format(
                                coord1, coord2, coord3, coord4, coord1)
                            return wkt_string

                        product_dict['mbr'] = envelope_to_wkt(env)

                        product_dict['dataset_name'] = 'S2MSI1C'
                        product_dict['name'] = value['title']
                        product_dict['uuid'] = key

                        product_dict['size'] = value['size'][0:-3]

                        product_dict['preview_url'] = value['link_icon']
                        product_dict['manual_product_url'] = value['link']
                        product_dict['manual_download_url'] = value[
                            'link_alternative']
                        product_dict['manual_bulkorder_url'] = None
                        # TODO: create a link to the metadata files using an HTTP GET request
                        product_dict['metadata_url'] = None
                        product_dict['last_modified'] = value['ingestiondate']
                        product_dict['bulk_inprogress'] = None
                        product_dict['summary'] = value['summary']
                        product_dict['sat_name'] = value[
                            'platformserialidentifier']
                        product_dict['vendor_name'] = value['identifier']

                        # TODO: write a conversion module for converting between pathrow and MGRS centroids (nearest neighbor or most coverage)
                        product_dict['pathrow'] = None

                        # TODO: calculate this value once the atmos and scene classes are done
                        product_dict['land_cloud_percent'] = None

                        product_dict['cloud_percent'] = value[
                            'cloudcoverpercentage']

                        product_dict['platform_name'] = value['platformname']
                        product_dict['instrument'] = value[
                            'instrumentshortname']

                        # TODO: Create a converter that converts PATH/ROW to MGRS and vice versa
                        if 'tileid' in value.keys():
                            product_dict['mgrs'] = value['tileid']
                        else:
                            product_dict['mgrs'] = 'n/a'
                        product_dict['orbit'] = value['relativeorbitnumber']
                        product_dict['abs_orbit'] = value['orbitnumber']

                        products_dict[key] = product_dict

    else:
        logger.error('Invalid platform name: %s', platform_name)

    return products_dict
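
The docstring lists the arg_list keys each platform understands; a sketch of assembling one for a Sentinel-2 query over a single footprint (the WKT polygon, dates, and config path below are made-up values):

from datetime import datetime

aoi_wkt = (
    "POLYGON((-113.6 50.6, -113.0 50.6, -113.0 51.0, "
    "-113.6 51.0, -113.6 50.6))"
)

arg_list = {
    "date_start": datetime(2019, 6, 1),
    "date_end": datetime(2019, 6, 30),  # one day is added internally
    "cloud_percent": 20,                # becomes cloudcoverpercentage=(0, 20)
}

products = query_by_polygon(
    "Sentinel-2", [aoi_wkt], arg_list, date_string=None, config_path="config.yaml"
)
print(len(products), "products found")
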
Example #8
    def post(self, request, format=None):
        """
        Standard
        """

        HOSTNAME = request.get_host()
        module_logger.debug("hello")
        module_logger.info(request.FILES)

        shapefiles = request.FILES.getlist("shapefiles")
        module_logger.debug(shapefiles)

        visualization_shapefiles = request.FILES.getlist(
            "visualizationShapefiles")
        module_logger.info(visualization_shapefiles)

        if request.FILES.getlist("shapefiles"):

            files_urls = []
            shapefile_uploaded = None
            random_rename_string = get_random_string(length=8)

            # Make sure that all the required shapefiles are there
            file_ext_name_list = [
                Path(f.name).suffix
                for f in request.FILES.getlist("shapefiles")
            ]
            shapefile_ext_list = [".shp", ".shx", ".dbf", ".prj"]
            missing_ext_list = []
            for ext in shapefile_ext_list:
                if ext not in file_ext_name_list:
                    module_logger.debug(f"missing {ext} file")
                    module_logger.debug(
                        f"files with these extensions found: {file_ext_name_list}"
                    )
                    missing_ext_list.append(ext)

            if missing_ext_list:
                return Response({
                    "error":
                    f'Missing required files for shapefile ({", ".join(missing_ext_list)})'
                })

            module_logger.debug(request.FILES.getlist("shapefiles"))

            for afile in request.FILES.getlist("shapefiles"):

                module_logger.debug(afile.name)

                fs = FileSystemStorage()

                # check if a file with the same name already exists
                full_path = Path(settings.MEDIA_ROOT, afile.name)

                if full_path.exists():
                    filename = (Path(afile.name).stem + random_rename_string +
                                Path(afile.name).suffix)
                else:
                    filename = afile.name

                filename = fs.save(filename, afile)

                uploaded_file_url = fs.url(filename)

                if Path(filename).suffix == ".shp":
                    module_logger.debug(uploaded_file_url)
                    module_logger.debug(filename)
                    shapefile_uploaded = Path(settings.MEDIA_ROOT, filename)

                    module_logger.debug(shapefile_uploaded)

                files_urls.append(uploaded_file_url)

            if shapefile_uploaded:
                visualization_wkt_list = []

                # Handle visualization shapefile conversion
                if request.FILES.getlist("visualizationShapefiles"):
                    module_logger.info("visualization shapefiles uploaded")
                    shapefile_paths = parseVisualizationShapefiles(
                        request.FILES.getlist("visualizationShapefiles"))
                    module_logger.info("Shapefile paths:")
                    module_logger.info(shapefile_paths)

                    if shapefile_paths:
                        for shapefile in shapefile_paths:
                            wkt = grid_intersect.get_wkt_from_shapefile(
                                str(shapefile[1]))
                            visualization_wkt_list.append({
                                "name": shapefile[0],
                                "wkt": wkt
                            })

                    module_logger.info(visualization_wkt_list)

                wkt_footprint = grid_intersect.get_wkt_from_shapefile(
                    str(shapefile_uploaded))

                module_logger.info("Finding MGRS intersection list...")
                mgrs_list = grid_intersect.find_mgrs_intersection(
                    wkt_footprint)
                module_logger.info("Finding WRS intersection list...")
                wrs_list = grid_intersect.find_wrs_intersection(wkt_footprint)

                wrs_wkt_geometry = []
                module_logger.info(len(wrs_list))
                for wrs in wrs_list:
                    wkt = grid_intersect.get_wkt_for_wrs_tile(wrs)
                    module_logger.info(wkt)
                    wrs_wkt_geometry.append((wrs, wkt))

                module_logger.debug("WRS AND WKT")
                module_logger.debug(wrs_wkt_geometry)

                wrs_geojson = create_geojson_wrs_overlay(wrs_wkt_geometry)
                module_logger.debug(wrs_geojson)

                mgrs_wkt_geometry = []

                for mgrs in mgrs_list:
                    wkt = grid_intersect.get_wkt_for_mgrs_tile(mgrs)
                    mgrs_wkt_geometry.append((mgrs, wkt))

                module_logger.debug("MGRS AND WKT")
                module_logger.debug(mgrs_wkt_geometry)

                mgrs_geojson = create_geojson_mgrs_overlay(mgrs_wkt_geometry)
                module_logger.debug(mgrs_geojson)

                # config_path
                # landsat_downloader query here
                # search_for_products_by_tile
                # search_for_products_by_tile(self, dataset_name, tile_list, query_dict, just_entity_ids=False, write_to_csv=False, detailed=False):

                aoi_fields = request.data

                module_logger.debug(aoi_fields)
                date_start = datetime.strptime(
                    aoi_fields["startDate"], "%Y%m%d"
                ).replace(hour=0, minute=0, second=0, microsecond=0)
                date_end = datetime.strptime(
                    aoi_fields["endDate"], "%Y%m%d"
                ).replace(hour=23, minute=59, second=59, microsecond=999999)

                arg_list = {"date_start": date_start, "date_end": date_end}

                arg_list["cloud_percent"] = 100
                arg_list["collection_category"] = ["T1", "T2"]

                # {'fieldId': 20510, 'name': 'Collection Category', 'fieldLink': 'https://lta.cr.usgs.gov/DD/landsat_dictionary.html#collection_category', 'valueList': [{'value': None, 'name': 'All'}, {'value': 'T1', 'name': 'Tier 1'}, {'value': 'T2', 'name': 'Tier 2'}, {'value': 'RT', 'name': 'Real-Time'}]},

                module_logger.debug(arg_list)
                config_path = Path(settings.BASE_DIR, "config.yaml")
                module_logger.debug(config_path)

                search_results = {}
                platforms = aoi_fields["platforms"].split(",")
                module_logger.debug(platforms)
                module_logger.debug(wkt_footprint)

                for platform in platforms:
                    if platform == "sentinel2":

                        s2_dl = s2_downloader.S2Downloader(config_path)
                        s2_end_date = date_end + dt.timedelta(days=1)
                        module_logger.debug(mgrs_list)
                        s2_results = s2_dl.search_for_products_by_footprint(
                            wkt_footprint, (f'{date_start.isoformat()}Z',
                                            f'{s2_end_date.isoformat()}Z'),
                            product_type="L1C")
                        module_logger.debug(s2_results)
                        module_logger.debug(wkt_footprint)

                        module_logger.debug("scihub sentinel results ")

                        search_results[platform] = []
                        for key in s2_results.keys():
                            module_logger.debug(key)
                            product_dict = s2_results[key]
                            module_logger.debug(product_dict)
                            if "tileid" not in product_dict.keys():
                                product_dict["tileid"] = product_dict[
                                    "title"].split("_")[5][1:]

                            wkt_string = str(product_dict["footprint"])
                            module_logger.debug(wkt_string)

                            data_footprint = wkt_loads(wkt_string)

                            module_logger.debug(data_footprint.geom_type)

                            if data_footprint.geom_type == "MultiPolygon":
                                # use the first polygon of the multipolygon
                                actual_polygon = list(data_footprint.geoms)[0]
                            elif data_footprint.geom_type == "Polygon":
                                actual_polygon = data_footprint
                            else:
                                # raise IOError('Shape is not a polygon.')
                                raise IOError(
                                    "Invalid footprint geometry (Not a polygon or multipolygon)."
                                )

                            module_logger.debug(actual_polygon)
                            # check if the valid data footprint actually intersects our area of interest
                            data_intersect = spatial_utils.polygons_intersect(
                                wkt_footprint, str(actual_polygon))

                            module_logger.debug(data_intersect)

                            if data_intersect:
                                product_dict[
                                    "footprint"] = grid_intersect.get_wkt_for_mgrs_tile(
                                        product_dict["tileid"])
                                module_logger.debug(product_dict)
                                product_dict["name"] = product_dict["title"]
                                product_dict[
                                    "acquisition_start"] = product_dict[
                                        "beginposition"]
                                product_dict["acquisition_end"] = product_dict[
                                    "endposition"]
                                # title_parts = product_dict['title'].split('_')
                                # product_dict['usgs_name'] = f'{title_parts[1][3:]}_{title_parts[5]}_A{str(product_dict["orbitnumber"]).zfill(6)}_{title_parts[2]}'
                                product_dict["espg_code"] = 4326
                                product_dict["cloud_percent"] = str(
                                    product_dict["cloudcoverpercentage"])
                                product_dict[
                                    "geojson"] = create_geojson_feature_esa(
                                        product_dict)

                                # Steps
                                # Download preview image to media folder
                                # update low res preview url for each tile.
                                module_logger.debug(
                                    "trying to download lowres preview url")
                                local_filename = download_file_esa(
                                    product_dict["link_icon"],
                                    product_dict["title"])
                                module_logger.debug(HOSTNAME)

                                if local_filename:
                                    module_logger.debug(
                                        f"http://{HOSTNAME}/media/lowres_previews/{local_filename}"
                                    )
                                    product_dict[
                                        "preview_url"] = f"http://{HOSTNAME}/media/lowres_previews/{local_filename}"

                                search_results[platform].append(product_dict)

                    if platform == "landsat8":

                        downloader = l8_downloader.L8Downloader(config_path,
                                                                verbose=False)

                        results = downloader.search_for_products(
                            "LANDSAT_8_C1",
                            wkt_footprint,
                            arg_list,
                            detailed=True,
                            realtime=False)

                        module_logger.info(len(results))

                        for tile in results:
                            # Steps
                            # Download preview image to media folder
                            # update low res preview url for each tile.
                            module_logger.debug(
                                "trying to download lowres preview url----")
                            local_filename = download_file(tile["preview_url"])
                            module_logger.debug(HOSTNAME)

                            module_logger.debug(tile)

                            tile["geojson"] = create_geojson_feature(tile)

                            if local_filename:
                                module_logger.debug(
                                    f"http://{HOSTNAME}/media/lowres_previews/{local_filename}"
                                )
                                tile[
                                    "preview_url"] = f"http://{HOSTNAME}/media/lowres_previews/{local_filename}"

                        search_results[platform] = results

                # Code below is a task for downloading and creating higher resolution previews for each tile (less than 50% cloud)
                # TODO: implement with celery task queue instead of django-workers (unreliable connection to postgres database)
                # for platform in search_results.keys():
                #     for result in search_results[platform]:
                #         print('DJANGO WORKERS TASK')
                #         print(result)
                #         result_serializable = {
                #             'platform_name': result['geojson']['properties']['platform_name'],
                #             'name': result['geojson']['properties']['name'],
                #             'dataset_name': result['geojson']['properties']['dataset_name'],
                #             'entity_id': result['geojson']['properties']['entity_id'],
                #             'api_source': result['geojson']['properties']['api_source'],
                #         }
                #         download_fullrespreview(result_serializable, result_serializable['api_source'])

                if search_results:
                    return Response({
                        "data": {
                            "id": str(uuid.uuid4()),
                            "uploaded_file_url_list": files_urls,
                            "wkt_footprint": wkt_footprint,
                            "wkt_vis_list": visualization_wkt_list,
                            "mgrs_list": mgrs_list,
                            "wrs_list": wrs_list,
                            "sensor_list": platforms,
                            "wrs_geojson": wrs_geojson,
                            "mgrs_geojson": mgrs_geojson,
                            "tile_results": search_results,
                        }
                    })
                else:
                    return Response({
                        "data": {
                            "id": str(uuid.uuid4()),
                            "uploaded_file_url_list": files_urls,
                            "wkt_footprint": wkt_footprint,
                            "wkt_vis_list": visualization_wkt_list,
                            "mgrs_list": mgrs_list,
                            "wrs_list": wrs_list,
                            "tile_results": [],
                        }
                    })
        else:
            return Response({"error": "Missing required shapefiles data"})
Example #9
def download_fullrespreview(tile_dict, api_source):
    print("downloading the full res preview")

    highres_dir = Path(settings.MEDIA_ROOT, "highres_previews")
    print(tile_dict)

    if api_source == "usgs_ee":

        if tile_dict["platform_name"] == "Landsat-8":
            product_type = "FR_REFL"
        else:
            product_type = "FRB"

        l8_dl = l8_downloader.L8Downloader("", verbose=False)

        result = l8_dl.download_product(tile_dict, product_type, directory=highres_dir)

        print(result)

        file_name = result[2]

        result_justfilename = Path(file_name).name
        result_png_name = Path(result_justfilename).stem + ".png"

        print(result_justfilename)
        print(result_png_name)

        # nasa logo position and size
        # 7253 7462
        # 668 559
        # usgs logo position
        # 0 7671
        # 1276 379

        # Pillow code to make the nodata transparent
        image = Image.open(file_name)
        image = image.convert("RGBA")

        width, height = image.size

        usgs_logo_pos_x = 0
        usgs_logo_pos_y = height - 400
        usgs_logo_width = 1300
        usgs_logo_height = 400

        nasa_logo_pos_x = width - 900
        nasa_logo_pos_y = height - 750

        nasa_logo_width = 900
        nasa_logo_height = 750

        if tile_dict["platform_name"] == "Landsat-8":

            blackBoxNasa = Image.new(
                image.mode, (nasa_logo_width, nasa_logo_height), "#000"
            )
            blackBoxUSGS = Image.new(
                image.mode, (usgs_logo_width, usgs_logo_height), "#000"
            )

            image.paste(blackBoxNasa, (nasa_logo_pos_x, nasa_logo_pos_y))
            image.paste(blackBoxUSGS, (usgs_logo_pos_x, usgs_logo_pos_y))

        datas = image.getdata()

        newData = []
        for item in datas:
            if item[0] <= 20 and item[1] <= 20 and item[2] <= 20:
                newData.append((0, 0, 0, 0))
            else:
                newData.append(item)

        image.putdata(newData)

        image_half = image.resize((math.floor(width / 2), math.floor(height / 2)))
        image_quarter = image.resize((math.floor(width / 4), math.floor(height / 4)))

        image_half.save(Path(highres_dir, Path(result_justfilename).stem + "_half.png"))
        image_quarter.save(
            Path(highres_dir, Path(result_justfilename).stem + "_quar.png")
        )

        # image.save(Path(highres_dir, result_png_name))
        # once the PNG with transparency is generated, remove the original JPEG
        os.remove(file_name)

    elif api_source == "esa_scihub":
        s2_dl = s2_downloader.S2Downloader("")

        result = s2_dl.download_tci(tile_dict["entity_id"], highres_dir)

        file_name = result[2]

        result_justfilename = Path(file_name).name
        result_png_name = Path(result_justfilename).stem + ".png"

        print(result_justfilename)
        print(result_png_name)

        # nasa logo position and size
        # 7253 7462
        # 668 559
        # usgs logo position
        # 0 7671
        # 1276 379

        # Pillow code to make the nodata transparent
        image = Image.open(file_name)
        image = image.convert("RGBA")

        width, height = image.size

        datas = image.getdata()

        newData = []
        for item in datas:
            if item[0] <= 20 and item[1] <= 20 and item[2] <= 20:
                newData.append((0, 0, 0, 0))
            else:
                newData.append(item)

        image.putdata(newData)

        image_half = image.resize((math.floor(width / 2), math.floor(height / 2)))
        image_quarter = image.resize((math.floor(width / 4), math.floor(height / 4)))

        image_half.save(Path(highres_dir, Path(result_justfilename).stem + "_half.png"))
        image_quarter.save(
            Path(highres_dir, Path(result_justfilename).stem + "_quar.png")
        )

        # image.save(Path(highres_dir, result_png_name))
        # once the PNG with transparency is generated, remove the original JPEG
        os.remove(file_name)
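
A minimal call sketch: the esa_scihub branch only reads "entity_id" from tile_dict before handing off to S2Downloader.download_tci, while the usgs_ee branch also reads "platform_name" and passes the whole dict to L8Downloader.download_product (whose required keys are not shown here), so the values below are placeholders:

# Placeholder tile dict for the ESA branch; entity_id reuses the product
# uuid from Example #2.
esa_tile = {"entity_id": "6574b5fa-3898-4c9e-9c36-028193764211"}
download_fullrespreview(esa_tile, "esa_scihub")

# The usgs_ee branch forwards the dict to L8Downloader.download_product,
# which likely needs additional fields beyond these two.
usgs_tile = {"platform_name": "Landsat-8", "entity_id": "<landsat-entity-id>"}
# download_fullrespreview(usgs_tile, "usgs_ee")
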