Exemple #1
0
def test_get_product_info():
    """Verify that SentinelAPI.get_product_odata returns the expected OData
    metadata for one known Sentinel-1 and one known Sentinel-2 product."""
    api = SentinelAPI(**_api_auth)

    # Reference metadata for a Sentinel-1 GRD product.
    expected_s1 = {
        'id': '8df46c9e-a20c-43db-a19a-4240c2ed3b8b',
        'size': 143549851,
        'md5': 'D5E4DF5C38C6E97BF7E7BD540AB21C05',
        'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/$value",
        'date': '2015-11-21T10:03:56Z',
        'footprint': (
            '-63.852531 -5.880887,-67.495872 -5.075419,-67.066071 -3.084356,-63.430576 -3.880541,'
            '-63.852531 -5.880887'
        ),
        'title': 'S1A_EW_GRDM_1SDV_20151121T100356_20151121T100429_008701_00C622_A0EC',
    }

    # Reference metadata for a Sentinel-2 L1C product (long multi-part footprint).
    expected_s2 = {
        'date': '2015-12-27T14:22:29Z',
        'footprint': (
            '-58.80274769505742 -4.565257232533263,-58.80535376268811 -5.513960396525286,'
            '-57.90315169909761 -5.515947033626909,-57.903151791669515 -5.516014389089381,-57.85874693129081 -5.516044812342758,'
            '-57.814323596961835 -5.516142631941845,-57.81432351345917 -5.516075248310466,-57.00018056571297 -5.516633044843839,'
            '-57.000180565731384 -5.516700066819259,-56.95603179187787 -5.51666329264377,-56.91188395837315 -5.516693539799448,'
            '-56.91188396736038 -5.51662651925904,-56.097209386295305 -5.515947927683427,-56.09720929423562 -5.516014937246069,'
            '-56.053056977999596 -5.5159111504805916,-56.00892491028779 -5.515874390220655,-56.00892501130261 -5.515807411549814,'
            '-55.10621586418906 -5.513685455771881,-55.108821882251775 -4.6092845892233,-54.20840287327946 -4.606372862374043,'
            '-54.21169990975238 -3.658594390979672,-54.214267703869346 -2.710949551849636,-55.15704255065496 -2.7127451087194463,'
            '-56.0563616875051 -2.71378646425769,-56.9561852630143 -2.7141556791285275,-57.8999998009875 -2.713837142510183,'
            '-57.90079161941062 -3.6180222056692726,-58.800616247288836 -3.616721351843382,-58.80274769505742 -4.565257232533263'
        ),
        'id': '44517f66-9845-4792-a988-b5ae6e81fd3e',
        'md5': '48C5648C2644CE07207B3C943DEDEB44',
        'size': 5854429622,
        'title': 'S2A_OPER_PRD_MSIL1C_PDMC_20151228T112523_R110_V20151227T142229_20151227T142229',
        'url': "https://scihub.copernicus.eu/apihub/odata/v1/Products('44517f66-9845-4792-a988-b5ae6e81fd3e')/$value",
    }

    assert api.get_product_odata('8df46c9e-a20c-43db-a19a-4240c2ed3b8b') == expected_s1
    assert api.get_product_odata('44517f66-9845-4792-a988-b5ae6e81fd3e') == expected_s2
Exemple #2
0
def download_s1(user, password, dir_raw, dir_nc, start_date, end_date,
                footprint):
    """Query SciHub for Sentinel-1 GRD products over *footprint* between
    *start_date* and *end_date*; save each product's footprint to a WKT
    file and download the raw product when its NetCDF output is missing."""
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus/')

    products = api.query(footprint,
                         date=(start_date, end_date),
                         producttype='GRD')

    for product_id in products:
        info = api.get_product_odata(product_id)
        title = info['title']
        print(title)

        nc_path = os.path.join(dir_nc, "%s_VV.nc" % title)
        wkt_path = os.path.join(os.path.dirname(dir_nc), 'wkt',
                                "%s.wkt" % title)

        # Persist the footprint geometry once, in the wkt/ sibling folder.
        if not os.path.exists(wkt_path):
            with open(wkt_path, "a") as wkt_file:
                wkt_file.write(info['footprint'])

        # Only fetch the raw product if it has not been converted yet.
        if not os.path.exists(nc_path):
            api.download(product_id, dir_raw, checksum=True)
Exemple #3
0
def download_s2(user, password, dir_raw, dir_nc, start_date, end_date, footprint, pr_status):
    """Query SciHub for Sentinel-2 L1C products (cloud cover 0-20 %) over
    *footprint* between *start_date* and *end_date*; save each new product's
    footprint as a WKT file and download products not yet processed.

    ``pr_status`` maps "<tile>_<datetime>" keys to a processed flag; unseen
    keys are registered as False (pending) and their product is downloaded.
    Returns the updated ``pr_status`` mapping.
    """
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus/')

    products = api.query(footprint, date=(start_date, end_date),
                         producttype='S2MSI1C', cloudcoverpercentage=(0, 20))

    # Titles already handled in earlier runs; extended as downloads complete
    # so each title is fetched at most once.
    done_titles = ['S2A_MSIL1C_20180601T051651_N0206_R062_T43PFN_20180601T082308', 'S2A_MSIL1C_20180621T051651_N0206_R062_T43PFN_20180621T081647', 'S2B_MSIL1C_20180613T050649_N0206_R019_T43PFN_20180613T084228',
                   'S2A_MSIL1C_20180601T051651_N0206_R062_T43PFP_20180601T082308', 'S2A_MSIL1C_20180621T051651_N0206_R062_T43PFP_20180621T081647', 'S2B_MSIL1C_20180613T050649_N0206_R019_T43PFP_20180613T084228',
                   'S2A_MSIL1C_20180608T050651_N0206_R019_T43PFN_20180608T084904', 'S2A_MSIL1C_20180628T050651_N0206_R019_T43PFN_20180628T081023', 'S2B_MSIL1C_20180616T051649_N0206_R062_T43PFN_20180616T090733',
                   'S2A_MSIL1C_20180608T050651_N0206_R019_T43PFP_20180608T084904', 'S2A_MSIL1C_20180628T050651_N0206_R019_T43PFP_20180628T081023', 'S2B_MSIL1C_20180616T051649_N0206_R062_T43PFP_20180616T090733',
                   'S2A_MSIL1C_20180611T051651_N0206_R062_T43PFN_20180611T081245', 'S2B_MSIL1C_20180603T050649_N0206_R019_T43PFN_20180603T084545', 'S2B_MSIL1C_20180623T050649_N0206_R019_T43PFN_20180623T084444',
                   'S2A_MSIL1C_20180611T051651_N0206_R062_T43PFP_20180611T081245', 'S2B_MSIL1C_20180603T050649_N0206_R019_T43PFP_20180603T084545', 'S2B_MSIL1C_20180623T050649_N0206_R019_T43PFP_20180623T084444',
                   'S2A_MSIL1C_20180618T050651_N02206_R019_T43PFN_20180618T085607', 'S2B_MSIL1C_20180606T051649_N0206_R062_T43PFN_20180606T104751', 'S2B_MSIL1C_20180626T051649_N0206_R062_T43PFN_20180626T090058',
                   'S2A_MSIL1C_20180618T050651_N0206_R019_T43PFP_20180618T085607', 'S2B_MSIL1C_20180606T051649_N0206_R062_T43PFP_20180606T104751', 'S2B_MSIL1C_20180626T051649_N0206_R062_T43PFP_20180626T090058']

    for product in products:
        product_info = api.get_product_odata(product)
        title = product_info['title']

        if title in done_titles:
            continue

        # Key products by tile number + sensing datetime.
        tile_no_time = '%s_%s' % (title.split('_')[5], title.split('_')[2])

        try:
            download_flag = not pr_status[tile_no_time]
        except KeyError:
            # First time this tile/timestamp is seen: mark it pending.
            pr_status[tile_no_time] = False
            download_flag = True
            # BUG FIX: this was a Python 2 print statement (`print "no error"`),
            # a SyntaxError under Python 3.
            print("no error")

        wkt_path = os.path.join(os.path.dirname(dir_nc), "wkt/%s.wkt" % tile_no_time)

        # Persist the footprint geometry once per tile/time key.
        if not os.path.exists(wkt_path):
            with open(wkt_path, "a") as wkt_file:
                wkt_file.write(product_info['footprint'])

        if download_flag:
            api.download(product, dir_raw, checksum=True)
            done_titles.append(title)

    # BUG FIX: the original `return pr_status` sat inside the for-loop, so
    # only the first queried product was ever processed.
    return pr_status
Exemple #4
0
def download_s2(user, password, dir_raw, dir_nc, start_date, end_date,
                footprint, pr_status):
    """Query SciHub for Sentinel-2 L1C products over *footprint* between
    *start_date* and *end_date*; write each product's footprint to a WKT
    file and download products whose tile/time key is not yet marked as
    processed in *pr_status*. Returns the updated *pr_status* mapping."""
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus/')

    products = api.query(footprint,
                         date=(start_date, end_date),
                         producttype='S2MSI1C')

    for product_id in products:
        info = api.get_product_odata(product_id)
        title = info['title']

        # Key products by tile number + sensing datetime.
        tile_key = '%s_%s' % (title.split('_')[5], title.split('_')[2])
        try:
            do_download = not pr_status[tile_key]
        except KeyError:
            # Unseen tile/time: register it as pending and download it.
            pr_status[tile_key] = False
            do_download = True

        wkt_path = os.path.join(os.path.dirname(dir_nc),
                                "wkt/%s.wkt" % tile_key)

        # Persist the footprint geometry once per tile/time key.
        if not os.path.exists(wkt_path):
            with open(wkt_path, "a") as wkt_file:
                wkt_file.write(info['footprint'])

        if do_download:
            api.download(product_id, dir_raw, checksum=True)

    return pr_status
Exemple #5
0
def cli(user, password, geometry, start, end, uuid, name, download, md5, sentinel, producttype,
        instrument, cloud, footprints, path, query, url, order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    api = SentinelAPI(user, password, url)

    # Translate CLI options into SciHub search keywords.
    search_kwargs = {}
    # producttype/instrument already imply the platform, so only constrain
    # platformname when neither of them was given.
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        # Cloud cover metadata only exists for the optical missions.
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    # Raw query string: comma-separated "key=value" pairs.
    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))

    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Look the given product UUIDs up directly instead of searching.
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        # No UUIDs or name given: search by date range (defaults to all time).
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        # Dump all result footprints into a single GeoJSON file under `path`.
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path, checksum=md5)
        # With checksum verification on, record any corrupt downloads.
        if md5 is True:
            if len(failed_downloads) > 0:
                with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                    for failed_id in failed_downloads:
                        outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Not downloading: only log what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
Exemple #6
0
def cli(
    user,
    password,
    geometry,
    start,
    end,
    uuid,
    name,
    download,
    sentinel,
    producttype,
    instrument,
    cloud,
    footprints,
    path,
    query,
    url,
    order_by,
    limit,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    # Fall back to ~/.netrc credentials when none were passed on the CLI.
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass

    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    # Translate CLI options into SciHub search keywords.
    search_kwargs = {}
    # producttype/instrument already imply the platform, so only constrain
    # platformname when neither of them was given.
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        # Cloud cover metadata only exists for the optical missions.
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    # `query` is an iterable of "key=value" strings here.
    if query is not None:
        search_kwargs.update((x.split("=") for x in query))

    if geometry is not None:
        search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Look the given product UUIDs up directly instead of searching.
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if "Invalid key" in e.msg:
                    logger.error("No product with ID '%s' exists on server",
                                 productid)
                    exit(1)
                else:
                    raise
    elif name is not None:
        # Multiple names are combined into an OR-ed identifier expression.
        search_kwargs["identifier"] = name[0] if len(
            name) == 1 else "(" + " OR ".join(name) + ")"
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        # No UUIDs or names given: search by date range (defaults to all time).
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by,
                             limit=limit,
                             **search_kwargs)

    if footprints is True:
        footprints_geojson = api.to_geojson(products)
        # NOTE(review): `footprints` is the boolean True inside this branch,
        # so os.path.isdir(footprints) can never be a directory check on a
        # user-supplied path; foot_path appears to always fall back to the
        # bare filename. Looks like a directory option was intended — confirm.
        if os.path.isdir(footprints):
            foot_path = os.path.join(footprints, "search_footprints.geojson")
        else:
            foot_path = "search_footprints.geojson"
        if path == ".":
            dump_path = os.path.join(os.getcwd(), foot_path)
        else:
            dump_path = os.path.join(path, foot_path)
        with open(dump_path, "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        # Record any downloads that failed checksum/retrieval.
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"),
                      "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" %
                                  (failed_id, products[failed_id]["title"]))
    else:
        # Not downloading: only log what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:  # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
        if uuid is None:
            logger.info("---")
            logger.info(
                "%s scenes found with a total size of %.2f GB",
                len(products),
                api.get_products_size(products),
            )
Exemple #7
0
class S3SynergyDowload(object):
    """Sentinel 3 Synergy downloader using SentinelSat."""

    def __init__(self,
                 username,
                 password,
                 dest_dir,
                 sel_bands=None,
                 dload_options=None):
        """Set up the SciHub client and download configuration.

        Parameters
        ----------
        username, password : SciHub credentials.
        dest_dir : existing destination folder for downloaded granules.
        sel_bands : SYN bands to extract; defaults to [3, 6, 8, 18].
        dload_options : filtering thresholds; defaults to
            ``DLOAD_OPTS(5., 95., 75., -70.)``.
        """
        # Can probably do away with sentinelsat dependency
        self.api = SentinelAPI(username, password)
        self.auth = (username, password)
        # BUG FIX: the original only assigned self.dload_options when the
        # argument was None, silently dropping any caller-supplied options.
        if dload_options is None:
            dload_options = DLOAD_OPTS(5., 95., 75., -70.)
        self.dload_options = dload_options
        dest_dir = Path(dest_dir)
        if not dest_dir.exists():
            raise IOError(f"Destination folder does not exist: {dest_dir}")
        self.dest_dir = dest_dir
        # sel_bands defaults via None to avoid a mutable default argument.
        self.sel_bands = [3, 6, 8, 18] if sel_bands is None else sel_bands
        LOG.info(f"Selected bands: {self.sel_bands}")
        LOG.info(f"Destination folder: {self.dest_dir}")

    def _query(self, doy, year, polygon=None):
        """Query the science hub for SY_2_SYN granules on day-of-year *doy*
        of *year* (optionally limited to *polygon*, a GeoJSON file path) and
        filter them by latitude bounds, land cover and cloud cover."""
        date = dt.datetime.strptime(f"{year}{doy}", "%Y%j")
        date0 = date.strftime("%Y%m%d")
        date1 = (date + dt.timedelta(days=1)).strftime("%Y%m%d")
        # BUG FIX: removed a leftover `import pdb; pdb.set_trace()` debugger
        # breakpoint that halted every query.
        if polygon is not None:
            footprint = geojson_to_wkt(read_geojson(polygon))
        else:
            footprint = None
        products = self.api.query(area=footprint,
                                  date=(date0, date1),
                                  producttype='SY_2_SYN___')
        # Keep only granules inside the configured latitude band.
        selected_products = {
            k: product
            for k, product in products.items()
            if check_bounds(product['footprint'], self.dload_options.max_lat,
                            self.dload_options.min_lat)
        }
        keep = {}
        for p in selected_products:
            meta = self.api.get_product_odata(p, full=True)
            try:
                # Extra info, e.g. land cover percentage.
                # CONDITIONS
                # 1. Land cover above the configured threshold
                lc = meta["Land Cover Percentage (%)"]
                cond1 = lc > self.dload_options.landcover_keep
                # 2. Cloud cover below the configured threshold
                cloud = meta['Cloud Cover Percentage (%)']
                cond2 = cloud < self.dload_options.cloud_throw
                if cond1 and cond2:
                    keep[p] = meta
            except (KeyError, TypeError):
                # Granule lacks usable extended metadata; skip it
                # (narrowed from a bare `except` that hid all errors).
                pass
        LOG.info(f"Number of suitable granules {len(keep)}")
        return keep

    def download_data(self, doy, year, polygon):
        """Download every suitable granule for *doy*/*year* within *polygon*."""
        granules = self._query(doy, year, polygon)
        for k, granule in granules.items():
            LOG.info(f"Downloading granule {granule['Filename']}...")
            fname = granule['Filename']
            pid = granule["id"]
            fdir = create_outputs(self.dest_dir, doy, year, fname)
            download(fname, granule, self.auth, pid, fdir, self.sel_bands)
Exemple #8
0
def cli(
    user,
    password,
    geometry,
    start,
    end,
    uuid,
    name,
    download,
    quicklook,
    sentinel,
    producttype,
    instrument,
    cloud,
    footprints,
    path,
    query,
    url,
    order_by,
    location,
    limit,
    info,
):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    # Fall back to ~/.netrc credentials when none were passed on the CLI.
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            pass

    if user is None or password is None:
        raise click.UsageError(
            "Missing --user and --password. Please see docs "
            "for environment variables and .netrc support.")

    api = SentinelAPI(user, password, url)

    # --info: print the hub version and exit without searching.
    if info:
        ctx = click.get_current_context()
        click.echo("DHuS version: " + api.dhus_version)
        ctx.exit()

    # Translate CLI options into SciHub search keywords.
    search_kwargs = {}
    # producttype/instrument already imply the platform, so only constrain
    # platformname when neither of them was given.
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        # Cloud cover metadata only exists for the optical missions.
        if sentinel not in ["2", "3"]:
            logger.error("Cloud cover is only supported for Sentinel 2 and 3.")
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    # `query` is an iterable of "key=value" strings.
    if query is not None:
        search_kwargs.update((x.split("=") for x in query))

    if location is not None:
        # Geocode the place name to a WKT footprint and log its extent.
        # NOTE(review): this rebinds `info`, shadowing the --info flag; safe
        # only because the flag was already consumed above — confirm.
        wkt, info = placename_to_wkt(location)
        minX, minY, maxX, maxY = info["bbox"]
        r = 6371  # average radius, km
        # East-west extent shrinks with latitude (cosine of mid-latitude).
        extent_east = r * math.radians(maxX - minX) * math.cos(
            math.radians((minY + maxY) / 2))
        extent_north = r * math.radians(maxY - minY)
        logger.info(
            "Querying location: '%s' with %.1f x %.1f km, %f, %f to %f, %f bounding box",
            info["display_name"],
            extent_north,
            extent_east,
            minY,
            minX,
            maxY,
            maxX,
        )
        search_kwargs["area"] = wkt

    if geometry is not None:
        # check if the value is an existing path
        if os.path.exists(geometry):
            search_kwargs["area"] = geojson_to_wkt(read_geojson(geometry))
        # check if the value is a GeoJSON
        else:
            if geometry.startswith("{"):
                try:
                    geometry = json.loads(geometry)
                    search_kwargs["area"] = geojson_to_wkt(geometry)
                except json_parse_exception:
                    raise click.UsageError(
                        "geometry string starts with '{' but is not a valid GeoJSON."
                    )
            # check if the value is a WKT
            elif is_wkt(geometry):
                search_kwargs["area"] = geometry
            else:
                raise click.UsageError(
                    "The geometry input is neither a GeoJSON file with a valid path, "
                    "a GeoJSON String nor a WKT string.")

    if uuid is not None:
        # Look the given product UUIDs up directly instead of searching.
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except InvalidKeyError:
                logger.error("No product with ID '%s' exists on server",
                             productid)
                exit(1)
    elif name is not None:
        # Multiple names are combined into an OR-ed identifier expression.
        search_kwargs["identifier"] = name[0] if len(
            name) == 1 else "(" + " OR ".join(name) + ")"
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        # No UUIDs or names given: search by date range (defaults to all time).
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by,
                             limit=limit,
                             **search_kwargs)

    if footprints is True:
        # Dump all result footprints into a single GeoJSON file under `path`.
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"),
                  "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if quicklook:
        # Fetch quicklook previews; failures are logged but not fatal.
        downloaded_quicklooks, failed_quicklooks = api.download_all_quicklooks(
            products, path)
        if failed_quicklooks:
            api.logger.warning("Some quicklooks failed: %s out of %s",
                               len(failed_quicklooks), len(products))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        # Record any downloads that failed checksum/retrieval.
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"),
                      "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" %
                                  (failed_id, products[failed_id]["title"]))
    else:
        # Not downloading: only log what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info("Product %s - %s", product_id, props["summary"])
            else:  # querying uuids has no summary key
                logger.info(
                    "Product %s - %s - %s MB",
                    product_id,
                    props["title"],
                    round(int(props["size"]) / (1024.0 * 1024.0), 2),
                )
        if uuid is None:
            logger.info("---")
            logger.info(
                "%s scenes found with a total size of %.2f GB",
                len(products),
                api.get_products_size(products),
            )
Exemple #9
0
def cli(user, password, geometry, start, end, uuid, name, download, sentinel, producttype,
        instrument, cloud, footprints, path, query, url, order_by, limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    api = SentinelAPI(user, password, url)

    # Translate CLI options into SciHub search keywords.
    search_kwargs = {}
    # producttype/instrument already imply the platform, so only constrain
    # platformname when neither of them was given.
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        # Cloud cover metadata only exists for the optical missions.
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            raise ValueError('Cloud cover is only supported for Sentinel 2 and 3.')
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    # Raw query string: comma-separated "key=value" pairs.
    if query is not None:
        search_kwargs.update((x.split('=') for x in query.split(',')))

    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Look the given product UUIDs up directly instead of searching.
        uuid_list = [x.strip() for x in uuid.split(',')]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server', productid)
    elif name is not None:
        search_kwargs["identifier"] = name
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        # No UUIDs or name given: search by date range (defaults to all time).
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by, limit=limit, **search_kwargs)

    if footprints is True:
        # Dump all result footprints into a single GeoJSON file under `path`.
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"), "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, failed_downloads = api.download_all(products, path)
        # Record any downloads that failed checksum/retrieval.
        if len(failed_downloads) > 0:
            with open(os.path.join(path, "corrupt_scenes.txt"), "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" % (failed_id, products[failed_id]['title']))
    else:
        # Not downloading: only log what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id, props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
Exemple #10
0
def test_get_product_info_scihub_down():
    """Exercise get_product_odata() error handling for the various failure
    responses an unavailable or erroring SciHub OData endpoint can return."""
    api = SentinelAPI("mock_user", "mock_password")

    product_uuid = '8df46c9e-a20c-43db-a19a-4240c2ed3b8b'
    odata_url = ("https://scihub.copernicus.eu/apihub/odata/v1/"
                 "Products('8df46c9e-a20c-43db-a19a-4240c2ed3b8b')/?$format=json")

    with requests_mock.mock() as rqst:
        # Plain 503 service-unavailable response.
        rqst.get(odata_url, text="Mock SciHub is Down", status_code=503)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata(product_uuid)

        # 500 with a JSON error payload: the message value must be extracted.
        rqst.get(odata_url,
                 text='{"error":{"code":null,"message":{"lang":"en","value":'
                      '"No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "}}}',
                 status_code=500)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata(product_uuid)
        assert excinfo.value.msg == "No Products found with key \'8df46c9e-a20c-43db-a19a-4240c2ed3b8b\' "

        # 200 with a non-JSON body: the raw text becomes the message.
        rqst.get(odata_url, text="Mock SciHub is Down", status_code=200)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata(product_uuid)
        assert excinfo.value.msg == "Mock SciHub is Down"

        # Test with a real server response: the maintenance HTML page on 502.
        maintenance_html = textwrap.dedent("""\
            <!doctype html>
            <title>The Sentinels Scientific Data Hub</title>
            <link href='https://fonts.googleapis.com/css?family=Open+Sans' rel='stylesheet' type='text/css'>
            <style>
            body { text-align: center; padding: 125px; background: #fff;}
            h1 { font-size: 50px; }
            body { font: 20px 'Open Sans',Helvetica, sans-serif; color: #333; }
            article { display: block; text-align: left; width: 820px; margin: 0 auto; }
            a { color: #0062a4; text-decoration: none; font-size: 26px }
            a:hover { color: #1b99da; text-decoration: none; }
            </style>

            <article>
            <img alt="" src="/datahub.png" style="float: left;margin: 20px;">
            <h1>The Sentinels Scientific Data Hub will be back soon!</h1>
            <div style="margin-left: 145px;">
            <p>
            Sorry for the inconvenience,<br/>
            we're performing some maintenance at the moment.<br/>
            </p>
            <!--<p><a href="https://scihub.copernicus.eu/news/News00098">https://scihub.copernicus.eu/news/News00098</a></p>-->
            <p>
            We'll be back online shortly!
            </p>
            </div>
            </article>
            """)
        rqst.get(odata_url, text=maintenance_html, status_code=502)
        with pytest.raises(SentinelAPIError) as excinfo:
            api.get_product_odata(product_uuid)
        print(excinfo.value)
        assert "The Sentinels Scientific Data Hub will be back soon!" in excinfo.value.msg
Exemple #11
0
class Sentinelsat_products:
    """Query, filter and download Sentinel products from Copernicus SciHub.

    One authenticated ``SentinelAPI`` session is opened in ``__init__`` and
    reused by every method.  The products matching the footprint and date
    range are fetched eagerly on construction and stored in ``self.products``
    (the product-id -> metadata mapping returned by ``SentinelAPI.query``).
    """

    def __init__(self,
                 date_start,
                 date_finish,
                 footprint=settings.footprint,
                 platformname="Sentinel-3"):
        # date_start/date_finish: any date format accepted by SentinelAPI.query.
        self.date_start = date_start
        self.date_finish = date_finish
        self.api = SentinelAPI(settings.sentinel_api_user,
                               settings.sentinel_api_key,
                               'https://scihub.copernicus.eu/dhus')
        self.wkt_footprint = footprint
        # Pass platformname through instead of dropping it (the original
        # constructor accepted the argument but never used it).
        self.products = self.query_products(self.date_start, self.date_finish,
                                            platformname)

    def query_products(self,
                       date_start,
                       date_finish,
                       platformname="Sentinel-3"):
        """Search the hub by polygon, date range and platform name.

        Returns the product-id -> metadata mapping from ``SentinelAPI.query``.
        """
        # Bug fix: reuse the session opened in __init__ instead of creating a
        # second connection, and honour the date/platform arguments (the
        # original read self.date_* and hard-coded 'Sentinel-3', silently
        # ignoring every parameter).
        return self.api.query(self.wkt_footprint,
                              area_relation='Contains',
                              date=(date_start, date_finish),
                              platformname=platformname)

    def filter_products(self, instrument, level, p_type, timeliness):
        """Keep only products matching instrument/level/type/timeliness.

        Non-matching entries are deleted from ``self.products`` in place;
        one full-OData request is issued per product.
        """
        removed_products = []
        for product_key in self.products:
            # full=True is required: these attributes only appear in the
            # full OData response.
            odata = self.api.get_product_odata(product_key, full=True)
            keep = (odata["Instrument"] == instrument
                    and p_type in odata["Product type"]
                    and odata["Timeliness Category"] == timeliness
                    and odata["Product level"] == level)
            if not keep:
                removed_products.append(product_key)
        # Delete after the loop finishes: removing keys while iterating
        # self.products would raise RuntimeError.
        for key in removed_products:
            del self.products[key]

    def download_products(self, make_subset=True):
        """Download, unzip and optionally subset every kept product.

        Archives are grouped under ``settings.data_path/<acquisition date>``.
        SentinelAPI skips the transfer when the archive is already present.
        With ``make_subset=True`` the extracted product is cropped to
        ``settings.footprint`` and written as GeoTiff and BEAM-DIMAP.
        """
        print("----------")
        for key in self.products:
            file_name = self.products[key]["filename"]
            # The product summary starts with "Date: YYYY-MM-DD,..."; slice
            # out the date to build a per-day download directory.
            file_date = self.products[key]["summary"][:16].split("Date: ")[1]
            download_path = os.path.join(settings.data_path, file_date)
            if not os.path.exists(download_path):
                os.makedirs(download_path)
            # If it was downloaded before it won't download again.
            download_info = self.api.download(key,
                                              directory_path=download_path)
            zip_path = download_info["path"]
            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
                zip_ref.extractall(download_path)
            if make_subset:
                extracted_directory = os.path.join(download_path, file_name)
                product = snappy_utils.read_product(extracted_directory)
                subset = snappy_utils.make_subset(product, settings.footprint)
                snappy_utils.write_product(
                    subset, os.path.join(download_path, "laguna.tif"),
                    "GeoTiff")
                snappy_utils.write_product(
                    subset, os.path.join(download_path, "laguna.dim"),
                    "BEAM-DIMAP")
Exemple #12
0
def _process_scene(path_to_folder, scene_name, save_rgb, save_tiles,
                   unet_weights, unet_clouds, class_path, class_clouds,
                   poly_path, percentiles_forest, percentiles_clouds,
                   save_imgs):
    """Tile, classify (forest and clouds UNets) and vectorize one scene.

    Each stage is skipped when its output directory already exists, so the
    function is safe to re-run on a partially processed scene.
    """
    # RGB tiles used as UNet input.
    dir_save_tiles = save_tiles + '/' + scene_name
    if not os.path.isdir(dir_save_tiles):
        print('Creating RGB tiles')
        os.mkdir(dir_save_tiles)
        rgb_tiles(path_to_folder, save_rgb, dir_save_tiles, scene_name)

    # Forest classification UNet.
    save_class_path = class_path + '/' + scene_name
    if not os.path.isdir(save_class_path):
        print('Applying UNet')
        os.mkdir(save_class_path)
        application(dir_save_tiles,
                    unet_weights,
                    save_class_path,
                    percentiles_forest,
                    clouds=0)
        # Merge predicted tiles into one GeoTiff.
        join_tiles(save_class_path, class_path, path_to_folder)
        print("Tiles merged!")

    # Cloud classification UNet.
    save_class_clouds = class_clouds + '/' + scene_name
    if not os.path.isdir(save_class_clouds):
        print('Applying UNet clouds')
        os.mkdir(save_class_clouds)
        application(dir_save_tiles,
                    unet_clouds,
                    save_class_clouds,
                    percentiles_clouds,
                    clouds=1)
        # Merge the cloud-predicted tiles into one GeoTiff.
        join_tiles(save_class_clouds, class_clouds, path_to_folder)
        print("Clouds tiles merged!")

    # Polygons evaluation (always runs; its output paths are per-scene).
    print("Polygons evaluation")
    polygons(scene_name,
             class_path,
             class_clouds,
             path_to_folder,
             save_class_path,
             save_imgs,
             poly_path,
             time_spaced=None)


def download_images(save_imgs, save_rgb, save_tiles, unet_weights, unet_clouds,
                    class_path, class_clouds, poly_path, percentiles_forest,
                    percentiles_clouds, boundsdata):
    """Download recent Sentinel-2 L2A scenes and run the processing chain.

    Queries SciHub for Level-2A products of the last 30 days fully inside
    the GeoJSON polygon in *boundsdata*, downloads and extracts each product
    not already present under *save_imgs*, then runs RGB tiling, both UNet
    classifiers and the polygon evaluation for every scene.

    Raises ValueError if a downloaded archive path is not a regular file.
    """
    # Connect to the API.
    # NOTE(review): credentials appear scrubbed by the scraper ('******');
    # replace with real SciHub credentials (or load from config) before use.
    user = '******'
    password = '******'

    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

    # Search area as WKT from the GeoJSON bounds file.
    footprint = geojson_to_wkt(read_geojson(boundsdata))

    # Search for the images.
    products = api.query(
        footprint,
        date=(["NOW-30DAYS", "NOW"]),
        area_relation='IsWithin',
        platformname='Sentinel-2',
        processinglevel='Level-2A',
        #cloudcoverpercentage = (0, 20)
    )

    print(len(products))

    table_names = api.to_geodataframe(products)

    # Bug fix: the original indexed these Series with a manual integer
    # counter (extracted_name[k]); integer [] lookup on a non-integer index
    # is positional-fallback behaviour that pandas removed in 2.0.  Iterating
    # the three columns in lockstep avoids the counter entirely.
    for fname, safe_name, zip_title in zip(table_names['uuid'],
                                           table_names['filename'],
                                           table_names['title']):

        file_dir = save_imgs + '/' + safe_name

        if not os.path.isdir(file_dir):
            retval = os.getcwd()
            os.chdir(save_imgs)
            print("Downloading data...")
            api.get_product_odata(fname)
            api.download(fname)
            os.chdir(retval)  # return to previous directory

            path_zip_name = save_imgs + '/' + zip_title + '.zip'
            # presumably api.download() is synchronous, so this wait should
            # be immediate — kept for behavioural parity; TODO confirm.
            while not os.path.exists(path_zip_name):
                time.sleep(1)

            if not os.path.isfile(path_zip_name):
                raise ValueError("%s isn't a file!" % path_zip_name)

            # Extract and drop the archive.
            with zipfile.ZipFile(path_zip_name, 'r') as zip_ref:
                zip_ref.extractall(save_imgs)
            os.remove(path_zip_name)  # remove .zip file
            print("%s has been removed successfully" % zip_title)

        # Shared processing path for freshly downloaded and pre-existing
        # scenes (the original duplicated ~55 lines across both branches).
        path_to_folder = save_imgs + '/' + safe_name + '/GRANULE/'
        _process_scene(path_to_folder, zip_title, save_rgb, save_tiles,
                       unet_weights, unet_clouds, class_path, class_clouds,
                       poly_path, percentiles_forest, percentiles_clouds,
                       save_imgs)

    return
Exemple #13
0
                     sensoroperationalmode='IW'
                     )

# Abort when the query above returned nothing for the requested date.
if len(products) == 0:
    print("No files found at date: " + date)
    quit()
# Bug fix: dict views are not subscriptable in Python 3 —
# products[products.keys()[i]] raised TypeError.  Iterate the values
# directly (same order as the keys).
for product_info in products.values():
    print(product_info)

products_df = api.to_dataframe(products)

# FINDING SMALLEST FILE
# Fetch the OData size once per product; the original called
# api.get_product_odata twice per product inside the comparison loop.
smallestFile = None
tempSize = 9999999999
for product_id in products_df.index:
    size = api.get_product_odata(product_id)["size"]
    if size < tempSize:
        smallestFile = product_id
        tempSize = size
# ----------------------------------------------------------------

# SETTING MAX SIZE AND GETTING PRODUCT INFO
maxSize = 500000000  # Set the max size for files to download (in bytes)
if tempSize < maxSize:
    api.download(smallestFile)
    # One OData request instead of three identical ones.
    odata = api.get_product_odata(smallestFile)
    smallestName = odata["title"]
    smallestDate = odata["date"].strftime("%d-%m-%Y_%H-%M")  # ":" cause error in windowsOS and with KML links
    smallestLink = odata["url"]
    print("Downloading " + smallestName + ", Size: " + str(tempSize) + " bytes.")
else:
    print("No file small enough to download")
    quit()