Beispiel #1
0
def downloadProduct(data):
    """Download a single Sentinel product and record its status in the DB.

    Parameters
    ----------
    data : tuple
        ``(index, directory_path, username, password)`` where *index* is the
        product UUID, *directory_path* the target download directory.
    """
    index, directory_path, username, password = data
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    # Mark the row as started, then fetch the instance so the completion
    # fields can be updated and saved below.
    Downloads.objects.filter(product_id=index).update(
        start_time=datetime.datetime.now(), status='started')
    download_data = Downloads.objects.get(product_id=index)
    logger.info(f'Downloading product with product_id {index} Started')
    try:
        api.download(index, directory_path=directory_path, checksum=True)
        download_data.status = 'finished'
    except Exception as e:
        # BUG FIX: the original had a second `except AttributeError` clause
        # after this one; it was unreachable because AttributeError is a
        # subclass of Exception. It has been removed.
        logger.exception(
            f'Exception occured {e} while downloading product with   product_id {index}'
        )
        download_data.status = 'error'
    # End time and save are common to both the success and error paths.
    download_data.end_time = datetime.datetime.now()
    download_data.save()
    logger.info(f'Downloading product with product_id {index} Finished')
Beispiel #2
0
def download_s1(user, password, dir_raw, dir_nc, start_date, end_date,
                footprint):
    """Query Sentinel-1 GRD products intersecting *footprint* and download
    those not yet converted to NetCDF.

    For every product found, the footprint is saved as a ``.wkt`` file next
    to *dir_nc* and the raw product is downloaded into *dir_raw* unless the
    corresponding ``<title>_VV.nc`` already exists.
    """
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus/')

    products = api.query(footprint,
                         date=(start_date, end_date),
                         producttype='GRD')

    for product in products:
        productInfo = api.get_product_odata(product)
        title = productInfo['title']

        print(title)
        file_nc = os.path.join(dir_nc, "%s_VV.nc" % title)
        file_wkt = os.path.join(os.path.dirname(dir_nc), 'wkt',
                                "%s.wkt" % title)

        # Persist the product footprint once per title.
        if not os.path.exists(file_wkt):
            # Use a context manager so the file is closed even on write errors
            # (the original used open/write/close without one).
            with open(file_wkt, "a") as wkt_file:
                wkt_file.write(productInfo['footprint'])
        # Skip products whose NetCDF conversion already exists.
        if not os.path.exists(file_nc):
            api.download(product, dir_raw, checksum=True)
Beispiel #3
0
def download_scene(scene):
    """Download a scene and change its status """
    from sentinelsat.sentinel import SentinelAPI
    from django.conf import settings

    # Target directory: MEDIA_ROOT/<satellite>/<identifier>
    path = join(settings.MEDIA_ROOT, scene.sat, scene.identifier)
    check_create_folder(path)

    try:
        api = SentinelAPI(settings.SENTINEL_USER, settings.SENTINEL_PASSWORD,
                          settings.SENTINEL_API_URL)
    except AttributeError:
        # No SENTINEL_API_URL in settings: fall back to the library default.
        api = SentinelAPI(settings.SENTINEL_USER, settings.SENTINEL_PASSWORD)

    try:
        print('Changing status of scene %s to downloading' % scene.identifier)
        scene.change_status('downloading')
        print('Starting download of product %s on path %s' %
              (scene.product, path))
        api.download(scene.product, path)
        print('Changing status of scene %s to downloaded' % scene.identifier)
        scene.change_status('downloaded')
    except Exception as err:
        # Any failure flips the scene into the failed state for later retry.
        print('Unexpected error: %s' % err)
        print('Changing status of scene %s to dl_failed' % scene.identifier)
        scene.change_status('dl_failed')
Beispiel #4
0
def download_s2(user, password, dir_raw, dir_nc, start_date, end_date, footprint, pr_status):
    """Query Sentinel-2 L1C products over *footprint* and download unseen ones.

    ``pr_status`` maps ``<tile>_<sensing-time>`` keys to a processed flag;
    products whose key is flagged True are skipped. Returns the updated
    ``pr_status`` mapping.
    """
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus/')

    products = api.query(footprint, date=(start_date, end_date),
                         producttype='S2MSI1C', cloudcoverpercentage=(0, 20))

    # Titles already handled in previous runs; kept verbatim from the original.
    skip_titles = ['S2A_MSIL1C_20180601T051651_N0206_R062_T43PFN_20180601T082308', 'S2A_MSIL1C_20180621T051651_N0206_R062_T43PFN_20180621T081647', 'S2B_MSIL1C_20180613T050649_N0206_R019_T43PFN_20180613T084228',
         'S2A_MSIL1C_20180601T051651_N0206_R062_T43PFP_20180601T082308', 'S2A_MSIL1C_20180621T051651_N0206_R062_T43PFP_20180621T081647', 'S2B_MSIL1C_20180613T050649_N0206_R019_T43PFP_20180613T084228',
         'S2A_MSIL1C_20180608T050651_N0206_R019_T43PFN_20180608T084904', 'S2A_MSIL1C_20180628T050651_N0206_R019_T43PFN_20180628T081023', 'S2B_MSIL1C_20180616T051649_N0206_R062_T43PFN_20180616T090733',
         'S2A_MSIL1C_20180608T050651_N0206_R019_T43PFP_20180608T084904', 'S2A_MSIL1C_20180628T050651_N0206_R019_T43PFP_20180628T081023', 'S2B_MSIL1C_20180616T051649_N0206_R062_T43PFP_20180616T090733',
         'S2A_MSIL1C_20180611T051651_N0206_R062_T43PFN_20180611T081245', 'S2B_MSIL1C_20180603T050649_N0206_R019_T43PFN_20180603T084545', 'S2B_MSIL1C_20180623T050649_N0206_R019_T43PFN_20180623T084444',
         'S2A_MSIL1C_20180611T051651_N0206_R062_T43PFP_20180611T081245', 'S2B_MSIL1C_20180603T050649_N0206_R019_T43PFP_20180603T084545', 'S2B_MSIL1C_20180623T050649_N0206_R019_T43PFP_20180623T084444',
         'S2A_MSIL1C_20180618T050651_N02206_R019_T43PFN_20180618T085607', 'S2B_MSIL1C_20180606T051649_N0206_R062_T43PFN_20180606T104751', 'S2B_MSIL1C_20180626T051649_N0206_R062_T43PFN_20180626T090058',
         'S2A_MSIL1C_20180618T050651_N0206_R019_T43PFP_20180618T085607', 'S2B_MSIL1C_20180606T051649_N0206_R062_T43PFP_20180606T104751', 'S2B_MSIL1C_20180626T051649_N0206_R062_T43PFP_20180626T090058']

    for product in products:
        productInfo = api.get_product_odata(product)
        title = productInfo['title']

        if title in skip_titles:
            continue

        tileNo_time = '%s_%s' % (title.split('_')[5], title.split('_')[2])

        try:
            downloadFlag = not pr_status[tileNo_time]
        except KeyError:
            # First time we see this tile/time: mark it and download.
            pr_status[tileNo_time] = False
            downloadFlag = True
            # BUG FIX: was the Python-2 statement `print "no error"`, which
            # is a SyntaxError under Python 3.
            print("no error")

        file_wkt = os.path.join(os.path.dirname(dir_nc), "wkt/%s.wkt" % tileNo_time)

        if not os.path.exists(file_wkt):
            with open(file_wkt, "a") as wkt_file:
                wkt_file.write(productInfo['footprint'])

        if downloadFlag and title not in skip_titles:
            api.download(product, dir_raw, checksum=True)
            skip_titles.append(title)

    # BUG FIX: the original `return pr_status` sat inside the for loop, so
    # only the first product was ever processed (compare the sibling
    # download_s2 implementation later in this file).
    return pr_status
Beispiel #5
0
def download(user, password, productid, path, md5, url):
    """Download a Sentinel Product with your Copernicus Open Access Hub user and password
    and the id of the product you want to download.
    """
    api = SentinelAPI(user, password, url)
    try:
        api.download(productid, path, md5)
    except SentinelAPIError as err:
        # An unknown product ID is reported; anything else propagates.
        if 'Invalid key' not in err.msg:
            raise
        logger.error('No product with ID \'%s\' exists on server', productid)
Beispiel #6
0
def download(user, password, productid, path, md5, url):
    """Download a Sentinel Product. It just needs your SciHub user and password
    and the id of the product you want to download.
    """
    hub = SentinelAPI(user, password, url)
    try:
        hub.download(productid, path, md5)
    except SentinelAPIError as err:
        # Only swallow the "unknown product" case; re-raise everything else.
        if 'Invalid key' not in err.msg:
            raise
        logger.error('No product with ID \'%s\' exists on server', productid)
Beispiel #7
0
 def downloadImages_s(self):
     """Download all products listed in the selected CSV into the chosen
     output directory, driving the GUI progress bar and label.

     Reads the CSV path and output directory from the widgets, then:
     - for "s2a": calls self.get_image_s(id, url, outputdir) per row,
       removing any partially extracted folder first;
     - for "s1": downloads each product UUID via the SentinelAPI client.
     """
     S1_download_url = 'https://scihub.copernicus.eu/dhus'
     # HACK: credentials are hard-coded here; move to config/secrets.
     api = SentinelAPI('chenjinlv', 'cjl19890710', S1_download_url)
     self.downloadBar_s.setMinimum(0)
     self.downloadBar_s.setValue(0)
     self.downloadLabel_s.setText(u"等待下载")  # label text: "waiting to download"
     idfile = self.savecsv_s.text()
     outputdir = self.savedownload_s.text()
     if self.sate_s.currentText() == "s2a":
         # Count data rows (minus header) to size the progress bar.
         csvf = open(idfile, 'r')
         csv_count = len(csvf.readlines()) - 1
         csvf.close()
         print(csv_count)
         self.downloadBar_s.setMaximum(csv_count)
         with open(idfile, 'r') as f:
             idcsv = csv.reader(f)
             header = next(idcsv)  # skip the header row
             for ck, row in enumerate(idcsv):
                 self.downloadBar_s.setValue(ck)
                 url = row[1]
                 PRT_ID = row[0]
                 # filename = url.split('/')[-1]
                 # Remove a leftover extracted folder so the new download
                 # starts clean.
                 if os.path.exists(outputdir + '/{}'.format(PRT_ID)):
                     shutil.rmtree(outputdir + '/{}'.format(PRT_ID))
                 if not os.path.exists(outputdir +
                                       '/{}.zip'.format(PRT_ID)):
                     dt = time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(time.time()))
                     self.downloadLabel_s.setText(
                         str(ck) + "/" + str(csv_count))
                     print(dt + "Downloading {} ...".format(PRT_ID))
                     self.get_image_s(PRT_ID, url, outputdir)
     elif self.sate_s.currentText() == "s1":
         csvf = open(idfile, 'r')
         csv_count = len(csvf.readlines()) - 1
         csvf.close()
         print(csv_count)
         self.downloadBar_s.setMaximum(csv_count)
         with open(idfile, 'r') as f:
             idcsv = csv.reader(f)
             header = next(idcsv)  # skip the header row
             for ck, row in enumerate(idcsv):
                 self.downloadBar_s.setValue(ck)
                 # NOTE(review): column order appears to be (filename, uuid)
                 # here vs (id, url) above — confirm against the CSV writer.
                 uuidprd = row[1]
                 filename = row[0]
                 self.downloadLabel_s.setText(
                     str(ck) + "/" + str(csv_count))
                 api.download(uuidprd, directory_path=outputdir)
Beispiel #8
0
def download(product_id, output_dir, esa_sso_username, esa_sso_password,
             dhus_username, dhus_password):
    """Download product given its identifier.

    Dispatches on the platform guessed from the product ID: Landsat goes
    through `Product.download`, ERS/Envisat through an ESA SSO session,
    and everything else through the DHuS SentinelAPI.

    Parameters
    ----------
    product_id : str
        Landsat, ERS, Envisat or Sentinel-1 product ID.
    output_dir : str
        Output directory.

    Returns
    -------
    product_dir : str
        Product directory.
    """
    platform = guess_platform(product_id)
    # Avoid if product is already downloaded
    product_path = find_product(product_id, output_dir)
    if product_path:
        return product_path

    if platform == 'Landsat':
        product = Product(product_id)
        product.download(output_dir, progressbar=False)

    elif platform in ('ERS', 'Envisat'):
        session = log_in(esa_sso_username, esa_sso_password)
        try:
            request_download(session, product_id,
                             output_dir, progressbar=False)
        except FileExistsError:
            pass  # already present on disk; treated as success
        log_out(session)

    else:
        api = SentinelAPI(dhus_username, dhus_password,
                          show_progressbars=False)
        # Wildcard query by filename; NOTE(review): raises IndexError if
        # the query returns no products — confirm callers expect that.
        meta = api.query(filename=product_id + '*')
        uuid = list(meta)[0]
        if _is_online(uuid, dhus_username, dhus_password):
            api.download(uuid, output_dir)
            unzip(os.path.join(output_dir, product_id + '.zip'))
        else:
            # Offline (Long Term Archive) product: the retrieval request
            # has been triggered; signal the caller to retry later.
            raise requests.exceptions.HTTPError(
                '503: Product offline. Re-upload requested.')

    return find_product(product_id, output_dir)
def run_download(product_id, out_dir, username, password):
    """Download a product (given as a dict with a 'uuid' key) into *out_dir*.

    Returns a status code: 0 on connection error, 1 when the zip landed on
    disk, otherwise the LTA retrieval HTTP status code (202/403/503).
    Note: *out_dir* is concatenated directly, so it must end with a path
    separator (preserved from the original behavior).
    """
    url = 'https://scihub.copernicus.eu/dhus'
    d_api = SentinelAPI(username, password, url)
    try:
        download_info = d_api.download(product_id['uuid'], directory_path=out_dir)
    except Exception:
        # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print('Server connection error')
        return 0

    # BUG FIX: retrievel_code was unbound on the "online but not downloaded"
    # path, making the final `return` raise NameError. Default to 0.
    retrievel_code = 0
    if os.path.exists(out_dir + download_info['title'] + '.zip'):
        os.mknod(out_dir + download_info['title'] + '_data_downloaded.ok')
        print('data_downloaded')
        retrievel_code = 1
    elif download_info['Online']:
        print('online_but_not_downloaded')
    else:
        # Offline product: trigger LTA retrieval and report the HTTP code.
        retrievel_code = d_api._trigger_offline_retrieval(download_info['url'])
        # check https://scihub.copernicus.eu/userguide/LongTermArchive#HTTP_Status_codes
        if retrievel_code == 202:
            print("offline product retrieval accepted")
        elif retrievel_code == 403:
            print("offline product requests exceed quota")
        elif retrievel_code == 503:
            print("offline product retrieval not accepted")
    return retrievel_code
def run_download_from_pool(product_id, out_dir, username, password):
    """Download a product by UUID into *out_dir* (worker-pool variant).

    Returns the sentinelsat download-info dict on (attempted) success, or 0
    when the API call itself fails.
    """
    url = 'https://scihub.copernicus.eu/dhus'
    d_api = SentinelAPI(username, password, url)
    try:
        download_info = d_api.download(product_id, directory_path=out_dir)
    except Exception:
        # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print('Server connection error')
        return 0

    if os.path.exists(out_dir + '/' + download_info['title'] + '.zip'):
        # Marker file tells the orchestrator this product is done.
        os.mknod(out_dir + '/' + download_info['title'] + '.ok')
        print('data_downloaded')
    elif download_info['Online']:
        print('online_but_not_downloaded')
    else:
        # Offline product: trigger LTA retrieval and report the HTTP code.
        retrievel_code = d_api._trigger_offline_retrieval(download_info['url'])
        # check https://scihub.copernicus.eu/userguide/LongTermArchive#HTTP_Status_codes
        if retrievel_code == 202:
            print("offline product retrieval accepted")
        elif retrievel_code == 403:
            print("offline product requests exceed quota")
        elif retrievel_code == 503:
            print("offline product retrieval not accepted")
    return download_info
Beispiel #11
0
def downloadSentinel(user, pw, aoi, start, stop):
    """Query Sentinel-1 GRD products over a KML AOI, save their footprints,
    then interactively download and unzip one product chosen by the user.

    NOTE: this snippet is Python-2 era (raw_input); kept compatible.
    """
    # Probe the hub endpoint (also primes the TLS layer via certifi).
    curl = pycurl.Curl()
    curl.setopt(pycurl.CAINFO, certifi.where())
    curl.setopt(pycurl.URL, 'https://scihub.copernicus.eu/dhus')
    curl.perform()
    api = SentinelAPI(user, pw, 'https://scihub.copernicus.eu/dhus')
    AOI = KMLtoGeoJason.kml2geojson(aoi)
    api.query(get_coordinates(AOI), start, stop, producttype='GRD')
    # Footprint generation of all found images:
    a = api.get_footprints()
    name = AOI[:-8] + "_S1footprint.geojson"
    foot = open(name, "w")
    foot.write(dumps(a, indent=2) + "\n")
    foot.close()
    # Selected image download and unzip:
    imageId = raw_input("Insert Sentinel-1 image id: ")
    # BUG FIX: the original non-raw string literal contained `\U`, which is
    # an invalid (truncated) unicode escape under Python 3 — use a raw string.
    output_img = r'C:\Users\ithaca\Documents\Magda\Tool_MIE\SENTINEL-1_TOOL\Immagini_grandi'
    s1 = api.download(imageId, output_img)
    path = os.path.dirname(s1)
    with zipfile.ZipFile(s1, "r") as z:
        z.extractall(path)
Beispiel #12
0
def test_download(tmpdir):
    """Integration test for SentinelAPI.download: normal download, resume,
    checksum validation, and re-download of corrupted files.

    Requires network access and credentials in `_api_auth`.
    """
    api = SentinelAPI(**_api_auth)
    uuid = "1f62a176-c980-41dc-b3a1-c735d660c910"
    filename = "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E"
    expected_path = tmpdir.join(filename + ".zip")

    # Download normally
    path, product_info = api.download(uuid, str(tmpdir), checksum=True)
    assert expected_path.samefile(path)
    assert product_info["id"] == uuid
    assert product_info["title"] == filename
    assert product_info["size"] == expected_path.size()

    # Snapshot hash/mtime to detect any later modification of the file.
    hash = expected_path.computehash()
    modification_time = expected_path.mtime()
    expected_product_info = product_info

    # File exists, test with checksum
    # Expect no modification
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.mtime() == modification_time
    assert product_info == expected_product_info

    # File exists, test without checksum
    # Expect no modification
    path, product_info = api.download(uuid, str(tmpdir), check_existing=False)
    assert expected_path.mtime() == modification_time
    assert product_info == expected_product_info

    # Create invalid file, expect re-download
    # (right size, wrong content: corrupt only the last byte)
    with expected_path.open("wb") as f:
        f.seek(expected_product_info["size"] - 1)
        f.write(b'\0')
    assert expected_path.computehash("md5") != hash
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == hash
    assert product_info == expected_product_info

    # Test continue
    # (truncate to 100 bytes; download should resume, not restart)
    with expected_path.open("rb") as f:
        content = f.read()
    with expected_path.open("wb") as f:
        f.write(content[:100])
    assert expected_path.computehash("md5") != hash
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == hash
    assert product_info == expected_product_info

    # Test MD5 check
    # (garbage content of the wrong size must raise InvalidChecksumError)
    with expected_path.open("wb") as f:
        f.write(b'abcd' * 100)
    assert expected_path.computehash("md5") != hash
    with pytest.raises(InvalidChecksumError):
        api.download(uuid, str(tmpdir), check_existing=True, checksum=True)
Beispiel #13
0
def test_download(tmpdir):
    """Integration test for SentinelAPI.download (env-credential variant):
    normal download, resume, checksum validation, and re-download of
    corrupted files. Requires SENTINEL_USER / SENTINEL_PASSWORD env vars.
    """
    api = SentinelAPI(environ['SENTINEL_USER'], environ['SENTINEL_PASSWORD'])
    uuid = "1f62a176-c980-41dc-b3a1-c735d660c910"
    filename = "S1A_WV_OCN__2SSH_20150603T092625_20150603T093332_006207_008194_521E"
    expected_path = tmpdir.join(filename + ".zip")

    # Download normally
    path, product_info = api.download(uuid, str(tmpdir), checksum=True)
    assert expected_path.samefile(path)
    assert product_info["id"] == uuid
    assert product_info["title"] == filename
    assert product_info["size"] == expected_path.size()

    # Snapshot hash/mtime to detect any later modification of the file.
    hash = expected_path.computehash()
    modification_time = expected_path.mtime()
    expected_product_info = product_info

    # File exists, test with checksum
    # Expect no modification
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.mtime() == modification_time
    assert product_info == expected_product_info

    # File exists, test without checksum
    # Expect no modification
    path, product_info = api.download(uuid, str(tmpdir), check_existing=False)
    assert expected_path.mtime() == modification_time
    assert product_info == expected_product_info

    # Create invalid file, expect re-download
    # (right size, wrong content: corrupt only the last byte)
    with expected_path.open("wb") as f:
        f.seek(expected_product_info["size"] - 1)
        f.write(b'\0')
    assert expected_path.computehash("md5") != hash
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == hash
    assert product_info == expected_product_info

    # Test continue
    # (truncate to 100 bytes; download should resume, not restart)
    with expected_path.open("rb") as f:
        content = f.read()
    with expected_path.open("wb") as f:
        f.write(content[:100])
    assert expected_path.computehash("md5") != hash
    path, product_info = api.download(uuid, str(tmpdir), check_existing=True)
    assert expected_path.computehash("md5") == hash
    assert product_info == expected_product_info

    # Test MD5 check
    # (garbage content of the wrong size must raise InvalidChecksumError)
    with expected_path.open("wb") as f:
        f.write(b'abcd' * 100)
    assert expected_path.computehash("md5") != hash
    with pytest.raises(InvalidChecksumError):
        api.download(uuid, str(tmpdir), check_existing=True, checksum=True)
Beispiel #14
0
class Downloader():
    """Downloads Sentinel granules that the Hyp3 archive has not handled yet
    and moves the resulting zips to the configured final directory."""

    def __init__(self, downloader_path, options):
        self.downloader_path = downloader_path
        self.options = options
        self.hyp3_archive_db = Hyp3_Archive_Sql(self.options.pg_db,
                                                self.options.find_granule_sql)
        self.get_sentinel_api()
        self.get_download_path()
        ensure_paths([self.download_path, self.options.final_dir])

    def get_download_path(self):
        # Resolve the download directory relative to the downloader location.
        self.download_path = os.path.abspath(
            os.path.join(self.downloader_path, self.options.download_dir))

    def get_sentinel_api(self):
        self.api = SentinelAPI(self.options.user, self.options.password,
                               self.options.esa_host)

    def download_granule(self, product, granule):
        """Download *product* and move its zip to the final directory,
        unless the granule was already handled."""
        log.info(
            f"Downloading product: {product} corresponding to granule: {granule}"
        )

        if not self.is_product_handled(product=product, granule=granule):
            self.api.download(product, directory_path=self.download_path)
            move(os.path.join(self.download_path, f"{granule}.zip"),
                 self.options.final_dir)

    def is_product_handled(self, product, granule):
        """Return True when the granule is in Hyp3 or already in final_dir."""
        import glob  # local import: the file's import header is outside this view
        if not self.hyp3_archive_db.is_granule_in_hyp3(product):
            # BUG FIX: os.path.exists() does not expand wildcards, so the
            # original check `exists(.../f"{granule}*")` was always False and
            # already-downloaded granules were re-downloaded. glob does.
            if not glob.glob(
                    os.path.join(self.options.final_dir, f"{granule}*")):
                return False

        log.info(
            f"Product {product} correspond to granule {granule} has already been handled"
        )
        return True
Beispiel #15
0
def download(request):
    """Django view: on POST, query Sentinel-1 products over the posted
    GeoJSON AOI and date range, download them into ./sentineldata, clean up
    incomplete files, and notify the requesting user by email."""
    if request.method == 'POST':
        data = json.loads(request.body)
        footprint = geojson_to_wkt(geojson.loads(data['geoJson']))
        username = config.username  # ask ITC for the username and password
        password = config.password
        api = SentinelAPI(username, password,
                          "https://apihub.copernicus.eu/apihub/"
                          )  # fill with SMARTSeeds user and password
        # Build the OpenSearch date-range string from the posted ISO dates.
        tanggal = '[{0} TO {1}]'.format(data['dateFrom'].replace('.000Z', 'Z'),
                                        data['dateTo'].replace('.000Z', 'Z'))
        print(tanggal)
        products = api.query(footprint,
                             producttype=config.producttype,
                             orbitdirection=config.orbitdirection,
                             platformname='Sentinel-1',
                             date=tanggal)
        # Products are stored in the ./sentineldata folder.
        dirpath = os.getcwd() + '/sentineldata'
        for product in products:
            try:
                api.download(product, directory_path=dirpath, checksum=True)
            except Exception:
                # Was a bare `except:`; best-effort per-product download is
                # kept, but KeyboardInterrupt/SystemExit now propagate.
                continue
        # Remove partial downloads left behind by failed transfers.
        for item in os.listdir(dirpath):
            if item.endswith(".incomplete"):
                os.remove(os.path.join(dirpath, item))
        # Notify the user via Django's email helper (unused binding removed).
        send_mail(
            'Your Download was successful!',  # subject
            'Terima kasih sudah menggunakan aplikasi webgis data yang anda unduh sudah masuk kedalam sistem website!',  # body
            settings.EMAIL_HOST_USER,  # sender
            [request.user.email],  # recipient
            fail_silently=False,
        )
        return HttpResponse(request.body)
Beispiel #16
0
def download_s2(user, password, dir_raw, dir_nc, start_date, end_date,
                footprint, pr_status):
    """Query Sentinel-2 L1C products over *footprint* and download those not
    flagged as processed in *pr_status*.

    ``pr_status`` maps ``<tile>_<sensing-time>`` keys to a processed flag;
    unknown keys are initialized to False and downloaded. Returns the
    updated ``pr_status`` mapping.
    """
    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus/')

    products = api.query(footprint,
                         date=(start_date, end_date),
                         producttype='S2MSI1C')

    for product in products:
        productInfo = api.get_product_odata(product)
        title = productInfo['title']
        tileNo_time = '%s_%s' % (title.split('_')[5], title.split('_')[2])
        try:
            downloadFlag = not pr_status[tileNo_time]
        except KeyError:
            # First time we see this tile/time: record it and download.
            pr_status[tileNo_time] = False
            downloadFlag = True
        file_wkt = os.path.join(os.path.dirname(dir_nc),
                                "wkt/%s.wkt" % tileNo_time)

        # Persist the product footprint once per tile/time.
        if not os.path.exists(file_wkt):
            # Use a context manager so the file is closed even on write
            # errors (the original used open/write/close without one).
            with open(file_wkt, "a") as wkt_file:
                wkt_file.write(productInfo['footprint'])

        if downloadFlag:
            api.download(product, dir_raw, checksum=True)

    return pr_status
class CopernicusAPI(BaseAPI):
    """Thin wrapper around sentinelsat for searching, downloading and
    removing Sentinel-1 GRD products over the NRW extent."""

    def __init__(self, user, password):
        super().__init__()
        self.__api = SentinelAPI(user=user, password=password)

    def search(self, start='NOW-7DAYS', end='NOW'):
        """Query Sentinel-1 GRD products over the bundled NRW footprint.

        Returns the sentinelsat results dict, or {} on API error.
        """
        self._logger.info('Searching for new data sets')

        # The search extent ships with this module as a GeoJSON file.
        current_dir = os.path.dirname(__file__)
        extend_path = os.path.join(current_dir, "nrw.geojson")
        footprint = geojson_to_wkt(read_geojson(extend_path))
        try:
            return self.__api.query(area=footprint,
                                    initial_date=start,
                                    end_date=end,
                                    platformname='Sentinel-1',
                                    producttype='GRD')
        except SentinelAPIError:
            self._logger.error('There was an error searching for data sets',
                               exc_info=True)
            return {}

    def download(self, product_id):
        """Download a product with checksum verification.

        Returns the local path on success, None on checksum or API error.
        """
        self._logger.info("Start downloading product: {}".format(product_id))
        try:
            product_info = self.__api.download(product_id, checksum=True)
            self._logger.info(
                "Product was successfully downloaded to {}".format(
                    product_info['path']))
            return product_info['path']
        except InvalidChecksumError:
            # BUG FIX: the original called .format(product_id) on the return
            # value of logger.error() (None), raising AttributeError inside
            # this handler. The format call now builds the message itself.
            self._logger.error(
                'The checksum of the download was invalid. Skipping product {}'
                .format(product_id))
        except SentinelAPIError:
            self._logger.error(
                'There was an error trying to download the product',
                exc_info=True)

    def remove(self, filename):
        """Delete a downloaded file, ignoring a missing path."""
        self._logger.info('Removing product at {}'.format(filename))
        try:
            os.remove(filename)
        except OSError:
            pass
Beispiel #18
0
def _download_from_hub(product, download_path, user, password,
                       hub_address="https://scihub.copernicus.eu/apihub", overwrite=False):
    """Fetch *product* from the hub, unzip it, and return the extracted path.

    If a matching file already exists under *download_path* and *overwrite*
    is False, that path is returned without contacting the hub. Returns ""
    when the hub reports an error.
    """
    pattern = os.path.join(download_path, product["identifier"] + ".*")
    existing = glob.glob(pattern)
    if existing and not overwrite:
        return existing[0]

    # Connect to the hub and download the archive.
    try:
        hub = SentinelAPI(user, password, hub_address)
        product_info = hub.download(product["uuid"], download_path)
    except SentinelAPIError as err:
        print(err)
        return ""

    # Extract, then discard the zip itself.
    with zipfile.ZipFile(product_info["path"], "r") as archive:
        archive.extractall(download_path)
    os.remove(product_info["path"])
    return glob.glob(pattern)[0]
Beispiel #19
0
products_df = api.to_dataframe(products)

# FINDING SMALLEST FILE
# Hoisted: the original called api.get_product_odata() twice per iteration
# and three more times for the chosen product; each call is a network round
# trip. Fetch once per product and once for the winner.
smallestFile = None
tempSize = 9999999999
for i in range(0, len(products)):
    size = api.get_product_odata(products_df.index[i])["size"]
    if size < tempSize:
        smallestFile = products_df.index[i]
        tempSize = size
# ----------------------------------------------------------------

# SETTING MAX SIZE AND GETTING PRODUCT INFO
maxSize = 500000000  # Set the max size for files to download (in bytes)
if (tempSize < maxSize):
    api.download(smallestFile)
    odata = api.get_product_odata(smallestFile)
    smallestName = odata["title"]
    smallestDate = odata["date"].strftime("%d-%m-%Y_%H-%M") # ":" cause error in windowsOS and with KML links
    smallestLink = odata["url"]
    print("Downloading " + smallestName + ", Size: " + str(tempSize) + " bytes.")
else:
    print("No file small enough to download")
    quit()

fileName = 'sentinel-image(a)_' + smallestDate
cleanFileName = 'sentinel-image(C)_' + smallestDate
saveName = 'sentinel_images/' + fileName + '.png'  # file name
cleanSaveName = 'sentinel_images_clean/' + cleanFileName + '.png' 
# ----------------------------------------------------------------

Beispiel #20
0
# GeoJSON FeatureCollection containing footprints and metadata of the scenes
# NOTE: this snippet is Python 2 (bare `print` statements).
fp = api.get_footprints(products)

# Persist footprints and the raw product metadata for later inspection.
with open('footprints.json', 'w') as outfile:
    #json.dump({'numbers':n, 'strings':s, 'x':x, 'y':y}, outfile, indent=4)
    json.dump(fp, outfile, indent=4)

with open('product.json', 'w') as outfile:
    #json.dump({'numbers':n, 'strings':s, 'x':x, 'y':y}, outfile, indent=4)
    json.dump(products, outfile, indent=4)

#print(fp["features"][0]["properties"]["identifier"])

i = 0
# Scan features for the most recent begin-position (ISO timestamps compare
# correctly as strings). Example format: 2017-02-07T10:42:00.746Z
datetime_old = format_date("20170101")
print datetime_old

for entry in fp["features"]:
    datetime = entry["properties"]["date_beginposition"]
    if (datetime >= datetime_old):
        datetime_old = datetime
        product_id = entry["properties"]["product_id"]
        print "###########" + datetime
        print entry["properties"]["identifier"]
        print entry["properties"]["product_id"]
        print entry["properties"]["date_beginposition"]

# NOTE(review): product_id is unbound (NameError) if no feature is on or
# after the cutoff date — confirm whether that can happen upstream.
api.download(product_id)
Beispiel #21
0
    def execute(self, context):
        """Airflow operator entry point: download up to `download_max` DHuS
        products (from `product_ids` or an upstream XCom) into
        `download_dir`, then push the downloaded products and their paths
        to XCom. Returns the dict of downloaded products, or None when
        there is nothing to do.
        """
        log.info("###################")
        log.info("## DHUS Download ##")
        log.info('API URL: %s', self.dhus_url)
        log.info('API User: %s', self.dhus_user)
        #log.info('API Password: %s', self.dhus_pass)
        log.info('Max Downloads: %s', self.download_max)
        log.info('Download Directory: %s', self.download_dir)
        log.info('Input from: %s', self.get_inputs_from)
        log.info('Product IDs: %s', self.product_ids)

        log.info("Execute DHUS Download.. ")

        # create the download directory if it does not exists
        if not os.path.exists(self.download_dir):
            log.info("Creating directory for download: {}".format(
                self.download_dir))
            os.makedirs(self.download_dir)

        # generate a dict of products from
        # 1) the list of product_ids
        # 2) in case 1) is None then check the XCOM key in 'get_inputs_from'
        self.products = dict()
        if self.product_ids != None and len(self.product_ids) != 0:
            # NOTE(review): values are set to "" here, but the loop below
            # calls .get("title") on each value, which would raise
            # AttributeError on a str — confirm whether this path is ever
            # exercised with plain ID lists.
            for product_id in self.product_ids:
                self.products[product_id] = ""
            print("Download request for {} products via IDs:\n{}".format(
                len(self.product_ids), self.products))
        elif self.get_inputs_from != None:
            # Pull the product dict produced by the upstream search task.
            self.products = context['task_instance'].xcom_pull(
                task_ids=self.get_inputs_from, key=XCOM_RETURN_KEY)
            print("Downloading request for {} products via XCOM:\n{}".format(
                len(self.products), self.products))
        else:
            # exit gracefully if no products are found
            log.info('no products to process')
            return None

        # log warning in case the amount of products exceed the limit
        if len(self.products) > self.download_max:
            log.warn("Found products ({}) exceeds download limit ({})".format(
                len(self.products), self.download_max))

        # download all files via it's ID
        log.info('Starting downloading..')
        product_downloaded = {}
        api = SentinelAPI(self.dhus_user, self.dhus_pass, self.dhus_url)
        for product_id in self.products.keys():

            # If download limit reached, stopp and break out
            # Else if the file already exists, then try next from search
            product_filename = os.path.join(
                self.download_dir,
                self.products[product_id].get("title") + ".zip")
            if len(product_downloaded) >= self.download_max:
                log.info("Limit exceeded, stopping download..")
                break
            elif os.path.exists(product_filename):
                log.info("Product already downloaded. Continuing..")
                continue

            log.info('Downloading Product..\nuuid:  {}\ntitle: {}\nsize:  {}'.
                     format(product_id, self.products[product_id].get("title"),
                            self.products[product_id].get("size")))
            downloaded = api.download(product_id,
                                      directory_path=self.download_dir)
            path = downloaded['path']
            # TODO check if file in 'path' is binary.
            # It might is an XML file containing an error such as
            # "Maximum number of 2 concurrent flows achieved by the user "xyz""
            # Check MD5
            # If file already downloaded move on to next one?
            product_downloaded[path] = downloaded

        # print summary and push products to XCOM
        log.debug("Downloaded {} products:\n{}".format(
            len(product_downloaded), pp.pprint(product_downloaded)))
        context['task_instance'].xcom_push(key='downloaded_products',
                                           value=product_downloaded)
        context['task_instance'].xcom_push(key='downloaded_products_paths',
                                           value=' '.join(
                                               product_downloaded.keys()))
        return product_downloaded
Beispiel #22
0
def download(user, password, productid, path, md5, url):
    """Fetch one Sentinel product from a SciHub endpoint.

    Logs in to *url* with the given SciHub credentials and downloads the
    product identified by *productid* into *path*, checking it against
    *md5*.
    """
    hub = SentinelAPI(user, password, url)
    hub.download(productid, path, md5)
Beispiel #23
0
def download_images(save_imgs, save_rgb, save_tiles, unet_weights, unet_clouds,
                    class_path, class_clouds, poly_path, percentiles_forest,
                    percentiles_clouds, boundsdata):
    """Download recent Sentinel-2 L2A scenes over an AOI and process each.

    For every product found in the last 30 days that lies within the
    footprint described by the GeoJSON file *boundsdata*, the scene is
    downloaded and unzipped into *save_imgs* (unless its .SAFE folder is
    already there) and then run through the tiling / UNet / polygon
    pipeline (see _process_safe_product).

    Refactor notes: the entire post-download pipeline was duplicated
    verbatim in both branches of the "already downloaded?" check; it now
    runs once per scene via _process_safe_product().  The manual counter
    ``k`` was replaced by iterating the three product columns in lockstep.
    """
    # connect to the API (credentials were scrubbed in the source)
    user = '******'
    password = '******'

    api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

    # search footprint from the GeoJSON bounds file
    footprint = geojson_to_wkt(read_geojson(boundsdata))

    # all Level-2A Sentinel-2 scenes fully inside the AOI, last 30 days
    products = api.query(
        footprint,
        date=(["NOW-30DAYS", "NOW"]),
        area_relation='IsWithin',
        platformname='Sentinel-2',
        processinglevel='Level-2A',
        #cloudcoverpercentage = (0, 20)
    )

    print(len(products))

    table_names = api.to_geodataframe(products)

    # uuid -> download id, title -> zip name, filename -> .SAFE folder name
    for uuid, title, safe_name in zip(table_names['uuid'],
                                      table_names['title'],
                                      table_names['filename']):

        file_dir = save_imgs + '/' + safe_name

        if os.path.isdir(file_dir) is False:
            # scene not on disk yet: download the zip into save_imgs
            retval = os.getcwd()
            os.chdir(save_imgs)
            print("Downloading data...")
            api.get_product_odata(uuid)
            api.download(uuid)
            os.chdir(retval)  # return to previous directory

            # wait until the archive shows up on disk
            path_zip_name = save_imgs + '/' + title + '.zip'
            while not os.path.exists(path_zip_name):
                time.sleep(1)

            if not os.path.isfile(path_zip_name):
                raise ValueError("%s isn't a file!" % path_zip_name)

            # extract, then drop the archive to save space
            with zipfile.ZipFile(path_zip_name, 'r') as zip_ref:
                zip_ref.extractall(save_imgs)
            os.remove(path_zip_name)  # remove .zip file
            print("%s has been removed successfully" % title)

        _process_safe_product(title, safe_name, save_imgs, save_rgb,
                              save_tiles, unet_weights, unet_clouds,
                              class_path, class_clouds, poly_path,
                              percentiles_forest, percentiles_clouds)

    return


def _process_safe_product(title, safe_name, save_imgs, save_rgb, save_tiles,
                          unet_weights, unet_clouds, class_path, class_clouds,
                          poly_path, percentiles_forest, percentiles_clouds):
    """Run tiling, both UNet passes and polygon evaluation for one scene.

    Each stage is skipped when its output directory already exists, so the
    function is safe to re-run on partially processed scenes.
    """
    path_to_folder = save_imgs + '/' + safe_name + '/GRANULE/'

    # RGB tiles
    dir_save_tiles = save_tiles + '/' + title
    if os.path.isdir(dir_save_tiles) is False:
        print('Creating RGB tiles')
        os.mkdir(dir_save_tiles)
        rgb_tiles(path_to_folder, save_rgb, dir_save_tiles, title)

    # forest classification (UNet)
    save_class_path = class_path + '/' + title
    if os.path.isdir(save_class_path) is False:
        print('Applying UNet')
        os.mkdir(save_class_path)
        application(dir_save_tiles,
                    unet_weights,
                    save_class_path,
                    percentiles_forest,
                    clouds=0)

        # merge predicted tiles into one GeoTiff
        join_tiles(save_class_path, class_path, path_to_folder)
        print("Tiles merged!")

    # cloud classification (UNet)
    save_class_clouds = class_clouds + '/' + title
    if os.path.isdir(save_class_clouds) is False:
        print('Applying UNet clouds')
        os.mkdir(save_class_clouds)
        application(dir_save_tiles,
                    unet_clouds,
                    save_class_clouds,
                    percentiles_clouds,
                    clouds=1)

        # merge the clouds predicted tiles into one GeoTiff
        join_tiles(save_class_clouds, class_clouds, path_to_folder)
        print("Clouds tiles merged!")

    # polygons evalutation
    print("Polygons evaluation")
    polygons(title,
             class_path,
             class_clouds,
             path_to_folder,
             save_class_path,
             save_imgs,
             poly_path,
             time_spaced=None)
Beispiel #24
0
class Sentinelsat_products:
    """Query, filter and download Sentinel products from SciHub.

    Credentials and the default footprint come from the ``settings``
    module.  ``self.products`` is populated at construction time with the
    result of the initial query.
    """

    def __init__(self,
                 date_start,
                 date_finish,
                 footprint=settings.footprint,
                 platformname="Sentinel-3"):
        self.date_start = date_start
        self.date_finish = date_finish
        self.platformname = platformname
        self.api = SentinelAPI(settings.sentinel_api_user,
                               settings.sentinel_api_key,
                               'https://scihub.copernicus.eu/dhus')
        self.wkt_footprint = footprint
        # BUG fix: forward the caller's platform choice to the query
        # instead of silently ignoring it.
        self.products = self.query_products(self.date_start,
                                            self.date_finish,
                                            platformname=self.platformname)

    def query_products(self,
                       date_start,
                       date_finish,
                       platformname="Sentinel-3"):
        """Search products whose footprint contains the instance footprint.

        BUG fix: the original ignored all three arguments (it re-read
        ``self.date_start``/``self.date_finish`` and hard-coded
        'Sentinel-3') and built a second, throw-away API client instead
        of reusing ``self.api``.
        """
        return self.api.query(self.wkt_footprint,
                              area_relation='Contains',
                              date=(date_start, date_finish),
                              platformname=platformname)

    def filter_products(self, instrument, level, p_type, timeliness):
        """Keep only products matching instrument / level / type / timeliness.

        Fetches the full OData record for each product and removes
        non-matching entries from ``self.products`` in place.
        """
        removed_products = []
        for product_key in self.products:
            odata = self.api.get_product_odata(product_key, full=True)
            # e.g. keep only Level-1 OLCI products with NTC timeliness
            keep = ((odata["Instrument"] == instrument)
                    and (p_type in odata["Product type"])
                    and odata["Timeliness Category"] == timeliness
                    and odata["Product level"] == level)
            if not keep:
                removed_products.append(product_key)
        for key in removed_products:
            del self.products[key]

    def download_products(self, make_subset=True):
        """Download every product into a per-date folder and unzip it.

        When *make_subset* is true, a footprint subset of each scene is
        additionally written as GeoTiff and BEAM-DIMAP via snappy_utils.
        """
        print("----------")
        for key in self.products:
            file_name = self.products[key]["filename"]
            # the summary starts with "Date: YYYY-MM-DD"; slice the date out
            file_date = self.products[key]["summary"][:16].split("Date: ")[1]
            download_path = os.path.join(settings.data_path, file_date)
            if not os.path.exists(download_path):
                os.makedirs(download_path)
            # if it was downloaded before it won't download again
            download_info = self.api.download(key,
                                              directory_path=download_path)
            zip_path = download_info["path"]
            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
                zip_ref.extractall(download_path)
            if make_subset:
                extracted_directory = os.path.join(download_path, file_name)
                product = snappy_utils.read_product(extracted_directory)
                subset = snappy_utils.make_subset(product, settings.footprint)
                snappy_utils.write_product(
                    subset, os.path.join(download_path, "laguna.tif"),
                    "GeoTiff")
                snappy_utils.write_product(
                    subset, os.path.join(download_path, "laguna.dim"),
                    "BEAM-DIMAP")
# Connect to the Copernicus Open Access Hub with the credentials above
api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus')

#Setting the search footprint using a Geojson file
footprint = geojson_to_wkt(read_geojson(AOI))

#Setting the search query parameters
products = api.query(footprint,
                     date=('20200101', '20200331'),
                     platformname='Sentinel-2',
                     processinglevel='Level-2A',
                     cloudcoverpercentage=(0, 20))

#Printing the number of products found
print("The number of products found is: {} ".format(len(products)))

#Creating a table with all the product search results
products_table = api.to_geodataframe(products)

#This part downloads the product(s) in the same folder where your code is located

## OPTION 1: Download single product
api.download('df132697-676e-43ce-b7bd-45211696119f')

## OPTION 2: Download all products
# BUG fix: the original printed the (growing) download list on every loop
# iteration; announce the full list once, then download each product.
download_list = [row['title'] for index, row in products_table.iterrows()]
print("The following products will be downloaded: {}".format(download_list))
for index, row in products_table.iterrows():
    api.download(row['uuid'])
Beispiel #26
0
# Start Download: fetch every product listed in the dataframe `df`,
# skipping any whose zip archive already exists in `dirpath`.
for entry in range(len(df)):
    uuid_element = df['uuid'][entry]   # hub id used for the download
    id_sar = df['identifier'][entry]   # product name -> zip file name

    # Destination path of the archive the hub will write.
    # BUG fix: use os.path.join instead of `dirpath + id_sar`, which
    # produced a wrong path (and a useless existence check) whenever
    # dirpath lacked a trailing separator.
    destinationpath = os.path.join(dirpath, id_sar + '.zip')

    if os.path.exists(destinationpath):
        logger.info(id_sar + ' already downloaded')
        print(id_sar + ' already downloaded')
    else:
        try:
            # checksum=True makes sentinelsat verify the file's MD5
            api.download(uuid_element,
                         directory_path=dirpath,
                         checksum=True)
            # BUG fix: the original message ran its words together
            # ("Successfully downloadedS1A...in to/path")
            logger.info('Successfully downloaded ' + id_sar + ' in to ' +
                        destinationpath)
        # BUG fix: bare `except:` also swallowed KeyboardInterrupt/SystemExit
        except Exception:
            logger.warning("error connection!.... Download Interrupted!")
            print("error connection!.... Download Interrupted!")
            time.sleep(1)

# delete all incomplete file
for item in os.listdir(dirpath):
    if item.endswith(".incomplete"):
        os.remove(os.path.join(dirpath, item))
Beispiel #27
0
def getSentinelFiles(DATE,
                     COLHUB_UNAME,
                     COLHUB_PW,
                     TMPDIR,
                     bbox,
                     max_files=1,
                     polarization='hh',
                     platform='s1',
                     time_window=1):
    """Search and download Sentinel-1/-2 products around a given date.

    Queries SciHub for products acquired between DATE - time_window days
    and DATE over the footprint in the GeoJSON file *bbox*.  For 's1' the
    downloaded zips are extracted and the measurement file matching
    *polarization* is collected; for 's2' the (lowest-cloud) product name
    is collected.  At most *max_files* products are downloaded.

    Returns the list of collected paths/names, or ``[False]`` on any
    failure (kept for backward compatibility with existing callers).

    NOTE(review): TMPDIR is concatenated directly onto filenames, so it
    must end with a path separator — confirm with callers.
    """
    print('Arguments -> Box: %s, Max downloads: %s, Polarization: %s, Platform: %s' \
        %(bbox, max_files, polarization, platform))
    # api = SentinelAPI(COLHUB_UNAME, COLHUB_PW, 'https://colhub.met.no/#/home')
    api = SentinelAPI(COLHUB_UNAME, COLHUB_PW,
                      'https://scihub.copernicus.eu/dhus/#/home')
    date = DATE.strftime('%Y%m%d')
    yestdate = (DATE - timedelta(time_window)).strftime('%Y%m%d')

    footprint = geojson_to_wkt(read_geojson(bbox))
    try:
        if platform == 's1':
            products = api.query(footprint, (yestdate, date),
                                 platformname='Sentinel-1',
                                 producttype='GRD',
                                 sensoroperationalmode='EW')
        elif platform == 's2':
            products = api.query(
                footprint,
                (yestdate, date),
                platformname='Sentinel-2',
                cloudcoverpercentage=(0, 80)  # TODO: find reasonable threshold
            )
        else:
            print('Not a valid platform!')
            return [False]

    except SentinelAPIError as e:
        print(e)
        return [False]
    # BUG fix: bare `except:` also swallowed KeyboardInterrupt/SystemExit
    except Exception:
        print(
            "Unknown error occurred while accessing Copernicus Open Access Hub"
        )
        return [False]

    if len(products) == 0:
        print("No files found at date: " + date)
        return [False]

    print("Found", len(products), "Sentinel images.")

    if platform == 's2':
        # keep only the least cloudy, earliest scene
        products_df = api.to_dataframe(products).sort_values(
            ['cloudcoverpercentage', 'beginposition'], ascending=[True, True])
        products_df = products_df.head(1)
    else:
        products_df = api.to_dataframe(products).sort_values('beginposition',
                                                             ascending=True)

    downloadNames = []

    for i in range(len(products_df)):
        print("Image %s / %s" % (i, len(products_df)))
        if i == max_files:  # Prevents too large mosaic file
            break
        product_size = float(products_df['size'].values[i].split(' ')[0])
        product_name = products_df['filename'].values[i][:-5]

        product_clouds = ''
        if platform == 's2':
            product_clouds = ', Cloudcover: ' + str(
                products_df['cloudcoverpercentage'].values[i])

        print("Name: %s, size: %s MB%s" %
              (product_name, product_size, product_clouds))

        # BUG fix: `products_df['uuid'][i]` relied on deprecated positional
        # fallback for label indexing (raises KeyError on modern pandas);
        # use .values[i] like every other column access above.
        api.download(products_df['uuid'].values[i], TMPDIR)

        if platform == 's1':
            # UNZIPPING DOWNLOADED FILE (context manager closes the zip
            # even if extraction fails)
            print("Unzipping product...")
            with zipfile.ZipFile(TMPDIR + product_name + '.zip') as zip_ref:
                zip_ref.extractall(TMPDIR)

            geofiles = glob.glob(TMPDIR + product_name + '.SAFE/measurement/*')
            # FINDING RIGHT POLARIZATION FILE (now using HH-polarization)
            if '-' + polarization + '-' in geofiles[0]:
                downloadNames.append(geofiles[0])
            else:
                downloadNames.append(geofiles[1])

        elif platform == 's2':
            downloadNames.append(product_name)
    return downloadNames
Beispiel #28
0
def download(user, password, productid, path):
    """Fetch one Sentinel-1 product from SciHub.

    Authenticates with the given credentials against the default hub URL
    and stores the product identified by *productid* under *path*.
    """
    hub = SentinelAPI(user, password)
    hub.download(productid, path)
Beispiel #29
0
            ShapefileName1 = JsonFileName.split('.')[0] + '.shp'
            gdal.Warp('aRaster_ABC_cliped.tif',
                      'aRaster_ABC.tif',
                      cutlineDSName=ShapefileName1,
                      dstAlpha=True)

            plt.imshow(gdal.Open('aRaster_ABC_cliped.tif').ReadAsArray()[0])
            plt.show()
            plt.imshow(gdal.Open('aRaster_ABC.tif').ReadAsArray()[0])
            plt.show()
            Identifier = ProductID['Identifier'][int(ID)]
            Decision = raw_input('Do you think this is correct [Y or N]: ')
        os.remove('abc.tif')
        os.remove('aRaster_ABC.tif')
        os.remove('aRaster_ABC_cliped.tif')
        api.download(ProductID['Pr_Id'][int(ID)], Path_to_Download)

    elif Kind == 'A':
        print 'You all Data is being downloading\n'
        for item in ProductID['Pr_Id']:
            api.download(item, Path_to_Download)
    elif Kind == 'M':
        print 'You are downloading Multipel Scene ID'
        ID = raw_input(
            'Type Multipel Scene ID number separated by , eg: 1,2,5\nTypeNow: '
        )
        ID = ID.split(',')
        for i in ID:
            i = int(i)
            api.download(ProductID['Pr_Id'][i], Path_to_Download)