def products():
    """A fixture for tests that need some non-specific set of products as input."""
    # A fixed footprint and date window gives every test the same product set.
    footprint = geojson_to_wkt(read_geojson('tests/map.geojson'))
    return SentinelAPI(**_api_auth).query(footprint, "20151219", "20151228")
def raw_products():
    """A fixture for tests that need some non-specific set of products in the form of a raw response as input."""
    api = SentinelAPI(**_api_auth)
    wkt = geojson_to_wkt(read_geojson('tests/map.geojson'))
    # Go through the low-level query path to obtain the unparsed response.
    return api._load_query(api.format_query(wkt, "20151219", "20151228"))
def test_get_products_invalid_json():
    """The client must raise SentinelAPIError when the server returns malformed JSON."""
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        # Serve a deliberately broken JSON payload with a 200 status code.
        rqst.post(
            'https://scihub.copernicus.eu/apihub/search?format=json',
            text="{Invalid JSON response",
            status_code=200,
        )
        with pytest.raises(SentinelAPIError) as excinfo:
            api.query(
                area=geojson_to_wkt(read_geojson(FIXTURES_DIR + "/map.geojson")),
                date=("20151219", "20151228"),
                platformname="Sentinel-2",
            )
        assert excinfo.value.msg == "Invalid API response."
def test_order_by():
    """Results must come back sorted ascending by cloud cover when order_by asks for it."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        ("20151219", "20151228"),
        platformname="Sentinel-2",
        cloudcoverpercentage=(0, 10),
        order_by="cloudcoverpercentage, -beginposition",
    )
    assert len(products) == 3
    cloud_covers = [product["cloudcoverpercentage"] for product in products.values()]
    assert cloud_covers == sorted(cloud_covers)
def main():
    """Download Sentinel-1 GRD scenes for a given year/month over a configured region.

    Usage: python3 get_past_scenes.py [year] [month]

    Offline products (in the Long Term Archive) are triggered for retrieval
    and polled until available.  Returns 0 on success, 1 on bad arguments.
    Relies on module-level ``username``, ``password``, ``home``, ``location``
    and ``sarIn``.
    """
    if len(sys.argv) < 2:
        print(" Usage: python3 get_past_scenes.py [year] [month]")
        return 1
    api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus')
    logging.info(api.api_url)
    # Search window: from the 1st of the requested month, 12 days forward.
    t0 = datetime(int(sys.argv[1]), int(sys.argv[2]), 1, 0, 0, 0)
    tf = t0 + timedelta(days=12)
    # search by polygon, time, and SciHub query keywords
    footprint = geojson_to_wkt(
        read_geojson(home['parameters'] + '/extent_' + location['region'] + '.geojson'))
    products_s1a = api.query(footprint,
                             date=(date(t0.year, t0.month, t0.day),
                                   date(tf.year, tf.month, tf.day)),
                             producttype="GRD",
                             platformname='Sentinel-1')
    unavailable = []
    for uuid in products_s1a:
        product_info = api.get_product_odata(uuid)
        # Skip products already present in the input directory.
        if any(product_info['title'] in s for s in os.listdir(sarIn)):
            logging.info('Skipping ' + uuid + '. Already exists in ' + sarIn)
            continue
        logging.info('Is ' + uuid + ' online?')
        logging.info(product_info['Online'])
        if not product_info['Online']:
            logging.info('Requesting unavailable uuids')
            # The download attempt triggers retrieval from the Long Term Archive.
            api.download(uuid)
            unavailable = unavailable + [uuid]
        else:
            logging.info('Downloading available uuids')
            api.download(uuid, directory_path=sarIn)
            logging.info(
                'Sleeping 30 minutes (the API does not allow intensive requests)')
            time.sleep(30 * 60)
    # Poll the triggered products every 10 minutes until all are retrieved.
    while len(unavailable) > 0:
        for uuid in unavailable:
            product_info = api.get_product_odata(uuid)
            if product_info['Online']:
                logging.info(uuid + ' is available! Downloading:')
                api.download(uuid, directory_path=sarIn)
                # NOTE(review): removing from the list being iterated skips the
                # next element; skipped items are re-checked on the next pass.
                unavailable.remove(uuid)
        time.sleep(600)
    return 0
def test_s2_cloudcover():
    """A Sentinel-2 query with a cloud-cover range returns the expected three products in order."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson('tests/map.geojson')),
        "20151219",
        "20151228",
        platformname="Sentinel-2",
        cloudcoverpercentage="[0 TO 10]",
    )
    expected = [
        "6ed0b7de-3435-43df-98bf-ad63c8d077ef",
        "37ecee60-23d8-4ec2-a65f-2de24f51d30e",
        "0848f6b8-5730-4759-850e-fc9945d42296",
    ]
    assert len(products) == 3
    assert list(products) == expected
def __init__(self, geojson_path='sample-polygone.geojson', max_threads=4):
    """Wire up DB client, Sentinel API, AOI footprint and thread pool, then fetch.

    :param geojson_path: path to the AOI polygon geojson file.
    :param max_threads: size of the worker thread pool.
    """
    # private
    self.__lock__ = Lock()
    self.__dbconn__ = DBClient()
    self.__api__ = self.__get_sentinel_api__()
    self.__geojson_path__ = geojson_path
    # WKT footprint derived from the AOI geojson.
    self.__footprint__ = geojson_to_wkt(read_geojson(geojson_path))
    self.__threadpool__ = ThreadPool(max_threads)
    self.__img_path__ = self.__get_env_var__('img_path')
    self.__tiff_path__ = self.__get_env_var__('tiff_path')
    # public
    self.max_threads = max_threads
    # Fetching starts as the final construction step.
    self.__fetch_measurements__()
def sentinelsat_getlists(base_url, template, datetime_start, datetime_end):
    """Query SciHub for products matching a JSON template and return their UUIDs.

    Parameters
    ----------
    base_url : str
        Unused here; kept for interface compatibility with callers.
    template : str or None
        JSON string with optional keys 'platformname', 'producttype',
        'polarisationmode' and 'sensoroperationalmode'.
    datetime_start, datetime_end : datetime or None
        Sensing-period bounds; default to 2015-12-19 / 2015-12-29.

    Returns
    -------
    list
        UUIDs of the matching products (empty when template is None or on error).

    Relies on module-level ``api``, ``geojson_roi`` and ``logger``.
    """
    # BUG FIX: defaults used to be assigned a plain string / date object, on
    # which the later .date() call fails; use datetime objects instead.
    if datetime_start is None or datetime_end is None:
        from datetime import datetime as _dt
        if datetime_start is None:
            datetime_start = _dt(2015, 12, 19)
        if datetime_end is None:
            datetime_end = _dt(2015, 12, 29)
    # BUG FIX: defined up front so every path (including template is None and
    # the error path) can return it without a NameError.
    list_links = []
    try:
        if template is not None:
            sentinelsat_obj = json.loads(template)
            platformname = sentinelsat_obj.get('platformname')  # e.g. 'Sentinel-1'
            producttype = sentinelsat_obj.get('producttype')
            polarisationmode = sentinelsat_obj.get('polarisationmode')
            sensoroperationalmode = sentinelsat_obj.get('sensoroperationalmode')
            # Search by polygon, time, and Hub query keywords
            footprint = geojson_to_wkt(read_geojson(geojson_roi))
            if (platformname is not None and producttype is not None
                    and polarisationmode is not None
                    and sensoroperationalmode is not None):
                products = api.query(footprint,
                                     date=(datetime_start.date(),
                                           datetime_end.date()),
                                     platformname=platformname,
                                     producttype=producttype,
                                     polarisationmode=polarisationmode,
                                     sensoroperationalmode=sensoroperationalmode)
            else:
                products = api.query(footprint,
                                     date=(datetime_start.date(),
                                           datetime_end.date()),
                                     producttype=producttype,
                                     platformname=platformname)
            list_links = [value.get("uuid") for value in products.values()]
    except Exception:
        # Narrowed from a bare except; the old message named the wrong function.
        logger.error("Error in sentinelsat_getlists")
    return list_links
def test_get_products_invalid_json():
    """Malformed JSON from the server must surface as SentinelAPIError."""
    api = SentinelAPI("mock_user", "mock_password")
    with requests_mock.mock() as rqst:
        rqst.post('https://scihub.copernicus.eu/apihub/search?format=json',
                  text="{Invalid JSON response", status_code=200)
        area = geojson_to_wkt(read_geojson(FIXTURES_DIR + "/map.geojson"))
        with pytest.raises(SentinelAPIError) as excinfo:
            api.query(area=area,
                      date=("20151219", "20151228"),
                      platformname="Sentinel-2")
    assert excinfo.value.msg == "Invalid API response."
def get_tile(START, END, gjson, out):
    """Download, geocode and export a single Sentinel-1 VH tile.

    :param START: ingestion-date lower bound.
    :param END: ingestion-date upper bound.
    :param gjson: path to the AOI geojson (also used as the geocoding shapefile).
    :param out: destination path for the geocoded image.

    Relies on a module-level ``api`` client and reads the target resolution
    from ``sys.argv[3]``.
    """
    # search database for matching archives
    print('Querying database...')
    footprint = geojson_to_wkt(read_geojson(gjson))
    products = api.query(footprint,
                         ingestiondate=(START, END),
                         platformname='Sentinel-1',
                         producttype='GRD',
                         sensoroperationalmode='IW',
                         orbitdirection='ASCENDING',
                         polarisationmode='VH')
    # download archive
    print('Downloading archive...')
    pmd = api.download_all(products, directory_path='./temp/')
    fname = './temp/' + list(pmd[0].values())[0]['title'] + '.zip'
    # unpack and ingest
    print('Unpacking archive...')
    scene = pyroSAR.identify(fname)
    scene.unpack('./temp/', overwrite=True)
    # geocode
    print('Geocoding data...')
    shp = vector.Vector(filename=gjson)
    geocode(infile=scene, outdir='./temp/', tr=int(sys.argv[3]), scaling='db',
            removeS1ThermalNoise=True,
            demResamplingMethod='BISINC_21_POINT_INTERPOLATION',
            terrainFlattening=True, allow_RES_OSV=True,
            speckleFilter='Refined Lee', shapefile=shp, cleanup=True)
    # save image: pick the geocoded VH file matching this scene's metadata.
    print('Copying image...')
    smd = scene.scanMetadata()
    iname = './temp/' + [
        file for file in os.listdir('./temp/')
        if '{}__{}___{}_{}_VH'.format(smd['sensor'], smd['acquisition_mode'],
                                      smd['orbit'], smd['start']) in file
    ][0]
    shutil.copy2(iname, out)
    # done
    print('Done.')
def test_s2_cloudcover():
    """Cloud-cover-filtered Sentinel-2 query returns exactly the three known products."""
    api = SentinelAPI(**_api_auth)
    products = api.query(geojson_to_wkt(read_geojson('tests/map.geojson')),
                         "20151219", "20151228",
                         platformname="Sentinel-2",
                         cloudcoverpercentage="[0 TO 10]")
    assert len(products) == 3
    first, second, third = list(products)
    assert first == "6ed0b7de-3435-43df-98bf-ad63c8d077ef"
    assert second == "37ecee60-23d8-4ec2-a65f-2de24f51d30e"
    assert third == "0848f6b8-5730-4759-850e-fc9945d42296"
def createimage(taskid, producttitle, dat):
    """Clip a product's 10 m band to the task's ROI bounding box and register it.

    Walks the unpacked product directory for the JP2 matching the band named
    in the ROI feature's properties, crops it to the ROI bounds, writes the
    result as a GeoTIFF under the task directory and records it in the
    database.  Relies on module-level ``config`` and ``database``.
    """
    PRODUCT_DIR = os.path.join(config["rootdirectory"], "tasks", str(taskid),
                               str(producttitle))
    feature = read_geojson(
        os.path.join(config["rootdirectory"], "tasks", str(taskid),
                     "roi.geojson"))
    geom = geopandas.GeoDataFrame.from_features(FeatureCollection([feature]))
    geom.crs = fiona.crs.from_epsg(4326)  # ROI coordinates are WGS84
    for root, dir_names, file_names in os.walk(
            os.path.join(config["rootdirectory"], "tasks", str(taskid),
                         producttitle)):
        sorted_files = sorted(fnmatch.filter(file_names, "*.jp2"))
        # Only the 10 m resolution file of the requested band is used.
        filename = fnmatch.filter(
            file_names, "*" + feature["properties"]["band"] + "_10m.jp2")
        if len(filename) == 0:
            continue
        with rasterio.open(os.path.join(root, filename[0])) as band:
            # Reproject the ROI into the raster's CRS before clipping.
            projected_geom = geom.to_crs(band.crs)
            roi_bb = create_bb_data_frame(projected_geom.bounds.minx,
                                          projected_geom.bounds.miny,
                                          projected_geom.bounds.maxx,
                                          projected_geom.bounds.maxy)
            # Convert each bounding-box geometry to a plain GeoJSON dict for mask().
            roi_bb_polygons = list(
                map(
                    lambda item: json.loads(
                        geopandas.GeoSeries(item).to_json())["features"][0][
                            "geometry"], roi_bb.geometry))
            bb_mask, bb_transform = mask.mask(band, roi_bb_polygons, crop=True)
            plot.show(bb_mask)
            profile = band.meta.copy()
            profile.update({
                "driver": "GTIFF",
                "dtype": bb_mask.dtype,
                "height": bb_mask.shape[1],
                "width": bb_mask.shape[2],
                "transform": bb_transform
            })
            # Image names are sequential per task: image1.tif, image2.tif, ...
            img_name = "image" + str(
                (database.getImageCounter(taskid) + 1)) + ".tif"
            img_file = os.path.join(config["rootdirectory"], "tasks",
                                    str(taskid), img_name)
            with rasterio.open(img_file, "w", **profile) as dst:
                dst.write(bb_mask)
            database.registerNewImage(taskid, img_name, dat)
def sent2_query(user, passwd, geojsonfile, start_date, end_date, cloud=50): """ Fetches a list of Sentienl-2 products Parameters ----------- user : string Username for ESA hub. Register at https://scihub.copernicus.eu/dhus/#/home passwd : string password for the ESA Open Access hub geojsonfile : string Path to a geojson file containing a polygon of the outline of the area you wish to download. See www.geojson.io for a tool to build these. start_date : string Date of beginning of search in the format YYYY-MM-DDThh:mm:ssZ (ISO standard) end_date : string Date of end of search in the format yyyy-mm-ddThh:mm:ssZ See https://www.w3.org/TR/NOTE-datetime, or use cehck_for_s2_data_by_date cloud : string (optional) The maximum cloud clover (as calculated by Copernicus) to download. Returns ------- A dictionary of Sentinel-2 granule products that are touched by your AOI polygon, keyed by product ID. Returns both level 1 and level 2 data. Notes ----- If you get a 'request too long' error, it is likely that your polygon is too complex. The following functions download by granule; there is no need to have a precise polygon at this stage. """ # Originally by Ciaran Robb api = SentinelAPI(user, passwd) footprint = geojson_to_wkt(read_geojson(geojsonfile)) log.info( "Sending Sentinel-2 query:\nfootprint: {}\nstart_date: {}\nend_date: {}\n cloud_cover: {} " .format(footprint, start_date, end_date, cloud)) products = api.query(footprint, date=(start_date, end_date), platformname="Sentinel-2", cloudcoverpercentage="[0 TO {}]".format(cloud)) return products
def test_order_by():
    """order_by must hold across paginated queries (more than 100 results)."""
    api = SentinelAPI(**_api_auth)
    kwargs = dict(
        area=geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        date=("20151219", "20161019"),
        platformname="Sentinel-2",
        cloudcoverpercentage=(0, 10),
        order_by="cloudcoverpercentage, -beginposition",
    )
    # Check that order_by works correctly also in cases where pagination is required
    expected_count = api.count(**kwargs)
    assert expected_count > 100
    products = api.query(**kwargs)
    assert len(products) == expected_count
    cloud_values = [p["cloudcoverpercentage"] for p in products.values()]
    assert cloud_values == sorted(cloud_values)
def test_footprints_s1():
    """Footprints of S1 GRD results must be valid and match the stored fixture."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        (datetime(2014, 10, 10), datetime(2014, 12, 31)),
        producttype="GRD",
    )
    footprints = api.to_geojson(products)
    assert all(not feat['geometry'].errors() for feat in footprints['features'])
    with open(FIXTURES_DIR + '/expected_search_footprints_s1.geojson') as geojson_file:
        expected_footprints = geojson.loads(geojson_file.read())
    # to compare unordered lists (JSON objects) they need to be sorted or changed to sets
    assert set(footprints) == set(expected_footprints)
def download_sentinel_products_for_ROI(geojson_file):
    """Search and download all Sentinel-2 products intersecting a ROI feature.

    The feature's 'date' and 'incubation' properties come from the geojson
    file; downloaded product filenames are appended as new features to
    REGION_DATA_FILE for later processing.  Relies on module-level ``api``,
    ``SENTINELPRODUCTS_DIR`` and ``REGION_DATA_FILE``.
    """
    print("Searching products for %s" % geojson_file)
    feature = read_geojson(geojson_file)
    footprint = geojson_to_wkt(feature.geometry)
    date = feature.properties["date"]
    incubation = feature.properties["incubation"]
    # TODO adjustable coverage interval # Config file?
    products = api.query(footprint,
                         date=("2018-07-12T13:00:00Z-7DAYS",
                               "2018-07-12T13:00:00Z+7DAYS"),
                         platformname="Sentinel-2")
    if len(products) > 0:
        print("Found {} Products, downloading {} GB".format(
            len(products), api.get_products_size(products)))
    else:
        # BUG FIX: this branch was guarded by `elif len(products):`, which can
        # never be true after the `> 0` check — the empty-result exit was
        # unreachable.
        print("Found no products for specified search terms.")
        exit(0)
    if not os.path.exists(SENTINELPRODUCTS_DIR):
        os.makedirs(SENTINELPRODUCTS_DIR)
    try:
        api.download_all(list(products.keys()), SENTINELPRODUCTS_DIR)
    except ConnectionError as err:
        print(err)
    # for every found product, create a separate GeoJson Feature for later simplicity
    import copy
    if not os.path.exists(REGION_DATA_FILE):
        regions = FeatureCollection([])
    else:
        with open(REGION_DATA_FILE, "r") as f:
            regions = json.load(f)
    for product in products.values():
        new_feature = copy.deepcopy(feature)
        new_feature.properties["sentinelproduct"] = product["filename"]
        regions["features"].append(new_feature)
    with open(REGION_DATA_FILE, "w") as f:
        json.dump(regions, f)
def test_footprints_s1():
    """Each returned footprint must be valid GeoJSON matching the expected fixture."""
    api = SentinelAPI(**_api_auth)
    products = api.query(geojson_to_wkt(read_geojson('tests/map.geojson')),
                         datetime(2014, 10, 10), datetime(2014, 12, 31),
                         producttype="GRD")
    footprints = api.to_geojson(products)
    for feat in footprints['features']:
        result = geojson.is_valid(feat['geometry'])
        assert result['valid'] == 'yes', result['message']
    with open('tests/expected_search_footprints_s1.geojson') as fp:
        expected_footprints = geojson.loads(fp.read())
    # Sets make the comparison order-independent.
    assert set(footprints) == set(expected_footprints)
def test_footprints_s1():
    """S1 GRD search footprints are valid and identical to the stored fixture."""
    api = SentinelAPI(**_api_auth)
    wkt = geojson_to_wkt(read_geojson('tests/map.geojson'))
    products = api.query(wkt,
                         datetime(2014, 10, 10),
                         datetime(2014, 12, 31),
                         producttype="GRD")
    footprints = api.to_geojson(products)
    for footprint in footprints['features']:
        check = geojson.is_valid(footprint['geometry'])
        assert check['valid'] == 'yes', check['message']
    with open('tests/expected_search_footprints_s1.geojson') as geojson_file:
        expected = geojson.loads(geojson_file.read())
    # Unordered JSON comparison via sets.
    assert set(footprints) == set(expected)
def test_s2_cloudcover():
    """Tuple-style cloud-cover filter yields the three known products, not cloud-sorted."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        ("20151219", "20151228"),
        platformname="Sentinel-2",
        cloudcoverpercentage=(0, 10))
    assert len(products) == 3
    assert list(products) == [
        "6ed0b7de-3435-43df-98bf-ad63c8d077ef",
        "37ecee60-23d8-4ec2-a65f-2de24f51d30e",
        "0848f6b8-5730-4759-850e-fc9945d42296",
    ]
    # For order-by test
    cloud = [p["cloudcoverpercentage"] for p in products.values()]
    assert cloud != sorted(cloud)
def main(options):
    """Interactively pick a recent Sentinel-2 L2A product over an AOI and locate bands 8, 12 and TCI.

    NOTE(review): credentials are hard-coded, and the download() calls near
    the end are commented out — only product listing / URL resolution runs,
    then local jp2 files are opened, presumably from a previous run.
    """
    api = SentinelAPI('bartulo', 'ventanuco')
    footprint = geojson_to_wkt(read_geojson(options.geojson))
    # Window: the last `options.days` days up to tomorrow.
    products = api.query(
        footprint,
        date = (date.today() - timedelta(options.days), date.today() + timedelta(1)),
        producttype = 'S2MSI2A',
        platformname = 'Sentinel-2')
    data = api.to_geojson(products)['features']
    question_name = 'capa'
    # Prompt the user with one selectable line per available image.
    questions = [
        inquirer.List(
            question_name,
            message = 'Imagenes disponibles',
            choices = ["Id: %s - Fecha: %s - Cobertura de nubes:%.4s%%" %
                       (i, data[i]['properties']['beginposition'],
                        data[i]['properties']['cloudcoverpercentage'])
                       for i in range(len(data))],
        ),
    ]
    answers = inquirer.prompt(questions)
    # Recover the numeric index from the chosen label ("Id: N - ...").
    index = int(answers[question_name].split('-')[0].split(':')[1].replace(' ', ''))
    baseURL = data[index]['properties']['link_alternative']
    filename = data[index]['properties']['filename']
    api_session = requests.Session()
    api_session.auth = ('bartulo', 'ventanuco')
    # Walk the OData node tree: product -> GRANULE -> IMG_DATA -> resolution.
    granules = api_session.get("%s/Nodes('%s')/Nodes('GRANULE')/Nodes?$format=json" %
                               (baseURL, filename)).json()
    granules_id = granules['d']['results'][0]['Id']
    print("%s/Nodes('%s')/Nodes('GRANULE')/Nodes?$format=json" % (baseURL, filename))
    bands_10m = api_session.get(
        "%s/Nodes('%s')/Nodes('GRANULE')/Nodes('%s')/Nodes('IMG_DATA')/Nodes('R10m')/Nodes?$format=json" %
        (baseURL, filename, granules_id)).json()
    # NOTE(review): band selection by fixed result index — confirm the node
    # ordering is stable across products.
    band8 = bands_10m['d']['results'][4]['__metadata']['media_src']
    bandColor = bands_10m['d']['results'][5]['__metadata']['media_src']
    bands_20m = api_session.get(
        "%s/Nodes('%s')/Nodes('GRANULE')/Nodes('%s')/Nodes('IMG_DATA')/Nodes('R20m')/Nodes?$format=json" %
        (baseURL, filename, granules_id)).json()
    band12 = bands_20m['d']['results'][8]['__metadata']['media_src']
    print("%s/Nodes('%s')/Nodes('GRANULE')/Nodes('%s')/Nodes('IMG_DATA')/Nodes('R20m')/Nodes?$format=json" %
          (baseURL, filename, granules_id))
    # download(band12, 'banda12.jp2', api_session)
    # download(band8, 'banda8.jp2', api_session)
    # download(bandColor, 'color.jp2', api_session)
    banda8 = gdal.Open('banda8.jp2')
    banda12 = gdal.Open('banda12.jp2')
    b8 = banda8.ReadAsArray()
    b12 = banda12.ReadAsArray()
def search(user, psswd, sensor, file, start, end, maxcloud):
    '''
    Searching for all the available scenes in the specified region and with
    the parameters provided by user.

    :param user: SciHub username.
    :param psswd: SciHub password.
    :param sensor: 's1' or 's2' — selects the mission and query filters.
    :param file: path to the AOI geojson file.
    :param start: sensing-period start.
    :param end: sensing-period end.
    :param maxcloud: maximum cloud-cover percentage (Sentinel-2 only).
    '''
    url = 'https://scihub.copernicus.eu/dhus'
    api = SentinelAPI(user, psswd, url)
    footprint = geojson_to_wkt(read_geojson(file))
    if sensor == 's1':
        products = api.query(footprint,
                             date=(start, end),
                             platformname='Sentinel-1',
                             orbitdirection='ASCENDING',
                             polarisationmode='VV VH',
                             producttype='GRD',
                             sensoroperationalmode='IW')
        for x in products:
            logging.info("\t {} {} ".format(products[x]["filename"],
                                            products[x]["size"]))
        logging.info("\t Found {} scenes in the region specified".format(
            len(products)))
        # Persist the UUIDs of every found S1 scene for later download.
        with open("scenes_s1_found.txt", "w") as f:
            for i in products:
                f.write(products[i]["uuid"] + "\n")
        return (products, api)
    if sensor == 's2':
        products = api.query(footprint,
                             date=(start, end),
                             platformname='Sentinel-2',
                             cloudcoverpercentage=(0, maxcloud))
        for x in products:
            logging.info("\t {} {} ".format(products[x]["filename"],
                                            products[x]["size"]))
        logging.info("\t Found {} scenes in the region specified".format(
            len(products)))
        # S2 scenes are recorded by identifier rather than uuid.
        with open("scenes_s2_found.txt", "w") as f:
            for i in products:
                f.write(products[i]["identifier"] + "\n")
        # NOTE(review): the s1 branch returns (products, api) but this branch
        # returns only products — confirm callers expect the asymmetry.
        return (products)
def get_sentinel_images(reef, start_date, end_date, num_images, user, password): """ Method to download Sentinel-2 images using Sentinel API Params - 1. reef (str) - Coral reef object 2. start_date (str) - starting date of sentinel images 3. end_date (str) - end date of sentinel images 4. num_images (int) - number of sentinel-2 images to download 5. user (str) - username on scihub.copernicus.eu 6. password (str) - password on scihub.copernicus.eu """ #login into api api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus') #load in geojson of reef reef_path = reef.get_path() reef_gjson_fp = os.path.join(reef_path, reef.get_reef_name() + '.geojson') reef_footprint = geojson_to_wkt(read_geojson(reef_gjson_fp)) #query sentinel sat api products = api.query(reef_footprint,date = (start_date, end_date),platformname = 'Sentinel-2'\ ,area_relation = 'Intersects',processinglevel = 'Level-2A',\ order_by = 'cloudcoverpercentage') #creating folder for saving sentinel images sentinel_path = os.path.join(reef_path, 'SAFE files') if not os.path.exists(sentinel_path): os.makedirs(sentinel_path) #downloading num_images for i, x in enumerate(products.items()): k, v = x[0], x[1] if i < num_images: api.download(k, directory_path=sentinel_path) #unzipping files for file in os.listdir(sentinel_path): if file.endswith('.zip'): file_path = os.path.join(sentinel_path, file) out_path = os.path.join(sentinel_path, file.split('.')[0]) if os.path.exists(file_path) and not os.path.exists(out_path): with zipfile.ZipFile(file_path, "r") as zip_ref: zip_ref.extractall(sentinel_path) os.remove(file_path)
def test_s2_cloudcover():
    """Numeric cloud-cover range returns the expected products, unsorted by cloud."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson(FIXTURES_DIR + '/map.geojson')),
        ("20151219", "20151228"),
        platformname="Sentinel-2",
        cloudcoverpercentage=(0, 10),
    )
    assert len(products) == 3
    ids = list(products)
    assert ids[0] == "6ed0b7de-3435-43df-98bf-ad63c8d077ef"
    assert ids[1:] == ["37ecee60-23d8-4ec2-a65f-2de24f51d30e",
                       "0848f6b8-5730-4759-850e-fc9945d42296"]
    # For order-by test
    covers = [p["cloudcoverpercentage"] for p in products.values()]
    assert covers != sorted(covers)
def create_training_sets():
    """Split ROI features 64/16/20 into train/val/test sets and populate their directories."""
    print("Creating training and validating sets...")
    from sklearn.model_selection import train_test_split
    features = read_geojson(REGION_DATA_FILE).features
    print("Total of {} items".format(len(features)))
    # First carve off 20% for test, then 20% of the remainder for validation.
    train_set, test_set = train_test_split(features, test_size=0.2, random_state=42)
    train_set, val_set = train_test_split(train_set, test_size=0.2, random_state=42)
    for subset, directory in ((train_set, TRAIN_DIR),
                              (val_set, VAL_DIR),
                              (test_set, TEST_DIR)):
        populate_set(subset, directory)
def intersect_products(self):
    """Keep only the queried products whose grouped footprint union contains the order's AOI.

    Products are grouped by a key sliced from their titles, each group's
    (slightly scaled) footprints are unioned, and every product outside the
    first group that fully contains the ground polygon is removed from
    ``self.products``.
    """
    print('Found ' + str(len(self.products)) + ' products')
    S2_geojson_path = (self.directory / 'orders' /
                       FLAGS.s2_order_id).with_suffix('.geojson')
    ground_geojsons = read_geojson(S2_geojson_path)
    products_geojsons = self.queried_products_as_geojson()
    ground_polygon = ground_geojsons.get('features')[0].get(
        'geometry').get('coordinates')
    ground_polygon = geometry.Polygon(ground_polygon[0][0])
    titles = []
    ids = []
    for item in products_geojsons.get('features'):
        id = item.get('properties').get('id')
        item = item.get('properties').get('title')
        # Group key: title characters 17-24 plus 48-54 — presumably the
        # sensing date and tile fields; TODO confirm against the title format.
        item = (item[17:25] + item[48:55])
        titles.append(item)
        ids.append([item, id])
    unique = list(set(titles))
    unique.sort()
    union_list = []
    for i, element in enumerate(unique):
        local_polygon = Polygon()
        for j in range(len(titles)):
            if titles[j] == element:
                item = products_geojsons.get('features')[j]
                item = item.get('geometry').get('coordinates')
                item = geometry.Polygon(item[0][0])
                # Scale by 1% to absorb tiny gaps between adjacent footprints.
                item = affinity.scale(item, xfact=1.01, yfact=1.01)
                polygons = [item, local_polygon]
                local_polygons = unary_union(polygons)
                local_polygon = item
        union_list.append([local_polygons, element])
    found_id = None
    for index, element in enumerate(union_list):
        wkt = element[0].wkt
        # First group whose union fully contains the AOI wins.
        if ground_polygon.within(element[0]):
            found_id = element[1]
            break
    for i in ids:
        if found_id != i[0]:
            del self.products[i[1]]
    print('Reduced the products to ' + str(len(self.products)) + ' products')
def ndvihesaplama(request):
    """Django view: fetch the least-cloudy Sentinel-2 product for a fixed AOI,
    dump selected metadata fields to a text file and render them.

    NOTE(review): SciHub credentials are hard-coded in the view — move to
    settings/environment before deployment.
    """
    api = SentinelAPI('flavves', 'BATUhan123.',
                      'https://scihub.copernicus.eu/dhus')
    footprint = geojson_to_wkt(read_geojson('media/map.geojson'))
    products = api.query(footprint,
                         date=('20191219', date(2019, 12, 29)),
                         platformname='Sentinel-2')
    # build a pandas dataframe
    products_df = api.to_dataframe(products)
    # filtering: least cloud cover first, oldest ingestion first; keep one row
    products_df_sorted = products_df.sort_values(
        ['cloudcoverpercentage', 'ingestiondate'], ascending=[True, True])
    products_df_sorted = products_df_sorted.head(1)
    df = products_df_sorted
    NotDefteriKaydi = df.values.tolist()
    str_denemesi = str(NotDefteriKaydi)
    Kaydetmeye_basla = list(str_denemesi.split(","))
    # Column positions of interest and their (Turkish) display labels.
    yerler = [0, 7, 8, 9, 12, 14, 18, 19, 20]
    isimler = [
        "Dosya adı:", "Uydu adı", "Dosya boyutu", "Çekim tarihi",
        "Orbit numarası", "Bulut", "vejetasyon", "su", "not vejetasyon"
    ]
    i = 0
    # Write "label:value" lines for each selected field.
    with open("media/books/txt/deneme.txt", "w") as dosya:
        for sira in yerler:
            print(isimler[i] + ":" + Kaydetmeye_basla[sira])
            yaz = (isimler[i] + ":" + Kaydetmeye_basla[sira])
            i = i + 1
            dosya.write(yaz)
            dosya.write("\n")
        dosya.close()
    file_path = ('media\\books\\txt\\deneme.txt')  # full path to text.
    data_file = open(file_path, 'r')
    data = data_file.read()
    data = list(data.split("\n"))
    context = {'deneme': data}
    return render(request, "todo_app/ndvihesaplama.html", context,
                  backend().badana())
def getscenes(): api = SentinelAPI(username, password, 'https://scihub.copernicus.eu/dhus') # download single scene by known product id #api.download(<product_id>) t0 = datetime.now() - timedelta(days=7) tf = datetime.now() # search by polygon, time, and SciHub query keywords footprint = geojson_to_wkt( read_geojson(home['parameters'] + '/extent_ce.geojson')) products = api.query(footprint, date=(date(t0.year, t0.month, t0.day), date(tf.year, tf.month, tf.day)), platformname='Sentinel-2', cloudcoverpercentage=(0, 20)) # download all results from the search #s2aIn = '/home/delgado/Documents/tmp' # in case you are just testing api.download_all(products, directory_path=s2aIn)
def _query(self, doy, year, polygon=None):
    """Query the science hub for SY_2_SYN products on a given day-of-year.

    Products are filtered by latitude bounds, then by land-cover and
    cloud-cover metadata.

    :param doy: day of year (zero-padded string or int parseable by %j).
    :param year: four-digit year.
    :param polygon: optional geojson path restricting the search area.
    :return: dict mapping product id -> full OData metadata for keepers.
    """
    date = dt.datetime.strptime(f"{year}{doy}", "%Y%j")
    date0 = date.strftime("%Y%m%d")
    date1 = (date + dt.timedelta(days=1)).strftime("%Y%m%d")
    # BUG FIX: removed a leftover `import pdb; pdb.set_trace()` debug breakpoint.
    if polygon is not None:
        footprint = geojson_to_wkt(read_geojson(polygon))
        products = self.api.query(area=footprint,
                                  date=(date0, date1),
                                  producttype='SY_2_SYN___')
    else:
        products = self.api.query(area=None,
                                  date=(date0, date1),
                                  producttype='SY_2_SYN___')
    # Keep only granules inside the configured latitude band.
    selected_products = {
        k: product
        for k, product in products.items()
        if check_bounds(product['footprint'], self.dload_options.max_lat,
                        self.dload_options.min_lat)
    }
    keep = {}
    for p in selected_products.keys():
        meta = self.api.get_product_odata(p, full=True)
        try:
            # get extra info eg landcover percentage
            # CONDITIONS
            # 1. Landcover greater than 15 -- too high?
            lc = meta["Land Cover Percentage (%)"]
            cond1 = lc > self.dload_options.landcover_keep
            # 2. Cloud less than 90%?
            cloud = meta['Cloud Cover Percentage (%)']
            cond2 = cloud < self.dload_options.cloud_throw
            if cond1 and cond2:
                keep[p] = meta
        except KeyError:
            # Narrowed from a bare except: products lacking land/cloud-cover
            # metadata are simply skipped.
            pass
    LOG.info(f"Number of suitable granules {len(keep)}")
    return keep
def checkForNewProducts(taskid):
    """Re-query SciHub for a task whose observation window is still open and register new products.

    Relies on module-level ``database`` and ``api``.
    """
    task = database.getTask(taskid)
    TASK_DIR = task["filepath"]
    #print(task)
    roi = os.path.join(TASK_DIR, "roi.geojson")
    feature = read_geojson(roi)
    footprint = geojson_to_wkt(feature.geometry)
    #print(footprint)
    begin = datetime.datetime.strptime(task["observation_begin"], "%Y-%m-%d")
    end = datetime.datetime.strptime(task["observation_end"], "%Y-%m-%d")
    # Only tasks whose observation period has not yet ended are refreshed.
    if end > datetime.datetime.today():
        #print("query for task: %s" % taskid)
        products = api.query(footprint,
                             date=(begin, datetime.date.today()),
                             platformname='Sentinel-2',
                             cloudcoverpercentage=(0, 30))
        #print(taskid)
        newProductCount = database.updateproducts(taskid, products)
        # Flag the task when it still has products awaiting processing.
        if len(database.getPendingProduts(taskid=taskid)) > 0:
            database.setStatus(taskid=taskid, status="waiting")
def __init__(self, username, password, geo_json, platform_name='Sentinel-2',
             processinglevel='Level-2A', date_s='NOW-3DAYS', date_e='NOW',
             cloud=(0, 5)):
    """
    Parameters
    ----------
    username : String
        Copernicus Scihub username.
    password : String
        Copernicus Scihub password.
    geo_json : String
        Path to the AOI geojson file (converted to WKT, stored on self.json).
    platform_name : String, optional
        Mission to query. The default is 'Sentinel-2'.
    processinglevel : String, optional
        Product processing level. The default is 'Level-2A'.
    date_s : String, optional
        Start of the sensing window. The default is 'NOW-3DAYS'.
    date_e : String, optional
        End of the sensing window. The default is 'NOW'.
    cloud : Tuple, optional
        (min, max) cloud-cover percentage filter. The default is (0, 5).
    """
    self.platform_name = platform_name
    self.processinglevel = processinglevel
    self.date_s = date_s
    self.date_e = date_e
    self.cloud = cloud
    # Footprint WKT derived from the AOI geojson.
    self.json = geojson_to_wkt(read_geojson(geo_json))
    self.api = SentinelAPI(username, password)
    # The pipeline starts immediately on construction.
    self.run()
def download_metadata(db_user, db_pass, api_user, api_pass,
                      table_name='s2_metadata', schema='<SCHEMA>',
                      database='<DATABASE>', platformname='Sentinel-2',
                      aoi='<GEOJSON OF AREA>'):
    """Query all products since 2015-06-23 over the AOI and mirror their metadata into a DB table.

    The resulting table gets a primary key on `index` and an extra
    `thumb_loc` text column.

    NOTE(review): table/schema names are interpolated into raw SQL via
    str.format — safe only while they come from trusted configuration.
    """
    api = get_api(api_user, api_pass)
    engine = init_db(db_user, db_pass, database)
    footprint = geojson_to_wkt(read_geojson(aoi))
    # (0, 100) cloud cover effectively disables the cloud filter.
    products = api.query(footprint,
                         date=('20150623', 'NOW'),
                         platformname=platformname,
                         cloudcoverpercentage=(0, 100))
    products_df = api.to_dataframe(products)
    products_df.to_sql(table_name, engine, schema=schema, if_exists='replace')
    engine.execute('alter table {1}.{0} add primary key(index)'.format(
        table_name, schema))
    engine.execute('alter table {1}.{0} add column thumb_loc text'.format(
        table_name, schema))
def from_geojson_to_list_coords(filename):
    """Parse a geojson file and return the coordinate list of its single polygon feature."""
    geo_json_roi = read_geojson(filename)
    # Only single-feature FeatureCollections of Polygons are supported.
    if geo_json_roi["type"] != 'FeatureCollection':
        raise RuntimeError("Unknown Geojson type")
    features = geo_json_roi["features"]
    assert len(features) == 1, "The number of features must be 1"
    geometry = features[0]["geometry"]
    assert geometry["type"] == "Polygon", "Feature types other than polygons is not yet possible"
    return geometry["coordinates"][0]
def satquery(geojson, date_from=None, date_to=None, platform='Sentinel-2',
             cloud_cover_percentage=95):
    """
    Query products with given properties.

    :param geojson: str
        The geojson file path for footprint.
    :param date_from: datetime, optional
    :param date_to: datetime, optional
        The time interval filter based on the Sensing Date of the products
    :param platform: string
        'Sentinel-1' or 'Sentinel-2'
    :param cloud_cover_percentage:
        Maximum cloud coverage percentage. Hundred percent cloud cover means
        no clear sky is visible. Default is 95%. Applied to Sentinel-2 only.
    :return: Pandas DataFrame,
        Return the products from a query response as a Pandas DataFrame
        with the values in their appropriate Python types, sorted by
        sensing start time.
    """
    api = SentinelAPI(USERNAME, PASSWORD, 'https://scihub.copernicus.eu/dhus')
    footprint = geojson_to_wkt(read_geojson(geojson), decimals=6)
    kwargs = dict()
    kwargs['platformname'] = platform
    if platform == 'Sentinel-1':
        # Level-1 Ground Range Detected (GRD) products
        kwargs['producttype'] = 'GRD'
    elif platform == 'Sentinel-2':
        kwargs['cloudcoverpercentage'] = (0, cloud_cover_percentage)
    # 'Contains': only products that fully cover the AOI are returned.
    products = api.query(footprint,
                         date=(date_from, date_to),
                         area_relation='Contains',
                         **kwargs)
    df = api.to_dataframe(products)
    return df.sort_values(by='beginposition')
def open_hub(start, end, path):
    """List GRD products fully containing the study area and optionally download them.

    The orbit direction is inferred from `path` ("A" in the path selects
    ASCENDING, otherwise DESCENDING); downloads go to ``path + '1-row_data/'``.
    Relies on a module-level ``api`` client.
    """
    footprint = geojson_to_wkt(read_geojson('./study_area.geojson'))
    data = api.query(area=footprint,
                     date=(start, end),
                     producttype='GRD',
                     area_relation='Contains')
    data_df = api.to_dataframe(data)
    # Select the orbit direction implied by the output path.
    if "A" in path:
        data_df_o = data_df[data_df['orbitdirection'] == 'ASCENDING']
    else:
        data_df_o = data_df[data_df['orbitdirection'] == 'DESCENDING']
    data_df_D_sort = data_df_o.sort_values(by='summary').reset_index(drop=True)
    print(data_df_D_sort[['summary', 'orbitdirection']])
    inp = input('Do you want to download? [y/n] ')
    if inp in ['y', 'ye', 'yes']:
        # BUG FIX: DataFrame.as_matrix() was removed in pandas 1.0;
        # to_numpy() is the supported replacement.
        uuids = data_df_D_sort[['uuid']].to_numpy()
    else:
        sys.exit()
    for uuid in uuids:
        api.download(uuid[0], directory_path=path + '1-row_data/')
def getMetaData(self, year, month, day):
    """Return the GeoJSON feature of the Sentinel-1 GRD product whose filename matches `fn`.

    Queries a one-day window starting at the given date over the goldmine AOI.
    NOTE(review): Python 2 syntax (print statement). `fn` is not defined in
    this method or its parameters — presumably a module/class-level name;
    confirm before relying on it.
    """
    try:
        _date = datetime(int(year), int(month), int(day))
        api = SentinelAPI('USERNAME', 'PASSWORD',
                          'https://scihub.copernicus.eu/dhus')
        footprint = geojson_to_wkt(read_geojson('goldmine_sa.geojson'))
        products = api.query(footprint,
                             date=(_date.strftime('%Y%m%d'),
                                   (_date + timedelta(days=1)).strftime('%Y%m%d')),
                             platformname='Sentinel-1',
                             producttype='GRD')
        fc = api.to_geojson(products)
        features = fc['features']
        for items in features:
            # json.dumps adds quotes around the filename; strip them to compare.
            if json.dumps(items['properties']['filename']).replace(
                    '"', '') == fn:
                metaData = items
        return metaData
    except:
        # NOTE(review): bare except hides all failures, including a NameError
        # on `metaData` when nothing matched — consider narrowing.
        print 'couldnt retreive metadata'
        exit()
def __init__(self, configFile = './config.ini', ignoreStart=False):
    """Initialize the server: sync the geojson list into MySQL, build footprints, reset workdir, connect to SciHub.

    NOTE(review): DB credentials are hard-coded and the workdir is wiped via
    `os.system("rm -rf ...")` — confirm both are acceptable in deployment.
    """
    print("Server initializing...")
    print("Checking for geoJSON files")
    self.geoJSONList = []
    # Connect MYSQL
    db = mysql.connector.connect(host="localhost", user="******",
                                 passwd="ps4", database="ps4")
    cursor = db.cursor()
    # The Localisation table mirrors the geojson files found on disk.
    cursor.execute("TRUNCATE TABLE Localisation")
    sql = "INSERT INTO Localisation(localisation) VALUES (%s)"
    for file in glob.glob("*.geojson"):
        self.geoJSONList.append(file)
        cursor.execute(sql, (file,))
    db.commit()
    print("Inserted succcesfully")
    # connect with Mongo
    # Mongo doesn't take into consideration multiple geoJSONs
    self.footprints = [geojson_to_wkt(read_geojson(footprintPath))
                       for footprintPath in self.geoJSONList]
    print("This is the list of detected geoJSONs, the database has been updated. Obsolete images with non-existing geoJSONs won't be show in the interface.")
    print(self.geoJSONList)
    print("Resetting workdir")
    os.system("rm -rf " + os.getcwd() + "/workdir")
    os.makedirs(os.getcwd() + '/workdir')
    self.configParsing(configFile)
    self.api = SentinelAPI(self.login, self.password, self.link)
    self.ignoreStart = ignoreStart
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt # ------------------------------------------- # necessary information: user = '******' password = '******' # YYYYMMDD start_date = '20150101' end_date = '20180207' # map.geojson with boundary coordinates # just generate and save as "map.geojson" using: --- http://geojson.io --- geojson_path = 'directory\\to\\the\\file\\map.geojson' # where to save the data save_path = 'directory\\to\\the\\save_folder' # ------------------------------------------- # connect to the API / SentinelHub api = SentinelAPI(user, password, 'https://scihub.copernicus.eu/dhus', show_progressbars=True) footprint = geojson_to_wkt(read_geojson(geojson_path)) products = api.query(footprint,date=(start_date, end_date), platformname='Sentinel-2', producttype='S2MSI2Ap') print 'Number of images: {}'.format(len(products)) api.download_all(products, save_path)
def test_get_coordinates():
    """map.geojson and map_z.geojson (with z-values) convert to the same WKT polygon."""
    expected_wkt = (
        'POLYGON ((-66.2695312 -8.0592296, -66.2695312 0.7031074, '
        '-57.3046875 0.7031074, -57.3046875 -8.0592296, -66.2695312 -8.0592296))'
    )
    for name in ('tests/map.geojson', 'tests/map_z.geojson'):
        assert geojson_to_wkt(read_geojson(name)) == expected_wkt
def test_boundaries_longitude_less(fixture_path):
    """Out-of-range longitudes must be rejected with ValueError."""
    path = fixture_path('map_boundaries_lon.geojson')
    with pytest.raises(ValueError):
        geojson_to_wkt(read_geojson(path))
def test_get_coordinates(fixture_path):
    """All map fixtures (plain, z-coordinate, nested) yield the identical WKT polygon."""
    expected = ('POLYGON((-66.2695 -8.0592,-66.2695 0.7031,'
                '-57.3047 0.7031,-57.3047 -8.0592,-66.2695 -8.0592))')
    for fixture in ('map.geojson', 'map_z.geojson', 'map_nested.geojson'):
        assert geojson_to_wkt(read_geojson(fixture_path(fixture))) == expected
def test_wkt(geojson_path):
    """Helper: load the geojson at `geojson_path` and return its WKT representation."""
    parsed = read_geojson(geojson_path)
    return geojson_to_wkt(parsed)
def test_get_coordinates():
    """map.geojson and map_z.geojson produce the same rounded WKT polygon."""
    expected = ('POLYGON((-66.2695 -8.0592,-66.2695 0.7031,'
                '-57.3047 0.7031,-57.3047 -8.0592,-66.2695 -8.0592))')
    for name in ('/map.geojson', '/map_z.geojson'):
        assert geojson_to_wkt(read_geojson(FIXTURES_DIR + name)) == expected
"""Download Sentinel2 images corresponding to the map.geojson file. Images are downloaded in the folder OUTPUT_FOLDER """ from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt from datetime import date import os import zipfile OUTPUT_FOLDER = 'D:\\NewData' api = SentinelAPI('alpha_03', 'R9846om157', 'https://scihub.copernicus.eu/dhus') footprint = geojson_to_wkt(read_geojson('map.geojson')) products = api.query(footprint, platformname='Sentinel-2', date=("20190501", '20191030'), producttype='S2MSI1C', cloudcoverpercentage=(0, 20)) print(len(products)) if not os.path.exists(OUTPUT_FOLDER): os.makedirs(OUTPUT_FOLDER) api.download_all(products, OUTPUT_FOLDER) ''' for path in os.listdir(OUTPUT_FOLDER): if os.path.splitext(path)[1] == '.zip': with zipfile.ZipFile(path, 'r') as zip_ref:
def test_boundaries_latitude_more(fixture_path):
    """Latitudes beyond the valid range must be rejected with ValueError."""
    bad_fixture = fixture_path('map_boundaries_lat.geojson')
    with pytest.raises(ValueError):
        geojson_to_wkt(read_geojson(bad_fixture))