def download_all(dic_download: dict, sent: int, output_path: str, opt: str) -> list:
    """
    Download every image listed in dic_download.

    Args:
        dic_download: a dict mapping each image name to the date of the image
        sent: int, 1 or 2, corresponds to the sensor (Sentinel-1 or Sentinel-2)
        output_path: string, path to the directory where the zip images are downloaded
        opt: string, download option forwarded to download_url
    Returns:
        a list of strings, the zip download URL of each image
    """
    l_url = []
    for image_name, date in dic_download.items():
        print(image_name)
        # Search a two-day window centred on the image date
        dict_param = {"startDate": next_string_date(date, -1),
                      "completionDate": next_string_date(date, 1)}
        zip_url = get_download_zip_url(image_name, dict_param, sent)
        l_url += [zip_url]
        download_url(zip_url, output_path, opt)
    return l_url
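
# A hypothetical usage sketch of download_all, reusing the Sentinel-1 test
# image from main() below. The output directory and the empty opt string
# (forwarded to download_url) are assumptions, not part of this module.
def example_download_all():
    dic = {"S1B_IW_GRDH_1SSH_20200322T220406_20200322T220435_020810_027767_839F": "2020-03-22"}
    urls = download_all(dic, sent=1, output_path="./downloads/", opt="")
    print(urls)
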
def mainRoutine():
    """
    Main routine
    """
    cmdargs = getCmdargs()
    urlOpener = saraclient.makeUrlOpener(proxy=cmdargs.proxy)

    numTests = 0
    countPassed = 0

    # Test Sentinel-1
    ok = testSearch(urlOpener, 1, "2017-01-08")
    numTests += 1
    if ok:
        countPassed += 1

    # Test Sentinel-2
    ok = testSearch(urlOpener, 2, "2017-01-05")
    numTests += 1
    if ok:
        countPassed += 1

    # Test Sentinel-3
    ok = testSearch(urlOpener, 3, "2017-01-08")
    numTests += 1
    if ok:
        countPassed += 1

    print("\n\nPassed {} tests of {}".format(countPassed, numTests))
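
# For reference, a minimal sketch of what a testSearch helper compatible
# with the routine above might look like. This is an assumption: the real
# helper is not shown in this section, and it may apply stricter checks
# than "the one-day query returned at least one feature".
def testSearch(urlOpener, sentinelNumber, dateStr):
    paramList = ["startDate={}".format(dateStr),
                 "completionDate={}".format(dateStr)]
    results = saraclient.searchSara(urlOpener, sentinelNumber, paramList)
    return len(results) > 0
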
def main():
    # Note: this test image is a Sentinel-1 product (S1B_...), so sent=1.
    image_id_test = "S1B_IW_GRDH_1SSH_20200322T220406_20200322T220435_020810_027767_839F"
    # Additional Sentinel-1 filters could be used here, e.g.
    # {"productType": "GRD", "sensorMode": "IW", "instrument": "C-SAR"}
    dict_param = {"startDate": "2020-03-22", "completionDate": "2020-03-23"}
    zip_url = get_download_zip_url(image_id_test, dict_param, 1)
    output_path = "/root/code/sent2-cloud-remover/test_data/" + image_id_test + ".zip"
    download_url(zip_url, output_path)
def collection_info():
    """
    List the available collections and their products.
    """
    collections_url = "https://copernicus.nci.org.au/sara.server/1.0/collections.json"
    url_opener = saraclient.makeUrlOpener()
    info, err = saraclient.readJsonUrl(url_opener, collections_url)
    if err is None:
        return info
    else:
        raise Exception(err)
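
# Hypothetical usage sketch: fetch the collections document and inspect
# it. No particular JSON layout is assumed beyond what the SARA endpoint
# returns; check the structure interactively before relying on keys.
def example_collection_info():
    info = collection_info()
    print(type(info))
    print(info)
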
def mainRoutine():
    """
    Main routine
    """
    cmdargs = getCmdargs()
    urlOpener = saraclient.makeUrlOpener(cmdargs.proxy)
    excludeSet = loadExcludeList(cmdargs.excludelist)

    if cmdargs.polygonfile is not None:
        geomList = readPolygonFile(cmdargs.polygonfile)
    else:
        geomList = [None]

    queryParamList = cmdargs.queryparam
    results = []
    # Loop over each polygon in the input polygonfile
    for geom in geomList:
        tmpParamList = copy.copy(queryParamList)
        if geom is not None:
            tmpParamList.append("geometry={}".format(geom.ExportToWkt()))
        tmpResults = saraclient.searchSara(urlOpener, cmdargs.sentinel, tmpParamList)
        results.extend(tmpResults)

    # Remove any duplicates from images which intersect multiple geometries in geomList
    tmpResults = []
    idSet = set()
    for r in results:
        esaid = saraclient.getFeatAttr(r, saraclient.FEATUREATTR_ESAID)
        if esaid not in idSet:
            idSet.add(esaid)
            tmpResults.append(r)
    results = tmpResults

    # Restrict further by dropping anything on the exclude list
    results = [f for f in results
               if saraclient.getFeatAttr(f, saraclient.FEATUREATTR_ESAID) not in excludeSet]

    if cmdargs.urllist is not None:
        writeUrllist(cmdargs.urllist, results)
    if cmdargs.curlscript is not None:
        writeCurlScript(cmdargs, results)
    if cmdargs.jsonfeaturesfile is not None:
        writeJsonFeatures(cmdargs.jsonfeaturesfile, results)
    if cmdargs.simplejsonfile is not None:
        writeSimpleJsonFile(cmdargs.simplejsonfile, results)
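
# A minimal sketch of the readPolygonFile helper assumed above, using
# GDAL/OGR; the real helper is not shown in this section. Each returned
# geometry supports ExportToWkt(), as mainRoutine requires.
def readPolygonFile(polygonfile):
    from osgeo import ogr
    ds = ogr.Open(polygonfile)
    layer = ds.GetLayer()
    # Clone each geometry so it outlives the dataset handle
    return [feat.GetGeometryRef().Clone() for feat in layer]
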
def query(collection, query_params, polygon_fname=None):
    """
    Submit the query to SARA and return a GeoJSON document.

    :param collection:
        A string containing the Collection as defined in SARA, e.g. "S2".
    :param query_params:
        A list containing additional query parameters to be used in
        querying SARA.
    :param polygon_fname:
        A string containing the full file pathname of an OGR compliant
        vector file. Ideally the vector file will contain a single polygon
        defining the Region Of Interest (ROI) to spatially constrain the
        search.
    """
    # Search by polygon, startDate, completionDate, collection, productType
    url_opener = saraclient.makeUrlOpener()
    params = copy.copy(query_params)

    if polygon_fname is not None:
        # Only deal with the first feature at this point in time
        with fiona.open(polygon_fname, 'r') as src:
            feature = src[0]
        geom = shape(feature['geometry'])
        params.append("geometry={}".format(geom.wkt))

    # The searchSara api requires 1, 2 or 3, not S1, S2 or S3
    sentinel_number = collection[1]
    results = saraclient.searchSara(url_opener, sentinel_number, params)

    json_doc = {
        "type": "FeatureCollection",
        "properties": {},
        "features": results
    }

    return json_doc
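
# Hypothetical usage sketch of query(): Sentinel-2 features for one week.
# The dates are placeholders; pass an OGR-readable vector file path as
# polygon_fname to spatially constrain the search.
def example_query():
    doc = query("S2", ["startDate=2020-03-16", "completionDate=2020-03-23"])
    print("{} features found".format(len(doc["features"])))
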
def get_download_zip_url(path_image_name, dict_param, sent=1):
    image_name = path_image_name.split("/")[-1]
    # Update the parameters with the sensor-specific defaults
    if sent == 1:
        dict_param.update(S1_OPTPARAM)
    if sent == 2:
        dict_param.update(S2_OPTPARAM)

    proxy = None
    urlOpener = makeUrlOpener(proxy)

    lparam = [transformfilter2query(key, value) for key, value in dict_param.items()]
    allfeatures = searchSara(urlOpener, sent, lparam)

    df = pd.DataFrame(allfeatures)
    df = reformat_dataframe(df)
    final_df = get_image_download_path(df, image_name)
    if final_df.shape[0] == 0:
        print("No image found with {}".format(lparam))
        return None
    return final_df.iloc[0]
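
# transformfilter2query is not shown in this section. Given that
# searchSara consumes "key=value" parameter strings elsewhere in this
# module (e.g. "geometry={}".format(...)), a minimal sketch might be:
def transformfilter2query(key, value):
    return "{}={}".format(key, value)
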