def search_dataset(self, aoi, cloud_ratio, date_start, date_stop, platform, full_objects=False):
    """
    Search datasets with different filters

    :param aoi: list of lat/lon coordinates describing an area of interest in EPSG:4326
    :param cloud_ratio: float between 0 and 1
    :param date_start: datetime object
    :param date_stop: datetime object
    :param platform: string
    :param full_objects: if True returns CatalogObject instances, otherwise simple dictionary structure
    :return: resultset as list of CatalogObject instances or dicts
    """
    geometry = wkt_dumps(Polygon(aoi))

    params = dict()
    params['clouds'] = int(100 * cloud_ratio)
    dates = dict()
    dates['start_date'] = date_start.strftime('%m/%d/%Y')
    dates['end_date'] = date_stop.strftime('%m/%d/%Y')
    params['daterange'] = [dates]
    params['sensors'] = [{'name': platform}]
    params['areas'] = [{'aoi': geometry}]

    results = list()
    try:
        logger.info('Searching datasets', extra=params)
        response = self.__post_resource__("catalog/search/result.json", params)
        # parsing of `response` into CatalogObject instances or plain dicts
        # (depending on full_objects) would populate `results` here
    except ApiException, e:
        logger.exception('An error occurred during dataset search [%s]' % str(params))
    return results
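# Hypothetical usage sketch for search_dataset(); `CatalogClient` and its
# constructor are assumptions -- the class owning this method is not shown above.
from datetime import datetime

client = CatalogClient()
# AOI ring in EPSG:4326, closed (first point repeated), as Polygon() expects
aoi = [(11.3, 48.0), (11.7, 48.0), (11.7, 48.3), (11.3, 48.3), (11.3, 48.0)]
results = client.search_dataset(aoi, cloud_ratio=0.2,
                                date_start=datetime(2016, 1, 1),
                                date_stop=datetime(2016, 3, 31),
                                platform='SENTINEL-2A')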
def find(self, platform, aoi, date_start, date_stop, cloud_ratio=1.0):
    session = requests.Session()
    session.auth = (None, None)
    session.stream = True
    headers = {'content-type': 'application/json'}

    poly = Polygon(aoi)
    geometry = wkt_dumps(poly)

    params = dict()
    params['clouds'] = int(100 * cloud_ratio)
    dates = dict()
    dates['start_date'] = date_start.strftime('%m/%d/%Y')
    dates['end_date'] = date_stop.strftime('%m/%d/%Y')
    params['daterange'] = [dates]
    params['sensors'] = [{'name': platform}]
    params['areas'] = [{'aoi': geometry}]

    # params holds only JSON-serializable types, so it can be passed to json= directly
    response = requests.post(self.url, json=params, auth=session.auth, headers=headers)

    datasets = set()
    if response.status_code == requests.codes.ok:
        result = response.json()['found_dataset']
        for r in result:
            ds = Catalog_Dataset()
            ds.entity_id = r['entity_id']
            ds.acq_time = r['acq_time']
            ds.sensor = r['sensor']
            ds.tile_identifier = r['tile_identifier']
            ds.clouds = r['clouds']
            ds.level = r['level']
            ds.daynight = r['daynight']
            datasets.add(ds)
    else:
        print response.text
    return datasets
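# Hypothetical usage; the wrapper class (`CatalogApi` here) and its url attribute
# are assumptions. find() returns a set of Catalog_Dataset objects.
from datetime import datetime

api = CatalogApi()
aoi = [(11.3, 48.0), (11.7, 48.0), (11.7, 48.3), (11.3, 48.3), (11.3, 48.0)]
for ds in api.find('SENTINEL2', aoi, datetime(2016, 6, 1), datetime(2016, 6, 30), cloud_ratio=0.5):
    print '%s acquired %s (%s%% clouds)' % (ds.entity_id, ds.acq_time, ds.clouds)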
def find(self, platforms, aoi, date_start, date_stop, cloud_ratio=0.2):
    url = self.url
    poly = Polygon(aoi)
    geometry = wkt_dumps(poly)
    params = {
        'api_key': self.api_key,
        'api_secret': self.api_secret,
        'cloud_coverage_lte': cloud_ratio,
        'acquired_gte': date_start.isoformat(),
        'acquired_lte': date_stop.isoformat(),
        'geometry_intersects': geometry,
        # e.g. 'sensor_platform': 'deimos-1,deimos-2,theia'
        'sensor_platform': ",".join(platforms)
    }
    result = requests.get(url, params=params)

    datasets = set()
    for r in result.json()['payload']:
        ds = Catalog_Dataset()
        ds.entity_id = r['owner_scene_id']
        ds.acq_time = r['acquired']
        ds.sensor = r['sensor_platform']
        # ds.tile_identifier = r['tile_identifier']
        ds.clouds = r['cloud_coverage']
        # ds.level = r['level']
        if int(ds.clouds) > 0:
            ds.daynight = 'day'
        else:
            ds.daynight = 'night'
        datasets.add(ds)
    return datasets
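# Hypothetical usage; note that `platforms` is a list joined into a single
# comma-separated sensor_platform filter. The wrapper class name is an assumption.
from datetime import datetime

client = ArchiveSearch()  # assumed wrapper exposing url, api_key, api_secret
aoi = [(11.3, 48.0), (11.7, 48.0), (11.7, 48.3), (11.3, 48.3), (11.3, 48.0)]
datasets = client.find(['deimos-1', 'deimos-2'], aoi,
                       datetime(2016, 4, 1), datetime(2016, 4, 30),
                       cloud_ratio=0.1)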
layers.append(import_shapefile(dir_out, "%s.shp" % k))

features = []
for layer, key in zip(layers, layer_map.keys()):
    print " Features: %i" % layer.featureCount()
    provider = layer.dataProvider()
    allAttrs = provider.attributeIndexes()
    # start data retrieval: fetch geometry and all attributes for each feature
    provider.select(allAttrs)
    feat = QgsFeature()
    while provider.nextFeature(feat):
        # fetch geometry
        geom = feat.geometry()
        print " Feature ID: %s" % feat.id()
        features.append(
            dict(wkt=wkt_dumps(wkb_loads(geom.asWkb())),
                 key=key,
                 desc=layer_map[key]))

print "Total features: %i" % len(features)
flush_and_transmit(features)
dstor_qgis()
remove_temp(dir_out)
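# Standalone sketch of the WKB -> WKT round-trip used in the loop above,
# assuming wkb_loads/wkt_dumps are shapely.wkb.loads and shapely.wkt.dumps.
from shapely.geometry import Point
from shapely.wkb import loads as wkb_loads
from shapely.wkt import dumps as wkt_dumps

wkb = Point(11.5, 48.1).wkb        # raw WKB bytes, like geom.asWkb()
print wkt_dumps(wkb_loads(wkb))    # prints the geometry back as WKT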
def find(self, provider, aoi, date_start, date_stop, clouds=None):
    session = requests.Session()
    session.auth = (self.user, self.pwd)
    session.stream = True

    acquisition_date = '(beginPosition:[%s TO %s])' % (
        date_start.strftime('%Y-%m-%dT%H:%M:%SZ'),
        date_stop.strftime('%Y-%m-%dT%H:%M:%SZ'))
    poly = Polygon(aoi)
    geometry = wkt_dumps(poly)
    query_area = ' AND (footprint:"Intersects(%s)")' % geometry
    query = ''.join([acquisition_date, query_area])

    response = requests.post(self.url, dict(q=query), auth=session.auth)
    assert response.status_code == requests.codes.ok, \
        'Connection to copernicus server went wrong [%d]. Please check %s.\n%s' % \
        (response.status_code, self.url, response.text)

    products = response.json()['feed']['entry']
    datasets = set()
    print products
    for p in products:
        ds = Catalog_Dataset()
        ds.entity_id = p['title']
        ds.acq_time = next(x for x in p["date"] if x["name"] == "beginposition")["content"]
        ds.sensor = next(x for x in p["str"] if x["name"] == "platformname")["content"]
        resource_url = next(x for x in p["link"] if len(x.keys()) == 1)["href"]
        if ds.sensor == 'Sentinel-2':
            # ds.tile_identifier = r['tile_identifier']
            ds.clouds = p['double']['content']
            ds.level = next(x for x in p["str"] if x["name"] == "processinglevel")["content"]
            daynight = 'day'
            if next(x for x in p["str"] if x["name"] == "orbitdirection")["content"] != 'DESCENDING':
                daynight = 'night'
            ds.daynight = daynight

            cop = CopernicusSciHubContainer()
            cop.http = resource_url
            container = cop.to_dict()

            s3 = S3PublicContainer()
            if remote_file_exists(SENTINEL_S3_HTTP_ZIP_BASEURL + ds.entity_id + '.zip'):
                s3.http = SENTINEL_S3_HTTP_ZIP_BASEURL + ds.entity_id + '.zip'
            if public_key_exists('sentinel-s2-l1c', 'zips/%s.zip' % ds.entity_id):
                s3.bucket = SENTINEL_S3_BUCKET
                s3.prefix = 'zips/%s.zip' % ds.entity_id
            if s3.http is not None or s3.bucket is not None:
                container.update(s3.to_dict())
            # print s3.to_dict()
            ds.container = container
        datasets.add(ds)
    return datasets
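# Hypothetical usage against the Copernicus SciHub endpoint; the wrapper class
# name is an assumption, and self.user/self.pwd must hold SciHub credentials.
from datetime import datetime

hub = SciHubSearch()
aoi = [(11.3, 48.0), (11.7, 48.0), (11.7, 48.3), (11.3, 48.3), (11.3, 48.0)]
datasets = hub.find('SciHub', aoi, datetime(2016, 6, 1), datetime(2016, 6, 7))
for ds in datasets:
    print ds.entity_id, ds.sensor, ds.daynight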
def find(self, provider, aoi, date_start, date_stop, cloud_ratio=0.2, black_fill=0.0):
    '''
    :param provider: string ("planetscope", "rapideye", "landsat", "sentinel")
    :param aoi: geojson polygon
    :param date_start: timestamp
    :param date_stop: timestamp
    :param cloud_ratio: number (0 - 1)
    :param black_fill: number (0 - 1)
    :return:
    '''
    session = requests.Session()
    session.auth = (self.authkey, '')

    poly = Polygon(aoi)
    geometry = wkt_dumps(poly)
    initial_filters = {
        'catalog::provider': provider,
        'geometry': geometry,
        'catalog::acquired': '[{start}:{end}]'.format(start=date_start.isoformat(),
                                                      end=date_stop.isoformat()),
        'catalog::cloud_cover': '[:{}]'.format(cloud_ratio),
        'catalog::black_fill': '[:{}]'.format(black_fill),
    }
    next_url = self.utm_25_url + '?' + urllib.urlencode(initial_filters)

    datasets = set()
    # Go through each page of results as long as there is a `next` URL returned
    while next_url:
        data = session.get(next_url)
        data.raise_for_status()
        scenes_data = data.json()

        # there will be one entry in 'features' per result
        for s in scenes_data['features']:
            ds = Dataset()
            ds.identifier = s['id']
            ds.uuid = uuid4()
            ds.time_created = s['properties']['catalog::acquired']
            # ds.extent = s['geometry']
            g1 = geojson.loads(ujson.dumps(s['geometry']))
            g2 = shape(g1)
            ds.extent = WKTElement(g2.wkt, srid=4326)
            ds.properties = s['properties']
            datasets.add(ds)
            # TODO add asset url for ordering, activating and downloading

        # Get the URL for the next page of results
        next_url = scenes_data['_links'].get('_next')
    return datasets
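# Hypothetical usage of the paginated search; the wrapper class name and its
# authkey/utm_25_url attributes are assumptions based on the code above.
from datetime import datetime

ps = ScenesApiSearch()
aoi = [(11.3, 48.0), (11.7, 48.0), (11.7, 48.3), (11.3, 48.3), (11.3, 48.0)]
scenes = ps.find('rapideye', aoi,
                 datetime(2016, 5, 1), datetime(2016, 5, 31),
                 cloud_ratio=0.1, black_fill=0.05)
print 'Found %i scenes' % len(scenes)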