def build_request(aoi_geom, start_date, stop_date, cloud_cover=100):
    """Build a Data API search request for PS imagery.

    Args:
        aoi_geom (geojson): area-of-interest geometry.
        start_date (datetime.datetime): earliest acquisition date (exclusive, gt).
        stop_date (datetime.datetime): latest acquisition date (exclusive, lt).
        cloud_cover (int): maximum accepted cloud cover (default 100, i.e. any).

    Returns:
        Request
    """
    search_filters = [
        filters.geom_filter(aoi_geom),
        filters.range_filter('cloud_cover', lte=cloud_cover),
        filters.date_range('acquired', gt=start_date),
        filters.date_range('acquired', lt=stop_date),
    ]
    query = filters.and_filter(*search_filters)

    # Skipping REScene because it is not orthorectified and
    # cannot be clipped.
    item_types = [
        'PSScene3Band',
        'PSScene4Band',
        'PSOrthoTile',
        'REOrthoTile',
    ]
    return filters.build_search_request(query, item_types)
def build_request(aoi_shape):
    """Build a PSScene4Band search request covering *aoi_shape*.

    NOTE(review): depends on module-level names ``y1/m1/d1``, ``y2/m2/d2``
    and ``cc`` being defined elsewhere in this file -- confirm before calling.
    """
    acquired_after = datetime.datetime(year=y1, month=m1, day=d1)
    acquired_before = datetime.datetime(year=y2, month=m2, day=d2)
    query = filters.and_filter(
        filters.geom_filter(sgeom.mapping(aoi_shape)),
        filters.range_filter('cloud_cover', lt=cc),
        filters.date_range('acquired', gt=acquired_after),
        filters.date_range('acquired', lt=acquired_before),
    )
    return filters.build_search_request(query, ["PSScene4Band"])
def create_regional_filter(geo_json_path, start_date, percent_cloud):
    '''Create the aggregated (AND) filter for a region of interest.

    :param geo_json_path: path to the GeoJSON file holding the geometry
    :type geo_json_path: str
    :param start_date: earliest acquisition date for the analysis
    :type start_date: datetime
    :param percent_cloud: exclude images with more than this cloud fraction
    :type percent_cloud: float, in [0, 1]
    :returns: the aggregated filter for the region of interest
    :rtype: dict

    .. note:: Assumes AND logic between the individual filters.
    '''
    # NOTE(review): `Filters` (capital F) is a different helper than the
    # `filters` module used below -- confirm it is imported in this file.
    geometry_filter = Filters.create_geometry_filter(geo_json_path)
    date_filter = filters.date_range('acquired', gte=start_date)
    cloud_filter = filters.range_filter('cloud_cover', lte=percent_cloud)
    regional_filter = filters.and_filter(geometry_filter, date_filter,
                                         cloud_filter)
    return regional_filter
def search(self, spatial, temporal=None, properties=None, limit=10, **kwargs):
    """Build a Planet quick-search request from STAC-style arguments and
    record it on ``self.manifest.searches``.

    Args:
        spatial: spatial extent, passed to STACQuery.
        temporal: optional (start, end) temporal extent.
        properties: optional {field: {equality: value}} property constraints.
        limit: maximum number of results requested (default 10).
        **kwargs: must contain 'subdatasets' -- the Planet item types.
    """
    stac_query = STACQuery(spatial, temporal)

    # Start with spatial as it's always required.
    planet_query = filters.geom_filter(stac_query.spatial)

    if temporal:
        temporal_query = filters.and_filter(
            filters.date_range('acquired', gt=stac_query.temporal[0]),
            filters.date_range('acquired', lt=stac_query.temporal[1]),
        )
        planet_query = filters.and_filter(planet_query, temporal_query)

    if properties:
        property_queries = []
        for (field_name, v) in properties.items():
            if field_name == 'eo:instrument':
                continue

            # Handle searching on the legacy extension.
            if field_name.startswith('legacy:'):
                # BUG FIX: was `.repace`, which raises AttributeError.
                field_name = field_name.replace('legacy:', '')

            equality = list(v)[0]
            args = {equality: v[equality]}

            planet_field = field_mappings[field_name]
            field_type = field_types[field_name]

            if field_type == str:
                # NOTE(review): `*args` unpacks the dict's KEYS (the equality
                # operator name), not the value -- preserved as-is, but this
                # looks suspect; confirm against filters.string_filter.
                property_queries.append(
                    filters.string_filter(planet_field, *args))
            elif field_type == float:
                property_queries.append(
                    filters.range_filter(planet_field, **args))

        planet_query = filters.and_filter(planet_query, *property_queries)

    planet_request = api.filters.build_search_request(
        planet_query, kwargs['subdatasets'])
    planet_request.update({'limit': limit})

    self.manifest.searches.append([self, planet_request])
def getPlanetPicture(fireDataSet):
    """Search Planet for pre-/post-fire PSOrthoTile imagery around each fire.

    :param fireDataSet: fire records consumed by the HF helper functions.
    :return: (imagePre, imagePost) -- currently always (None, None); the
        actual downloads are not implemented yet (TODO).
    """
    distance = 45
    # SECURITY(review): hard-coded API key committed to source -- move it to
    # an environment variable or configuration instead.
    # Hoisted out of the loop: one client serves all 100 searches.
    client = api.ClientV1(api_key="e262ca6835e64fa7b6975c558237e509")
    # Reject anything with more than 3% cloud cover.
    cloud_filter = filters.range_filter('cloud_cover', lte=0.03)
    item_types = ["PSOrthoTile"]

    for i in range(0, 100):
        geom = HF.getbox(i, distance, fireDataSet)
        geom_AOI = {"type": "Polygon", "coordinates": [geom]}

        pre_date = HF.getDatePre(fireDataSet, i)
        post_date = HF.getDatePost(fireDataSet, i)

        datePre_filter = filters.date_range('acquired', gte=pre_date[0],
                                            lte=pre_date[1])
        datePost_filter = filters.date_range('acquired', gte=post_date[0],
                                             lte=post_date[1])
        geom_filter = filters.geom_filter(geom_AOI)

        andPre_filter = filters.and_filter(datePre_filter, cloud_filter,
                                           geom_filter)
        andPost_filter = filters.and_filter(datePost_filter, cloud_filter,
                                            geom_filter)

        reqPre = filters.build_search_request(andPre_filter, item_types)
        reqPost = filters.build_search_request(andPost_filter, item_types)
        resPre = client.quick_search(reqPre)
        resPost = client.quick_search(reqPost)

        print("it should print something :")
        for item in resPre.items_iter(1):
            print(item['id'], item['properties']['item_type'])
        for item in resPost.items_iter(1):
            print(item['id'], item['properties']['item_type'])

    imagePre = None
    imagePost = None
    return imagePre, imagePost
def get_a_filter_cli_api(polygon_json, start_date, end_date, could_cover_thr):
    """Combine geometry, acquisition-date and cloud-cover filters with AND."""
    return filters.and_filter(
        filters.geom_filter(polygon_json),
        filters.date_range('acquired', gte=start_date, lte=end_date),
        filters.range_filter('cloud_cover', lte=could_cover_thr),
    )
def get_planet_items(api_key, aoi, start, end, cloud_cover):
    """
    Request imagery items from the planet API for the requested dates.

    Args:
        api_key (str): the valid api key
        aoi (geojson): the geometry of the alert
        start (datetime.date): the start of the request
        end (datetime.date): the end of the request
        cloud_cover (int): the cloud coverage tolerated

    Return:
        (list): items from the Planet API
    """
    query = filters.and_filter(
        filters.geom_filter(aoi),
        filters.range_filter("cloud_cover", lte=cloud_cover),
        filters.date_range("acquired", gt=start),
        filters.date_range("acquired", lt=end),
    )

    # Only the PSScene item type is requested here.
    request = filters.build_search_request(query, ["PSScene"])
    result = client(api_key).quick_search(request)

    # Flatten every result page into a single item list.
    items = []
    for page in result.iter(None):
        items.extend(page.get()["features"])

    return items
def get_a_filter_cli_api(polygon_json, start_date, end_date, could_cover_thr):
    '''Create an AND filter from a geometry, a date range and a cloud cover.

    :param polygon_json: a polygon in json format
    :param start_date: start date (inclusive -- gte)
    :param end_date: end date (inclusive -- lte)
    :param could_cover_thr: keep images whose cloud cover is <= this value
    :return: a combined filter (and filter)
    '''
    # Range operators supported by the API: gt / gte / lt / lte.
    geo_filter = filters.geom_filter(polygon_json)
    date_filter = filters.date_range('acquired', gte=start_date, lte=end_date)
    cloud_filter = filters.range_filter('cloud_cover', lte=could_cover_thr)
    return filters.and_filter(geo_filter, date_filter, cloud_filter)
def idl(**kwargs):
    """Search the Planet Data API and append matching item ids to a CSV.

    Keyword Args (all parsed from the CLI):
        infile: AOI geometry as a .geojson/.json/.kml file path.
        item: Planet item type to search.
        start / end: acquisition date bounds.
        asset: asset name used for the download-permission filter.
        cmin / cmax: cloud cover bounds (defaults 0 and 1).
        num: maximum number of assets to record (default 1000000).
        outfile: CSV path the item ids are appended to.
        ovp: minimum overlap percentage with the AOI (default 1).
        filters: extra 'string:...' / 'range:...' filter specs.

    NOTE(review): mutates module-level state (ovall, stbase, rbase, ar, far,
    tempsingle, client) -- confirm those are defined at module scope.
    """
    for key, value in kwargs.items():
        if key == 'infile' and value is not None:
            infile = value
            try:
                if infile.endswith('.geojson'):
                    with open(infile) as aoi:
                        aoi_resp = json.load(aoi)
                        for things in aoi_resp['features']:
                            ovall.append(things['geometry']['coordinates'])
                    if len(ovall) > 1:
                        aoi_geom = ovall
                    else:
                        # Unwrap nesting depending on how deep the
                        # coordinate list is.
                        if list_depth(ovall) == 0:
                            aoi_geom = ovall
                        elif list_depth(ovall) == 1:
                            aoi_geom = ovall[0]
                        elif list_depth(ovall) == 2:
                            aoi_geom = ovall[0][0]
                        else:
                            print('Please check GeoJSON: Could not parse coordinates')
                    # (removed a no-op `aoi_geom == aoi_geom` comparison)
                elif infile.endswith('.json'):
                    with open(infile) as aoi:
                        aoi_resp = json.load(aoi)
                        aoi_geom = aoi_resp['config'][0]['config']['coordinates']
                elif infile.endswith('.kml'):
                    getcoord = kml2coord(infile)
                    aoi_geom = getcoord
            except Exception as e:
                print('Could not parse geometry')
                print(e)
        if key == 'item' and value is not None:
            try:
                item = value
            except Exception as e:
                sys.exit(e)
        if key == 'start' and value is not None:
            try:
                start = value
                st = filters.date_range('acquired', gte=start)
            except Exception as e:  # BUG FIX: was misspelled 'Excpetion'
                sys.exit(e)
        if key == 'end' and value is not None:
            end = value
            ed = filters.date_range('acquired', lte=end)
        if key == 'asset' and value is not None:
            try:
                asset = value
            except Exception as e:
                sys.exit(e)
        if key == 'cmin':
            if value is None:
                cmin = 0
            else:
                try:
                    cmin = float(value)
                except Exception as e:
                    print(e)
        if key == 'cmax':
            if value is None:
                cmax = 1
            else:
                try:
                    cmax = float(value)
                except Exception as e:
                    print(e)
        if key == 'num':
            num = value if value is not None else 1000000
        if key == 'outfile' and value is not None:
            outfile = value
            try:
                # Truncate/validate the output file up front.
                open(outfile, 'w')
            except Exception as e:
                sys.exit(e)
        if key == 'ovp':
            ovp = int(value) if value is not None else 1
        if key == 'filters' and value is not None:
            for items in value:
                ftype = items.split(':')[0]
                if ftype == 'string':
                    try:
                        fname = items.split(':')[1]
                        fval = items.split(':')[2]
                        # stbase = {'config': [], 'field_name': [], 'type': 'StringInFilter'}
                        stbase['config'] = fval.split(',')
                        stbase['field_name'] = fname
                    except Exception as e:
                        print(e)
                elif ftype == 'range':
                    fname = items.split(':')[1]
                    fgt = items.split(':')[2]
                    flt = items.split(':')[3]
                    # rbase = {'config': {'gte': [], 'lte': []}, 'field_name': [], 'type': 'RangeFilter'}
                    rbase['config']['gte'] = int(fgt)
                    rbase['config']['lte'] = int(flt)
                    rbase['field_name'] = fname

    print('Running search for a maximum of: ' + str(num) + ' assets')
    l = 0
    [head, tail] = os.path.split(outfile)
    if len(ovall) > 1:
        temp = {"coordinates": [], "type": "MultiPolygon"}
        temp['coordinates'] = aoi_geom
    else:
        temp = tempsingle
        temp['coordinates'] = aoi_geom
    sgeom = filters.geom_filter(temp)
    aoi_shape = shape(temp)
    date_filter = filters.date_range('acquired', gte=start, lte=end)
    cloud_filter = filters.range_filter('cloud_cover', gte=cmin, lte=cmax)
    asset_filter = filters.permission_filter('assets.' + str(asset) + ':download')
    # Combine whichever of the optional string/range filters were populated.
    if len(rbase['field_name']) != 0 and len(stbase['field_name']) != 0:
        and_filter = filters.and_filter(date_filter, cloud_filter,
                                        asset_filter, sgeom, stbase, rbase)
    elif len(rbase['field_name']) == 0 and len(stbase['field_name']) != 0:
        and_filter = filters.and_filter(date_filter, cloud_filter,
                                        asset_filter, sgeom, stbase)
    elif len(rbase['field_name']) != 0 and len(stbase['field_name']) == 0:
        and_filter = filters.and_filter(date_filter, cloud_filter,
                                        asset_filter, sgeom, rbase)
    elif len(rbase['field_name']) == 0 and len(stbase['field_name']) == 0:
        and_filter = filters.and_filter(date_filter, cloud_filter,
                                        asset_filter, sgeom)
    item_types = [item]
    req = filters.build_search_request(and_filter, item_types)
    res = client.quick_search(req)
    for things in res.items_iter(1000000):  # A large number as max number to check against
        itemid = things['id']
        footprint = things["geometry"]
        s = shape(footprint)
        if item.startswith('SkySat'):
            # assumes SkySat footprints carry no epsg_code -- TODO confirm
            epsgcode = '3857'
        else:
            epsgcode = things['properties']['epsg_code']
        if aoi_shape.area > s.area:
            intersect = (s).intersection(aoi_shape)
        elif s.area >= aoi_shape.area:
            intersect = (aoi_shape).intersection(s)
        # Project to the item's CRS so areas are in metres.
        proj = partial(pyproj.transform,
                       pyproj.Proj(init='epsg:4326'),
                       pyproj.Proj(init='epsg:' + str(epsgcode)))
        if transform(proj, aoi_shape).area > transform(proj, s).area:
            if (transform(proj, intersect).area /
                    transform(proj, s).area * 100) >= ovp:
                ar.append(transform(proj, intersect).area / 1000000)
                far.append(transform(proj, s).area / 1000000)
                with open(outfile, 'a') as csvfile:
                    writer = csv.writer(csvfile, delimiter=',',
                                        lineterminator='\n')
                    writer.writerow([itemid])
        elif transform(proj, s).area > transform(proj, aoi_shape).area:
            if (transform(proj, intersect).area /
                    transform(proj, aoi_shape).area * 100) >= ovp:
                ar.append(transform(proj, intersect).area / 1000000)
                far.append(transform(proj, s).area / 1000000)
                with open(outfile, 'a') as csvfile:
                    writer = csv.writer(csvfile, delimiter=',',
                                        lineterminator='\n')
                    writer.writerow([itemid])
        if int(len(ar)) == int(num):
            break
    num_lines = sum(
        1 for line in open(os.path.join(head, tail.split('.')[0] + '.csv')))
    print('Total number of assets written to ' + str(
        os.path.join(head, tail.split('.')[0] + '.csv') + ' ===> ' +
        str(num_lines)))
    print('Total estimated cost to quota: ' +
          str("{:,}".format(round(sum(far)))) + ' sqkm')
    print('Total estimated cost to quota if clipped: ' +
          str("{:,}".format(round(sum(ar)))) + ' sqkm')
def filters(self):
    """Assemble the search filters from the dialog widgets.

    Returns:
        tuple: (server_filters, local_filters) -- filters whose field name is
        in LOCAL_FILTERS are applied client-side; the rest go to the API.
    """
    populated_filters = []

    start_qdate = None
    end_qdate = None
    start_date = None
    end_date = None
    if not self.startDateEdit.dateTime().isNull():
        start_qdate = self.startDateEdit.date()
        start_date = start_qdate.toString(Qt.ISODate)
    if not self.endDateEdit.dateTime().isNull():
        # Add a day so the chosen end date is inclusive.
        end_qdate = self.endDateEdit.date().addDays(1)
        end_date = end_qdate.toString(Qt.ISODate)
    if start_qdate and end_qdate:
        if start_qdate < end_qdate:
            date_filter = date_range("acquired", gte=start_date,
                                     lte=end_date)
            populated_filters.append(date_filter)
        else:
            self._show_message("Start date later than end date.",
                               level=Qgis.Warning, duration=10)
    elif start_date:
        populated_filters.append(date_range("acquired", gte=start_date))
    elif end_date:
        populated_filters.append(date_range("acquired", lte=end_date))

    # TODO: double check actual domain/range of sliders
    sliders = self.frameRangeSliders.findChildren(PlanetExplorerRangeSlider)
    for slider in sliders:
        slide_filter = None
        range_low, range_high = slider.range()
        if slider.filter_key == "cloud_cover":
            # The slider reports percent; the API expects a 0..1 fraction.
            range_low /= 100.0
            range_high /= 100.0
            slider_max = 1.0
        else:
            slider_max = slider.max
        # Only emit a filter for bounds the user actually moved.
        if range_low != slider.min and range_high != slider_max:
            slide_filter = range_filter(slider.filter_key,
                                        gte=range_low, lte=range_high)
        elif range_low != slider.min:
            slide_filter = range_filter(slider.filter_key, gte=range_low)
        elif range_high != slider_max:
            slide_filter = range_filter(slider.filter_key, lte=range_high)
        if slide_filter:
            populated_filters.append(slide_filter)

    s_ids = self.leStringIDs.text()
    if s_ids:
        ids_actual = []
        # BUG FIX: str.replace returns a new string; the result was discarded,
        # so ids containing spaces never matched the regexes.
        s_ids = s_ids.replace(" ", "")
        for s_id in s_ids.split(","):
            for text_chunk in s_id.split(":"):
                for pattern in self.id_regex:
                    if pattern.match(text_chunk):
                        ids_actual.append(text_chunk)
        if ids_actual:
            s_ids_list = ["id"]
            s_ids_list.extend(ids_actual)
            populated_filters.append(string_filter(*s_ids_list))
        else:
            self._show_message("No valid ID present",
                               level=Qgis.Warning, duration=10)

    instruments = []
    for chk in [self.chkPs2, self.chkPs2Sd, self.chkPsbSd]:
        if chk.isChecked():
            instruments.append(chk.text())
    if instruments:
        populated_filters.append(string_filter("instrument", *instruments))

    server_filters = []
    if self.chkBxCanDownload.isChecked():
        server_filters.append(permission_filter("assets:download"))

    # Ground_control can be 'true', 'false, or a numeric value
    # Safest to check for not 'false'
    if self.chkBxGroundControl.isChecked():
        server_filters.append(
            not_filter(string_filter("ground_control", "false")))

    server_filters.extend([
        f for f in populated_filters if f["field_name"] not in LOCAL_FILTERS
    ])
    local_filters = [
        f for f in populated_filters if f["field_name"] in LOCAL_FILTERS
    ]
    return server_filters, local_filters
[-122.45593070983887, 37.76060492968732]]] } # Get API key api_key = api.ClientV1().login('*****@*****.**', 'gertan20') # Create client client = api.ClientV1(api_key=api_key['api_key']) # Build a query using the AOI and a cloud_cover filter # that get images with lower than 10% cloud cover # and acquired on Nov 1st, 2017 query = filters.and_filter( filters.geom_filter(aoi), filters.range_filter('cloud_cover', lt=0.1), filters.date_range('acquired', gte='2017-11-01T00:00:00.000Z', lte='2017-11-01T23:59:00.000Z')) # Build a request for only PlanetScope imagery # Item types: # https://www.planet.com/docs/reference/data-api/items-assets request = filters.build_search_request(query, item_types=['PSScene4Band']) # Get results result = client.quick_search(request) # Setup auth session = requests.Session() session.auth = (api_key['api_key'], '') # Set asset type to download
def test_date_range(dt, expected):
    """date_range should build the expected filter from one keyword bound."""
    assert filters.date_range("acquired", **{"gte": dt}) == expected
} }), (filters.string_filter('ground_control', 'false'), { 'type': 'NotFilter', 'config': { 'type': 'StringInFilter', 'field_name': 'ground_control', 'config': ('false', ) } })]) def test_not_filter(predicate, expected): assert expected == filters.not_filter(predicate) @pytest.mark.parametrize('filt, expected', [(filters.and_filter( filters.date_range('published', lt='2019-08-29T13:20:37.776031Z'), filters.not_filter( filters.string_filter( 'id', '20190625_070754_20_105c', '20190324_150924_104b'))), { 'type': 'AndFilter', 'config': ({ 'field_name': 'published', 'type': 'DateRangeFilter', 'config': { 'lt': '2019-08-29T13:20:37.776031Z' } }, { 'type': 'NotFilter', 'config': { 'field_name':
import geopandas as gpd from datetime import datetime import concurrent.futures import pdb import clientSatAPIs.utils as ut import clientSatAPIs.planet_api as pa from planet import api from planet.api import filters # ***************** USING Planet PYTHON CLIENT ********************* client = api.ClientV1() start_date = datetime(year=2020, month=8, day=1) date_filter_cl = filters.date_range('acquired', gte=start_date) cloud_filter_cl = filters.range_filter('cloud_cover', lte=0.1) def quick_search_client(shapesList): items = [] for shape in shapesList: geo_filter = { "type": "GeometryFilter", "field_name": "geometry", "config": shape } # Setup an "AND" logical filter and_filter = filters.and_filter(date_filter_cl, cloud_filter_cl, geo_filter)
def filters(self):
    """Assemble the search filters from the dialog widgets into one list."""
    populated_filters = []

    start_date = None
    end_date = None
    start_datetime = None
    end_datetime = None
    if not self.startDateEdit.dateTime().isNull():
        start_datetime = self.startDateEdit.dateTime()
        start_date = start_datetime.toString(Qt.ISODate)
    if not self.endDateEdit.dateTime().isNull():
        end_datetime = self.endDateEdit.dateTime()
        end_date = end_datetime.toString(Qt.ISODate)
    if start_datetime and end_datetime:
        if start_datetime < end_datetime:
            date_filter = date_range('acquired', gte=start_date,
                                     lte=end_date)
            populated_filters.append(date_filter)
        else:
            self._show_message('Start date later than end date.',
                               level=Qgis.Warning, duration=10)
    elif start_date:
        populated_filters.append(date_range('acquired', gte=start_date))
    elif end_date:
        populated_filters.append(date_range('acquired', lte=end_date))

    # TODO: double check actual domain/range of sliders
    sliders = self.frameRangeSliders.findChildren(PlanetExplorerRangeSlider)
    for slider in sliders:
        slide_filter = None
        range_low, range_high = slider.range()
        if slider.filter_key == 'cloud_cover':
            # The slider reports percent; the API expects a 0..1 fraction.
            range_low /= 100.0
            range_high /= 100.0
            # BUG FIX: the scaled high bound was compared against the
            # UNscaled slider.max, so a full-range cloud_cover slider still
            # emitted a filter; compare against the scaled maximum instead.
            slider_max = 1.0
        else:
            slider_max = slider.max
        if range_low != slider.min and range_high != slider_max:
            slide_filter = range_filter(slider.filter_key,
                                        gte=range_low, lte=range_high)
        elif range_low != slider.min:
            slide_filter = range_filter(slider.filter_key, gte=range_low)
        elif range_high != slider_max:
            slide_filter = range_filter(slider.filter_key, lte=range_high)
        if slide_filter:
            populated_filters.append(slide_filter)

    s_ids = self.leStringIDs.text()
    if s_ids:
        ids_actual = []
        # BUG FIX: str.replace returns a new string; the result was discarded,
        # so ids containing spaces never matched the regexes.
        s_ids = s_ids.replace(" ", "")
        for s_id in s_ids.split(','):
            for text_chunk in s_id.split(':'):
                for pattern in self.id_regex:
                    if pattern.match(text_chunk):
                        ids_actual.append(text_chunk)
        if ids_actual:
            s_ids_list = ['id']
            s_ids_list.extend(ids_actual)
            populated_filters.append(string_filter(*s_ids_list))
        else:
            self._show_message('No valid ID present',
                               level=Qgis.Warning, duration=10)

    if self.chkBxCanDownload.isChecked():
        populated_filters.append(permission_filter('assets:download'))

    # Ground_control can be 'true', 'false, or a numeric value
    # Safest to check for not 'false'
    if self.chkBxGroundControl.isChecked():
        populated_filters.append(
            not_filter(string_filter('ground_control', 'false')))

    return populated_filters
def idl(**kwargs):
    """Search the Planet Data API and append matching item ids to a CSV.

    Improved variant of `idl`: repairs invalid AOI geometries, checks each
    item's download permissions and quality category, and uses
    pyproj.Transformer for the area computations.

    NOTE(review): mutates module-level state (ovall, temp, stbase, rbase,
    ar, far, client) -- confirm those are defined at module scope. The
    `try:` matching the KeyboardInterrupt handler is not visible in the
    mangled source; it is reconstructed here around the results loop --
    verify against the original file.
    """
    for key, value in kwargs.items():
        if key == "infile" and value is not None:
            infile = value
            try:
                if infile.endswith(".geojson"):
                    aoi_resp = multipoly(infile)
                    try:
                        for things in aoi_resp["features"]:
                            ovall.append(things["geometry"]["coordinates"])
                    except Exception:
                        for things in json.loads(aoi_resp)["features"]:
                            ovall.append(things["geometry"]["coordinates"])
                    aoi_geom = ovall
                    # Normalise the coordinate nesting into `temp`.
                    if (list_depth(ovall) == 2 and
                            json.loads(aoi_resp)["features"][0]["geometry"]
                            ["type"] == "MultiPolygon"):
                        temp["coordinates"] = aoi_geom[0]
                    elif (list_depth(ovall) == 2 and
                            json.loads(aoi_resp)["features"][0]["geometry"]
                            ["type"] != "MultiPolygon"):
                        aoi_geom = ovall[0][0]
                        temp["type"] = "Polygon"
                        temp["coordinates"] = aoi_geom
                    elif list_depth(ovall) == 1:
                        aoi_geom = ovall[0]
                        temp["type"] = "Polygon"
                        temp["coordinates"] = aoi_geom
                    elif list_depth(ovall) == 0:
                        aoi_geom = ovall
                        temp["type"] = "Polygon"
                        temp["coordinates"] = aoi_geom
                    else:
                        print("Please check GeoJSON: Could not parse coordinates")
                elif infile.endswith(".json"):
                    with open(infile) as aoi:
                        aoi_resp = json.load(aoi)
                        aoi_geom = aoi_resp["config"][0]["config"]["coordinates"]
                elif infile.endswith(".kml"):
                    getcoord = kml2coord(infile)
                    aoi_geom = getcoord["features"][0]["geometry"]["coordinates"]
            except Exception as e:
                print("Could not parse geometry")
                print(e)
        if key == "item" and value is not None:
            try:
                item = value
            except Exception as e:
                sys.exit(e)
        if key == "start" and value is not None:
            try:
                start = time2utc(value)
                st = filters.date_range("acquired", gte=start)
            except Exception as e:
                sys.exit(e)
        if key == "end" and value is not None:
            try:
                end = time2utc(value)
                ed = filters.date_range("acquired", lte=end)
            except Exception as e:
                sys.exit(e)
        if key == "asset" and value is not None:
            try:
                asset = value
            except Exception as e:
                sys.exit(e)
        if key == "cmin":
            if value is None:
                cmin = 0
            else:
                try:
                    cmin = float(value)
                except Exception as e:
                    print(e)
        if key == "cmax":
            if value is None:
                cmax = 1
            else:
                try:
                    cmax = float(value)
                except Exception as e:
                    print(e)
        if key == "num":
            num = value if value is not None else 1000000
        if key == "outfile" and value is not None:
            outfile = value
            try:
                # Truncate/validate the output file up front.
                open(outfile, "w")
            except Exception as e:
                sys.exit(e)
        if key == "ovp":
            ovp = int(value) if value is not None else 0.01
        if key == "filters" and value is not None:
            for items in value:
                ftype = items.split(":")[0]
                if ftype == "string":
                    try:
                        fname = items.split(":")[1]
                        fval = items.split(":")[2]
                        # stbase = {'config': [], 'field_name': [], 'type': 'StringInFilter'}
                        stbase["config"] = fval.split(",")
                        stbase["field_name"] = fname
                    except Exception as e:
                        print(e)
                elif ftype == "range":
                    fname = items.split(":")[1]
                    fgt = items.split(":")[2]
                    flt = items.split(":")[3]
                    # rbase = {'config': {'gte': [], 'lte': []}, 'field_name': [], 'type': 'RangeFilter'}
                    rbase["config"]["gte"] = int(fgt)
                    rbase["config"]["lte"] = int(flt)
                    rbase["field_name"] = fname

    print("Running search for a maximum of: " + str(num) + " assets")
    l = 0
    [head, tail] = os.path.split(outfile)
    sgeom = filters.geom_filter(temp)
    aoi_shape = shape(temp)
    if not aoi_shape.is_valid:
        # buffer(0) is the usual shapely trick to repair self-intersections.
        aoi_shape = aoi_shape.buffer(0)
    date_filter = filters.date_range("acquired", gte=start, lte=end)
    cloud_filter = filters.range_filter("cloud_cover", gte=cmin, lte=cmax)
    asset_filter = filters.permission_filter(
        "assets." + str(asset.split(",")[0]) + ":download")
    # Combine whichever of the optional string/range filters were populated.
    if len(rbase["field_name"]) != 0 and len(stbase["field_name"]) != 0:
        and_filter = filters.and_filter(date_filter, cloud_filter,
                                        asset_filter, sgeom, stbase, rbase)
    elif len(rbase["field_name"]) == 0 and len(stbase["field_name"]) != 0:
        and_filter = filters.and_filter(date_filter, cloud_filter,
                                        asset_filter, sgeom, stbase)
    elif len(rbase["field_name"]) != 0 and len(stbase["field_name"]) == 0:
        and_filter = filters.and_filter(date_filter, cloud_filter,
                                        asset_filter, sgeom, rbase)
    elif len(rbase["field_name"]) == 0 and len(stbase["field_name"]) == 0:
        and_filter = filters.and_filter(date_filter, cloud_filter,
                                        asset_filter, sgeom)
    item_types = [item]
    req = filters.build_search_request(and_filter, item_types)
    res = client.quick_search(req)
    try:
        for things in res.items_iter(1000000):  # A large number as max number to check against
            try:
                all_assets = [
                    assets.split(":")[0].replace("assets.", "")
                    for assets in things["_permissions"]
                ]
                if things["properties"]["quality_category"] == "standard" and all(
                        elem in all_assets for elem in asset.split(",")):
                    itemid = things["id"]
                    footprint = things["geometry"]
                    s = shape(footprint)
                    if item.startswith("SkySat"):
                        # assumes SkySat items lack epsg_code -- TODO confirm
                        epsgcode = "3857"
                    else:
                        epsgcode = things["properties"]["epsg_code"]
                    if aoi_shape.area > s.area:
                        intersect = (s).intersection(aoi_shape)
                    elif s.area >= aoi_shape.area:
                        intersect = (aoi_shape).intersection(s)
                    proj_transform = pyproj.Transformer.from_proj(
                        pyproj.Proj(4326), pyproj.Proj(epsgcode),
                        always_xy=True
                    ).transform  # always_xy determines correct coord order
                    print(
                        "Processing " + str(len(ar) + 1) +
                        " items with total area " +
                        str("{:,}".format(round(sum(far)))) + " sqkm",
                        end="\r",
                    )
                    if (transform(proj_transform, (aoi_shape)).area >
                            transform(proj_transform, s).area):
                        if (transform(proj_transform, intersect).area /
                                transform(proj_transform, s).area * 100) >= ovp:
                            ar.append(
                                transform(proj_transform, intersect).area /
                                1000000)
                            far.append(
                                transform(proj_transform, s).area / 1000000)
                            with open(outfile, "a") as csvfile:
                                writer = csv.writer(csvfile, delimiter=",",
                                                    lineterminator="\n")
                                writer.writerow([itemid])
                    elif (transform(proj_transform, s).area >= transform(
                            proj_transform, aoi_shape).area):
                        if (transform(proj_transform, intersect).area /
                                transform(proj_transform, aoi_shape).area *
                                100) >= ovp:
                            ar.append(
                                transform(proj_transform, intersect).area /
                                1000000)
                            far.append(
                                transform(proj_transform, s).area / 1000000)
                            with open(outfile, "a") as csvfile:
                                writer = csv.writer(csvfile, delimiter=",",
                                                    lineterminator="\n")
                                writer.writerow([itemid])
                    if int(len(ar)) == int(num):
                        break
            except Exception as e:
                # NOTE(review): silently skips items missing any field --
                # consider logging `e` instead of discarding it.
                pass
    except (KeyboardInterrupt, SystemExit) as e:
        print("\n" + "Program escaped by User")
        sys.exit()
    num_lines = sum(
        1 for line in open(os.path.join(head, tail.split(".")[0] + ".csv")))
    print("Total number of item ids written to " + str(
        os.path.join(head, tail.split(".")[0] + ".csv") + " ===> " +
        str(num_lines)))
    print("Total estimated cost to quota: " +
          str("{:,}".format(round(sum(far)))) + " sqkm")
    print("Total estimated cost to quota if clipped: " +
          str("{:,}".format(round(sum(ar)))) + " sqkm")
# Pick the AOI: the whole feature if it came from GeoJSON, otherwise just
# the bare geometry.
if geojson_exists:
    myAOI = AOIs[1]
else:
    myAOI = AOIs[1]["geometry"]

# build a query using the AOI and
# a cloud_cover filter that excludes 'cloud free' scenes
quakeDay = datetime.datetime(year, month, day)
old = datetime.datetime(year, month, day) - timedelta(weeks=26)
now = datetime.datetime(year, month, day) + timedelta(weeks=26)
query = filters.and_filter(
    filters.geom_filter(myAOI),
    filters.range_filter('cloud_cover', lt=50),
    #filters.date_range('acquired', gt=old)
    filters.date_range('acquired', gt=old, lt=now))

# build a request for only PlanetScope imagery
request = filters.build_search_request(query, item_types=[sat_use])

# if you don't have an API key configured, this will raise an exception
result = client.quick_search(request)

scenes = []
planet_map = {}
for item in result.items_iter(limit=3000):
    planet_map[item['id']] = item
    props = item['properties']
    props["id"] = item['id']
    props["geometry"] = item["geometry"]
    props["thumbnail"] = item["_links"]["thumbnail"]
    scenes.append(props)
from tqdm import tqdm import zipfile # some operation are slightly different in mac os and windows #Step1: # client information client = api.ClientV1(api_key="eb14409cd5544310ae7cdde4e8fe9e0f") # Set API key (this should to be an environment variable) api_key = 'eb14409cd5544310ae7cdde4e8fe9e0f' PLANET_API_KEY = os.getenv('PL_API_KEY') #Step2: #set filters #1:data filter (lte-> larger than or equel to) #set start date and end date start_date = datetime(year=2016, month=6, day=1) end_date = datetime(year=2016, month=7, day=1) date_filter = filters.date_range('acquired', gte=start_date, lte=end_date) #2 cloud filter cloud_filter = filters.range_filter('cloud_cover', lte=1.0) #3 geometry filter geometry_filter = { "type": "GeometryFilter", "field_name": "geometry", "config": { "type": "Polygon", "coordinates": [[[-118.13630282878874, 34.13666853390047], [-118.13481152057646, 34.13666853390047], [-118.13481152057646, 34.13785404192857], [-118.13630282878874, 34.13785404192857],