def test_missing_api_key():
    """Verify that InvalidAPIKey is raised when the client has no API key."""
    no_key_client = api.Client(api_key=None)
    raised = False
    try:
        no_key_client._get('whatevs').get_body()
    except api.exceptions.InvalidAPIKey as err:
        raised = True
        assert str(err) == 'No API key provided'
    # The request must not have succeeded silently.
    assert raised
def fetch_lat_lon(image_dir, lat_lon_list, count = 1, scene_type = 'rapideye', product = 'analytic'): """ Download a single rapideye image of each coordinate in a list of coordinates. :param image_directory: The directory where the images should be downloaded to. :param lat_lon_list: A list of coordinate tuples in the form of (lat, lon) :param scene_type: The type of satellite that the images should be from ('ortho' or 'rapideye') :param product: The image type. 'analytic' or 'visual'. :return: """ points = [geojson.Point([lon, lat]) for lat, lon in lat_lon_list] intersects = [geojson.dumps(point) for point in points] sceneIDs = [] url = "https://api.planet.com/v0/scenes/rapideye/" for intersect in intersects: params = { "cloud_cover.estimated.lte": 0, "intersects":intersect, } data = requests.get(url, params=params, auth=(const.API_KEY, '')) scenes_data = data.json()["features"] if not scenes_data: print "No scenes available for these coordinates: ", intersect continue counter = 0 for scene in scenes_data: if counter == count: break sceneIDs.append(scene['id']) print scene['id'] counter += 1 print "Downloading scene list!" planet_client = api.Client(api_key = const.API_KEY) try: callback = api.write_to_file(image_dir) bodies = planet_client.fetch_scene_geotiffs(scene_ids = sceneIDs, product = product, scene_type=scene_type, callback=callback) for b in bodies: b.await() except Exception, e: print "Download failed: %s"%e
def setUp(self):
    # Create a fresh API client with a dummy key before each test case.
    self.client = api.Client(api_key='xyz')
def client():
    # Test fixture: build a client from the module-level `client_params`
    # mapping (defined elsewhere in this file -- not visible here).
    return api.Client(**client_params)
def client():
    # Test fixture: client with a hard-coded dummy API key.
    return api.Client('foobar')
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from _common import read_fixture
import json
import os
from planet import api
from requests_mock import Mocker

client = api.Client(api_key='xyz')


def test_list_mosaics():
    """list_mosaics returns the mocked fixture payload with HTTP 200."""
    with Mocker() as m:
        fixture_text = read_fixture('list-mosaics.json')
        mosaics_uri = os.path.join(client.base_url, 'mosaics/')
        m.get(mosaics_uri, text=fixture_text, status_code=200)
        result = client.list_mosaics()
        assert result.response.status_code == 200
        assert result.get() == json.loads(fixture_text)
def fetch_images(image_directory, scene_type = 'rapideye', product = 'analytic', filters = {}): """ :param image_directory: The directory where the original images should be saved to. :param scene_type: The satellite type that images should be fetched from. :param product: The type of imagery that will be downloaded. "analytic" is not pre-processed, "visual" is true-color. :param filters: Any filters for the search. :return: """ if not os.path.isdir(image_directory): raise OSError("The specified path " + image_directory + " does not point to a directory!") if not filters: start = datetime.datetime(year=2011, month=1, day=1, tzinfo=pytz.utc).isoformat() end = datetime.datetime(year=2015, month=12, day=1, tzinfo=pytz.utc).isoformat() filters = { # Your filters here, for example: # Get images with estimated 0% cloud cover "cloud_cover.estimated.lte": 0, "acquired.gte": start, "acquired.lte": end } next_url = "https://api.planet.com/v0/scenes/" + scene_type + "/?" + urllib.urlencode(filters) scene_data_pages = [] scene_IDs = [] scene_count = 0 print "Searching for %s images that comply with the given filters." % scene_type print "Scene List:" while next_url: # Note: you don't have to pass the filters in again here, # here, they will always be included in data.links.next r = requests.get(next_url, auth=(const.API_KEY, '')) r.raise_for_status() data = r.json() scenes_data = data["features"] scene_data_pages.append(scenes_data) for scene in scenes_data: scene_IDs.append(str(scene['id'])) print str(scene['id']) scene_count += len(scenes_data) next_url = data["links"].get("next", None) print '\n%s total results' % scene_count print "Downloading scene list!" 
planet_client = api.Client(api_key = const.API_KEY) callback = api.write_to_file(image_directory) bodies = planet_client.fetch_scene_geotiffs(scene_IDs, scene_type = scene_type, product = product, callback=callback) # await the completion of the asynchronous downloads, this is where # any exception handling should be performed for b in bodies: b.await()
aoi = """{ "type": "Polygon", "coordinates": [ [ [-122.54, 37.81], [-122.38, 37.84], [-122.35, 37.71], [-122.53, 37.70], [-122.54, 37.81] ] ] }""" # will pick up api_key via environment variable PL_API_KEY # but can be specified using `api_key` named argument client = api.Client() # collect all scenes here scenes = [] print 'loading scenes' # get `count` number of scenes, for this example, use 1 to verify paging scene = client.get_scenes_list(count=1) # we'll use 3 `pages` of results for s in scene.iter(pages=3): scenes.extend(s.get()['features']) assert len(scenes) == 3 ids = [f['id'] for f in scenes]