def delete_and_append(dataset_id, api_version, src_url, delete_where_clause=None):

    if api_version == 'staging':
        api_url = r'http://staging-api.globalforestwatch.org'
        token = util.get_token('gfw-rw-api-staging')

    elif api_version == 'prod':
        api_url = r'http://production-api.globalforestwatch.org'
        token = util.get_token('gfw-rw-api-prod')

    else:
        raise ValueError('unknown api_version: {}'.format(api_version))

    headers = {'Content-Type': 'application/json', 'Authorization': 'Bearer {0}'.format(token)}

    delete_url = r'{0}/query/{1}'.format(api_url, dataset_id)

    sql = "DELETE FROM index_{0}".format(dataset_id.replace('-', ''))

    if delete_where_clause:
        sql += ' ' + delete_where_clause

    qry_parms = {"sql": sql}

    logging.debug('starting delete request')
    logging.debug(qry_parms)

    r = requests.get(delete_url, headers=headers, params=qry_parms)

    logging.debug(r.status_code)
    logging.debug(r.json())

    # Status check temporarily disabled: the request will time out and return a 500,
    # but the delete still executes. Raul is making this an async request because it
    # takes a while to delete all the rows.
    # if r.status_code != 200:
    #     raise ValueError('request failed with status code {}'.format(r.status_code))

    dataset_url = r'{0}/dataset/{1}/concat'.format(api_url, dataset_id)

    payload = {"url": src_url,
               "provider": "csv",
               "legend": {"long": "long", "lat": "lat"}}

    logging.debug('starting concat')
    logging.debug(payload)

    r = requests.post(dataset_url, headers=headers, json=payload)
    status = r.status_code

    if status == 204:
        logging.debug('Request succeeded!')
    else:
        print r.text
        logging.debug(r.text)
        raise ValueError('Request failed with code: {}'.format(status))
def get_headers(api_version):

    if api_version == 'staging':
        api_url = r'http://staging-api.globalforestwatch.org'
        token = util.get_token('gfw-rw-api-staging')

    elif api_version == 'prod':
        api_url = r'http://production-api.globalforestwatch.org'
        token = util.get_token('gfw-rw-api-prod')

    else:
        raise ValueError('unknown api_version: {}'.format(api_version))

    headers = {'Content-Type': 'application/json', 'Authorization': 'Bearer {0}'.format(token)}

    return headers, api_url
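A minimal usage sketch (not part of the original scripts) showing how get_headers() can stand in for the token/URL block that delete_and_append() repeats inline; the dataset id and SQL below are hypothetical placeholders.

    headers, api_url = get_headers('staging')
    query_url = '{0}/query/{1}'.format(api_url, 'example-dataset-id')  # hypothetical dataset id
    r = requests.get(query_url, headers=headers, params={'sql': 'SELECT COUNT(*) FROM data'})  # hypothetical SQL
    logging.debug(r.json())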
def run_job(job_uid, job_type):
    """
    Used to kick off and monitor job progress
    :param job_uid: HOT OSM job uid
    :param job_type: 'reruns' kicks off the job again, 'runs' just monitors job progress
    :return: the parsed JSON output
    """
    auth_key = util.get_token('thomas.maschler@hot_export')
    headers = {"Content-Type": "application/json", "Authorization": "Token " + auth_key}

    url = "http://export.hotosm.org/api/{0}?job_uid={1}".format(job_type, job_uid)

    request = urllib2.Request(url)

    for key, value in headers.items():
        request.add_header(key, value)

    return json.load(urllib2.urlopen(request))
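A usage sketch based only on the docstring above (the job uid is a hypothetical placeholder): pass 'reruns' to kick the export off again, then 'runs' to check its progress.

    job_uid = '0000-placeholder-uid'        # hypothetical HOT OSM job uid
    run_job(job_uid, 'reruns')              # start the job again
    progress = run_job(job_uid, 'runs')     # check current progress
    print progress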
def set_processing_server_state(self, desired_state):

    token_info = util.get_token('boto.config')
    aws_access_key = token_info[0][1]
    aws_secret_key = token_info[1][1]

    ec2_conn = boto.ec2.connect_to_region('us-east-1',
                                          aws_access_key_id=aws_access_key,
                                          aws_secret_access_key=aws_secret_key)

    # Find the EC2 instance whose Name tag matches this processing server
    server_instance = None
    reservations = ec2_conn.get_all_reservations()

    for reservation in reservations:
        for instance in reservation.instances:
            if 'Name' in instance.tags and instance.tags['Name'] == self.server_name:
                server_instance = instance
                break

    if server_instance is None:
        raise ValueError('No EC2 instance found with Name tag {0}'.format(self.server_name))

    if server_instance.state != desired_state:
        print 'Current server state is {0}. Setting it to {1} now.'.format(server_instance.state, desired_state)

        if desired_state == 'running':
            server_instance.start()
        else:
            server_instance.stop()

    while server_instance.state != desired_state:
        print server_instance.state
        time.sleep(5)

        # Need to keep checking to get updated instance status
        server_instance.update()

    self.server_ip = server_instance.ip_address

    print 'Server {0} is now {1}, IP: {2}'.format(self.server_name, server_instance.state, self.server_ip)
    # (Tail of the clip-download function: stream the response to disk, then unzip)
    for data in tqdm(response.iter_content()):
        handle.write(data)

    # Unzip file
    zipped_item = zipfile.ZipFile('output/' + scene_id + '.zip')
    zipped_item.extractall('output/' + scene_id)

    # Delete zip file
    os.remove('output/' + scene_id + '.zip')

    print('Downloaded clips located in: output/')


if __name__ == '__main__':

    # Specify API key
    api_key = util.get_token('planet_api.json')['planet-api-key']

    endpoint = "https://api.planet.com/compute/ops/clips/v1/"

    # Set image ID
    scene_id = '20180716_175654_1006'

    # Set item type
    item_type = 'PSScene4Band'

    # Set asset type
    asset_type = 'analytic'

    # Path to GeoJSON defining the AOI
    aoi = r'data\clip_aoi.geojson'
    res = session.get(data_dict[image_id]['assets'])
    asset = res.json()

    activation_url = asset['visual']['_links']['activate']

    assets[image_id] = {}
    assets[image_id]['activation_url'] = activation_url

    # Request activation of the asset
    res_activate = session.get(activation_url)

    # Print the response from the activation request
    print(res_activate.status_code)
    status_code(res_activate.status_code)


if __name__ == '__main__':

    api_key = util.get_token('planet_api.json')["planet_api"]

    planet_data_url = "https://api.planet.com/data/v1"
    stats_url = f"{planet_data_url}/stats"
    quick_url = f"{planet_data_url}/quick-search"

    with open('aoi.json') as aoi_file:
        aoi = json.load(aoi_file)

    search_filter = create_filter(aoi, '2013')[2]

    session = authenticate(api_key, planet_data_url)

    data_dict = request_imagery_for_aoi(session, quick_url, search_filter, ["PSScene3Band", "REOrthoTile"])
def post_process(layerdef):
    """
    Create density maps for GFW Climate Visualization
    :param layerdef: the layerdef
    """
    logging.debug('starting postprocess glad maps')
    logging.debug(layerdef.source)

    # Start country page analytics (not map related)
    logging.debug("starting country page analytics")

    cmd = ['python', 'update_country_stats.py', '-d', 'umd_landsat_alerts', '-a', 'gadm2_boundary']
    cwd = r'D:\scripts\gfw-country-pages-analysis-2'

    if layerdef.gfw_env == 'DEV':
        cmd.append('--test')

    subprocess.check_call(cmd, cwd=cwd)

    # POST to kick off GLAD Alerts subscriptions now that we've updated the country-pages data
    api_token = util.get_token('gfw-rw-api-prod')
    headers = {'Content-Type': 'application/json', 'Authorization': 'Bearer {0}'.format(api_token)}
    url = r'https://production-api.globalforestwatch.org/subscriptions/notify-updates/glad-alerts'

    r = requests.post(url, headers=headers)
    logging.debug(r.text)

    olddata_hash = {}

    past_points = [r"D:\GIS Data\GFW\glad\past_points\borneo_day2016.shp",
                   r"D:\GIS Data\GFW\glad\past_points\peru_day2016.shp",
                   r"D:\GIS Data\GFW\glad\past_points\roc_day2016.shp",
                   r"D:\GIS Data\GFW\glad\past_points\brazil_day2016.shp"]

    latest_rasters = []
    new_points = []

    # Record the highest GRID_CODE already published for each region (cursor sorted descending)
    for point in past_points:
        point_name = os.path.basename(point)
        olddata_hash[point_name] = arcpy.SearchCursor(point, "", "", "", "GRID_CODE D").next().getValue("GRID_CODE")

    # Extract only the cells newer than what we've already mapped
    for ras in layerdef.source:
        if "FE" in ras:
            pass

        elif "day" in ras:
            ras_name = os.path.basename(ras)
            shp_name = ras_name.replace(".tif", ".shp")

            where_clause = "Value > " + str(olddata_hash[shp_name])
            raster_extract = ExtractByAttributes(ras, where_clause)

            # Raster.save() returns None, so build the output path first and track that
            latest_raster = os.path.join(r"D:\GIS Data\GFW\glad\latest_points", ras_name)
            raster_extract.save(latest_raster)
            latest_rasters.append(latest_raster)

            logging.debug("new values for %s extracted" % ras_name)

        else:
            pass

    # Convert the extracted rasters to points
    if latest_rasters:
        for ras in latest_rasters:
            ras_name = os.path.basename(ras).replace(".tif", ".shp")
            output = os.path.join(os.path.dirname(ras), ras_name)

            arcpy.RasterToPoint_conversion(ras, output, "Value")
            new_points.append(output)

            logging.debug("converted %s to points" % ras)

    # Overwrite the past_points shapefiles with the new points
    if new_points:
        for newp in new_points:
            for pastp in past_points:
                if os.path.basename(newp) == os.path.basename(pastp):
                    arcpy.Copy_management(newp, pastp)
                    logging.debug("copied %s to %s" % (newp, pastp))

    # Clip the borneo data to the Indonesia boundary
    if new_points:
        # Iterate over a copy, since the list is modified inside the loop
        for idnp in new_points[:]:
            if "borneo" in idnp:
                logging.debug('clipping indonesia data')

                clip = r"D:\GIS Data\GFW\glad\maps\clip\idn_clip.shp"
                name = "borneo_clip.shp"
                output = os.path.join(os.path.dirname(idnp), name)

                arcpy.Clip_analysis(idnp, clip, output)

                new_points.remove(idnp)
                new_points.insert(0, output)

                logging.debug(new_points)

    # Create kernel density rasters for each set of new points
    if new_points:
        for newp in new_points:
            outKDens = KernelDensity(newp, "NONE", "", "", "HECTARES")

            path = r"D:\GIS Data\GFW\glad\maps\density_rasters"
            name = os.path.basename(newp).replace(".shp", "")
            output = os.path.join(path, name + "_density.tif")

            outKDens.save(output)
            logging.debug("density layer created")

    # Export maps for each region that has new data
    if new_points:
        for layer in new_points:
            if "peru" in layer:
                logging.debug("creating map for peru")
                make_maps(peru_mxd)

            if "roc" in layer:
                logging.debug("creating map for roc")
                make_maps(roc_mxd)

            if "brazil" in layer:
                logging.debug("creating map for brazil")
                make_maps(brazil_mxd)

            if "borneo" in layer:
                logging.debug("creating map for borneo")
                make_maps(borneo_mxd)