def search(dataset, node, aoi, start_date, end_date, longitude, latitude,
           distance, lower_left, upper_right, where, api_key, geojson):
    """Search a USGS dataset and print the matching scenes as JSON.

    :param dataset: dataset name to query
    :param node: catalog node (resolved via ``get_node`` when falsy)
    :param aoi: "-" to read a GeoJSON area-of-interest from stdin
    :param start_date, end_date: temporal bounds for the search
    :param longitude, latitude, distance: point-plus-radius spatial filter
    :param lower_left, upper_right: bounding-box corners as (lon, lat) pairs
    :param where: iterable of (field-name, value) filter pairs
    :param api_key: USGS API key
    :param geojson: when truthy, emit a GeoJSON FeatureCollection
    """
    node = get_node(dataset, node)

    if aoi == "-":
        # Read a GeoJSON document from stdin and derive the bounding box
        # of its first feature.
        src = click.open_file('-').readlines()
        aoi = json.loads(''.join(line.strip() for line in src))

        # BUG FIX: in Python 3 `map()` returns a lazy iterator that cannot
        # be indexed; build a list (only the first bbox is used).
        bbox = [get_bbox(feature) for feature in aoi.get('features')][0]
        lower_left = bbox[0:2]
        upper_right = bbox[2:4]

    if where:
        # Query the dataset fields endpoint for queryable fields, then
        # translate human-friendly field names into API field IDs.
        fields = api.dataset_fields(dataset, node)

        def format_fieldname(s):
            # Normalize a field name: keep alphanumerics, lowercase.
            return ''.join(c for c in s if c.isalnum()).lower()

        field_lut = {
            format_fieldname(field['name']): field['fieldId']
            for field in fields
        }
        # Silently drop filters whose names don't match a known field.
        where = {
            field_lut[format_fieldname(k)]: v
            for k, v in where
            if format_fieldname(k) in field_lut
        }

    if lower_left:
        # The API expects corner coordinates as {'longitude': ..., 'latitude': ...}.
        lower_left = dict(zip(['longitude', 'latitude'], lower_left))
        upper_right = dict(zip(['longitude', 'latitude'], upper_right))

    data = api.search(
        dataset, node,
        lat=latitude, lng=longitude, distance=distance,
        ll=lower_left, ur=upper_right,
        start_date=start_date, end_date=end_date,
        where=where, api_key=api_key)

    if geojson:
        # BUG FIX: a `map` object is not JSON-serializable in Python 3;
        # materialize the features into a list before dumping.
        features = [to_geojson_feature(item) for item in data]
        data = {
            'type': 'FeatureCollection',
            'features': features
        }

    print(json.dumps(data))
def create_snapshots():
    """ Run requests against USGS API for use in tests. """
    api_key = api.login(os.environ['USGS_USERNAME'], os.environ['USGS_PASSWORD'])

    # The same scene ID is exercised by several endpoints below.
    scene_ids = ["LC80810712017104LGN00"]

    # Dataset Fields
    write_response(
        api.dataset_fields("LANDSAT_8_C1", "EE", api_key=api_key),
        'dataset-fields.json')

    # Datasets
    write_response(api.datasets(None, "EE"), 'datasets.json')

    # Download
    write_response(
        api.download("LANDSAT_8_C1", "EE", scene_ids, product='STANDARD'),
        'download.json')

    # Download Options
    write_response(
        api.download_options("LANDSAT_8_C1", "EE", scene_ids),
        'download-options.json')

    # Metadata
    write_response(
        api.metadata("LANDSAT_8_C1", "EE", scene_ids),
        'metadata.json')

    # Search
    write_response(
        api.search("LANDSAT_8_C1", "EE",
                   start_date='20170401', end_date='20170402',
                   max_results=10),
        'search.json')

    api.logout(api_key)
def get_dataset_fields(dataset_list):
    """Code to look through available fields for datasets.

    For each (dataset, _) pair, fetches the dataset's queryable fields from
    the API, prints them, and reports when any of the desired filter fields
    is missing.
    """
    # Hoisted out of the loop: this list is constant and was previously
    # rebuilt on every iteration.
    desired_fields = ['agency - platform - vendor']

    for (dataset, _) in dataset_list:  # pylint: disable=W0612
        # Get the available filters for this data set
        print('-----> For DS = ' + dataset)
        result = api.dataset_fields(dataset, CATALOG)

        if not result or ('data' not in result):
            print('Failed to get dataset fields for ' + dataset)
            continue

        # Collect (and echo) the lowercased field names for this dataset.
        names = []
        for field in result['data']:
            print(field['name'])
            names.append(field['name'].lower())

        # BUG FIX: the original counted matching *fields*, so two fields
        # matching the same desired pattern could double-count and hide a
        # genuinely missing pattern. Count matched *patterns* instead.
        found_count = sum(
            1 for df in desired_fields if any(df in name for name in names)
        )

        if found_count < len(desired_fields):
            print('Did not find all desired filter fields!')
            continue
def test_dataset_fields():
    """Every returned field record must carry the expected metadata keys."""
    results = api.dataset_fields("LANDSAT_8", "EE")
    for item in results:
        for key in ["fieldId", "name", "valueList", "fieldLink"]:
            assert item.get(key) is not None
def search(dataset, node, aoi, start_date, end_date, longitude, latitude,
           distance, lower_left, upper_right, where, geojson, extended, api_key):
    """Search a USGS dataset and print the results (optionally as GeoJSON).

    :param aoi: "-" to read a GeoJSON area-of-interest from stdin; the AOI's
        bounding box overrides ``lower_left``/``upper_right``
    :param where: iterable of (field-name, value) filter pairs
    :param geojson: when truthy, convert the result via ``to_geojson``
    :param extended: request extended metadata from the API
    """
    node = get_node(dataset, node)

    if aoi == "-":
        src = click.open_file('-')
        # Only attempt to parse stdin when something was actually piped in.
        if not src.isatty():
            lines = src.readlines()
            if len(lines) > 0:
                aoi = json.loads(''.join(line.strip() for line in lines))

                # BUG FIX: in Python 3 `map()` yields a non-subscriptable
                # iterator; build a list and take the first bbox. A bare
                # geometry (no 'features') is wrapped so it still works.
                features = aoi.get('features') or [aoi]
                bbox = [get_bbox(feature) for feature in features][0]
                lower_left = bbox[0:2]
                upper_right = bbox[2:4]

    if where:
        # Query the dataset fields endpoint for queryable fields
        resp = api.dataset_fields(dataset, node)

        def format_fieldname(s):
            # Normalize a field name: keep alphanumerics, lowercase.
            return ''.join(c for c in s if c.isalnum()).lower()

        field_lut = {
            format_fieldname(field['name']): field['fieldId']
            for field in resp['data']
        }
        # Silently drop filters whose names don't match a known field.
        where = {
            field_lut[format_fieldname(k)]: v
            for k, v in where
            if format_fieldname(k) in field_lut
        }

    if lower_left:
        # The API expects corner coordinates as {'longitude': ..., 'latitude': ...}.
        lower_left = dict(zip(['longitude', 'latitude'], lower_left))
        upper_right = dict(zip(['longitude', 'latitude'], upper_right))

    result = api.search(
        dataset, node,
        lat=latitude, lng=longitude, distance=distance,
        ll=lower_left, ur=upper_right,
        start_date=start_date, end_date=end_date,
        where=where, extended=extended, api_key=api_key)

    if geojson:
        result = to_geojson(result)

    print(json.dumps(result))
def test_dataset_fields():
    """The response must be well-formed and each field record complete."""
    response = api.dataset_fields("LANDSAT_8_C1", "EE")
    assert check_root_keys(response)

    expected_keys = ("fieldId", "name", "valueList", "fieldLink")
    for item in response['data']:
        missing = [key for key in expected_keys if item.get(key) is None]
        assert not missing
def scene_search(dataset, aoi, max_results, metadata_type, start_date, end_date,
                 lower_left, upper_right, longitude, latitude, distance, where,
                 api_key):
    """Search scenes in a USGS dataset and print the result as JSON.

    :param aoi: "-" for stdin, otherwise a path to a GeoJSON file; its first
        feature's bounding box overrides ``lower_left``/``upper_right``
    :param where: iterable of (field-name, value) filter pairs
    """
    if aoi:
        src = click.open_file('-') if aoi == "-" else click.open_file(aoi)
        # Only attempt to parse when something was actually piped/opened.
        if not src.isatty():
            lines = src.readlines()
            if len(lines) > 0:
                aoi = json.loads(''.join(line.strip() for line in lines))
                bbox = [get_bbox(feature) for feature in aoi.get('features')][0]
                lower_left = bbox[0:2]
                upper_right = bbox[2:4]

    if where:
        # Query the dataset fields endpoint for queryable fields.
        # BUG FIX: `node` was referenced here but never defined (NameError);
        # the newer scene-search API is addressed by dataset alone, matching
        # the api.scene_search call below.
        resp = api.dataset_fields(dataset)

        def format_fieldname(s):
            # Normalize a field name: keep alphanumerics, lowercase.
            return ''.join(c for c in s if c.isalnum()).lower()

        field_lut = {
            format_fieldname(field['name']): field['fieldId']
            for field in resp['data']
        }
        where = {
            field_lut[format_fieldname(k)]: v
            for k, v in where
            if format_fieldname(k) in field_lut
        }

    if len(lower_left) > 0:
        # The API expects corner coordinates as {'longitude': ..., 'latitude': ...}.
        lower_left = dict(zip(['longitude', 'latitude'], lower_left))
        upper_right = dict(zip(['longitude', 'latitude'], upper_right))
    else:
        lower_left = None
        upper_right = None

    # BUG FIX: the `where` filter was computed above but never forwarded,
    # silently ignoring the user's filters.
    # NOTE(review): `api_key` is still accepted but unused here — confirm
    # whether api.scene_search should receive it.
    result = api.scene_search(
        dataset,
        max_results=max_results, metadata_type=metadata_type,
        start_date=start_date, end_date=end_date,
        ll=lower_left, ur=upper_right,
        lng=longitude, lat=latitude, distance=distance,
        where=where)

    print(json.dumps(result))
def search(dataset, node, aoi, start_date, end_date, longitude, latitude,
           distance, lower_left, upper_right, where, max_results, geojson,
           extended, api_key):
    """Search a USGS dataset and print the results (optionally as GeoJSON).

    :param aoi: "-" to read a GeoJSON area-of-interest from stdin; the AOI's
        bounding box overrides ``lower_left``/``upper_right``
    :param where: iterable of (field-name, value) filter pairs
    :param max_results: cap on the number of returned scenes
    :param geojson: when truthy, convert the result via ``to_geojson``
    :param extended: request extended metadata from the API
    """
    node = get_node(dataset, node)

    if aoi == "-":
        src = click.open_file('-')
        # Only attempt to parse stdin when something was actually piped in.
        if not src.isatty():
            lines = src.readlines()
            if len(lines) > 0:
                aoi = json.loads(''.join(line.strip() for line in lines))

                # BUG FIX: in Python 3 `map()` yields a non-subscriptable
                # iterator; build a list and take the first bbox. A bare
                # geometry (no 'features') is wrapped so it still works.
                features = aoi.get('features') or [aoi]
                bbox = [get_bbox(feature) for feature in features][0]
                lower_left = bbox[0:2]
                upper_right = bbox[2:4]

    if where:
        # Query the dataset fields endpoint for queryable fields
        resp = api.dataset_fields(dataset, node)

        def format_fieldname(s):
            # Normalize a field name: keep alphanumerics, lowercase.
            return ''.join(c for c in s if c.isalnum()).lower()

        field_lut = {
            format_fieldname(field['name']): field['fieldId']
            for field in resp['data']
        }
        # Silently drop filters whose names don't match a known field.
        where = {
            field_lut[format_fieldname(k)]: v
            for k, v in where
            if format_fieldname(k) in field_lut
        }

    if lower_left:
        # The API expects corner coordinates as {'longitude': ..., 'latitude': ...}.
        lower_left = dict(zip(['longitude', 'latitude'], lower_left))
        upper_right = dict(zip(['longitude', 'latitude'], upper_right))

    result = api.search(
        dataset, node,
        lat=latitude, lng=longitude, distance=distance,
        ll=lower_left, ur=upper_right,
        start_date=start_date, end_date=end_date,
        where=where, max_results=max_results,
        extended=extended, api_key=api_key)

    if geojson:
        result = to_geojson(result)

    print(json.dumps(result))
def dataset_fields(dataset, node):
    """Fetch the queryable fields for a dataset and print them as JSON."""
    resolved_node = get_node(dataset, node)
    fields = api.dataset_fields(dataset, resolved_node)
    print(json.dumps(fields))
def dataset_fields(dataset, node):
    """Fetch the queryable fields for a dataset and echo them as JSON."""
    resolved_node = get_node(dataset, node)
    fields = api.dataset_fields(dataset, resolved_node)
    click.echo(json.dumps(fields))