def __init__(self, cat_id, band_type="MS", node="toa_reflectance", **kwargs):
    """Build a CatalogImage for the given catalog id.

    Args:
        cat_id: GBDX catalog identifier of the image strip.
        band_type: band group to mosaic, e.g. "MS" (default) or "PAN".
        node: RDA graph node to render (default "toa_reflectance");
            forced to 'pansharpened' when pansharpen=True is passed.
        **kwargs: recognized keys are pansharpen (bool), acomp (bool),
            level (int), proj, and _ipe_graphs (a pre-built graph dict);
            everything is also forwarded to the IpeImage base __init__.
    """
    self.interface = Auth()
    self.vectors = Vectors()
    self._gid = cat_id
    self._band_type = band_type
    self._pansharpen = kwargs.get('pansharpen', False)
    self._acomp = kwargs.get('acomp', False)
    # Pansharpening renders the 'pansharpened' graph node instead of `node`.
    if self._pansharpen:
        self._node_id = 'pansharpened'
    else:
        self._node_id = node
    self._level = kwargs.get('level', 0)
    # BUG FIX: the original only set self._proj when 'proj' was passed, but
    # _init_graphs() reads self._proj unconditionally, raising AttributeError
    # for callers that omit it. Default to None so ortho_params(proj) gets an
    # explicit "no projection" value instead.
    self._proj = kwargs.get('proj', None)
    # Allow callers to inject a pre-built RDA graph; otherwise build one.
    if '_ipe_graphs' in kwargs:
        self._ipe_graphs = kwargs['_ipe_graphs']
    else:
        self._ipe_graphs = self._init_graphs()
    super(CatalogImage, self).__init__(self._ipe_graphs, cat_id, node=self._node_id, **kwargs)
def test_vectors_aggregate_query_complex(self):
    """A nested geohash -> monthly date-histogram aggregation over an AOI."""
    aoi = 'POLYGON((-76.65 40.10, -76.65 40.14, -76.55 40.14, -76.55 40.10, -76.65 40.10))'
    monthly = AggregationDef(agg_type='date_hist', value='M')
    by_geohash = AggregationDef(agg_type='geohash', value='4', children=monthly)
    result = Vectors().aggregate_query(
        aoi, by_geohash, index=None,
        query='item_type:tweet', start_date='now-6M', end_date='now')
    assert len(result) == 1
    top = result[0]
    assert 'name' in top
    assert top['name'] == 'geohash:4'
    assert 'terms' in top
    buckets = top['terms']
    assert len(buckets) == 1
    assert buckets[0]['term'] == 'dr1s'
    assert len(buckets[0]['aggregations']) == 1
    assert len(buckets[0]['aggregations'][0]['terms']) == 4
def vector_services_query(query, aoi=None, **kwargs):
    """Run a vector-services query (AOI defaults to the whole globe) and
    return the hits sorted by their vector id."""
    if not aoi:
        aoi = wkt.dumps(box(-180, -90, 180, 90))
    hits = Vectors().query(aoi, query=query, **kwargs)
    return sorted(hits, key=lambda rec: rec['properties']['id'])
def _build_standard_products(idaho_id, proj, bucket=None, gsd=None, acomp=False):
    """Assemble the standard RDA product graph (1b / ortho / acomp /
    toa_reflectance) for an IDAHO image. When no bucket is supplied, the
    image's tile bucket is looked up through vector services."""
    if bucket is None:
        # Resolve the tile bucket with a whole-globe catalog search by id.
        aoi = wkt.dumps(box(-180, -90, 180, 90))
        query = "item_type:IDAHOImage AND id:{}".format(idaho_id)
        hits = Vectors().query(aoi, query=query)
        if len(hits):
            bucket = hits[0]["properties"]["attributes"]["tileBucketName"]
    dn_op = ipe.IdahoRead(bucketName=bucket, imageId=idaho_id, objectStore="S3")
    params = ortho_params(proj, gsd=gsd)
    return {
        "1b": dn_op,
        "ortho": ipe.Orthorectify(dn_op, **params),
        "acomp": ipe.Format(ipe.Orthorectify(ipe.Acomp(dn_op), **params), dataType="4"),
        "toa_reflectance": ipe.Format(ipe.Orthorectify(ipe.TOAReflectance(dn_op), **params), dataType="4"),
    }
def _find_parts(cat_id, band_type):
    """Return IDAHO image parts matching a catalog id and color
    interpretation, sorted by vector id."""
    aoi = wkt.dumps(box(-180, -90, 180, 90))
    query = ("item_type:IDAHOImage AND attributes.catalogID:{} "
             "AND attributes.colorInterpretation:{}").format(cat_id, band_types[band_type])
    parts = Vectors().query(aoi, query=query)
    return sorted(parts, key=lambda rec: rec['properties']['id'])
def test_vectors_search_index(self):
    """Querying a named vector index returns that index's record count."""
    client = Vectors()
    region = "POLYGON((17.75390625 25.418470119273117,24.08203125 25.418470119273117,24.08203125 19.409611549990895,17.75390625 19.409611549990895,17.75390625 25.418470119273117))"
    hits = client.query(region, query="item_type:WV03", index="vector-dgcatalog-2016")
    assert len(hits) == 140
def test_vectors_aggregate_query_agg_def(self):
    """Aggregating with an AggregationDef object (not a raw string)."""
    aoi = 'POLYGON((-76.65 40.10, -76.65 40.14, -76.55 40.14, -76.55 40.10, -76.65 40.10))'
    agg = AggregationDef(agg_type='terms', value='ingest_source')
    result = Vectors().aggregate_query(aoi, agg, index=None)
    assert len(result) == 1
    first = result[0]
    assert 'name' in first
    assert first['name'] == 'terms:ingest_source'
    assert 'terms' in first
    assert len(first['terms']) == 6
def test_vectors_aggregate_query_with_defined_index(self):
    """Aggregating against an explicitly specified index pattern."""
    aoi = 'POLYGON((-76.65 40.10, -76.65 40.14, -76.55 40.14, -76.55 40.10, -76.65 40.10))'
    result = Vectors().aggregate_query(aoi, 'terms:item_type', index='read-vector-osm-*')
    assert len(result) == 1
    first = result[0]
    assert 'name' in first
    assert first['name'] == 'terms:item_type'
    assert 'terms' in first
    assert len(first['terms']) == 10
def _image_by_type(cls, cat_id, **kwargs):
    """Look up the catalog record for `cat_id` (excluding IDAHO images and
    raw acquisitions) and construct the matching image class from it."""
    aoi = wkt.dumps(box(-180, -90, 180, 90))
    query = ("item_type:GBDXCatalogRecord AND attributes.catalogID:{}".format(cat_id)
             + " AND NOT item_type:IDAHOImage AND NOT item_type:DigitalGlobeAcquisition")
    records = Vectors().query(aoi, query=query, count=1)
    if not records:
        raise Exception('Could not find a catalog entry for the given id: {}'.format(cat_id))
    return cls._image_class(cat_id, records[0], **kwargs)
def test_vectors_search_iteratively(self):
    """query_iteratively yields a generator that pages through every hit."""
    client = Vectors()
    region = "POLYGON((17.75390625 25.418470119273117,24.08203125 25.418470119273117,24.08203125 19.409611549990895,17.75390625 19.409611549990895,17.75390625 25.418470119273117))"
    gen = client.query_iteratively(region, query="item_type:WV03", index=None, count=1000)
    assert isinstance(gen, types.GeneratorType)
    total = sum(1 for _ in gen)
    assert total == 310
def test_vectors_create_from_wkt(self):
    """Creating a vector from WKT returns the new item's API path."""
    client = Vectors(self.gbdx)
    shape_wkt = "POLYGON((0 3,3 3,3 0,0 0,0 3))"
    item_path = client.create_from_wkt(
        shape_wkt,
        item_type='test_type_123',
        ingest_source='api',
        attribute1='nothing',
        attribute2='something',
        number=6,
        date='2015-06-06')
    assert item_path == '/insight-vector/api/vector/vector-web-s/b1af66c3-2e41-4696-9924-6ab264336692'
def _find_parts(cat_id, band_type):
    """Find IDAHO parts for a catalog id / color interpretation, keeping only
    those that share the vendor dataset of the first (id-sorted) part."""

    def _vendor(rec):
        # vendorDatasetIdentifier appears to be "<prefix>:<vendor>_<...>";
        # take the token between ':' and the first '_'.
        ident = rec['properties']['attributes']['vendorDatasetIdentifier']
        return ident.split(':')[1].split('_')[0]

    aoi = wkt.dumps(box(-180, -90, 180, 90))
    query = ("item_type:IDAHOImage AND attributes.catalogID:{} "
             "AND attributes.colorInterpretation:{}").format(cat_id, band_types[band_type])
    parts = sorted(Vectors().query(aoi, query=query), key=lambda rec: rec['properties']['id'])
    if not parts:
        raise MissingIdahoImages('Unable to find IDAHO imagery in the catalog: {}'.format(query))
    first_vendor = _vendor(parts[0])
    return [p for p in parts if _vendor(p) == first_vendor]
def test_vectors_create_from_wkt(self):
    """Creating a vector from WKT returns the canonical item path."""
    aoi = "POLYGON((0 3,3 3,3 0,0 0,0 3))"
    created = Vectors(self.gbdx).create_from_wkt(
        aoi, item_type='test_type_123', ingest_source='api',
        attribute1='nothing', attribute2='something',
        number=6, date='2015-06-06')
    assert created == '/insight-vector/api/vector/vector-web-s/b1af66c3-2e41-4696-9924-6ab264336692'
def _image_by_type(cls, cat_id, **kwargs):
    """Resolve `cat_id` to a catalog record and build the image class.
    A None cat_id means the caller supplied an RDA graph id in kwargs."""
    if cat_id is None:
        try:
            return IpeImage(GraphMeta(**kwargs))
        except KeyError:
            raise ValueError("Catalog Images must be initiated by a Catalog Id or an RDA Graph Id")
    aoi = wkt.dumps(box(-180, -90, 180, 90))
    query = ("item_type:GBDXCatalogRecord AND attributes.catalogID:{}".format(cat_id)
             + " AND NOT item_type:IDAHOImage AND NOT item_type:DigitalGlobeAcquisition")
    records = Vectors().query(aoi, query=query, count=1)
    if not records:
        raise Exception('Could not find a catalog entry for the given id: {}'.format(cat_id))
    return cls._image_class(cat_id, records[0], **kwargs)
def __init__(self, **kwargs):
    """Wire up the GBDX client facade: an auth session plus one instance of
    each service client, and the convenience image factory classes."""
    auth = Auth(**kwargs)
    self.gbdx_connection = auth.gbdx_connection
    self.root_url = auth.root_url
    self.logger = auth.logger

    # One client per GBDX service.
    self.s3 = S3()
    self.ordering = Ordering()
    self.catalog = Catalog()
    self.workflow = Workflow()
    self.idaho = Idaho()
    self.vectors = Vectors()

    # Image factory classes exposed for convenience.
    self.catalog_image = CatalogImage
    self.idaho_image = IdahoImage

    self.task_registry = TaskRegistry()
def __init__(self, **kwargs):
    """Wire up the GBDX client facade: an auth session, one instance of each
    service client, and the sensor-specific image factory classes."""
    auth = Auth(**kwargs)
    self.gbdx_connection = auth.gbdx_connection
    self.root_url = auth.root_url
    self.logger = auth.logger

    # One client per GBDX service.
    self.s3 = S3()
    self.ordering = Ordering()
    self.catalog = Catalog()
    self.workflow = Workflow()
    self.idaho = Idaho()
    self.vectors = Vectors()

    # Image factory classes exposed for convenience.
    self.catalog_image = CatalogImage
    self.idaho_image = IdahoImage
    self.landsat_image = LandsatImage
    self.sentinel2 = Sentinel2
    self.tms_image = TmsImage
    self.dem_image = DemImage
    self.wv03_vnir = WV03_VNIR
    self.wv02 = WV02
    self.ge01 = GE01
    self.s3_image = S3Image

    self.task_registry = TaskRegistry()
def test_vectors_create_multiple(self):
    """Creating two features in one call yields two result ids."""

    def _feature(mascot):
        # The two payloads differ only in the 'mascot' attribute.
        return {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [1.0, 1.0]
            },
            "properties": {
                "text": "item text",
                "name": "item name",
                "item_type": "type",
                "ingest_source": "source",
                "attributes": {
                    "latitude": 1,
                    "institute_founded": "2015-07-17",
                    "mascot": mascot
                }
            }
        }

    client = Vectors(self.gbdx)
    results = client.create([_feature("moth"), _feature("asdfadsfadf")])
    assert len(results) == 2
def test_vectors_create_single(self):
    """Creating one feature returns the canonical vector item path."""
    feature = {
        "type": "Feature",
        "geometry": {
            "type": "Point",
            "coordinates": [1.0, 1.0]
        },
        "properties": {
            "text": "item text",
            "name": "item name",
            "item_type": "type",
            "ingest_source": "source",
            "attributes": {
                "latitude": 1,
                "institute_founded": "2015-07-17",
                "mascot": "moth"
            }
        }
    }
    created = Vectors(self.gbdx).create(feature)
    for item_path in created:
        assert item_path == '/insight-vector/api/vector/vector-web-s/ce0699f3-bef8-402f-a18e-d149dc2f5f90'
def __init__(self, **kwargs):
    """Create a GBDX session — from explicit credentials, an injected
    connection, or the user's on-disk config — then set up logging and one
    instance of each service client, sharing this object with each."""
    host = kwargs.get('host') or 'geobigdata.io'
    self.root_url = 'https://%s' % host

    if all(kwargs.get(key) for key in ('username', 'password', 'client_id', 'client_secret')):
        self.gbdx_connection = gbdx_auth.session_from_kwargs(**kwargs)
    elif kwargs.get('gbdx_connection'):
        # A pre-built connection object may be injected, e.g. for testing.
        self.gbdx_connection = kwargs.get('gbdx_connection')
    else:
        # Falls back to the .ini config file; raises if it is not set properly.
        self.gbdx_connection = gbdx_auth.get_session(kwargs.get('config_file'))

    # Console-only logging for now; this could later target a service, file,
    # or some other aggregator instead of replacing every 'print' by hand.
    self.logger = logging.getLogger('gbdxtools')
    self.logger.setLevel(logging.ERROR)
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.ERROR)
    console_handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    self.logger.addHandler(console_handler)
    self.logger.info('Logger initialized')

    # One client per GBDX service, each sharing this session object.
    self.s3 = S3(self)
    self.ordering = Ordering(self)
    self.catalog = Catalog(self)
    self.workflow = Workflow(self)
    self.idaho = Idaho(self)
    self.vectors = Vectors(self)
    self.task_registry = TaskRegistry(self)
class CatalogImage(IpeImage):
    """Catalog Image Class.

    Collects metadata on all image parts and groups pan and ms bands from
    idaho. Inherits from IpeImage and represents a mosiac data set of the
    full catalog strip.
    """
    # Lazily-populated cache for the `properties` lookup.
    _properties = None

    def __init__(self, cat_id, band_type="MS", node="toa_reflectance", **kwargs):
        """Build a CatalogImage for `cat_id`.

        Recognized kwargs: pansharpen (bool), acomp (bool), level (int),
        proj, and _ipe_graphs (a pre-built graph dict); everything is also
        forwarded to the IpeImage base __init__.
        """
        self.interface = Auth()
        self.vectors = Vectors()
        self._gid = cat_id
        self._band_type = band_type
        self._pansharpen = kwargs.get('pansharpen', False)
        self._acomp = kwargs.get('acomp', False)
        # Pansharpening renders the 'pansharpened' graph node instead of `node`.
        if self._pansharpen:
            self._node_id = 'pansharpened'
        else:
            self._node_id = node
        self._level = kwargs.get('level', 0)
        # NOTE(review): self._proj is only set when 'proj' is passed, yet
        # _init_graphs() reads it unconditionally — confirm callers always
        # supply 'proj' (or that the base class sets it) before relying on it.
        if 'proj' in kwargs:
            self._proj = kwargs['proj']
        # Allow callers to inject a pre-built RDA graph; otherwise build one.
        if '_ipe_graphs' in kwargs:
            self._ipe_graphs = kwargs['_ipe_graphs']
        else:
            self._ipe_graphs = self._init_graphs()
        super(CatalogImage, self).__init__(self._ipe_graphs, cat_id, node=self._node_id, **kwargs)

    def _query_vectors(self, query, aoi=None):
        """Run a vector-services query, defaulting the AOI to the whole globe."""
        if aoi is None:
            aoi = "POLYGON((-180.0 90.0,180.0 90.0,180.0 -90.0,-180.0 -90.0,-180.0 90.0))"
        try:
            return self.vectors.query(aoi, query=query)
        except Exception as err:
            # Re-wrap so callers get a message pointing at the service.
            raise Exception(
                'Unable to query for image properties, the service may be currently down.', err)

    @property
    def properties(self):
        """DigitalGlobeAcquisition records for this catalog id (cached)."""
        if self._properties is None:
            query = 'item_type:DigitalGlobeAcquisition AND attributes.catalogID:{}'.format(
                self._gid)
            self._properties = self._query_vectors(query)
        return self._properties

    @property
    def metadata(self):
        """IDAHO parts grouped by vendorDatasetIdentifier; within each part,
        records are keyed by their colorInterpretation attribute."""
        meta = {}
        query = 'item_type:IDAHOImage AND attributes.catalogID:{}'.format(
            self._gid)
        results = self._query_vectors(query)
        grouped = defaultdict(list)
        for idaho in results:
            vid = idaho['properties']['attributes']['vendorDatasetIdentifier']
            grouped[vid].append(idaho)
        meta['parts'] = []
        for key, parts in grouped.items():
            part = {}
            for p in parts:
                attrs = p['properties']['attributes']
                part[attrs['colorInterpretation']] = {
                    'properties': attrs,
                    'geometry': shape(p['geometry'])
                }
            meta['parts'].append(part)
        return meta

    def aoi(self, **kwargs):
        """Subset the image to an AOI given as bbox/wkt/geojson kwargs;
        returns a DaskImage, or None if no geometry could be parsed."""
        pansharp = False
        # NOTE(review): `pansharp` is computed but never used below —
        # confirm whether _aoi_config is meant to receive it.
        if self._pansharpen and 'pansharpen' not in kwargs:
            pansharp = True
        bounds = self._parse_geoms(**kwargs)
        if bounds is None:
            print(
                'AOI bounds not found. Must specify a bbox, wkt, or geojson geometry.'
            )
            return
        cfg = self._aoi_config(bounds, **kwargs)
        return DaskImage(**cfg)

    def _init_graphs(self):
        """Build an orthorectified read graph per matching IDAHO part, then
        mosaic them (or delegate to the pansharpen graph builder)."""
        graph = {}
        # NOTE(review): `ids` is never used in this method.
        ids = []
        if self._node_id == 'pansharpened' and self._pansharpen:
            return self._pansharpen_graph()
        else:
            # Only parts whose color interpretation matches the requested
            # band type contribute to the mosaic.
            for part in self.metadata['parts']:
                for k, p in part.items():
                    if k == band_types[self._band_type]:
                        _id = p['properties']['idahoImageId']
                        graph[_id] = ipe.Orthorectify(
                            ipe.IdahoRead(bucketName="idaho-images",
                                          imageId=_id,
                                          objectStore="S3"),
                            **ortho_params(self._proj))
        return self._mosaic(graph)

    def _pansharpen_graph(self):
        """Build separate PAN and MS mosaics, byte-scale each, and combine
        them with a locally projective pansharpen node."""
        pan_graph = {}
        ms_graph = {}
        for part in self.metadata['parts']:
            for k, p in part.items():
                _id = p['properties']['idahoImageId']
                if k == 'PAN':
                    pan_graph[_id] = ipe.Orthorectify(
                        ipe.IdahoRead(bucketName="idaho-images",
                                      imageId=_id,
                                      objectStore="S3"),
                        **ortho_params(self._proj))
                else:
                    ms_graph[_id] = ipe.Orthorectify(
                        ipe.IdahoRead(bucketName="idaho-images",
                                      imageId=_id,
                                      objectStore="S3"),
                        **ortho_params(self._proj))
        pan_mosaic = self._mosaic(pan_graph, suffix='-pan')
        # Scale TOA reflectance by 1000 and format to byte before sharpening.
        pan = ipe.Format(ipe.MultiplyConst(pan_mosaic['toa_reflectance-pan'],
                                           constants=json.dumps([1000])),
                         dataType="1")
        ms_mosaic = self._mosaic(ms_graph, suffix='-ms')
        ms = ipe.Format(ipe.MultiplyConst(ms_mosaic['toa_reflectance-ms'],
                                          constants=json.dumps([1000] * 8)),
                        dataType="1")
        # NOTE(review): 'pan_mosiac' key spelling is kept as-is — downstream
        # consumers may depend on it; confirm before correcting the typo.
        return {
            'ms_mosaic': ms_mosaic,
            'pan_mosiac': pan_mosaic,
            'pansharpened': ipe.LocallyProjectivePanSharpen(ms, pan)
        }

    def _mosaic(self, graph, suffix=''):
        """Mosaic the given read graphs and append radiance / TOA-reflectance
        nodes, with keys suffixed by `suffix`. Mutates and returns `graph`."""
        mosaic = ipe.GeospatialMosaic(*graph.values())
        idaho_id = list(graph.keys())[0]
        # Gain/offset constants come from the first part's metadata record.
        meta = requests.get(
            'http://idaho.timbr.io/{}.json'.format(idaho_id)).json()
        gains_offsets = calc_toa_gain_offset(meta['properties'])
        radiance_scales, reflectance_scales, radiance_offsets = zip(
            *gains_offsets)
        radiance = ipe.AddConst(ipe.MultiplyConst(ipe.Format(mosaic,
                                                             dataType="4"),
                                                  constants=radiance_scales),
                                constants=radiance_offsets)
        toa = ipe.MultiplyConst(radiance, constants=reflectance_scales)
        graph.update({
            "mosaic{}".format(suffix): mosaic,
            "radiance{}".format(suffix): radiance,
            "toa_reflectance{}".format(suffix): toa
        })
        return graph
def test_init(self):
    """Vectors() constructs without arguments and has the right type."""
    instance = Vectors()
    self.assertIsInstance(instance, Vectors)
def test_vectors_search(self):
    """A default query returns a single page of 100 records."""
    client = Vectors(self.gbdx)
    region = "POLYGON((17.75390625 25.418470119273117,24.08203125 25.418470119273117,24.08203125 19.409611549990895,17.75390625 19.409611549990895,17.75390625 25.418470119273117))"
    hits = client.query(region, query="item_type:WV03")
    assert len(hits) == 100
def test_init(self):
    """Vectors(gbdx) constructs from a session and has the right type."""
    instance = Vectors(self.gbdx)
    self.assertTrue(isinstance(instance, Vectors))
def test_vectors_search_paging(self):
    """Requesting more than one page of results pages transparently."""
    client = Vectors()
    whole_world = "POLYGON ((180 -90, 180 90, -180 90, -180 -90, 180 -90))"
    hits = client.query(whole_world, query="item_type:WV03_VNIR", index=None, count=1010)
    assert len(hits) == 1010
def test_vectors_search_count_equal_to_num_records(self):
    """A count equal to the total record count returns exactly that many."""
    client = Vectors()
    region = "POLYGON((17.75390625 25.418470119273117,24.08203125 25.418470119273117,24.08203125 19.409611549990895,17.75390625 19.409611549990895,17.75390625 25.418470119273117))"
    hits = client.query(region, query="item_type:WV03", index=None, count=310)
    assert len(hits) == 310
def test_vectors_search_index(self):
    """A named index with a large count still returns the index's 140 hits."""
    client = Vectors()
    region = "POLYGON((17.75390625 25.418470119273117,24.08203125 25.418470119273117,24.08203125 19.409611549990895,17.75390625 19.409611549990895,17.75390625 25.418470119273117))"
    hits = client.query(region, query="item_type:WV03", index="vector-dgcatalog-2016", count=1000)
    assert len(hits) == 140