def test_get(self):
    c = Context()
    c.service = "http://example.org/service.atomsvc"
    c.http = MockHttp(HTTP_SRC_DIR)
    s = Service(c)
    headers, body = s.get()
    self.assertEqual(headers["status"], "200")
    self.assertEqual(s.uri(), "http://example.org/service.atomsvc")
class Spatial_Reference_type(Context().getBase()):
    __tablename__ = 'spatialreferencetype'
    __table_args__ = {'sqlite_autoincrement': True, 'schema': 'catalogue'}

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String, nullable=False)
    description = Column(String, nullable=False)
    shortcut = Column(String, nullable=True)
class Catalog_Dataset(Context().getBase()):
    __tablename__ = "global_catalog"
    __table_args__ = (UniqueConstraint('entity_id', 'tile_identifier'),
                      {'sqlite_autoincrement': True, 'schema': 'catalogue'})

    id = Column(Integer, primary_key=True, autoincrement=True)
    entity_id = Column(String, index=True, nullable=False)
    acq_time = Column(DateTime(timezone=False))
    tile_identifier = Column(String, index=True, nullable=False)
    clouds = Column(Float, nullable=False)
    resources = Column(JSONB)
    level = Column(String, index=True, nullable=False)
    daynight = Column(String, index=True, nullable=False)
    sensor = Column(String, index=True, nullable=False)
    time_registered = Column(DateTime(timezone=False))

    def __repr__(self):
        return '<%s: id:%s (%s) [%s]>' % (self.__class__.__name__,
                                          self.entity_id, str(self.acq_time),
                                          self.tile_identifier)

    def __eq__(self, other):
        """Override the default equality: two datasets match when the key
        fields agree, ignoring a possible '+00:00' UTC offset suffix."""
        if isinstance(other, self.__class__):
            bools = []
            for k in ['entity_id', 'acq_time', 'tile_identifier', 'clouds']:
                bools.append(
                    str(self.__dict__[k]).replace('+00:00', '') ==
                    str(other.__dict__[k]).replace('+00:00', ''))
            return all(bools)
        return False
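# Usage sketch (illustration only): exercising the timezone-tolerant __eq__
# above on transient, in-memory instances. The entity id, tile and timestamp
# are hypothetical; no database session is needed just to compare unsaved
# ORM objects.
from datetime import datetime

a = Catalog_Dataset(entity_id='LC81920272016290LGN00',
                    acq_time=datetime(2016, 10, 16, 9, 58, 12),
                    tile_identifier='192027', clouds=12.5)
b = Catalog_Dataset(entity_id='LC81920272016290LGN00',
                    acq_time=datetime(2016, 10, 16, 9, 58, 12),
                    tile_identifier='192027', clouds=12.5)
assert a == b          # all key fields agree -> equal
b.clouds = 99.0
assert not (a == b)    # any key field differing -> not equal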
def test_restore(self):
    class A(object):
        def __init__(self, context):
            self.context = context

    class B(object):
        def __init__(self, context):
            self.context = context

    class C(object):
        def __init__(self, context):
            self.context = context

    ctxt = Context()
    ctxt.service = "http://example.org/service.atomsvc"
    ctxt.collection = "http://example.org/collection/1/"
    ctxt.entry = "http://example.org/collection/1/1"
    a, b, c = ctxt.restore(A, B, C)
    self.assertEqual(type(a), A)
    self.assertEqual(type(b), B)
    self.assertEqual(type(c), C)

    ctxt.service = None
    a, b, c = ctxt.restore(A, B, C)
    self.assertEqual(a, None)
    self.assertEqual(b, None)
    self.assertEqual(c, None)
class Spatial_Reference(Context().getBase()):
    __tablename__ = 'spatialreference'
    __table_args__ = {'sqlite_autoincrement': True, 'schema': 'catalogue'}

    id = Column(Integer, primary_key=True, autoincrement=True)
    ref_id = Column(String, nullable=False)
    ref_name = Column(String, nullable=False)
    geom = Column(Geometry('POLYGON', srid=4326), nullable=False)
    referencetype_id = Column(Integer, ForeignKey(Spatial_Reference_type.id))
    referencetype = relationship("Spatial_Reference_type", uselist=False)

    def __repr__(self):
        return '<%s: %s, %d>' % (self.__class__.__name__, self.ref_name,
                                 self.referencetype_id)
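# Usage sketch (illustration only): building a transient Spatial_Reference
# with a hypothetical 1x1 degree tile polygon. WKTElement (also used by
# get_referencebyaoi below) wraps raw WKT with an SRID so GeoAlchemy2 can
# bind it to the POLYGON geometry column.
from geoalchemy2 import WKTElement

tile = Spatial_Reference(
    ref_id='192027', ref_name='192027',
    geom=WKTElement('POLYGON((10 50, 11 50, 11 51, 10 51, 10 50))', srid=4326),
    referencetype_id=1)  # hypothetical reference type id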
def training():
    context_x = Context(inputs_dict.n_words, hidden_size).to(device)
    classification_x = Classification().to(device)
    context_x, classification_x, plot_losses = trainIters(
        context_x, classification_x, device, inputs_dict, target_dict, pairs,
        n_iters, print_every=50)
    return context_x, classification_x, plot_losses
class EossProject(Context().getBase()):
    __tablename__ = 'project'
    __table_args__ = (UniqueConstraint('id', name='uq_project_identifier'),
                      UniqueConstraint('uuid', name='uq_project_uuid'),
                      {'sqlite_autoincrement': True, 'schema': 'staging'})

    id = Column(Integer, primary_key=True, autoincrement=True)
    uuid = Column(GUID, index=True, nullable=False)
    name = Column(String, nullable=False)
    project_start = Column(DateTime(timezone=True))
    project_end = Column(DateTime(timezone=True))
    geom = Column(Geometry('POLYGON', srid=4326), nullable=False)

    def __repr__(self):
        return "<Project(name=%s, start=%s)>" % (self.name, self.project_start)
def test_push_pop(self):
    c = Context()
    c.service = "http://example.org/service.atomsvc"
    c.collection = "http://example.org/collection/1/"
    c.entry = "http://example.org/collection/1/1"
    c.collpush("http://fred.org/")

    # Test pickle and un-pickle while we're at it.
    buf = StringIO()
    pickle.dump(c, buf)
    buf.seek(0)
    c = pickle.load(buf)

    self.assertEqual(c.collection, "http://fred.org/")
    self.assertEqual(c.entry, None)
    c.collpop()
    self.assertEqual(c.collection, "http://example.org/collection/1/")
    self.assertEqual(c.entry, "http://example.org/collection/1/1")
class SensorAggregation(Context().getBase()):
    __tablename__ = "sensor_aggregation"
    __table_args__ = (UniqueConstraint('sensor', 'level', 'aggregation_type'),
                      {'sqlite_autoincrement': True, 'schema': 'catalogue'})

    id = Column(Integer, primary_key=True, autoincrement=True)
    sensor = Column(String, ForeignKey(Catalog_Dataset.sensor), index=True,
                    nullable=False)
    level = Column(String, ForeignKey(Catalog_Dataset.level), index=True,
                   nullable=False)
    aggregation_type = Column(String, index=True, nullable=False)
    aggregation_name = Column(String, index=True, nullable=False)
def test_attribs(self):
    c = Context()
    c.service = "http://example.org/service.atomsvc"
    c.collection = "http://example.org/collection/1/"
    c.entry = "http://example.org/collection/1/1"
    self.assertEqual(c.service, "http://example.org/service.atomsvc")
    self.assertEqual(c.collection, "http://example.org/collection/1/")
    self.assertEqual(c.entry, "http://example.org/collection/1/1")

    # Setting a new collection resets the entry.
    c.collection = "http://example.org/collection/2/"
    self.assertEqual(c.service, "http://example.org/service.atomsvc")
    self.assertEqual(c.collection, "http://example.org/collection/2/")
    self.assertEqual(c.entry, None)

    # Setting a new service resets both collection and entry.
    c.service = "http://example.org/some_other_service.atomsvc"
    self.assertEqual(c.service, "http://example.org/some_other_service.atomsvc")
    self.assertEqual(c.collection, None)
    self.assertEqual(c.entry, None)
class Persistance:
    def __init__(self):
        self.logger = logging.getLogger('eoss.' + __name__)
        self.session = Context().getSession()
        self.engine = Context().get_engine()

    #@region.cache_on_arguments()
    def get_sensors(self, group):
        sensor_agg = func.array_agg(SensorAggregation.sensor,
                                    type_=String).label('sensor_agg')
        level_agg = func.array_agg(SensorAggregation.level,
                                   type_=String).label('level_agg')
        return self.session.query(
            SensorAggregation.aggregation_name, sensor_agg, level_agg).filter(
                SensorAggregation.aggregation_type ==
                group).distinct().group_by(
                    SensorAggregation.aggregation_name).order_by(
                        SensorAggregation.aggregation_name)

    def delete_dataset(self, entity_id):
        self.session.query(Catalog_Dataset).filter(
            Catalog_Dataset.entity_id == entity_id).delete(
                synchronize_session=False)
        self.session.commit()

    #@region.cache_on_arguments()
    def get_dataset(self, entity_id):
        return self.session.query(Catalog_Dataset).filter(
            Catalog_Dataset.entity_id == entity_id).all()

    def get_dataset_by_sensor_and_date(self, sensor, acq_date):
        ds = self.session.query(Catalog_Dataset).filter(
            Catalog_Dataset.sensor == sensor).filter(
                func.DATE(Catalog_Dataset.acq_time) ==
                func.DATE(acq_date)).all()
        self.logger.debug(ds)
        return ds

    def get_observation_coverage(self, reference_type_id, last_days=2):
        struct = {'geojson': [], 'attr': []}
        # Per reference tile: the tile geometry as GeoJSON, the most recent
        # acquisition date, and the observation count over the last
        # `last_days` days. Both placeholders are interpolated as integers,
        # which keeps the statement safe against injection.
        sql = text(
            'select r.ref_name, st_asgeojson(r.geom), max(ds.acq_time::date), '
            'count(ds.*) from catalogue.spatialreference r, '
            'catalogue.global_catalog ds where r.referencetype_id = %d and '
            'ds.tile_identifier = r.ref_name and '
            'ds.acq_time::date > now()::date-%d '
            'group by r.ref_name, st_asgeojson(r.geom) order by r.ref_name'
            % (reference_type_id, last_days))
        result = self.engine.execute(sql)
        for r in result:
            struct['geojson'].append(ujson.loads(r[1]))
            struct['attr'].append({
                'tile_id': r[0],
                'count': r[3],
                'last_observation': r[2].isoformat(),
            })
        return struct

    def add_dataset(self, obj):
        session = Context().getSession()
        ds_exists = session.query(Catalog_Dataset).filter(
            Catalog_Dataset.entity_id == obj.entity_id).filter(
                Catalog_Dataset.tile_identifier == obj.tile_identifier).filter(
                    Catalog_Dataset.acq_time == obj.acq_time)
        if not ds_exists.count():
            try:
                c = Catalog_Dataset(**dict(obj))
                session.add(c)
                session.commit()
            except Exception:
                session.rollback()
                raise
            finally:
                session.close()
                Context().closeSession()
        return True

    #@region.cache_on_arguments()
    def get_all_sensor_aggregations(self):
        return self.session.query(SensorAggregation).all()

    @region.cache_on_arguments()
    def get_all_tilegrid(self):
        return self.session.query(
            Spatial_Reference.ref_name,
            geoalchemy2.functions.ST_AsGeoJSON(Spatial_Reference.geom)).all()

    def get_reference_by_sensorgrid(self, ref_id, ref_type_id, sensor_grid):
        sat_grid = aliased(Spatial_Reference)
        ref_obj = aliased(Spatial_Reference)
        return self.session.query(sat_grid.ref_name).filter(
            sat_grid.geom.ST_Intersects(ref_obj.geom)).filter(
                ref_obj.referencetype_id == ref_type_id).filter(
                    ref_obj.ref_id == ref_id).filter(
                        sat_grid.referencetype_id == sensor_grid)

    def get_reference(self, ref_id, ref_type_id):
        return self.session.query(Spatial_Reference).filter(
            Spatial_Reference.referencetype_id == ref_type_id).filter(
                Spatial_Reference.ref_id == ref_id)

    def get_referencebyaoi(self, wkt, sensor_grid):
        sat_grid = aliased(Spatial_Reference)
        wkt = WKTElement(wkt, srid=4326)
        return self.session.query(sat_grid.ref_name).filter(
            sat_grid.referencetype_id == sensor_grid).filter(
                sat_grid.geom.ST_Intersects(wkt))

    def find_dataset(self, dates_filter, sensors_filter, grid_list,
                     joint_gridset, clouds):
        query = None  # stays None when no dates_filter is given
        base = self.session.query(Catalog_Dataset).filter(
            Catalog_Dataset.daynight == 'day').filter(
                Catalog_Dataset.clouds <= clouds)
        if len(dates_filter) > 0:
            query = base.filter(or_(*dates_filter))
            if len(sensors_filter) > 0:
                query = query.filter(or_(*sensors_filter))
            if len(grid_list[11]) > 0:
                query = query.filter(
                    Catalog_Dataset.tile_identifier.in_(joint_gridset))
        return query

    @region.cache_on_arguments()
    def get_tile_geom(self, tiles_list):
        return self.session.query(
            Spatial_Reference.ref_name,
            geoalchemy2.functions.ST_AsGeoJSON(Spatial_Reference.geom)).filter(
                Spatial_Reference.ref_name.in_(tiles_list))

    @region.cache_on_arguments()
    def get_all_reference_types(self):
        return self.session.query(Spatial_Reference_type).all()

    @region.cache_on_arguments()
    def get_all_references(self):
        return self.session.query(Spatial_Reference.ref_name,
                                  Spatial_Reference.referencetype_id,
                                  Spatial_Reference.ref_id).distinct().all()

    def get_reference_by_groupid_polygon(self, group_id, polygon_wkt):
        return self.session.query(
            Spatial_Reference,
            geoalchemy2.functions.ST_AsGeoJSON(Spatial_Reference.geom),
            geoalchemy2.functions.ST_AsGeoJSON(
                geoalchemy2.functions.ST_Envelope(Spatial_Reference.geom))
        ).filter(Spatial_Reference.referencetype_id == group_id).filter(
            Spatial_Reference.geom.ST_Intersects('SRID=4326;' + polygon_wkt))

    def get_reference_by_groupid_reference_id(self, group_id, reference_id):
        return self.session.query(
            Spatial_Reference,
            geoalchemy2.functions.ST_AsGeoJSON(Spatial_Reference.geom),
            geoalchemy2.functions.ST_AsGeoJSON(
                geoalchemy2.functions.ST_Envelope(Spatial_Reference.geom))
        ).filter(Spatial_Reference.referencetype_id == group_id).filter(
            Spatial_Reference.ref_id == reference_id)

    def get_reference_by_groupid_reference_name(self, group_id,
                                                reference_name):
        return self.session.query(
            Spatial_Reference,
            geoalchemy2.functions.ST_AsGeoJSON(Spatial_Reference.geom),
            geoalchemy2.functions.ST_AsGeoJSON(
                geoalchemy2.functions.ST_Envelope(Spatial_Reference.geom))
        ).filter(Spatial_Reference.referencetype_id == group_id).filter(
            Spatial_Reference.ref_name == reference_name)

    @region.cache_on_arguments()
    def get_selected_references(self, REGISTERED_REF_TYPES):
        return self.session.query(
            Spatial_Reference.ref_name, Spatial_Reference.referencetype_id,
            Spatial_Reference.ref_id).filter(
                Spatial_Reference.referencetype_id.in_(
                    REGISTERED_REF_TYPES)).distinct().all()

    def get_dataset_tiles_date_clouds(self, sensors_filter, dates_filter,
                                      tiles_list):
        query = None  # stays None when no dates_filter is given
        if len(dates_filter) > 0:
            query = self.session.query(
                Catalog_Dataset.tile_identifier, Catalog_Dataset.acq_time,
                Catalog_Dataset.clouds).filter(or_(*dates_filter)).filter(
                    Catalog_Dataset.tile_identifier.in_(tiles_list))
            if len(sensors_filter) > 0:
                query = query.filter(or_(*sensors_filter))
            query = query.order_by(Catalog_Dataset.tile_identifier,
                                   Catalog_Dataset.acq_time)
        return query
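# Usage sketch (illustration only): driving the Persistance facade above.
# Assumes Context() is configured against a reachable PostGIS database; the
# entity id, sensor name and dates below are hypothetical.
import datetime

p = Persistance()
for ds in p.get_dataset('LC81920272016290LGN00'):
    print(ds)  # rendered via Catalog_Dataset.__repr__
same_day = p.get_dataset_by_sensor_and_date('LANDSAT_8',
                                            datetime.date(2016, 10, 16))
coverage = p.get_observation_coverage(reference_type_id=1, last_days=7)
print('%d tiles observed in the last week' % len(coverage['geojson']))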
class DatabaseTest(unittest.TestCase):
    """Exercises session handling and bulk inserts against Test_Table."""

    def setUp(self):
        self.session = Context().getSession()

    def tearDown(self):
        self.session.query(Test_Table).delete()
        self.session.commit()
        Context().closeSession()

    def testCreateData(self):
        # Insert O rows with random keys of length M and values of length N.
        for _ in range(O):
            obj = Test_Table(
                key=''.join(
                    random.SystemRandom().choice(string.ascii_uppercase +
                                                 string.digits)
                    for _ in range(M)),
                val=''.join(
                    random.SystemRandom().choice(string.ascii_uppercase +
                                                 string.digits)
                    for _ in range(N)))
            try:
                self.session.add(obj)
                self.session.commit()
            except Exception:
                self.session.rollback()
            finally:
                self.session.close()
                Context().closeSession()
        self.assertEqual(self.session.query(Test_Table).count(), O)
        Context().closeSession()
class Test_Table(Context().getBase()):
    __tablename__ = "tester1"
    __table_args__ = {'sqlite_autoincrement': True, 'schema': 'tmp'}

    id = Column(Integer, primary_key=True, autoincrement=True)
    key = Column(String, nullable=False)
    val = Column(String, nullable=False)
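# Usage sketch (illustration only): materialising the model above before the
# tests run. Assumes Context().get_engine() (seen in Persistance.__init__)
# returns the configured engine and that the 'tmp' schema already exists on
# the target database.
Context().getBase().metadata.create_all(Context().get_engine())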