class Catalog_Dataset(Context().getBase()):
    """ORM model for one entry in catalogue.global_catalog.

    A dataset is uniquely identified by (entity_id, tile_identifier);
    equality is defined on the business key, not the surrogate ``id``.
    """

    __tablename__ = "global_catalog"
    __table_args__ = (UniqueConstraint('entity_id', 'tile_identifier'),
                      {'sqlite_autoincrement': True, 'schema': 'catalogue'})

    id = Column(Integer, primary_key=True, autoincrement=True)
    entity_id = Column(String, index=True, nullable=False)
    acq_time = Column(DateTime(timezone=False))
    tile_identifier = Column(String, index=True, nullable=False)
    clouds = Column(Float, nullable=False)
    resources = Column(JSONB)
    level = Column(String, index=True, nullable=False)
    daynight = Column(String, index=True, nullable=False)
    sensor = Column(String, index=True, nullable=False)
    time_registered = Column(DateTime(timezone=False))

    def __repr__(self):
        return '<%s: id:%s (%s) [%s]>' % (self.__class__.__name__,
                                          self.entity_id, str(self.acq_time),
                                          self.tile_identifier)

    def __eq__(self, other):
        """Compare on (entity_id, acq_time, tile_identifier, clouds).

        Values are compared as strings with any '+00:00' UTC offset
        stripped, so naive and aware datetimes compare equal.
        """
        if isinstance(other, self.__class__):
            for k in ['entity_id', 'acq_time', 'tile_identifier', 'clouds']:
                # getattr (not __dict__[k]) so expired/deferred ORM
                # attributes are loaded instead of raising KeyError.
                mine = str(getattr(self, k)).replace('+00:00', '')
                theirs = str(getattr(other, k)).replace('+00:00', '')
                if mine != theirs:
                    return False
            return True
        return False

    # Defining __eq__ implicitly sets __hash__ to None in Python 3;
    # restore identity hashing so instances stay usable in sets/dicts.
    __hash__ = object.__hash__
class Spatial_Reference_type(Context().getBase()):
    """Lookup table (catalogue.spatialreferencetype) of spatial
    reference types, each with a name, description and optional
    shortcut."""

    __tablename__ = 'spatialreferencetype'
    __table_args__ = ({'sqlite_autoincrement': True, 'schema': 'catalogue'})

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String, nullable=False)
    description = Column(String, nullable=False)
    # Optional abbreviated label for the type.
    shortcut = Column(String, nullable=True)
def add_dataset(self, obj):
    """Insert *obj* into the catalogue unless a matching dataset exists.

    Existence is checked on (entity_id, tile_identifier, acq_time).
    On insert failure the transaction is rolled back and the error is
    re-raised. Always returns True otherwise (also when the dataset
    already existed and nothing was inserted).
    """
    session = Context().getSession()
    # .first() instead of .count(): we only need existence, not a COUNT
    # over every matching row.
    existing = session.query(Catalog_Dataset).filter(
        Catalog_Dataset.entity_id == obj.entity_id,
        Catalog_Dataset.tile_identifier == obj.tile_identifier,
        Catalog_Dataset.acq_time == obj.acq_time).first()
    if existing is None:
        try:
            c = Catalog_Dataset(**dict(obj))
            session.add(c)
            session.commit()
        except Exception:
            # Undo the partial transaction, then surface the error
            # (narrowed from a bare except:, which also caught
            # KeyboardInterrupt/SystemExit).
            session.rollback()
            raise
        finally:
            session.close()
    Context().closeSession()
    return True
class Spatial_Reference(Context().getBase()):
    """ORM model for one spatial reference polygon
    (catalogue.spatialreference), linked to its reference type."""

    __tablename__ = 'spatialreference'
    __table_args__ = ({'sqlite_autoincrement': True, 'schema': 'catalogue'})

    id = Column(Integer, primary_key=True, autoincrement=True)
    ref_id = Column(String, nullable=False)
    ref_name = Column(String, nullable=False)
    geom = Column(Geometry('POLYGON', srid=4326), nullable=False)
    referencetype_id = Column(Integer, ForeignKey(Spatial_Reference_type.id))
    referencetype = relationship("Spatial_Reference_type", uselist=False)

    def __repr__(self):
        # Fixed: the original format string had an unbalanced '>' and
        # used %d, which raises TypeError when referencetype_id is None
        # (the FK column is nullable).
        return '<%s: %s, %s>' % (self.__class__.__name__, self.ref_name,
                                 self.referencetype_id)
def testCreateData(self):
    """Insert O rows of random key/value strings, then assert that
    exactly O rows are present in Test_Table."""
    rng = random.SystemRandom()
    alphabet = string.ascii_uppercase + string.digits

    def random_token(length):
        # Cryptographically-random uppercase/digit string of given length.
        return ''.join(rng.choice(alphabet) for _ in range(length))

    for _ in range(O):
        row = Test_Table(key=random_token(M), val=random_token(N))
        try:
            self.session.add(row)
            self.session.commit()
        except:
            # Best-effort insert: failures are swallowed here; the final
            # count assertion below catches any lost rows.
            self.session.rollback()
        finally:
            self.session.close()
            Context().closeSession()
    self.assertEqual(self.session.query(Test_Table).count(), O)
    Context().closeSession()
def training():
    """Build the context encoder and classifier on the target device,
    run trainIters over the module-level data, and return the trained
    models together with the loss history."""
    encoder = Context(inputs_dict.n_words, hidden_size).to(device)
    classifier = Classification().to(device)
    encoder, classifier, losses = trainIters(
        encoder, classifier, device, inputs_dict, target_dict, pairs,
        n_iters, print_every=50)
    return encoder, classifier, losses
class EossProject(Context().getBase()):
    """ORM model for one project (staging.project) with a UUID, a
    lifetime window and a polygon footprint."""

    __tablename__ = 'project'
    __table_args__ = (UniqueConstraint('id', name='uq_project_identfier'),
                      UniqueConstraint('uuid', name='uq_project_uuid'),
                      {'sqlite_autoincrement': True, 'schema': 'staging'})

    id = Column(Integer, primary_key=True, autoincrement=True)
    uuid = Column(GUID, index=True, nullable=False)
    name = Column(String, nullable=False)
    project_start = Column(DateTime(timezone=True))
    project_end = Column(DateTime(timezone=True))
    geom = Column(Geometry('POLYGON', srid=4326), nullable=False)

    def __repr__(self):
        # Fixed: the original referenced self.identifier, which is not a
        # column of this model (AttributeError), and displayed the uuid
        # under the 'name' label.
        return "<Project(name=%s, start=%s)>" % (self.name,
                                                 self.project_start)
class SensorAggregation(Context().getBase()):
    """Named aggregation entry (catalogue.sensor_aggregation), unique
    per (sensor, level, aggregation_type)."""

    __tablename__ = "sensor_aggregation"
    __table_args__ = (
        UniqueConstraint('sensor', 'level', 'aggregation_type'),
        {'sqlite_autoincrement': True, 'schema': 'catalogue'},
    )

    id = Column(Integer, primary_key=True, autoincrement=True)
    # NOTE(review): both ForeignKeys point at non-unique Catalog_Dataset
    # columns — confirm the target database actually supports/intends this.
    sensor = Column(String, ForeignKey(Catalog_Dataset.sensor),
                    index=True, nullable=False)
    level = Column(String, ForeignKey(Catalog_Dataset.level),
                   index=True, nullable=False)
    aggregation_type = Column(String, index=True, nullable=False)
    aggregation_name = Column(String, index=True, nullable=False)
def tearDown(self):
    """Delete every Test_Table row created by the test, commit, and
    release the shared session."""
    # Removed the unused `all_data` binding — Query.delete() returns a
    # row count that was never read.
    self.session.query(Test_Table).delete()
    self.session.commit()
    Context().closeSession()
def setUp(self):
    """Acquire a fresh session from the shared Context before each test."""
    self.session = Context().getSession()
class Test_Table(Context().getBase()):
    """Throwaway key/value table (tmp.tester1) used only by the unit
    tests in this module."""

    __tablename__ = "tester1"
    __table_args__ = ({'sqlite_autoincrement': True, 'schema': 'tmp'})

    id = Column(Integer, primary_key=True, autoincrement=True)
    key = Column(String, nullable=False)
    val = Column(String, nullable=False)
def __init__(self):
    """Wire up the 'eoss.'-prefixed module logger and grab the shared
    DB session and engine from the Context singleton."""
    self.logger = logging.getLogger('eoss.' + __name__)
    self.session = Context().getSession()
    self.engine = Context().get_engine()