class SavedWorkflow(Device_Base):
    __tablename__ = 'saved_workflow'
    workflow_execution_id = Column(UUIDType(binary=False), primary_key=True)
    workflow_id = Column(UUIDType(binary=False), nullable=False)
    action_id = Column(UUIDType(binary=False), nullable=False)
    accumulator = Column(PickleType(), nullable=False)
    app_instances = Column(PickleType(), nullable=False)

    def __init__(self, workflow_execution_id, workflow_id, action_id, accumulator, app_instances):
        """Initializes a SavedWorkflow object. This is used when a workflow pauses
        execution and must be reloaded at a later point.

        Args:
            workflow_execution_id (str): The workflow execution UID that this saved state refers to.
            workflow_id (str): The ID of the workflow that this saved state refers to.
            action_id (str): The currently executing action ID.
            accumulator (dict): The accumulator up to this point in the workflow.
            app_instances (str): The pickled app instances for the saved workflow.
        """
        self.workflow_execution_id = workflow_execution_id
        self.workflow_id = workflow_id
        self.action_id = action_id
        self.accumulator = accumulator
        self.app_instances = app_instances
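# A minimal usage sketch, assuming a configured SQLAlchemy session named
# `session`; the identifiers below are hypothetical stand-ins. Persist the
# paused state, then look it up by execution ID when the workflow resumes.
saved = SavedWorkflow(
    workflow_execution_id=execution_uid,
    workflow_id=workflow_uid,
    action_id=current_action_id,
    accumulator={'step_1': 'result'},
    app_instances=pickled_instances,
)
session.add(saved)
session.commit()

resumed = session.query(SavedWorkflow).get(execution_uid)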
class TuningRun(Base):
    uuid = Column(String(32), index=True, unique=True)

    program_version_id = Column(ForeignKey(ProgramVersion.id))
    program_version = relationship(ProgramVersion, backref='tuning_runs')

    machine_class_id = Column(ForeignKey(MachineClass.id))
    machine_class = relationship(MachineClass, backref='tuning_runs')

    input_class_id = Column(ForeignKey(InputClass.id))
    input_class = relationship(InputClass, backref='tuning_runs')

    name = Column(String(128), default='unnamed')
    args = Column(PickleType(pickler=CompressedPickler))
    objective = Column(PickleType(pickler=CompressedPickler))

    state = Column(Enum('QUEUED', 'RUNNING', 'COMPLETE', 'ABORTED',
                        name='t_tr_state'),
                   default='QUEUED')
    start_date = Column(DateTime, default=func.now())
    end_date = Column(DateTime)

    final_config_id = Column(ForeignKey(Configuration.id))
    final_config = relationship(Configuration)

    # __mapper_args__ = {'primary_key': uuid}

    @property
    def program(self):
        return self.program_version.program
def make_optimization_iteration_table(database, if_exists="extend"):
    """Generate a table for information that is generated with each function evaluation.

    Args:
        database (sqlalchemy.MetaData): Bound metadata object.
        if_exists (str): What to do if the table already exists; "extend" by default.

    The table is added to ``database`` in place; nothing is returned.
    """
    table_name = "optimization_iterations"
    _handle_existing_table(database, "optimization_iterations", if_exists)

    columns = [
        Column("rowid", Integer, primary_key=True),
        Column("params", PickleType(pickler=RobustPickler)),
        Column("internal_derivative", PickleType(pickler=RobustPickler)),
        Column("timestamp", Float),
        Column("exceptions", String),
        Column("valid", Boolean),
        Column("hash", String),
        Column("value", Float),
        Column("step", Integer),
        Column("criterion_eval", PickleType(pickler=RobustPickler)),
    ]

    Table(
        table_name, database, *columns, sqlite_autoincrement=True, extend_existing=True
    )

    database.create_all(database.bind)
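# A hedged sketch of calling the table factory, assuming the surrounding
# module's helpers are in scope; the database URL is hypothetical. The code
# above uses `database.bind`, so it targets the 1.x-style bound MetaData.
from sqlalchemy import MetaData, create_engine

engine = create_engine("sqlite:///logging.db")
metadata = MetaData(bind=engine)
make_optimization_iteration_table(metadata, if_exists="extend")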
class MetadataModel(DeclarativeBase):
    __tablename__ = 'metadata'
    __table_args__ = (
        {
            'mysql_charset': 'utf8',
            'mysql_engine': 'InnoDB',
            'mysql_row_format': 'DYNAMIC',
        },
    )

    fingerprint = Column(String(40), primary_key=True, nullable=False)
    url = Column(String(1024), nullable=False)
    depth = Column(Integer, nullable=False)
    created_at = Column(DateTime, nullable=False)
    fetched_at = Column(DateTime, nullable=True)
    status_code = Column(String(20))
    score = Column(Float)
    error = Column(String(128))
    meta = Column(PickleType())
    headers = Column(PickleType())
    cookies = Column(PickleType())
    method = Column(String(6))
    body = Column(String(2048), nullable=True)

    @classmethod
    def query(cls, session):
        return session.query(cls)

    def __repr__(self):
        return '<Metadata:%s (%s)>' % (self.url, self.fingerprint)
class SimFile(Base):
    """File that stores simulation results, along with references to the
    network and sim type used."""
    __table_args__ = {'mysql_engine': 'InnoDB', 'sqlite_autoincrement': True}
    __tablename__ = prefix + '_sim_file'

    id = Column(Integer, primary_key=True)
    file_name = Column(String(255), unique=True, nullable=False)

    network_id = Column(ForeignKey(prefix + '_network.id'))
    network = relationship(Network, foreign_keys=network_id,
                           backref=backref("files", order_by=id))

    sim_type_id = Column(ForeignKey(prefix + '_sim_type.id'))
    sim_type = relationship(SimType, foreign_keys=sim_type_id,
                            backref=backref("files", order_by=id))

    parameters = Column(PickleType(pickler=dill), nullable=True)
    data = Column(PickleType(pickler=cPickle), nullable=True)
    choice = Column(Integer, default=1)

    def get_wrapper(self, **kwargs):
        import simulation.sim_types
        return simulation.sim_types.get_wrapper_from_file(self, **kwargs)

    @property
    def is_imported(self):
        return len(self.runs) > 0
class Page(Base):
    __tablename__ = 'pages'
    __table_args__ = (
        UniqueConstraint('url'),
        {
            'mysql_charset': 'utf8',
            'mysql_engine': 'InnoDB',
            'mysql_row_format': 'DYNAMIC',
        },
    )

    class State:
        NOT_CRAWLED = 'NOT CRAWLED'
        QUEUED = 'QUEUED'
        CRAWLED = 'CRAWLED'
        ERROR = 'ERROR'

    url = Column(String(1024), nullable=False)
    fingerprint = Column(String(40), primary_key=True, nullable=False,
                         index=True, unique=True)
    depth = Column(Integer, nullable=False)
    created_at = Column(DatetimeTimestamp(20), nullable=False)
    status_code = Column(String(20))
    state = Column(String(12))
    error = Column(String(20))
    meta = Column(PickleType())
    headers = Column(PickleType())
    cookies = Column(PickleType())
    method = Column(String(6))

    @classmethod
    def query(cls, session):
        return session.query(cls)

    def __repr__(self):
        return '<Page:%s>' % self.url
class Users(Base):
    __tablename__ = 'users'
    __table_args__ = {"mysql_charset": "utf8"}

    id = Column(Integer, primary_key=True, autoincrement=True)
    openid = Column(Text)
    goods_on_sell = Column(PickleType(), default=[])
    buy_history = Column(PickleType(), default=[])
    buy_car = Column(PickleType(), default=[])
    browse_history = Column(PickleType())
    like_matrix = Column(PickleType(), default=[])
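# Note: plain PickleType columns do not track in-place mutation, so appending
# to a loaded list will not mark the row dirty. A hedged sketch of the usual
# fix; this is an alternative declaration for one of the columns above, not
# the original code:
from sqlalchemy.ext.mutable import MutableList

buy_history = Column(MutableList.as_mutable(PickleType()), default=list)

# With MutableList, `user.buy_history.append(item)` flags the attribute as
# changed, so a later commit re-pickles and persists it. `default=list` also
# avoids handing the same list object to every insert.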
def __init__(self, url=None, engine=None,
             tablename='gosa.common.components.scheduler_jobs',
             metadata=None, pickle_protocol=pickle.HIGHEST_PROTOCOL):
    self.jobs = []
    self.pickle_protocol = pickle_protocol

    if engine:
        self.engine = engine
    elif url:
        self.engine = create_engine(url)
    else:
        raise ValueError('Need either "engine" or "url" defined')

    self.jobs_t = Table(
        tablename, metadata or MetaData(),
        Column('id', Integer,
               Sequence(tablename + '_id_seq', optional=True),
               primary_key=True),
        Column('trigger', PickleType(pickle_protocol), nullable=False),
        Column('func_ref', String(1024), nullable=False),
        Column('args', PickleType(pickle_protocol), nullable=False),
        Column('kwargs', PickleType(pickle_protocol), nullable=False),
        Column('name', Unicode(1024)),
        Column('misfire_grace_time', Integer, nullable=False),
        Column('coalesce', Boolean, nullable=False),
        Column('owner', String(1024), nullable=True),
        Column('tag', String(1024), nullable=True),
        Column('description', String(1024), nullable=True),
        Column('callback_ref', String(1024), nullable=True),
        Column('progress', Integer, nullable=False),
        Column('status', Integer, nullable=False),
        Column('max_runs', Integer),
        Column('max_instances', Integer),
        Column('next_run_time', DateTime, nullable=False),
        Column('runs', BigInteger),
        Column('uuid', String(1024), nullable=False),
        Column('job_type', String(1024), nullable=False),
        Column('callback', String(1024), nullable=True))

    # Retry table creation a few times in case the database is not yet
    # reachable (e.g. during service startup).
    created = False
    retries = 0
    while created is False and retries < 5:
        try:
            self.jobs_t.create(self.engine, True)
            created = True
        except sqlalchemy.exc.OperationalError:
            retries += 1
            time.sleep(5)
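# A minimal construction sketch, assuming this __init__ belongs to a job-store
# class, here called SQLAlchemyJobStore (a hypothetical name; the snippet only
# shows the initializer). Either a ready-made engine or a URL is accepted.
store = SQLAlchemyJobStore(url='sqlite:///scheduler_jobs.db')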
class SimRun(Base):
    """Storage of simulation data in the DB."""
    __tablename__ = prefix + '_sim_run'
    __table_args__ = {'mysql_engine': 'InnoDB'}

    id = Column(Integer, primary_key=True)
    parameters = Column(PickleType(pickler=dill), nullable=True)
    data = Column(PickleType(pickler=cPickle), nullable=True)

    sim_file_id = Column(ForeignKey(prefix + '_sim_file.id'))
    sim_file = relationship(SimFile, backref=backref("runs", order_by=id))

    def get_wrapper(self, **kwargs):
        return self.sim_file.get_wrapper(**kwargs)
class ModelMetaData(Base):
    __tablename__ = 'models_meta_data'

    id = Column(Integer, primary_key=True)
    model_id = Column('model_id', Integer, ForeignKey('models.id'))
    app_id = Column('app_id', Integer, ForeignKey('apps.id'))
    client_id = Column('client_id', Integer, ForeignKey('clients.id'))
    product_id = Column('product_id', Integer, ForeignKey('products.id'))
    version = Column('version', String(6))
    io_template = Column('io_template', PickleType())
    active = Column('active', Boolean())
    model_name = Column('model_name', Text())

    model = relationship("Model")
    app = relationship("App")
    client = relationship("Client")
    product = relationship("Product")

    def __init__(self, model_id, app_id, client_id, version, product_id,
                 io_template, active, model_name):
        self.model_id = model_id
        self.app_id = app_id
        self.client_id = client_id
        self.version = version
        self.product_id = product_id
        self.io_template = io_template
        self.active = active
        self.model_name = model_name
class TaskInstance(Base):
    """
    Task instances store the state of a task instance. This table is the
    authority and single source of truth around what tasks have run and the
    state they are in.

    The SqlAlchemy model deliberately has no foreign key to the task or dag
    model, to allow more control over transactions.

    Database transactions on this table should guard against double triggers
    and any confusion around what task instances are or aren't ready to run,
    even while multiple schedulers may be firing task instances.
    """

    __tablename__ = "task_instance"

    task_id = Column(String(ID_LEN), primary_key=True)
    dag_id = Column(String(ID_LEN), primary_key=True)
    execution_date = Column(UtcDateTime, primary_key=True)
    start_date = Column(UtcDateTime)
    end_date = Column(UtcDateTime)
    duration = Column(Float)
    state = Column(String(20))
    _try_number = Column('try_number', Integer, default=0)
    max_tries = Column(Integer)
    hostname = Column(String(1000))
    unixname = Column(String(1000))
    job_id = Column(Integer)
    pool = Column(String(50), nullable=False)
    queue = Column(String(256))
    priority_weight = Column(Integer)
    operator = Column(String(1000))
    queued_dttm = Column(UtcDateTime)
    pid = Column(Integer)
    executor_config = Column(PickleType(pickler=dill))
class FlashCache(db_local.Entity):
    __tablename__ = "flashcache"
    session = db_local.session

    cache = Column(String(100), primary_key=True)
    hdd_disk = Column(String(100))
    com_disk = Column(String(200))  # combination disk
    status = Column(String(20))
    settings = Column(MutableDict.as_mutable(PickleType(pickler=json)), default={})

    def __repr__(self):
        return "<cache: {}, hdd_disk: {}, com_disk: {}, status: {}, settings: {}>".format(
            self.cache, self.hdd_disk, self.com_disk, self.status, self.settings)

    @classmethod
    def delete_by_cache(cls, cache):
        num = cls.session.query(cls).filter_by(cache=cache).delete()
        cls.session.commit()
        return num

    @classmethod
    def get_active_flashcache(cls):
        return cls.session.query(cls).filter_by(status="ACTIVE").all()

    @classmethod
    def get_all(cls):
        return cls.session.query(cls).all()
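# Because `settings` is wrapped in MutableDict, in-place updates are tracked
# and persisted on commit; a plain PickleType column would silently ignore
# them. A short sketch, assuming a row with a hypothetical key "cache0" exists:
fc = FlashCache.session.query(FlashCache).filter_by(cache="cache0").first()
fc.settings["mode"] = "writeback"   # marks the attribute dirty
FlashCache.session.commit()         # re-serializes and saves the dict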
class TrackerRecord(Base):
    __tablename__ = 'trackerscrape'
    __table_args__ = {'mysql_engine': 'InnoDB', 'useexisting': True}

    id = Column(Integer, primary_key=True, autoincrement=True)
    infohash = Column(Unicode(2048), nullable=False)
    complete = Column(Integer, nullable=False)
    incomplete = Column(Integer, nullable=False)
    downloads = Column(Integer, nullable=False)
    scrapetime = Column(DateTime, nullable=False)
    tracker = Column(Unicode(2048), nullable=False)
    scrape = Column(PickleType(), nullable=False)  # scraped links, stored via PickleType

    def __init__(self, infohash, complete, incomplete, downloads, tracker, scrape):
        self.infohash = infohash
        self.complete = complete
        self.incomplete = incomplete
        self.downloads = downloads
        self.tracker = tracker
        self.scrapetime = datetime.now()
        self.scrape = scrape

    def __repr__(self):
        if self.id is not None:
            return "<TrackerRecord('%d','%s','%d')>" % (self.id, self.infohash, self.downloads)
        else:
            return "<TrackerRecord(None,'%s','%d')>" % (self.infohash, self.downloads)
class User(db.Model):
    __tablename__ = "user"

    uid = Column(String(255), primary_key=True)
    token = Column("token", PickleType(protocol=4))
    connections = relationship("Connection", back_populates="user")

    def get_class(self, classId):
        for c in self.connections:
            if c.classId == classId:
                return c
        return None

    def is_authenticated(self):
        try:
            return self.token and self.token.valid
        except Exception:
            return False

    def is_active(self):
        return True  # TODO: revisit this

    def is_anonymous(self):
        return not self.uid

    def get_id(self):
        return self.uid

    def __repr__(self):
        return "<User %r>" % self.uid
class TaskInstance(Base):  # type: ignore
    """Task instance class."""

    __tablename__ = "task_instance"

    task_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True)
    dag_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True)
    execution_date = Column(UtcDateTime, primary_key=True)
    start_date = Column(UtcDateTime)
    end_date = Column(UtcDateTime)
    duration = Column(Float)
    state = Column(String(20))
    _try_number = Column('try_number', Integer, default=0)
    max_tries = Column(Integer)
    hostname = Column(String(1000))
    unixname = Column(String(1000))
    job_id = Column(Integer)
    pool = Column(String(50), nullable=False)
    pool_slots = Column(Integer, default=1)
    queue = Column(String(256))
    priority_weight = Column(Integer)
    operator = Column(String(1000))
    queued_dttm = Column(UtcDateTime)
    queued_by_job_id = Column(Integer)
    pid = Column(Integer)
    executor_config = Column(PickleType(pickler=dill))
    external_executor_id = Column(String(ID_LEN, **COLLATION_ARGS))
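# `pickler=dill` swaps the stdlib pickle module for dill, which can serialize
# objects stdlib pickle rejects (lambdas, closures, interactively defined
# functions). A small illustration of the difference:
import dill

fn = lambda x: x + 1
dill.loads(dill.dumps(fn))(1)   # -> 2
# pickle.dumps(fn) would raise pickle.PicklingError for a lambda.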
class Stash(Base):
    __tablename__ = 'stash'

    pk = Column(Integer, primary_key=True)
    id = Column(Integer, index=True, unique=True)
    date_added = Column(DateTime, default=datetime.now)
    is_complete = Column(Boolean, default=False)
    message = Column(String)
    protocol = Column(PickleType(pickler=pickler))

    categories = relationship(
        'Category',
        secondary=stash_categories,
        backref='protocols',
    )
    upstream_deps = relationship(
        'Stash',
        secondary=stash_dependencies,
        primaryjoin=(pk == stash_dependencies.c.downstream_pk),
        secondaryjoin=(pk == stash_dependencies.c.upstream_pk),
        backref='downstream_deps',
    )

    def __repr__(self):
        return f"Stash(id={self.id!r})"
class Status(UniqueEpochMixin, ORMBase):
    """
    General purpose calculation status ORM representation for bookkeeping
    purposes.

    The info `dict` contains zero or more of the following fields by
    convention:

        info = {
            'last_response': Last http response for remote workers
        }
    """
    # TODO(damb): Check if UUID is better located at ModelRun
    uuid = Column(GUID, unique=True, index=True, nullable=False)
    state = Column(Enum(EStatus), default=EStatus.PENDING)
    info = Column(PickleType(pickler=json))

    # relation: ModelRun
    run_id = Column(Integer, ForeignKey('modelrun.id'))
    run = relationship('ModelRun', back_populates='status')

    def __init__(self, uuid, state=EStatus.PENDING, info=None):
        self.uuid = uuid
        self.state = state
        self.info = info
        self.starttime = datetime.datetime.utcnow()

    @hybrid_property
    def finished(self):
        return self.state in (EStatus.ERROR, EStatus.COMPLETE)
class DagPickle(Base):
    """
    Dags can originate from different places (user repos, master repo, ...)
    and also get executed in different places (different executors). This
    object represents a version of a DAG and becomes a source of truth for a
    BackfillJob execution. A pickle is a native python serialized object, and
    in this case gets stored in the database for the duration of the job.

    The executors pick up the DagPickle id and read the dag definition from
    the database.
    """
    id = Column(Integer, primary_key=True)
    pickle = Column(PickleType(pickler=dill))
    created_dttm = Column(UtcDateTime, default=timezone.utcnow)
    pickle_hash = Column(Text)

    __tablename__ = "dag_pickle"

    def __init__(self, dag):
        self.dag_id = dag.dag_id
        if hasattr(dag, 'template_env'):
            # Jinja environments hold unpicklable state; drop before pickling.
            dag.template_env = None
        self.pickle_hash = hash(dag)
        self.pickle = dag
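# A hedged round-trip sketch, assuming a `dag` object and a configured
# `session`: store the pickled DAG, then rehydrate it by primary key on the
# executor side. The `pickle` column unpickles transparently on load.
dp = DagPickle(dag)
session.add(dp)
session.commit()

rehydrated_dag = session.query(DagPickle).get(dp.id).pickle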
class Paramset(Base, NameRepr, DictMixin):
    """Parameter set that stores a JSON python dictionary of network parameters."""
    __tablename__ = prefix + '_paramset'
    __table_args__ = {'mysql_engine': 'InnoDB', 'sqlite_autoincrement': True}

    id = Column(Integer, primary_key=True)
    name = Column(String(512), nullable=True)
    parameters = Column(PickleType(pickler=json), nullable=False)

    def __init__(self, **kwargs):
        self.parameters = kwargs

    def __eq__(self, other):
        if not other or (other.__class__ != self.__class__):
            return False
        for attr in self.parameters:
            if attr in [npr.W_0, npr.W_1, npr.W_SIGMA]:
                continue
            if attr in [npr.G_GABA, npr.G_AMPA, npr.G_NMDA, npr.NU_EXT]:
                # Conductances and rates are compared with a small tolerance.
                if np.abs(self.parameters[attr] - other.parameters[attr]) > 1e-4:
                    return False
            elif not self.parameters[attr] == other.parameters[attr]:
                return False
        return True
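# Sketch of the tolerance-based comparison above; the values are hypothetical
# and the npr.* names are the module's parameter-key constants. Conductances
# matching within 1e-4 compare equal even when not bitwise identical.
a, b = Paramset(), Paramset()
a.parameters = {npr.G_AMPA: 1.00000, npr.NU_EXT: 3.0}
b.parameters = {npr.G_AMPA: 1.00004, npr.NU_EXT: 3.0}
assert a == b  # |1.00004 - 1.00000| = 4e-5 <= 1e-4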
class TaskState(Base, LoggingMixin):
    __tablename__ = "task_state"

    task_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True)
    dag_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True)
    execution_date = Column(UtcDateTime, primary_key=True)
    task_state = Column(PickleType(pickler=dill))

    def __init__(self, task_id, dag_id, execution_date, task_state=None):
        super().__init__()
        self.dag_id = dag_id
        self.task_id = task_id
        self.execution_date = execution_date
        self._log = logging.getLogger("airflow.task")
        if task_state:
            self.task_state = task_state

    @staticmethod
    @provide_session
    def get_task_state(dag_id: str, task_id: str, execution_date: datetime,
                       session: Session = None) -> "TaskState":
        return session.query(TaskState).filter(
            TaskState.dag_id == dag_id,
            TaskState.task_id == task_id,
            TaskState.execution_date == execution_date).first()

    @provide_session
    def update_task_state(self, session: Session = None):
        session.merge(self)
        session.commit()
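# Hedged usage sketch; the IDs and payload are hypothetical, and `run_date`
# is assumed to be an existing datetime. Persist arbitrary per-task state via
# dill, then fetch it back by composite key.
ts = TaskState("extract", "etl_dag", run_date, task_state={"cursor": 42})
ts.update_task_state()

restored = TaskState.get_task_state("etl_dag", "extract", run_date)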
class GlobalResult(Base):
    epoch = Column(Integer)
    node = Column(Integer)
    hashv = Column(String(64))
    data = Column(PickleType(pickler=CompressedPickler))
    time = Column(DateTime)
    technique = Column(Enum('seed', 'DifferentialEvolutionAlt',
                            'UniformGreedyMutation', 'NormalGreedyMutation',
                            'RandomNelderMead', name='technique'),
                       default='seed')
    result = Column(Float)
    was_the_best = Column(Boolean)

    @classmethod
    def get(cls, session, hashv, datav):
        # Note: .first() returns None when nothing matches, so no exception
        # handling is needed; `datav` is accepted for interface parity but
        # unused in the lookup.
        session.flush()
        return session.query(GlobalResult).filter_by(hashv=hashv).first()

    @classmethod
    def extract(cls, session, node, epoch):
        # Return all results from this epoch that came from other nodes.
        session.flush()
        return (session.query(GlobalResult)
                .filter(GlobalResult.node != node)
                .filter(GlobalResult.epoch == epoch)
                .all())
class BookItem(Base):
    __tablename__ = 'book_item'

    id = Column(Integer, primary_key=True)
    title = Column(String(100), nullable=False)
    author = Column(PickleType(), nullable=False)
    description = Column(String(1000))
    imgURL = Column(String(1000))
    genre_id = Column(Integer, ForeignKey('genre.id'), nullable=False)
    genre = relationship(Genre)
    user_id = Column(Integer, ForeignKey('user.id'), nullable=False)
    user = relationship(User)

    # This property lets us return the record in a JSON-serializable format.
    @property
    def serialize(self):
        return {
            'title': self.title,
            'author': self.author,
            'description': self.description,
            'image_URL': self.imgURL,
            'id': self.id,
            'genre': self.genre.name,
        }
class ORMJob(Base):
    """
    The DB representation of a common.classes.Job object, storing the
    relevant details needed by the job storage backend.
    """
    __tablename__ = "jobs"

    # The hex UUID given to the job upon first creation.
    id = Column(String, primary_key=True, autoincrement=False)

    # The job's state. Stored here for easier querying of the job's state.
    state = Column(String, index=True)

    # The job's order in the entire global queue of jobs.
    queue_order = Column(Integer, autoincrement=True)

    # The queue name passed to the client when the job is scheduled.
    queue = Column(String, index=True)

    # The original Job object, pickled here so we can easily access it.
    obj = Column(PickleType(protocol=OPTIONS["Python"]["PICKLE_PROTOCOL"]))

    time_created = Column(DateTime(timezone=True), server_default=func.now())
    time_updated = Column(DateTime(timezone=True), server_onupdate=func.now())
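# A hedged sketch of reading a job back, assuming a configured `session` and
# a `job_id` hex UUID string: the `obj` column unpickles transparently on
# load, so no manual deserialization is needed.
row = session.query(ORMJob).get(job_id)
job = row.obj   # already a Job instance, not bytes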
class DbConfigTable(Base):
    __tablename__ = "config_data"

    unique_id = Column(Integer, primary_key=True, autoincrement=True)
    timestamp = Column(DateTime)
    config = Column(PickleType(pickler=pickle))

    master_table_id = Column(Integer, ForeignKey("master.unique_id"))
    parent = relationship("DBMasterTable", back_populates="children_config")

    @classmethod
    def from_sqlite(cls, row):
        this = DbConfigTable()
        this.unique_id = row["unique_id"]
        this.timestamp = row["timestamp"]
        this.config = row["config"]
        this.master_table_id = row["master_table_id"]
        return this

    def __repr__(self):
        return (f"<DbConfigTable(unique_id={self.unique_id})"
                f", timestamp={self.timestamp} "
                f", master_table_id={self.master_table_id})>")

    @staticmethod
    def update(engine):
        logger.info("DbConfigTable : update called")

    @staticmethod
    def requires_update(engine):
        return False
class ScheduledJob(Base):
    """
    The DB representation of a scheduled job, storing the relevant details
    needed to schedule jobs.
    """
    __tablename__ = "scheduledjobs"

    # The hex UUID given to the job upon first creation.
    id = Column(String, primary_key=True, autoincrement=False)

    # Repeat interval in seconds.
    interval = Column(Integer, default=0)

    # Number of times to repeat - None means repeat forever.
    repeat = Column(Integer, nullable=True)

    # The app name passed to the client when the job is scheduled.
    queue = Column(String, index=True)

    # The original Job object, pickled here so we can easily access it.
    obj = Column(PickleType(protocol=OPTIONS["Python"]["PICKLE_PROTOCOL"]))

    scheduled_time = Column(DateTime())

    __table_args__ = (Index("queue__scheduled_time", "queue", "scheduled_time"),)
class CachedResult(Base):
    """Class for storing one tuning result."""

    # Name of the associated SQL table for storing tuning results.
    __tablename__ = "results"

    id = Column(Integer, primary_key=True)               # Unique number of object
    cfg = Column(PickleType(pickler=CompressedPickler))  # Configuration
    cfg_hash = Column(String(64))  # Hash of configuration for quick lookup
    state = Column(String(7))      # Result code
    msg = Column(String(128))      # Human-readable result message
    presynth_time = Column(Float, default=float('inf'))  # Time needed for presynthesis
    synth_time = Column(Float, default=float('inf'))     # Time needed for synthesis
    impl_time = Column(Float, default=float('inf'))      # Time needed for implementation
    run_time = Column(Float, default=float('inf'))       # Time needed for running application
    accuracy = Column(Float)       # Accuracy of application
    energy = Column(Float)         # Energy of application
    size = Column(Float)           # Size of application
    confidence = Column(Float)     # Confidence of result
class Event(Base):
    __tablename__ = 'events'

    id = Column(Integer, primary_key=True)
    event_name = Column(String(100), nullable=False)
    event_date = Column(String(100), nullable=False)
    event_hour = Column(String(100), nullable=True)
    event_location = Column(String(100), nullable=True)
    event_description = Column(String(1000), nullable=True)
    event_chat = Column(PickleType(), nullable=False)

    def __init__(self, event_name=None, event_date=None, event_hour=None,
                 event_location=None, event_description=None, event_chat=None):
        self.event_name = event_name
        self.event_date = event_date
        self.event_hour = event_hour
        self.event_location = event_location
        self.event_description = event_description
        self.event_chat = event_chat

    def inform(self):
        return self.event_name + \
            '\n' + self.event_date + \
            '\n' + self.event_hour + \
            '\n' + self.event_location + \
            '\n' + self.event_description

    def dump(self):
        return 'event_name:' + self.event_name + \
            '\nevent_date:' + self.event_date + \
            '\nevent_hour:' + self.event_hour + \
            '\nevent_location:' + self.event_location + \
            '\nevent_description:' + self.event_description + \
            '\nevent_chat:' + self.event_chat.dump()
def define_tables(cls, metadata):
    Table(
        "data", metadata,
        Column('id', Integer, primary_key=True,
               test_needs_autoincrement=True),
        Column('data', PickleType(comparator=operator.eq)))
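# `comparator=operator.eq` makes PickleType detect changes by comparing the
# old and new Python values with `==`. Without it, older SQLAlchemy could fall
# back to comparing pickled byte streams for objects lacking `__eq__`, and
# pickles can differ even for equal objects. A minimal illustration:
import operator
from sqlalchemy import PickleType

pt = PickleType(comparator=operator.eq)
pt.compare_values({'a': 1, 'b': 2}, {'b': 2, 'a': 1})  # True: equal via ==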
def orm_load_basedata(man):
    man.basedata_table = Table(
        'basedata', man.metadata,
        Column('basedata_id', Integer, primary_key=True),
        Column('type', String(30), nullable=False),
        Column('meta', PickleType(comparator=operator.eq)),
        Column('history', String(4096)))
    # man.metadata.create_all()

    mapper(BaseData, man.basedata_table,
           polymorphic_on=man.basedata_table.c.type,
           polymorphic_identity='basedata')
class ImageLocation(BASE, ModelBase):
    """Represents an image location in the datastore."""
    __tablename__ = 'image_locations'

    id = Column(Integer, primary_key=True, nullable=False)
    image_id = Column(String(36), ForeignKey('images.id'), nullable=False)
    image = relationship(Image, backref=backref('locations'))
    value = Column(Text(), nullable=False)
    meta_data = Column(PickleType(), default={})
def __init__(self, mutable=True, comparator=None):
    # __init__ must return None, so call the base initializer directly
    # rather than returning its result.
    PickleType.__init__(self, pickler=self.JsonPickler,
                        mutable=mutable, comparator=comparator)
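# A hedged sketch of the JsonPickler shim this subclass presumably relies on:
# PickleType only requires an object exposing pickle-compatible dumps/loads,
# so a thin adapter around the json module satisfies the interface. The class
# name and bodies below are an illustration, not the original implementation,
# and the `mutable` flag targets the legacy SQLAlchemy API used above.
import json

class JsonType(PickleType):

    class JsonPickler:
        @staticmethod
        def dumps(value, protocol=None):
            # `protocol` is accepted only for pickle API compatibility.
            return json.dumps(value)

        @staticmethod
        def loads(value):
            return json.loads(value)

    def __init__(self, mutable=True, comparator=None):
        PickleType.__init__(self, pickler=self.JsonPickler,
                            mutable=mutable, comparator=comparator)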