class BadpennyJob(db.declarative_base('relengapi')):
    # A single execution (run) of a BadpennyTask, recording lifecycle
    # timestamps and the final outcome.
    __tablename__ = 'relengapi_badpenny_jobs'

    id = sa.Column(sa.Integer, primary_key=True)

    task_id = sa.Column(sa.Integer, sa.ForeignKey('relengapi_badpenny_tasks.id'),
                        nullable=False)
    task = sa.orm.relationship('BadpennyTask')

    created_at = sa.Column(db.UTCDateTime(timezone=True), nullable=False)
    # started_at/completed_at stay NULL until the job reaches that stage
    started_at = sa.Column(db.UTCDateTime(timezone=True), nullable=True)
    completed_at = sa.Column(db.UTCDateTime(timezone=True), nullable=True)
    successful = sa.Column(sa.Boolean())

    # note that there's never more than one log due to the unique id, but
    # SQLAlchemy still models it as a list
    logs = sa.orm.relationship('BadpennyJobLog')

    def to_jsonjob(self):
        """Convert this job row to the ``rest.BadpennyJob`` REST type."""
        return rest.BadpennyJob(id=self.id,
                                task_name=self.task.name,
                                created_at=self.created_at,
                                started_at=self.started_at,
                                completed_at=self.completed_at,
                                successful=self.successful)
class Project(db.declarative_base('mapper')):

    """Object-relational mapping between python class Project and
    database table "projects"
    """
    __tablename__ = 'projects'

    id = sa.Column(sa.Integer, primary_key=True)
    # unique, human-readable project name
    name = sa.Column(sa.String(255), nullable=False, unique=True)
class ManualActions(db.declarative_base('relengapi')):
    # A manual step to be performed as part of processing a loan, tracking
    # when it was raised and when (and by whom) it was completed.
    __tablename__ = _tbl_prefix + 'manualactions'

    id = sa.Column(sa.Integer, primary_key=True)
    loan_id = sa.Column(sa.Integer, sa.ForeignKey(_tbl_prefix + 'loans.id'),
                        nullable=False)
    # set automatically when the action row is created
    timestamp_start = sa.Column(db.UTCDateTime(timezone=True),
                                default=tz.utcnow, nullable=False)
    # remains NULL until the action is completed
    timestamp_complete = sa.Column(db.UTCDateTime(timezone=True),
                                   default=None, nullable=True)
    complete_by = sa.Column(sa.String(255), nullable=True)
    msg = sa.Column(sa.Text, nullable=False)

    # Backrefs
    #
    # for_loan (Loan this applies to)

    __table_args__ = (Index("loan_id_idx", "loan_id"), )

    def to_json(self):
        # plain-dict form; also reused as kwargs by to_wsme()
        return dict(id=self.id, loan_id=self.loan_id,
                    timestamp_start=self.timestamp_start,
                    timestamp_complete=self.timestamp_complete,
                    complete_by=self.complete_by,
                    msg=self.msg)

    def to_wsme(self):
        """Convert to the WSME REST type ``rest.ManualAction``."""
        return rest.ManualAction(**self.to_json())
class DbLog(db.declarative_base('relengapi')):
    # Log of tree status changes.  Tags are stored JSON-encoded in the
    # "tags" Text column and decoded on access via the ``tags`` property.
    __tablename__ = 'treestatus_log'

    id = Column(Integer, primary_key=True)
    tree = Column(String(32), nullable=False, index=True)
    when = Column(db.UTCDateTime, nullable=False, index=True)
    who = Column(Text, nullable=False)
    status = Column(String(64), nullable=False)
    reason = Column(Text, nullable=False)
    _tags = Column("tags", Text, nullable=False)

    def __init__(self, tags=None, **kwargs):
        # accept a ``tags`` kwarg (a JSON-serializable value) and store its
        # encoded form in the underlying ``_tags`` column
        if tags is not None:
            kwargs['_tags'] = json.dumps(tags)
        super(DbLog, self).__init__(**kwargs)

    @hybrid_property
    def tags(self):
        # decoded on every access; ``_tags`` is assumed to hold valid JSON
        return json.loads(self._tags)

    def to_json(self):
        """Convert this log row to a ``types.JsonTreeLog``."""
        return types.JsonTreeLog(
            tree=self.tree,
            when=self.when,
            who=self.who,
            status=self.status,
            reason=self.reason,
            tags=self.tags,
        )
class Loans(db.declarative_base('relengapi')):
    """A loan, linking the borrowing human to the loaned machine."""
    __tablename__ = _tbl_prefix + 'loans'

    id = sa.Column(sa.Integer, primary_key=True)
    status = sa.Column(sa.String(50), nullable=False)
    bug_id = sa.Column(sa.Integer, nullable=True)
    human_id = sa.Column(sa.Integer, sa.ForeignKey(_tbl_prefix + 'humans.id'),
                         nullable=False)
    # nullable: a loan may not have a machine assigned yet
    machine_id = sa.Column(sa.Integer,
                           sa.ForeignKey(_tbl_prefix + 'machines.id'),
                           nullable=True)
    history = relationship("History", backref="for_loan")
    manual_actions = relationship("ManualActions", backref="for_loan")

    # Backrefs
    #
    # human (Humans)
    #
    # machine (Machines)

    def to_json(self, sub_meth="to_json"):
        """Serialize this loan to a dict.

        ``sub_meth`` names the serialization method invoked on the related
        human/machine objects ("to_json" or "to_wsme"), so one code path
        serves both representations.
        """
        # machine_id may be NULL for loans without an assigned machine;
        # serialize machine as None in that case.  (Previously the whole
        # dict was duplicated in each branch.)
        machine = (getattr(self.machine, sub_meth)()
                   if self.machine_id else None)
        return dict(id=self.id,
                    status=self.status,
                    bug_id=self.bug_id,
                    human=getattr(self.human, sub_meth)(),
                    machine=machine)

    def to_wsme(self):
        """Convert to the WSME REST type ``rest.Loan``."""
        return rest.Loan(**self.to_json(sub_meth="to_wsme"))
class File(db.declarative_base('tooltool')):

    """A file, identified by size and digest.  The server may have zero
    or many copies of a file."""

    __tablename__ = 'tooltool_files'

    id = sa.Column(sa.Integer, primary_key=True)
    size = sa.Column(sa.Integer, nullable=False)
    # hex sha512 digest; uniquely identifies the file's content
    sha512 = sa.Column(sa.String(128), unique=True, nullable=False)
    visibility = sa.Column(sa.Enum('public', 'internal'), nullable=False)
    instances = sa.orm.relationship('FileInstance', backref='file')

    # note that changes to this dictionary will not be reflected to the DB;
    # add or delete BatchFile instances directly instead.
    @property
    def batches(self):
        # map filename -> Batch via the BatchFile association objects
        # (``_batches`` is the backref declared on BatchFile.file)
        return {bf.filename: bf.batch for bf in self._batches}

    def to_json(self, include_instances=False):
        """Convert to a ``types.File``; optionally list instance regions."""
        # the digest algorithm is fixed at sha512
        rv = types.File(size=self.size, digest=self.sha512,
                        algorithm='sha512', visibility=self.visibility)
        if include_instances:
            rv.instances = [i.region for i in self.instances]
        return rv
class Token(db.declarative_base('relengapi')):
    # An authentication token.  Permissions are stored as a comma-separated
    # string of permission names in the ``_permissions`` column.
    __tablename__ = 'auth_tokens'

    def __init__(self, permissions=None, **kwargs):
        # accept a ``permissions`` kwarg (iterable of permission objects)
        # and serialize it into the ``_permissions`` column
        if permissions is not None:
            kwargs['_permissions'] = ','.join((str(a) for a in permissions))
        super(Token, self).__init__(**kwargs)

    id = sa.Column(sa.Integer, primary_key=True)
    typ = sa.Column(sa.String(4), nullable=False)
    description = sa.Column(sa.Text, nullable=False)
    user = sa.Column(sa.Text, nullable=True)
    disabled = sa.Column(sa.Boolean, nullable=False)
    _permissions = sa.Column(sa.Text, nullable=False)

    def to_jsontoken(self):
        """Convert to a ``types.JsonToken``; ``user`` is set only if present."""
        tok = types.JsonToken(id=self.id, typ=self.typ,
                              description=self.description,
                              permissions=[str(a) for a in self.permissions],
                              disabled=self.disabled)
        if self.user:
            tok.user = self.user
        return tok

    @property
    def permissions(self):
        # resolve each stored name to a permission object (``p.get`` may
        # return a falsy value for unknown names)
        token_permissions = [p.get(permissionstr)
                             for permissionstr in self._permissions.split(',')]
        # silently ignore any nonexistent permissions; this allows us to remove unused
        # permissions without causing tokens permitting those permissions to fail
        # completely
        return [a for a in token_permissions if a]
class DbStatusChangeTree(db.declarative_base('relengapi')):
    # One tree affected by a DbStatusChange stack entry, remembering the
    # tree's prior state so the change can be reverted.
    __tablename__ = 'treestatus_change_trees'

    id = Column(Integer, primary_key=True)
    stack_id = Column(Integer, ForeignKey(DbStatusChange.id), index=True)
    tree = Column(String(32), nullable=False, index=True)
    # state of the tree before the change was applied
    last_state = Column(Text, nullable=False)
    stack = relation(DbStatusChange, backref='trees')
class ArchiverTask(db.declarative_base('relengapi')):
    # Tracks an asynchronous archiving task that copies ``src_url`` to the
    # given S3 key.
    __tablename__ = 'archiver_tasks'

    id = sa.Column(sa.Integer, primary_key=True)
    # external task identifier (unique per task)
    task_id = sa.Column(sa.String(100), nullable=False, unique=True)
    created_at = sa.Column(db.UTCDateTime(timezone=True), nullable=False)
    # deadline after which a still-pending task is considered expired
    pending_expires_at = sa.Column(db.UTCDateTime(timezone=True), nullable=False)
    state = sa.Column(sa.String(50))
    src_url = sa.Column(sa.String(200), nullable=False)
    s3_key = sa.Column(sa.String(200), nullable=False)
class BadpennyJobLog(db.declarative_base('relengapi')):
    # Log text for a BadpennyJob.  The primary key is also a foreign key to
    # the job, so there is at most one log row per job.
    __tablename__ = 'relengapi_badpenny_job_logs'

    id = sa.Column(sa.Integer, sa.ForeignKey('relengapi_badpenny_jobs.id'),
                   primary_key=True)

    # 'logs' is free-form, hopefully brief, log text
    content = sa.Column(sa.Text())
class FileInstance(db.declarative_base('tooltool')):

    """A verified instance of a file in a single region."""

    __tablename__ = 'tooltool_file_instances'

    # composite primary key: a file has at most one instance per region
    file_id = sa.Column(sa.Integer, sa.ForeignKey('tooltool_files.id'),
                        primary_key=True)
    region = sa.Column(sa.Enum(*allowed_regions), primary_key=True)
class BatchFile(db.declarative_base('tooltool')):

    """An association of upload batches to files, with filenames."""

    # NOTE: declared on the 'tooltool' metadata (previously 'relengapi') so
    # that the foreign keys to tooltool_files/tooltool_batches resolve in the
    # same database as the File and Batch models they reference.
    __tablename__ = 'tooltool_batch_files'

    # composite primary key over (file, batch)
    file_id = sa.Column(sa.Integer, sa.ForeignKey('tooltool_files.id'),
                        primary_key=True)
    file = sa.orm.relationship("File", backref="_batches")

    batch_id = sa.Column(sa.Integer, sa.ForeignKey('tooltool_batches.id'),
                         primary_key=True)
    batch = sa.orm.relationship("Batch", backref="_files")

    # name under which the file appeared in the batch
    filename = sa.Column(sa.Text, nullable=False)
class Uniqueness_Table(db.declarative_base('test_db'), db.UniqueMixin):
    """Test model exercising ``db.UniqueMixin``'s uniqueness hooks."""
    __tablename__ = 'uniqueness_test'
    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.String(18), nullable=False, unique=True)
    other = sa.Column(sa.String(18), nullable=True)

    @classmethod
    def unique_filter(cls, query, name, *args, **kwargs):
        # UniqueMixin hook: narrow the query to the row with this name
        return query.filter(Uniqueness_Table.name == name)

    @classmethod
    def unique_hash(cls, name, *args, **kwargs):
        # UniqueMixin hook: rows are unique by name alone
        return name
class DbTree(db.declarative_base('relengapi')):
    """A tree with its current status, reason, and message of the day."""
    __tablename__ = 'treestatus_trees'
    tree = Column(String(32), primary_key=True)
    status = Column(String(64), default="open", nullable=False)
    reason = Column(Text, default="", nullable=False)
    message_of_the_day = Column(Text, default="", nullable=False)

    def to_json(self):
        """Convert this row to a ``types.JsonTree``."""
        fields = dict(
            tree=self.tree,
            status=self.status,
            reason=self.reason,
            message_of_the_day=self.message_of_the_day,
        )
        return types.JsonTree(**fields)
class PendingUpload(db.declarative_base('tooltool')):

    """Files for which upload URLs have been generated, but which haven't
    yet been uploaded.  This table is used to poll for completed uploads,
    and to prevent trusting files for which there is an outstanding signed
    upload URL."""

    __tablename__ = 'tooltool_pending_upload'

    file_id = sa.Column(sa.Integer, sa.ForeignKey('tooltool_files.id'),
                        nullable=False, primary_key=True)
    # expiry time of the outstanding signed upload URL
    expires = sa.Column(db.UTCDateTime, index=True, nullable=False)
    region = sa.Column(sa.Enum(*allowed_regions), nullable=False)
    file = sa.orm.relationship('File', backref='pending_uploads')
class Hash(db.declarative_base('mapper')):

    """Object-relational mapping between python class Hash and
    database table "hashes"
    """
    __tablename__ = 'hashes'

    hg_changeset = sa.Column(sa.String(40), nullable=False)
    git_commit = sa.Column(sa.String(40), nullable=False)
    project_id = sa.Column(sa.Integer, sa.ForeignKey('projects.id'),
                           nullable=False)
    project = orm.relationship(Project, primaryjoin=(project_id == Project.id))
    # project = orm.relationship(Project, backref=orm.backref('hashes', order_by=id))
    # integer timestamp (presumably seconds since epoch -- confirm with callers)
    date_added = sa.Column(sa.Integer, nullable=False)

    # convenience accessor for the related project's name
    project_name = property(lambda self: self.project.name)

    def as_json(self):
        # JSON response containing the four mapping fields
        return jsonify(
            **{
                n: getattr(self, n)
                for n in ('git_commit', 'hg_changeset', 'date_added', 'project_name')
            })

    __table_args__ = (
        # TODO: (needs verification) all queries specifying a hash are for
        # (project, hash), so these aren't used
        sa.Index('hg_changeset', 'hg_changeset'),
        sa.Index('git_commit', 'git_commit'),
        # TODO: this index is a prefix of others and will never be used
        sa.Index('project_id', 'project_id'),
        sa.Index('project_id__date_added', 'project_id', 'date_added'),
        # each (project, changeset/commit) pair occurs at most once
        sa.Index('project_id__hg_changeset', 'project_id', 'hg_changeset',
                 unique=True),
        sa.Index('project_id__git_commit', 'project_id', 'git_commit',
                 unique=True),
    )

    __mapper_args__ = {
        # tell the SQLAlchemy ORM about one of the unique indexes; it doesn't
        # matter which
        'primary_key': [project_id, hg_changeset],
    }
class DbStatusChange(db.declarative_base('relengapi')):
    # An entry in the stack of tree status changes; the affected trees are
    # reached via the ``trees`` backref declared on DbStatusChangeTree.
    __tablename__ = 'treestatus_changes'

    id = Column(Integer, primary_key=True)
    who = Column(Text, nullable=False)
    reason = Column(Text, nullable=False)
    when = Column(db.UTCDateTime, nullable=False, index=True)
    status = Column(String(64), nullable=False)

    def to_json(self):
        """Convert to a ``types.JsonStateChange`` including affected trees."""
        return types.JsonStateChange(
            trees=[t.tree for t in self.trees],
            status=self.status,
            when=self.when,
            who=self.who,
            reason=self.reason,
            id=self.id,
        )
class BadpennyTask(db.declarative_base('relengapi'), db.UniqueMixin):
    # A named periodic task; unique by name via db.UniqueMixin.
    __tablename__ = 'relengapi_badpenny_tasks'

    id = sa.Column(sa.Integer, primary_key=True)
    name = sa.Column(sa.Text, nullable=False)

    # all other data about a task is determined from runtime information (that
    # is, from the decorated function itself)

    jobs = sa.orm.relationship('BadpennyJob')

    @property
    def last_success(self):
        """Tri-state result of the most recent job: 1 on success, 0 on
        failure, -1 when the task has never run."""
        # TODO: get this value as part of the DB query for the task
        job = BadpennyJob.query. \
            filter(BadpennyJob.task_id == self.id). \
            order_by(sa.desc(BadpennyJob.created_at)). \
            limit(1). \
            first()
        if not job:
            return -1
        elif job.successful:
            return 1
        else:
            return 0

    @classmethod
    def unique_filter(cls, query, name):
        # UniqueMixin hook: narrow the query to the task with this name
        return query.filter(BadpennyTask.name == name)

    @classmethod
    def unique_hash(cls, name):
        # UniqueMixin hook: tasks are unique by name
        return name

    def to_jsontask(self, with_jobs=False):
        """Build a ``rest.BadpennyTask``; a task is "active" when a runtime
        task of the same name is registered with badpenny."""
        runtime_task = badpenny.Task.get(self.name)
        task = rest.BadpennyTask(name=self.name,
                                 last_success=self.last_success,
                                 active=bool(runtime_task))
        # schedule is runtime information, available only for active tasks
        if runtime_task:
            task.schedule = runtime_task.schedule
        if with_jobs:
            task.jobs = [j.to_jsonjob() for j in self.jobs]
        return task
class Machines(db.declarative_base('relengapi'), db.UniqueMixin):
    """A loanable machine, unique by fully-qualified domain name."""
    __tablename__ = _tbl_prefix + 'machines'
    id = sa.Column(sa.Integer, primary_key=True)
    fqdn = sa.Column(sa.String(255), nullable=False, unique=True)
    ipaddress = sa.Column(sa.String(18), unique=True)
    loan = relationship("Loans", backref="machine")

    @classmethod
    def unique_hash(cls, fqdn, *args, **kwargs):
        # UniqueMixin hook: machines are keyed by FQDN
        return fqdn

    @classmethod
    def unique_filter(cls, query, fqdn, *args, **kwargs):
        # UniqueMixin hook: narrow the query to the machine with this FQDN
        return query.filter(Machines.fqdn == fqdn)

    def to_json(self):
        """Serialize to a plain dict."""
        return {'id': self.id,
                'fqdn': self.fqdn,
                'ipaddress': self.ipaddress}

    def to_wsme(self):
        """Convert to the WSME REST type ``rest.Machine``."""
        return rest.Machine(**self.to_json())
class Humans(db.declarative_base('relengapi'), db.UniqueMixin):
    """A person who may borrow machines, unique by LDAP address."""
    __tablename__ = _tbl_prefix + 'humans'
    id = sa.Column(sa.Integer, primary_key=True)
    ldap = sa.Column(sa.String(255), nullable=False, unique=True)
    bugzilla = sa.Column(sa.String(255), nullable=False)
    loans = relationship("Loans", backref="human")

    @classmethod
    def unique_hash(cls, ldap, *args, **kwargs):
        # UniqueMixin hook: humans are keyed by LDAP address
        return ldap

    @classmethod
    def unique_filter(cls, query, ldap, *args, **kwargs):
        # UniqueMixin hook: narrow the query to the human with this address
        return query.filter(Humans.ldap == ldap)

    def to_json(self):
        """Serialize to a plain dict (note the *_email key names)."""
        return {'id': self.id,
                'ldap_email': self.ldap,
                'bugzilla_email': self.bugzilla}

    def to_wsme(self):
        """Convert to the WSME REST type ``rest.Human``."""
        return rest.Human(**self.to_json())
class History(db.declarative_base('relengapi')):
    """An audit-log entry recording what happened to a loan, and when."""
    __tablename__ = _tbl_prefix + 'history'
    id = sa.Column(sa.Integer, primary_key=True)
    loan_id = sa.Column(sa.Integer,
                        sa.ForeignKey(_tbl_prefix + 'loans.id'),
                        nullable=False)
    # defaults to the time the entry is created
    timestamp = sa.Column(db.UTCDateTime(timezone=True),
                          default=tz.utcnow, nullable=False)
    msg = sa.Column(sa.Text, nullable=False)

    # Backrefs
    #
    # for_loan (Loans)

    def to_json(self):
        """Serialize to a plain dict."""
        return {'id': self.id,
                'loan_id': self.loan_id,
                'timestamp': self.timestamp,
                'msg': self.msg}

    def to_wsme(self):
        """Convert to the WSME REST type ``rest.HistoryEntry``."""
        return rest.HistoryEntry(**self.to_json())
class Batch(db.declarative_base('tooltool')):

    """Upload batches, with batch metadata, linked to the uploaded files."""

    __tablename__ = 'tooltool_batches'

    id = sa.Column(sa.Integer, primary_key=True)
    uploaded = sa.Column(db.UTCDateTime, index=True, nullable=False)
    author = sa.Column(sa.Text, nullable=False)
    message = sa.Column(sa.Text, nullable=False)

    # note that changes to this dictionary will not be reflected to the DB;
    # add or delete BatchFile instances directly instead.
    @property
    def files(self):
        # map filename -> File via the BatchFile association objects
        # (``_files`` is the backref declared on BatchFile.batch)
        return {bf.filename: bf.file for bf in self._files}

    def to_json(self):
        """Convert to a ``types.UploadBatch``, including the batch's files."""
        # use items() rather than the Python-2-only iteritems() so this
        # works under both Python 2 and 3
        return types.UploadBatch(
            id=self.id, uploaded=self.uploaded,
            author=self.author, message=self.message,
            files={n: f.to_json() for n, f in self.files.items()})
class ClobbererBase(db.declarative_base(DB_DECLARATIVE_BASE)):
    # Abstract base shared by clobberer models: contributes the common
    # id/branch/builddir columns but maps no table of its own.
    __abstract__ = True

    id = sa.Column(sa.Integer, primary_key=True)
    branch = sa.Column(sa.String(50), index=True)
    builddir = sa.Column(sa.String(100), index=True)
class DevTable(db.declarative_base('test_db')):
    # Minimal model used in development/testing of UTCDateTime handling.
    __tablename__ = 'users'
    id = sa.Column(sa.Integer, primary_key=True)
    date = sa.Column(db.UTCDateTime(timezone=True))