def setup_thd(conn):
    """Build the pre-migration schema and seed one 'buildslaves' row."""
    metadata = sa.MetaData()
    metadata.bind = conn

    # Minimal stand-ins for tables the migration only references.
    sautils.Table(
        'builder_masters', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ..
    ).create()
    sautils.Table(
        'masters', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ..
    ).create()

    slaves_tbl = sautils.Table(
        "buildslaves", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("name", sa.String(256), nullable=False),
        sa.Column("info", JsonObject, nullable=False),
    )
    slaves_tbl.create()

    # One pre-existing worker row that the upgrade must carry through.
    conn.execute(slaves_tbl.insert(),
                 {'id': 29, 'name': u'windows', 'info': {}})
def upgrade(migrate_engine):
    """Create the 'objects' and 'object_state' tables."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    objects_tbl = sautils.Table(
        "objects", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column('name', sa.String(128), nullable=False),
        sa.Column('class_name', sa.String(128), nullable=False),
        # a (name, class_name) pair identifies exactly one object
        sa.UniqueConstraint('name', 'class_name', name='object_identity'),
    )
    objects_tbl.create()

    state_tbl = sautils.Table(
        "object_state", metadata,
        sa.Column("objectid", sa.Integer, sa.ForeignKey('objects.id'),
                  nullable=False),
        sa.Column("name", sa.String(length=256), nullable=False),
        sa.Column("value_json", sa.Text, nullable=False),
        # each object has at most one value per state name
        sa.UniqueConstraint('objectid', 'name', name='name_per_object'),
    )
    state_tbl.create()
def create_tables_thd(self, conn):
    """Create the pre-migration 'sourcestamps' and 'changes' tables."""
    metadata = sa.MetaData()
    metadata.bind = conn

    # 'changes' carries a FK to 'sourcestamps', so that table must exist first.
    sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
    ).create()

    sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(255), nullable=False),
        # nullability of 'committer' depends on which schema revision the
        # test is simulating
        sa.Column('committer', sa.String(255),
                  nullable=self.PREVIOUS_NULLABLE),
        sa.Column('comments', sa.Text, nullable=False),
        sa.Column('branch', sa.String(255)),
        sa.Column('revision', sa.String(255)),
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(255)),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('sourcestampid', sa.Integer,
                  sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'),
                  nullable=False),
        # self-referential: points at the change this one was merged from
        sa.Column('parent_changeids', sa.Integer,
                  sa.ForeignKey('changes.changeid', ondelete='SET NULL'),
                  nullable=True),
    ).create()
def verify_thd(conn):
    """Verify the migrated buildslave tables: schema, data, and the two
    new (empty) association tables."""
    metadata = sa.MetaData()
    metadata.bind = conn
    buildslaves = sautils.Table('buildslaves', metadata, autoload=True)
    configured_buildslaves = sautils.Table('configured_buildslaves',
                                           metadata, autoload=True)
    connected_buildslaves = sautils.Table('connected_buildslaves',
                                          metadata, autoload=True)

    q = sa.select([buildslaves])
    # BUG FIX: on Python 3, map() returns a lazy iterator, which never
    # compares equal to a list, so the original assertion could not pass.
    # Materialize the rows into a list of dicts first.
    self.assertEqual([dict(row) for row in conn.execute(q).fetchall()], [
        # (the info does not get de-JSON'd due to use of autoload)
        {'id': 29, 'name': u'windows', 'info': '{}'}])

    # check that the name column was resized
    self.assertEqual(buildslaves.c.name.type.length, 50)

    # both association tables exist but start out empty
    q = sa.select([configured_buildslaves.c.buildermasterid,
                   configured_buildslaves.c.buildslaveid])
    self.assertEqual(conn.execute(q).fetchall(), [])

    q = sa.select([connected_buildslaves.c.masterid,
                   connected_buildslaves.c.buildslaveid])
    self.assertEqual(conn.execute(q).fetchall(), [])
def upgrade(migrate_engine):
    """Recreate builder/worker association FKs with ON DELETE CASCADE."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    builders = sautils.Table('builders', metadata, autoload=True)
    masters = sautils.Table('masters', metadata, autoload=True)
    workers = sautils.Table('workers', metadata, autoload=True)
    builder_masters = sautils.Table('builder_masters', metadata,
                                    autoload=True)
    configured_workers = sautils.Table('configured_workers', metadata,
                                       autoload=True)

    cascade_fks = (
        ForeignKeyConstraint([builder_masters.c.builderid],
                             [builders.c.id], ondelete='CASCADE'),
        ForeignKeyConstraint([builder_masters.c.masterid],
                             [masters.c.id], ondelete='CASCADE'),
        ForeignKeyConstraint([configured_workers.c.buildermasterid],
                             [builder_masters.c.id], ondelete='CASCADE'),
        ForeignKeyConstraint([configured_workers.c.workerid],
                             [workers.c.id], ondelete='CASCADE'),
    )
    # drop each existing constraint and recreate it with the CASCADE rule
    for constraint in cascade_fks:
        constraint.drop()
        constraint.create()
def verify_thd(conn):
    """Verify that claims were migrated onto the two master rows."""
    metadata = sa.MetaData()
    metadata.bind = conn
    masters = sautils.Table('masters', metadata, autoload=True)
    buildrequest_claims = sautils.Table('buildrequest_claims', metadata,
                                        autoload=True)

    # two masters (although we don't know which ids they will get)
    rows = conn.execute(sa.select([masters.c.id, masters.c.name])).fetchall()
    masterids = {row.name: row.id for row in rows}
    self.assertEqual(sorted(masterids.keys()),
                     ['master:/one', 'master:/two'])
    mOne = masterids['master:/one']
    mTwo = masterids['master:/two']

    res = conn.execute(buildrequest_claims.select())
    claims = sorted((row.brid, row.masterid, row.claimed_at)
                    for row in res.fetchall())
    self.assertEqual(claims, [
        (20, mOne, 1349011179),
        (21, mTwo, 1349022279),
        (22, mTwo, 1349033379),
        (23, None, 1349444479),
    ])
def _define_old_tables(self, conn):
    """Define (without creating) partial models of the pre-migration
    'changes', 'object_state', and 'users' tables on self."""
    metadata = sa.MetaData()
    metadata.bind = conn

    self.changes = sautils.Table(
        'changes', metadata,
        # ...
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(256), nullable=False),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),  # CVS uses NULL
        sa.Column('category', sa.String(256)))

    self.object_state = sautils.Table(
        "object_state", metadata,
        # ...
        sa.Column(
            "objectid", sa.Integer,
            # commented not to add objects table
            # sa.ForeignKey('objects.id'),
            nullable=False),
        sa.Column("name", sa.String(length=256), nullable=False))

    self.users = sautils.Table(
        "users", metadata,
        # ...
        sa.Column("uid", sa.Integer, primary_key=True),
        sa.Column("identifier", sa.String(256), nullable=False),
    )
def _create_connected_workers_table(migrate_engine):
    """Create the 'connected_workers' table and its three indexes."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # reflect the referenced tables so the FKs below can resolve
    sautils.Table('masters', metadata, autoload=True)
    sautils.Table('workers', metadata, autoload=True)

    # Create 'connected_workers' table.
    connected_workers = sautils.Table(
        'connected_workers', metadata,
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('masterid', sa.Integer,
                  sa.ForeignKey('masters.id'), nullable=False),
        sa.Column('workerid', sa.Integer,
                  sa.ForeignKey('workers.id'), nullable=False),
    )
    connected_workers.create()

    # Create indexes; the (masterid, workerid) pair must be unique.
    for index in (
            sa.Index('connected_workers_masterid',
                     connected_workers.c.masterid),
            sa.Index('connected_workers_workers',
                     connected_workers.c.workerid),
            sa.Index('connected_workers_identity',
                     connected_workers.c.masterid,
                     connected_workers.c.workerid, unique=True),
    ):
        index.create()
def upgrade(migrate_engine):
    """Create the 'changesources' and 'changesource_masters' tables."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # declare 'masters' so the FK below can resolve (not created here)
    sautils.Table(
        'masters', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ..
    )

    changesources_tbl = sautils.Table(
        'changesources', metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, nullable=False),
        # sha1 of name, used for the unique index below
        sa.Column('name_hash', sa.String(40), nullable=False),
    )
    masters_link_tbl = sautils.Table(
        'changesource_masters', metadata,
        sa.Column('changesourceid', sa.Integer,
                  sa.ForeignKey('changesources.id'),
                  nullable=False, primary_key=True),
        sa.Column('masterid', sa.Integer,
                  sa.ForeignKey('masters.id'), nullable=False),
    )

    # create the new tables
    changesources_tbl.create()
    masters_link_tbl.create()

    # and the indices
    sa.Index('changesource_name_hash',
             changesources_tbl.c.name_hash, unique=True).create()
def verify_thd(conn):
    """Verify the scheduler tables exist and are empty after migration."""
    metadata = sa.MetaData()
    metadata.bind = conn

    schedulers = sautils.Table('schedulers', metadata, autoload=True)
    scheduler_masters = sautils.Table('scheduler_masters', metadata,
                                      autoload=True)
    scheduler_changes = sautils.Table('scheduler_changes', metadata,
                                      autoload=True)

    # each select both proves the columns exist and that no rows leaked in
    for query in (
            sa.select([schedulers.c.id, schedulers.c.name,
                       schedulers.c.name_hash]),
            sa.select([scheduler_masters.c.schedulerid,
                       scheduler_masters.c.masterid]),
            sa.select([scheduler_changes.c.schedulerid,
                       scheduler_changes.c.changeid,
                       scheduler_changes.c.important]),
    ):
        self.assertEqual(conn.execute(query).fetchall(), [])
def create_tables_thd(self, conn):
    """Create the pre-migration 'sourcestamps' and 'changes' tables,
    keeping references to both on self."""
    metadata = sa.MetaData()
    metadata.bind = conn

    self.sourcestamps = sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('patchid', sa.Integer),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('sourcestampsetid', sa.Integer),
    )
    self.changes = sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(256), nullable=False),
        sa.Column('comments', sa.String(1024), nullable=False),
        sa.Column('is_dir', sa.SmallInteger, nullable=False),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(256)),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
    )

    self.sourcestamps.create(bind=conn)
    self.changes.create(bind=conn)
def _createTables_thd(self, conn, metadata):
    """Create 'objects', 'buildrequest_claims' (plus its unique brid
    index), and a minimal 'buildrequests' table on the given metadata."""
    sautils.Table(
        "objects", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column('name', sa.String(128), nullable=False),
        sa.Column('class_name', sa.String(128), nullable=False),
    ).create()

    claims_tbl = sautils.Table(
        'buildrequest_claims', metadata,
        sa.Column('brid', sa.Integer, index=True, unique=True),
        sa.Column('objectid', sa.Integer, index=True, nullable=True),
        sa.Column('claimed_at', sa.Integer, nullable=False),
    )
    claims_tbl.create()
    # each build request may be claimed at most once
    sa.Index('buildrequest_claims_brids', claims_tbl.c.brid,
             unique=True).create()

    sautils.Table(
        'buildrequests', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ..
    ).create()
def reload_tables_after_migration(self, conn):
    """Re-reflect 'sourcestamps' and 'changes' so self holds the
    post-migration table definitions."""
    metadata = sa.MetaData()
    metadata.bind = conn
    for attr, table_name in (('sourcestamps', 'sourcestamps'),
                             ('changes', 'changes')):
        setattr(self, attr,
                sautils.Table(table_name, metadata, autoload=True))
def create_tables_thd(self, conn):
    """Create a minimal 'changes' table plus the old 'change_links'
    table and its changeid index."""
    metadata = sa.MetaData()
    metadata.bind = conn

    sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        # the rest is unimportant
    ).create()

    # Links (URLs) for changes
    links_tbl = sautils.Table(
        'change_links', metadata,
        sa.Column('changeid', sa.Integer,
                  sa.ForeignKey('changes.changeid'), nullable=False),
        sa.Column('link', sa.String(1024), nullable=False),
    )
    links_tbl.create()
    sa.Index('change_links_changeid', links_tbl.c.changeid).create()
def upgrade(migrate_engine):
    """Rebuild the builder/worker association foreign keys with
    ON DELETE CASCADE, discovering each existing FK's auto-generated
    name by reflection.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    builders = sautils.Table('builders', metadata, autoload=True)
    masters = sautils.Table('masters', metadata, autoload=True)
    workers = sautils.Table('workers', metadata, autoload=True)
    builder_masters = sautils.Table('builder_masters', metadata, autoload=True)
    configured_workers = sautils.Table('configured_workers', metadata,
                                       autoload=True)
    fks_to_change = []
    # we need to parse the reflected model in order to find the automatic fk
    # name that was put; mysql and pgsql have different naming conventions so
    # this is not very easy to have generic code working.
    for table, keys in [(builder_masters, (builders.c.id, masters.c.id)),
                        (configured_workers, (builder_masters.c.id,
                                              workers.c.id))]:
        for fk in table.constraints:
            if not isinstance(fk, sa.ForeignKeyConstraint):
                continue
            for c in fk.elements:
                if c.column in keys:
                    # migrate.xx.ForeignKeyConstraint is changing the model so
                    # initializing here would break the iteration
                    # (Set changed size during iteration)
                    fks_to_change.append((
                        table, (fk.columns, [c.column]),
                        dict(name=fk.name, ondelete='CASCADE')))
    # now that iteration over table.constraints is finished, it is safe to
    # build the migrate-flavored constraints and mutate the tables
    for table, args, kwargs in fks_to_change:
        fk = ForeignKeyConstraint(*args, **kwargs)
        table.append_constraint(fk)
        try:
            fk.drop()
        except NotSupportedError:
            # some versions of sqlite do not support drop,
            # but will still update the fk
            pass
        fk.create()
def verify_thd(conn):
    """Verify post-migration contents of 'workers' and 'builds'."""
    metadata = sa.MetaData()
    metadata.bind = conn

    # Verify database contents.

    # 'workers' table contents.
    workers = sautils.Table('workers', metadata, autoload=True)
    c = workers.c
    q = sa.select([c.id, c.name, c.info]).order_by(c.id)
    # FIX: execute on the supplied connection rather than via implicit
    # bound-metadata execution (q.execute()), which is deprecated/removed
    # in modern SQLAlchemy and inconsistent with the other verify
    # functions in this file.
    self.assertEqual(conn.execute(q).fetchall(), [
        (30, u'worker-1', u'{}'),
        (31, u'worker-2', u'{"a": 1}'),
    ])

    # 'builds' table contents.
    builds = sautils.Table('builds', metadata, autoload=True)
    c = builds.c
    q = sa.select([
        c.id, c.number, c.builderid, c.buildrequestid, c.workerid,
        c.masterid, c.started_at, c.complete_at, c.state_string, c.results
    ]).order_by(c.id)
    # Check that build with invalid reference to buildslaves now
    # have no reference to it.
    self.assertEqual(conn.execute(q).fetchall(), [
        (10, 2, None, 3, None, 1, 0, None, u'state', None),
        (11, 1, None, 4, 31, 2, 1000, None, u'state2', None),
    ])
def upgrade(migrate_engine):
    """Create the 'build_properties' table and its buildid index."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # declare 'builds' so the FK below can resolve (not created here)
    sautils.Table(
        'builds', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ..
    )

    # This table contains input properties for builds
    props_tbl = sautils.Table(
        'build_properties', metadata,
        sa.Column('buildid', sa.Integer,
                  sa.ForeignKey('builds.id'), nullable=False),
        sa.Column('name', sa.String(256), nullable=False),
        # JSON-encoded value
        sa.Column('value', sa.Text, nullable=False),
        sa.Column('source', sa.Text, nullable=False),
    )

    # create the new table
    props_tbl.create()
    # and an Index on it.
    sa.Index('build_properties_buildid', props_tbl.c.buildid).create()
def verify_thd(conn):
    """Verify 'buildername' was replaced by 'builderid' on buildrequests."""
    metadata = sa.MetaData()
    metadata.bind = conn
    buildrequests = sautils.Table('buildrequests', metadata, autoload=True)
    builders = sautils.Table('builders', metadata, autoload=True)

    # the column swap happened ...
    self.assertFalse(hasattr(buildrequests.c, 'buildername'))
    self.assertTrue(hasattr(buildrequests.c, 'builderid'))
    # ... and the expected indexes were (re)built
    index_names = sorted(ix.name for ix in buildrequests.indexes)
    self.assertEqual(index_names, [
        'buildrequests_builderid',
        'buildrequests_buildsetid',
        'buildrequests_complete',
    ])

    # get the new builderid
    bldr2_id = conn.execute(
        sa.select([builders.c.id],
                  whereclause=(builders.c.name == 'bldr2'))).first()[0]

    res = conn.execute(
        sa.select([buildrequests.c.id, buildrequests.c.builderid]))
    self.assertEqual(sorted(map(tuple, res)),
                     [(30, 20), (31, 20), (32, bldr2_id)])
def setup_thd(conn):
    """Create the pre-migration schema: buildrequests, builders,
    masters, and the old-style 'builds' table."""
    metadata = sa.MetaData()
    metadata.bind = conn

    # minimal referenced tables, id column only
    sautils.Table(
        'buildrequests', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
    ).create()
    sautils.Table(
        'builders', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
    ).create()
    sautils.Table(
        "masters", metadata,
        sa.Column('id', sa.Integer, primary_key=True),
    ).create()

    # the old builds schema: brid + start/finish times
    sautils.Table(
        'builds', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('brid', sa.Integer, sa.ForeignKey('buildrequests.id'),
                  nullable=False),
        sa.Column('start_time', sa.Integer, nullable=False),
        sa.Column('finish_time', sa.Integer),
    ).create()
def setup_thd(conn):
    """Create the old 'scheduler_changes' table (objectid-keyed), its
    supporting tables, and its three indexes."""
    metadata = sa.MetaData()
    metadata.bind = conn

    sched_changes = sautils.Table(
        'scheduler_changes', metadata,
        sa.Column('objectid', sa.Integer),
        sa.Column('changeid', sa.Integer),
        # ..
    )
    sched_changes.create()

    sautils.Table(
        'masters', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ..
    ).create()
    sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        # ..
    ).create()

    # per-column indexes plus a unique pair index
    for index in (
            sa.Index('scheduler_changes_objectid',
                     sched_changes.c.objectid),
            sa.Index('scheduler_changes_changeid',
                     sched_changes.c.changeid),
            sa.Index('scheduler_changes_unique',
                     sched_changes.c.objectid,
                     sched_changes.c.changeid, unique=True),
    ):
        index.create()
def create_tables_thd(self, conn):
    """Create the pre-migration 'builds' and 'steps' tables."""
    metadata = sa.MetaData()
    metadata.bind = conn

    sautils.Table(
        'builds', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('builderid', sa.Integer),
        sa.Column('buildrequestid', sa.Integer, nullable=False),
        sa.Column('buildslaveid', sa.Integer),
        sa.Column('masterid', sa.Integer, nullable=False),
        sa.Column('started_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
        sa.Column('state_strings_json', sa.Text, nullable=False),
        sa.Column('results', sa.Integer),
    ).create()

    sautils.Table(
        'steps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('name', sa.String(50), nullable=False),
        sa.Column('buildid', sa.Integer, sa.ForeignKey('builds.id')),
        sa.Column('started_at', sa.Integer),
        sa.Column('complete_at', sa.Integer),
        sa.Column('state_strings_json', sa.Text, nullable=False),
        sa.Column('results', sa.Integer),
        sa.Column('urls_json', sa.Text, nullable=False),
    ).create()
def remove_buildername(migrate_engine):
    """Drop buildrequests.buildername in favor of a builderid FK, then
    rebuild the buildrequests indexes.

    Relies on sqlalchemy-migrate's column drop/alter monkey-patching;
    the statement order here is significant.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    # reflect referenced tables so FKs and the alter below can resolve
    sautils.Table('builders', metadata, autoload=True)
    sautils.Table('buildsets', metadata, autoload=True)
    buildrequests = sautils.Table('buildrequests', metadata, autoload=True)

    # Specify what the new table should look like
    buildrequests.c.buildername.drop()
    changeset.alter_column(
        sa.Column('builderid', sa.Integer, sa.ForeignKey("builders.id"),
                  nullable=False),
        table=buildrequests, metadata=metadata, engine=migrate_engine)

    # recreate the indexes (the buildername index is gone with its column)
    idx = sa.Index('buildrequests_builderid', buildrequests.c.builderid)
    idx.create(migrate_engine)
    idx = sa.Index('buildrequests_buildsetid', buildrequests.c.buildsetid)
    idx.create(migrate_engine)
    idx = sa.Index('buildrequests_complete', buildrequests.c.complete)
    idx.create(migrate_engine)
def verify_thd(conn):
    """Verify the builders row survived and the new tag tables are empty."""
    metadata = sa.MetaData()
    metadata.bind = conn

    builders = sautils.Table('builders', metadata, autoload=True)
    # exactly one builder row, carried through unchanged
    rows = list(conn.execute(sa.select([builders])))
    for row in rows:
        self.assertEqual(row, (1, u'bname', u'description', u'dontcare'))
    self.assertEqual(len(rows), 1)

    tags = sautils.Table('tags', metadata, autoload=True)
    builders_tags = sautils.Table('builders_tags', metadata, autoload=True)

    q = sa.select([tags.c.id, tags.c.name, tags.c.name_hash])
    self.assertEqual(conn.execute(q).fetchall(), [])

    q = sa.select([builders_tags.c.id, builders_tags.c.builderid,
                   builders_tags.c.tagid])
    self.assertEqual(conn.execute(q).fetchall(), [])
def create_tables_thd(self, conn):
    """Create pre-migration 'buildsets', 'buildrequests' (with its
    indexes), and 'objects' tables, keeping references on self."""
    metadata = sa.MetaData()
    metadata.bind = conn

    self.buildsets = sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('sourcestampid', sa.Integer,
                  nullable=False),  # NOTE: foreign key omitted
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
    )
    self.buildsets.create(bind=conn)

    self.buildrequests = sautils.Table(
        'buildrequests', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildsetid', sa.Integer,
                  sa.ForeignKey("buildsets.id"), nullable=False),
        sa.Column('buildername', sa.String(length=256), nullable=False),
        sa.Column('priority', sa.Integer, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('claimed_at', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('claimed_by_name', sa.String(length=256)),
        sa.Column('claimed_by_incarnation', sa.String(length=256)),
        sa.Column('complete', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('results', sa.SmallInteger),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
    )
    self.buildrequests.create(bind=conn)

    # indexes on the columns the migration queries
    for index in (
            sa.Index('buildrequests_buildsetid',
                     self.buildrequests.c.buildsetid),
            sa.Index('buildrequests_buildername',
                     self.buildrequests.c.buildername),
            sa.Index('buildrequests_complete',
                     self.buildrequests.c.complete),
    ):
        index.create()

    self.objects = sautils.Table(
        "objects", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column('name', sa.String(128), nullable=False),
        sa.Column('class_name', sa.String(128), nullable=False),
        sa.UniqueConstraint('name', 'class_name', name='object_identity'),
    )
    self.objects.create(bind=conn)
def setup_thd(conn):
    """Create the 'steps', 'logs', and 'logchunks' tables plus their
    indexes, as they exist before migration."""
    metadata = sa.MetaData()
    metadata.bind = conn

    steps_tbl = sautils.Table(
        'steps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('name', sa.String(50), nullable=False),
        sa.Column('buildid', sa.Integer),
        sa.Column('started_at', sa.Integer),
        sa.Column('complete_at', sa.Integer),
        # a list of strings describing the step's state
        sa.Column('state_strings_json', sa.Text, nullable=False),
        sa.Column('results', sa.Integer),
        sa.Column('urls_json', sa.Text, nullable=False),
    )
    steps_tbl.create()

    logs_tbl = sautils.Table(
        'logs', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(50), nullable=False),
        sa.Column('stepid', sa.Integer, sa.ForeignKey('steps.id')),
        sa.Column('complete', sa.SmallInteger, nullable=False),
        sa.Column('num_lines', sa.Integer, nullable=False),
        # 's' = stdio, 't' = text, 'h' = html
        sa.Column('type', sa.String(1), nullable=False),
    )
    logs_tbl.create()

    chunks_tbl = sautils.Table(
        'logchunks', metadata,
        sa.Column('logid', sa.Integer, sa.ForeignKey('logs.id')),
        # 0-based line number range in this chunk (inclusive); note that for
        # HTML logs, this counts lines of HTML, not lines of rendered output
        sa.Column('first_line', sa.Integer, nullable=False),
        sa.Column('last_line', sa.Integer, nullable=False),
        # log contents, including a terminating newline, encoded in utf-8 or,
        # if 'compressed' is true, compressed with gzip
        sa.Column('content', sa.LargeBinary(65536)),
        sa.Column('compressed', sa.SmallInteger, nullable=False),
    )
    chunks_tbl.create()

    for index in (
            sa.Index('logs_name', logs_tbl.c.stepid, logs_tbl.c.name,
                     unique=True),
            sa.Index('logchunks_firstline', chunks_tbl.c.logid,
                     chunks_tbl.c.first_line),
            sa.Index('logchunks_lastline', chunks_tbl.c.logid,
                     chunks_tbl.c.last_line),
    ):
        index.create()
def setup_thd(conn):
    """Create pre-migration 'builds' and 'buildsets' tables and insert
    one sample buildset row."""
    metadata = sa.MetaData()
    metadata.bind = conn

    # This table contains basic information about each build.
    builds = sautils.Table(
        'builds', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('builderid', sa.Integer),
        # note that there is 1:N relationship here.
        # In case of slave loss, build has results RETRY
        # and buildrequest is unclaimed
        sa.Column('buildrequestid', sa.Integer, nullable=False),
        # slave which performed this build
        # TODO: ForeignKey to buildslaves table, named buildslaveid
        # TODO: keep nullable to support worker-free
        # builds
        sa.Column('buildslaveid', sa.Integer),
        # master which controlled this build
        sa.Column('masterid', sa.Integer, nullable=False),
        # start/complete times
        sa.Column('started_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
        # a list of strings describing the build's state
        sa.Column('state_strings_json', sa.Text, nullable=False),
        sa.Column('results', sa.Integer),
    )
    builds.create()

    buildsets = sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
    )
    buildsets.create()

    # NOTE(review): the insert dict includes 'sourcestamps' and 'bsid'
    # keys that are not columns of the buildsets table defined above —
    # presumably 'bsid' was meant to be 'id'; confirm against the
    # migration under test, since SQLAlchemy may reject unconsumed
    # column names here.
    conn.execute(buildsets.insert(), [
        dict(external_idstring='extid',
             reason='rsn1',
             sourcestamps=[91],
             submitted_at=datetime2epoch(
                 datetime.datetime(1978, 6, 15, 12, 31, 15)),
             complete_at=datetime2epoch(
                 datetime.datetime(1979, 6, 15, 12, 31, 15)),
             complete=0,
             results=-1,
             bsid=91)
    ])
def verify_thd(conn):
    """Verify the four new test-result tables exist (and are empty) and
    that the expected indexes were created."""
    metadata = sa.MetaData()
    metadata.bind = conn

    # (table name, columns to select) — selecting proves the schema;
    # the empty result proves no rows were created by the migration
    test_result_sets = sautils.Table('test_result_sets', metadata,
                                     autoload=True)
    test_results = sautils.Table('test_results', metadata, autoload=True)
    test_names = sautils.Table('test_names', metadata, autoload=True)
    test_code_paths = sautils.Table('test_code_paths', metadata,
                                    autoload=True)

    expected_empty = [
        sa.select([
            test_result_sets.c.builderid,
            test_result_sets.c.buildid,
            test_result_sets.c.stepid,
            test_result_sets.c.description,
            test_result_sets.c.category,
            test_result_sets.c.value_unit,
            test_result_sets.c.tests_passed,
            test_result_sets.c.tests_failed,
            test_result_sets.c.complete,
        ]),
        sa.select([
            test_results.c.builderid,
            test_results.c.test_result_setid,
            test_results.c.test_nameid,
            test_results.c.test_code_pathid,
            test_results.c.line,
            test_results.c.duration_ns,
            test_results.c.value,
        ]),
        sa.select([
            test_names.c.builderid,
            test_names.c.name,
        ]),
        sa.select([
            test_code_paths.c.builderid,
            test_code_paths.c.path,
        ]),
    ]
    for query in expected_empty:
        self.assertEqual(conn.execute(query).fetchall(), [])

    insp = sa.inspect(conn)
    for table_name, index_name in (
            ('test_names', 'test_names_name'),
            ('test_code_paths', 'test_code_paths_path')):
        index_names = [item['name'] for item in insp.get_indexes(table_name)]
        self.assertTrue(index_name in index_names)
def create_tables_thd(self, conn):
    """Create pre-migration 'changes', 'buildsets', and
    'buildset_properties' tables."""
    metadata = sa.MetaData()
    metadata.bind = conn

    sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(256), nullable=False),
        sa.Column('comments', sa.String(1024), nullable=False),
        # old, for CVS
        sa.Column('is_dir', sa.SmallInteger, nullable=False),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),  # CVS uses NULL
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(256)),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
    ).create()

    sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
        sa.Column('sourcestampsetid', sa.Integer),  # foreign key omitted
    ).create()

    sautils.Table(
        'buildset_properties', metadata,
        sa.Column('buildsetid', sa.Integer, nullable=False),
        sa.Column('property_name', sa.String(256), nullable=False),
        sa.Column('property_value', sa.String(1024), nullable=False),
    ).create()
def upgrade(migrate_engine):
    """Introduce 'sourcestampsets': give every existing sourcestamp its
    own set, repoint buildsets at the set, and add FKs and an index.

    The statement order below is significant — data is copied and
    backfilled before each NOT NULL / FK constraint is applied.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    sourcestamps_table = sautils.Table('sourcestamps', metadata,
                                       autoload=True)
    buildsets_table = sautils.Table('buildsets', metadata, autoload=True)

    # Create the sourcestampset table
    # that defines a sourcestampset
    sourcestampsets_table = sautils.Table(
        "sourcestampsets", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
    )
    sourcestampsets_table.create()

    # All current sourcestampid's are migrated to sourcestampsetid
    # Insert all sourcestampid's as setid's into sourcestampsets table
    sourcestampsetids = sa.select([sourcestamps_table.c.id])
    # this doesn't seem to work without str() --
    # verified in sqla 0.6.0 - 0.7.1
    migrate_engine.execute(
        str(sautils.InsertFromSelect(sourcestampsets_table,
                                     sourcestampsetids)))

    # rename the buildsets table column
    buildsets_table.c.sourcestampid.alter(name='sourcestampsetid')

    # re-reflect buildsets so the renamed column is visible to the FK below
    metadata.remove(buildsets_table)
    buildsets_table = sautils.Table('buildsets', metadata, autoload=True)

    cons = constraint.ForeignKeyConstraint(
        [buildsets_table.c.sourcestampsetid], [sourcestampsets_table.c.id])
    cons.create()

    # Add sourcestampsetid including index to sourcestamps table
    ss_sourcestampsetid = sa.Column('sourcestampsetid', sa.Integer)
    ss_sourcestampsetid.create(sourcestamps_table)

    # Update the setid to the same value as sourcestampid
    migrate_engine.execute(
        str(sourcestamps_table.update().values(
            sourcestampsetid=sourcestamps_table.c.id)))
    # only after backfilling can the column become NOT NULL
    ss_sourcestampsetid.alter(nullable=False)

    # Data is up to date, now force integrity
    cons = constraint.ForeignKeyConstraint(
        [sourcestamps_table.c.sourcestampsetid],
        [sourcestampsets_table.c.id])
    cons.create()

    # Add index for performance reasons to find all sourcestamps in a set
    # quickly
    idx = sa.Index('sourcestamps_sourcestampsetid',
                   sourcestamps_table.c.sourcestampsetid, unique=False)
    idx.create()
def create_tables_thd(self, conn):
    """Create pre-migration 'buildsets' (with indexes), 'patches', and
    'sourcestamps' tables, keeping references on self."""
    metadata = sa.MetaData()
    metadata.bind = conn

    self.buildsets = sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        # NOTE: foreign key omitted:
        sa.Column('sourcestampid', sa.Integer, nullable=False),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
    )
    self.buildsets.create(bind=conn)
    sa.Index('buildsets_complete', self.buildsets.c.complete).create()
    sa.Index('buildsets_submitted_at',
             self.buildsets.c.submitted_at).create()

    self.patches = sautils.Table(
        'patches', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('patchlevel', sa.Integer, nullable=False),
        sa.Column('patch_base64', sa.Text, nullable=False),
        sa.Column('patch_author', sa.Text, nullable=False),
        sa.Column('patch_comment', sa.Text, nullable=False),
        sa.Column('subdir', sa.Text),
    )
    self.patches.create(bind=conn)

    self.sourcestamps = sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        # self-referential FK, as in the schema under test
        sa.Column('sourcestampid', sa.Integer,
                  sa.ForeignKey('sourcestamps.id')),
    )
    self.sourcestamps.create(bind=conn)