def process_column(self, column):
    """Processes default values for column"""
    # XXX: this is a snippet from SA processing of positional parameters
    # Pre-0.6 SQLAlchemy keeps unconsumed positional args on column.args;
    # collect them so they get (re)applied through _init_items below.
    if not SQLA_06 and column.args:
        toinit = list(column.args)
    else:
        toinit = list()
    if column.server_default is not None:
        if isinstance(column.server_default, sqlalchemy.FetchedValue):
            toinit.append(column.server_default)
        else:
            toinit.append(sqlalchemy.DefaultClause(column.server_default))
    if column.server_onupdate is not None:
        if isinstance(column.server_onupdate, FetchedValue):
            # BUG FIX: this branch previously appended column.server_default,
            # silently replacing the on-update clause with the insert-time
            # default; append the server_onupdate value instead.
            toinit.append(column.server_onupdate)
        else:
            toinit.append(
                sqlalchemy.DefaultClause(column.server_onupdate,
                                         for_update=True))
    if toinit:
        column._init_items(*toinit)
    if not SQLA_06:
        column.args = []
def create_tables_thd(self, conn):
    """Create the 'buildrequests' table (and, outside sqlite, its indexes)
    on the given connection, storing the Table object on self."""
    metadata = sa.MetaData()
    metadata.bind = conn
    self.buildrequests = sautils.Table(
        'buildrequests', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildsetid', sa.Integer,  # foreign key removed
                  nullable=False),
        sa.Column('buildername', sa.String(length=256), nullable=False),
        sa.Column('priority', sa.Integer, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('results', sa.SmallInteger),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
    )
    self.buildrequests.create(bind=conn)

    # these indices should already exist everywhere but on sqlite
    if conn.dialect.name != 'sqlite':
        idx = sa.Index('buildrequests_buildsetid',
                       self.buildrequests.c.buildsetid)
        idx.create()
        idx = sa.Index('buildrequests_buildername',
                       self.buildrequests.c.buildername)
        idx.create()
        idx = sa.Index('buildrequests_complete',
                       self.buildrequests.c.complete)
        idx.create()
def setup_thd(conn):
    """Create a minimal 'buildrequests' table and seed one pending request."""
    metadata = sa.MetaData()
    metadata.bind = conn
    buildrequests = sautils.Table(
        'buildrequests', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildsetid', sa.Integer, nullable=False),
        sa.Column('buildername', sa.String(length=256), nullable=False),
        sa.Column('priority', sa.Integer, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('results', sa.SmallInteger),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
    )
    buildrequests.create()
    # one incomplete request (complete=0) for later code to operate on
    conn.execute(buildrequests.insert(), [
        {
            'id': 101, 'buildsetid': 13, 'buildername': 'bld',
            'priority': 10, 'complete': 0, 'submitted_at': 1234
        },
    ])
def create_tables_thd(self, conn):
    """Create 'buildsets', 'buildrequests' (with indexes) and 'objects'
    tables on the given connection, storing the Table objects on self."""
    metadata = sa.MetaData()
    metadata.bind = conn

    self.buildsets = sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('sourcestampid', sa.Integer,
                  nullable=False),  # NOTE: foreign key omitted
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
    )
    self.buildsets.create(bind=conn)

    self.buildrequests = sautils.Table(
        'buildrequests', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildsetid', sa.Integer, sa.ForeignKey("buildsets.id"),
                  nullable=False),
        sa.Column('buildername', sa.String(length=256), nullable=False),
        sa.Column('priority', sa.Integer, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('claimed_at', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('claimed_by_name', sa.String(length=256)),
        sa.Column('claimed_by_incarnation', sa.String(length=256)),
        sa.Column('complete', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('results', sa.SmallInteger),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
    )
    self.buildrequests.create(bind=conn)

    # the table's lookup indexes
    idx = sa.Index('buildrequests_buildsetid',
                   self.buildrequests.c.buildsetid)
    idx.create()
    idx = sa.Index('buildrequests_buildername',
                   self.buildrequests.c.buildername)
    idx.create()
    idx = sa.Index('buildrequests_complete',
                   self.buildrequests.c.complete)
    idx.create()

    self.objects = sautils.Table(
        "objects", metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column('name', sa.String(128), nullable=False),
        sa.Column('class_name', sa.String(128), nullable=False),
        # each (name, class_name) pair identifies at most one row
        sa.UniqueConstraint('name', 'class_name', name='object_identity'),
    )
    self.objects.create(bind=conn)
def create_tables_thd(self, conn):
    """Create 'changes', 'buildsets' and 'buildset_properties' tables on
    the given connection (schema as of before this migration)."""
    metadata = sa.MetaData()
    metadata.bind = conn

    changes = sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(256), nullable=False),
        sa.Column('comments', sa.String(1024), nullable=False),
        # old, for CVS
        sa.Column('is_dir', sa.SmallInteger, nullable=False),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),  # CVS uses NULL
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(256)),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
    )
    changes.create()

    buildsets = sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
        sa.Column('sourcestampsetid', sa.Integer),  # foreign key omitted
    )
    buildsets.create()

    buildset_properties = sautils.Table(
        'buildset_properties', metadata,
        sa.Column('buildsetid', sa.Integer, nullable=False),
        sa.Column('property_name', sa.String(256), nullable=False),
        sa.Column('property_value', sa.String(1024), nullable=False),
    )
    buildset_properties.create()
def remove_buildername(migrate_engine):
    """Drop buildrequests.buildername, make builderid NOT NULL, and create
    the table's indexes."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # reflect the tables that the new foreign keys point at
    sa.Table('builders', metadata, autoload=True)
    sa.Table('buildsets', metadata, autoload=True)

    # Specify what the new table should look like
    buildrequests = sa.Table(
        'buildrequests', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildsetid', sa.Integer, sa.ForeignKey("buildsets.id"),
                  nullable=False),
        sa.Column('builderid', sa.Integer, sa.ForeignKey('builders.id'),
                  nullable=False),
        sa.Column('priority', sa.Integer, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('results', sa.SmallInteger),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
        sa.Column('waited_for', sa.SmallInteger,
                  server_default=sa.DefaultClause("0")),
    )

    # NOTE(review): freshly-constructed Column objects (not reflected ones)
    # are handed to sqlalchemy-migrate's drop_column/alter_column here —
    # confirm this matches the changeset API of the pinned migrate version.
    changeset.drop_column(
        sa.Column('buildername', sa.String(length=256), nullable=False),
        table=buildrequests, metadata=metadata, engine=migrate_engine)
    changeset.alter_column(
        sa.Column('builderid', sa.Integer, sa.ForeignKey("builders.id"),
                  nullable=False),
        table='buildrequests', metadata=metadata, engine=migrate_engine)

    idx = sa.Index('buildrequests_builderid', buildrequests.c.builderid)
    idx.create(migrate_engine)
    idx = sa.Index('buildrequests_buildsetid', buildrequests.c.buildsetid)
    idx.create(migrate_engine)
    idx = sa.Index('buildrequests_complete', buildrequests.c.complete)
    idx.create(migrate_engine)
def add_cols(table):
    """Add non-null 'repository' and 'project' string columns (empty-string
    server default) to *table*, back-filling existing rows."""
    for col_name in ('repository', 'project'):
        new_col = sa.Column(col_name, sa.String(512), nullable=False,
                            server_default=sa.DefaultClause(''))
        new_col.create(table, populate_default=True)
def upgrade(migrate_engine):
    """Add empty, non-null 'patch_author' and 'patch_comment' text columns
    to the 'patches' table, back-filling existing rows with ''."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # FIX: the old comment claimed this added class_name to the schedulers
    # table — a copy-paste error; this migration touches 'patches'.
    patches = sa.Table('patches', metadata, autoload=True)

    patch_author = sa.Column('patch_author', sa.Text,
                             server_default=sa.DefaultClause(''),
                             nullable=False)
    patch_author.create(patches, populate_default=True)

    # FIX: this column was previously built in a variable misleadingly
    # reusing the name 'patch_author'.
    patch_comment = sa.Column('patch_comment', sa.Text,
                              server_default=sa.DefaultClause(''),
                              nullable=False)
    patch_comment.create(patches, populate_default=True)
def create_tables_thd(self, conn):
    """Create 'patches', 'sourcestamps' and 'changes' tables on the given
    connection, in foreign-key dependency order."""
    metadata = sa.MetaData()
    metadata.bind = conn

    patches = sautils.Table(
        'patches', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('patchlevel', sa.Integer, nullable=False),
        sa.Column('patch_base64', sa.Text, nullable=False),
        sa.Column('patch_author', sa.Text, nullable=False),
        sa.Column('patch_comment', sa.Text, nullable=False),
        sa.Column('subdir', sa.Text),
    )
    sourcestamps = sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('ss_hash', sa.String(40), nullable=False),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('created_at', sa.Integer, nullable=False),
    )
    changes = sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(256), nullable=False),
        sa.Column('comments', sa.Text, nullable=False),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(256)),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('sourcestampid', sa.Integer,
                  sa.ForeignKey('sourcestamps.id')),
    )
    # create in dependency order: changes -> sourcestamps -> patches
    patches.create()
    sourcestamps.create()
    changes.create()
def test_no_embed_in_sql(self):
    """Using a DefaultGenerator, Sequence, DefaultClause in the columns,
    where clause of a select, or in the values clause of insert,
    update, raises an informative error"""
    t = Table("some_table", MetaData(), Column("id", Integer))
    constructs = (
        sa.Sequence("y"),
        sa.ColumnDefault("y"),
        sa.DefaultClause("y"),
    )
    for const in constructs:
        type_name = const.__class__.__name__
        # embedding the construct in a select's column list / WHERE role
        assert_raises_message(
            sa.exc.ArgumentError,
            r"SQL expression for WHERE/HAVING role expected, "
            r"got \[(?:Sequence|ColumnDefault|DefaultClause)\('y'.*\)\]",
            t.select,
            [const],
        )
        # embedding it in the VALUES clause of insert and update
        for stmt_factory in (t.insert, t.update):
            assert_raises_message(
                sa.exc.ArgumentError,
                "SQL expression element expected, got %s" % type_name,
                stmt_factory().values,
                col4=const,
            )
def rename_sourcestamps_to_old(migrate_engine):
    """Rename the 'sourcestamps' table to 'sourcestamps_old', dropping the
    table's indexes first."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    sourcestamps = sa.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('sourcestampsetid', sa.Integer,
                  sa.ForeignKey('sourcestampsets.id')),
    )
    # drop the indexes before renaming — presumably so they don't follow
    # the table under its new name; confirm against the dialects in use
    for index in sourcestamps.indexes:
        index.drop()
    # raw DDL: rename the table out of the way
    migrate_engine.execute('alter table sourcestamps '
                           'rename to sourcestamps_old')
def test_no_embed_in_sql(self):
    """Using a DefaultGenerator, Sequence, DefaultClause in the columns,
    where clause of a select, or in the values clause of insert,
    update, raises an informative error"""
    t = Table(
        "some_table",
        MetaData(),
        Column("id", Integer),
        Column("col4", String()),
    )
    constructs = (
        sa.Sequence("y"),
        sa.ColumnDefault("y"),
        sa.DefaultClause("y"),
    )
    for const in constructs:
        # select([...]) with the construct in the column list
        assert_raises_message(
            sa.exc.ArgumentError,
            "SQL expression object expected, got object of type "
            "<.* 'list'> instead",
            t.select,
            [const],
        )
        # VALUES clause of insert and update: error surfaces at compile
        # time, i.e. when the statement is stringified
        for make_stmt in (t.insert, t.update):
            assert_raises_message(
                sa.exc.InvalidRequestError,
                "cannot be used directly as a column expression.",
                str,
                make_stmt().values(col4=const),
            )
def create_tables_thd(self, conn):
    """Create 'sourcestamps' and 'changes' tables on the given connection.

    The nullability of changes.committer is taken from
    self.PREVIOUS_NULLABLE, supplied by the test case.
    """
    metadata = sa.MetaData()
    metadata.bind = conn
    sourcestamps = sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
    )
    sourcestamps.create()
    changes = sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(255), nullable=False),
        # pre-migration nullability for the column under test
        sa.Column('committer', sa.String(255),
                  nullable=self.PREVIOUS_NULLABLE),
        sa.Column('comments', sa.Text, nullable=False),
        sa.Column('branch', sa.String(255)),
        sa.Column('revision', sa.String(255)),
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(255)),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('sourcestampid', sa.Integer,
                  sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'),
                  nullable=False),
        # self-referential link to a parent change, cleared on delete
        sa.Column('parent_changeids', sa.Integer,
                  sa.ForeignKey('changes.changeid', ondelete='SET NULL'),
                  nullable=True),
    )
    changes.create()
def test_no_embed_in_sql(self):
    """Using a DefaultGenerator, Sequence, DefaultClause in the columns,
    where clause of a select, or in the values clause of insert,
    update, raises an informative error"""
    # NOTE(review): `t` is not defined in this function — presumably a
    # module-level or fixture table with a 'col4' column; confirm it exists
    # in the enclosing test module.
    for const in (
        sa.Sequence('y'),
        sa.ColumnDefault('y'),
        sa.DefaultClause('y')
    ):
        # construct embedded in a select's column list
        assert_raises_message(
            sa.exc.ArgumentError,
            "SQL expression object or string expected, got object of type "
            "<.* 'list'> instead",
            t.select, [const]
        )
        # VALUES clause of insert/update: the error surfaces when the
        # statement is stringified
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "cannot be used directly as a column expression.",
            str, t.insert().values(col4=const)
        )
        assert_raises_message(
            sa.exc.InvalidRequestError,
            "cannot be used directly as a column expression.",
            str, t.update().values(col4=const)
        )
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Add a mandatory 'slug' column to mailtemplates; existing rows are
    # back-filled with the literal server default "0".
    slug_column = sa.Column('slug', sa.String(), nullable=False,
                            server_default=sa.DefaultClause("0"))
    op.add_column('mailtemplates', slug_column)
def setup_thd(conn):
    """Create 'builds' and 'buildsets' tables and seed one buildset row."""
    metadata = sa.MetaData()
    metadata.bind = conn

    # This table contains basic information about each build.
    builds = sautils.Table(
        'builds', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('builderid', sa.Integer),
        # note that there is 1:N relationship here.
        # In case of slave loss, build has results RETRY
        # and buildrequest is unclaimed
        sa.Column('buildrequestid', sa.Integer, nullable=False),
        # slave which performed this build
        # TODO: ForeignKey to buildslaves table, named buildslaveid
        # TODO: keep nullable to support worker-free
        # builds
        sa.Column('buildslaveid', sa.Integer),
        # master which controlled this build
        sa.Column('masterid', sa.Integer, nullable=False),
        # start/complete times
        sa.Column('started_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
        # a list of strings describing the build's state
        sa.Column('state_strings_json', sa.Text, nullable=False),
        sa.Column('results', sa.Integer),
    )
    builds.create()

    buildsets = sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
    )
    buildsets.create()

    # NOTE(review): 'sourcestamps' and 'bsid' are not columns of the
    # buildsets table defined above — verify the SQLAlchemy version in use
    # tolerates these extra bind keys (newer versions raise).
    conn.execute(buildsets.insert(), [
        dict(external_idstring='extid', reason='rsn1', sourcestamps=[91],
             submitted_at=datetime2epoch(
                 datetime.datetime(1978, 6, 15, 12, 31, 15)),
             complete_at=datetime2epoch(
                 datetime.datetime(1979, 6, 15, 12, 31, 15)),
             complete=0, results=-1, bsid=91)
    ])
def drop_old_schema_parts(migrate_engine):
    """Drop the tables and columns left over from the old
    sourcestampset-based schema, in dependency order."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine
    sourcestamp_changes = sa.Table(
        'sourcestamp_changes', metadata,
        sa.Column('sourcestampid', sa.Integer),
        # ...
    )
    # this drops 'sourcestamp_changes_sourcestampid' too
    sourcestamp_changes.drop()

    buildsets = sa.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
        sa.Column('sourcestampsetid', sa.Integer,
                  sa.ForeignKey('sourcestampsets.id')),
    )
    # there's a leftover bogus foreign key constraint referencing
    # sourcestamps_old.sourcestampid, from the rename of sourcestampid to
    # sourcestampsetid in migration 018. Dropping this column will drop
    # that constraint, too.
    buildsets.c.sourcestampsetid.drop()

    sourcestamps_old = sa.Table(
        'sourcestamps_old', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ...
    )
    sourcestamps_old.drop()

    sourcestampsets = sa.Table(
        'sourcestampsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
    )
    sourcestampsets.drop()

    # re-create all indexes on the table - sqlite dropped them
    if migrate_engine.dialect.name == 'sqlite':
        idx = sa.Index('buildsets_complete', buildsets.c.complete)
        idx.create()
        idx = sa.Index('buildsets_submitted_at', buildsets.c.submitted_at)
        idx.create()
def create_tables_thd(self, conn):
    """Create 'buildsets' (with indexes), 'patches' and 'sourcestamps'
    tables on the given connection, storing the Table objects on self."""
    metadata = sa.MetaData()
    metadata.bind = conn

    self.buildsets = sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        # NOTE: foreign key omitted:
        sa.Column('sourcestampid', sa.Integer, nullable=False),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
    )
    self.buildsets.create(bind=conn)
    sa.Index('buildsets_complete',
             self.buildsets.c.complete).create()
    sa.Index('buildsets_submitted_at',
             self.buildsets.c.submitted_at).create()

    self.patches = sautils.Table(
        'patches', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('patchlevel', sa.Integer, nullable=False),
        sa.Column('patch_base64', sa.Text, nullable=False),
        sa.Column('patch_author', sa.Text, nullable=False),
        sa.Column('patch_comment', sa.Text, nullable=False),
        sa.Column('subdir', sa.Text),
    )
    self.patches.create(bind=conn)

    self.sourcestamps = sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        # NOTE(review): a 'sourcestampid' column on the sourcestamps table
        # referencing sourcestamps.id is a self-reference — confirm this
        # matches the intended pre-migration schema.
        sa.Column('sourcestampid', sa.Integer,
                  sa.ForeignKey('sourcestamps.id')),
    )
    self.sourcestamps.create(bind=conn)
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # soft-delete flag; existing rows default to false
    op.add_column(
        'vacancies',
        sa.Column('deleted', sa.Boolean(), nullable=False,
                  server_default=sa.DefaultClause(false())))
    # NOTE(review): str(datetime.datetime.now()) is evaluated once, when
    # this migration runs, so the server default becomes a frozen timestamp
    # string — not "now" at row-insert time. If a dynamic default was
    # intended, sa.func.now() is the usual choice; confirm before changing.
    op.add_column(
        'vacancies',
        sa.Column('updated_at', sa.DateTime(), nullable=True,
                  server_default=sa.DefaultClause(str(
                      datetime.datetime.now()))))
    op.add_column('vacancies',
                  sa.Column('user_id', sa.Integer(), nullable=True))
    # unnamed FK constraint: vacancies.user_id -> users.id
    op.create_foreign_key(None, 'vacancies', 'users', ['user_id'], ['id'])
def upgrade(migrate_engine):
    """Add a non-null 'codebase' column (empty-string server default) to
    the 'sourcestamps' and 'changes' tables."""
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # Add codebase to tables
    for table_name in ('sourcestamps', 'changes'):
        target = sa.Table(table_name, metadata, autoload=True)
        codebase = sa.Column('codebase', sa.String(length=256),
                             nullable=False,
                             server_default=sa.DefaultClause(""))
        codebase.create(target)
def upgrade(migrate_engine):
    """Add the 'waited_for' column to the 'buildrequests' table."""
    meta = sa.MetaData()
    meta.bind = migrate_engine
    buildrequests = sa.Table('buildrequests', meta, autoload=True)
    # boolean indicating whether there is a step blocking, waiting for this
    # request to complete
    waited_for_col = sa.Column('waited_for', sa.SmallInteger,
                               server_default=sa.DefaultClause("0"))
    waited_for_col.create(buildrequests)
def upgrade():
    """Create the 'usuario' (user) table."""
    op.create_table(
        'usuario',
        sa.Column('id', sa.BigInteger, primary_key=True, index=True),
        sa.Column('rol_id', sa.Integer, nullable=True),
        sa.Column('correo_electronico', sa.String(128), unique=True,
                  nullable=True, index=True),
        sa.Column('credencial', sa.String(64), nullable=False),
        sa.Column('nombres', sa.String(64), nullable=True, index=True),
        sa.Column('apellidos', sa.String(64), nullable=True, index=True),
        sa.Column('sexo', sa.SmallInteger, nullable=True),
        sa.Column('tipo_documento', sa.SmallInteger,
                  server_default=sa.DefaultClause('0'), nullable=False),
        sa.Column('nacionalidad', sa.CHAR(2), nullable=True),
        sa.Column('escuela', sa.SmallInteger,
                  server_default=sa.DefaultClause('0'), nullable=True),
        sa.Column('nro_telefono', sa.BigInteger, nullable=True),
        sa.Column('distrito', sa.SmallInteger, nullable=True),
        sa.Column('direccion', sa.String(64), nullable=True),
        # FIX: 'avatar' was declared twice (here and again at the end of
        # the column list); the duplicate declaration has been removed.
        sa.Column('avatar', sa.Binary, nullable=True),
        sa.Column('fecha_creacion', sa.DateTime, nullable=False),
        sa.Column('fecha_ultima_actualizacion', sa.DateTime,
                  nullable=False),
        sa.Column('autorizado', sa.Boolean, server_default=sa.true(),
                  nullable=False),
        sa.Column('deshabilitado', sa.Boolean, server_default=sa.false(),
                  nullable=False))
def create_tables_thd(self, conn):
    """Create 'sourcestamps' and 'changes' tables on the given connection
    and seed one sourcestamp plus one change referencing it."""
    metadata = sa.MetaData()
    metadata.bind = conn
    sourcestamps = sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
    )
    sourcestamps.create()
    changes = sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(255), nullable=False),
        sa.Column('comments', sa.Text, nullable=False),
        sa.Column('branch', sa.String(255)),
        sa.Column('revision', sa.String(255)),
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(255)),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('sourcestampid', sa.Integer,
                  sa.ForeignKey('sourcestamps.id', ondelete='CASCADE'),
                  nullable=False),
        # self-referential link to a parent change, cleared on delete
        sa.Column('parent_changeids', sa.Integer,
                  sa.ForeignKey('changes.changeid', ondelete='SET NULL'),
                  nullable=True),
    )
    changes.create()

    # seed data: a sourcestamp and a change that points at it
    conn.execute(sourcestamps.insert(), [
        dict(id=100),
    ])
    conn.execute(changes.insert(), [
        dict(
            changeid=1, author='warner', comments='fix whitespace',
            when_timestamp=256738404, repository='git://warner',
            codebase='core', project='Buildbot', sourcestampid=100),
    ])
def setup_guid_server_defaults(metadata: sa.MetaData,
                               engine: sa.engine.Engine) -> None:
    """Attach a backend-appropriate random-UUID server default to every
    single-column GUID primary key in *metadata*."""
    backend = DatabaseBackend.from_engine(engine)
    # SQL expression generating a random UUID, keyed by backend; looked up
    # lazily so unsupported backends only fail if a GUID pk is present.
    uuid_sql_by_backend = {
        DatabaseBackend.postgresql: "gen_random_uuid()",
        DatabaseBackend.sqlite: "(lower(hex(randomblob(16))))",
    }
    for table in metadata.tables.values():
        pk_columns = table.primary_key.columns
        # composite primary keys are left untouched
        if len(pk_columns) != 1:
            continue
        for pk_column in pk_columns:
            # exact type match on purpose — GUID subclasses are skipped
            if type(pk_column.type) is not GUID:
                continue
            pk_column.server_default = sa.DefaultClause(
                sa.text(uuid_sql_by_backend[backend]))
def upgrade(migrate_engine):
    """Add schedulers.class_name (non-null, empty default) plus a
    (name, class_name) lookup index."""
    meta = sa.MetaData()
    meta.bind = migrate_engine

    # add an empty class_name to the schedulers table
    schedulers = sa.Table('schedulers', meta, autoload=True)
    class_name_col = sa.Column('class_name', sa.String(length=128),
                               nullable=False,
                               server_default=sa.DefaultClause(''))
    class_name_col.create(schedulers, populate_default=True)

    # and an index since we'll be selecting with (name= AND class=)
    name_class_idx = sa.Index('name_and_class',
                              schedulers.c.name, schedulers.c.class_name)
    name_class_idx.create(migrate_engine)
def upgrade(migrate_engine):
    """Add 'patch_author' and 'patch_comment' text columns to 'patches'.

    MySQL does not accept defaults on TEXT columns, so the empty-string
    server default is only applied on other dialects.
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # add patch_author and patch_comment to the patches table
    # mysql doesn't like default values on these columns
    defaults = {}
    if migrate_engine.dialect.name != "mysql":
        defaults['server_default'] = sa.DefaultClause('')

    patches = sautils.Table('patches', metadata, autoload=True)

    patch_author = sa.Column('patch_author', sa.Text, nullable=False,
                             **defaults)
    patch_author.create(patches, populate_default=True)

    # FIX: this column was previously built in a variable misleadingly
    # reusing the name 'patch_author'.
    patch_comment = sa.Column('patch_comment', sa.Text, nullable=False,
                              **defaults)
    patch_comment.create(patches, populate_default=True)
def setup_thd(conn):
    """Create the pre-migration schema (patches, sourcestampsets,
    sourcestamps, buildsets, changes, sourcestamp_changes) and seed the
    rows the migration under test will transform."""
    metadata = sa.MetaData()
    metadata.bind = conn

    patches = sautils.Table(
        'patches', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        # ...
    )
    patches.create()

    sourcestampsets = sautils.Table(
        'sourcestampsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
    )
    sourcestampsets.create()

    sourcestamps = sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('sourcestampsetid', sa.Integer,
                  sa.ForeignKey('sourcestampsets.id')),
    )
    sourcestamps.create()

    buildsets = sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
        sa.Column('sourcestampsetid', sa.Integer,
                  sa.ForeignKey('sourcestampsets.id')),
    )
    buildsets.create()

    changes = sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer, primary_key=True),
        sa.Column('author', sa.String(256), nullable=False),
        sa.Column('comments', sa.String(1024), nullable=False),
        sa.Column('is_dir', sa.SmallInteger, nullable=False),  # old, for CVS
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),  # CVS uses NULL
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(256)),
        sa.Column('repository', sa.String(length=512), nullable=False,
                  server_default=''),
        sa.Column('codebase', sa.String(256), nullable=False,
                  server_default=sa.DefaultClause("")),
        sa.Column('project', sa.String(length=512), nullable=False,
                  server_default=''),
    )
    changes.create()

    sourcestamp_changes = sautils.Table(
        'sourcestamp_changes', metadata,
        sa.Column('sourcestampid', sa.Integer,
                  sa.ForeignKey('sourcestamps.id'), nullable=False),
        sa.Column('changeid', sa.Integer,
                  sa.ForeignKey('changes.changeid'), nullable=False),
    )
    sourcestamp_changes.create()

    # now insert some data..
    conn.execute(changes.insert(), [
        {'changeid': 101, 'author': 'dustin', 'comments': 'AAAA',
         'branch': 'br', 'revision': 'aaa', 'revlink': 'ht:/',
         'when_timestamp': 1356371433, 'category': 'cat',
         'repository': 'git:/', 'codebase': 'cb', 'project': 'pr',
         'is_dir': 0},
        {'changeid': 102, 'author': 'tom', 'comments': 'BBBB',
         'branch': 'br', 'revision': 'bbb', 'revlink': 'ht:/',
         'when_timestamp': 1356371433, 'category': 'cat',
         'repository': 'git:/', 'codebase': 'cb', 'project': 'pr',
         'is_dir': 0},
        {'changeid': 103, 'author': 'pierre', 'comments': 'CCCC',
         'branch': 'dev', 'revision': 'ccc', 'revlink': 'ht:/',
         'when_timestamp': 1356371433, 'category': 'cat',
         'repository': 'git:/', 'codebase': 'cb', 'project': 'pr',
         'is_dir': 0},
    ])
    conn.execute(patches.insert(), [
        {'id': 301},  # other columns don't matter
    ])
    conn.execute(sourcestampsets.insert(), [
        {'id': 2001},
        {'id': 2002},
        {'id': 2011},
        {'id': 9999},
    ])
    conn.execute(sourcestamps.insert(), [
        {'id': 201, 'branch': 'br', 'revision': 'aaa', 'patchid': None,
         'repository': 'git:/', 'codebase': 'cb', 'project': 'pr',
         'sourcestampsetid': 2001},
        {'id': 202, 'branch': 'br', 'revision': 'bbb', 'patchid': None,
         'repository': 'git:/', 'codebase': 'cb', 'project': 'pr',
         'sourcestampsetid': 2002},
        {'id': 211, 'branch': 'br', 'revision': 'aaa', 'patchid': 301,
         'repository': 'git:/', 'codebase': 'cb', 'project': 'pr',
         'sourcestampsetid': 2011},
        {'id': 221, 'branch': None, 'revision': 'a22', 'patchid': None,
         'repository': 'hg:/', 'codebase': 'lib1', 'project': 'pr',
         'sourcestampsetid': 2001},
        {'id': 231, 'branch': None, 'revision': 'a33', 'patchid': None,
         'repository': 'svn:/', 'codebase': 'lib2', 'project': 'pr',
         'sourcestampsetid': 2001},
        {'id': 222, 'branch': None, 'revision': 'b22', 'patchid': None,
         'repository': 'hg:/', 'codebase': 'lib1', 'project': 'pr',
         'sourcestampsetid': 2002},
        {'id': 232, 'branch': None, 'revision': 'b33', 'patchid': None,
         'repository': 'svn:/', 'codebase': 'lib2', 'project': 'pr',
         'sourcestampsetid': 2002},
        # this sourcestamp gets forgotten, because it's not used
        {'id': 999, 'branch': None, 'revision': '999', 'patchid': None,
         'repository': 'svn:/', 'codebase': '999', 'project': 'pr',
         'sourcestampsetid': 9999},
    ])
    conn.execute(sourcestamp_changes.insert(), [
        # change 101 has sourcestamp 201
        {'changeid': 101, 'sourcestampid': 201},
        # change 102 has sourcestamps 201 and 202
        {'changeid': 102, 'sourcestampid': 201},
        {'changeid': 102, 'sourcestampid': 202},
        # change 103 has no sourcestamps
    ])
    conn.execute(buildsets.insert(), [
        {'id': 501, 'submitted_at': 1356372121, 'sourcestampsetid': 2001},
        {'id': 502, 'submitted_at': 1356372131, 'sourcestampsetid': 2002},
        {'id': 511, 'submitted_at': 1356372141, 'sourcestampsetid': 2011},
    ])
def to_sql(df, name, con, keycols=None, references=None, chunksize=4096,
           method="multi", basecols=None, **kw):
    """Like ``pd.DataFrame.to_sql``, but supports auto-increment unique key
    columns, foreign key columns, and automatic JSONification of extraneous
    columns.

    keycols = ["colname1", "colname2", ...]
      Columns to add a database level default value of an autoincrement
      sequence. The columns do not need to, but can, exist in the dataframe.

    references = {"colname": "desttable.destcolname", ...}
      Columns to add a foreign key constraint to. Columns must exist in the
      dataframe, and the destination table and column must already exist.

    basecols = ["colname1", "colname2", ...]
      Only use these columns to form the table. Any other columns will be
      stored in a JSON object per row in the column "extra".
    """
    # BUGFIX: the defaults used to be the mutable literals [] and {}, which
    # are shared across calls -- and ``references`` was mutated below, so
    # resolved Column objects leaked into the caller's dict and into later
    # calls. Use the None-sentinel idiom instead.
    keycols = [] if keycols is None else keycols
    references = {} if references is None else references

    if basecols is not None:
        # Ensure every requested base column exists (filled with None), then
        # fold all remaining columns into one JSON "extra" column per row.
        df = df.assign(
            **{missing: None for missing in set(basecols) - set(df.columns)})
        # Select the leftover columns in df.columns order (not via a set) so
        # the JSON key order is deterministic run-to-run.
        basecol_set = set(basecols)
        extra = df[[c for c in df.columns if c not in basecol_set]]
        df = df[basecols].copy()
        df["extra"] = extra.apply(lambda r: json.dumps(
            {
                # unwrap numpy scalars so json.dumps can serialise them
                key: value.item() if hasattr(value, "dtype") else value
                for key, value in r.items()
            },
            default=_serialiser), axis=1)
        df["extra"] = df["extra"].astype("object")

    pandastable = pd.io.sql.SQLTable(name, pd.io.sql.SQLDatabase(con),
                                     frame=df, if_exists="append", **kw)
    # Copy the pandas-generated table into a fresh MetaData so constraints
    # and server defaults can be attached without touching pandas' object.
    table = pandastable.table.tometadata(sqlalchemy.MetaData(con))

    # Resolve "desttable.destcol" strings into Column objects in a *local*
    # dict; the caller's ``references`` argument is left untouched.
    resolved_refs = {}
    for key, ref in references.items():
        ref_table, ref_col = ref.split(".")
        logger.debug("For table %s, col %s -> ref table: %s, ref col: %s"
                     % (name, key, ref_table, ref_col))
        reftable = sqlalchemy.Table(
            ref_table, table.metadata,
            sqlalchemy.Column(ref_col, sqlalchemy.BigInteger(),
                              primary_key=True))
        resolved_refs[key] = reftable.columns[ref_col]

    seqs = []
    for keycol in keycols:
        seq = sqlalchemy.sql.schema.Sequence(
            '%s_%s_seq' % (pandastable.name, keycol))
        seqs.append(seq)
        # The key column may be absent from the dataframe; create it so the
        # database fills it from the sequence.
        if keycol not in table.columns:
            table.append_column(
                sqlalchemy.Column(keycol, sqlalchemy.BigInteger()))
        keycoldef = table.columns[keycol]
        table.append_constraint(
            sqlalchemy.sql.schema.UniqueConstraint(keycol))
        # nextval(seq) as the server-side default => auto-increment key
        keycoldef.server_default = sqlalchemy.DefaultClause(seq.next_value())

    for key, refcol in resolved_refs.items():
        table.append_constraint(
            sqlalchemy.ForeignKeyConstraint([key], [refcol]))

    # Sequences must exist before the table whose defaults reference them.
    for seq in seqs:
        seq.create(con)
    # table.create()
    table.metadata.create_all(checkfirst=True)
    pandastable.insert(chunksize=chunksize, method=method)
def upgrade(migrate_engine):
    """Schema migration: make the primary-key ``id`` columns of the listed
    tables auto-incrementing.

    On PostgreSQL this is done with explicit raw-SQL sequences (SERIAL
    emulation); on other dialects via sqlalchemy-migrate's ``Column.alter``.
    Also drops the now-redundant ``changes_nextid`` table.

    :param migrate_engine: SQLAlchemy engine bound to the database to migrate
    """
    metadata = sa.MetaData()
    metadata.bind = migrate_engine

    # re-include some of the relevant tables, as they were in version 3, since
    # sqlalchemy's reflection doesn't work very well for defaults. These must
    # be complete table specifications as for some dialects sqlalchemy will
    # create a brand new, temporary table, and copy data over
    sautils.Table(
        "schedulers", metadata,
        sa.Column('schedulerid', sa.Integer,
                  autoincrement=False, primary_key=True),
        sa.Column('name', sa.String(128), nullable=False),
        sa.Column('state', sa.String(1024), nullable=False),
        sa.Column('class_name', sa.String(128), nullable=False),
    )
    sautils.Table(
        'changes', metadata,
        sa.Column('changeid', sa.Integer,
                  autoincrement=False, primary_key=True),
        sa.Column('author', sa.String(256), nullable=False),
        sa.Column('comments', sa.String(1024), nullable=False),
        sa.Column('is_dir', sa.SmallInteger, nullable=False),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('revlink', sa.String(256)),
        sa.Column('when_timestamp', sa.Integer, nullable=False),
        sa.Column('category', sa.String(256)),
        sa.Column('repository', sa.Text, nullable=False, server_default=''),
        sa.Column('project', sa.Text, nullable=False, server_default=''),
    )
    sautils.Table(
        'patches', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('patchlevel', sa.Integer, nullable=False),
        sa.Column('patch_base64', sa.Text, nullable=False),
        sa.Column('subdir', sa.Text),
    )
    sautils.Table(
        'sourcestamps', metadata,
        sa.Column('id', sa.Integer, autoincrement=True, primary_key=True),
        sa.Column('branch', sa.String(256)),
        sa.Column('revision', sa.String(256)),
        sa.Column('patchid', sa.Integer, sa.ForeignKey('patches.id')),
        sa.Column('repository', sa.Text(length=None), nullable=False,
                  server_default=''),
        sa.Column('project', sa.Text(length=None), nullable=False,
                  server_default=''),
    )
    sautils.Table(
        'buildsets', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('external_idstring', sa.String(256)),
        sa.Column('reason', sa.String(256)),
        sa.Column('sourcestampid', sa.Integer,
                  sa.ForeignKey('sourcestamps.id'), nullable=False),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete', sa.SmallInteger, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('complete_at', sa.Integer),
        sa.Column('results', sa.SmallInteger),
    )
    sautils.Table(
        'buildrequests', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('buildsetid', sa.Integer, sa.ForeignKey("buildsets.id"),
                  nullable=False),
        sa.Column('buildername', sa.String(length=None), nullable=False),
        sa.Column('priority', sa.Integer, nullable=False,
                  server_default=sa.DefaultClause("0")),
        sa.Column('claimed_at', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('claimed_by_name', sa.String(length=None)),
        sa.Column('claimed_by_incarnation', sa.String(length=None)),
        sa.Column('complete', sa.Integer,
                  server_default=sa.DefaultClause("0")),
        sa.Column('results', sa.SmallInteger),
        sa.Column('submitted_at', sa.Integer, nullable=False),
        sa.Column('complete_at', sa.Integer),
    )
    sautils.Table(
        'builds', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('number', sa.Integer, nullable=False),
        sa.Column('brid', sa.Integer, sa.ForeignKey('buildrequests.id'),
                  nullable=False),
        sa.Column('start_time', sa.Integer, nullable=False),
        sa.Column('finish_time', sa.Integer),
    )

    # ("table", "column") pairs whose columns become auto-incrementing
    to_autoinc = [
        s.split(".") for s in (
            "schedulers.schedulerid",
            "builds.id",
            "changes.changeid",
            "buildrequests.id",
            "buildsets.id",
            "patches.id",
            "sourcestamps.id",
        )
    ]

    # It seems that SQLAlchemy's ALTER TABLE doesn't work when migrating from
    # INTEGER to PostgreSQL's SERIAL data type (which is just pseudo data type
    # for INTEGER with SEQUENCE), so we have to work-around this with raw SQL.
    if migrate_engine.dialect.name in ('postgres', 'postgresql'):
        for table_name, col_name in to_autoinc:
            # hand-build the sequence + default that SERIAL would have created
            migrate_engine.execute(
                "CREATE SEQUENCE %s_%s_seq" % (table_name, col_name))
            migrate_engine.execute(
                "ALTER TABLE %s ALTER COLUMN %s SET DEFAULT "
                "nextval('%s_%s_seq'::regclass)"
                % (table_name, col_name, table_name, col_name))
            # OWNED BY ties the sequence's lifetime to the column
            migrate_engine.execute(
                "ALTER SEQUENCE %s_%s_seq OWNED BY %s.%s"
                % (table_name, col_name, table_name, col_name))
    else:
        # other dialects: sqlalchemy-migrate's in-place column alteration
        for table_name, col_name in to_autoinc:
            table = metadata.tables[table_name]
            col = table.c[col_name]
            col.alter(autoincrement=True)

    # also drop the changes_nextid table here (which really should have been a
    # sequence..)
    table = sautils.Table('changes_nextid', metadata, autoload=True)
    table.drop()
buildsets = sautils.Table( 'buildsets', metadata, sa.Column('id', sa.Integer, autoincrement=False, primary_key=True), sa.Column('external_idstring', sa.String(256)), sa.Column('reason', sa.String(256)), sa.Column('sourcestampid', sa.Integer, sa.ForeignKey('sourcestamps.id'), nullable=False), sa.Column('submitted_at', sa.Integer, nullable=False), sa.Column('complete', sa.SmallInteger, nullable=False, server_default=sa.DefaultClause("0")), sa.Column('complete_at', sa.Integer), sa.Column('results', sa.SmallInteger), ) buildset_properties = sautils.Table( 'buildset_properties', metadata, sa.Column('buildsetid', sa.Integer, sa.ForeignKey('buildsets.id'), nullable=False), sa.Column('property_name', sa.String(256), nullable=False), sa.Column('property_value', sa.String(1024), nullable=False), )