def downgrade():
    """Recreate the per-site ``*_torrents_info`` tables removed by upgrade().

    Each table stores one binary ``info_dict`` blob per torrent, keyed by
    the owning torrent's id and cascaded on torrent deletion.
    """
    # The two tables are identical apart from the site prefix.
    op.create_table(
        'nyaa_torrents_info',
        sa.Column('info_dict', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('torrent_id', mysql.INTEGER(display_width=11),
                  autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ['torrent_id'], ['nyaa_torrents.id'],
            name='nyaa_torrents_info_ibfk_1', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('torrent_id'),
        mysql_collate='utf8_bin',
        mysql_default_charset='utf8',
        mysql_engine='InnoDB',
        mysql_row_format='COMPRESSED',
    )
    op.create_table(
        'sukebei_torrents_info',
        sa.Column('info_dict', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('torrent_id', mysql.INTEGER(display_width=11),
                  autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(
            ['torrent_id'], ['sukebei_torrents.id'],
            name='sukebei_torrents_info_ibfk_1', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('torrent_id'),
        mysql_collate='utf8_bin',
        mysql_default_charset='utf8',
        mysql_engine='InnoDB',
        mysql_row_format='COMPRESSED',
    )
def downgrade():
    """Revert the matching upgrade(): drop the added user columns, relax
    NOT NULL constraints, and restore the previous index names."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Security fields added to `users` by upgrade() go away.
    op.drop_column('users', 'security_question')
    op.drop_column('users', 'hashed_security_answer')
    # Relax columns back to nullable.
    op.alter_column('entries', 'created_by_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=True)
    op.alter_column('entries', 'imgs_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=True)
    op.alter_column('entries', 'title',
                    existing_type=mysql.VARCHAR(length=100),
                    nullable=True)
    # Replace the hand-named entry indexes with the auto-named one.
    op.create_index(op.f('ix_entries_created_at'), 'entries',
                    ['created_at'], unique=False)
    op.drop_index('ix_entry_created_at', table_name='entries')
    op.drop_index('ix_entry_created_by', table_name='entries')
    op.alter_column('filetypes', 'ft_1',
                    existing_type=mysql.VARCHAR(length=5),
                    nullable=True)
    op.alter_column('imgs', 'filetypes_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=True)
    op.alter_column('imgs', 'img_1',
                    existing_type=mysql.MEDIUMBLOB(),
                    nullable=True)
    op.drop_index('ix_users_created_at', table_name='users')
    op.drop_column('users', 'is_active')
def upgrade():
    """Add security/2FA columns to `users` and make existing image/entry
    columns mandatory."""
    # ### commands auto generated by Alembic - please adjust! ###
    # New user-security columns; the TINYINT flags default to 0 (off).
    op.add_column('users',
                  sa.Column('security_question', sa.Integer(),
                            nullable=False))
    op.add_column('users',
                  sa.Column('otp_secret', sa.String(length=10),
                            nullable=False))
    op.add_column('users',
                  sa.Column('hashed_security_answer', sa.String(length=255),
                            nullable=False))
    op.add_column('users',
                  sa.Column('is_active', mysql.TINYINT(display_width=1),
                            server_default=sa.text('0'),
                            autoincrement=False, nullable=False))
    op.add_column('users',
                  sa.Column('has_2fa', mysql.TINYINT(display_width=1),
                            server_default=sa.text('0'),
                            autoincrement=False, nullable=False))
    # Tighten previously-optional columns to NOT NULL.
    op.alter_column('imgs', 'img_1',
                    existing_type=mysql.MEDIUMBLOB(), nullable=False)
    op.alter_column('imgs', 'filetypes_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=False)
    op.alter_column('filetypes', 'ft_1',
                    existing_type=mysql.VARCHAR(length=5), nullable=False)
    op.alter_column('entries', 'title',
                    existing_type=mysql.VARCHAR(length=100), nullable=False)
    op.alter_column('entries', 'imgs_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=False)
    op.alter_column('entries', 'created_by_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=False)
def upgrade():
    """Create the `dataprocessingcache` table.

    Holds at most one row per namespace (unique constraint) caching the
    contact_rankings / contact_groups blobs together with the time each
    was last refreshed.
    """
    op.create_table(
        'dataprocessingcache',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('updated_at', sa.DateTime(), nullable=False),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('namespace_id', sa.Integer(), nullable=False),
        sa.Column('contact_rankings', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('contact_rankings_last_updated', sa.DateTime(),
                  nullable=True),
        sa.Column('contact_groups', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('contact_groups_last_updated', sa.DateTime(),
                  nullable=True),
        # Cache rows die with their namespace.
        sa.ForeignKeyConstraint(['namespace_id'], [u'namespace.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('namespace_id'),
    )
def upgrade():
    """Create the ``dataprocessingcache`` table.

    One row per namespace (enforced by the unique constraint) caching the
    contact rankings/groups blobs plus their last-update timestamps.
    """
    op.create_table(
        "dataprocessingcache",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.Column("namespace_id", sa.Integer(), nullable=False),
        sa.Column("contact_rankings", mysql.MEDIUMBLOB(), nullable=True),
        sa.Column("contact_rankings_last_updated", sa.DateTime(),
                  nullable=True),
        sa.Column("contact_groups", mysql.MEDIUMBLOB(), nullable=True),
        sa.Column("contact_groups_last_updated", sa.DateTime(),
                  nullable=True),
        # Rows are removed automatically when the namespace is deleted.
        sa.ForeignKeyConstraint(
            ["namespace_id"], [u"namespace.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("namespace_id"),
    )
def upgrade():
    """Create the `images` table: uniquely-named MEDIUMBLOB payloads."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "images",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("name", sa.String(length=20), nullable=True),
        sa.Column("image", mysql.MEDIUMBLOB(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name"),
    )
def upgrade():
    """Create `imgs` and `filetypes` and wire both into `entries`."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Image rows: one mandatory blob plus three optional extra slots.
    op.create_table(
        'imgs',
        sa.Column('id', mysql.INTEGER(display_width=11),
                  autoincrement=True, nullable=False),
        sa.Column('img_1', mysql.MEDIUMBLOB(), nullable=False),
        sa.Column('img_2', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('img_3', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('img_4', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('filetypes_id', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_default_charset='utf8mb4',
        mysql_engine='InnoDB',
    )
    # File-type labels, one short string per image slot above.
    op.create_table(
        'filetypes',
        sa.Column('id', mysql.INTEGER(display_width=11),
                  autoincrement=True, nullable=False),
        sa.Column('ft_1', sa.String(length=5), nullable=False),
        sa.Column('ft_2', sa.String(length=5), nullable=True),
        sa.Column('ft_3', sa.String(length=5), nullable=True),
        sa.Column('ft_4', sa.String(length=5), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Attach entries to their image set and imgs to their file types.
    op.add_column('entries',
                  sa.Column('description',
                            sa.LargeBinary(length=4294967295),
                            nullable=True))
    op.add_column('entries',
                  sa.Column('imgs_id', sa.Integer(), nullable=False))
    op.create_foreign_key('fk_imgs_id', 'entries', 'imgs',
                          ['imgs_id'], ['id'])
    op.create_foreign_key('fk_filetypes_id', 'imgs', 'filetypes',
                          ['filetypes_id'], ['id'])
def downgrade():
    """Recreate the S3 bucket metadata summary and cache tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        's3_bucket_metadata_summary',
        sa.Column('bucket', mysql.VARCHAR(length=63), nullable=False),
        sa.Column('metadata_summary', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('updated_at', mysql.DATETIME(), nullable=False),
        sa.PrimaryKeyConstraint('bucket'),
        mysql_default_charset=u'utf8',
        mysql_engine=u'InnoDB',
    )
    op.create_table(
        's3_bucket_metadata_cache',
        sa.Column('bucket', mysql.VARCHAR(length=63), nullable=False),
        sa.Column('metadata_cache', mysql.MEDIUMBLOB(), nullable=True),
        sa.PrimaryKeyConstraint('bucket'),
        mysql_default_charset=u'utf8',
        mysql_engine=u'InnoDB',
    )
# NOTE(review): SQLAlchemy model-module prelude, collapsed onto one long
# physical line by an earlier tooling pass. Left byte-identical: the final
# `schema.Table(...)` call is truncated in this view (continues outside the
# visible chunk), so reflowing is unsafe here. `get_utc_now` returns a
# timezone-aware UTC timestamp and is used as the `date_created` default;
# `MYSQL_LARGE_BINARY` is a shared MEDIUMBLOB type instance — presumably
# reused by model columns defined elsewhere in the file; confirm.
import pytz from datetime import datetime from sqlalchemy import schema, types from sqlalchemy.orm import relationship, validates from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.dialects import mysql __all__ = [ 'Base', 'ContentBlock', 'DataCollection', 'Service', 'InboxMessage', 'ResultSet', 'Subscription' ] Base = declarative_base(name='Model') MYSQL_LARGE_BINARY = mysql.MEDIUMBLOB() def get_utc_now(): return datetime.utcnow().replace(tzinfo=pytz.UTC) class AbstractModel(Base): __abstract__ = True date_created = schema.Column(types.DateTime(timezone=True), default=get_utc_now) collection_to_content_block = schema.Table( 'collection_to_content_block', Base.metadata,
# NOTE(review): end-to-end replication-handler test class exercising MySQL
# type coverage (TINYINT..LONGTEXT, ENUM/SET), CREATE/ALTER TABLE tracking,
# basic CRUD message verification, and PII encryption. The chunk below is
# collapsed onto long physical lines and is left byte-identical: several of
# the visible line breaks fall INSIDE string literals (e.g. the ", {}"
# format string and the triple-quoted CREATE TABLE queries), so reflowing
# would risk changing runtime strings. Review smells to confirm against the
# project's Python version: `expected.iteritems()` in assert_expected_result
# and the `/` integer divisions in the TIME-to-time conversion are
# Python-2-only idioms — TODO confirm before any py3 migration.
class TestEndToEnd(object): timeout_seconds = 60 @pytest.fixture def table_name(self, replhandler): return '{0}_biz'.format(replhandler) @pytest.fixture def avro_schema(self, table_name): return { u'fields': [{ u'type': u'int', u'name': u'id', u'pkey': 1 }, { u'default': None, u'maxlen': 64, u'type': [u'null', u'string'], u'name': u'name' }], u'namespace': u'', u'name': table_name, u'type': u'record', u'pkey': [u'id'] } @pytest.fixture(params=[{ 'table_name': 'test_complex_table', 'test_schema': [ # test_bit # ColumnInfo('BIT(8)', mysql.BIT, 3), # test_tinyint ColumnInfo('TINYINT', mysql.TINYINT(), 127), ColumnInfo('TINYINT(3) SIGNED', mysql.TINYINT(display_width=3, unsigned=False), -128), ColumnInfo('TINYINT(3) UNSIGNED', mysql.TINYINT(display_width=3, unsigned=True), 255), ColumnInfo( 'TINYINT(3) UNSIGNED ZEROFILL', mysql.TINYINT(display_width=3, unsigned=True, zerofill=True), 5), ColumnInfo('BOOL', mysql.BOOLEAN(), 1), ColumnInfo('BOOLEAN', mysql.BOOLEAN(), 1), # test_smallint ColumnInfo('SMALLINT', mysql.SMALLINT(), 32767), ColumnInfo('SMALLINT(5) SIGNED', mysql.SMALLINT(display_width=5, unsigned=False), -32768), ColumnInfo('SMALLINT(5) UNSIGNED', mysql.SMALLINT(display_width=5, unsigned=True), 65535), ColumnInfo( 'SMALLINT(3) UNSIGNED ZEROFILL', mysql.SMALLINT(display_width=3, unsigned=True, zerofill=True), 5), # test_mediumint ColumnInfo('MEDIUMINT', mysql.MEDIUMINT(), 8388607), ColumnInfo('MEDIUMINT(7) SIGNED', mysql.MEDIUMINT(display_width=7, unsigned=False), -8388608), ColumnInfo('MEDIUMINT(8) UNSIGNED', mysql.MEDIUMINT(display_width=8, unsigned=True), 16777215), ColumnInfo( 'MEDIUMINT(3) UNSIGNED ZEROFILL', mysql.MEDIUMINT(display_width=3, unsigned=True, zerofill=True), 5), # test_int ColumnInfo('INT', mysql.INTEGER(), 2147483647), ColumnInfo('INT(10) SIGNED', mysql.INTEGER(display_width=10, unsigned=False), -2147483648), ColumnInfo('INT(11) UNSIGNED', mysql.INTEGER(display_width=11, unsigned=True), 4294967295), ColumnInfo( 'INT(3) UNSIGNED ZEROFILL', 
mysql.INTEGER(display_width=3, unsigned=True, zerofill=True), 5), ColumnInfo('INTEGER(3)', mysql.INTEGER(display_width=3), 3), # test_bigint ColumnInfo('BIGINT(19)', mysql.BIGINT(display_width=19), 23372854775807), ColumnInfo('BIGINT(19) SIGNED', mysql.BIGINT(display_width=19, unsigned=False), -9223372036854775808), # ColumnInfo('BIGINT(20) UNSIGNED', mysql.INTEGER(display_width=20, unsigned=True), 18446744073709551615), ColumnInfo( 'BIGINT(3) UNSIGNED ZEROFILL', mysql.BIGINT(display_width=3, unsigned=True, zerofill=True), 5), # test_decimal ColumnInfo('DECIMAL(9, 2)', mysql.DECIMAL(precision=9, scale=2), 101.41), ColumnInfo('DECIMAL(12, 11) SIGNED', mysql.DECIMAL(precision=12, scale=11, unsigned=False), -3.14159265359), ColumnInfo('DECIMAL(2, 1) UNSIGNED', mysql.DECIMAL(precision=2, scale=1, unsigned=True), 0.0), ColumnInfo( 'DECIMAL(9, 2) UNSIGNED ZEROFILL', mysql.DECIMAL(precision=9, scale=2, unsigned=True, zerofill=True), 5.22), ColumnInfo('DEC(9, 3)', mysql.DECIMAL(precision=9, scale=3), 5.432), ColumnInfo('FIXED(9, 3)', mysql.DECIMAL(precision=9, scale=3), 45.432), # test_float ColumnInfo('FLOAT', mysql.FLOAT(), 3.14), ColumnInfo('FLOAT(5, 3) SIGNED', mysql.FLOAT(precision=5, scale=3, unsigned=False), -2.14), ColumnInfo('FLOAT(5, 3) UNSIGNED', mysql.FLOAT(precision=5, scale=3, unsigned=True), 2.14), ColumnInfo( 'FLOAT(5, 3) UNSIGNED ZEROFILL', mysql.FLOAT(precision=5, scale=3, unsigned=True, zerofill=True), 24.00), ColumnInfo('FLOAT(5)', mysql.FLOAT(5), 24.01), ColumnInfo('FLOAT(30)', mysql.FLOAT(30), 24.01), # test_double ColumnInfo('DOUBLE', mysql.DOUBLE(), 3.14), ColumnInfo('DOUBLE(5, 3) SIGNED', mysql.DOUBLE(precision=5, scale=3, unsigned=False), -3.14), ColumnInfo('DOUBLE(5, 3) UNSIGNED', mysql.DOUBLE(precision=5, scale=3, unsigned=True), 2.14), ColumnInfo( 'DOUBLE(5, 3) UNSIGNED ZEROFILL', mysql.DOUBLE(precision=5, scale=3, unsigned=True, zerofill=True), 24.00), ColumnInfo('DOUBLE PRECISION', mysql.DOUBLE(), 3.14), ColumnInfo('REAL', mysql.DOUBLE(), 
3.14), # test_date_time ColumnInfo('DATE', mysql.DATE(), datetime.date(1901, 1, 1)), ColumnInfo('DATE', mysql.DATE(), datetime.date(2050, 12, 31)), ColumnInfo('DATETIME', mysql.DATETIME(), datetime.datetime(1970, 1, 1, 0, 0, 1, 0)), ColumnInfo('DATETIME', mysql.DATETIME(), datetime.datetime(2038, 1, 19, 3, 14, 7, 0)), ColumnInfo('DATETIME(6)', mysql.DATETIME(fsp=6), datetime.datetime(1970, 1, 1, 0, 0, 1, 111111)), ColumnInfo('DATETIME(6)', mysql.DATETIME(fsp=6), datetime.datetime(2038, 1, 19, 3, 14, 7, 999999)), ColumnInfo('TIMESTAMP', mysql.TIMESTAMP(), datetime.datetime(1970, 1, 1, 0, 0, 1, 0)), ColumnInfo('TIMESTAMP', mysql.TIMESTAMP(), datetime.datetime(2038, 1, 19, 3, 14, 7, 0)), ColumnInfo('TIMESTAMP(6)', mysql.TIMESTAMP(fsp=6), datetime.datetime(1970, 1, 1, 0, 0, 1, 111111)), ColumnInfo('TIMESTAMP(6)', mysql.TIMESTAMP(fsp=6), datetime.datetime(2038, 1, 19, 3, 14, 7, 999999)), ColumnInfo('TIME', mysql.TIME(), datetime.timedelta(0, 0, 0)), ColumnInfo('TIME', mysql.TIME(), datetime.timedelta(0, 23 * 3600 + 59 * 60 + 59, 0)), ColumnInfo('TIME(6)', mysql.TIME(fsp=6), datetime.timedelta(0, 0, 111111)), ColumnInfo('TIME(6)', mysql.TIME(fsp=6), datetime.timedelta(0, 23 * 3600 + 59 * 60 + 59, 999999)), ColumnInfo('YEAR', mysql.YEAR(), 2000), ColumnInfo('YEAR(4)', mysql.YEAR(display_width=4), 2000), # test_char ColumnInfo('CHAR', mysql.CHAR(), 'a'), ColumnInfo('CHARACTER', mysql.CHAR(), 'a'), ColumnInfo('NATIONAL CHAR', mysql.CHAR(), 'a'), ColumnInfo('NCHAR', mysql.CHAR(), 'a'), ColumnInfo('CHAR(0)', mysql.CHAR(length=0), ''), ColumnInfo('CHAR(10)', mysql.CHAR(length=10), '1234567890'), ColumnInfo('VARCHAR(1000)', mysql.VARCHAR(length=1000), 'asdasdd'), ColumnInfo('CHARACTER VARYING(1000)', mysql.VARCHAR(length=1000), 'test dsafnskdf j'), ColumnInfo('NATIONAL VARCHAR(1000)', mysql.VARCHAR(length=1000), 'asdkjasd'), ColumnInfo('NVARCHAR(1000)', mysql.VARCHAR(length=1000), 'asdkjasd'), ColumnInfo('VARCHAR(10000)', mysql.VARCHAR(length=10000), '1234567890'), # 
test_binary ColumnInfo('BINARY(5)', mysql.BINARY(length=5), 'hello'), ColumnInfo('VARBINARY(100)', mysql.VARBINARY(length=100), 'hello'), ColumnInfo('TINYBLOB', mysql.TINYBLOB(), 'hello'), ColumnInfo('TINYTEXT', mysql.TINYTEXT(), 'hello'), ColumnInfo('BLOB', mysql.BLOB(), 'hello'), ColumnInfo('BLOB(100)', mysql.BLOB(length=100), 'hello'), ColumnInfo('TEXT', mysql.TEXT(), 'hello'), ColumnInfo('TEXT(100)', mysql.TEXT(length=100), 'hello'), ColumnInfo('MEDIUMBLOB', mysql.MEDIUMBLOB(), 'hello'), ColumnInfo('MEDIUMTEXT', mysql.MEDIUMTEXT(), 'hello'), ColumnInfo('LONGBLOB', mysql.LONGBLOB(), 'hello'), ColumnInfo('LONGTEXT', mysql.LONGTEXT(), 'hello'), # test_enum ColumnInfo("ENUM('ONE', 'TWO')", mysql.ENUM(['ONE', 'TWO']), 'ONE'), # test_set ColumnInfo("SET('ONE', 'TWO')", mysql.SET(['ONE', 'TWO']), set(['ONE', 'TWO'])) ] }]) def complex_table(self, request): return request.param @pytest.fixture def complex_table_name(self, replhandler, complex_table): return "{0}_{1}".format(replhandler, complex_table['table_name']) @pytest.fixture def complex_table_schema(self, complex_table): return complex_table['test_schema'] def _build_sql_column_name(self, complex_column_name): return 'test_{}'.format(complex_column_name) def _build_complex_column_create_query(self, complex_column_name, complex_column_schema): return '`{0}` {1}'.format(complex_column_name, complex_column_schema) @pytest.fixture def complex_table_create_query(self, complex_table_schema): return ", ".join([ self._build_complex_column_create_query( self._build_sql_column_name(indx), complex_column_schema.type) for indx, complex_column_schema in enumerate(complex_table_schema) ]) @pytest.fixture def sqla_objs(self, complex_table_schema): return [ complex_column_schema.sqla_obj for complex_column_schema in complex_table_schema ] @pytest.fixture def create_complex_table(self, containers, rbrsource, complex_table_name, complex_table_create_query): if complex_table_create_query.strip(): complex_table_create_query = ", 
{}".format( complex_table_create_query) query = """CREATE TABLE {complex_table_name} ( `id` int(11) NOT NULL PRIMARY KEY {complex_table_create_query} ) ENGINE=InnoDB DEFAULT CHARSET=utf8 """.format(complex_table_name=complex_table_name, complex_table_create_query=complex_table_create_query) execute_query_get_one_row(containers, rbrsource, query) @pytest.fixture def ComplexModel(self, complex_table_name, create_complex_table, complex_table_schema): class Model(Base): __tablename__ = complex_table_name id = Column('id', Integer, primary_key=True) for indx, complex_column_schema in enumerate(complex_table_schema): col_name = self._build_sql_column_name(indx) setattr(Model, col_name, Column(col_name, complex_column_schema.sqla_obj)) return Model @pytest.fixture def actual_complex_data(self, complex_table_schema): res = {'id': 1} for indx, complex_column_schema in enumerate(complex_table_schema): if isinstance(complex_column_schema.sqla_obj, mysql.DATE): data = complex_column_schema.data.strftime('%Y-%m-%d') elif isinstance(complex_column_schema.sqla_obj, mysql.DATETIME): data = complex_column_schema.data.strftime( '%Y-%m-%d %H:%M:%S.%f') elif isinstance(complex_column_schema.sqla_obj, mysql.TIMESTAMP): data = complex_column_schema.data.strftime( '%Y-%m-%d %H:%M:%S.%f') elif isinstance(complex_column_schema.sqla_obj, mysql.TIME): time = datetime.time( complex_column_schema.data.seconds / 3600, (complex_column_schema.data.seconds / 60) % 60, complex_column_schema.data.seconds % 60, complex_column_schema.data.microseconds) data = time.strftime('%H:%M:%S.%f') else: data = complex_column_schema.data res.update({self._build_sql_column_name(indx): data}) return res @pytest.fixture def expected_complex_data(self, actual_complex_data, complex_table_schema): expected_complex_data_dict = {'id': 1} for indx, complex_column_schema in enumerate(complex_table_schema): column_name = self._build_sql_column_name(indx) if isinstance(complex_column_schema.sqla_obj, mysql.SET): 
expected_complex_data_dict[column_name] = \ sorted(actual_complex_data[column_name]) elif isinstance(complex_column_schema.sqla_obj, mysql.DATETIME): date_time_obj = \ complex_column_schema.data.isoformat() expected_complex_data_dict[column_name] = date_time_obj elif isinstance(complex_column_schema.sqla_obj, mysql.TIMESTAMP): date_time_obj = \ complex_column_schema.data.replace(tzinfo=pytz.utc) expected_complex_data_dict[column_name] = date_time_obj elif isinstance(complex_column_schema.sqla_obj, mysql.TIME): number_of_micros = transform_timedelta_to_number_of_microseconds( complex_column_schema.data) expected_complex_data_dict[column_name] = number_of_micros else: expected_complex_data_dict[column_name] = \ complex_column_schema.data return expected_complex_data_dict def test_complex_table(self, containers, rbrsource, complex_table_name, ComplexModel, actual_complex_data, expected_complex_data, schematizer, namespace, rbr_source_session, gtid_enabled): if not gtid_enabled: increment_heartbeat(containers, rbrsource) complex_instance = ComplexModel(**actual_complex_data) rbr_source_session.add(complex_instance) rbr_source_session.commit() messages = _fetch_messages(containers, schematizer, namespace, complex_table_name, 1) expected_messages = [ { 'message_type': MessageType.create, 'payload_data': expected_complex_data }, ] _verify_messages(messages, expected_messages) def test_create_table(self, containers, rbrsource, schematracker, create_table_query, avro_schema, table_name, namespace, schematizer, rbr_source_session, gtid_enabled): if not gtid_enabled: increment_heartbeat(containers, rbrsource) execute_query_get_one_row( containers, rbrsource, create_table_query.format(table_name=table_name)) # Need to poll for the creation of the table _wait_for_table(containers, schematracker, table_name) # Check the schematracker db also has the table. 
verify_create_table_query = "SHOW CREATE TABLE {table_name}".format( table_name=table_name) verify_create_table_result = execute_query_get_one_row( containers, schematracker, verify_create_table_query) expected_create_table_result = execute_query_get_one_row( containers, rbrsource, verify_create_table_query) self.assert_expected_result(verify_create_table_result, expected_create_table_result) # It's necessary to insert data for the topic to actually be created. Biz = _generate_basic_model(table_name) rbr_source_session.add(Biz(id=1, name='insert')) rbr_source_session.commit() _wait_for_schematizer_topic(schematizer, namespace, table_name) # Check schematizer. self.check_schematizer_has_correct_source_info(table_name=table_name, avro_schema=avro_schema, namespace=namespace, schematizer=schematizer) def test_create_table_with_row_format(self, containers, rbrsource, schematracker, replhandler, gtid_enabled): table_name = '{0}_row_format_tester'.format(replhandler) create_table_stmt = """ CREATE TABLE {name} ( id int(11) primary key) ROW_FORMAT=COMPRESSED ENGINE=InnoDB """.format(name=table_name) if not gtid_enabled: increment_heartbeat(containers, rbrsource) execute_query_get_one_row(containers, rbrsource, create_table_stmt) _wait_for_table(containers, schematracker, table_name) # Check the schematracker db also has the table. 
verify_create_table_query = "SHOW CREATE TABLE {table_name}".format( table_name=table_name) verify_create_table_result = execute_query_get_one_row( containers, schematracker, verify_create_table_query) expected_create_table_result = execute_query_get_one_row( containers, rbrsource, verify_create_table_query) self.assert_expected_result(verify_create_table_result, expected_create_table_result) def test_alter_table(self, containers, rbrsource, schematracker, alter_table_query, table_name, gtid_enabled): if not gtid_enabled: increment_heartbeat(containers, rbrsource) execute_query_get_one_row( containers, rbrsource, alter_table_query.format(table_name=table_name)) execute_query_get_one_row( containers, rbrsource, "ALTER TABLE {name} ROW_FORMAT=COMPRESSED".format(name=table_name)) time.sleep(2) # Check the schematracker db also has the table. verify_describe_table_query = "DESCRIBE {table_name}".format( table_name=table_name) verify_alter_table_result = execute_query_get_all_rows( containers, schematracker, verify_describe_table_query) expected_alter_table_result = execute_query_get_all_rows( containers, rbrsource, verify_describe_table_query) if 'address' in verify_alter_table_result[0].values(): actual_result = verify_alter_table_result[0] elif 'address' in verify_alter_table_result[1].values(): actual_result = verify_alter_table_result[1] else: raise AssertionError('The alter table query did not succeed') if 'address' in expected_alter_table_result[0].values(): expected_result = expected_alter_table_result[0] else: expected_result = expected_alter_table_result[1] self.assert_expected_result(actual_result, expected_result) def test_basic_table(self, containers, replhandler, rbrsource, create_table_query, namespace, schematizer, rbr_source_session, gtid_enabled): if not gtid_enabled: increment_heartbeat(containers, rbrsource) source = "{0}_basic_table".format(replhandler) execute_query_get_one_row(containers, rbrsource, create_table_query.format(table_name=source)) 
BasicModel = _generate_basic_model(source) model_1 = BasicModel(id=1, name='insert') model_2 = BasicModel(id=2, name='insert') rbr_source_session.add(model_1) rbr_source_session.add(model_2) rbr_source_session.commit() model_1.name = 'update' rbr_source_session.delete(model_2) rbr_source_session.commit() messages = _fetch_messages(containers, schematizer, namespace, source, 4) expected_messages = [ { 'message_type': MessageType.create, 'payload_data': { 'id': 1, 'name': 'insert' } }, { 'message_type': MessageType.create, 'payload_data': { 'id': 2, 'name': 'insert' } }, { 'message_type': MessageType.update, 'payload_data': { 'id': 1, 'name': 'update' }, 'previous_payload_data': { 'id': 1, 'name': 'insert' } }, { 'message_type': MessageType.delete, 'payload_data': { 'id': 2, 'name': 'insert' } }, ] _verify_messages(messages, expected_messages) def test_table_with_contains_pii(self, containers, replhandler, rbrsource, create_table_query, namespace, schematizer, rbr_source_session, gtid_enabled): with reconfigure(encryption_type='AES_MODE_CBC-1', key_location='acceptance/configs/data_pipeline/'): if not gtid_enabled: increment_heartbeat(containers, rbrsource) source = "{}_secret_table".format(replhandler) execute_query_get_one_row( containers, rbrsource, create_table_query.format(table_name=source)) BasicModel = _generate_basic_model(source) model_1 = BasicModel(id=1, name='insert') model_2 = BasicModel(id=2, name='insert') rbr_source_session.add(model_1) rbr_source_session.add(model_2) rbr_source_session.commit() messages = _fetch_messages(containers, schematizer, namespace, source, 2) expected_messages = [{ 'message_type': MessageType.create, 'payload_data': { 'id': 1, 'name': 'insert' } }, { 'message_type': MessageType.create, 'payload_data': { 'id': 2, 'name': 'insert' } }] _verify_messages(messages, expected_messages) def check_schematizer_has_correct_source_info(self, table_name, avro_schema, namespace, schematizer): sources = 
schematizer.get_sources_by_namespace(namespace) source = next(src for src in reversed(sources) if src.name == table_name) topic = schematizer.get_topics_by_source_id(source.source_id)[-1] schema = schematizer.get_latest_schema_by_topic_name(topic.name) assert schema.topic.source.name == table_name assert schema.topic.source.namespace.name == namespace assert schema.schema_json == avro_schema def assert_expected_result(self, result, expected): for key, value in expected.iteritems(): assert result[key] == value
def upgrade():
    """Create the initial schema: GitHub users, repositories,
    organizations, projects, memberships, builds, hooks and jobs.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # GitHub accounts; gh_id and gh_login mirror GitHub and are unique.
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_login', sa.String(length=200), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.Column('gh_access_token', sa.String(length=100), nullable=False),
        sa.Column('gh_avatar_url', sa.String(length=500), nullable=False),
        sa.Column('repos_last_synchronized_at', sa.DateTime(),
                  nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('gh_id'),
        sa.UniqueConstraint('gh_login'))
    op.create_table(
        'user_repository',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.Column('gh_full_name', sa.String(length=200), nullable=False),
        sa.Column('gh_clone_url', sa.String(length=200), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id']),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'organization',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_login', sa.String(length=200), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id']),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'project',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('owner_id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.Column('gh_full_name', sa.String(length=200), nullable=False),
        sa.Column('gh_login', sa.String(length=200), nullable=False),
        sa.Column('gh_clone_url', sa.String(length=200), nullable=False),
        sa.Column('gh_key_id', sa.Integer(), nullable=False),
        sa.Column('rsa_private_key', sa.Text(), nullable=False),
        sa.Column('rsa_public_key', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['owner_id'], ['user.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('gh_id'))
    # NOTE(review): 'membership' declares an empty PrimaryKeyConstraint();
    # presumably a plain project<->user association table — confirm this is
    # intentional rather than a missing composite key.
    op.create_table(
        'membership',
        sa.Column('project_id', sa.Integer(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(['project_id'], ['project.id']),
        sa.ForeignKeyConstraint(['user_id'], ['user.id']),
        sa.PrimaryKeyConstraint())
    op.create_table(
        'build',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('number', sa.Integer(), nullable=False),
        sa.Column('gh_commit_sha', sa.String(length=40), nullable=False),
        sa.Column('gh_commit_author', sa.String(length=200),
                  nullable=False),
        sa.Column('gh_commit_message', sa.String(length=2000),
                  nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('status', sa.String(length=40), nullable=False),
        sa.ForeignKeyConstraint(['project_id'], ['project.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('gh_commit_sha'))
    op.create_table(
        'organization_repository',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.Column('gh_full_name', sa.String(length=200), nullable=False),
        sa.Column('gh_clone_url', sa.String(length=200), nullable=False),
        sa.Column('organization_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['organization_id'], ['organization.id']),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'hook',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=200), nullable=False),
        sa.Column('build_script', sa.Text(), nullable=False),
        sa.Column('docker_image', sa.String(length=200), nullable=False),
        sa.ForeignKeyConstraint(['project_id'], ['project.id']),
        sa.PrimaryKeyConstraint('id'))
    # hook_id is nullable and nulled on hook deletion so call history
    # outlives the hook itself.
    op.create_table(
        'hook_call',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('hook_id', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('gh_payload', sa.PickleType(), nullable=False),
        sa.ForeignKeyConstraint(['hook_id'], ['hook.id'],
                                ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'job',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('build_id', sa.Integer(), nullable=False),
        sa.Column('hook_call_id', sa.Integer(), nullable=False),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('finished_at', sa.DateTime(), nullable=True),
        sa.Column('return_code', sa.Integer(), nullable=True),
        sa.Column('stdout', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('task_uuid', sa.String(length=36), nullable=True),
        sa.ForeignKeyConstraint(['build_id'], ['build.id']),
        sa.ForeignKeyConstraint(['hook_call_id'], ['hook_call.id']),
        sa.PrimaryKeyConstraint('id'))
# NOTE(review): SQLAlchemy model-module prelude, collapsed onto one long
# physical line by an earlier tooling pass. Left byte-identical: the final
# `schema.Table(...)` call is truncated in this view (its column list
# continues outside the visible chunk), so reflowing is unsafe here.
# `MYSQL_MEDIUMBLOB` is a shared MEDIUMBLOB type instance — presumably
# reused by model columns defined elsewhere in the file; confirm. Unlike
# the sibling module that uses pytz-aware timestamps, `date_created`
# defaults to naive `datetime.utcnow` — TODO confirm that is intended.
import json from datetime import datetime from sqlalchemy import schema, types from sqlalchemy.orm import relationship, validates from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.dialects import mysql __all__ = [ 'Base', 'ContentBlock', 'DataCollection', 'Service', 'InboxMessage', 'ResultSet', 'Subscription' ] Base = declarative_base(name='Model') MYSQL_MEDIUMBLOB = mysql.MEDIUMBLOB() class AbstractModel(Base): __abstract__ = True date_created = schema.Column(types.DateTime(timezone=True), default=datetime.utcnow) collection_to_content_block = schema.Table( 'collection_to_content_block', Base.metadata, schema.Column('collection_id', types.Integer, schema.ForeignKey('data_collections.id', ondelete='CASCADE')), schema.Column('content_block_id',
def downgrade():
    """Restore the `photo` MEDIUMBLOB column on `users`."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'users',
        sa.Column('photo', mysql.MEDIUMBLOB(), nullable=True))
def upgrade():
    """Create the initial schema: shared tables plus per-site tables.

    Builds ``users`` and ``trackers`` once, then for every prefix in
    ``TABLE_PREFIXES`` creates that site's category, torrent, statistics,
    tracker-link and blob side-tables. Statement order matters: each
    table is created only after every table it references by foreign key.
    """
    # Shared tables
    op.create_table(
        'users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username',
                  sa.String(length=32, collation='ascii_general_ci'),
                  nullable=False),
        sa.Column('email',
                  sqlalchemy_utils.types.email.EmailType(length=255),
                  nullable=True),
        # These are actually PasswordType, UserStatusType and UserLevelType,
        # but database-wise binary and integers are what's being used
        sa.Column('password_hash', sa.Binary(length=255), nullable=False),
        sa.Column('status', sa.Integer(), nullable=False),
        sa.Column('level', sa.Integer(), nullable=False),
        sa.Column('created_time', sa.DateTime(), nullable=True),
        sa.Column('last_login_date', sa.DateTime(), nullable=True),
        sa.Column('last_login_ip', sa.Binary(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email'),
        sa.UniqueConstraint('username'))
    op.create_table(
        'trackers',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uri',
                  sa.String(length=255, collation='utf8_general_ci'),
                  nullable=False),
        sa.Column('disabled', sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('uri'))
    # Nyaa and Sukebei
    for prefix in TABLE_PREFIXES:
        # Main categories
        op.create_table(
            prefix + '_main_categories',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('name', sa.String(length=64), nullable=False),
            sa.PrimaryKeyConstraint('id'))
        # Sub categories
        op.create_table(
            prefix + '_sub_categories',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('main_category_id', sa.Integer(), nullable=False),
            sa.Column('name', sa.String(length=64), nullable=False),
            sa.ForeignKeyConstraint(
                ['main_category_id'], [prefix + '_main_categories.id'], ),
            # Composite PK: sub-category ids are scoped to a main category.
            sa.PrimaryKeyConstraint('id', 'main_category_id'))
        # Main torrent table
        op.create_table(
            prefix + '_torrents',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('info_hash', sa.BINARY(length=20), nullable=False),
            sa.Column('display_name',
                      sa.String(length=255, collation='utf8_general_ci'),
                      nullable=False),
            sa.Column('torrent_name', sa.String(length=255), nullable=False),
            sa.Column('information', sa.String(length=255), nullable=False),
            sa.Column('description',
                      mysql.TEXT(collation='utf8mb4_bin'), nullable=False),
            sa.Column('filesize', sa.BIGINT(), nullable=False),
            sa.Column('encoding', sa.String(length=32), nullable=False),
            sa.Column('flags', sa.Integer(), nullable=False),
            sa.Column('uploader_id', sa.Integer(), nullable=True),
            sa.Column('has_torrent', sa.Boolean(), nullable=False),
            sa.Column('created_time', sa.DateTime(), nullable=False),
            sa.Column('updated_time', sa.DateTime(), nullable=False),
            sa.Column('main_category_id', sa.Integer(), nullable=False),
            sa.Column('sub_category_id', sa.Integer(), nullable=False),
            sa.Column('redirect', sa.Integer(), nullable=True),
            sa.ForeignKeyConstraint(
                ['main_category_id', 'sub_category_id'], [
                    prefix + '_sub_categories.main_category_id',
                    prefix + '_sub_categories.id'
                ], ),
            sa.ForeignKeyConstraint(
                ['main_category_id'], [prefix + '_main_categories.id'], ),
            # Self-referencing FK: a torrent may redirect to another one.
            sa.ForeignKeyConstraint(
                ['redirect'], [prefix + '_torrents.id'], ),
            sa.ForeignKeyConstraint(
                ['uploader_id'], ['users.id'], ),
            sa.PrimaryKeyConstraint('id'))
        op.create_index(op.f('ix_' + prefix + '_torrents_display_name'),
                        prefix + '_torrents', ['display_name'], unique=False)
        op.create_index(op.f('ix_' + prefix + '_torrents_filesize'),
                        prefix + '_torrents', ['filesize'], unique=False)
        op.create_index(op.f('ix_' + prefix + '_torrents_flags'),
                        prefix + '_torrents', ['flags'], unique=False)
        # info_hash is unique per site: one row per torrent.
        op.create_index(op.f('ix_' + prefix + '_torrents_info_hash'),
                        prefix + '_torrents', ['info_hash'], unique=True)
        op.create_index(prefix + '_uploader_flag_idx', prefix + '_torrents',
                        ['uploader_id', 'flags'], unique=False)
        # Statistics for torrents
        op.create_table(
            prefix + '_statistics',
            sa.Column('torrent_id', sa.Integer(), nullable=False),
            sa.Column('seed_count', sa.Integer(), nullable=False),
            sa.Column('leech_count', sa.Integer(), nullable=False),
            sa.Column('download_count', sa.Integer(), nullable=False),
            sa.Column('last_updated', sa.DateTime(), nullable=True),
            sa.ForeignKeyConstraint(['torrent_id'], [prefix + '_torrents.id'],
                                    ondelete='CASCADE'),
            sa.PrimaryKeyConstraint('torrent_id'))
        op.create_index(op.f('ix_' + prefix + '_statistics_download_count'),
                        prefix + '_statistics', ['download_count'],
                        unique=False)
        op.create_index(op.f('ix_' + prefix + '_statistics_leech_count'),
                        prefix + '_statistics', ['leech_count'], unique=False)
        op.create_index(op.f('ix_' + prefix + '_statistics_seed_count'),
                        prefix + '_statistics', ['seed_count'], unique=False)
        # Trackers relationships for torrents
        op.create_table(
            prefix + '_torrent_trackers',
            sa.Column('torrent_id', sa.Integer(), nullable=False),
            sa.Column('tracker_id', sa.Integer(), nullable=False),
            sa.Column('order', sa.Integer(), nullable=False),
            sa.ForeignKeyConstraint(['torrent_id'], [prefix + '_torrents.id'],
                                    ondelete='CASCADE'),
            sa.ForeignKeyConstraint(['tracker_id'], ['trackers.id'],
                                    ondelete='CASCADE'),
            sa.PrimaryKeyConstraint('torrent_id', 'tracker_id'))
        op.create_index(op.f('ix_' + prefix + '_torrent_trackers_order'),
                        prefix + '_torrent_trackers', ['order'], unique=False)
        # Torrent filelists
        op.create_table(prefix + '_torrents_filelist',
                        sa.Column('torrent_id', sa.Integer(),
                                  nullable=False),
                        sa.Column('filelist_blob', mysql.MEDIUMBLOB(),
                                  nullable=True),
                        sa.ForeignKeyConstraint(['torrent_id'],
                                                [prefix + '_torrents.id'],
                                                ondelete='CASCADE'),
                        sa.PrimaryKeyConstraint('torrent_id'),
                        mysql_row_format='COMPRESSED')
        # Torrent info_dicts
        op.create_table(prefix + '_torrents_info',
                        sa.Column('torrent_id', sa.Integer(),
                                  nullable=False),
                        sa.Column('info_dict', mysql.MEDIUMBLOB(),
                                  nullable=True),
                        sa.ForeignKeyConstraint(['torrent_id'],
                                                [prefix + '_torrents.id'],
                                                ondelete='CASCADE'),
                        sa.PrimaryKeyConstraint('torrent_id'),
                        mysql_row_format='COMPRESSED')
def upgrade():
    """Create the initial schema for the CI service.

    Tables are created in FK dependency order: user, then organization /
    user_repository / project, then hook / deploy_key / build, then
    organization_repository / membership / tracked_file, then hook_call,
    and finally job.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_login', sa.String(length=200), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.Column('gh_access_token', sa.String(length=100), nullable=False),
        sa.Column('gh_avatar_url', sa.String(length=500), nullable=False),
        sa.Column('repos_last_synchronized_at', sa.DateTime(), nullable=True),
        sa.Column('email', sa.String(length=1000), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('gh_id'),
        sa.UniqueConstraint('gh_login'))
    op.create_table(
        'organization',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_login', sa.String(length=200), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.ForeignKeyConstraint(
            ['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_index('ix_organization_gh_id', 'organization', ['gh_id'],
                    unique=False)
    op.create_table(
        'user_repository',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.Column('gh_full_name', sa.String(length=200), nullable=False),
        sa.Column('gh_ssh_clone_url', sa.String(length=200), nullable=False),
        sa.Column('gh_https_clone_url', sa.String(length=200),
                  nullable=False),
        sa.Column('is_public', sa.Boolean(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'project',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('owner_id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.Column('gh_full_name', sa.String(length=200), nullable=False),
        sa.Column('gh_login', sa.String(length=200), nullable=False),
        sa.Column('gh_ssh_clone_url', sa.String(length=200), nullable=False),
        sa.Column('gh_https_clone_url', sa.String(length=200),
                  nullable=False),
        sa.Column('is_public', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ['owner_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('gh_id'))
    op.create_table(
        'hook',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=200), nullable=False),
        sa.Column('install_script', sa.Text(), nullable=True),
        sa.Column('build_script', sa.Text(), nullable=False),
        sa.Column('docker_image', sa.String(length=200), nullable=False),
        sa.ForeignKeyConstraint(
            ['project_id'], ['project.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table('deploy_key',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('project_id', sa.Integer(), nullable=False),
                    sa.Column('gh_id', sa.Integer(), nullable=False),
                    sa.Column('rsa_private_key', sa.Text(), nullable=False),
                    sa.Column('rsa_public_key', sa.Text(), nullable=False),
                    sa.ForeignKeyConstraint(
                        ['project_id'], ['project.id'], ),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'build',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('number', sa.Integer(), nullable=False),
        sa.Column('gh_commit_ref', sa.String(length=200), nullable=False),
        sa.Column('gh_commit_sha', sa.String(length=40), nullable=False),
        sa.Column('gh_commit_author', sa.String(length=200), nullable=False),
        sa.Column('gh_commit_message', sa.String(length=2000),
                  nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('status', sa.String(length=40), nullable=False),
        sa.ForeignKeyConstraint(
            ['project_id'], ['project.id'], ),
        sa.PrimaryKeyConstraint('id'),
        # One build per (ref, sha) pair within a project.
        sa.UniqueConstraint('project_id', 'gh_commit_ref', 'gh_commit_sha',
                            name='unique_ref_and_sha_within_project'))
    op.create_index('ix_build_created_at', 'build', ['created_at'],
                    unique=False)
    op.create_index('ix_build_number', 'build', ['number'], unique=False)
    op.create_table(
        'organization_repository',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('gh_id', sa.Integer(), nullable=False),
        sa.Column('gh_name', sa.String(length=200), nullable=False),
        sa.Column('gh_full_name', sa.String(length=200), nullable=False),
        sa.Column('gh_ssh_clone_url', sa.String(length=200), nullable=False),
        sa.Column('gh_https_clone_url', sa.String(length=200),
                  nullable=False),
        sa.Column('is_public', sa.Boolean(), nullable=False),
        sa.Column('organization_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['organization_id'], ['organization.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'membership',
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('allows_management', sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ['project_id'], ['project.id'], ),
        sa.ForeignKeyConstraint(
            ['user_id'], ['user.id'], ),
        # Composite PK: at most one membership row per (project, user).
        sa.PrimaryKeyConstraint('project_id', 'user_id'))
    op.create_table(
        'tracked_file',
        sa.Column('id', sa.Integer(), nullable=False),
        # utf8_bin collation makes path comparison case-sensitive.
        sa.Column('path', sa.String(length=250, collation='utf8_bin'),
                  nullable=False),
        sa.Column('hook_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['hook_id'], ['hook.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('hook_id', 'path',
                            name='unique_tracked_file_within_hook'))
    op.create_table(
        'hook_call',
        sa.Column('id', sa.Integer(), nullable=False),
        # hook_id is nullable and set NULL on hook deletion, so call
        # history outlives the hook it came from.
        sa.Column('hook_id', sa.Integer(), nullable=True),
        sa.Column('build_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('gh_payload', sa.LargeBinary(), nullable=False),
        sa.ForeignKeyConstraint(
            ['build_id'], ['build.id'], ),
        sa.ForeignKeyConstraint(['hook_id'], ['hook.id'],
                                ondelete='SET NULL'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('build_id', 'hook_id',
                            name='unique_hook_call_within_build'))
    op.create_table(
        'job',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('build_id', sa.Integer(), nullable=False),
        sa.Column('hook_call_id', sa.Integer(), nullable=False),
        sa.Column('started_at', sa.DateTime(), nullable=True),
        sa.Column('finished_at', sa.DateTime(), nullable=True),
        sa.Column('return_code', sa.Integer(), nullable=True),
        sa.Column('stdout', mysql.MEDIUMBLOB(), nullable=True),
        sa.Column('task_uuid', sa.String(length=36), nullable=True),
        sa.ForeignKeyConstraint(
            ['build_id'], ['build.id'], ),
        sa.ForeignKeyConstraint(
            ['hook_call_id'], ['hook_call.id'], ),
        sa.PrimaryKeyConstraint('id'))