def downgrade():
    """Unlink attribute_privacy from its privacy group and drop the group table."""
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        # SQLite never had the FK (it cannot add one), so only the column goes.
        with op.batch_alter_table('attribute_privacy') as batch_op:
            batch_op.drop_column('attribute_privacy_group_id')
    else:
        op.drop_constraint('attribute_privacy_fk_privacy_group',
                           'attribute_privacy', type_='foreignkey')
        op.drop_column('attribute_privacy', 'attribute_privacy_group_id')
    op.drop_table('attribute_privacy_group')
def get_commands():
    """Return ``[[upgrade_cmd, downgrade_cmd]]`` pairs that (re)declare the
    ``data_source.format`` enum for the current database dialect.

    Fix: ``all_commands`` is initialized up front so the function returns an
    empty list instead of raising ``UnboundLocalError`` on an unrecognized
    dialect. The duplicated MySQL literal and duplicated psql helper call
    are hoisted since both slots are identical.
    """
    values = ['XML_FILE', 'NETCDF4', 'HDF5', 'SHAPEFILE', 'TEXT',
              'CUSTOM', 'JSON', 'CSV', 'PICKLE', 'GEO_JSON']
    all_commands = []  # default for dialects with no commands
    if is_mysql():
        # Upgrade and downgrade use the identical ALTER statement.
        command = '''
            ALTER TABLE data_source CHANGE `format` `format`
            ENUM('XML_FILE','NETCDF4','HDF5','SHAPEFILE','TEXT','CUSTOM','JSON',
            'CSV','PICKLE','GEO_JSON')
            CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
        '''
        all_commands = [[command, command]]
    elif is_psql():
        psql_command = get_psql_enum_alter_commands(
            ['data_source'], ['format'], 'DataSourceFormatEnumType',
            values, 'CSV')
        all_commands = [[psql_command, psql_command]]
    elif is_sqlite():
        # SQLite enums are plain strings; nothing to alter.
        all_commands = [[[], []]]
    return all_commands
def upgrade():
    """Add the attribute_privacy -> attribute FK and extend the
    data_source.format enum with newer format values."""
    # ### commands auto generated by Alembic - please adjust! ###
    # SQLite cannot add a foreign key to an existing table, so it is skipped.
    if not is_sqlite():
        op.create_foreign_key('attribute_privacy_attribute_id_fk',
                              'attribute_privacy', 'attribute',
                              ['attribute_id'], ['id'], ondelete='CASCADE')
    if is_mysql():
        # MySQL enums are redefined in place with ALTER TABLE.
        op.get_bind().execute(text("""
            ALTER TABLE data_source CHANGE COLUMN `format` `format` ENUM(
            'CSV', 'CUSTOM', 'GEO_JSON', 'HAR_IMAGE_FOLDER', 'HDF5',
            'DATA_FOLDER', 'IMAGE_FOLDER', 'JDBC', 'JSON', 'NETCDF4',
            'NPY', 'PARQUET', 'PICKLE', 'SAV', 'SHAPEFILE',
            'TAR_IMAGE_FOLDER', 'TEXT', 'VIDEO_FOLDER', 'UNKNOWN',
            'XML_FILE') CHARACTER SET 'utf8' NOT NULL ;"""))
    elif is_psql():
        # PostgreSQL enum types are rebuilt via helper-generated commands.
        new_ds_values = ['CSV', 'CUSTOM', 'GEO_JSON', 'HAR_IMAGE_FOLDER',
                         'HDF5', 'DATA_FOLDER', 'IMAGE_FOLDER', 'JDBC',
                         'JSON', 'NETCDF4', 'NPY', 'PARQUET', 'PICKLE',
                         'SAV', 'SHAPEFILE', 'TAR_IMAGE_FOLDER', 'TEXT',
                         'VIDEO_FOLDER', 'UNKNOWN', 'XML_FILE']
        all_commands = [[
            get_psql_enum_alter_commands(['data_source'], ['format'],
                                         'DataSourceFormatEnumType',
                                         new_ds_values, 'CSV'),
            None  # downgrade slot unused here
        ]]
        upgrade_actions(all_commands)
def downgrade():
    """Shrink the delimiter columns back to 4 chars and drop the flags added
    by the matching upgrade (encoding, is_first_line_header)."""
    delimiter_columns = ('attribute_delimiter', 'record_delimiter',
                         'text_delimiter')
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            for name in delimiter_columns:
                batch_op.alter_column(name,
                                      existing_type=sa.String(length=20),
                                      type_=sa.VARCHAR(length=4),
                                      existing_nullable=True)
            batch_op.drop_column('encoding')
            batch_op.drop_column('is_first_line_header')
    else:
        for name in delimiter_columns:
            op.alter_column('data_source', name,
                            existing_type=sa.String(length=20),
                            type_=sa.VARCHAR(length=4),
                            existing_nullable=True)
        op.drop_column('data_source', 'encoding')
        op.drop_column('data_source', 'is_first_line_header')
def downgrade():
    """Remove data_source.initialization_job_id."""
    # ### commands auto generated by Alembic - please adjust! ###
    if not is_sqlite():
        op.drop_column('data_source', 'initialization_job_id')
    else:
        # SQLite drops columns via the batch (table-copy) strategy.
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_column('initialization_job_id')
def downgrade():
    """Remove model.class_name."""
    # ### commands auto generated by Alembic - please adjust! ###
    if not is_sqlite():
        op.drop_column('model', 'class_name')
    else:
        # SQLite drops columns via the batch (table-copy) strategy.
        with op.batch_alter_table('model') as batch_op:
            batch_op.drop_column('class_name')
def downgrade():
    """Drop the storage/data_source columns added by the matching upgrade
    and restore the previous storage.type enum members."""
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        with op.batch_alter_table('storage') as batch_op:
            batch_op.drop_column('extra_params')
            batch_op.drop_column('client_url')
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_column('is_lookup')
    else:
        op.drop_column('storage', 'extra_params')
        op.drop_column('storage', 'client_url')
        op.drop_column('data_source', 'is_lookup')
    if is_mysql():
        # MySQL redefines the enum in place, dropping the newer members.
        op.execute("""
            ALTER TABLE `storage` CHANGE `type` `type` ENUM(
            'CASSANDRA','ELASTIC_SEARCH','HDFS','HIVE','JDBC','LOCAL','MONGODB'
            ) CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""")
    elif is_psql():
        storage_values = ['CASSANDRA', 'ELASTIC_SEARCH', 'HDFS', 'HIVE',
                          'JDBC', 'LOCAL', 'MONGODB']
        # Each pair is [upgrade_cmd, downgrade_cmd]; only the downgrade
        # slot is populated here.
        all_commands = [
            [
                None,
                get_psql_enum_alter_commands(['storage'], ['type'],
                                             'StorageTypeEnumType',
                                             storage_values, 'HDFS'),
            ]
        ]
        downgrade_actions(all_commands)
def downgrade():
    """Remove attribute.format."""
    # ### commands auto generated by Alembic - please adjust! ###
    if not is_sqlite():
        op.drop_column('attribute', 'format')
    else:
        # SQLite drops columns via the batch (table-copy) strategy.
        with op.batch_alter_table('attribute') as batch_op:
            batch_op.drop_column('format')
def upgrade():
    """Add the NOT NULL attribute_name column to attribute_privacy."""
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        # A server default backfills existing rows so NOT NULL can apply.
        with op.batch_alter_table('attribute_privacy') as batch_op:
            batch_op.add_column(
                sa.Column('attribute_name', sa.String(length=200),
                          nullable=False, server_default=''))
    else:
        op.add_column(
            'attribute_privacy',
            sa.Column('attribute_name', sa.String(length=200),
                      nullable=False))
def downgrade():
    """Drop data_source.initialization and its PostgreSQL enum type.

    Fix: the raw SQL is wrapped in ``text()`` — SQLAlchemy 1.4 deprecates
    (and 2.0 removes) executing bare strings, and other revisions in this
    module already use ``text()`` for raw statements.
    """
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_column('initialization')
    else:
        op.drop_column('data_source', 'initialization')
    if is_psql():
        # The enum type is not removed automatically with the column.
        op.get_bind().execute(
            text('DROP TYPE "DataSourceInitializationEnumType"'))
def upgrade():
    """Add the optional attribute.format column (e.g. a display/parse mask)."""
    # ### commands auto generated by Alembic - please adjust! ###
    format_column = sa.Column('format', sa.String(length=100), nullable=True)
    if is_sqlite():
        with op.batch_alter_table('attribute') as batch_op:
            batch_op.add_column(format_column)
    else:
        op.add_column('attribute', format_column)
def downgrade():
    """Remove data_source.treat_as_missing and data_source.is_public."""
    # ### commands auto generated by Alembic - please adjust! ###
    doomed = ('treat_as_missing', 'is_public')
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            for name in doomed:
                batch_op.drop_column(name)
    else:
        for name in doomed:
            op.drop_column('data_source', name)
def downgrade():
    """Remove the denormalized user columns from data_source_permission."""
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        with op.batch_alter_table('data_source_permission') as batch_op:
            for name in ('user_login', 'user_name'):
                batch_op.drop_column(name)
    else:
        for name in ('user_name', 'user_login'):
            op.drop_column('data_source_permission', name)
def downgrade():
    """Remove the delimiter columns from data_source."""
    # ### commands auto generated by Alembic - please adjust! ###
    doomed = ('record_delimiter', 'attribute_delimiter')
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            for name in doomed:
                batch_op.drop_column(name)
    else:
        for name in doomed:
            op.drop_column('data_source', name)
def downgrade():
    """Drop data_source.use_in_workflow and its index."""
    # ### commands auto generated by Alembic - please adjust! ###
    index_name = op.f('ix_data_source_use_in_workflow')
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_index(index_name)
            batch_op.drop_column('use_in_workflow')
    else:
        op.drop_index(index_name, table_name='data_source')
        op.drop_column('data_source', 'use_in_workflow')
def upgrade():
    """Add data_source.text_delimiter plus attribute_privacy.data_type and
    is_global_law, and relax NOT NULL on most attribute_privacy columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.add_column(sa.Column('text_delimiter',
                                          sa.String(length=4),
                                          nullable=True))
        with op.batch_alter_table('attribute_privacy') as batch_op:
            batch_op.add_column(sa.Column(
                'data_type',
                sa.Enum('ENUM', 'LAT_LONG', 'DOUBLE', 'DECIMAL', 'FLOAT',
                        'CHARACTER', 'LONG', 'DATETIME', 'VECTOR', 'TEXT',
                        'TIME', 'DATE', 'INTEGER', 'TIMESTAMP',
                        name='DataTypeEnumType'),
                nullable=True))
            batch_op.add_column(sa.Column('is_global_law', sa.Boolean(),
                                          nullable=True))
            # Batch mode recreates the table, so existing_type is omitted.
            batch_op.alter_column('attribute_id', nullable=True)
            batch_op.alter_column('category_model', nullable=True)
            batch_op.alter_column('category_technique', nullable=True)
            batch_op.alter_column('hierarchical_structure_type',
                                  nullable=True)
            batch_op.alter_column('hierarchy', nullable=True)
            batch_op.alter_column('privacy_model', nullable=True)
            batch_op.alter_column('privacy_model_parameters', nullable=True)
            batch_op.alter_column('privacy_model_technique', nullable=True)
            batch_op.alter_column('unlock_privacy_key', nullable=True)
    else:
        op.add_column('data_source', sa.Column('text_delimiter',
                                               sa.String(length=4),
                                               nullable=True))
        op.add_column('attribute_privacy', sa.Column(
            'data_type',
            sa.Enum('ENUM', 'LAT_LONG', 'DOUBLE', 'DECIMAL', 'FLOAT',
                    'CHARACTER', 'LONG', 'DATETIME', 'VECTOR', 'TEXT',
                    'TIME', 'DATE', 'INTEGER', 'TIMESTAMP',
                    name='DataTypeEnumType'),
            nullable=True))
        op.add_column('attribute_privacy',
                      sa.Column('is_global_law', sa.Boolean(),
                                nullable=True))
        # existing_type values mirror the current MySQL column definitions.
        op.alter_column('attribute_privacy', 'attribute_id',
                        existing_type=mysql.INTEGER(display_width=11),
                        nullable=True)
        op.alter_column('attribute_privacy', 'category_model',
                        existing_type=mysql.TEXT(), nullable=True)
        op.alter_column('attribute_privacy', 'category_technique',
                        existing_type=mysql.VARCHAR(length=100),
                        nullable=True)
        op.alter_column('attribute_privacy', 'hierarchical_structure_type',
                        existing_type=mysql.VARCHAR(length=100),
                        nullable=True)
        op.alter_column('attribute_privacy', 'hierarchy',
                        existing_type=mysql.TEXT(), nullable=True)
        op.alter_column('attribute_privacy', 'privacy_model',
                        existing_type=mysql.TEXT(), nullable=True)
        op.alter_column('attribute_privacy', 'privacy_model_parameters',
                        existing_type=mysql.TEXT(), nullable=True)
        op.alter_column('attribute_privacy', 'privacy_model_technique',
                        existing_type=mysql.VARCHAR(length=100),
                        nullable=True)
        op.alter_column('attribute_privacy', 'unlock_privacy_key',
                        existing_type=mysql.VARCHAR(length=400),
                        nullable=True)
def downgrade():
    """Remove the workflow/job tracking columns from model."""
    # ### commands auto generated by Alembic - please adjust! ###
    doomed = ('workflow_name', 'workflow_id', 'task_id', 'job_id')
    if is_sqlite():
        with op.batch_alter_table('model') as batch_op:
            for name in doomed:
                batch_op.drop_column(name)
    else:
        for name in doomed:
            op.drop_column('model', name)
def upgrade():
    """Add the NOT NULL data_source.is_multiline flag (defaults to false).

    Fix: the non-SQLite branch only set a client-side ``default=0``, which
    emits no DDL DEFAULT clause — adding a NOT NULL column to a non-empty
    table then fails under strict SQL modes. A ``server_default='0'`` is
    added, matching the SQLite branch.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.add_column(
                sa.Column('is_multiline', sa.Boolean(), nullable=False,
                          server_default='0'))
    else:
        op.add_column(
            'data_source',
            sa.Column('is_multiline', sa.Boolean(), nullable=False,
                      server_default='0', default=0))
def downgrade():
    """Drop text_delimiter/data_type/is_global_law and restore NOT NULL on
    the attribute_privacy columns relaxed by the matching upgrade."""
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_column('text_delimiter')
        with op.batch_alter_table('attribute_privacy') as batch_op:
            batch_op.drop_column('data_type')
            batch_op.drop_column('is_global_law')
            # server_default='' backfills NULLs so NOT NULL can be applied.
            batch_op.alter_column('attribute_id', nullable=False,
                                  server_default='')
            batch_op.alter_column('category_model', nullable=False,
                                  server_default='')
            batch_op.alter_column('category_technique', nullable=False,
                                  server_default='')
            batch_op.alter_column('hierarchical_structure_type',
                                  nullable=False, server_default='')
            batch_op.alter_column('hierarchy', nullable=False,
                                  server_default='')
            batch_op.alter_column('privacy_model', nullable=False,
                                  server_default='')
            batch_op.alter_column('privacy_model_parameters',
                                  nullable=False, server_default='')
            batch_op.alter_column('privacy_model_technique',
                                  nullable=False, server_default='')
            batch_op.alter_column('unlock_privacy_key', nullable=False,
                                  server_default='')
    else:
        op.drop_column('data_source', 'text_delimiter')
        # Columns are restored in reverse of the order the upgrade touched.
        op.alter_column('attribute_privacy', 'unlock_privacy_key',
                        existing_type=mysql.VARCHAR(length=400),
                        nullable=False)
        op.alter_column('attribute_privacy', 'privacy_model_technique',
                        existing_type=mysql.VARCHAR(length=100),
                        nullable=False)
        op.alter_column('attribute_privacy', 'privacy_model_parameters',
                        existing_type=mysql.TEXT(), nullable=False)
        op.alter_column('attribute_privacy', 'privacy_model',
                        existing_type=mysql.TEXT(), nullable=False)
        op.alter_column('attribute_privacy', 'hierarchy',
                        existing_type=mysql.TEXT(), nullable=False)
        op.alter_column('attribute_privacy', 'hierarchical_structure_type',
                        existing_type=mysql.VARCHAR(length=100),
                        nullable=False)
        op.alter_column('attribute_privacy', 'category_technique',
                        existing_type=mysql.VARCHAR(length=100),
                        nullable=False)
        op.alter_column('attribute_privacy', 'category_model',
                        existing_type=mysql.TEXT(), nullable=False)
        op.alter_column('attribute_privacy', 'attribute_id',
                        existing_type=mysql.INTEGER(display_width=11),
                        nullable=False)
        op.drop_column('attribute_privacy', 'is_global_law')
        op.drop_column('attribute_privacy', 'data_type')
def upgrade():
    """Add data_source.initialization, creating the PostgreSQL enum first."""
    if is_psql():
        # On PostgreSQL the enum type must exist before a column can use it.
        ds_enum = postgresql.ENUM('NO_INITIALIZED', 'INITIALIZING',
                                  'INITIALIZED',
                                  name='DataSourceInitializationEnumType')
        ds_enum.create(op.get_bind())
    if not is_sqlite():
        op.add_column('data_source', sa.Column(
            'initialization',
            sa.Enum('NO_INITIALIZED', 'INITIALIZING', 'INITIALIZED',
                    name='DataSourceInitializationEnumType'),
            nullable=False))
    else:
        # SQLite stores the value as plain text; the server default
        # backfills existing rows so NOT NULL can be applied.
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.add_column(sa.Column(
                'initialization', sa.String(length=100), nullable=False,
                server_default='NO_INITIALIZED'))
def upgrade():
    """Widen the delimiter columns from 4 to 20 chars and add encoding and
    is_first_line_header to data_source."""
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.alter_column('attribute_delimiter',
                                  existing_type=sa.String(length=4),
                                  type_=sa.VARCHAR(length=20),
                                  existing_nullable=True)
            batch_op.alter_column('record_delimiter',
                                  existing_type=sa.String(length=4),
                                  type_=sa.VARCHAR(length=20),
                                  existing_nullable=True)
            batch_op.alter_column('text_delimiter',
                                  existing_type=sa.String(length=4),
                                  type_=sa.VARCHAR(length=20),
                                  existing_nullable=True)
            batch_op.add_column(
                sa.Column('encoding', sa.String(length=20), nullable=True))
            # Server default satisfies NOT NULL for pre-existing rows.
            batch_op.add_column(
                sa.Column('is_first_line_header', sa.Boolean(),
                          nullable=False, server_default='0'))
    else:
        op.alter_column('data_source', 'attribute_delimiter',
                        existing_type=sa.String(length=4),
                        type_=sa.VARCHAR(length=20), existing_nullable=True)
        op.alter_column('data_source', 'record_delimiter',
                        existing_type=sa.String(length=4),
                        type_=sa.VARCHAR(length=20), existing_nullable=True)
        op.alter_column('data_source', 'text_delimiter',
                        existing_type=sa.String(length=4),
                        type_=sa.VARCHAR(length=20), existing_nullable=True)
        op.add_column(
            'data_source',
            sa.Column('encoding', sa.String(length=20), nullable=True))
        op.add_column(
            'data_source',
            sa.Column('is_first_line_header', sa.Boolean(),
                      nullable=False, server_default='0'))
def downgrade():
    """Drop attribute.key and the foreign-key mapping tables.

    Fix: the raw DROP TYPE statement is wrapped in ``text()`` — SQLAlchemy
    1.4 deprecates (2.0 removes) executing bare strings, and the module
    already uses ``text()`` for other raw SQL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        with op.batch_alter_table('attribute') as batch_op:
            batch_op.drop_column('key')
    else:
        op.drop_column('attribute', 'key')
    # Child table first: attribute_foreign_key references
    # data_source_foreign_key.
    op.drop_table('attribute_foreign_key')
    op.drop_table('data_source_foreign_key')
    if is_psql():
        # The enum type outlives the dropped table and must go explicitly.
        op.get_bind().execute(
            text('DROP TYPE "AttributeForeignKeyDirectionEnumType"'))
def upgrade():
    """Create attribute_privacy_group and link attribute_privacy to it."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('attribute_privacy_group',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(length=100), nullable=False),
                    sa.Column('user_id', sa.Integer(), nullable=False),
                    sa.PrimaryKeyConstraint('id'))
    op.add_column(
        'attribute_privacy',
        sa.Column('attribute_privacy_group_id', sa.Integer(), nullable=True))
    # SQLite cannot add a foreign key to an existing table, so the FK
    # constraint is only created on other dialects.
    if not is_sqlite():
        op.create_foreign_key('attribute_privacy_fk_privacy_group',
                              'attribute_privacy', 'attribute_privacy_group',
                              ['attribute_privacy_group_id'], ['id'])
def upgrade():
    """Create the data-source/attribute foreign-key mapping tables and add
    the attribute.key flag."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'data_source_foreign_key',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('from_source_id', sa.Integer(), nullable=False),
        sa.Column('to_source_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['from_source_id'], ['data_source.id'], ),
        sa.ForeignKeyConstraint(['to_source_id'], ['data_source.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'attribute_foreign_key',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('order', sa.Integer(), nullable=False),
        sa.Column('direction',
                  sa.Enum('FROM', 'TO',
                          name='AttributeForeignKeyDirectionEnumType'),
                  nullable=False),
        sa.Column('foreign_key_id', sa.Integer(), nullable=False),
        sa.Column('from_attribute_id', sa.Integer(), nullable=False),
        sa.Column('to_attribute_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['foreign_key_id'],
                                ['data_source_foreign_key.id'], ),
        sa.ForeignKeyConstraint(['from_attribute_id'], ['attribute.id'], ),
        sa.ForeignKeyConstraint(['to_attribute_id'], ['attribute.id'], ),
        sa.PrimaryKeyConstraint('id'))
    if is_sqlite():
        with op.batch_alter_table('attribute') as batch_op:
            # NOTE(review): server_default='false' stores the literal string
            # 'false' in SQLite (no native boolean) — confirm this is the
            # intended falsy default rather than '0'.
            batch_op.add_column(
                sa.Column('key', sa.Boolean(), nullable=False,
                          server_default='false'))
    else:
        op.add_column('attribute',
                      sa.Column('key', sa.Boolean(), nullable=False))
def downgrade():
    """Drop the privacy/permission tables and the related data_source columns.

    Fix: the raw DROP TYPE statements are wrapped in ``text()`` — SQLAlchemy
    1.4 deprecates (2.0 removes) executing bare strings, and the module
    already uses ``text()`` for other raw SQL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_column('privacy_aware')
            batch_op.drop_column('statistics_process_counter')
    else:
        op.drop_column('data_source', 'statistics_process_counter')
        op.drop_column('data_source', 'privacy_aware')
    op.drop_table('attribute_privacy')
    op.drop_table('privacy_risk')
    op.drop_table('data_source_permission')
    op.drop_table('storage_permission')
    if is_psql():
        # Enum types are not removed with the tables that used them.
        bind = op.get_bind()
        bind.execute(text('DROP TYPE "AnonymizationTechniqueEnumType"'))
        bind.execute(text('DROP TYPE "PermissionTypeEnumType"'))
        bind.execute(text('DROP TYPE "PrivacyRiskTypeEnumType"'))
def upgrade():
    """Add storage.enabled plus data_source.updated/command, and extend the
    storage.type enum with JDBC."""
    if is_mysql():
        # LONGTEXT on MySQL: commands can exceed the TEXT size limit.
        op.add_column('data_source',
                      sa.Column('command', mysql.LONGTEXT(), nullable=True))
        op.get_bind().execute(
            text("""
            ALTER TABLE storage CHANGE `type` `type`
            ENUM('HDFS', 'OPHIDIA','ELASTIC_SEARCH','MONGODB',
            'POSTGIS','HBASE','CASSANDRA','JDBC')
            CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;"""
                 ))
    elif is_psql():
        op.add_column('data_source',
                      sa.Column('command', sa.Text(), nullable=True))
        upgrade_actions(get_commands())
    if is_sqlite():
        with op.batch_alter_table('storage') as batch_op:
            batch_op.add_column(
                sa.Column('enabled', sa.Boolean(), nullable=False,
                          server_default='true'))
        with op.batch_alter_table('data_source') as batch_op:
            # NOTE(review): default differs from the non-SQLite branch
            # ('2021-01-01' here vs '2018-01-01' below) — confirm which
            # date is intended.
            batch_op.add_column(
                sa.Column('updated', sa.DateTime(), nullable=False,
                          server_default='2021-01-01'))
            # MySQL/psql added 'command' in their branches above.
            batch_op.add_column(sa.Column('command', sa.Text(),
                                          nullable=True))
    else:
        op.add_column(
            'storage',
            sa.Column('enabled', sa.Boolean(), nullable=False,
                      server_default=sa.schema.DefaultClause("1"),
                      default=1))
        op.add_column(
            'data_source',
            sa.Column('updated', sa.DateTime(), nullable=False,
                      server_default='2018-01-01'))
def downgrade():
    """Restore the previous storage.type enum and drop the enabled/updated/
    command columns added by the matching upgrade."""
    if is_mysql():
        op.get_bind().execute(
            text("""
            ALTER TABLE storage CHANGE `type` `type`
            ENUM('HDFS', 'OPHIDIA','ELASTIC_SEARCH','MONGODB',
            'POSTGIS','HBASE','CASSANDRA')
            CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;"""
                 ))
    elif is_psql():
        downgrade_actions(get_commands())
    if not is_sqlite():
        op.drop_column('data_source', 'command')
        op.drop_column('storage', 'enabled')
        op.drop_column('data_source', 'updated')
    else:
        # SQLite drops columns via the batch (table-copy) strategy.
        with op.batch_alter_table('storage') as batch_op:
            batch_op.drop_column('enabled')
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_column('updated')
            batch_op.drop_column('command')
def upgrade():
    """Add denormalized user_login/user_name columns to data_source_permission.

    Fix: the SQLite branch declared user_name as String(50) while the other
    dialects use String(200); both branches now agree on length 200 so the
    schemas do not diverge by dialect.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        # Server defaults backfill existing rows so NOT NULL can apply.
        with op.batch_alter_table('data_source_permission') as batch_op:
            batch_op.add_column(
                sa.Column('user_login', sa.String(length=50),
                          nullable=False, server_default=''))
            batch_op.add_column(
                sa.Column('user_name', sa.String(length=200),
                          nullable=False, server_default=''))
    else:
        op.add_column(
            'data_source_permission',
            sa.Column('user_login', sa.String(length=50), nullable=False))
        op.add_column(
            'data_source_permission',
            sa.Column('user_name', sa.String(length=200), nullable=False))
def upgrade():
    """Add job/task/workflow tracking columns to model."""
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        # Server defaults satisfy NOT NULL for rows already in the table.
        new_columns = [
            sa.Column('job_id', sa.Integer(), nullable=False,
                      server_default='0'),
            sa.Column('task_id', sa.String(length=200), nullable=False,
                      server_default='0'),
            sa.Column('workflow_id', sa.Integer(), nullable=False,
                      server_default='0'),
            sa.Column('workflow_name', sa.String(length=200),
                      nullable=True),
        ]
        with op.batch_alter_table('model') as batch_op:
            for column in new_columns:
                batch_op.add_column(column)
    else:
        op.add_column('model',
                      sa.Column('job_id', sa.Integer(), nullable=False))
        op.add_column('model',
                      sa.Column('task_id', sa.String(length=200),
                                nullable=False))
        op.add_column('model',
                      sa.Column('workflow_id', sa.Integer(),
                                nullable=False))
        op.add_column('model',
                      sa.Column('workflow_name', sa.String(length=200),
                                nullable=True))
def upgrade():
    """Create the permission and privacy tables and add privacy bookkeeping
    columns to data_source."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'storage_permission',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('permission',
                  sa.Enum('READ', 'MANAGE', 'DENY', 'WRITE',
                          name='PermissionTypeEnumType'),
                  nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('storage_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['storage_id'], ['storage.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'data_source_permission',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('permission',
                  sa.Enum('READ', 'MANAGE', 'DENY', 'WRITE',
                          name='PermissionTypeEnumType'),
                  nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('data_source_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['data_source_id'], ['data_source.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'privacy_risk',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('type',
                  sa.Enum('IDENTIFICATION', name='PrivacyRiskTypeEnumType'),
                  nullable=False),
        sa.Column('probability', sa.Float(), nullable=True),
        sa.Column('impact', sa.Float(), nullable=True),
        sa.Column('value', sa.Float(), nullable=False),
        sa.Column('detail', sa.Text(), nullable=False),
        sa.Column('data_source_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['data_source_id'], ['data_source.id'], ),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'attribute_privacy',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('privacy_type', sa.String(length=100), nullable=False),
        sa.Column('category_technique', sa.String(length=100),
                  nullable=False),
        sa.Column('anonymization_technique',
                  sa.Enum('MASK', 'GENERALIZATION', 'SUPPRESSION',
                          name='AnonymizationTechniqueEnumType'),
                  nullable=False),
        sa.Column('hierarchical_structure_type', sa.String(length=100),
                  nullable=False),
        sa.Column('privacy_model_technique', sa.String(length=100),
                  nullable=False),
        sa.Column('hierarchy', sa.Text(), nullable=False),
        sa.Column('category_model', sa.Text(), nullable=False),
        sa.Column('privacy_model', sa.Text(), nullable=False),
        sa.Column('privacy_model_parameters', sa.Text(), nullable=False),
        sa.Column('unlock_privacy_key', sa.String(length=400),
                  nullable=False),
        sa.Column('attribute_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['attribute_id'], ['attribute.id'], ),
        sa.PrimaryKeyConstraint('id'))
    if is_sqlite():
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.add_column(
                sa.Column('privacy_aware', sa.Boolean(), nullable=False,
                          server_default='false'))
            # NOTE(review): 'false' as server default for an Integer column
            # looks like a copy-paste from the Boolean above — probably
            # meant '0'; confirm before changing.
            batch_op.add_column(
                sa.Column('statistics_process_counter', sa.Integer(),
                          nullable=False, server_default='false'))
    else:
        op.add_column('data_source',
                      sa.Column('privacy_aware', sa.Boolean(),
                                nullable=False))
        op.add_column(
            'data_source',
            sa.Column('statistics_process_counter', sa.Integer(),
                      nullable=False))