def upgrade():
    """Add storage.client_url / storage.extra_params / data_source.is_lookup
    columns and extend the storage.type enum with HIVE_WAREHOUSE."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('storage', sa.Column('client_url', sa.String(length=1000),
                                       nullable=True))
    if is_mysql():
        # MySQL: LONGTEXT for potentially large payloads; boolean default '0'.
        op.add_column('storage', sa.Column('extra_params', mysql.LONGTEXT(),
                                           nullable=True))
        op.add_column('data_source', sa.Column('is_lookup', sa.Boolean(),
                                               nullable=False,
                                               server_default='0'))
    else:
        # Other dialects: generic TEXT; boolean default spelled 'false'.
        op.add_column('storage', sa.Column('extra_params', sa.Text(),
                                           nullable=True))
        op.add_column('data_source', sa.Column('is_lookup', sa.Boolean(),
                                               nullable=False,
                                               server_default='false'))
    if is_mysql():
        # MySQL stores the enum inline in the column definition.
        op.execute("""
            ALTER TABLE `storage` CHANGE `type` `type` ENUM(
            'CASSANDRA','ELASTIC_SEARCH','HDFS','HIVE', 'HIVE_WAREHOUSE',
            'JDBC','LOCAL','MONGODB'
            ) CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""")
    elif is_psql():
        # PostgreSQL rebuilds the named enum type instead.
        storage_values = ['CASSANDRA','ELASTIC_SEARCH','HDFS', 'HIVE',
                          'HIVE_WAREHOUSE', 'JDBC','LOCAL','MONGODB']
        # Each entry is an [upgrade, downgrade] pair; only upgrade is used here.
        all_commands = [
            [
                get_psql_enum_alter_commands(['storage'], ['type'],
                    'StorageTypeEnumType', storage_values, 'HDFS'),
                None
            ]
        ]
        upgrade_actions(all_commands)
def get_commands():
    """Return [upgrade, downgrade] SQL command pairs for the
    data_source.format enum, keyed on the active database dialect.

    Returns:
        list: a list of [upgrade, downgrade] pairs; empty when the
        dialect is not recognized.
    """
    values = [
        'XML_FILE', 'NETCDF4', 'HDF5', 'SHAPEFILE', 'TEXT', 'CUSTOM',
        'JSON', 'CSV', 'PICKLE', 'GEO_JSON'
    ]
    # FIX: initialize so an unrecognized dialect returns [] instead of
    # raising NameError at the return statement (matches the sibling
    # get_commands() implementations in this package).
    all_commands = []
    if is_mysql():
        # NOTE(review): the upgrade and downgrade statements are identical
        # here — confirm this no-op pair is intentional.
        all_commands = [[
            '''
            ALTER TABLE data_source CHANGE `format` `format`
            ENUM('XML_FILE','NETCDF4','HDF5','SHAPEFILE','TEXT','CUSTOM','JSON',
            'CSV','PICKLE','GEO_JSON') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
            ''',
            '''
            ALTER TABLE data_source CHANGE `format` `format`
            ENUM('XML_FILE','NETCDF4','HDF5','SHAPEFILE','TEXT','CUSTOM','JSON',
            'CSV','PICKLE','GEO_JSON') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
            '''
        ]]
    elif is_psql():
        all_commands = [[
            get_psql_enum_alter_commands(['data_source'], ['format'],
                'DataSourceFormatEnumType', values, 'CSV'),
            get_psql_enum_alter_commands(['data_source'], ['format'],
                'DataSourceFormatEnumType', values, 'CSV'),
        ]]
    elif is_sqlite():
        # SQLite has no native enums; nothing to execute in either direction.
        all_commands = [[[], []]]
    return all_commands
def downgrade():
    """Revert the data_source.format enum to the member list without
    NPY and SAV."""
    # ### commands auto generated by Alembic - please adjust! ###
    # op.drop_constraint('attribute_privacy_attribute_id_fk', 'attribute_privacy', type_='foreignkey')
    if is_mysql():
        op.get_bind().execute(text("""
            ALTER TABLE data_source CHANGE COLUMN `format` `format` ENUM(
            'CSV', 'CUSTOM', 'GEO_JSON', 'HAR_IMAGE_FOLDER', 'HDF5', 'DATA_FOLDER',
            'IMAGE_FOLDER', 'JDBC', 'JSON', 'NETCDF4', 'PARQUET', 'PICKLE',
            'SHAPEFILE', 'TAR_IMAGE_FOLDER', 'TEXT', 'VIDEO_FOLDER', 'UNKNOWN',
            'XML_FILE') CHARACTER SET 'utf8' NOT NULL ;"""))
    # ### end Alembic commands ###
    elif is_psql():
        old_ds_values = ['CSV', 'CUSTOM', 'GEO_JSON', 'HAR_IMAGE_FOLDER',
                         'HDF5', 'DATA_FOLDER', 'IMAGE_FOLDER', 'JDBC', 'JSON',
                         'NETCDF4', 'PARQUET', 'PICKLE', 'SHAPEFILE',
                         'TAR_IMAGE_FOLDER', 'TEXT', 'VIDEO_FOLDER', 'UNKNOWN',
                         'XML_FILE']
        # [upgrade, downgrade] pair: only the downgrade slot is populated here.
        all_commands = [[
            None,
            get_psql_enum_alter_commands(['data_source'], ['format'],
                'DataSourceFormatEnumType', old_ds_values, 'CSV'),
        ]]
        downgrade_actions(all_commands)
def upgrade():
    """Re-create the attribute_privacy -> attribute FK with ON DELETE CASCADE
    and add NPY/SAV to the data_source.format enum."""
    # ### commands auto generated by Alembic - please adjust! ###
    if not is_sqlite():
        # Skipped on SQLite — presumably because it cannot add a foreign key
        # to an existing table; confirm against the project's SQLite support.
        op.create_foreign_key('attribute_privacy_attribute_id_fk',
                              'attribute_privacy', 'attribute',
                              ['attribute_id'], ['id'], ondelete='CASCADE')
    if is_mysql():
        op.get_bind().execute(text("""
            ALTER TABLE data_source CHANGE COLUMN `format` `format` ENUM(
            'CSV', 'CUSTOM', 'GEO_JSON', 'HAR_IMAGE_FOLDER', 'HDF5', 'DATA_FOLDER',
            'IMAGE_FOLDER', 'JDBC', 'JSON', 'NETCDF4', 'NPY', 'PARQUET', 'PICKLE',
            'SAV', 'SHAPEFILE', 'TAR_IMAGE_FOLDER', 'TEXT', 'VIDEO_FOLDER',
            'UNKNOWN', 'XML_FILE') CHARACTER SET 'utf8' NOT NULL ;"""))
    elif is_psql():
        new_ds_values = ['CSV', 'CUSTOM', 'GEO_JSON', 'HAR_IMAGE_FOLDER',
                         'HDF5', 'DATA_FOLDER', 'IMAGE_FOLDER', 'JDBC', 'JSON',
                         'NETCDF4', 'NPY', 'PARQUET', 'PICKLE', 'SAV',
                         'SHAPEFILE', 'TAR_IMAGE_FOLDER', 'TEXT',
                         'VIDEO_FOLDER', 'UNKNOWN', 'XML_FILE']
        # [upgrade, downgrade] pair: only the upgrade slot is populated here.
        all_commands = [[
            get_psql_enum_alter_commands(['data_source'], ['format'],
                'DataSourceFormatEnumType', new_ds_values, 'CSV'),
            None
        ]]
        upgrade_actions(all_commands)
def downgrade():
    """Drop the client_url/extra_params/is_lookup columns and restore the
    storage.type enum without the HIVE_WAREHOUSE value."""
    # ### commands auto generated by Alembic - please adjust! ###
    if is_sqlite():
        # SQLite cannot drop columns directly; batch mode rebuilds the table.
        with op.batch_alter_table('storage') as batch_op:
            batch_op.drop_column('extra_params')
            batch_op.drop_column('client_url')
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_column('is_lookup')
    else:
        op.drop_column('storage', 'extra_params')
        op.drop_column('storage', 'client_url')
        op.drop_column('data_source', 'is_lookup')
    if is_mysql():
        # Enum list below intentionally omits HIVE_WAREHOUSE.
        op.execute("""
            ALTER TABLE `storage` CHANGE `type` `type` ENUM(
            'CASSANDRA','ELASTIC_SEARCH','HDFS','HIVE','JDBC','LOCAL','MONGODB'
            ) CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""")
    elif is_psql():
        storage_values = ['CASSANDRA','ELASTIC_SEARCH','HDFS', 'HIVE',
                          'JDBC','LOCAL','MONGODB']
        # [upgrade, downgrade] pair: only the downgrade slot is populated here.
        all_commands = [
            [
                None,
                get_psql_enum_alter_commands(['storage'], ['type'],
                    'StorageTypeEnumType', storage_values, 'HDFS'),
            ]
        ]
        downgrade_actions(all_commands)
def upgrade():
    """Add data_source.command, storage.enabled and data_source.updated
    columns; on MySQL also add JDBC to the storage.type enum."""
    if is_mysql():
        # MySQL gets LONGTEXT for the command column.
        op.add_column('data_source',
                      sa.Column('command', mysql.LONGTEXT(), nullable=True))
        op.get_bind().execute(text("""
            ALTER TABLE storage CHANGE `type` `type`
            ENUM('HDFS', 'OPHIDIA','ELASTIC_SEARCH','MONGODB',
            'POSTGIS','HBASE','CASSANDRA','JDBC')
            CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;"""))
    elif is_psql():
        op.add_column('data_source',
                      sa.Column('command', sa.Text(), nullable=True))
        # Enum changes for PostgreSQL come from the paired get_commands().
        upgrade_actions(get_commands())
    if is_sqlite():
        # SQLite requires batch mode (table rebuild) for column additions;
        # note that data_source.command for SQLite is added in this branch.
        with op.batch_alter_table('storage') as batch_op:
            batch_op.add_column(
                sa.Column('enabled', sa.Boolean(), nullable=False,
                          server_default='true'))
        with op.batch_alter_table('data_source') as batch_op:
            # NOTE(review): default '2021-01-01' here vs '2018-01-01' in the
            # non-SQLite branch below — confirm the mismatch is intentional.
            batch_op.add_column(
                sa.Column('updated', sa.DateTime(), nullable=False,
                          server_default='2021-01-01'))
            batch_op.add_column(sa.Column('command', sa.Text(), nullable=True))
    else:
        op.add_column(
            'storage',
            sa.Column('enabled', sa.Boolean(), nullable=False,
                      server_default=sa.schema.DefaultClause("1"), default=1))
        op.add_column(
            'data_source',
            sa.Column('updated', sa.DateTime(), nullable=False,
                      server_default='2018-01-01'))
def upgrade():
    """Add the KAFKA value to the storage.type enumeration."""
    if is_mysql():
        # MySQL encodes the enum directly in the column definition.
        op.execute("""
            ALTER TABLE `storage` CHANGE `type` `type` ENUM(
            'CASSANDRA','ELASTIC_SEARCH','HDFS','HIVE', 'HIVE_WAREHOUSE',
            'JDBC', 'KAFKA', 'LOCAL','MONGODB'
            ) CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""")
    elif is_psql():
        # PostgreSQL needs the named enum type rebuilt instead.
        enum_members = [
            'CASSANDRA', 'ELASTIC_SEARCH', 'HDFS', 'HIVE', 'HIVE_WAREHOUSE',
            'JDBC', 'KAFKA', 'LOCAL', 'MONGODB'
        ]
        rebuild_enum = get_psql_enum_alter_commands(
            ['storage'], ['type'], 'StorageTypeEnumType', enum_members, 'HDFS')
        # One [upgrade, downgrade] pair; no downgrade command here.
        upgrade_actions([[rebuild_enum, None]])
def downgrade():
    """Drop the command/enabled/updated columns; on MySQL also remove JDBC
    from the storage.type enum."""
    if is_mysql():
        # Enum list below intentionally omits JDBC (reverse of the upgrade).
        op.get_bind().execute(text("""
            ALTER TABLE storage CHANGE `type` `type`
            ENUM('HDFS', 'OPHIDIA','ELASTIC_SEARCH','MONGODB',
            'POSTGIS','HBASE','CASSANDRA')
            CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;"""))
    elif is_psql():
        # Enum reversal for PostgreSQL comes from the paired get_commands().
        downgrade_actions(get_commands())
    if is_sqlite():
        # SQLite needs batch mode (table rebuild) to drop columns.
        with op.batch_alter_table('storage') as batch_op:
            batch_op.drop_column('enabled')
        with op.batch_alter_table('data_source') as batch_op:
            batch_op.drop_column('updated')
            batch_op.drop_column('command')
    else:
        op.drop_column('data_source', 'command')
        op.drop_column('storage', 'enabled')
        op.drop_column('data_source', 'updated')
def get_commands():
    """Return the [upgrade, downgrade] SQL pair that adds (upgrade) or
    removes (downgrade) the MLEAP value in the model.type enum."""
    commands = []
    if is_mysql():
        add_mleap = """ALTER TABLE `model` CHANGE `type` `type` ENUM
            ('KERAS','MLEAP', 'PERFORMANCE_SPARK','PERFORMANCE_KERAS','PERFORMANCE',
            'SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION','SPARK_ML_CLASSIFICATION','UNSPECIFIED')
            CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
            """
        drop_mleap = """ALTER TABLE `model` CHANGE `type` `type` ENUM
            ('KERAS','PERFORMANCE_SPARK','PERFORMANCE_KERAS',
            'PERFORMANCE','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION',
            'SPARK_ML_CLASSIFICATION','UNSPECIFIED')
            CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
            """
        commands = [(add_mleap, drop_mleap)]
    elif is_psql():
        with_mleap = [
            'KERAS', 'MLEAP', 'PERFORMANCE_SPARK', 'PERFORMANCE_KERAS',
            'PERFORMANCE', 'SPARK_ML_REGRESSION', 'SPARK_MLLIB_CLASSIFICATION',
            'SPARK_ML_CLASSIFICATION', 'UNSPECIFIED'
        ]
        # The pre-migration list is the same minus MLEAP.
        without_mleap = [v for v in with_mleap if v != 'MLEAP']
        commands = [
            [
                get_psql_enum_alter_commands(['model'], ['type'],
                    'ModelTypeEnumType', with_mleap, 'UNSPECIFIED'),
                get_psql_enum_alter_commands(['model'], ['type'],
                    'ModelTypeEnumType', without_mleap, 'UNSPECIFIED'),
            ],
        ]
    return commands
def get_commands():
    """Return [upgrade, downgrade] SQL pairs for: the data_source.format
    enum (adds PARQUET/UNKNOWN), the data_source.task_id column type
    (INT -> VARCHAR), and the attribute/attribute_privacy type enums."""
    all_commands = []
    if is_mysql():
        all_commands = [
            [
                # Upgrade: enum including PARQUET and UNKNOWN.
                '''
                ALTER TABLE data_source CHANGE `format` `format`
                ENUM('CSV','CUSTOM','GEO_JSON','HDF5','JDBC','JSON','NETCDF4',
                'PARQUET','PICKLE','SHAPEFILE','TEXT','UNKNOWN',
                'XML_FILE') CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                ''',
                # Downgrade: enum without PARQUET and UNKNOWN.
                '''
                ALTER TABLE data_source CHANGE `format` `format`
                ENUM('CSV','CUSTOM','GEO_JSON','HDF5','JDBC','JSON','NETCDF4',
                'PICKLE','SHAPEFILE','TEXT','XML_FILE')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                '''
            ],
            [
                # task_id widened to VARCHAR on upgrade, back to INT on downgrade.
                """
                ALTER TABLE data_source CHANGE `task_id` `task_id` VARCHAR(255) NULL;
                """,
                """ALTER TABLE data_source CHANGE `task_id` `task_id` INT NULL;
                """,
            ],
            [
                # NOTE(review): the upgrade and downgrade enum lists below are
                # identical — confirm this no-op pair is intentional.
                """
                ALTER TABLE attribute CHANGE `type` `type`
                ENUM('BINARY','CHARACTER','DECIMAL','DATE','DATETIME','DOUBLE','ENUM',
                'FLOAT','INTEGER','LAT_LONG','LONG','TEXT','TIME','VECTOR','TIMESTAMP')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                """,
                """
                ALTER TABLE attribute CHANGE `type` `type`
                ENUM('BINARY','CHARACTER','DECIMAL','DATE','DATETIME','DOUBLE','ENUM',
                'FLOAT','INTEGER','LAT_LONG','LONG','TEXT','TIME','VECTOR','TIMESTAMP')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                """
            ]
        ]
    elif is_psql():
        new_ds_values = [
            'CSV', 'CUSTOM', 'GEO_JSON', 'HDF5', 'JDBC', 'JSON', 'NETCDF4',
            'PARQUET', 'PICKLE', 'SHAPEFILE', 'TEXT', 'UNKNOWN', 'XML_FILE'
        ]
        old_ds_values = [
            'CSV', 'CUSTOM', 'GEO_JSON', 'HDF5', 'JDBC', 'JSON', 'NETCDF4',
            'PICKLE', 'SHAPEFILE', 'TEXT', 'XML_FILE'
        ]
        # NOTE(review): new_attr_values and old_attr_values are identical —
        # confirm this no-op pair is intentional.
        new_attr_values = [
            'BINARY', 'CHARACTER', 'DECIMAL', 'DATE', 'DATETIME', 'DOUBLE',
            'ENUM', 'FLOAT', 'INTEGER', 'LAT_LONG', 'LONG', 'TEXT', 'TIME',
            'VECTOR', 'TIMESTAMP'
        ]
        old_attr_values = [
            'BINARY', 'CHARACTER', 'DECIMAL', 'DATE', 'DATETIME', 'DOUBLE',
            'ENUM', 'FLOAT', 'INTEGER', 'LAT_LONG', 'LONG', 'TEXT', 'TIME',
            'VECTOR', 'TIMESTAMP'
        ]
        all_commands = [
            [
                get_psql_enum_alter_commands(['data_source'], ['format'],
                    'DataSourceFormatEnumType', new_ds_values, 'CSV'),
                get_psql_enum_alter_commands(['data_source'], ['format'],
                    'DataSourceFormatEnumType', old_ds_values, 'CSV'),
            ],
            [
                'ALTER TABLE data_source ALTER COLUMN task_id TYPE VARCHAR(255)',
                'ALTER TABLE data_source ALTER COLUMN task_id TYPE INT USING (task_id::integer)',
            ],
            [
                # The DataTypeEnumType is shared by attribute.type and
                # attribute_privacy.data_type, hence the two-table form.
                get_psql_enum_alter_commands(
                    ['attribute', 'attribute_privacy'], ['type', 'data_type'],
                    'DataTypeEnumType', new_attr_values, 'INTEGER'),
                get_psql_enum_alter_commands(
                    ['attribute', 'attribute_privacy'], ['type', 'data_type'],
                    'DataTypeEnumType', old_attr_values, 'INTEGER'),
            ]
        ]
    return all_commands
def get_commands():
    """Return [upgrade, downgrade] SQL pairs for: the data_source.format
    enum (adds the *_FOLDER formats), the storage.type enum (adds LOCAL),
    and the model.type enum."""
    all_commands = []
    if is_mysql():
        all_commands = [
            (
                # Upgrade: format enum including the folder-based formats.
                """
                ALTER TABLE data_source CHANGE `format` `format` ENUM(
                'CSV','CUSTOM','GEO_JSON','HAR_IMAGE_FOLDER','HDF5','DATA_FOLDER',
                'IMAGE_FOLDER', 'JDBC','JSON','NETCDF4','PARQUET','PICKLE','SHAPEFILE',
                'TAR_IMAGE_FOLDER','TEXT', 'VIDEO_FOLDER', 'UNKNOWN','XML_FILE')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""",
                # Downgrade: format enum without the folder-based formats.
                """
                ALTER TABLE data_source CHANGE `format` `format` ENUM(
                'CSV','CUSTOM','GEO_JSON','HDF5','JDBC','JSON',
                'NETCDF4','PARQUET','PICKLE','SHAPEFILE','TEXT',
                'UNKNOWN','XML_FILE')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;"""
            ),
            (
                # Upgrade: storage enum including LOCAL; downgrade removes it.
                """
                ALTER TABLE `storage` CHANGE `type` `type` ENUM(
                'HDFS','OPHIDIA','ELASTIC_SEARCH','MONGODB','POSTGIS','HBASE',
                'CASSANDRA','JDBC','LOCAL')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""",
                """
                ALTER TABLE `storage` CHANGE `type` `type` ENUM(
                'HDFS','OPHIDIA','ELASTIC_SEARCH','MONGODB','POSTGIS','HBASE',
                'CASSANDRA','JDBC')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""",
            ),
            (
                # NOTE(review): the model.type upgrade and downgrade enum lists
                # below are identical — confirm this no-op pair is intentional.
                """ALTER TABLE `model` CHANGE `type` `type` ENUM(
                'KERAS','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION',
                'SPARK_ML_CLASSIFICATION','UNSPECIFIED')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                """,
                """ALTER TABLE `model` CHANGE `type` `type` ENUM(
                'KERAS','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION',
                'SPARK_ML_CLASSIFICATION','UNSPECIFIED')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                """
            )
        ]
    elif is_psql():
        old_ds_values = ['CSV','CUSTOM','GEO_JSON','HDF5','JDBC','JSON','NETCDF4',
                         'PARQUET','PICKLE','SHAPEFILE','TEXT','UNKNOWN',
                         'XML_FILE']
        new_ds_values = ['CSV','CUSTOM','GEO_JSON','HAR_IMAGE_FOLDER','HDF5','DATA_FOLDER',
                         'IMAGE_FOLDER', 'JDBC','JSON','NETCDF4','PARQUET','PICKLE','SHAPEFILE',
                         'TAR_IMAGE_FOLDER','TEXT', 'VIDEO_FOLDER', 'UNKNOWN','XML_FILE']
        # NOTE(review): new_model_values and old_model_values are identical —
        # confirm this no-op pair is intentional.
        new_model_values = ['KERAS','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION',
                            'SPARK_ML_CLASSIFICATION','UNSPECIFIED']
        old_model_values = ['KERAS','SPARK_ML_REGRESSION','SPARK_MLLIB_CLASSIFICATION',
                            'SPARK_ML_CLASSIFICATION','UNSPECIFIED']
        new_storage_values = [
            'HDFS','OPHIDIA','ELASTIC_SEARCH','MONGODB','POSTGIS','HBASE',
            'CASSANDRA','JDBC','LOCAL']
        old_storage_values = [
            'HDFS','OPHIDIA','ELASTIC_SEARCH','MONGODB','POSTGIS','HBASE',
            'CASSANDRA','JDBC']
        all_commands = [
            [
                get_psql_enum_alter_commands(['data_source'], ['format'],
                    'DataSourceFormatEnumType', new_ds_values, 'CSV'),
                get_psql_enum_alter_commands(['data_source'], ['format'],
                    'DataSourceFormatEnumType', old_ds_values, 'CSV'),
            ],
            [
                get_psql_enum_alter_commands(['model'], ['type'],
                    'ModelTypeEnumType', new_model_values, 'UNSPECIFIED'),
                get_psql_enum_alter_commands(['model'], ['type'],
                    'ModelTypeEnumType', old_model_values, 'UNSPECIFIED'),
            ],
            [
                get_psql_enum_alter_commands(['storage'], ['type'],
                    'StorageTypeEnumType', new_storage_values, 'HDFS'),
                get_psql_enum_alter_commands(['storage'], ['type'],
                    'StorageTypeEnumType', old_storage_values, 'HDFS'),
            ]
        ]
    return all_commands
def get_commands():
    """Return [upgrade, downgrade] SQL pairs for: the model.type enum
    (adds the PERFORMANCE_* values), model job/task/workflow nullability,
    the storage.type enum, and the data_source.format enum."""
    all_commands = []
    if is_mysql():
        all_commands = [
            [
                # Upgrade: model enum with PERFORMANCE_SPARK/PERFORMANCE_KERAS.
                '''
                ALTER TABLE `model` CHANGE `type` `type`
                ENUM('KERAS','PERFORMANCE_SPARK', 'PERFORMANCE_KERAS', 'PERFORMANCE',
                'SPARK_ML_REGRESSION', 'SPARK_MLLIB_CLASSIFICATION',
                'SPARK_ML_CLASSIFICATION','UNSPECIFIED')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                ''',
                # Downgrade: model enum without PERFORMANCE_SPARK/PERFORMANCE_KERAS.
                '''
                ALTER TABLE `model` CHANGE `type` `type`
                ENUM('KERAS', 'SPARK_ML_REGRESSION', 'PERFORMANCE',
                'SPARK_MLLIB_CLASSIFICATION',
                'SPARK_ML_CLASSIFICATION','UNSPECIFIED')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                '''
            ],
            #['ALTER TABLE `model` ADD INDEX `inx_type` (`type`);',
            # 'ALTER TABLE `model` DROP INDEX `inx_type`;'],
            [
                # Make job_id/task_id/workflow_id nullable on upgrade.
                """
                ALTER TABLE `model`
                CHANGE `job_id` `job_id` INT(11) NULL,
                CHANGE `task_id` `task_id` VARCHAR(200)
                CHARSET utf8 COLLATE utf8_unicode_ci NULL,
                CHANGE `workflow_id` `workflow_id` INT(11) NULL;
                """,
                """
                ALTER TABLE `model`
                CHANGE `job_id` `job_id` INT(11),
                CHANGE `task_id` `task_id` VARCHAR(200)
                CHARSET utf8 COLLATE utf8_unicode_ci,
                CHANGE `workflow_id` `workflow_id` INT(11);"""
            ],
            [
                # NOTE(review): the storage.type upgrade and downgrade enum
                # lists below are identical — confirm this is intentional.
                """
                ALTER TABLE `storage` CHANGE `type` `type` ENUM(
                'CASSANDRA','ELASTIC_SEARCH','HBASE','HDFS','JDBC','LOCAL',
                'MONGODB','OPHIDIA','POSTGIS')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""",
                """
                ALTER TABLE `storage` CHANGE `type` `type` ENUM(
                'CASSANDRA','ELASTIC_SEARCH','HBASE','HDFS','JDBC','LOCAL',
                'MONGODB','OPHIDIA','POSTGIS')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;""",
            ],
            [
                # NOTE(review): the data_source.format upgrade and downgrade
                # enum lists below are identical — confirm this is intentional.
                """
                ALTER TABLE `data_source` CHANGE `format` `format`
                ENUM('CSV','CUSTOM','GEO_JSON','HAR_IMAGE_FOLDER','HDF5',
                'DATA_FOLDER','IMAGE_FOLDER','JDBC','JSON','NETCDF4','PARQUET',
                'PICKLE','SHAPEFILE','TAR_IMAGE_FOLDER','TEXT','VIDEO_FOLDER',
                'UNKNOWN', 'XML_FILE')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                """,
                """
                ALTER TABLE `data_source` CHANGE `format` `format`
                ENUM('CSV','CUSTOM','GEO_JSON','HAR_IMAGE_FOLDER','HDF5',
                'DATA_FOLDER','IMAGE_FOLDER','JDBC','JSON','NETCDF4','PARQUET',
                'PICKLE','SHAPEFILE','TAR_IMAGE_FOLDER','TEXT','VIDEO_FOLDER',
                'UNKNOWN','XML_FILE')
                CHARSET utf8 COLLATE utf8_unicode_ci NOT NULL;
                """
            ]
        ]
    elif is_psql():
        # NOTE(review): unlike the MySQL branch above, these PostgreSQL
        # format lists omit the *_FOLDER formats — confirm the divergence.
        new_ds_values = [
            'CSV', 'CUSTOM', 'GEO_JSON', 'HDF5', 'JDBC', 'JSON', 'NETCDF4',
            'PARQUET', 'PICKLE', 'SHAPEFILE', 'TEXT', 'UNKNOWN', 'XML_FILE'
        ]
        old_ds_values = [
            'CSV', 'CUSTOM', 'GEO_JSON', 'HDF5', 'JDBC', 'JSON', 'NETCDF4',
            'PICKLE', 'SHAPEFILE', 'TEXT', 'XML_FILE'
        ]
        new_model_values = [
            'KERAS', 'PERFORMANCE_SPARK', 'PERFORMANCE_KERAS', 'PERFORMANCE',
            'SPARK_ML_REGRESSION', 'SPARK_MLLIB_CLASSIFICATION',
            'SPARK_ML_CLASSIFICATION', 'UNSPECIFIED'
        ]
        old_model_values = [
            'KERAS', 'SPARK_ML_REGRESSION', 'PERFORMANCE',
            'SPARK_MLLIB_CLASSIFICATION', 'SPARK_ML_CLASSIFICATION',
            'UNSPECIFIED'
        ]
        # NOTE(review): new_storage_values and old_storage_values are
        # identical — confirm this no-op pair is intentional.
        new_storage_values = [
            'CASSANDRA', 'ELASTIC_SEARCH', 'HBASE', 'HDFS', 'JDBC', 'LOCAL',
            'MONGODB', 'OPHIDIA', 'POSTGIS'
        ]
        old_storage_values = [
            'CASSANDRA', 'ELASTIC_SEARCH', 'HBASE', 'HDFS', 'JDBC', 'LOCAL',
            'MONGODB', 'OPHIDIA', 'POSTGIS'
        ]
        all_commands = [
            [
                get_psql_enum_alter_commands(['model'], ['type'],
                    'ModelTypeEnumType', new_model_values, 'UNSPECIFIED'),
                get_psql_enum_alter_commands(['model'], ['type'],
                    'ModelTypeEnumType', old_model_values, 'UNSPECIFIED'),
            ],
            [
                # Both directions drop NOT NULL (nullability is not restored
                # on downgrade).
                """
                ALTER TABLE model
                ALTER COLUMN job_id DROP NOT NULL,
                ALTER COLUMN task_id DROP NOT NULL,
                ALTER COLUMN workflow_id DROP NOT NULL;
                """,
                """
                ALTER TABLE model
                ALTER COLUMN job_id DROP NOT NULL,
                ALTER COLUMN task_id DROP NOT NULL,
                ALTER COLUMN workflow_id DROP NOT NULL;
                """
            ],
            [
                get_psql_enum_alter_commands(['storage'], ['type'],
                    'StorageTypeEnumType', new_storage_values, 'HDFS'),
                get_psql_enum_alter_commands(['storage'], ['type'],
                    'StorageTypeEnumType', old_storage_values, 'HDFS'),
            ],
            [
                get_psql_enum_alter_commands(['data_source'], ['format'],
                    'DataSourceFormatEnumType', new_ds_values, 'CSV'),
                get_psql_enum_alter_commands(['data_source'], ['format'],
                    'DataSourceFormatEnumType', old_ds_values, 'CSV'),
            ]
        ]
    return all_commands