Example #1
def test_drop_table_comment(self):
    # this is handled by SQLAlchemy's compilers
    context = op_fixture("oracle")
    op.drop_table_comment(
        't2',
        existing_comment='t2 table',
        schema='foo'
    )
    context.assert_(
        "COMMENT ON TABLE foo.t2 IS ''"
    )
Example #2
def test_drop_table_comment(self):
    # this is handled by SQLAlchemy's compilers
    context = op_fixture("postgresql")
    op.drop_table_comment("t2", existing_comment="t2 table", schema="foo")
    context.assert_("COMMENT ON TABLE foo.t2 IS NULL")
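A note on the pattern these tests exercise: `drop_table_comment` is the inverse of `create_table_comment`, and a real migration typically pairs the two. A minimal sketch, assuming an illustrative table `t2` in schema `foo` (the names are borrowed from the tests above, not from any particular project):

from alembic import op


def upgrade():
    # Set the table comment; the dialect compiles this to COMMENT ON TABLE
    # (PostgreSQL/Oracle) or ALTER TABLE ... COMMENT (MySQL).
    op.create_table_comment("t2", "t2 table", schema="foo")


def downgrade():
    # Remove it again; existing_comment records the old text so that
    # autogenerate can produce the reverse operation.
    op.drop_table_comment("t2", existing_comment="t2 table", schema="foo")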
Example #3
def test_drop_table_comment(self):
    # this is handled by SQLAlchemy's compilers
    context = op_fixture("mysql")
    op.drop_table_comment("t2", existing_comment="t2 table", schema="foo")
    context.assert_("ALTER TABLE foo.t2 COMMENT ''")
Example #4
def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "state_supervision_violation_response_history",
        "supervision_violation_response_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for a(n) supervision violation response, generated automatically by the Recidiviz system. This identifier is not stable over time (it may change if historical data is re-ingested), but should be used within the context of a given dataset to connect this object to relevant supervision violation response information.",
        existing_nullable=False,
    )
    op.drop_table_comment(
        "state_supervision_violation_response_decision_agent_association",
        existing_comment=
        "Association table that connects supervision violation responses with agents by their ids.",
        schema=None,
    )
    op.alter_column(
        "state_supervision_violation_response_decision_agent_association",
        "supervision_violation_response_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for a(n) violation response, generated automatically by the Recidiviz system. This identifier is not stable over time (it may change if historical data is re-ingested), but should be used within the context of a given dataset to connect this object to relevant violation response information.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_violation_response_decision_agent_association",
        "agent_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for a(n) agent, generated automatically by the Recidiviz system. This identifier is not stable over time (it may change if historical data is re-ingested), but should be used within the context of a given dataset to connect this object to relevant agent information.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_violated_condition_entry_history",
        "supervision_violation_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for a(n) supervision violation, generated automatically by the Recidiviz system. This identifier is not stable over time (it may change if historical data is re-ingested), but should be used within the context of a given dataset to connect this object to relevant supervision violation information.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_violated_condition_entry",
        "supervision_violation_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for a(n) supervision violation, generated automatically by the Recidiviz system. This identifier is not stable over time (it may change if historical data is re-ingested), but should be used within the context of a given dataset to connect this object to relevant supervision violation information.",
        existing_nullable=True,
    )
    op.drop_table_comment(
        "state_supervision_period_program_assignment_association",
        existing_comment=
        "Association table that connects supervision periods with program assignments by their ids.",
        schema=None,
    )
    op.alter_column(
        "state_person_external_id",
        "person_external_id_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for a(n) person external id, generated automatically by the Recidiviz system. This identifier is not stable over time (it may change if historical data is re-ingested), but should be used within the context of a given dataset to connect this object to others.",
        existing_nullable=False,
        autoincrement=True,
    )
    op.alter_column(
        "state_parole_decision_history",
        "incarceration_period_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for a(n) incarceration period, generated automatically by the Recidiviz system. This identifier is not stable over time (it may change if historical data is re-ingested), but should be used within the context of a given dataset to connect this object to relevant incarceration period information.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_parole_decision",
        "incarceration_period_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for a(n) incarceration period, generated automatically by the Recidiviz system. This identifier is not stable over time (it may change if historical data is re-ingested), but should be used within the context of a given dataset to connect this object to relevant incarceration period information.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_incarceration_incident_history",
        "incarceration_incident_history_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "This primary key should not be used. It only exists because SQLAlchemy requires every table to have a unique primary key.",
        existing_nullable=False,
        autoincrement=True,
    )
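A reading aid for the autogenerated alter_column calls above (and in the later examples): comment=None clears the column comment, while existing_comment only records the text being removed so that reverse autogeneration can restore it. The matching upgrade simply swaps the two values. A minimal sketch with a hypothetical table, column, and comment text:

import sqlalchemy as sa
from alembic import op


def upgrade():
    op.alter_column(
        "some_table",
        "some_column",
        existing_type=sa.INTEGER(),
        comment="Documented meaning of the column",  # comment is set here
        existing_comment=None,
        existing_nullable=False,
    )


def downgrade():
    op.alter_column(
        "some_table",
        "some_column",
        existing_type=sa.INTEGER(),
        comment=None,  # comment is cleared here
        existing_comment="Documented meaning of the column",
        existing_nullable=False,
    )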
Example #5
def test_drop_table_comment_op(self):
    context = op_fixture()

    op.drop_table_comment("some_table")

    context.assert_("COMMENT ON TABLE some_table IS NULL")
Example #6
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table_comment('blogtype',
                          existing_comment='???? - ????,????,????',
                          schema=None)
    op.drop_table_comment('category', existing_comment='????', schema=None)
    op.alter_column('reply',
                    'topic_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=True)
    op.alter_column('reply',
                    'user_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=True)
    op.drop_table_comment('reply', existing_comment='????', schema=None)
    op.drop_table_comment('topic', existing_comment='????', schema=None)
    op.drop_table_comment('user', existing_comment='?????', schema=None)
    op.alter_column('voke',
                    'topic_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=True)
    op.alter_column('voke',
                    'user_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    nullable=True)
    op.drop_table_comment('voke', existing_comment='??', schema=None)
Example #7
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('import_tables',
                    'table_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    comment=None,
                    existing_comment='Unique identifier',
                    existing_nullable=False,
                    autoincrement=True)
    op.alter_column('export_tables',
                    'table_id',
                    existing_type=mysql.INTEGER(display_width=11),
                    comment=None,
                    existing_comment='Unique identifier of the table',
                    existing_nullable=False,
                    autoincrement=True)
    op.alter_column('export_tables',
                    'hive_javaheap',
                    existing_type=mysql.BIGINT(display_width=20),
                    comment=None,
                    existing_comment='Heap size for Hive',
                    existing_nullable=True)
    op.drop_column('export_tables', 'airflow_priority')
    op.drop_table_comment(
        'airflow_tasks',
        existing_comment=
        'All DAGs can be customized by adding Tasks into the DAG. Depending on what placement and type of Tasks that is created, DBImport will add custom placeholders to keep the DAG separated in three different parts. Before, In and After Main. In main is where all regular Imports, export or ETL jobs are executed. If you want to execute something before these, you place it in Before Main. And if you want to execute something after, you place it in After main. Please check the Airflow Integration part of the documentation for more examples and better understanding of the data you can put into this table',
        schema=None)
    op.alter_column('airflow_tasks',
                    'task_type',
                    existing_type=mysql.ENUM('shell script', 'Hive SQL Script',
                                             'JDBC SQL'),
                    comment=None,
                    existing_comment='The type of the Task',
                    existing_nullable=False,
                    existing_server_default=sa.text("'Hive SQL Script'"))
    op.alter_column('airflow_tasks',
                    'task_name',
                    existing_type=mysql.VARCHAR(length=64),
                    comment=None,
                    existing_comment='Name of the Task in Airflow')
    op.alter_column(
        'airflow_tasks',
        'task_dependency_in_main',
        existing_type=mysql.VARCHAR(length=256),
        comment=None,
        existing_comment=
        'If placement is In Main, this defines a dependency for the Task. Comma separated list',
        existing_nullable=True)
    op.alter_column(
        'airflow_tasks',
        'task_config',
        existing_type=mysql.VARCHAR(length=256),
        comment=None,
        existing_comment=
        'The configuration for the Task. Depends on what Task type it is.',
        existing_nullable=True)
    op.alter_column('airflow_tasks',
                    'placement',
                    existing_type=mysql.ENUM('before main', 'after main',
                                             'in main'),
                    comment=None,
                    existing_comment='Placement for the Task',
                    existing_nullable=False,
                    existing_server_default=sa.text("'after main'"))
    op.alter_column(
        'airflow_tasks',
        'include_in_airflow',
        existing_type=mysql.TINYINT(display_width=4),
        comment=None,
        existing_comment=
        'Enable or disable the Task in the DAG during creation of DAG file.',
        existing_nullable=False,
        existing_server_default=sa.text("'1'"))
    op.alter_column('airflow_tasks',
                    'hive_db',
                    existing_type=mysql.VARCHAR(length=256),
                    comment=None,
                    existing_comment='<NOT USED>',
                    existing_nullable=True)
    op.alter_column('airflow_tasks',
                    'dag_name',
                    existing_type=mysql.VARCHAR(length=64),
                    comment=None,
                    existing_comment='Name of DAG to add Tasks to')
    op.alter_column(
        'airflow_tasks',
        'airflow_priority',
        existing_type=mysql.TINYINT(display_width=4),
        comment=None,
        existing_comment='Airflow Priority. Higher number, higher priority',
        existing_nullable=True)
    op.alter_column('airflow_tasks',
                    'airflow_pool',
                    existing_type=mysql.VARCHAR(length=64),
                    comment=None,
                    existing_comment='Airflow Pool to use.',
                    existing_nullable=True)
    op.drop_column('airflow_tasks', 'sensor_timeout_minutes')
    op.drop_column('airflow_tasks', 'sensor_poke_interval')
    op.drop_column('airflow_tasks', 'sensor_connection')
    op.drop_constraint('FK_airflow_tasks_jdbc_connections',
                       'airflow_tasks',
                       type_='foreignkey')
    op.create_foreign_key('airflow_tasks_ibfk_1', 'airflow_tasks',
                          'jdbc_connections', ['jdbc_dbalias'], ['dbalias'])
    op.drop_table_comment(
        'airflow_import_dags',
        existing_comment=
        'To create a DAG in Airflow for Imports, this is the table that holds all definitions of the DAG configuration, including the filter that defines what tables to import, schedules times, pool names and much more.',
        schema=None)
    op.alter_column('airflow_import_dags',
                    'use_python_dbimport',
                    existing_type=mysql.TINYINT(display_width=4),
                    comment=None,
                    existing_comment='Legacy use only. Always put this to 1',
                    existing_nullable=False,
                    existing_server_default=sa.text("'1'"))
    op.alter_column('airflow_import_dags',
                    'retries_stage2',
                    existing_type=mysql.TINYINT(display_width=4),
                    comment=None,
                    existing_comment='Specific retries number for ETL Phase',
                    existing_nullable=True)
    op.alter_column(
        'airflow_import_dags',
        'retries_stage1',
        existing_type=mysql.TINYINT(display_width=4),
        comment=None,
        existing_comment='Specific retries number for Import Phase',
        existing_nullable=True)
    op.alter_column(
        'airflow_import_dags',
        'retries',
        existing_type=mysql.TINYINT(display_width=4),
        comment=None,
        existing_comment=
        'How many retries should be Task do in Airflow before it failes',
        existing_nullable=False,
        existing_server_default=sa.text("'5'"))
    op.alter_column(
        'airflow_import_dags',
        'pool_stage2',
        existing_type=mysql.VARCHAR(length=256),
        comment='Airflow pool used for stage2 tasks. NULL for default pool',
        existing_comment=
        'Airflow pool used for stage2 tasks. NULL for the default DAG pool',
        existing_nullable=True)
    op.alter_column(
        'airflow_import_dags',
        'pool_stage1',
        existing_type=mysql.VARCHAR(length=256),
        comment='Airflow pool used for stage1 tasks. NULL for default pool',
        existing_comment=
        'Airflow pool used for stage1 tasks. NULL for the default Hostname pool',
        existing_nullable=True)
    op.alter_column(
        'airflow_import_dags',
        'finish_all_stage1_first',
        existing_type=mysql.TINYINT(display_width=4),
        comment=None,
        existing_comment=
        '1 = All Import phase jobs will be completed first, and when all is successfull, the ETL phase start',
        existing_nullable=False,
        existing_server_default=sa.text("'0'"))
    op.alter_column('airflow_import_dags',
                    'auto_table_discovery',
                    existing_type=mysql.TINYINT(display_width=4),
                    comment=None,
                    existing_comment='<NOT USED>',
                    existing_nullable=False,
                    existing_server_default=sa.text("'1'"))
    op.drop_column('airflow_import_dags', 'run_import_and_etl_separate')
    op.drop_column('airflow_import_dags', 'auto_regenerate_dag')
    op.drop_table_comment(
        'airflow_export_dags',
        existing_comment=
        'To create a DAG in Airflow for Exports, this is the table that holds all definitions of the DAG configuration, including the filter that defines what tables to export, schedules times, pool names and much more. ',
        schema=None)
    op.alter_column('airflow_export_dags',
                    'use_python_dbimport',
                    existing_type=mysql.TINYINT(display_width=4),
                    comment=None,
                    existing_comment='Legacy use only. Always put this to 1',
                    existing_nullable=False,
                    existing_server_default=sa.text("'1'"))
    op.alter_column(
        'airflow_export_dags',
        'retries',
        existing_type=mysql.TINYINT(display_width=4),
        comment=None,
        existing_comment=
        'How many retries should be Task do in Airflow before it failes',
        existing_nullable=True)
    op.alter_column(
        'airflow_export_dags',
        'operator_notes',
        existing_type=mysql.TEXT(),
        comment='Free text field to write a note about the import. ',
        existing_comment='Free text field to write a note about the export. ',
        existing_nullable=True)
    op.drop_column('airflow_export_dags', 'auto_regenerate_dag')
    op.drop_table_comment(
        'airflow_etl_dags',
        existing_comment=
        'To create a DAG in Airflow for only ETL jobs, this is the table that holds all definitions of the DAG configuration, including the filter that defines what ETL jobs to run, schedules times, pool names and much more. ',
        schema=None)
    op.alter_column('airflow_etl_dags',
                    'trigger_dag_on_success',
                    existing_type=mysql.VARCHAR(length=64),
                    comment=None,
                    existing_comment='<NOT USED>',
                    existing_nullable=True)
    op.alter_column(
        'airflow_etl_dags',
        'retries',
        existing_type=mysql.TINYINT(display_width=4),
        comment=None,
        existing_comment=
        'How many retries should be Task do in Airflow before it failes',
        existing_nullable=True)
    op.alter_column(
        'airflow_etl_dags',
        'operator_notes',
        existing_type=mysql.TEXT(),
        comment='Free text field to write a note about the import. ',
        existing_comment='Free text field to write a note about the ETL job. ',
        existing_nullable=True)
    op.alter_column(
        'airflow_etl_dags',
        'filter_task',
        existing_type=mysql.VARCHAR(length=64),
        comment=None,
        existing_comment='Filter string for TASK in etl_jobs table',
        existing_nullable=True)
    op.alter_column(
        'airflow_etl_dags',
        'filter_target_db',
        existing_type=mysql.VARCHAR(length=256),
        comment=None,
        existing_comment='Filter string for TARGET_DB in etl_jobs table',
        existing_nullable=True)
    op.alter_column(
        'airflow_etl_dags',
        'filter_source_db',
        existing_type=mysql.VARCHAR(length=256),
        comment=None,
        existing_comment='Filter string for SOURCE_DB in etl_jobs table',
        existing_nullable=True)
    op.alter_column('airflow_etl_dags',
                    'filter_job',
                    existing_type=mysql.VARCHAR(length=64),
                    comment=None,
                    existing_comment='Filter string for JOB in etl_jobs table',
                    existing_nullable=False)
    op.drop_column('airflow_etl_dags', 'auto_regenerate_dag')
    op.drop_table_comment(
        'airflow_dag_sensors',
        existing_comment='This table is used only in Legacy DBImport',
        schema=None)
    op.drop_table_comment(
        'airflow_custom_dags',
        existing_comment=
        'Its possible to construct a DAG that have no import, export or ETL definitions in it, but instead just Tasks from the airflow_task table. That might nbe useful to for example run custom Hive Code after an import is completed as a separate DAG. Defining a DAG in here also requires you to have at least one task in airflow_tasks defined "in main"',
        schema=None)
    op.alter_column(
        'airflow_custom_dags',
        'retries',
        existing_type=mysql.INTEGER(display_width=11),
        comment=None,
        existing_comment=
        'How many retries should be Task do in Airflow before it failes',
        existing_nullable=False,
        existing_server_default=sa.text("'0'"))
    op.alter_column(
        'airflow_custom_dags',
        'operator_notes',
        existing_type=mysql.TEXT(),
        comment='Free text field to write a note about the import. ',
        existing_comment=
        'Free text field to write a note about the custom DAG. ',
        existing_nullable=True)
    op.drop_column('airflow_custom_dags', 'auto_regenerate_dag')
    op.create_table('airflow_execution_type',
                    sa.Column('executionid',
                              mysql.INTEGER(display_width=11),
                              autoincrement=True,
                              nullable=False),
                    sa.Column('execution_type', mysql.TEXT(), nullable=False),
                    sa.PrimaryKeyConstraint('executionid'),
                    mysql_default_charset='latin1',
                    mysql_engine='InnoDB')
    op.create_table(
        'airflow_import_dag_execution',
        sa.Column('dag_name', mysql.VARCHAR(length=64), nullable=False),
        sa.Column('task_name', mysql.VARCHAR(length=256), nullable=False),
        sa.Column('task_config', mysql.TEXT(), nullable=False),
        sa.Column('executionid',
                  mysql.INTEGER(display_width=11),
                  autoincrement=False,
                  nullable=False),
        sa.ForeignKeyConstraint(['executionid'],
                                ['airflow_execution_type.executionid'],
                                name='airflow_import_dag_execution_ibfk_1'),
        sa.PrimaryKeyConstraint('dag_name', 'task_name'),
        mysql_default_charset='latin1',
        mysql_engine='InnoDB')
    op.create_index('FK_airflow_import_dag_execution_airflow_execution_type',
                    'airflow_import_dag_execution', ['executionid'],
                    unique=False)
    op.create_table(
        'airflow_import_task_execution',
        sa.Column('hive_db', mysql.VARCHAR(length=256), nullable=False),
        sa.Column('hive_table', mysql.VARCHAR(length=256), nullable=False),
        sa.Column('stage',
                  mysql.TINYINT(display_width=4),
                  autoincrement=False,
                  nullable=False),
        sa.Column('executionid',
                  mysql.INTEGER(display_width=11),
                  autoincrement=False,
                  nullable=False),
        sa.Column('task_config', mysql.TEXT(), nullable=False),
        sa.Column('task_name', mysql.TEXT(), nullable=True),
        sa.ForeignKeyConstraint(['executionid'],
                                ['airflow_execution_type.executionid'],
                                name='airflow_import_task_execution_ibfk_1'),
        sa.PrimaryKeyConstraint('hive_db', 'hive_table', 'stage'),
        mysql_default_charset='latin1',
        mysql_engine='InnoDB')
    op.create_index('FK_airflow_import_task_execution_airflow_execution_type',
                    'airflow_import_task_execution', ['executionid'],
                    unique=False)
Example #8
def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        "state_supervision_violation_type_entry_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_violation_type_entry_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_supervision_violation_response_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_violation_response_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_supervision_violation_response_decision_entry_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_violation_response_decision_entry_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_supervision_violation_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_violation_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_supervision_violated_condition_entry_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_violated_condition_entry_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_supervision_sentence_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_sentence_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_supervision_period_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_period_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_supervision_contact_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_contact_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_supervision_case_type_entry_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_supervision_case_type_entry_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_sentence_group_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_sentence_group_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_program_assignment_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_program_assignment_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_person_race_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_person_race_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_person_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_person_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_person_external_id_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_person_external_id_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_person_ethnicity_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_person_ethnicity_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_person_alias_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_person_alias_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_parole_decision_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_parole_decision_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_incarceration_sentence_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_incarceration_sentence_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_incarceration_period_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_incarceration_period_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_incarceration_incident_outcome_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_incarceration_incident_outcome_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_incarceration_incident_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_incarceration_incident_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_fine_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_fine_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_early_discharge_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_early_discharge_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_court_case_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_court_case_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_charge_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_charge_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_bond_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_bond_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_assessment_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_assessment_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_agent_history",
        "valid_to",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns stopped having the associated values.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_agent_history",
        "valid_from",
        existing_type=postgresql.TIMESTAMP(),
        comment=None,
        existing_comment=
        "Timestamp on which the columns started having the associated values.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_agent_history",
        "state_code",
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment="The state this agent operates in.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_agent_history",
        "full_name",
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment="The state agent's full name.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_agent_history",
        "external_id",
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment=
        "The  unique identifier for the StateAgent, unique within the scope of the source data system",
        existing_nullable=True,
    )
    op.alter_column(
        "state_agent_history",
        "agent_type_raw_text",
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment="The raw text value of the agent type.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_agent_history",
        "agent_type",
        existing_type=postgresql.ENUM(
            "CORRECTIONAL_OFFICER",
            "JUDGE",
            "PAROLE_BOARD_MEMBER",
            "SUPERVISION_OFFICER",
            "UNIT_SUPERVISOR",
            "PRESENT_WITHOUT_INFO",
            "INTERNAL_UNKNOWN",
            name="state_agent_type",
        ),
        comment=None,
        existing_comment="The type of agent.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_agent_history",
        "agent_id",
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for an agent. If not specified, one will be generated.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_agent_history",
        "agent_history_id",
        type_=sa.INTEGER(),
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "This primary key should not be used. It only exists because SQLAlchemy requires every table to have a unique primary key.",
        existing_nullable=False,
        autoincrement=True,
    )
    op.drop_table_comment(
        "state_agent",
        existing_comment=
        "The StateAgent object represents some agent operating on behalf of the criminal justice system, usually referenced in the context of taking some action related to a person moving through that system. This includes references such as the judges trying cases, the officers supervising people on parole, the individuals who make a decision at a parole hearing, and so on. We entity match across StateAgents where possible so that we can see the full scope of actions taken by a particular agent to understand patterns in their behavior.",
        schema=None,
    )
    op.alter_column(
        "state_agent",
        "state_code",
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment="The state this agent operates in.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_agent",
        "full_name",
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment="The state agent's full name.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_agent",
        "external_id",
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment=
        "The  unique identifier for the StateAgent, unique within the scope of the source data system",
        existing_nullable=True,
    )
    op.alter_column(
        "state_agent",
        "agent_type_raw_text",
        existing_type=sa.VARCHAR(length=255),
        comment=None,
        existing_comment="The raw text value of the agent type.",
        existing_nullable=True,
    )
    op.alter_column(
        "state_agent",
        "agent_type",
        existing_type=postgresql.ENUM(
            "CORRECTIONAL_OFFICER",
            "JUDGE",
            "PAROLE_BOARD_MEMBER",
            "SUPERVISION_OFFICER",
            "UNIT_SUPERVISOR",
            "PRESENT_WITHOUT_INFO",
            "INTERNAL_UNKNOWN",
            name="state_agent_type",
        ),
        comment=None,
        existing_comment="The type of agent.",
        existing_nullable=False,
    )
    op.alter_column(
        "state_agent",
        "agent_id",
        type_=sa.INTEGER(),
        existing_type=sa.INTEGER(),
        comment=None,
        existing_comment=
        "Unique identifier for an agent. If not specified, one will be generated.",
        existing_nullable=False,
        autoincrement=True,
        existing_server_default=sa.text(
            "nextval('state_agent_agent_id_seq'::regclass)"),
    )
Example #9
def test_alembic_scenario(alembic_table):
    """
    Exercise all of the operations we support.

    It's a little awkward because we have to avoid doing too many
    operations on the same table to avoid tripping over limits on
    table mods within a short time.
    """
    from alembic import op
    from sqlalchemy import Column, DateTime, Integer, String

    assert alembic_table("account") is None

    account = op.create_table(
        "account",
        Column("id", Integer, nullable=False),
        Column("name", String(50), nullable=False, comment="The name"),
        Column("description", String(200)),
    )
    assert alembic_table("account", "schema") == [
        "SchemaField('id', 'INTEGER', 'REQUIRED')",
        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name')",
        "SchemaField('description', 'STRING(200)', 'NULLABLE')",
    ]

    op.bulk_insert(
        account,
        [
            dict(id=1, name="home", description="the home account"),
            dict(id=2, name="operations", description="the ops account"),
            dict(id=3, name="savings", description=None),
        ],
    )

    assert alembic_table("account", "rows") == [
        {"description": "the home account", "id": 1, "name": "home"},
        {"description": "the ops account", "id": 2, "name": "operations"},
        {"description": None, "id": 3, "name": "savings"},
    ]

    op.add_column(
        "account", Column("last_transaction_date", DateTime, comment="when updated")
    )

    assert alembic_table("account", "schema") == [
        "SchemaField('id', 'INTEGER', 'REQUIRED')",
        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name')",
        "SchemaField('description', 'STRING(200)', 'NULLABLE')",
        "SchemaField('last_transaction_date', 'DATETIME', 'NULLABLE', 'when updated')",
    ]

    op.create_table(
        "account_w_comment",
        Column("id", Integer, nullable=False),
        Column("name", String(50), nullable=False, comment="The name"),
        Column("description", String(200)),
        comment="This table has comments",
    )
    assert alembic_table("account_w_comment").description == "This table has comments"
    op.drop_table_comment("account_w_comment")
    assert alembic_table("account_w_comment").description is None

    op.drop_column("account_w_comment", "description")
    assert alembic_table("account_w_comment", "schema") == [
        "SchemaField('id', 'INTEGER', 'REQUIRED')",
        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name')",
    ]

    op.drop_table("account_w_comment")
    assert alembic_table("account_w_comment") is None

    op.rename_table("account", "accounts")
    assert alembic_table("account") is None
    assert alembic_table("accounts", "schema") == [
        "SchemaField('id', 'INTEGER', 'REQUIRED')",
        "SchemaField('name', 'STRING(50)', 'REQUIRED', 'The name')",
        "SchemaField('description', 'STRING(200)', 'NULLABLE')",
        "SchemaField('last_transaction_date', 'DATETIME', 'NULLABLE', 'when updated')",
    ]
    op.drop_table("accounts")
    assert alembic_table("accounts") is None

    op.execute(
        """
        create table transactions(
            account INT64 NOT NULL,
            transaction_time DATETIME NOT NULL,
            amount NUMERIC(11, 2) NOT NULL
            )
        partition by DATE(transaction_time)
        """
    )

    # The only thing we can alter about a column is to make it
    # nullable:
    op.alter_column("transactions", "amount", nullable=True)
    assert alembic_table("transactions", "schema") == [
        "SchemaField('account', 'INTEGER', 'REQUIRED')",
        "SchemaField('transaction_time', 'DATETIME', 'REQUIRED')",
        "SchemaField('amount', 'NUMERIC(11, 2)', 'NULLABLE')",
    ]

    op.create_table_comment("transactions", "Transaction log")
    assert alembic_table("transactions").description == "Transaction log"

    op.drop_table("transactions")
Example #10
def downgrade():
    op.drop_table_comment('document_type')
    op.drop_table_comment('inspected_location_type')
    op.drop_table_comment('inspection_status')
    op.drop_table_comment('legislation_act')
    op.drop_table_comment('legislation_compliance_article')
    op.drop_table_comment('location')
    op.drop_table_comment('nris_raw_data')
    op.drop_table_comment('document')
    op.drop_table_comment('inspection')
    op.drop_table_comment('inspection_type')
    op.drop_table_comment('legislation_act_section')
    op.drop_table_comment('inspected_location')
    op.drop_table_comment('inspection_document_xref')
    op.drop_table_comment('inspected_location_document_xref')
    op.drop_table_comment('order_advisory_detail')
    op.drop_table_comment('order_request_detail')
    op.drop_table_comment('order_stop_detail')
    op.drop_table_comment('order_warning_detail')
    op.drop_table_comment('order_stop_detail_document_xref')
    op.drop_table_comment('noncompliance_legislation')
    op.drop_table_comment('noncompliance_permit')
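Since this downgrade does nothing but strip table comments, the repetitive calls could equally be written as a loop over the table names; a sketch of that refactor, using the same tables as above (the list name is illustrative):

from alembic import op

COMMENTED_TABLES = [
    'document_type', 'inspected_location_type', 'inspection_status',
    'legislation_act', 'legislation_compliance_article', 'location',
    'nris_raw_data', 'document', 'inspection', 'inspection_type',
    'legislation_act_section', 'inspected_location',
    'inspection_document_xref', 'inspected_location_document_xref',
    'order_advisory_detail', 'order_request_detail', 'order_stop_detail',
    'order_warning_detail', 'order_stop_detail_document_xref',
    'noncompliance_legislation', 'noncompliance_permit',
]


def downgrade():
    # Drop every table comment in one pass.
    for table in COMMENTED_TABLES:
        op.drop_table_comment(table)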
Example #11
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('aircrafts_data',
                    'aircraft_code',
                    existing_type=sa.CHAR(length=3),
                    comment=None,
                    existing_comment='Aircraft code, IATA')
    op.alter_column('aircrafts_data',
                    'model',
                    existing_type=postgresql.JSONB(astext_type=sa.Text()),
                    comment=None,
                    existing_comment='Aircraft model',
                    existing_nullable=False)
    op.alter_column('aircrafts_data',
                    'rrange',
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment='Maximal flying distance, km',
                    existing_nullable=False)
    op.drop_table_comment('aircrafts_data',
                          existing_comment='Aircrafts (internal data)',
                          schema=None)
    op.alter_column('airports_data',
                    'airport_code',
                    existing_type=sa.CHAR(length=3),
                    comment=None,
                    existing_comment='Airport code')
    op.alter_column('airports_data',
                    'airport_name',
                    existing_type=postgresql.JSONB(astext_type=sa.Text()),
                    comment=None,
                    existing_comment='Airport name',
                    existing_nullable=False)
    op.alter_column('airports_data',
                    'city',
                    existing_type=postgresql.JSONB(astext_type=sa.Text()),
                    comment=None,
                    existing_comment='City',
                    existing_nullable=False)
    op.alter_column(
        'airports_data',
        'coordinates',
        existing_type=Point(),
        comment=None,
        existing_comment='Airport coordinates (longitude and latitude)',
        existing_nullable=False)
    op.alter_column('airports_data',
                    'timezone',
                    existing_type=sa.TEXT(),
                    comment=None,
                    existing_comment='Airport time zone',
                    existing_nullable=False)
    op.drop_table_comment('airports_data',
                          existing_comment='Airports (internal data)',
                          schema=None)
    op.alter_column('boarding_passes',
                    'boarding_no',
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment='Boarding pass number',
                    existing_nullable=False)
    op.alter_column('boarding_passes',
                    'flight_id',
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment='Flight ID')
    op.alter_column('boarding_passes',
                    'seat_no',
                    existing_type=sa.VARCHAR(length=4),
                    comment=None,
                    existing_comment='Seat number',
                    existing_nullable=False)
    op.alter_column('boarding_passes',
                    'ticket_no',
                    existing_type=sa.CHAR(length=13),
                    comment=None,
                    existing_comment='Ticket number')
    op.drop_table_comment('boarding_passes',
                          existing_comment='Boarding passes',
                          schema=None)
    op.alter_column('bookings',
                    'book_date',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    comment=None,
                    existing_comment='Booking date',
                    existing_nullable=False)
    op.alter_column('bookings',
                    'book_ref',
                    existing_type=sa.CHAR(length=6),
                    comment=None,
                    existing_comment='Booking number')
    op.alter_column('bookings',
                    'total_amount',
                    existing_type=sa.NUMERIC(precision=10, scale=2),
                    comment=None,
                    existing_comment='Total booking cost',
                    existing_nullable=False)
    op.drop_table_comment('bookings', existing_comment='Bookings', schema=None)
    op.alter_column('flights',
                    'actual_arrival',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    comment=None,
                    existing_comment='Actual arrival time',
                    existing_nullable=True)
    op.alter_column('flights',
                    'actual_departure',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    comment=None,
                    existing_comment='Actual departure time',
                    existing_nullable=True)
    op.alter_column('flights',
                    'aircraft_code',
                    existing_type=sa.CHAR(length=3),
                    comment=None,
                    existing_comment='Aircraft code, IATA',
                    existing_nullable=False)
    op.alter_column('flights',
                    'arrival_airport',
                    existing_type=sa.CHAR(length=3),
                    comment=None,
                    existing_comment='Airport of arrival',
                    existing_nullable=False)
    op.alter_column('flights',
                    'departure_airport',
                    existing_type=sa.CHAR(length=3),
                    comment=None,
                    existing_comment='Airport of departure',
                    existing_nullable=False)
    op.alter_column('flights',
                    'flight_id',
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment='Flight ID',
                    autoincrement=True)
    op.alter_column('flights',
                    'flight_no',
                    existing_type=sa.CHAR(length=6),
                    comment=None,
                    existing_comment='Flight number',
                    existing_nullable=False)
    op.alter_column('flights',
                    'scheduled_arrival',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    comment=None,
                    existing_comment='Scheduled arrival time',
                    existing_nullable=False)
    op.alter_column('flights',
                    'scheduled_departure',
                    existing_type=postgresql.TIMESTAMP(timezone=True),
                    comment=None,
                    existing_comment='Scheduled departure time',
                    existing_nullable=False)
    op.alter_column('flights',
                    'status',
                    existing_type=sa.VARCHAR(length=20),
                    comment=None,
                    existing_comment='Flight status',
                    existing_nullable=False)
    op.drop_table_comment('flights', existing_comment='Flights', schema=None)
    op.alter_column('seats',
                    'aircraft_code',
                    existing_type=sa.CHAR(length=3),
                    comment=None,
                    existing_comment='Aircraft code, IATA')
    op.alter_column('seats',
                    'fare_conditions',
                    existing_type=sa.VARCHAR(length=10),
                    comment=None,
                    existing_comment='Travel class',
                    existing_nullable=False)
    op.alter_column('seats',
                    'seat_no',
                    existing_type=sa.VARCHAR(length=4),
                    comment=None,
                    existing_comment='Seat number')
    op.drop_constraint('seats_aircraft_code_fkey', 'seats', type_='foreignkey')
    op.create_foreign_key('boarding_passes_ticket_no_fkey', 'seats',
                          'aircrafts_data', ['aircraft_code'],
                          ['aircraft_code'])
    op.drop_table_comment('seats', existing_comment='Seats', schema=None)
    op.alter_column('ticket_flights',
                    'amount',
                    existing_type=sa.NUMERIC(precision=10, scale=2),
                    comment=None,
                    existing_comment='Travel cost',
                    existing_nullable=False)
    op.alter_column('ticket_flights',
                    'fare_conditions',
                    existing_type=sa.VARCHAR(length=10),
                    comment=None,
                    existing_comment='Travel class',
                    existing_nullable=False)
    op.alter_column('ticket_flights',
                    'flight_id',
                    existing_type=sa.INTEGER(),
                    comment=None,
                    existing_comment='Flight ID')
    op.alter_column('ticket_flights',
                    'ticket_no',
                    existing_type=sa.CHAR(length=13),
                    comment=None,
                    existing_comment='Ticket number')
    op.drop_table_comment('ticket_flights',
                          existing_comment='Flight segment',
                          schema=None)
    op.alter_column('tickets',
                    'book_ref',
                    existing_type=sa.CHAR(length=6),
                    comment=None,
                    existing_comment='Booking number',
                    existing_nullable=False)
    op.alter_column('tickets',
                    'contact_data',
                    existing_type=postgresql.JSONB(astext_type=sa.Text()),
                    comment=None,
                    existing_comment='Passenger contact information',
                    existing_nullable=True)
    op.alter_column('tickets',
                    'passenger_id',
                    existing_type=sa.VARCHAR(length=20),
                    comment=None,
                    existing_comment='Passenger ID',
                    existing_nullable=False)
    op.alter_column('tickets',
                    'passenger_name',
                    existing_type=sa.TEXT(),
                    comment=None,
                    existing_comment='Passenger name',
                    existing_nullable=False)
    op.alter_column('tickets',
                    'ticket_no',
                    existing_type=sa.CHAR(length=13),
                    comment=None,
                    existing_comment='Ticket number')
    op.drop_table_comment('tickets', existing_comment='Tickets', schema=None)