def downgrade():
    """Drop FAB Tables"""
    conn = op.get_bind()
    inspector = inspect(conn)
    existing_tables = inspector.get_table_names()
    fab_tables = [
        "ab_permission",
        "ab_view_menu",
        "ab_role",
        "ab_permission_view",
        "ab_permission_view_role",
        "ab_user",
        "ab_user_role",
        "ab_register_user",
    ]
    # First pass: drop foreign-key constraints (skipped on SQLite, which
    # cannot drop constraints) so the tables can then be dropped safely.
    for table in fab_tables:
        if table not in existing_tables:
            continue
        for fk in inspector.get_foreign_keys(table):
            if conn.dialect.name != "sqlite":
                op.drop_constraint(fk.get('name'), table, type_='foreignkey')
    # Second pass: drop the tables themselves. On SQLite, FK enforcement is
    # toggled off around each drop instead of dropping constraints.
    for table in fab_tables:
        if table not in existing_tables:
            continue
        if conn.dialect.name == "sqlite":
            op.execute("PRAGMA foreign_keys=off")
            op.drop_table(table)
            op.execute("PRAGMA foreign_keys=on")
        else:
            op.drop_table(table)
def downgrade():
    """Unapply Increase length of ``Flask-AppBuilder`` ``ab_view_menu.name`` column"""
    conn = op.get_bind()
    inspector = inspect(conn)
    if "ab_view_menu" not in inspector.get_table_names():
        return
    if conn.dialect.name == "sqlite":
        # SQLite has no usable ALTER COLUMN: rebuild the table through a
        # temporary copy with the narrower VARCHAR(100) column.
        op.execute("PRAGMA foreign_keys=off")
        op.execute("""
            CREATE TABLE IF NOT EXISTS ab_view_menu_dg_tmp
            (
                id INTEGER NOT NULL PRIMARY KEY,
                name VARCHAR(100) NOT NULL UNIQUE
            );
            """)
        op.execute("INSERT INTO ab_view_menu_dg_tmp(id, name) select id, name from ab_view_menu;")
        op.execute("DROP TABLE ab_view_menu")
        op.execute("ALTER TABLE ab_view_menu_dg_tmp rename to ab_view_menu;")
        op.execute("PRAGMA foreign_keys=on")
    else:
        # Other dialects support in-place column alteration.
        op.alter_column(
            table_name='ab_view_menu',
            column_name='name',
            type_=sa.String(length=100),
            nullable=False,
        )
def downgrade():
    """Unapply Drop ``KubeResourceVersion`` and ``KubeWorkerId``entifier tables"""
    conn = op.get_bind()
    existing = inspect(conn).get_table_names()
    # Recreate each table only when it is actually missing.
    if WORKER_UUID_TABLE not in existing:
        _add_worker_uuid_table()
    if WORKER_RESOURCEVERSION_TABLE not in existing:
        _add_resource_table()
def upgrade():
    """Apply Drop ``KubeResourceVersion`` and ``KubeWorkerId``entifier tables"""
    conn = op.get_bind()
    existing = inspect(conn).get_table_names()
    # Drop each table only if it exists; order matches the original migration.
    for table in (WORKER_UUID_TABLE, WORKER_RESOURCEVERSION_TABLE):
        if table in existing:
            op.drop_table(table)
def create_db(self):
    """Create the FAB security tables when the ``ab_user`` table is missing.

    Inspects the session's bind; if ``ab_user`` is absent, creates all
    tables from ``Base.metadata``, then delegates to the parent
    ``create_db``. On any failure the error is logged and the process
    exits with status 1.
    """
    try:
        engine = self.get_session.get_bind(mapper=None, clause=None)
        inspector = sqla_compat.inspect(engine)
        if "ab_user" not in inspector.get_table_names():
            log.info(c.LOGMSG_INF_SEC_NO_DB)
            Base.metadata.create_all(engine)
            log.info(c.LOGMSG_INF_SEC_ADD_DB)
        super().create_db()
    except Exception as e:
        log.error(c.LOGMSG_ERR_SEC_CREATE_DB.format(str(e)))
        # ``exit()`` is an interactive helper injected by the ``site`` module
        # and may be absent (e.g. under ``python -S``); raising SystemExit
        # directly is the reliable, behavior-equivalent way to exit.
        raise SystemExit(1)
def upgrade():
    """Apply Remove id column from xcom"""
    conn = op.get_bind()
    inspector = inspect(conn)
    with op.batch_alter_table('xcom') as bop:
        # Names of the columns currently present on the xcom table; the id
        # column only exists on databases that predate this migration.
        xcom_columns = [
            col.get('name') for col in inspector.get_columns("xcom")
        ]
        if "id" in xcom_columns:
            if conn.dialect.name == 'mssql':
                # MSSQL refuses to drop a column that still has constraints
                # attached, so drop any constraints on ``id`` first.
                constraint_dict = get_table_constraints(conn, "xcom")
                drop_column_constraints(bop, 'id', constraint_dict)
            bop.drop_column('id')
            bop.drop_index('idx_xcom_dag_task_date')
            # mssql doesn't allow primary keys with nullable columns
            if conn.dialect.name != 'mssql':
                bop.create_primary_key(
                    'pk_xcom', ['dag_id', 'task_id', 'key', 'execution_date'])
def upgrade(): op.add_column('task_instance', sa.Column('max_tries', sa.Integer, server_default="-1")) # Check if table task_instance exist before data migration. This check is # needed for database that does not create table until migration finishes. # Checking task_instance table exists prevent the error of querying # non-existing task_instance table. connection = op.get_bind() inspector = inspect(connection) tables = inspector.get_table_names() if 'task_instance' in tables: # Get current session sessionmaker = sa.orm.sessionmaker() session = sessionmaker(bind=connection) if not bool(session.query(TaskInstance).first()): return dagbag = DagBag(settings.DAGS_FOLDER) query = session.query(sa.func.count(TaskInstance.max_tries)).filter(TaskInstance.max_tries == -1) # Separate db query in batch to prevent loading entire table # into memory and cause out of memory error. while query.scalar(): tis = session.query(TaskInstance).filter(TaskInstance.max_tries == -1).limit(BATCH_SIZE).all() for ti in tis: dag = dagbag.get_dag(ti.dag_id) if not dag or not dag.has_task(ti.task_id): # task_instance table might not have the up-to-date # information, i.e dag or task might be modified or # deleted in dagbag but is reflected in task instance # table. In this case we do not retry the task that can't # be parsed. ti.max_tries = ti.try_number else: task = dag.get_task(ti.task_id) if task.retries: ti.max_tries = task.retries else: ti.max_tries = ti.try_number session.merge(ti) session.commit() # Commit the current session. session.commit()
def upgrade():
    """Create the ``sensor_instance`` table and its indexes (idempotent)."""
    conn = op.get_bind()
    inspector = inspect(conn)
    if 'sensor_instance' in inspector.get_table_names():
        # Table already present -- nothing to do.
        return
    op.create_table(
        'sensor_instance',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('task_id', StringID(), nullable=False),
        sa.Column('dag_id', StringID(), nullable=False),
        sa.Column('execution_date', TIMESTAMP, nullable=False),
        sa.Column('state', sa.String(length=20), nullable=True),
        sa.Column('try_number', sa.Integer(), nullable=True),
        sa.Column('start_date', TIMESTAMP, nullable=True),
        sa.Column('operator', sa.String(length=1000), nullable=False),
        sa.Column('op_classpath', sa.String(length=1000), nullable=False),
        sa.Column('hashcode', sa.BigInteger(), nullable=False),
        sa.Column('shardcode', sa.Integer(), nullable=False),
        sa.Column('poke_context', sa.Text(), nullable=False),
        sa.Column('execution_context', sa.Text(), nullable=True),
        sa.Column('created_at', TIMESTAMP, default=func.now(), nullable=False),
        sa.Column('updated_at', TIMESTAMP, default=func.now(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    # (index name, indexed columns, unique flag) for every secondary index.
    index_specs = [
        ('ti_primary_key', ['dag_id', 'task_id', 'execution_date'], True),
        ('si_hashcode', ['hashcode'], False),
        ('si_shardcode', ['shardcode'], False),
        ('si_state_shard', ['state', 'shardcode'], False),
        ('si_updated_at', ['updated_at'], False),
    ]
    for index_name, columns, unique in index_specs:
        op.create_index(index_name, 'sensor_instance', columns, unique=unique)
def upgrade():
    """Add the ``is_encrypted`` column to ``connection`` and backfill it to False.

    Skips the migration entirely when the column is already present (users
    upgrading from pre-Alembic Airflow versions may already have it).
    """
    # first check if the user already has this done. This should only be
    # true for users who are upgrading from a previous version of Airflow
    # that predates Alembic integration
    conn = op.get_bind()
    inspector = inspect(conn)
    # this will only be true if 'connection' already exists in the db,
    # but not if alembic created it in a previous migration
    if 'connection' in inspector.get_table_names():
        col_names = [c['name'] for c in inspector.get_columns('connection')]
        if 'is_encrypted' in col_names:
            return
    op.add_column(
        'connection',
        sa.Column('is_encrypted', sa.Boolean, unique=False, default=False))
    # ``conn`` bound above is still the active connection; the original code
    # redundantly called op.get_bind() a second time here.
    conn.execute(connectionhelper.update().values(is_encrypted=False))
def upgrade():
    """Create the kube resource-version single-row table if it is missing."""
    conn = op.get_bind()
    inspector = inspect(conn)
    if RESOURCE_TABLE in inspector.get_table_names():
        return
    columns_and_constraints = [
        sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True),
        sa.Column("resource_version", sa.String(255)),
    ]
    # alembic creates an invalid SQL for mssql and mysql dialects
    dialect = conn.dialect.name
    if dialect == "mysql":
        columns_and_constraints.append(
            sa.CheckConstraint("one_row_id<>0", name="kube_resource_version_one_row_id")
        )
    elif dialect != "mssql":
        columns_and_constraints.append(
            sa.CheckConstraint("one_row_id", name="kube_resource_version_one_row_id")
        )
    table = op.create_table(RESOURCE_TABLE, *columns_and_constraints)
    # Seed the single bookkeeping row with an empty resource version.
    op.bulk_insert(table, [{"resource_version": ""}])
def upgrade():
    """Apply Add ``run_type`` column in ``dag_run`` table"""
    run_type_col_type = sa.String(length=50)
    conn = op.get_bind()
    inspector = inspect(conn)
    existing_columns = {col.get('name') for col in inspector.get_columns("dag_run")}
    if "run_type" in existing_columns:
        return
    # Add nullable column
    with op.batch_alter_table("dag_run") as batch_op:
        batch_op.add_column(sa.Column("run_type", run_type_col_type, nullable=True))
    # Generate run type for existing records
    session = sa.orm.sessionmaker()(bind=conn)
    for run_type in DagRunType:
        # Infer the run type from the conventional run_id prefix.
        session.query(DagRun).filter(
            DagRun.run_id.like(f"{run_type.value}__%")
        ).update({DagRun.run_type: run_type.value}, synchronize_session=False)
    # Anything without a recognizable prefix is treated as a manual run.
    session.query(DagRun).filter(DagRun.run_type.is_(None)).update(
        {DagRun.run_type: DagRunType.MANUAL.value}, synchronize_session=False
    )
    session.commit()
    # Make run_type not nullable
    with op.batch_alter_table("dag_run") as batch_op:
        batch_op.alter_column(
            "run_type",
            existing_type=run_type_col_type,
            type_=run_type_col_type,
            nullable=False,
        )
def upgrade():
    """Drop the legacy ``chart`` and ``users`` tables."""
    # We previously had a KnownEvent's table, but we deleted the table without
    # a down migration to remove it (so we didn't delete anyone's data if they
    # were happening to use the feature.
    #
    # But before we can delete the users table we need to drop the FK
    conn = op.get_bind()
    inspector = inspect(conn)
    tables = inspector.get_table_names()
    if 'known_event' in tables:
        fkeys = inspector.get_foreign_keys(table_name="known_event", referred_table="users")
        # Unnamed FKs cannot be dropped by name; skip them.
        named_fkeys = [fkey for fkey in fkeys if fkey['name']]
        for fkey in named_fkeys:
            with op.batch_alter_table(table_name='known_event') as bop:
                bop.drop_constraint(fkey['name'], type_="foreignkey")
    if "chart" in tables:
        op.drop_table("chart")
    if "users" in tables:
        op.drop_table("users")
def upgrade():
    """Create the core Airflow tables when they do not already exist.

    Each ``op.create_table`` is guarded by a membership check against the
    inspector's table list, so the migration is safe to run on databases
    that already contain some or all of these tables.
    """
    conn = op.get_bind()
    inspector = inspect(conn)
    tables = inspector.get_table_names()
    if 'connection' not in tables:
        op.create_table(
            'connection',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('conn_id', StringID(), nullable=True),
            sa.Column('conn_type', sa.String(length=500), nullable=True),
            sa.Column('host', sa.String(length=500), nullable=True),
            sa.Column('schema', sa.String(length=500), nullable=True),
            sa.Column('login', sa.String(length=500), nullable=True),
            sa.Column('password', sa.String(length=500), nullable=True),
            sa.Column('port', sa.Integer(), nullable=True),
            sa.Column('extra', sa.String(length=5000), nullable=True),
            sa.PrimaryKeyConstraint('id'),
        )
    if 'dag' not in tables:
        op.create_table(
            'dag',
            sa.Column('dag_id', StringID(), nullable=False),
            sa.Column('is_paused', sa.Boolean(), nullable=True),
            sa.Column('is_subdag', sa.Boolean(), nullable=True),
            sa.Column('is_active', sa.Boolean(), nullable=True),
            sa.Column('last_scheduler_run', sa.DateTime(), nullable=True),
            sa.Column('last_pickled', sa.DateTime(), nullable=True),
            sa.Column('last_expired', sa.DateTime(), nullable=True),
            sa.Column('scheduler_lock', sa.Boolean(), nullable=True),
            sa.Column('pickle_id', sa.Integer(), nullable=True),
            sa.Column('fileloc', sa.String(length=2000), nullable=True),
            sa.Column('owners', sa.String(length=2000), nullable=True),
            sa.PrimaryKeyConstraint('dag_id'),
        )
    if 'dag_pickle' not in tables:
        op.create_table(
            'dag_pickle',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('pickle', sa.PickleType(), nullable=True),
            sa.Column('created_dttm', sa.DateTime(), nullable=True),
            sa.Column('pickle_hash', sa.BigInteger(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
        )
    if 'import_error' not in tables:
        op.create_table(
            'import_error',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('timestamp', sa.DateTime(), nullable=True),
            sa.Column('filename', sa.String(length=1024), nullable=True),
            sa.Column('stacktrace', sa.Text(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
        )
    if 'job' not in tables:
        op.create_table(
            'job',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('dag_id', sa.String(length=250), nullable=True),
            sa.Column('state', sa.String(length=20), nullable=True),
            sa.Column('job_type', sa.String(length=30), nullable=True),
            sa.Column('start_date', sa.DateTime(), nullable=True),
            sa.Column('end_date', sa.DateTime(), nullable=True),
            sa.Column('latest_heartbeat', sa.DateTime(), nullable=True),
            sa.Column('executor_class', sa.String(length=500), nullable=True),
            sa.Column('hostname', sa.String(length=500), nullable=True),
            sa.Column('unixname', sa.String(length=1000), nullable=True),
            sa.PrimaryKeyConstraint('id'),
        )
        op.create_index('job_type_heart', 'job', ['job_type', 'latest_heartbeat'], unique=False)
    if 'log' not in tables:
        op.create_table(
            'log',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('dttm', sa.DateTime(), nullable=True),
            sa.Column('dag_id', StringID(), nullable=True),
            sa.Column('task_id', StringID(), nullable=True),
            sa.Column('event', sa.String(length=30), nullable=True),
            sa.Column('execution_date', sa.DateTime(), nullable=True),
            sa.Column('owner', sa.String(length=500), nullable=True),
            sa.PrimaryKeyConstraint('id'),
        )
    if 'sla_miss' not in tables:
        op.create_table(
            'sla_miss',
            sa.Column('task_id', StringID(), nullable=False),
            sa.Column('dag_id', StringID(), nullable=False),
            sa.Column('execution_date', sa.DateTime(), nullable=False),
            sa.Column('email_sent', sa.Boolean(), nullable=True),
            sa.Column('timestamp', sa.DateTime(), nullable=True),
            sa.Column('description', sa.Text(), nullable=True),
            sa.PrimaryKeyConstraint('task_id', 'dag_id', 'execution_date'),
        )
    if 'slot_pool' not in tables:
        op.create_table(
            'slot_pool',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('pool', StringID(length=50), nullable=True),
            sa.Column('slots', sa.Integer(), nullable=True),
            sa.Column('description', sa.Text(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('pool'),
        )
    if 'task_instance' not in tables:
        op.create_table(
            'task_instance',
            sa.Column('task_id', StringID(), nullable=False),
            sa.Column('dag_id', StringID(), nullable=False),
            sa.Column('execution_date', sa.DateTime(), nullable=False),
            sa.Column('start_date', sa.DateTime(), nullable=True),
            sa.Column('end_date', sa.DateTime(), nullable=True),
            sa.Column('duration', sa.Integer(), nullable=True),
            sa.Column('state', sa.String(length=20), nullable=True),
            sa.Column('try_number', sa.Integer(), nullable=True),
            sa.Column('hostname', sa.String(length=1000), nullable=True),
            sa.Column('unixname', sa.String(length=1000), nullable=True),
            sa.Column('job_id', sa.Integer(), nullable=True),
            sa.Column('pool', sa.String(length=50), nullable=True),
            sa.Column('queue', sa.String(length=50), nullable=True),
            sa.Column('priority_weight', sa.Integer(), nullable=True),
            sa.PrimaryKeyConstraint('task_id', 'dag_id', 'execution_date'),
        )
        op.create_index('ti_dag_state', 'task_instance', ['dag_id', 'state'], unique=False)
        op.create_index('ti_pool', 'task_instance', ['pool', 'state', 'priority_weight'], unique=False)
        op.create_index(
            'ti_state_lkp',
            'task_instance',
            ['dag_id', 'task_id', 'execution_date', 'state'],
            unique=False,
        )
    if 'user' not in tables:
        op.create_table(
            'user',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('username', StringID(), nullable=True),
            sa.Column('email', sa.String(length=500), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('username'),
        )
    if 'variable' not in tables:
        op.create_table(
            'variable',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('key', StringID(), nullable=True),
            sa.Column('val', sa.Text(), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('key'),
        )
    if 'chart' not in tables:
        op.create_table(
            'chart',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('label', sa.String(length=200), nullable=True),
            sa.Column('conn_id', sa.String(length=250), nullable=False),
            sa.Column('user_id', sa.Integer(), nullable=True),
            sa.Column('chart_type', sa.String(length=100), nullable=True),
            sa.Column('sql_layout', sa.String(length=50), nullable=True),
            sa.Column('sql', sa.Text(), nullable=True),
            sa.Column('y_log_scale', sa.Boolean(), nullable=True),
            sa.Column('show_datatable', sa.Boolean(), nullable=True),
            sa.Column('show_sql', sa.Boolean(), nullable=True),
            sa.Column('height', sa.Integer(), nullable=True),
            sa.Column('default_params', sa.String(length=5000), nullable=True),
            sa.Column('x_is_date', sa.Boolean(), nullable=True),
            sa.Column('iteration_no', sa.Integer(), nullable=True),
            sa.Column('last_modified', sa.DateTime(), nullable=True),
            sa.ForeignKeyConstraint(
                ['user_id'],
                ['user.id'],
            ),
            sa.PrimaryKeyConstraint('id'),
        )
    if 'xcom' not in tables:
        op.create_table(
            'xcom',
            sa.Column('id', sa.Integer(), nullable=False),
            sa.Column('key', StringID(length=512), nullable=True),
            # NOTE(review): ``default=func.now`` passes the generator attribute
            # rather than calling ``func.now()`` as the sibling migrations do --
            # confirm this produces the intended insert-time default.
            sa.Column('value', sa.PickleType(), nullable=True),
            sa.Column('timestamp', sa.DateTime(), default=func.now, nullable=False),
            sa.Column('execution_date', sa.DateTime(), nullable=False),
            sa.Column('task_id', StringID(), nullable=False),
            sa.Column('dag_id', StringID(), nullable=False),
            sa.PrimaryKeyConstraint('id'),
        )
def upgrade():
    """Create FAB Tables

    Each table is created only when absent, so this is safe to run against
    databases where Flask-AppBuilder already created its schema. Tables
    referenced by foreign keys (permission, view_menu, role, user) are
    created before the tables that reference them.
    """
    conn = op.get_bind()
    inspector = inspect(conn)
    tables = inspector.get_table_names()
    if "ab_permission" not in tables:
        op.create_table(
            'ab_permission',
            sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
            sa.Column('name', sa.String(length=100), nullable=False),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('name'),
        )
    if "ab_view_menu" not in tables:
        op.create_table(
            'ab_view_menu',
            sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
            sa.Column('name', sa.String(length=100), nullable=False),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('name'),
        )
    if "ab_role" not in tables:
        op.create_table(
            'ab_role',
            sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
            sa.Column('name', sa.String(length=64), nullable=False),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('name'),
        )
    if "ab_permission_view" not in tables:
        # Association of a permission with a view menu.
        op.create_table(
            'ab_permission_view',
            sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
            sa.Column('permission_id', sa.Integer(), nullable=True),
            sa.Column('view_menu_id', sa.Integer(), nullable=True),
            sa.ForeignKeyConstraint(['permission_id'], ['ab_permission.id']),
            sa.ForeignKeyConstraint(['view_menu_id'], ['ab_view_menu.id']),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('permission_id', 'view_menu_id'),
        )
    if "ab_permission_view_role" not in tables:
        # Association of a permission-view pair with a role.
        op.create_table(
            'ab_permission_view_role',
            sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
            sa.Column('permission_view_id', sa.Integer(), nullable=True),
            sa.Column('role_id', sa.Integer(), nullable=True),
            sa.ForeignKeyConstraint(['permission_view_id'], ['ab_permission_view.id']),
            sa.ForeignKeyConstraint(['role_id'], ['ab_role.id']),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint("permission_view_id", "role_id"),
        )
    if "ab_user" not in tables:
        # Note the self-referencing FKs for created_by/changed_by audit columns.
        op.create_table(
            'ab_user',
            sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
            sa.Column('first_name', sa.String(length=64), nullable=False),
            sa.Column('last_name', sa.String(length=64), nullable=False),
            sa.Column('username', sa.String(length=64), nullable=False),
            sa.Column('password', sa.String(length=256), nullable=True),
            sa.Column('active', sa.Boolean(), nullable=True),
            sa.Column('email', sa.String(length=64), nullable=False),
            sa.Column('last_login', sa.DateTime(), nullable=True),
            sa.Column('login_count', sa.Integer(), nullable=True),
            sa.Column('fail_login_count', sa.Integer(), nullable=True),
            sa.Column('created_on', sa.DateTime(), nullable=True),
            sa.Column('changed_on', sa.DateTime(), nullable=True),
            sa.Column('created_by_fk', sa.Integer(), nullable=True),
            sa.Column('changed_by_fk', sa.Integer(), nullable=True),
            sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id']),
            sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id']),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('email'),
            sa.UniqueConstraint('username'),
        )
    if "ab_user_role" not in tables:
        # Association of a user with a role.
        op.create_table(
            'ab_user_role',
            sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
            sa.Column('user_id', sa.Integer(), nullable=True),
            sa.Column('role_id', sa.Integer(), nullable=True),
            sa.ForeignKeyConstraint(
                ['role_id'],
                ['ab_role.id'],
            ),
            sa.ForeignKeyConstraint(
                ['user_id'],
                ['ab_user.id'],
            ),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('user_id', 'role_id'),
        )
    if "ab_register_user" not in tables:
        # Pending self-registration requests awaiting confirmation.
        op.create_table(
            'ab_register_user',
            sa.Column('id', sa.Integer(), nullable=False, primary_key=True),
            sa.Column('first_name', sa.String(length=64), nullable=False),
            sa.Column('last_name', sa.String(length=64), nullable=False),
            sa.Column('username', sa.String(length=64), nullable=False),
            sa.Column('password', sa.String(length=256), nullable=True),
            sa.Column('email', sa.String(length=64), nullable=False),
            sa.Column('registration_date', sa.DateTime(), nullable=True),
            sa.Column('registration_hash', sa.String(length=256), nullable=True),
            sa.PrimaryKeyConstraint('id'),
            sa.UniqueConstraint('username'),
        )
def downgrade():
    """Drop the kube resource-version table when it exists."""
    bind = op.get_bind()
    existing = inspect(bind).get_table_names()
    if RESOURCE_TABLE in existing:
        op.drop_table(RESOURCE_TABLE)
def downgrade():
    """Remove the ``sensor_instance`` table if it is present."""
    bind = op.get_bind()
    existing = inspect(bind).get_table_names()
    if 'sensor_instance' in existing:
        op.drop_table('sensor_instance')