def upgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    tmpl_table = sqlalchemy.Table('raw_template', meta, autoload=True)
    newcols = [
        sqlalchemy.Column('environment', heat_db_types.Json),
        sqlalchemy.Column('predecessor', sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('raw_template.id'))
    ]
    new_template = migrate_utils.clone_table('new_raw_template', tmpl_table,
                                             meta, newcols=newcols)

    stack_table = sqlalchemy.Table('stack', meta, autoload=True)
    ignorecols = [stack_table.c.parameters.name]
    new_stack = migrate_utils.clone_table('new_stack', stack_table, meta,
                                          ignorecols=ignorecols)

    # migrate parameters to environment
    templates = list(tmpl_table.select().order_by(
        sqlalchemy.sql.expression.asc(tmpl_table.c.created_at)).execute())
    stacks = list(stack_table.select().order_by(
        sqlalchemy.sql.expression.asc(stack_table.c.created_at)).execute())
    stack_parameters = {}
    for s in stacks:
        stack_parameters[s.raw_template_id] = (s.parameters, s.deleted_at)

    colnames = [c.name for c in tmpl_table.columns]
    for template in templates:
        values = dict(
            zip(colnames,
                map(lambda colname: getattr(template, colname), colnames)))
        params, del_at = stack_parameters.get(values['id'], (None, None))
        if params is not None and del_at is None:
            values['environment'] = params
        migrate_engine.execute(new_template.insert(values))

    # migrate stacks to new table
    migrate_utils.migrate_data(migrate_engine,
                               stack_table,
                               new_stack,
                               skip_columns=['parameters'])

    # Drop old tables and rename new ones
    tmpl_table.drop()
    # add the indexes back to new table
    _add_indexes(migrate_engine, new_stack)
    new_template.rename('raw_template')
def upgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    tmpl_table = sqlalchemy.Table('raw_template', meta, autoload=True)
    newcols = [
        sqlalchemy.Column('environment', heat_db_types.Json),
        sqlalchemy.Column('predecessor', sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('raw_template.id'))]
    new_template = migrate_utils.clone_table('new_raw_template', tmpl_table,
                                             meta, newcols=newcols)

    stack_table = sqlalchemy.Table('stack', meta, autoload=True)
    ignorecols = [stack_table.c.parameters.name]
    new_stack = migrate_utils.clone_table('new_stack', stack_table, meta,
                                          ignorecols=ignorecols)

    # migrate parameters to environment
    templates = list(tmpl_table.select().order_by(
        sqlalchemy.sql.expression.asc(tmpl_table.c.created_at))
        .execute())
    stacks = list(stack_table.select().order_by(
        sqlalchemy.sql.expression.asc(stack_table.c.created_at))
        .execute())
    stack_parameters = {}
    for s in stacks:
        stack_parameters[s.raw_template_id] = (s.parameters, s.deleted_at)

    colnames = [c.name for c in tmpl_table.columns]
    for template in templates:
        values = dict(zip(colnames,
                          map(lambda colname: getattr(template, colname),
                              colnames)))
        params, del_at = stack_parameters.get(values['id'], (None, None))
        if params is not None and del_at is None:
            values['environment'] = params
        migrate_engine.execute(new_template.insert(values))

    # migrate stacks to new table
    migrate_utils.migrate_data(migrate_engine,
                               stack_table,
                               new_stack,
                               skip_columns=['parameters'])

    # Drop old tables and rename new ones
    tmpl_table.drop()
    # add the indexes back to new table
    _add_indexes(migrate_engine, new_stack)
    new_template.rename('raw_template')
def upgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    tmpl_table = sqlalchemy.Table('raw_template', meta, autoload=True)
    newcols = [
        sqlalchemy.Column('environment', heat_db_types.Json),
        sqlalchemy.Column('predecessor', sqlalchemy.Integer,
                          sqlalchemy.ForeignKey('raw_template.id'))]
    new_template = migrate_utils.clone_table('new_raw_template', tmpl_table,
                                             meta, newcols=newcols)
    new_template.create()

    stack_table = sqlalchemy.Table('stack', meta, autoload=True)
    ignorecols = [stack_table.c.parameters]
    new_stack = migrate_utils.clone_table('new_stack', stack_table, meta,
                                          ignorecols=ignorecols)
    new_stack.create()

    # migrate parameters to environment
    templates = list(tmpl_table.select().order_by(
        sqlalchemy.sql.expression.asc(tmpl_table.c.created_at))
        .execute())
    colnames = [c.name for c in tmpl_table.columns]
    for template in templates:
        values = dict(zip(colnames,
                          map(lambda colname: getattr(template, colname),
                              colnames)))
        params = stack_table.select(stack_table.c.parameters).\
            where(stack_table.c.raw_template_id == values['id']).\
            execute().fetchone()
        values['environment'] = params
        migrate_engine.execute(new_template.insert(values))

    # migrate stacks to new table
    stacks = list(stack_table.select().order_by(
        sqlalchemy.sql.expression.asc(stack_table.c.created_at))
        .execute())
    colnames = [c.name for c in stack_table.columns]
    for stack in stacks:
        values = dict(zip(colnames,
                          map(lambda colname: getattr(stack, colname),
                              colnames)))
        del values['parameters']
        migrate_engine.execute(new_stack.insert(values))

    # Drop old tables and rename new ones
    stack_table.drop()
    tmpl_table.drop()
    new_stack.rename('stack')
    new_template.rename('raw_template')
def downgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    stack_table = sqlalchemy.Table('stack', meta, autoload=True)
    newcols = [sqlalchemy.Column('parameters', heat_db_types.Json)]
    new_stack = migrate_utils.clone_table('new_stack', stack_table, meta,
                                          newcols=newcols)
    new_stack.create()

    tmpl_table = sqlalchemy.Table('raw_template', meta, autoload=True)
    ignorecols = [tmpl_table.c.environment, tmpl_table.c.predecessor]
    new_template = migrate_utils.clone_table('new_raw_template', tmpl_table,
                                             meta, ignorecols=ignorecols)
    new_template.create()

    # migrate stack data to new table
    stacks = list(stack_table.select().order_by(
        sqlalchemy.sql.expression.asc(stack_table.c.created_at))
        .execute())
    colnames = [c.name for c in stack_table.columns]
    for stack in stacks:
        values = dict(zip(colnames,
                          map(lambda colname: getattr(stack, colname),
                              colnames)))
        migrate_engine.execute(new_stack.insert(values))

    update_query = new_stack.update().values(
        parameters=sqlalchemy.select([tmpl_table.c.environment]).
        where(new_stack.c.raw_template_id == tmpl_table.c.id).
        as_scalar())
    migrate_engine.execute(update_query)

    # migrate template data to new table
    templates = list(tmpl_table.select().order_by(
        sqlalchemy.sql.expression.asc(tmpl_table.c.created_at))
        .execute())
    colnames = [c.name for c in tmpl_table.columns]
    for template in templates:
        values = dict(zip(colnames,
                          map(lambda colname: getattr(template, colname),
                              colnames)))
        del values['environment']
        del values['predecessor']
        migrate_engine.execute(new_template.insert(values))

    stack_table.drop()
    tmpl_table.drop()
    new_stack.rename('stack')
    new_template.rename('raw_template')
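# The downgrade above back-fills stack.parameters from raw_template.environment
# with a correlated scalar subquery. The helper below is only a minimal,
# self-contained illustration of that pattern: the function name and the
# 'child'/'parent' tables and their columns are hypothetical, not the Heat
# schema, and it assumes the same pre-1.4 SQLAlchemy API used in this excerpt.
import sqlalchemy


def _copy_value_via_scalar_subquery(engine, child, parent):
    # For each child row, pull the matching parent's value through a
    # correlated scalar subquery, like the parameters back-fill above.
    update_query = child.update().values(
        copied_value=sqlalchemy.select([parent.c.value]).
        where(child.c.parent_id == parent.c.id).
        as_scalar())
    engine.execute(update_query)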
def _upgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData(bind=migrate_engine)

    stack = sqlalchemy.Table('stack', meta, autoload=True)
    table_name = stack.name
    newcols = [
        sqlalchemy.Column('prev_raw_template_id', sqlalchemy.Integer,
                          sqlalchemy.ForeignKey(
                              'raw_template.id',
                              name='prev_raw_template_ref')),
        sqlalchemy.Column('current_traversal', sqlalchemy.String(36)),
        sqlalchemy.Column('current_deps', heat_db_types.Json),
    ]
    new_stack = migrate_utils.clone_table(table_name + '__tmp__', stack,
                                          meta, newcols=newcols)

    # migrate stacks into new table
    stacks = list(stack.select().order_by(
        sqlalchemy.sql.expression.asc(stack.c.created_at))
        .execute())
    colnames = [c.name for c in stack.columns]
    for s in stacks:
        values = dict(zip(colnames,
                          map(lambda colname: getattr(s, colname),
                              colnames)))
        migrate_engine.execute(new_stack.insert(values))

    # Drop old tables and rename new ones
    stack.drop()
    new_stack.rename('stack')
    # add the indexes back
    _add_indexes(migrate_engine, new_stack)
def test_clone_table_adds_or_deletes_columns(self):
    meta = MetaData()
    meta.bind = self.engine

    table = Table('dummy', meta,
                  Column('id', String(36), primary_key=True,
                         nullable=False),
                  Column('A', Boolean, default=False))
    table.create()

    newcols = [
        Column('B', Boolean, default=False),
        Column('C', String(255), default='foobar')
    ]
    ignorecols = [
        table.c.A.name
    ]
    new_table = migrate_utils.clone_table('new_dummy', table, meta,
                                          newcols=newcols,
                                          ignorecols=ignorecols)

    col_names = [c.name for c in new_table.columns]
    self.assertEqual(3, len(col_names))
    self.assertIsNotNone(new_table.c.B)
    self.assertIsNotNone(new_table.c.C)
    self.assertNotIn('A', col_names)
def _upgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData(bind=migrate_engine)

    stack = sqlalchemy.Table('stack', meta, autoload=True)
    table_name = stack.name
    newcols = [
        sqlalchemy.Column(
            'prev_raw_template_id', sqlalchemy.Integer,
            sqlalchemy.ForeignKey('raw_template.id',
                                  name='prev_raw_template_ref')),
        sqlalchemy.Column('current_traversal', sqlalchemy.String(36)),
        sqlalchemy.Column('current_deps', heat_db_types.Json),
    ]
    new_stack = migrate_utils.clone_table(table_name + '__tmp__', stack,
                                          meta, newcols=newcols)

    # migrate stacks into new table
    stacks = list(stack.select().order_by(
        sqlalchemy.sql.expression.asc(stack.c.created_at)).execute())
    colnames = [c.name for c in stack.columns]
    for s in stacks:
        values = dict(
            zip(colnames,
                map(lambda colname: getattr(s, colname), colnames)))
        migrate_engine.execute(new_stack.insert(values))

    # Drop old tables and rename new ones
    stack.drop()
    new_stack.rename('stack')
    # add the indexes back
    _add_indexes(migrate_engine, new_stack)
def test_clone_table_adds_or_deletes_columns(self):
    meta = MetaData()
    meta.bind = self.engine

    table = Table(
        'dummy', meta,
        Column('id', String(36), primary_key=True, nullable=False),
        Column('A', Boolean, default=False))
    table.create()

    newcols = [
        Column('B', Boolean, default=False),
        Column('C', String(255), default='foobar')
    ]
    ignorecols = [table.c.A.name]
    new_table = migrate_utils.clone_table('new_dummy', table, meta,
                                          newcols=newcols,
                                          ignorecols=ignorecols)

    col_names = [c.name for c in new_table.columns]
    self.assertEqual(3, len(col_names))
    self.assertIsNotNone(new_table.c.B)
    self.assertIsNotNone(new_table.c.C)
    self.assertNotIn('A', col_names)
def _downgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData(bind=migrate_engine)

    stack = sqlalchemy.Table('stack', meta, autoload=True)
    table_name = stack.name

    # ignore CheckConstraints and FK Constraint on prev_raw_template_id.
    ignorecols = [
        stack.c.prev_raw_template_id.name,
        stack.c.current_traversal.name,
        stack.c.current_deps.name,
    ]
    ignorecons = [
        'prev_raw_template_ref',
    ]
    new_stack = migrate_utils.clone_table(table_name + '__tmp__', stack,
                                          meta, ignorecols=ignorecols,
                                          ignorecons=ignorecons)
    migrate_utils.migrate_data(
        migrate_engine,
        stack,
        new_stack,
        ['prev_raw_template_id', 'current_traversal', 'current_deps'])

    # add the indexes back to new table
    _add_indexes(migrate_engine, new_stack)
def _downgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData(bind=migrate_engine)

    stack = sqlalchemy.Table('stack', meta, autoload=True)
    table_name = stack.name

    # ignore CheckConstraints and FK Constraint on prev_raw_template_id.
    ignorecols = [
        stack.c.prev_raw_template_id.name,
        stack.c.current_traversal.name,
        stack.c.current_deps.name,
    ]
    ignorecons = [
        'prev_raw_template_ref',
    ]
    new_stack = migrate_utils.clone_table(table_name + '__tmp__', stack,
                                          meta, ignorecols=ignorecols,
                                          ignorecons=ignorecons)
    migrate_utils.migrate_data(migrate_engine,
                               stack,
                               new_stack,
                               ['prev_raw_template_id', 'current_traversal',
                                'current_deps'])

    # add the indexes back to new table
    _add_indexes(migrate_engine, new_stack)
def _downgrade_062_sqlite(migrate_engine, metadata, table):
    new_table = migrate_utils.clone_table(
        table.name + '__tmp__', table, metadata,
        ignorecols=['parent_resource_name'])
    migrate_utils.migrate_data(migrate_engine, table, new_table,
                               ['parent_resource_name'])
def test_clone_table_retains_constraints(self):
    meta = MetaData()
    meta.bind = self.engine

    parent = Table('parent', meta,
                   Column('id', String(36), primary_key=True,
                          nullable=False),
                   Column('A', Integer),
                   Column('B', Integer),
                   Column('C', Integer,
                          CheckConstraint('C>100', name="above 100")),
                   Column('D', Integer, unique=True),
                   UniqueConstraint('A', 'B', name='uix_1'))
    parent.create()

    child = Table('child', meta,
                  Column('id', String(36),
                         ForeignKey('parent.id', name="parent_ref"),
                         primary_key=True, nullable=False),
                  Column('A', Boolean, default=False))
    child.create()

    ignorecols = [
        parent.c.D.name,
    ]
    new_parent = migrate_utils.clone_table('new_parent', parent, meta,
                                           ignorecols=ignorecols)
    new_child = migrate_utils.clone_table('new_child', child, meta)

    self.assertTrue(_has_constraint(new_parent.constraints,
                                    UniqueConstraint, 'uix_1'))
    self.assertTrue(_has_constraint(new_parent.c.C.constraints,
                                    CheckConstraint, 'above 100'))
    self.assertTrue(_has_constraint(new_child.constraints,
                                    ForeignKeyConstraint, 'parent_ref'))
def test_clone_table_retains_constraints(self):
    meta = MetaData()
    meta.bind = self.engine

    parent = Table(
        'parent', meta,
        Column('id', String(36), primary_key=True, nullable=False),
        Column('A', Integer),
        Column('B', Integer),
        Column('C', Integer, CheckConstraint('C>100', name="above 100")),
        Column('D', Integer, unique=True),
        UniqueConstraint('A', 'B', name='uix_1'))
    parent.create()

    child = Table(
        'child', meta,
        Column('id', String(36),
               ForeignKey('parent.id', name="parent_ref"),
               primary_key=True, nullable=False),
        Column('A', Boolean, default=False))
    child.create()

    ignorecols = [
        parent.c.D.name,
    ]
    new_parent = migrate_utils.clone_table('new_parent', parent, meta,
                                           ignorecols=ignorecols)
    new_child = migrate_utils.clone_table('new_child', child, meta)

    self.assertTrue(
        _has_constraint(new_parent.constraints, UniqueConstraint, 'uix_1'))
    self.assertTrue(
        _has_constraint(new_parent.c.C.constraints, CheckConstraint,
                        'above 100'))
    self.assertTrue(
        _has_constraint(new_child.constraints, ForeignKeyConstraint,
                        'parent_ref'))
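# The constraint assertions in these tests use a _has_constraint helper whose
# body is not part of this excerpt. A plausible sketch, assuming it only needs
# to match a constraint by class and name within a table's or column's
# constraint set:
def _has_constraint(cons_set, type_, name):
    # True if any constraint in the set is an instance of the given
    # constraint class and carries the expected name.
    return any(isinstance(con, type_) and con.name == name
               for con in cons_set)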
def upgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    tmpl_table = sqlalchemy.Table('raw_template', meta, autoload=True)
    ignorecols = [tmpl_table.c.predecessor.name]
    new_template = migrate_utils.clone_table('new_raw_template', tmpl_table,
                                             meta, ignorecols=ignorecols)

    # migrate templates to new table
    migrate_utils.migrate_data(migrate_engine,
                               tmpl_table,
                               new_template,
                               skip_columns=['predecessor'])
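# migrate_utils.migrate_data does the row copy and table swap for the SQLite
# branches above, but its body is not included in this excerpt. The sketch
# below is an assumption inferred from how it is called here (and from the
# manual copy loops in the older migrations above), not the canonical
# implementation: copy rows in created_at order, drop any skipped columns,
# then replace the old table with the populated clone.
import sqlalchemy


def migrate_data_sketch(migrate_engine, table, new_table, skip_columns=None):
    table_name = table.name

    rows = list(table.select().order_by(
        sqlalchemy.sql.expression.asc(table.c.created_at)).execute())
    colnames = [c.name for c in table.columns]
    for row in rows:
        values = dict(zip(colnames,
                          (getattr(row, colname) for colname in colnames)))
        for column in skip_columns or []:
            del values[column]
        migrate_engine.execute(new_table.insert(values))

    # Replace the old table with the populated clone.
    table.drop()
    new_table.rename(table_name)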
def test_clone_table_ignores_constraints(self):
    meta = MetaData()
    meta.bind = self.engine

    table = Table(
        'constraints_check', meta,
        Column('id', String(36), primary_key=True, nullable=False),
        Column('A', Integer),
        Column('B', Integer),
        Column('C', Integer, CheckConstraint('C>100', name="above 100")),
        UniqueConstraint('A', 'B', name='uix_1'))
    table.create()

    ignorecons = [
        'uix_1',
    ]
    new_table = migrate_utils.clone_table('constraints_check_tmp', table,
                                          meta, ignorecons=ignorecons)
    self.assertFalse(
        _has_constraint(new_table.constraints, UniqueConstraint, 'uix_1'))
def test_clone_table_swaps_columns(self):
    meta = MetaData()
    meta.bind = self.engine

    table = Table("dummy1", meta,
                  Column('id', String(36), primary_key=True,
                         nullable=False),
                  Column('A', Boolean, default=False),
                  )
    table.create()

    swapcols = {
        'A': Column('A', Integer, default=1),
    }
    new_table = migrate_utils.clone_table('swap_dummy', table, meta,
                                          swapcols=swapcols)
    self.assertIsNotNone(new_table.c.A)
    self.assertEqual(Integer, type(new_table.c.A.type))
def _downgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    resource_table = sqlalchemy.Table('resource', meta, autoload=True)
    ignorecons = ['current_template_fkey_ref']
    ignorecols = [resource_table.c.current_template_id.name,
                  resource_table.c.needed_by.name,
                  resource_table.c.requires.name,
                  resource_table.c.replaces.name,
                  resource_table.c.replaced_by.name]
    new_resource = migrate_utils.clone_table('new_resource', resource_table,
                                             meta, ignorecols=ignorecols,
                                             ignorecons=ignorecons)

    # migrate resources to new table
    migrate_utils.migrate_data(migrate_engine,
                               resource_table,
                               new_resource,
                               skip_columns=ignorecols)
def _downgrade_sqlite(migrate_engine):
    meta = sqlalchemy.MetaData()
    meta.bind = migrate_engine

    resource_table = sqlalchemy.Table('resource', meta, autoload=True)
    ignorecons = ['current_template_fkey_ref']
    ignorecols = [
        resource_table.c.current_template_id.name,
        resource_table.c.needed_by.name,
        resource_table.c.requires.name,
        resource_table.c.replaces.name,
        resource_table.c.replaced_by.name
    ]
    new_resource = migrate_utils.clone_table('new_resource', resource_table,
                                             meta, ignorecols=ignorecols,
                                             ignorecons=ignorecons)

    # migrate resources to new table
    migrate_utils.migrate_data(migrate_engine,
                               resource_table,
                               new_resource,
                               skip_columns=ignorecols)
def test_clone_table_swaps_columns(self):
    meta = MetaData()
    meta.bind = self.engine

    table = Table(
        "dummy1", meta,
        Column('id', String(36), primary_key=True, nullable=False),
        Column('A', Boolean, default=False),
    )
    table.create()

    swapcols = {
        'A': Column('A', Integer, default=1),
    }
    new_table = migrate_utils.clone_table('swap_dummy', table, meta,
                                          swapcols=swapcols)
    self.assertIsNotNone(new_table.c.A)
    self.assertEqual(Integer, type(new_table.c.A.type))
def test_clone_table_ignores_constraints(self):
    meta = MetaData()
    meta.bind = self.engine

    table = Table('constraints_check', meta,
                  Column('id', String(36), primary_key=True,
                         nullable=False),
                  Column('A', Integer),
                  Column('B', Integer),
                  Column('C', Integer,
                         CheckConstraint('C>100', name="above 100")),
                  UniqueConstraint('A', 'B', name='uix_1'))
    table.create()

    ignorecons = [
        'uix_1',
    ]
    new_table = migrate_utils.clone_table('constraints_check_tmp', table,
                                          meta, ignorecons=ignorecons)
    self.assertFalse(_has_constraint(new_table.constraints,
                                     UniqueConstraint, 'uix_1'))
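# Taken together, these tests exercise the pattern every SQLite branch of the
# migrations above follows: clone the table without the unwanted column, then
# copy the rows and swap the tables. A hypothetical generic helper built on
# the same two utilities would look like the sketch below; the function name
# is made up for illustration, and migrate_utils is assumed to be the same
# helper module used throughout this excerpt.
import sqlalchemy


def _drop_column_sqlite(migrate_engine, table_name, column_name):
    meta = sqlalchemy.MetaData(bind=migrate_engine)
    table = sqlalchemy.Table(table_name, meta, autoload=True)

    # Clone the table without the column being dropped.
    new_table = migrate_utils.clone_table(table.name + '__tmp__', table, meta,
                                          ignorecols=[column_name])
    # Copy the remaining columns, drop the old table and rename the clone.
    migrate_utils.migrate_data(migrate_engine, table, new_table,
                               skip_columns=[column_name])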