def test_sql_all(self):
    """sql_all emits three CREATE TABLE statements for the commands_sql app."""
    connection = connections[DEFAULT_DB_ALIAS]
    models_module = app_cache.get_app_config('commands_sql').models_module
    statements = sql_all(models_module, no_style(), connection)
    self.assertEqual(self.count_ddl(statements, 'CREATE TABLE'), 3)
    # PostgreSQL creates one additional index for CharField
    self.assertIn(self.count_ddl(statements, 'CREATE INDEX'), [3, 4])
def test_sql_all(self):
    """sql_all returns a CREATE TABLE followed by index statements."""
    statements = sql_all(
        models.get_app("commands_sql"), no_style(), connections[DEFAULT_DB_ALIAS]
    )
    # PostgreSQL creates two indexes
    self.assertIn(len(statements), [2, 3])
    self.assertTrue(statements[0].startswith("CREATE TABLE"))
    self.assertTrue(statements[1].startswith("CREATE INDEX"))
def test_sql_all(self):
    """sql_all produces three tables; the index count varies by backend."""
    connection = connections[DEFAULT_DB_ALIAS]
    statements = sql_all(apps.get_app_config('commands_sql'), no_style(), connection)
    self.assertEqual(self.count_ddl(statements, 'CREATE TABLE'), 3)
    # Number of indexes is backend-dependent
    index_count = self.count_ddl(statements, 'CREATE INDEX')
    self.assertTrue(1 <= index_count <= 4)
def test_sql_all(self):
    """sql_all for an AppConfig yields the expected table and index DDL."""
    statements = sql_all(
        apps.get_app_config('commands_sql'),
        no_style(),
        connections[DEFAULT_DB_ALIAS],
    )
    self.assertEqual(self.count_ddl(statements, 'CREATE TABLE'), 3)
    # PostgreSQL creates one additional index for CharField
    self.assertIn(self.count_ddl(statements, 'CREATE INDEX'), [3, 4])
def handle_app(self, app, **options):
    """Drop and recreate all tables for ``app``, then reload initial data.

    Runs the app's DROP script, then its CREATE script, one statement at a
    time against the selected database, and finishes with a ``loaddata`` of
    the ``initial_data`` fixture.
    """
    db = options.get('database', DEFAULT_DB_ALIAS)
    verbosity = int(options.get('verbosity', 1))
    connection = connections[db]
    # NOTE(review): the statements are joined and then re-split on ';',
    # which assumes no literal semicolons occur inside any statement —
    # confirm for this backend. (The .encode() also implies Python 2 str
    # semantics for the later .split(';').)
    drop_queries = u'\n'.join(sql_delete(app, self.style, connection)).encode('utf-8')
    cursor = connection.cursor()
    for query in drop_queries.split(';'):
        if query != '':
            if verbosity:
                self.stdout.write('\n\nExecuting query\n%s' % query.strip())
            cursor.execute(query.strip())
    cursor.close()
    create_queries = u'\n'.join(sql_all(app, self.style, connection)).encode('utf-8')
    cursor = connection.cursor()
    for query in create_queries.split(';'):
        if query != '':
            if verbosity:
                self.stdout.write('\n\nExecuting query\n%s' % query.strip())
            cursor.execute(query.strip())
    cursor.close()
    call_command('loaddata', 'initial_data', verbosity=verbosity, database=db)
def test_sql_all(self):
    """First statement is the table, the next one an index."""
    connection = connections[DEFAULT_DB_ALIAS]
    output = sql_all(models.get_app('commands_sql'), no_style(), connection)
    # PostgreSQL creates two indexes
    self.assertIn(len(output), [2, 3])
    first, second = output[0], output[1]
    self.assertTrue(first.startswith('CREATE TABLE'))
    self.assertTrue(second.startswith('CREATE INDEX'))
def test_sql_all(self):
    """Deprecated sql_all still emits the expected DDL (warnings silenced)."""
    app_config = apps.get_app_config('commands_sql')
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=RemovedInDjango20Warning)
        statements = sql_all(app_config, no_style(), connections[DEFAULT_DB_ALIAS])
        self.assertEqual(self.count_ddl(statements, 'CREATE TABLE'), 3)
        # Number of indexes is backend-dependent
        index_total = self.count_ddl(statements, 'CREATE INDEX')
        self.assertTrue(1 <= index_total <= 4)
def test_sql_all(self):
    """Deprecated sql_all emits three tables and 3-4 indexes (warnings silenced)."""
    app_config = apps.get_app_config('commands_sql')
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=RemovedInDjango20Warning)
        output = sql_all(app_config, no_style(), connections[DEFAULT_DB_ALIAS])
        self.assertEqual(self.count_ddl(output, 'CREATE TABLE'), 3)
        # PostgreSQL creates one additional index for CharField
        self.assertIn(self.count_ddl(output, 'CREATE INDEX'), [3, 4])
def init(cls, application_names):
    """Register the combined upgrade (CREATE) and downgrade (DROP) SQL
    scripts for every named application."""
    upgrade_parts = []
    downgrade_parts = []
    for name in application_names:
        application = models.get_app(name)
        # Trailing [''] appends a final newline after each app's script.
        upgrade_parts.append(
            u'\n'.join(sql_all(application, no_style()) + ['']).encode('utf-8'))
        downgrade_parts.append(
            u'\n'.join(sql_delete(application, no_style()) + ['']).encode('utf-8'))
    cls.add(u''.join(upgrade_parts), u''.join(downgrade_parts))
def reset_db():
    """Drop and recreate the elephantblog tables on the default database.

    Rolls back and raises CommandError if any statement fails.
    (Python 2 syntax: ``except Exception, e``.)
    """
    using = DEFAULT_DB_ALIAS
    connection = connections[using]
    # DROP statements first, then the full CREATE script for the app's models.
    sql_list = sql_delete(elephantblog.models, no_style(), connection)
    sql_list += sql_all(elephantblog.models, no_style(), connection)
    try:
        cursor = connection.cursor()
        for sql in sql_list:
            cursor.execute(sql)
    except Exception, e:
        transaction.rollback_unless_managed()
        raise CommandError("Error: database couldn't be reset: %s" % e)
def create_table(cls):
    ''' Create tables for dbschema '''
    # Commit pending work so the DDL runs from a clean transaction state.
    transaction.commit()
    application = models.get_app('dbschema')
    # NOTE(review): statements are concatenated with no separator; this
    # assumes each generated statement already ends with ';' — confirm
    # for the backend in use.
    sql = ''.join(sql_all(application, no_style()))
    try:
        connection.cursor().execute(sql)
        transaction.commit()
    except StandardError, error:  # Python 2 exception syntax
        # Tables already exists
        transaction.rollback()
def test_sql_all(self):
    """sql_all output shape differs between Oracle and other backends."""
    connection = connections[DEFAULT_DB_ALIAS]
    statements = sql_all(models.get_app('commands_sql'), no_style(), connection)
    self.assertTrue(statements[0].startswith('CREATE TABLE'))
    if connection.vendor == 'oracle':
        # Oracle creates a table, a sequence, a trigger and an index
        self.assertEqual(len(statements), 4)
        self.assertIn('CREATE SEQUENCE', statements[1])
        self.assertIn('CREATE OR REPLACE TRIGGER', statements[2])
        self.assertTrue(statements[3].startswith('CREATE INDEX'))
    else:
        # PostgreSQL creates two indexes
        self.assertIn(len(statements), [2, 3])
        self.assertTrue(statements[1].startswith('CREATE INDEX'))
def test_sql_all(self):
    """Backend-specific sanity check of sql_all statement ordering."""
    output = sql_all(
        models.get_app('commands_sql'), no_style(), connections[DEFAULT_DB_ALIAS]
    )
    first, rest = output[0], output[1:]
    self.assertTrue(first.startswith('CREATE TABLE'))
    is_oracle = connections[DEFAULT_DB_ALIAS].vendor == 'oracle'
    if is_oracle:
        # Oracle creates a table, a sequence, a trigger and an index
        self.assertEqual(len(output), 4)
        self.assertIn('CREATE SEQUENCE', rest[0])
        self.assertIn('CREATE OR REPLACE TRIGGER', rest[1])
        self.assertTrue(rest[2].startswith('CREATE INDEX'))
    else:
        # PostgreSQL creates two indexes
        self.assertIn(len(output), [2, 3])
        self.assertTrue(rest[0].startswith('CREATE INDEX'))
def pre_import(self):
    """Drop constraints and indexes on the geonames tables ahead of a bulk
    import, queueing the original statements in ``self.end_stmts`` so they
    can be restored afterwards. (Python 2 syntax: ``except ..., e``.)
    """
    import psycopg2
    import re
    from django.core.management.color import no_style
    from django.core.management.sql import sql_all
    from django.db import models
    sys.path.append('../')
    sys.path.append('../../')
    # Patterns matching the DDL emitted by sql_all, paired with the
    # inverse (DROP) action for each.
    alter_re = re.compile('^ALTER TABLE "(\w+)" ADD CONSTRAINT (\w+).*', re.I)
    alter_action = 'ALTER TABLE "\g<1>" DROP CONSTRAINT "\g<2>"'
    index_re = re.compile('^CREATE INDEX "(\w+)".*', re.I)
    index_action = 'DROP INDEX "\g<1>"'
    table_re = re.compile('^CREATE TABLE "(\w+)".*', re.I)
    references_re = re.compile('"(\w+)".*?REFERENCES "(\w+)" \("(\w+)"\) DEFERRABLE INITIALLY DEFERRED')
    references_action = 'ALTER TABLE "%(table)s" DROP CONSTRAINT "%(table)s_%(field)s_fkey"'
    references_stmt = 'ALTER TABLE "%(table)s" ADD CONSTRAINT "%(table)s_%(field)s_fkey" FOREIGN KEY ("%(field)s") ' \
        'REFERENCES "%(reftable)s" ("%(reffield)s") DEFERRABLE INITIALLY DEFERRED'
    sql = sql_all(models.get_app('geonames'), no_style(), connections[DEFAULT_DB_ALIAS])
    for stmt in sql:
        if alter_re.search(stmt):
            # Drop the constraint now; re-add the original later.
            self.cursor.execute(alter_re.sub(alter_action, stmt))
            self.end_stmts.append(stmt)
        elif index_re.search(stmt):
            self.cursor.execute(index_re.sub(index_action, stmt))
            self.end_stmts.append(stmt)
        elif table_re.search(stmt):
            table = table_re.search(stmt).group(1)
            # Drop every foreign key found in the CREATE TABLE statement.
            for m in references_re.findall(stmt):
                try:
                    self.cursor.execute(references_action % \
                        {
                            'table': table,
                            'field': m[0],
                            'reftable': m[1],
                            'reffield': m[2],
                        })
                except psycopg2.ProgrammingError, e:
                    # NOTE(review): ``'constraint' in e`` relies on py2
                    # exception string containment — verify intent.
                    if 'constraint' in e and 'does not exist' in e:
                        # The constraint has already been removed
                        continue
                self.end_stmts.append(references_stmt % \
                    {
                        'table': table,
                        'field': m[0],
                        'reftable': m[1],
                        'reffield': m[2],
                    })
def install_models(app_name):
    """Create the database tables for a plugin app, via South when available."""
    app_module = load_app(get_plugin_module_name(app_name))
    if have_south(app_name):
        lang = get_language()
        # invalidate south cache to avoid very weird bugs (see #2025)
        migration.Migrations.invalidate_all_modules()
        migration.Migrations.calculate_dependencies(force=True)
        # migrate plugin with south
        call_command('migrate', app=app_name)
        # call_command activates a default 'en-us' locale in thread. we restore it
        activate(lang)
    else:
        style = no_style()
        cursor = connection.cursor()
        for statement in sql_all(app_module, style, connection):
            cursor.execute(statement)
    # update all content types
    update_all_contenttypes()
    transaction.commit()
def pre_import(self):
    """Drop constraints and indexes on the geonames tables before a bulk
    import; each statement needed to restore them is queued in
    ``self.end_stmts`` for replay afterwards.
    """
    self.end_stmts = []
    import re
    from django.core.management.color import no_style
    from django.core.management.sql import sql_all
    from django.db import models
    sys.path.append("../")
    sys.path.append("../../")
    # Patterns matching the DDL emitted by sql_all, paired with the
    # inverse (DROP) action for each.
    alter_re = re.compile('^ALTER TABLE "(\w+)" ADD CONSTRAINT (\w+).*', re.I)
    alter_action = 'ALTER TABLE "\g<1>" DROP CONSTRAINT "\g<2>"'
    index_re = re.compile('^CREATE INDEX "(\w+)".*', re.I)
    index_action = 'DROP INDEX "\g<1>"'
    table_re = re.compile('^CREATE TABLE "(\w+)".*', re.I)
    references_re = re.compile('"(\w+)".*?REFERENCES "(\w+)" \("(\w+)"\) DEFERRABLE INITIALLY DEFERRED')
    references_action = 'ALTER TABLE "%(table)s" DROP CONSTRAINT "%(table)s_%(field)s_fkey"'
    # Re-add statement intentionally omits DEFERRABLE (plain FK on restore).
    references_stmt = (
        'ALTER TABLE "%(table)s" ADD CONSTRAINT "%(table)s_%(field)s_fkey" FOREIGN KEY ("%(field)s") '
        'REFERENCES "%(reftable)s" ("%(reffield)s")'
    )
    sql = sql_all(models.get_app("geonames"), no_style(), connections[DEFAULT_DB_ALIAS])
    for stmt in sql:
        if alter_re.search(stmt):
            # Drop the constraint now; re-add the original later.
            self.cursor.execute(alter_re.sub(alter_action, stmt))
            self.end_stmts.append(stmt)
        elif index_re.search(stmt):
            self.cursor.execute(index_re.sub(index_action, stmt))
            self.end_stmts.append(stmt)
        elif table_re.search(stmt):
            table = table_re.search(stmt).group(1)
            # Drop every foreign key found in the CREATE TABLE statement.
            for m in references_re.findall(stmt):
                self.cursor.execute(
                    references_action % {"table": table, "field": m[0], "reftable": m[1], "reffield": m[2]}
                )
                self.end_stmts.append(
                    references_stmt % {"table": table, "field": m[0], "reftable": m[1], "reffield": m[2]}
                )
    self.cursor.execute("COMMIT")
def handle_app(self, app, **options):
    """Drop and recreate all tables for ``app``, then reload initial data.

    Runs the app's DROP script, then its CREATE script, one statement at a
    time against the selected database, and finishes with a ``loaddata`` of
    the ``initial_data`` fixture.

    Fixes: PEP 8 keyword-argument spacing in the call_command call
    (``verbosity = verbosity`` -> ``verbosity=verbosity``), consistent with
    the sibling command; the two identical execute loops are folded into a
    single local helper.
    """
    db = options.get('database', DEFAULT_DB_ALIAS)
    verbosity = int(options.get('verbosity', 1))
    connection = connections[db]

    def run_script(statements):
        # Join and re-split on ';' (assumes no literal semicolons inside a
        # statement), executing each non-empty statement on a fresh cursor.
        script = u'\n'.join(statements).encode('utf-8')
        cursor = connection.cursor()
        for query in script.split(';'):
            if query != '':
                if verbosity:
                    self.stdout.write('\n\nExecuting query\n%s' % query.strip())
                cursor.execute(query.strip())
        cursor.close()

    run_script(sql_delete(app, self.style, connection))
    run_script(sql_all(app, self.style, connection))
    call_command('loaddata', 'initial_data', verbosity=verbosity, database=db)
def handle_app_config(self, app_config, **options):
    """Return the full CREATE SQL for ``app_config``; None when it has no models."""
    if app_config.models_module is None:
        return
    connection = connections[options['database']]
    lines = sql_all(app_config, self.style, connection)
    return '\n'.join(lines)
def sql_reset(app, style, connection):
    "Returns a list of the DROP TABLE SQL, then the CREATE TABLE SQL, for the given module."
    statements = list(sql_delete(app, style, connection))
    statements.extend(sql_all(app, style, connection))
    return statements
def handle_app(self, app, **options):
    """Print-ready SQL script for ``app`` on the requested database."""
    connection = connections[options.get('database')]
    statements = sql_all(app, self.style, connection)
    return '\n'.join(statements)
def handle_app(self, app, **options):
    """Return the app's full SQL script as UTF-8 encoded bytes."""
    connection = connections[options.get('database')]
    script = u'\n'.join(sql_all(app, self.style, connection))
    return script.encode('utf-8')
def test_sql(self):
    """Generated DDL matches the expected script for every database alias."""
    for alias in ALIAS:
        statements = sql_all(
            models.get_app('stringfield_tests'), no_style(), connections[alias]
        )
        self.assertEqual(ALIAS_SQL[alias], u'\n'.join(statements))
def migrate():
    """One-off data migration: move AppVersion-Tree links into
    AppVersionTreeThrough rows via a throwaway 'migration_phase_1' app.

    Python 2 / pre-1.7 Django code (``print`` statements, app loading
    cache). Column removal at the end is MySQL-only.
    """
    from django.db import models, router, connections
    from django.conf import settings
    from django.core.management import color, sql
    import datetime
    import sys
    import types
    from shipping.models import AppVersion, AppVersionTreeThrough
    from life.models import Tree
    from elmo_commons.models import DurationThrough
    ship_connection = connections[router.db_for_write(AppVersion)]
    # hardcode an intermediate model to migrate AppVersion-Tree relations
    # to AppVersionTreeThrough, and get the sql details
    mig_name = "migration_phase_1"
    mig_models_name = mig_name + ".models"
    mig_module = types.ModuleType(mig_name)
    mig_module.models = types.ModuleType(mig_models_name)
    mig_module.__file__ = mig_name + "/__init.py"
    mig_module.models.__file__ = mig_name + "/models.py"
    sys.modules[mig_name] = mig_module
    sys.modules[mig_models_name] = mig_module.models
    settings.INSTALLED_APPS.append("migration_phase_1")
    # Clone the real through-model's Meta onto the intermediate model.
    meta_dict = dict(
        (k, getattr(AppVersionTreeThrough._meta, k))
        for k in ("db_table", "managed", "auto_created", "unique_together",
                  "verbose_name", "verbose_name_plural")
    )
    meta_dict["app_label"] = "migration_phase_1"
    AVTT_meta = type("Meta", (object,), meta_dict)
    InterAppVersionTreeThrough = type(
        "AppVersionTreeThrough",
        (DurationThrough,),
        {
            "Meta": AVTT_meta,
            "__module__": "migration_phase_1.models",
            "appversion": models.ForeignKey("AppVersion", related_name="trees_over_time"),
            "tree": models.ForeignKey(Tree, related_name="appvers_over_time"),
        },
    )
    mig_module.models.AppVersionTreeThrough = InterAppVersionTreeThrough
    # Same trick for a shadow AppVersion carrying both old and new fields.
    meta_dict = dict(
        (k, getattr(AppVersion._meta, k))
        for k in ("db_table", "managed", "auto_created", "verbose_name", "verbose_name_plural")
    )
    meta_dict["app_label"] = "migration_phase_1"
    AV_meta = type("Meta", (object,), meta_dict)
    InterAppVersion = type(
        "AppVersion",
        (models.Model,),
        {
            "Meta": AV_meta,
            "__module__": "migration_phase_1.models",
            "trees": models.ManyToManyField(Tree, through=InterAppVersionTreeThrough),
            "fallback": models.ForeignKey(
                "self",
                blank=True, null=True, default=None, on_delete=models.SET_NULL,
                related_name="followups"
            ),
            "accepts_signoffs": models.BooleanField(default=False),
            # tree of the previous model, name oldtree, override dbcolumn
            "tree": models.ForeignKey(Tree, blank=True, null=True),
            # lasttree works as is
            "lasttree": models.ForeignKey(Tree, related_name="legacy_appversions", blank=True, null=True),
        },
    )
    mig_module.models.AppVersion = InterAppVersion
    c = ship_connection.cursor()
    style = color.no_style()
    for stmnt in sql.sql_all(mig_module.models, style, ship_connection):
        if stmnt.startswith("CREATE TABLE"):
            if InterAppVersionTreeThrough._meta.db_table not in stmnt:
                # appversion table, we want to ALTER, not CREATE
                # find column definitions for fallback and accepts_signoffs
                for l in stmnt.splitlines():
                    if "fallback" in l or "accepts_signoffs" in l:
                        c.execute(
                            "ALTER TABLE %s ADD COLUMN %s;"
                            % (InterAppVersion._meta.db_table, l.replace(",", ""))
                        )
                continue
            else:
                # appversiontreethrough table, execute below
                pass
        elif "ADD CONSTRAINT" in stmnt or stmnt.startswith("CREATE INDEX"):
            # add constraints and indices for the appversiontreethrough table,
            # or for the fallback field.
            if InterAppVersionTreeThrough._meta.db_table in stmnt:
                # add constraints to the new table below
                pass
            elif "fallback" in stmnt:
                # for appversion, only add constraints for fallback
                pass
            else:
                continue
        else:
            print stmnt
        c.execute(stmnt)
    # Copy existing relations: tree_id becomes an open-ended through row;
    # otherwise fall back to lasttree with an end timestamp of "now".
    create_app_tree = InterAppVersion.trees.through.objects.create
    for av in InterAppVersion.objects.all():
        if av.tree_id is not None:
            create_app_tree(appversion=av, tree_id=av.tree_id, start=None)
        else:
            assert av.lasttree_id
            print "fix end of " + str(av)
            create_app_tree(appversion=av, tree_id=av.lasttree_id, start=None,
                            end=datetime.datetime.utcnow())
    # prune "migration_phase_1" app again
    del settings.INSTALLED_APPS[-1]
    from django.db.models import loading
    loading.cache.app_models.pop("migration_phase_1", None)
    loading.cache.register_models("migration_phase_1")  # clear cache
    # empty sys modules from our fake modules
    sys.modules.pop(mig_name)
    sys.modules.pop(mig_models_name)
    # we can only remove columns for mysql, let's warn if we're not that:
    if settings.DATABASES["default"]["ENGINE"].split(".")[-1] != "mysql":
        print """
WARNING
This migration can only remove tree and lasttree for mysql.
"""
        return
    # next up, find the foreign key indexes.
    constraints = []
    c.execute(
        """select CONSTRAINT_NAME from information_schema.table_constraints
where table_schema = schema()
and table_name = 'shipping_appversion';"""
    )
    for (constraint,) in c.fetchall():
        if "tree_id" in constraint:
            constraints.append(constraint)
    stmnt = """ALTER TABLE `shipping_appversion`"""
    subs = [" DROP COLUMN %s" % col for col in ("tree_id", "lasttree_id")] + [
        " DROP FOREIGN KEY %s" % constraint for constraint in constraints
    ]
    stmnt += ",".join(subs) + ";"
    c.execute(stmnt)
def handle_app_config(self, app_config, **options):
    """Return CREATE SQL for the app's models module; None when absent."""
    models_module = app_config.models_module
    if models_module is None:
        return
    connection = connections[options.get('database')]
    return '\n'.join(sql_all(models_module, self.style, connection))
def get_sql_all(app, style):
    """Thin wrapper returning all CREATE SQL for ``app`` via django management."""
    return management.sql_all(app, style)
def handle_app(self, app, **options):
    """Return the app's SQL script, newline-joined and UTF-8 encoded."""
    from django.core.management.sql import sql_all
    statements = sql_all(app, self.style)
    return '\n'.join(statements).encode('utf-8')
def test_sql_all(self):
    """sql_all refuses to run for apps that have migrations."""
    app_config = apps.get_app_config('commands_sql_migrations')
    connection = connections[DEFAULT_DB_ALIAS]
    with self.assertRaises(CommandError):
        sql_all(app_config, no_style(), connection)
def handle_app(self, app, **options):
    """Return the app's SQL script as UTF-8 bytes (unicode join)."""
    from django.core.management.sql import sql_all
    script = u'\n'.join(sql_all(app, self.style))
    return script.encode('utf-8')
def handle_app(self, app, **options):
    """Return UTF-8 encoded SQL for ``app`` on the requested database."""
    alias = options.get('database', DEFAULT_DB_ALIAS)
    statements = sql_all(app, self.style, connections[alias])
    return u'\n'.join(statements).encode('utf-8')
def migrate():
    """One-off data migration: move AppVersion-Tree links into
    AppVersionTreeThrough rows via a throwaway 'migration_phase_1' app.

    Python 2 / pre-1.7 Django code (``print`` statements, app loading
    cache). Column removal at the end is MySQL-only.
    """
    from django.db import models, router, connections
    from django.conf import settings
    from django.core.management import color, sql
    import datetime
    import sys
    import types
    from shipping.models import AppVersion, AppVersionTreeThrough
    from life.models import Tree
    from elmo_commons.models import DurationThrough
    ship_connection = connections[router.db_for_write(AppVersion)]
    # hardcode an intermediate model to migrate AppVersion-Tree relations
    # to AppVersionTreeThrough, and get the sql details
    mig_name = 'migration_phase_1'
    mig_models_name = mig_name + '.models'
    mig_module = types.ModuleType(mig_name)
    mig_module.models = types.ModuleType(mig_models_name)
    mig_module.__file__ = mig_name + '/__init.py'
    mig_module.models.__file__ = mig_name + '/models.py'
    sys.modules[mig_name] = mig_module
    sys.modules[mig_models_name] = mig_module.models
    settings.INSTALLED_APPS.append('migration_phase_1')
    # Clone the real through-model's Meta onto the intermediate model.
    meta_dict = dict(
        (k, getattr(AppVersionTreeThrough._meta, k))
        for k in ('db_table', 'managed', 'auto_created',
                  'unique_together', 'verbose_name', 'verbose_name_plural'))
    meta_dict['app_label'] = 'migration_phase_1'
    AVTT_meta = type('Meta', (object, ), meta_dict)
    InterAppVersionTreeThrough = type(
        'AppVersionTreeThrough', (DurationThrough, ),
        {
            'Meta': AVTT_meta,
            '__module__': 'migration_phase_1.models',
            'appversion': models.ForeignKey('AppVersion',
                                            related_name='trees_over_time'),
            'tree': models.ForeignKey(Tree, related_name='appvers_over_time')
        })
    mig_module.models.AppVersionTreeThrough = InterAppVersionTreeThrough
    # Same trick for a shadow AppVersion carrying both old and new fields.
    meta_dict = dict((k, getattr(AppVersion._meta, k))
                     for k in ('db_table', 'managed', 'auto_created',
                               'verbose_name', 'verbose_name_plural'))
    meta_dict['app_label'] = 'migration_phase_1'
    AV_meta = type('Meta', (object, ), meta_dict)
    InterAppVersion = type(
        'AppVersion', (models.Model, ),
        {
            'Meta': AV_meta,
            '__module__': 'migration_phase_1.models',
            'trees': models.ManyToManyField(Tree,
                                            through=InterAppVersionTreeThrough),
            'fallback': models.ForeignKey('self',
                                          blank=True, null=True,
                                          default=None,
                                          on_delete=models.SET_NULL,
                                          related_name='followups'),
            'accepts_signoffs': models.BooleanField(default=False),
            # tree of the previous model, name oldtree, override dbcolumn
            'tree': models.ForeignKey(Tree, blank=True, null=True),
            # lasttree works as is
            'lasttree': models.ForeignKey(
                Tree, related_name='legacy_appversions', blank=True, null=True)
        })
    mig_module.models.AppVersion = InterAppVersion
    c = ship_connection.cursor()
    style = color.no_style()
    for stmnt in sql.sql_all(mig_module.models, style, ship_connection):
        if stmnt.startswith('CREATE TABLE'):
            if InterAppVersionTreeThrough._meta.db_table not in stmnt:
                # appversion table, we want to ALTER, not CREATE
                # find column definitions for fallback and accepts_signoffs
                for l in stmnt.splitlines():
                    if 'fallback' in l or 'accepts_signoffs' in l:
                        c.execute('ALTER TABLE %s ADD COLUMN %s;'
                                  % (InterAppVersion._meta.db_table,
                                     l.replace(',', '')))
                continue
            else:
                # appversiontreethrough table, execute below
                pass
        elif 'ADD CONSTRAINT' in stmnt or stmnt.startswith('CREATE INDEX'):
            # add constraints and indices for the appversiontreethrough table,
            # or for the fallback field.
            if InterAppVersionTreeThrough._meta.db_table in stmnt:
                # add constraints to the new table below
                pass
            elif 'fallback' in stmnt:
                # for appversion, only add constraints for fallback
                pass
            else:
                continue
        else:
            print stmnt
        c.execute(stmnt)
    # Copy existing relations: tree_id becomes an open-ended through row;
    # otherwise fall back to lasttree with an end timestamp of "now".
    create_app_tree = InterAppVersion.trees.through.objects.create
    for av in InterAppVersion.objects.all():
        if av.tree_id is not None:
            create_app_tree(appversion=av, tree_id=av.tree_id, start=None)
        else:
            assert av.lasttree_id
            print "fix end of " + str(av)
            create_app_tree(appversion=av, tree_id=av.lasttree_id, start=None,
                            end=datetime.datetime.utcnow())
    # prune "migration_phase_1" app again
    del settings.INSTALLED_APPS[-1]
    from django.db.models import loading
    loading.cache.app_models.pop('migration_phase_1', None)
    loading.cache.register_models('migration_phase_1')  # clear cache
    # empty sys modules from our fake modules
    sys.modules.pop(mig_name)
    sys.modules.pop(mig_models_name)
    # we can only remove columns for mysql, let's warn if we're not that:
    if settings.DATABASES['default']['ENGINE'].split('.')[-1] != 'mysql':
        print """
WARNING
This migration can only remove tree and lasttree for mysql.
"""
        return
    # next up, find the foreign key indexes.
    constraints = []
    c.execute("""select CONSTRAINT_NAME from information_schema.table_constraints
where table_schema = schema()
and table_name = 'shipping_appversion';""")
    for (constraint, ) in c.fetchall():
        if 'tree_id' in constraint:
            constraints.append(constraint)
    stmnt = """ALTER TABLE `shipping_appversion`"""
    subs = [' DROP COLUMN %s' % col for col in ('tree_id', 'lasttree_id')] + \
        [' DROP FOREIGN KEY %s' % constraint for constraint in constraints]
    stmnt += ','.join(subs) + ';'
    c.execute(stmnt)