def sql_create(app, db_name=None):
    """Build the SQL needed to create every model in an app.

    Provides compatibility with all supported versions of Django:
    on 1.7+ the schema editor collects the statements; on older
    versions the legacy ``django.core.management.sql`` helpers are
    used instead.

    Args:
        app (module): The application module.
        db_name (str, optional): The database connection name.
            Defaults to the default database connection.

    Returns:
        list: The list of SQL statements used to create the models
        for the app.
    """
    connection = connections[db_name or DEFAULT_DB_ALIAS]

    if not BaseDatabaseSchemaEditor:
        # Django < 1.7: fall back to the legacy SQL generators.
        style = color.no_style()

        return (sql.sql_create(app, style, connection) +
                sql.sql_indexes(app, style, connection))

    # Django >= 1.7: have the schema editor collect the SQL instead
    # of executing it.
    with connection.schema_editor(collect_sql=True) as schema_editor:
        for model in get_models(app):
            schema_editor.create_model(model)

    return schema_editor.collected_sql
def test_sql_indexes(self):
    """sql_indexes() emits a backend-dependent number of CREATE INDEX."""
    app_config = apps.get_app_config('commands_sql')

    with warnings.catch_warnings():
        # These SQL commands are deprecated; silence the warning.
        warnings.simplefilter("ignore", category=RemovedInDjango20Warning)
        statements = sql_indexes(app_config, no_style(),
                                 connections[DEFAULT_DB_ALIAS])

    # Number of indexes is backend-dependent
    index_count = self.count_ddl(statements, 'CREATE INDEX')
    self.assertTrue(1 <= index_count <= 4)
def proposed_indexes(self):
    """Return all indexes Django proposes and the SQL for each index.

    Returns:
        tuple: ``(proposed_indexes, index_sql)`` where
        ``proposed_indexes`` maps table name to a list of field names
        and ``index_sql`` maps index name to a list of the CREATE INDEX
        statements that define it.
    """
    all_indexes = []
    proposed_indexes = {}
    index_sql = {}

    # Collect the CREATE INDEX SQL Django would emit for every app.
    for app in get_apps():
        all_indexes.append(
            u'\n'.join(sql_indexes(app, no_style(), connection))
            .encode('utf-8'))

    # Sort out all the proposed indexes by table.
    for index in all_indexes:
        for ind in index.split('\n'):
            match = index_re.search(ind)

            if match is None:
                # Not a CREATE INDEX statement we recognize; skip it.
                # (Previously hidden by a bare except, which also
                # swallowed real errors.)
                continue

            name, table, field = match.groups()

            # dict.has_key() was removed in Python 3; setdefault()
            # replaces the has_key()/else insertion dance.
            proposed_indexes.setdefault(table, []).append(field)
            index_sql.setdefault(name, []).append(ind)

    return proposed_indexes, index_sql
def test_sql_indexes(self):
    """PostgreSQL emits one extra CREATE INDEX for CharField columns."""
    app_config = apps.get_app_config('commands_sql')

    with warnings.catch_warnings():
        # These SQL commands are deprecated; silence the warning.
        warnings.simplefilter("ignore", category=RemovedInDjango20Warning)
        statements = sql_indexes(app_config, no_style(),
                                 connections[DEFAULT_DB_ALIAS])

    # PostgreSQL creates one additional index for CharField
    self.assertIn(self.count_ddl(statements, 'CREATE INDEX'), [3, 4])
def handle(self, *args, **options):
    """Compare the indexes Django would create against the indexes that
    actually exist in the database, logging any that are missing.

    With ``options['show']`` set, the CREATE INDEX SQL for the missing
    indexes is logged instead of a short "is missing" message.
    """
    all_indexes = []
    proposed_indexes = {}
    index_sql = {}

    # Collect the CREATE INDEX SQL Django proposes for each installed app.
    for app in get_apps():
        all_indexes.append(u'\n'.join(
            sql_indexes(app, no_style(), connection)).encode('utf-8'))

    # Sort out all the proposed indexes by table.
    #   proposed_indexes: table name -> list of index names
    #   index_sql:        index name -> list of CREATE INDEX statements
    for index in all_indexes:
        indice = index.split('\n')

        for ind in indice:
            try:
                match = index_re.search(ind)
                # match is None for lines that are not CREATE INDEX
                # statements; the bare except below swallows the
                # resulting AttributeError.
                name, table, field = match.groups()

                if table in proposed_indexes:
                    proposed_indexes[table].append(name)
                else:
                    proposed_indexes[table] = [name]

                if name in index_sql:
                    index_sql[name].append(ind)
                else:
                    index_sql[name] = [ind]
            except:
                # NOTE(review): bare except also hides real errors; it
                # is only needed for non-matching lines (match is None).
                pass

    # Now get all the real indexes, keyed by table.
    indexes = {}
    cursor = connection.cursor()
    vals = cursor.execute(CURRENT_INDEX_SQL)  # NOTE(review): vals is unused
    sql_back = cursor.fetchall()

    for row in sql_back:
        name, table = row

        if table in indexes:
            indexes[table].append(name)
        else:
            indexes[table] = [name]

    # For all the proposed indexes, see if they exist.
    # If not, tell us!
    # NOTE(review): loop variables are misleadingly named — given the
    # dictionaries built above, prop_name is actually a table name and
    # prop_tables/table are index names.
    for prop_name, prop_tables in proposed_indexes.items():
        for table in prop_tables:
            try:
                if not table in indexes[prop_name]:
                    if not options['show']:
                        logger.info("(%s, %s) is missing", prop_name, table)
                    else:
                        for index in index_sql[table]:
                            if prop_name in index:
                                logger.info(index)
            except KeyError:
                # No real indexes were recorded for this table at all.
                if not options['show']:
                    logger.info("No Indexes for %s in original db", prop_name)
                else:
                    for index in index_sql[table]:
                        if table in index:
                            logger.info(index)
def handle(self, *args, **options):
    """Compare the indexes Django would create against the indexes that
    actually exist in the database, logging any that are missing.

    With ``options['show']`` set, the CREATE INDEX SQL for the missing
    indexes is logged instead of a short "is missing" message.
    """
    all_indexes = []
    proposed_indexes = {}
    index_sql = {}

    # Collect the CREATE INDEX SQL Django proposes for each installed app.
    for app in get_apps():
        all_indexes.append(u'\n'.join(sql_indexes(app, no_style(),
                                                  connection)).encode('utf-8'))

    # Sort out all the proposed indexes by table.
    #   proposed_indexes: table name -> list of index names
    #   index_sql:        index name -> list of CREATE INDEX statements
    for index in all_indexes:
        indice = index.split('\n')

        for ind in indice:
            try:
                match = index_re.search(ind)
                # match is None for lines that are not CREATE INDEX
                # statements; the bare except below swallows the
                # resulting AttributeError.
                name, table, field = match.groups()

                if table in proposed_indexes:
                    proposed_indexes[table].append(name)
                else:
                    proposed_indexes[table] = [name]

                if name in index_sql:
                    index_sql[name].append(ind)
                else:
                    index_sql[name] = [ind]
            except:
                # NOTE(review): bare except also hides real errors; it
                # is only needed for non-matching lines (match is None).
                pass

    # Now get all the real indexes, keyed by table.
    indexes = {}
    cursor = connection.cursor()
    vals = cursor.execute(CURRENT_INDEX_SQL)  # NOTE(review): vals is unused
    sql_back = cursor.fetchall()

    for row in sql_back:
        name, table = row

        if table in indexes:
            indexes[table].append(name)
        else:
            indexes[table] = [name]

    # For all the proposed indexes, see if they exist.
    # If not, tell us!
    # NOTE(review): loop variables are misleadingly named — given the
    # dictionaries built above, prop_name is actually a table name and
    # prop_tables/table are index names.
    for prop_name, prop_tables in proposed_indexes.items():
        for table in prop_tables:
            try:
                if not table in indexes[prop_name]:
                    if not options['show']:
                        logger.info("(%s, %s) is missing", prop_name, table)
                    else:
                        for index in index_sql[table]:
                            if prop_name in index:
                                logger.info(index)
            except KeyError:
                # No real indexes were recorded for this table at all.
                if not options['show']:
                    logger.info("No Indexes for %s in original db", prop_name)
                else:
                    for index in index_sql[table]:
                        if table in index:
                            logger.info(index)
def create_model_tables():
    """Create the database tables for the test support models.

    Django performs this logic inside its ``django.core.management``
    commands, where it is not reusable from test code, so the same
    steps (generate the CREATE TABLE and CREATE INDEX SQL, then run
    it) are reproduced here.
    """
    style = no_style()
    app = d51.django.apps.tagging.tests.support.models
    execute_sql(sql_create(app, style) + sql_indexes(app, style))
def get_sql(self, appname):
    """Return the creation, custom, index and deletion SQL for an app.

    Args:
        appname (str): Name of the application to inspect.

    Returns:
        tuple: ``(create, custom, indexes, delete)`` — each a string of
        newline-joined SQL statements for the default database.
    """
    from django.core.management.sql import (sql_delete, sql_create,
                                            sql_custom, sql_indexes)
    from django.db import connections, DEFAULT_DB_ALIAS

    app = self.get_app(appname)
    db = connections[DEFAULT_DB_ALIAS]

    statement_lists = (
        sql_create(app, self.style, db),   # table creation statements
        sql_custom(app, self.style, db),   # custom SQL statements
        sql_indexes(app, self.style, db),  # index creation statements
        sql_delete(app, self.style, db),   # delete statements
    )

    return tuple('\n'.join(chunk) for chunk in statement_lists)
def execute_test_sql(start, end, sql, debug=False): """ Execute a test SQL sequence. This method also creates and destroys the database tables required by the models registered against the test application. start and end are the start- and end-point states of the application cache. sql is the list of sql statements to execute. cleanup is a list of extra sql statements required to clean up. This is primarily for any extra m2m tables that were added during a test that won't be cleaned up by Django's sql_delete() implementation. debug is a helper flag. It displays the ALL the SQL that would be executed, (including setup and teardown SQL), and executes the Django-derived setup/teardown SQL. """ # Set up the initial state of the app cache cache.app_models['tests'] = copy.deepcopy(start) # Install the initial tables and indicies style = no_style() execute_transaction(sql_create(evo_test, style), output=debug) execute_transaction(sql_indexes(evo_test, style), output=debug) create_test_data(models.get_models(evo_test)) # Set the app cache to the end state cache.app_models['tests'] = copy.deepcopy(end) try: # Execute the test sql if debug: write_sql(sql) else: execute_transaction(sql, output=True) finally: # Cleanup the apps. if debug: print sql_delete(evo_test, style) else: execute_transaction(sql_delete(evo_test, style), output=debug)
def test_sql_indexes(self):
    """sql_indexes() emits a backend-dependent number of CREATE INDEX."""
    app_config = apps.get_app_config('commands_sql')
    statements = sql_indexes(app_config, no_style(),
                             connections[DEFAULT_DB_ALIAS])

    # Number of indexes is backend-dependent
    index_count = self.count_ddl(statements, 'CREATE INDEX')
    self.assertTrue(1 <= index_count <= 4)
def handle_app(self, app, **options):
    """Return the CREATE INDEX statements for ``app`` as UTF-8 bytes."""
    alias = options.get('database', DEFAULT_DB_ALIAS)
    statements = sql_indexes(app, self.style, connections[alias])

    return u'\n'.join(statements).encode('utf-8')
def test_sql_indexes(self):
    """PostgreSQL emits one extra CREATE INDEX for CharField columns."""
    app_config = apps.get_app_config('commands_sql')
    statements = sql_indexes(app_config, no_style(),
                             connections[DEFAULT_DB_ALIAS])

    # PostgreSQL creates one additional index for CharField
    self.assertIn(self.count_ddl(statements, 'CREATE INDEX'), [3, 4])
def handle_app_config(self, app_config, **options):
    """Return the CREATE INDEX SQL for ``app_config``'s models.

    Apps without a models module produce no output.
    """
    if app_config.models_module is None:
        return

    connection = connections[options.get('database')]

    return '\n'.join(sql_indexes(app_config, self.style, connection))
def handle_app(self, app, **options):
    """Return the CREATE INDEX statements for ``app`` as UTF-8 bytes."""
    from django.core.management.sql import sql_indexes

    statements = sql_indexes(app, self.style)

    return u'\n'.join(statements).encode('utf-8')
def handle_app(self, app, **options):
    """Return the CREATE INDEX statements for ``app`` as UTF-8 bytes."""
    alias = options.get('database', DEFAULT_DB_ALIAS)
    statements = sql_indexes(app, self.style, connections[alias])

    return u'\n'.join(statements).encode('utf-8')
def handle_app(self, app, **options):
    """Return the CREATE INDEX statements for ``app`` as UTF-8 bytes."""
    connection = connections[options.get("database")]
    statements = sql_indexes(app, self.style, connection)

    return u"\n".join(statements).encode("utf-8")
def handle_app(self, app, **options):
    """Return the CREATE INDEX statements for ``app`` as one string."""
    connection = connections[options.get('database')]
    statements = sql_indexes(app, self.style, connection)

    return '\n'.join(statements)
def handle_app(self, app, **options):
    """Return the CREATE INDEX statements for ``app`` as one string."""
    db = connections[options.get('database')]

    return '\n'.join(sql_indexes(app, self.style, db))
def test_sql_indexes(self):
    """sql_indexes() refuses to run for apps that have migrations."""
    app_config = apps.get_app_config('commands_sql_migrations')

    with self.assertRaises(CommandError):
        sql_indexes(app_config, no_style(), connections[DEFAULT_DB_ALIAS])
def test_sql_indexes(self):
    """PostgreSQL emits one extra CREATE INDEX for CharField columns."""
    models_module = app_cache.get_app_config('commands_sql').models_module
    statements = sql_indexes(models_module, no_style(),
                             connections[DEFAULT_DB_ALIAS])

    # PostgreSQL creates one additional index for CharField
    self.assertIn(self.count_ddl(statements, 'CREATE INDEX'), [3, 4])
def modeltest(app_name, use_aka=True):
    """Run a schema-evolution round-trip test for ``app_name``.

    The app's models are reset to the 'pre' state and created in the
    database, then reloaded in the 'post' state; the evolution diff and
    the introspected evolution SQL are written to ``*.actual`` files in
    the app directory for later comparison.

    Args:
        app_name: Directory/app label of the test application.
        use_aka: Whether to enable deseb's rename ("aka") support.

    Returns:
        The residual diff remaining after applying the evolution
        actions (empty when the evolution round-trips cleanly).
    """
    if not app_name:
        raise Exception("No test name given")

    if not os.path.exists('settings.py'):
        raise Exception('Oops... file settings.py does not exist! Please copy your settings there!')

    from django.conf import settings
    from django.db.models.loading import get_apps, get_app
    from deseb.schema_evolution import evolvediff
    from django.core.management.color import no_style
    from deseb.actions import get_introspected_evolution_options
    from django.core.management.sql import sql_create, sql_indexes
    from django.db.transaction import commit_on_success
    from django.db import connection

    # NOTE: Python 2 print statements throughout this function.
    if DEBUG:
        print "Test %s" % app_name

    # Reset on post state and pre state.
    from deseb import add_aka_support

    if use_aka:
        add_aka_support()

    style = no_style()

    # Restrict INSTALLED_APPS to the base apps plus the app under test.
    settings.INSTALLED_APPS = tuple(list(settings.INSTALLED_APPS[:5]) + [app_name])

    write_file(app_name+"/models.py", '') # re-init models.py
    write_file(app_name+"/errdiff.%s.actual" % settings.DATABASE_ENGINE, "")
    write_file(app_name+"/errors.%s.actual" % settings.DATABASE_ENGINE, "")

    get_apps()
    drop_all_tables()
    reload_models(app_name, 'pre')

    app = get_app(app_name)

    # Record the creation SQL for the 'pre' models.
    create = sql_create(app, style) + sql_indexes(app, style)
    write_file(app_name+"/init.%s.actual" % settings.DATABASE_ENGINE, create)
    #FIXME: compare to init.correct later instead of copying
    write_file(app_name+"/init.%s.planned" % settings.DATABASE_ENGINE, create)

    # Create the 'pre' tables and indexes in the database.
    reset = sql_create(app, style)
    commit_on_success(run_sql)(reset)
    reset_idx = sql_indexes(app, style)
    run_sql(reset_idx)

    # Switch the models to the 'post' state and compute the evolution.
    reload_models(app_name, 'post')

    if use_aka:
        from deseb.storage import update_with_aka, save_renames
        update_with_aka(app_name)
        save_renames(app_name)

    cursor = connection.cursor()
    db_schema, model_schema = get_schemas(cursor, app, style)
    diff = show_evolution_plan(cursor, app, style, db_schema, model_schema)
    write_file(app_name+"/diff.%s.actual" % settings.DATABASE_ENGINE, diff)
    #FIXME: compare to diff.correct later instead of copying
    write_file(app_name+"/diff.%s.planned" % settings.DATABASE_ENGINE, diff)

    # Generate and record the proposed evolution SQL actions.
    actions = get_introspected_evolution_options(app, style, db_schema, model_schema)
    write_file(app_name+"/actions.%s.actual" % settings.DATABASE_ENGINE, actions)
    #FIXME: compare to diff.correct later instead of copying
    write_file(app_name+"/actions.%s.planned" % settings.DATABASE_ENGINE, actions)

    # Apply the evolution; roll back and re-raise on any failure.
    try:
        commit_on_success(run_sql)(actions)
    except:
        from django.db import transaction
        transaction.rollback()
        raise

    # Re-introspect to find any residual differences ("errors").
    cursor = connection.cursor()
    db_schema, model_schema = get_schemas(cursor, app, style, model_schema=model_schema)
    # Due to a sqlite3/pysqlite bug that defers index creation, the DB
    # schema has to be re-fetched a second time; reading the indexes
    # doesn't work correctly on the first pass (cause unknown).
    db_schema, model_schema = get_schemas(cursor, app, style, model_schema=model_schema)
    diff = show_evolution_plan(cursor, app, style, db_schema, model_schema)
    write_file(app_name+"/errdiff.%s.actual" % settings.DATABASE_ENGINE, diff)

    # Everything after the first line of the diff is residual errors.
    diff1 = diff.split('\n',1)[1]

    if diff1:
        print "Errors:"
        print diff1

    try:
        actions, db_schema, model_schema = get_introspected_evolution_options(app, style, db_schema, model_schema)
    except Exception:
        actions = ['Was unable to generate error diff SQL commands']

    write_file(app_name+"/errors.%s.actual" % settings.DATABASE_ENGINE, actions)
    #FIXME: compare to diff.correct later instead of copying
    #write_file(app_name+"/errors.%s.planned" % settings.DATABASE_ENGINE, actions)

    return diff1
def handle_app_config(self, app_config, **options):
    """Return the CREATE INDEX SQL for the app's models module.

    Apps without a models module produce no output.
    """
    if app_config.models_module is None:
        return

    connection = connections[options.get('database')]
    statements = sql_indexes(app_config.models_module, self.style,
                             connection)

    return '\n'.join(statements)
def test_sql_indexes(self):
    """One CREATE INDEX statement is generated (two on PostgreSQL)."""
    app = models.get_app('commands_sql')
    statements = sql_indexes(app, no_style(), connections[DEFAULT_DB_ALIAS])

    # PostgreSQL creates two indexes
    self.assertIn(len(statements), [1, 2])
    self.assertTrue(statements[0].startswith("CREATE INDEX"))
def test_sql_indexes(self):
    """One CREATE INDEX statement is generated (two on PostgreSQL)."""
    app = models.get_app("commands_sql")
    statements = sql_indexes(app, no_style(), connections[DEFAULT_DB_ALIAS])

    # PostgreSQL creates two indexes
    self.assertIn(len(statements), [1, 2])
    self.assertTrue(statements[0].startswith("CREATE INDEX"))
def handle_app(self, app, **options):
    """Return the CREATE INDEX statements for ``app`` as UTF-8 bytes."""
    from django.core.management.sql import sql_indexes

    statements = sql_indexes(app, self.style)

    return '\n'.join(statements).encode('utf-8')