def create_default_anonymous_user(app, created_models, verbosity, db, **kwargs):
    """Post-syncdb hook: insert the anonymous User row with a fixed pk.

    Runs only in databases where Django just created the User table and the
    router allows syncing User there.
    """
    # Only create the default sites in databases where Django created the table.
    if User not in created_models or not router.allow_syncdb(db, User):
        return
    if verbosity >= 2:
        print("Creating anonymous User object")
    anon = User(
        pk=settings.ANONYMOUS_USER_ID,
        username='******',
        first_name='Anonymous',
        last_name='User',
    )
    # Prevent default profiles from being created while the pk sequence is
    # still out of date.
    anon.is_setup = True
    anon.save(using=db)
    # We set an explicit pk instead of relying on auto-incrementation,
    # so we need to reset the database sequence. See #17415.
    reset_statements = connections[db].ops.sequence_reset_sql(no_style(), [User])
    if not reset_statements:
        # Fall back to resetting the Entity sequence — presumably User is a
        # proxy/subclass of Entity here; TODO confirm against the models.
        reset_statements = connections[db].ops.sequence_reset_sql(no_style(), [Entity])
    if reset_statements:
        if verbosity >= 2:
            print("Resetting sequence")
        cursor = connections[db].cursor()
        for statement in reset_statements:
            cursor.execute(statement)
    # Sequence fixed up; allow default profiles to be created now.
    anon.is_setup = False
    anon.save(using=db)
def remove_field_constraints(self, field, opts, models, refs):
    """Return SQL dropping FK constraints that reference a primary-key column.

    Scans the M2M through-tables of ``opts`` for fields pointing back at
    ``field`` and records them in ``models``/``refs`` (both mutated in place),
    then emits the constraint-removal SQL for every collected model.

    Fix: the original assigned ``style = color.no_style()`` twice; the
    redundant second assignment is removed.
    """
    sql = []
    if field.primary_key:
        creation = self.connection.creation
        style = color.no_style()
        for f in opts.local_many_to_many:
            if f.rel and f.rel.through:
                through = f.rel.through
                for m2m_f in through._meta.local_fields:
                    # Match through-table FK columns that target this pk.
                    if (m2m_f.rel and
                            m2m_f.rel.to._meta.db_table == opts.db_table and
                            m2m_f.rel.field_name == field.column):
                        models.append(m2m_f.rel.to)
                        refs.setdefault(m2m_f.rel.to, []).append(
                            (through, m2m_f))
        remove_refs = refs.copy()
        for relto in models:
            sql.extend(creation.sql_remove_table_constraints(
                relto, remove_refs, style))
    return sql
def sync_db(self):
    '''
    Dynamically create the database table for ``self.model_class`` using the
    same DDL-generation code that syncdb uses, then add a PostGIS geometry
    column to the new table.

    Copied from:
    /usr/local/lib/python2.6/dist-packages/django/core/management/commands/syncdb.py
    '''
    from django.core.management.color import no_style
    from django.db import connection, transaction

    cursor = connection.cursor()
    tables = connection.introspection.table_names()
    seen_models = connection.introspection.installed_models(tables)
    created_models = set()
    pending_references = {}

    sql, references = connection.creation.sql_create_model(
        self.model_class, no_style(), seen_models)
    seen_models.add(self.model_class)
    created_models.add(self.model_class)
    # Resolve forward references to already-seen models.
    for refto, refs in references.items():
        pending_references.setdefault(refto, []).extend(refs)
        if refto in seen_models:
            sql.extend(connection.creation.sql_for_pending_references(
                refto, no_style(), pending_references))
    sql.extend(connection.creation.sql_for_pending_references(
        self.model_class, no_style(), pending_references))
    # Append point geometry by calling the PostGIS function.
    # (Stray trailing semicolon on this statement removed.)
    sql.append("select AddGeometryColumn('public','%s','point',4326,'POINT',2)"
               % self.form.table_name)
    for statement in sql:
        cursor.execute(statement)
    transaction.commit_unless_managed()
def sync_models(model_list):
    """Create database tables for every model in ``model_list``.

    Mirrors the DDL-generation loop used by syncdb: emits CREATE statements
    plus any pending cross-model reference SQL, executing each statement as
    the models are processed.
    """
    tables = connection.introspection.table_names()
    seen_models = connection.introspection.installed_models(tables)
    created_models = set()
    pending_references = {}
    cursor = connection.cursor()
    for model in model_list:
        statements, references = connection.creation.sql_create_model(
            model, no_style(), seen_models)
        seen_models.add(model)
        created_models.add(model)
        # Queue references; flush the ones whose targets already exist.
        for target, refs in references.items():
            pending_references.setdefault(target, []).extend(refs)
            if target in seen_models:
                statements.extend(connection.creation.sql_for_pending_references(
                    target, no_style(), pending_references))
        statements.extend(connection.creation.sql_for_pending_references(
            model, no_style(), pending_references))
        for ddl in statements:
            cursor.execute(ddl)
        tables.append(connection.introspection.table_name_converter(
            model._meta.db_table))
def test_router_honored(self):
    """Every sql_* helper must emit nothing when the router forbids the app."""
    app_config = apps.get_app_config('commands_sql')
    commands = (sql_all, sql_create, sql_delete, sql_indexes, sql_destroy_indexes)
    for sql_command in commands:
        # sql_delete takes an extra close_connection flag.
        extra = {'close_connection': False} if sql_command is sql_delete else {}
        output = sql_command(app_config, no_style(),
                             connections[DEFAULT_DB_ALIAS], **extra)
        self.assertEqual(len(output), 0,
                         "%s command is not honoring routers" % sql_command.__name__)
def init(cls, application_names):
    """Record the initial upgrade/downgrade SQL for the named applications."""
    applications = [models.get_app(app_name) for app_name in application_names]
    # Full schema creation SQL, one newline-joined blob per app.
    upgrade = u''.join(
        u'\n'.join(sql_all(application, no_style()) + ['']).encode('utf-8')
        for application in applications)
    # Matching teardown SQL.
    downgrade = u''.join(
        u'\n'.join(sql_delete(application, no_style()) + ['']).encode('utf-8')
        for application in applications)
    cls.add(upgrade, downgrade)
def reset_db():
    """Drop and recreate the elephantblog tables on the default database.

    Raises CommandError (after rolling back) if any statement fails.

    Fix: Python 2-only ``except Exception, e`` replaced with
    ``except Exception as e`` (valid on Python 2.6+ and 3.x).
    """
    using = DEFAULT_DB_ALIAS
    connection = connections[using]
    # Teardown first, then full re-creation.
    sql_list = sql_delete(elephantblog.models, no_style(), connection)
    sql_list += sql_all(elephantblog.models, no_style(), connection)
    try:
        cursor = connection.cursor()
        for sql in sql_list:
            cursor.execute(sql)
    except Exception as e:
        transaction.rollback_unless_managed()
        raise CommandError("Error: database couldn't be reset: %s" % e)
def test_router_honored(self):
    """sql_* helpers must respect routers; sql_delete still emits one line."""
    app_config = apps.get_app_config('commands_sql')
    for sql_command in (sql_all, sql_create, sql_delete,
                        sql_indexes, sql_destroy_indexes):
        if sql_command is sql_delete:
            output = sql_command(app_config, no_style(),
                                 connections[DEFAULT_DB_ALIAS],
                                 close_connection=False)
            # "App creates no tables in the database. Nothing to do."
            expected_output = 1
        else:
            output = sql_command(app_config, no_style(),
                                 connections[DEFAULT_DB_ALIAS])
            expected_output = 0
        self.assertEqual(
            len(output), expected_output,
            "%s command is not honoring routers" % sql_command.__name__)
def load_app(app_path):
    """Load a test app and install tables (and indexes) for its models.

    Python 2 module: keeps ``print`` usage but in a form that is also valid
    expression syntax; uses ``except ... as`` (identical behavior on 2.6+).
    """
    testapp = django_load_app(app_path)
    app_name = testapp.__name__.split('.')[-2]
    connection = connections[DEFAULT_DB_ALIAS]
    cursor = connection.cursor()
    test_models = [
        m for m in models.get_models(testapp, include_auto_created=True)
        if router.allow_syncdb(DEFAULT_DB_ALIAS, m)
    ]
    loaded_models[app_path] = test_models
    # We assume the models haven't been installed, otherwise there's more to
    # do here. Get a list of already installed *models* so references resolve.
    tables = connection.introspection.table_names()
    seen_models = connection.introspection.installed_models(tables)
    pending_references = {}
    verbosity = 0
    # Create the tables for each model.
    for model in test_models:
        if verbosity >= 2:
            print("Processing %s.%s model" % (app_name, model._meta.object_name))
        sql, references = connection.creation.sql_create_model(
            model, no_style(), seen_models)
        seen_models.add(model)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in seen_models:
                sql.extend(connection.creation.sql_for_pending_references(
                    refto, no_style(), pending_references))
        sql.extend(connection.creation.sql_for_pending_references(
            model, no_style(), pending_references))
        if verbosity >= 1 and sql:
            print("Creating table %s" % model._meta.db_table)
        for statement in sql:
            cursor.execute(statement)
        tables.append(connection.introspection.table_name_converter(
            model._meta.db_table))
    transaction.commit_unless_managed(using=DEFAULT_DB_ALIAS)
    # Install indexes once all tables exist.
    for model in test_models:
        index_sql = connection.creation.sql_indexes_for_model(model, no_style())
        if not index_sql:
            continue
        if verbosity >= 1:
            print("Installing index for %s.%s model" % (app_name, model._meta.object_name))
        try:
            for sql in index_sql:
                cursor.execute(sql)
        except Exception as e:
            sys.stderr.write("Failed to install index for %s.%s model: %s\n" %
                             (app_name, model._meta.object_name, e))
            transaction.rollback_unless_managed(using=DEFAULT_DB_ALIAS)
        else:
            transaction.commit_unless_managed(using=DEFAULT_DB_ALIAS)
def get_sync_sql(self, field_name, missing_langs, model):
    """Return the SQL needed to sync the schema for a new translatable field.

    For every missing language: add the per-language column; for the default
    language also copy data from the old untranslated column and re-add the
    NOT NULL constraint afterwards. Finally drop the old column — but only if
    the field was not translatable before.
    """
    qn = connection.ops.quote_name
    style = no_style()
    statements = []
    db_table = model._meta.db_table
    was_translatable_before = self.was_translatable_before(field_name, db_table)
    for lang in missing_langs:
        f = model._meta.get_field(get_real_fieldname(field_name, lang))
        col_type = f.db_type()
        column_def = ' '.join([style.SQL_FIELD(qn(f.column)),
                               style.SQL_COLTYPE(col_type)])
        # column creation
        statements.append("ALTER TABLE %s ADD COLUMN %s;"
                          % (qn(db_table), column_def))
        if lang == self.default_lang and not was_translatable_before:
            # data copy from old field (only for default language)
            statements.append("UPDATE %s SET %s = %s"
                              % (qn(db_table), qn(f.column), qn(field_name)))
        if not f.null and lang == self.default_lang:
            # changing to NOT NULL after having data copied
            statements.append("ALTER TABLE %s MODIFY COLUMN %s %s %s;"
                              % (qn(db_table), qn(f.column), col_type,
                                 style.SQL_KEYWORD('NOT NULL')))
    if not was_translatable_before:
        # we drop field only if field was not translatable before
        statements.append("ALTER TABLE %s DROP COLUMN %s;"
                          % (qn(db_table), qn(field_name)))
    return statements
def create_default_site(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, **kwargs):
    """Ensure a Site row pinned to settings.SITE_ID exists in ``using``."""
    try:
        Site = apps.get_model('sites', 'Site')
    except LookupError:
        return
    if not router.allow_migrate_model(using, Site):
        return
    if Site.objects.using(using).exists():
        return
    # The default settings set SITE_ID = 1, and some tests in Django's test
    # suite rely on this value. However, if database sequences are reused
    # (e.g. in the test suite after flush/syncdb), it isn't guaranteed that
    # the next id will be 1, so we coerce it. See #15573 and #16353. This
    # can also crop up outside of tests - see #15346.
    if verbosity >= 2:
        print("Creating example.com Site object")
    site = Site(pk=settings.SITE_ID, domain="example.com", name="example.com")
    site.save(using=using)
    # We set an explicit pk instead of relying on auto-incrementation,
    # so we need to reset the database sequence. See #17415.
    sequence_sql = connections[using].ops.sequence_reset_sql(no_style(), [Site])
    if sequence_sql:
        if verbosity >= 2:
            print("Resetting sequence")
        with connections[using].cursor() as cursor:
            for command in sequence_sql:
                cursor.execute(command)
def reset_sequence(model):
    """
    Reset the ID sequence for a model.

    Fix: the original opened a fresh cursor for every statement inside the
    loop; a single cursor is created once and reused.
    """
    sql = connection.ops.sequence_reset_sql(no_style(), [model])
    cursor = connection.cursor()
    for cmd in sql:
        cursor.execute(cmd)
def __init__(self, stdout=None, stderr=None, no_color=False):
    """Wrap the output streams and resolve the system-check options."""
    self.stdout = OutputWrapper(stdout or sys.stdout)
    self.stderr = OutputWrapper(stderr or sys.stderr)
    self.style = no_style() if no_color else color_style()
    self.stderr.style_func = self.style.ERROR
    # `requires_model_validation` is deprecated in favor of
    # `requires_system_checks`. If both options are present, an error is
    # raised. Otherwise the present option is used. If none of them is
    # defined, the default value (True) is used.
    has_old_option = hasattr(self, 'requires_model_validation')
    has_new_option = hasattr(self, 'requires_system_checks')
    if has_old_option:
        warnings.warn(
            '"requires_model_validation" is deprecated '
            'in favor of "requires_system_checks".',
            RemovedInDjango19Warning)
    if has_old_option and has_new_option:
        raise ImproperlyConfigured(
            'Command %s defines both "requires_model_validation" '
            'and "requires_system_checks", which is illegal. Use only '
            '"requires_system_checks".' % self.__class__.__name__)
    # Pin the effective value onto the instance.
    if has_new_option:
        self.requires_system_checks = self.requires_system_checks
    elif has_old_option:
        self.requires_system_checks = self.requires_model_validation
    else:
        self.requires_system_checks = True
def handle_noargs(self, **options):
    """Read command options and import each app's ``management`` module.

    Fix: Python 2-only ``except ImportError, exc`` replaced with
    ``except ImportError as exc`` (valid on Python 2.6+ and 3.x).
    """
    verbosity = int(options.get('verbosity', 1))
    interactive = options.get('interactive')
    show_traceback = options.get('traceback', False)
    # Stealth option -- 'load_initial_data' is used by the testing setup
    # process to disable initial fixture loading.
    load_initial_data = options.get('load_initial_data', True)
    self.style = no_style()
    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            import_module('.management', app_name)
        except ImportError as exc:
            # This is slightly hackish. We want to ignore ImportErrors
            # if the "management" module itself is missing -- but we don't
            # want to ignore the exception if the management module exists
            # but raises an ImportError for some reason. The only way we
            # can do this is to check the text of the exception. Note that
            # we're a bit broad in how we check the text, because different
            # Python implementations may not use the same text.
            # CPython uses the text "No module named management"
            # PyPy uses "No module named myproject.myapp.management"
            msg = exc.args[0]
            if not msg.startswith('No module named') or 'management' not in msg:
                raise
def handle_noargs(self, **options):
    """Prepare for syncdb-style work: bind SQL helpers, register app signals.

    Fix: Python 2-only ``except ImportError, exc`` replaced with
    ``except ImportError as exc`` (valid on Python 2.6+ and 3.x).
    """
    from django.db import connection, transaction, models
    from django.conf import settings
    from django.core.management.sql import (
        table_list, installed_models, sql_model_create,
        sql_for_pending_references, many_to_many_sql_for_model,
        custom_sql_for_model, sql_indexes_for_model, emit_post_sync_signal,
    )

    verbosity = int(options.get("verbosity", 1))
    interactive = options.get("interactive")
    self.style = no_style()
    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            __import__(app_name + ".management", {}, {}, [""])
        except ImportError as exc:
            # Only swallow "module is missing"; re-raise real import failures.
            if not exc.args[0].startswith("No module named management"):
                raise
def load_cache_signals(sender, **kwargs):
    """On startup, sync signals with registered models.

    Fix: Python 2-only ``except Exception, ex`` replaced with
    ``except Exception as ex`` (valid on Python 2.6+ and 3.x).
    """
    from django.db import connection
    if not cache_signals.ready:
        # Have to load directly from db, because CacheBotSignals is not
        # prepared yet.
        cursor = connection.cursor()
        try:
            cursor.execute("SELECT * FROM %s" % CacheBotSignals._meta.db_table)
        except Exception as ex:
            # This should only happen on syncdb when CacheBot tables haven't
            # been created yet, but there's not really a good way to catch
            # this error — hence the deliberately broad except.
            sql, references = connection.creation.sql_create_model(
                CacheBotSignals, no_style())
            cursor.execute(sql[0])
            cursor.execute("SELECT * FROM %s" % CacheBotSignals._meta.db_table)
        results = cursor.fetchall()
        tables = [r[1] for r in results]
        mapping = cache.get_many(tables)
        # Group accessor tuples per versioned table key.
        for r in results:
            key = version_key(".".join(("cachesignals", r[1])))
            accessor_set = mapping.get(key)
            if accessor_set is None:
                accessor_set = set()
            accessor_set.add(r[2:5])
            mapping[key] = accessor_set
        cache.set_many(mapping, CACHE_SECONDS)
        cache_signals.ready = True
def __init__(self):
    """One-shot startup fixes; always raises MiddlewareNotUsed afterwards.

    Fix: the two bare ``except:`` clauses also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception`` while keeping the
    deliberate best-effort behavior.
    """
    try:
        # Add missing index on object_id on AccessControl table.
        creation = BaseDatabaseCreation(connection)
        sql = creation.sql_indexes_for_field(
            AccessControl,
            AccessControl._meta.get_field('object_id'),
            no_style(),
        )
        cursor = connection.cursor()
        for s in sql:
            cursor.execute(s)
    except Exception:
        # Best effort: index probably exists already.
        pass
    try:
        # Remove IP based group members.
        for group in ExtendedGroup.objects.filter(type=IP_BASED_GROUP):
            group.user_set.all().delete()
    except Exception:
        pass
    # Only need to run once.
    raise MiddlewareNotUsed
def execute(self, *args, **options):
    """
    Try to execute this command, performing system checks if needed (as
    controlled by the ``requires_system_checks`` attribute, except if
    force-skipped).
    """
    force_color = options['force_color']
    no_color = options['no_color']
    if force_color and no_color:
        raise CommandError("The --no-color and --force-color options can't be used together.")
    if force_color:
        self.style = color_style(force_color=True)
    elif no_color:
        self.style = no_style()
        self.stderr.style_func = None
    if options.get('stdout'):
        self.stdout = OutputWrapper(options['stdout'])
    if options.get('stderr'):
        self.stderr = OutputWrapper(options['stderr'], self.stderr.style_func)

    if self.requires_system_checks and not options.get('skip_checks'):
        self.check()
    if self.requires_migrations_checks:
        self.check_migrations()

    output = self.handle(*args, **options)
    if not output:
        return output
    if self.output_transaction:
        # Wrap the emitted SQL in BEGIN/COMMIT for the selected database.
        connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
        start = self.style.SQL_KEYWORD(connection.ops.start_transaction_sql())
        end = self.style.SQL_KEYWORD(connection.ops.end_transaction_sql())
        output = '%s\n%s\n%s' % (start, output, end)
    self.stdout.write(output)
    return output
def remove_seo_tables(self):
    """Drop the rollyourown.seo tables, rolling back on failure.

    The TypeError fallback handles older sql_delete() signatures that take
    no connection argument.

    Fix: Python 2-only ``except Exception, e`` replaced with
    ``except Exception as e`` (valid on Python 2.6+ and 3.x).
    """
    from django.core.management.sql import sql_delete
    from django.db import connection
    from django.core.management.color import no_style
    from rollyourown.seo import models as seo_models
    try:
        sql_list = sql_delete(seo_models, no_style(), connection)
    except TypeError:
        sql_list = sql_delete(seo_models, no_style())
    cursor = connection.cursor()
    try:
        for sql in sql_list:
            cursor.execute(sql)
    except Exception as e:
        transaction.rollback_unless_managed()
def handle_noargs(self, **options):
    """Read options and import each installed app's ``management`` module.

    Fix: Python 2-only ``except ImportError, exc`` replaced with
    ``except ImportError as exc`` (valid on Python 2.6+ and 3.x).
    """
    from django.db import connection, transaction, models
    from django.conf import settings
    from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal

    verbosity = int(options.get('verbosity', 1))
    interactive = options.get('interactive')
    show_traceback = options.get('traceback', False)
    self.style = no_style()
    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            __import__(app_name + '.management', {}, {}, [''])
        except ImportError as exc:
            # This is slightly hackish. We want to ignore ImportErrors
            # if the "management" module itself is missing -- but we don't
            # want to ignore the exception if the management module exists
            # but raises an ImportError for some reason. The only way we
            # can do this is to check the text of the exception. Note that
            # we're a bit broad in how we check the text, because different
            # Python implementations may not use the same text.
            # CPython uses the text "No module named management"
            # PyPy uses "No module named myproject.myapp.management"
            msg = exc.args[0]
            if not msg.startswith('No module named') or 'management' not in msg:
                raise
def handle(self, *args, **options):
    """Reset the pk sequences of every model on the selected database."""
    alias = options.get('database', DEFAULT_DB_ALIAS)
    conn = connections[alias]
    all_models = get_models(include_auto_created=True)
    cursor = conn.cursor()
    for statement in conn.ops.sequence_reset_sql(no_style(), all_models):
        cursor.execute(statement)
def handle_noargs(self, **options):
    """Flush the selected database after an optional interactive confirm."""
    db = options.get('database')
    connection = connections[db]
    verbosity = int(options.get('verbosity'))
    interactive = options.get('interactive')
    # The following are stealth options used by Django's internals.
    reset_sequences = options.get('reset_sequences', True)
    allow_cascade = options.get('allow_cascade', False)
    inhibit_post_migrate = options.get('inhibit_post_migrate', False)

    self.style = no_style()
    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            import_module('.management', app_name)
        except ImportError:
            pass

    sql_list = sql_flush(self.style, connection, only_django=True,
                         reset_sequences=reset_sequences,
                         allow_cascade=allow_cascade)

    if interactive:
        confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to a fresh state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
    else:
        confirm = 'yes'

    if confirm != 'yes':
        self.stdout.write("Flush cancelled.\n")
        return

    try:
        with transaction.commit_on_success_unless_managed():
            cursor = connection.cursor()
            for sql in sql_list:
                cursor.execute(sql)
    except Exception as e:
        new_msg = (
            "Database %s couldn't be flushed. Possible reasons:\n"
            "  * The database isn't running or isn't configured correctly.\n"
            "  * At least one of the expected database tables doesn't exist.\n"
            "  * The SQL was invalid.\n"
            "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
            "The full error: %s") % (connection.settings_dict['NAME'], e)
        six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

    if not inhibit_post_migrate:
        self.emit_post_migrate(verbosity, interactive, db)

    # Reinstall the initial_data fixture.
    if options.get('load_initial_data'):
        # Reinstall the initial_data fixture.
        call_command('loaddata', 'initial_data', **options)
def setup_databases(self):
    """Prepare every configured connection for the test run.

    Connections whose database must be created are closed so creation can
    happen later; existing databases just get their sequences reset.
    """
    for alias in connections:
        connection = connections[alias]
        creation = connection.creation
        test_db_name = connection.settings_dict['NAME']  # creation._get_test_db_name()
        orig_db_name = connection.settings_dict['NAME']
        connection.settings_dict['NAME'] = test_db_name
        if _should_create_database(connection):
            connection.settings_dict['NAME'] = orig_db_name
            connection.close()
        else:
            cursor = connection.cursor()
            style = no_style()
            if uses_mysql(connection):
                reset_statements = _mysql_reset_sequences(style, connection)
            else:
                reset_statements = connection.ops.sequence_reset_sql(
                    style, self._get_models_for_connection(connection))
            for reset_statement in reset_statements:
                cursor.execute(reset_statement)
            # transaction.commit_unless_managed(using=connection.alias)
    # creation.create_test_db = MethodType(
    #     _skip_create_test_db, creation, creation.__class__)
    Command.handle = _foreign_key_ignoring_handle
    # return super(NoseTestSuiteRunner, self).setup_databases()
    return self.setup_databases2(test_db_name)
def test_sql_all(self):
    """sql_all should emit the CREATE TABLE followed by index statements."""
    app = models.get_app("commands_sql")
    statements = sql_all(app, no_style(), connections[DEFAULT_DB_ALIAS])
    # PostgreSQL creates two indexes
    self.assertIn(len(statements), [2, 3])
    self.assertTrue(statements[0].startswith("CREATE TABLE"))
    self.assertTrue(statements[1].startswith("CREATE INDEX"))
def __init__(self, name, port, path, settingsmod):
    """Bind this wrapper to a Django project and import its settings."""
    # Project information.
    self.name = name
    self.port = port
    self.path = path
    self.settingsmod = settingsmod
    # Work from the project directory.
    os.chdir(path)
    # Ensure the project dir sits at the front of a pristine python path.
    if path not in PYTHONPATH_ORIG:
        sys.path = list(PYTHONPATH_ORIG)
        sys.path.insert(0, path)
    # Point Django at the project settings and load them.
    os.environ['DJANGO_SETTINGS_MODULE'] = settingsmod
    self.settings = __import__(settingsmod)
    import django
    self.django = django
    reload(self.settings)
    # Django server instance (started later).
    self.django_server_instance = None
    # Bind the django helpers used by the rest of the class.
    from django.db.models import get_app, get_models
    self.get_app = get_app
    self.get_models = get_models
    from django.core.management import call_command
    self.call_command = call_command
    from django.core.management.color import no_style
    self.style = no_style()
def sql_indexes_for_field(connection, model, field):
    """Return SQL statements for creating indexes for a field.

    This provides compatibility with all supported versions of Django.

    Args:
        connection (object): The database connection.
        model (django.db.models.Model): The database model owning the field.
        field (django.db.models.Field): The field being indexed.

    Returns:
        list: The list of SQL statements for creating the indexes.
    """
    if not BaseDatabaseSchemaEditor:
        # Django < 1.7: the creation backend does the should-index check.
        return connection.creation.sql_indexes_for_field(model, field,
                                                         color.no_style())
    # Django >= 1.7.
    #
    # Unlike sql_indexes_for_field(), _create_index_sql() won't be checking
    # whether it *should* create an index for the given field. We have to
    # check that here instead.
    if not field.db_index or field.unique:
        return []
    with connection.schema_editor() as schema_editor:
        return ['%s;' % schema_editor._create_index_sql(model, [field])]
def test_sequence_name_length_limits_flush(self):
    """
    Sequence resetting as part of a flush with model with long name and
    long pk name doesn't error (#8901).
    """
    # A full flush is expensive to the full test, so we dig into the
    # internals to generate the likely offending SQL and run it manually
    # Some convenience aliases
    VLM = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
    VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
    tables = [VLM._meta.db_table, VLM_m2m._meta.db_table]
    sequences = [{'column': VLM._meta.pk.column, 'table': VLM._meta.db_table}]
    statements = connection.ops.sql_flush(no_style(), tables, sequences)
    with connection.cursor() as cursor:
        for statement in statements:
            cursor.execute(statement)
def sql_create(app, db_name=None):
    """Return SQL statements for creating all models for an app.

    This provides compatibility with all supported versions of Django.

    Args:
        app (module): The application module.
        db_name (str, optional): The database connection name. Defaults to
            the default database connection.

    Returns:
        list: The list of SQL statements used to create the models for the
        app.
    """
    connection = connections[db_name or DEFAULT_DB_ALIAS]
    if not BaseDatabaseSchemaEditor:
        # Django < 1.7: table DDL and index DDL come from separate helpers.
        style = color.no_style()
        return (sql.sql_create(app, style, connection) +
                sql.sql_indexes(app, style, connection))
    # Django >= 1.7: collect schema-editor SQL instead of executing it.
    with connection.schema_editor(collect_sql=True) as schema_editor:
        for model in get_models(app):
            schema_editor.create_model(model)
        return schema_editor.collected_sql
def do_create(_cls, request):
    """Use an inner function so that we can generate a proper permission name
    at run time.

    Fixes: deprecated ``QueryDict.has_key()`` replaced with ``in``; the
    redundant list-of-pairs copy replaced with ``dict(request.POST.items())``;
    the cursor is only opened when there are sequence-reset statements to run.
    """
    key_name = self.model.model._meta.pk.name
    if key_name not in request.POST:
        created = True
    else:
        filter_args = {key_name: request.POST[key_name]}
        objects = self.model.model.objects
        created = (objects.filter(**filter_args).count() == 0)
    instance = self.model.model(**dict(request.POST.items()))
    instance.save()
    # Reset the sequence point in case there was a PK set
    reset_sequence_command_lines = connection.ops.sequence_reset_sql(
        no_style(), [self.model.model])
    if reset_sequence_command_lines:
        cursor = connection.cursor()
        cursor.execute(';'.join(reset_sequence_command_lines))
    instance_data(response, self.model, instance)
    response['pk'] = to_json_data(self.model, instance, key_name,
                                  self.model.fields[key_name])
    response['created'] = created
def sql_indexes_for_model(connection, model):
    """Return SQL statements for creating all indexes for a model.

    This provides compatibility with all supported versions of Django.

    Args:
        connection (object): The database connection.
        model (django.db.models.Model): The database model to create indexes
            for.

    Returns:
        list: The list of SQL statements for creating the indexes.
    """
    if not BaseDatabaseSchemaEditor:
        # Django < 1.7 path.
        return connection.creation.sql_indexes_for_model(model,
                                                         color.no_style())
    # Django >= 1.7 path.
    with connection.schema_editor() as schema_editor:
        return ['%s;' % s for s in schema_editor._model_indexes_sql(model)]
def column_sql(self, table_name, field_name, field, tablespace=''):
    """
    Creates the SQL snippet for a column. Used by add_column and add_table.

    Returns None when the field has no db_type (e.g. virtual fields).

    Fix: ``sqlparams = (default)`` was not a tuple (missing trailing comma);
    ``sql % sqlparams`` would therefore break if a default value were itself
    a tuple. Changed to ``(default,)`` — identical for all scalar defaults.
    """
    qn = connection.ops.quote_name
    field.set_attributes_from_name(field_name)
    # hook for the field to do any resolution prior to it's attributes being queried
    if hasattr(field, 'south_init'):
        field.south_init()
    # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
    field = self._field_sanity(field)
    sql = field.db_type()
    sqlparams = ()
    if sql:
        field_output = [qn(field.column), sql]
        field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
        if field.primary_key:
            field_output.append('PRIMARY KEY')
        elif field.unique:
            # Just use UNIQUE (no indexes any more, we have delete_unique)
            field_output.append('UNIQUE')
        tablespace = field.db_tablespace or tablespace
        if tablespace and connection.features.supports_tablespaces and field.unique:
            # We must specify the index tablespace inline, because we
            # won't be generating a CREATE INDEX statement for this field.
            field_output.append(connection.ops.tablespace_sql(tablespace, inline=True))
        sql = ' '.join(field_output)
        # if the field is "NOT NULL" and a default value is provided, create the column with it
        # this allows the addition of a NOT NULL field to a table with existing rows
        if not field.null and getattr(field, '_suppress_default', True) and field.has_default():
            default = field.get_default()
            # If the default is actually None, don't add a default term
            if default is not None:
                # If the default is a callable, then call it!
                if callable(default):
                    default = default()
                # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
                if isinstance(default, basestring):
                    default = "'%s'" % default.replace("'", "''")
                elif isinstance(default, datetime.date):
                    default = "'%s'" % default
                sql += " DEFAULT %s"
                sqlparams = (default,)  # fix: was (default), not a tuple
        if field.rel and self.supports_foreign_keys:
            self.add_deferred_sql(
                self.foreign_key_sql(
                    table_name,
                    field.column,
                    field.rel.to._meta.db_table,
                    field.rel.to._meta.get_field(field.rel.field_name).column
                )
            )
        if field.db_index and not field.unique:
            self.add_deferred_sql(self.create_index_sql(table_name, [field.column]))
        if hasattr(field, 'post_create_sql'):
            style = no_style()
            for stmt in field.post_create_sql(style, table_name):
                self.add_deferred_sql(stmt)
    if sql:
        return sql % sqlparams
    else:
        return None
def handle(self, *model_names, **options):
    """Recalculate missing (or, with --force, all) cable paths."""
    if options['force']:
        # If --force was passed, first delete all existing CablePaths.
        existing = CablePath.objects.all()
        paths_count = existing.count()
        # Prompt the user to confirm recalculation of all paths.
        if paths_count and not options['no_input']:
            self.stdout.write(
                self.style.ERROR(
                    "WARNING: Forcing recalculation of all cable paths."))
            self.stdout.write(
                f"This will delete and recalculate all {paths_count} existing cable paths. Are you sure?"
            )
            if input("Type yes to confirm: ") != 'yes':
                self.stdout.write(self.style.SUCCESS("Aborting"))
                return
        # Delete all existing CablePath instances.
        self.stdout.write(
            f"Deleting {paths_count} existing cable paths...")
        deleted_count, _ = CablePath.objects.all().delete()
        self.stdout.write(
            (self.style.SUCCESS(f'  Deleted {deleted_count} paths')))
        # Reinitialize the model's PK sequence.
        self.stdout.write(
            f'Resetting database sequence for CablePath model')
        reset_statements = connection.ops.sequence_reset_sql(
            no_style(), [CablePath])
        with connection.cursor() as cursor:
            for statement in reset_statements:
                cursor.execute(statement)

    # Retrace paths per endpoint model.
    for model in ENDPOINT_MODELS:
        origins = model.objects.filter(cable__isnull=False)
        if not options['force']:
            # Without --force, only fill in objects missing a path.
            origins = origins.filter(_path__isnull=True)
        origins_count = origins.count()
        if not origins_count:
            self.stdout.write(
                f'Found no missing {model._meta.verbose_name} paths; skipping'
            )
            continue
        self.stdout.write(
            f'Retracing {origins_count} cabled {model._meta.verbose_name_plural}...'
        )
        i = 0
        for i, obj in enumerate(origins, start=1):
            create_cablepath(obj)
            if not i % 100:
                self.draw_progress_bar(i * 100 / origins_count)
        self.draw_progress_bar(100)
        self.stdout.write(
            self.style.SUCCESS(
                f'\n  Retraced {i} {model._meta.verbose_name_plural}'))
    self.stdout.write(self.style.SUCCESS('Finished.'))
else: render = render_to_response('feedback.html', locals()) return render cursor = conn.cursor() if server_id > 0: try: sql = 'CREATE DATABASE IF NOT EXISTS %s default charset utf8 COLLATE utf8_unicode_ci;' % the_db_config[ 'db'] cursor.execute(sql) except Exception, e: print('create datebase has error', e) conn.select_db(the_db_config['db']) try: Log._meta.db_table = 'log_0' sql, _ = connection.creation.sql_create_model(Log, no_style()) sql = sql[0].replace('CREATE TABLE', 'CREATE TABLE if not exists') # print(sql) cursor.execute(sql) except Exception, e: print('create table log_0 has error', e) try: sql = 'show tables' cursor.execute(sql) list_record = cursor.fetchall() tables = [] for item in list_record: tables.append(item[0])
def handle(self, **options):
    """
    Erase data from a frePPLe database.

    Without --models, all django tables are flushed except a protected
    list (users, permissions, sessions, scenarios, ...).  With --models,
    only the listed models are erased, with special-case SQL to keep the
    shared ``operationplan`` table and the admin log consistent.
    Progress is tracked through a Task record.
    """
    # Pick up options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None
    if options["models"]:
        models = options["models"].split(",")
    else:
        models = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except:
                raise CommandError("Task identifier not found")
            # Only a pristine, waiting flush/empty task may be reused
            if (
                task.started
                or task.finished
                or task.status != "Waiting"
                or task.name not in ("frepple_flush", "empty")
            ):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(
                name="empty", submitted=now, started=now, status="0%", user=user
            )
        task.arguments = "%s%s" % (
            "--user=%s " % options["user"] if options["user"] else "",
            "--models=%s " % options["models"] if options["models"] else "",
        )
        task.processid = os.getpid()
        task.save(using=database)

        # Create a database connection
        cursor = connections[database].cursor()

        # Get a list of all django tables in the database
        tables = set(
            connections[database].introspection.django_table_names(
                only_existing=True
            )
        )
        ContentTypekeys = set()
        # Validate the user list of tables
        if models:
            # Which operationplan-related models were requested?
            hasDemand = True if "input.demand" in models else False
            hasOperation = True if "input.operation" in models else False
            hasPO = True if "input.purchaseorder" in models else False
            hasDO = True if "input.distributionorder" in models else False
            hasMO = True if "input.manufacturingorder" in models else False
            hasDeO = True if "input.deliveryorder" in models else False
            if not hasOperation:
                if hasDemand:
                    # Demand rows are deleted with explicit SQL so that
                    # operationplan references are nulled first
                    models.remove("input.demand")
                    cursor.execute(
                        "update operationplan set demand_id = null where demand_id is not null"
                    )
                    cursor.execute("delete from demand")
                    key = ContentType.objects.get_for_model(
                        inputmodels.Demand, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if not (hasPO and hasDO and hasMO and hasDeO):
                    # Not all order types are wiped: keep the shared child
                    # tables out of the bulk flush
                    if "input.operationplanmaterial" in models:
                        models.remove("input.operationplanmaterial")
                    if "input.operationplanresource" in models:
                        models.remove("input.operationplanresource")
                if hasPO and not (hasDO and hasMO and hasDeO):
                    models.remove("input.purchaseorder")
                    cursor.execute(
                        """
                        delete from operationplanresource
                        where operationplan_id in (
                        select operationplan.reference from operationplan where type = 'PO'
                        )
                        """
                    )
                    cursor.execute(
                        """
                        delete from operationplanmaterial
                        where operationplan_id in (
                        select operationplan.reference from operationplan where type = 'PO'
                        )
                        """
                    )
                    # NOTE(review): hasForecast is not assigned anywhere in
                    # this function — presumably a module-level flag for the
                    # forecast app; confirm it is in scope.
                    cursor.execute(
                        """
                        update operationplan set demand_id = null%s where type = 'PO'
                        """
                        % (", forecast = null" if hasForecast else "",)
                    )
                    cursor.execute("delete from operationplan where type = 'PO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.PurchaseOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if hasDO and not (hasPO and hasMO and hasDeO):
                    models.remove("input.distributionorder")
                    cursor.execute(
                        """
                        delete from operationplanresource
                        where operationplan_id in (
                        select operationplan.reference from operationplan where type = 'DO'
                        )
                        """
                    )
                    cursor.execute(
                        """
                        delete from operationplanmaterial
                        where operationplan_id in (
                        select operationplan.reference from operationplan where type = 'DO'
                        )
                        """
                    )
                    cursor.execute(
                        """
                        update operationplan set demand_id = null%s where type = 'DO'
                        """
                        % (", forecast = null" if hasForecast else "",)
                    )
                    cursor.execute("delete from operationplan where type = 'DO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DistributionOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if hasMO and not (hasPO and hasDO and hasDeO):
                    models.remove("input.manufacturingorder")
                    cursor.execute(
                        """
                        delete from operationplanmaterial
                        where operationplan_id in (
                        select operationplan.reference from operationplan where type = 'MO'
                        )
                        """
                    )
                    cursor.execute(
                        """
                        delete from operationplanresource
                        where operationplan_id in (
                        select operationplan.reference from operationplan where type = 'MO'
                        )
                        """
                    )
                    cursor.execute(
                        """
                        update operationplan set demand_id = null%s where type = 'MO'
                        """
                        % (", forecast = null" if hasForecast else "",)
                    )
                    cursor.execute(
                        """
                        delete from operationplan where type = 'MO'
                        """
                    )
                    # NOTE(review): this delete duplicates the statement just
                    # above (same predicate) — harmless but redundant; the
                    # other order types execute the delete only once.
                    cursor.execute("delete from operationplan where type = 'MO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.ManufacturingOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if hasDeO and not (hasPO and hasDO and hasMO):
                    models.remove("input.deliveryorder")
                    cursor.execute(
                        """
                        delete from operationplanmaterial
                        where operationplan_id in (
                        select operationplan.reference from operationplan where type = 'DLVR'
                        )
                        """
                    )
                    cursor.execute(
                        """
                        delete from operationplanresource
                        where operationplan_id in (
                        select operationplan.reference from operationplan where type = 'DLVR'
                        )
                        """
                    )
                    cursor.execute(
                        """
                        update operationplan set demand_id = null%s where type = 'DLVR'
                        """
                        % (", forecast = null" if hasForecast else "",)
                    )
                    cursor.execute("delete from operationplan where type = 'DLVR'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DeliveryOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if (hasPO or hasDO or hasMO or hasDeO) and not (
                    hasPO and hasDO and hasMO and hasDeO
                ):
                    # Keep the database in shape
                    cursor.execute("vacuum analyze")
            # Map the remaining model labels to their db tables
            models2tables = set()
            admin_log_positive = True
            for m in models:
                try:
                    x = m.split(".", 1)
                    x = apps.get_model(x[0], x[1])
                    if x in EXCLUDE_FROM_BULK_OPERATIONS:
                        continue
                    ContentTypekeys.add(ContentType.objects.get_for_model(x).pk)
                    x = x._meta.db_table
                    if x not in tables:
                        # bare raise: caught immediately below and rewrapped
                        raise
                    models2tables.add(x)
                except Exception as e:
                    raise CommandError("Invalid model to erase: %s" % m)
            tables = models2tables
        else:
            # Flushing everything: invert the admin-log filter and protect
            # the excluded models
            admin_log_positive = False
            tables.discard("django_admin_log")
            for i in EXCLUDE_FROM_BULK_OPERATIONS:
                tables.discard(i._meta.db_table)
                ContentTypekeys.add(ContentType.objects.get_for_model(i).pk)
        # Some tables need to be handled a bit special
        if "operationplan" in tables:
            tables.add("operationplanmaterial")
            tables.add("operationplanresource")
            tables.add("out_problem")
        if "resource" in tables and "out_resourceplan" not in tables:
            tables.add("out_resourceplan")
        if "demand" in tables and "out_constraint" not in tables:
            tables.add("out_constraint")
        # Never flush authentication, session, preference or scenario data
        tables.discard("auth_group_permissions")
        tables.discard("auth_permission")
        tables.discard("auth_group")
        tables.discard("django_session")
        tables.discard("common_user")
        tables.discard("common_user_groups")
        tables.discard("common_user_user_permissions")
        tables.discard("common_preference")
        tables.discard("django_content_type")
        tables.discard("execute_log")
        tables.discard("common_scenario")

        # Delete all records from the tables.
        with transaction.atomic(using=database, savepoint=False):
            if ContentTypekeys:
                if admin_log_positive:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = any(%s)",
                        (list(ContentTypekeys),),
                    )
                else:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id != any(%s)",
                        (list(ContentTypekeys),),
                    )
            if "common_bucket" in tables:
                cursor.execute("update common_user set horizonbuckets = null")
            for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
                cursor.execute(stmt)

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)
    except Exception as e:
        # Record the failure on the task before re-raising as a CommandError
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise CommandError("%s" % e)
def generate_datamodel(datafile, loader): def propertymap(data): output={} used=[] c=inspectdb.Command() for k in data: att_name, params, notes=inspectdb.Command.normalize_col_name(c, k, used, False) # logger.debug('Field %s, %s', att_name, notes) used.append(att_name) output[k]=att_name # logger.debug('Mappings are %s', output) return output try: if loader.is_spatial: spatial=True geom_type=loader.info.type model_type, geos_func, mapfile_type=geomodel_mappings[geom_type] else: spatial=False db_created=False this_model=None colors=[] model_content=['from django.contrib.gis.db import models'] # feature_id=1 for (row, geometry) in loader: if not db_created: db_created=True if datafile.result_field: min_result=max_result=float(row[datafile.result_field]) # Create the model for this data model_content.append('class Results_{0}(models.Model):'.format(datafile.pk)) # Add an auto-increment field for it (the PK) model_content.append('{0}nmtk_id=models.AutoField(primary_key=True, )'.format(' ' * 4)) # model_content.append('{0}nmtk_feature_id=models.IntegerField()'.format(' '*4)) # Add an entry for each of the fields # So instead of doing this - getting the keys to figure out the fields fields_types=loader.info.fields_types field_map=propertymap((field_name for field_name, type in fields_types)) type_mapping={str: ('models.TextField',''), unicode: ('models.TextField',''), int: ('models.DecimalField','max_digits=32, decimal_places=0, '), # We support up to a 32 digit integer. float: ('models.FloatField',''), datetime.date: ('models.DateField',''), datetime.time: ('models.TimeField',''), bool: ('models.BooleanField',''), datetime.datetime: ('models.DateTimeField',''),} for field_name, field_type in fields_types: if field_type not in type_mapping: logger.info('No type mapping exists for type %s (using TextField)!', field_type) field_type=str model_content.append("""{0}{1}={2}({3} null=True, db_column='''{4}''')""". 
format(' '*4, field_map[field_name], type_mapping[field_type][0], type_mapping[field_type][1], field_name) ) if spatial: model_content.append('''{0}nmtk_geometry={1}(null=True, srid=4326, dim={2})'''. format(' '*4, model_type, loader.info.dimensions)) model_content.append('''{0}objects=models.GeoManager()'''.format(' '*4,)) model_content.append('''{0}class Meta:'''.format(' '*4,)) model_content.append('''{0}db_table='userdata_results_{1}' '''.format(' '*8,datafile.pk)) datafile.model.save('model.py', ContentFile('\n'.join(model_content)), save=False) #logger.debug('\n'.join(model_content)) user_models=imp.load_source('%s.models' % (datafile.pk,),datafile.model.path) Results_model=getattr(user_models,'Results_{0}'.format(datafile.pk)) database='default' # If using PostgreSQL, then just create the model and go... dbtype='postgis' connection=connections[database] cursor=connection.cursor() for statement in connection.creation.sql_create_model(Results_model, no_style())[0]: cursor.execute(statement) for statement in connection.creation.sql_indexes_for_model(Results_model, no_style()): cursor.execute(statement) this_row=dict((field_map[k],v) for k,v in row.iteritems()) if spatial: this_row['nmtk_geometry']=geometry if datafile.result_field: try: logger.debug('Row is %s', this_row) min_result=min(float(this_row[datafile.result_field.lower()]), min_result) max_result=max(float(this_row[datafile.result_field.lower()]), max_result) except Exception, e: logger.exception('Result field (%s) is not a float (ignoring)', datafile.result_field) else: min_result=max_result=1 m=Results_model(**this_row) try: m.save(using=database) except Exception, e: logger.exception('Failed to save record from data file (%s)', this_row) logger.error('The type of data in question was %s (%s)',m, this_row ) raise e
def delete_fake_model(model):
    """Drop the database schema backing the given test model."""
    cursor = connection.cursor()
    statements = connection.creation.sql_destroy_model(model, (), no_style())
    for stmt in statements:
        cursor.execute(stmt)
def sync_apps(self, connection, app_labels):
    """
    Runs the old syncdb-style operation on a list of app_labels.

    Creates missing tables (resolving pending FK references), installs
    per-model custom SQL and indexes, then optionally loads the
    ``initial_data`` fixtures.  Returns the set of models whose tables
    were created.
    """
    cursor = connection.cursor()
    try:
        # Get a list of already installed *models* so that references work right.
        tables = connection.introspection.table_names(cursor)
        seen_models = connection.introspection.installed_models(tables)
        created_models = set()
        pending_references = {}

        # Build the manifest of apps and models that are to be synchronized
        all_models = [
            (app_config.label,
             router.get_migratable_models(app_config, connection.alias,
                                          include_auto_created=True))
            for app_config in apps.get_app_configs()
            if app_config.models_module is not None
            and app_config.label in app_labels
        ]

        def model_installed(model):
            opts = model._meta
            converter = connection.introspection.table_name_converter
            # Note that if a model is unmanaged we short-circuit and never try to install it
            return not ((converter(opts.db_table) in tables) or
                        (opts.auto_created and converter(
                            opts.auto_created._meta.db_table) in tables))

        manifest = OrderedDict(
            (app_name, list(filter(model_installed, model_list)))
            for app_name, model_list in all_models)
        create_models = set(itertools.chain(*manifest.values()))
        emit_pre_migrate_signal(create_models, self.verbosity,
                                self.interactive, connection.alias)

        # Create the tables for each model
        if self.verbosity >= 1:
            self.stdout.write(" Creating tables...\n")
        with transaction.atomic(using=connection.alias, savepoint=False):
            for app_name, model_list in manifest.items():
                for model in model_list:
                    # Create the model's database table, if it doesn't already exist.
                    if self.verbosity >= 3:
                        self.stdout.write(
                            " Processing %s.%s model\n" %
                            (app_name, model._meta.object_name))
                    sql, references = connection.creation.sql_create_model(
                        model, no_style(), seen_models)
                    seen_models.add(model)
                    created_models.add(model)
                    # FK references to not-yet-created tables are deferred
                    # until the referenced model has been seen
                    for refto, refs in references.items():
                        pending_references.setdefault(refto, []).extend(refs)
                        if refto in seen_models:
                            sql.extend(
                                connection.creation.sql_for_pending_references(
                                    refto, no_style(), pending_references))
                    sql.extend(
                        connection.creation.sql_for_pending_references(
                            model, no_style(), pending_references))
                    if self.verbosity >= 1 and sql:
                        self.stdout.write(" Creating table %s\n" %
                                          model._meta.db_table)
                    for statement in sql:
                        cursor.execute(statement)
                    tables.append(
                        connection.introspection.table_name_converter(
                            model._meta.db_table))
        # We force a commit here, as that was the previous behavior.
        # If you can prove we don't need this, remove it.
        transaction.set_dirty(using=connection.alias)
    finally:
        cursor.close()

    # The connection may have been closed by a syncdb handler.
    cursor = connection.cursor()
    try:
        # Install custom SQL for the app (but only if this
        # is a model we've just created)
        if self.verbosity >= 1:
            self.stdout.write(" Installing custom SQL...\n")
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    custom_sql = custom_sql_for_model(
                        model, no_style(), connection)
                    if custom_sql:
                        if self.verbosity >= 2:
                            self.stdout.write(
                                " Installing custom SQL for %s.%s model\n" %
                                (app_name, model._meta.object_name))
                        try:
                            with transaction.commit_on_success_unless_managed(
                                    using=connection.alias):
                                for sql in custom_sql:
                                    cursor.execute(sql)
                        except Exception as e:
                            # Custom SQL failures are reported, not fatal
                            self.stderr.write(
                                " Failed to install custom SQL for %s.%s model: %s\n"
                                % (app_name, model._meta.object_name, e))
                            if self.show_traceback:
                                traceback.print_exc()
                    else:
                        if self.verbosity >= 3:
                            self.stdout.write(
                                " No custom SQL for %s.%s model\n" %
                                (app_name, model._meta.object_name))

        if self.verbosity >= 1:
            self.stdout.write(" Installing indexes...\n")
        # Install SQL indices for all newly created models
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    index_sql = connection.creation.sql_indexes_for_model(
                        model, no_style())
                    if index_sql:
                        if self.verbosity >= 2:
                            self.stdout.write(
                                " Installing index for %s.%s model\n" %
                                (app_name, model._meta.object_name))
                        try:
                            with transaction.commit_on_success_unless_managed(
                                    using=connection.alias):
                                for sql in index_sql:
                                    cursor.execute(sql)
                        except Exception as e:
                            self.stderr.write(
                                " Failed to install index for %s.%s model: %s\n"
                                % (app_name, model._meta.object_name, e))
    finally:
        cursor.close()

    # Load initial_data fixtures (unless that has been disabled)
    if self.load_initial_data:
        for app_label in app_labels:
            call_command('loaddata',
                         'initial_data',
                         verbosity=self.verbosity,
                         database=connection.alias,
                         skip_validation=True,
                         app_label=app_label,
                         hide_empty=True)

    return created_models
def handle(self, *fixture_labels, **options): ''' All database code is unnecessary but remove it will broke. ''' #TODO remove database, connection code and stuff using = options.get('database') connection = connections[using] self.style = no_style() if not len(fixture_labels): print("No database fixture specified. Please provide the path of at least one fixture in the command line.\n") return verbosity = int(options.get('verbosity')) show_traceback = options.get('traceback') # commit is a stealth option - it isn't really useful as # a command line option, but it can be useful when invoking # loaddata from within another script. # If commit=True, loaddata will use its own transaction; # if commit=False, the data load SQL will become part of # the transaction in place when loaddata was invoked. commit = options.get('commit', True) # Keep a count of the installed objects and fixtures fixture_count = 0 loaded_object_count = 0 fixture_object_count = 0 models = set() humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path' # Get a cursor (even though we don't need one yet). This has # the side effect of initializing the test database (if # it isn't already initialized). cursor = connection.cursor() # Start transaction management. All fixtures are installed in a # single transaction to ensure that all references are resolved. if commit: transaction.commit_unless_managed(using=using) transaction.enter_transaction_management(using=using) transaction.managed(True, using=using) class SingleZipReader(zipfile.ZipFile): def __init__(self, *args, **kwargs): zipfile.ZipFile.__init__(self, *args, **kwargs) if settings.DEBUG: assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file." 
def read(self): return zipfile.ZipFile.read(self, self.namelist()[0]) compression_types = { None: open, 'gz': gzip.GzipFile, 'zip': SingleZipReader } if has_bz2: compression_types['bz2'] = bz2.BZ2File app_module_paths = [] for app in get_apps(): if hasattr(app, '__path__'): # It's a 'models/' subpackage for path in app.__path__: app_module_paths.append(path) else: # It's a models.py module app_module_paths.append(app.__file__) app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths] instances = [] apps = set() try: #with connection.constraint_checks_disabled(): for fixture_label in fixture_labels: parts = fixture_label.split('.') if len(parts) > 1 and parts[-1] in compression_types: compression_formats = [parts[-1]] parts = parts[:-1] else: compression_formats = compression_types.keys() if len(parts) == 1: fixture_name = parts[0] formats = serializers.get_public_serializer_formats() else: fixture_name, format = '.'.join(parts[:-1]), parts[-1] if format in serializers.get_public_serializer_formats(): formats = [format] else: formats = [] if formats: if verbosity >= 2: print("Loading '%s' fixtures...\n" % fixture_name) else: print("Problem installing fixture '%s': %s is not a known serialization format.\n" % (fixture_name, format)) if commit: transaction.rollback(using=using) transaction.leave_transaction_management(using=using) return if os.path.isabs(fixture_name): fixture_dirs = [fixture_name] else: fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + [''] for fixture_dir in fixture_dirs: if verbosity >= 2: self.stdout.write("Checking %s for fixtures...\n" % humanize(fixture_dir)) label_found = False for combo in product([using, None], formats, compression_formats): database, format, compression_format = combo file_name = '.'.join( p for p in [ fixture_name, database, format, compression_format ] if p ) if verbosity >= 3: self.stdout.write("Trying %s for %s fixture '%s'...\n" % \ (humanize(fixture_dir), file_name, 
fixture_name)) full_path = os.path.join(fixture_dir, file_name) open_method = compression_types[compression_format] try: fixture = open_method(full_path, 'r') except IOError: if verbosity >= 2: self.stdout.write("No %s fixture '%s' in %s.\n" % \ (format, fixture_name, humanize(fixture_dir))) else: try: if label_found: print("Multiple fixtures named '%s' in %s. Aborting.\n" % (fixture_name, humanize(fixture_dir))) if commit: transaction.rollback(using=using) transaction.leave_transaction_management(using=using) return fixture_count += 1 objects_in_fixture = 0 loaded_objects_in_fixture = 0 if verbosity >= 2: self.stdout.write("Installing %s fixture '%s' from %s.\n" % \ (format, fixture_name, humanize(fixture_dir))) objects = serializers.deserialize(format, fixture, using=using) for obj in objects: objects_in_fixture += 1 if router.allow_syncdb(using, obj.object.__class__): loaded_objects_in_fixture += 1 models.add(obj.object.__class__) try: obj.object.save = partial(memory_save, obj.object) #if obj.object.__class__.__name__ == "PedidoExame": # import ipdb; ipdb.set_trace() ### XXX BREAKPOINT obj.object.save() instances.append(obj) except (DatabaseError, IntegrityError), e: msg = "Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % { 'app_label': obj.object._meta.app_label, 'object_name': obj.object._meta.object_name, 'pk': obj.object.pk, 'error_msg': e } raise e.__class__, e.__class__(msg), sys.exc_info()[2] loaded_object_count += loaded_objects_in_fixture fixture_object_count += objects_in_fixture label_found = True finally: fixture.close() # If the fixture we loaded contains 0 objects, assume that an # error was encountered during fixture loading. if objects_in_fixture == 0: print("No fixture data found for '%s'. 
(File format may be invalid.)\n" % (fixture_name)) if commit: transaction.rollback(using=using) transaction.leave_transaction_management(using=using) return # Since we disabled constraint checks, we must manually check for # any invalid keys that might have been added table_names = [model._meta.db_table for model in models]
def sync_apps(self, connection, app_labels):
    """
    Runs the old syncdb-style operation on a list of app_labels.

    Newer variant that delegates table creation to the schema editor
    (collecting its deferred SQL) instead of building raw CREATE TABLE
    statements.  Returns the set of models whose tables were created.
    """
    cursor = connection.cursor()
    try:
        # Get a list of already installed *models* so that references work right.
        tables = connection.introspection.table_names(cursor)
        created_models = set()

        # Build the manifest of apps and models that are to be synchronized
        all_models = [
            (app_config.label,
             router.get_migratable_models(app_config, connection.alias,
                                          include_auto_created=False))
            for app_config in apps.get_app_configs()
            if app_config.models_module is not None
            and app_config.label in app_labels
        ]

        def model_installed(model):
            opts = model._meta
            converter = connection.introspection.table_name_converter
            # Note that if a model is unmanaged we short-circuit and never try to install it
            return not ((converter(opts.db_table) in tables) or
                        (opts.auto_created and converter(
                            opts.auto_created._meta.db_table) in tables))

        manifest = OrderedDict(
            (app_name, list(filter(model_installed, model_list)))
            for app_name, model_list in all_models)
        create_models = set(itertools.chain(*manifest.values()))
        emit_pre_migrate_signal(create_models, self.verbosity,
                                self.interactive, connection.alias)

        # Create the tables for each model
        if self.verbosity >= 1:
            self.stdout.write(" Creating tables...\n")
        with transaction.atomic(
                using=connection.alias,
                savepoint=connection.features.can_rollback_ddl):
            deferred_sql = []
            for app_name, model_list in manifest.items():
                for model in model_list:
                    # Proxy and unmanaged models have no table of their own
                    if model._meta.proxy or not model._meta.managed:
                        continue
                    if self.verbosity >= 3:
                        self.stdout.write(
                            " Processing %s.%s model\n" %
                            (app_name, model._meta.object_name))
                    with connection.schema_editor() as editor:
                        if self.verbosity >= 1:
                            self.stdout.write(" Creating table %s\n" %
                                              model._meta.db_table)
                        editor.create_model(model)
                        # Collect deferred SQL (e.g. FK constraints) to run
                        # after every table exists
                        deferred_sql.extend(editor.deferred_sql)
                        editor.deferred_sql = []
                    created_models.add(model)
            if self.verbosity >= 1:
                self.stdout.write(" Running deferred SQL...\n")
            for statement in deferred_sql:
                cursor.execute(statement)
    finally:
        cursor.close()

    # The connection may have been closed by a syncdb handler.
    cursor = connection.cursor()
    try:
        # Install custom SQL for the app (but only if this
        # is a model we've just created)
        if self.verbosity >= 1:
            self.stdout.write(" Installing custom SQL...\n")
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    custom_sql = custom_sql_for_model(
                        model, no_style(), connection)
                    if custom_sql:
                        if self.verbosity >= 2:
                            self.stdout.write(
                                " Installing custom SQL for %s.%s model\n" %
                                (app_name, model._meta.object_name))
                        try:
                            with transaction.atomic(using=connection.alias):
                                for sql in custom_sql:
                                    cursor.execute(sql)
                        except Exception as e:
                            # Custom SQL failures are reported, not fatal
                            self.stderr.write(
                                " Failed to install custom SQL for %s.%s model: %s\n"
                                % (app_name, model._meta.object_name, e))
                            if self.show_traceback:
                                traceback.print_exc()
                    else:
                        if self.verbosity >= 3:
                            self.stdout.write(
                                " No custom SQL for %s.%s model\n" %
                                (app_name, model._meta.object_name))
    finally:
        cursor.close()

    # Load initial_data fixtures (unless that has been disabled)
    if self.load_initial_data:
        for app_label in app_labels:
            call_command(
                'loaddata',
                'initial_data',
                verbosity=self.verbosity,
                database=connection.alias,
                app_label=app_label,
                hide_empty=True,
            )
    return created_models
def loaddata(self, fixture_labels):
    """
    Load the given fixture labels into the database, with constraint
    checks disabled for the duration, then verify constraints and reset
    PK sequences if anything was installed.
    """
    connection = connections[self.using]

    # Keep a count of the installed objects and fixtures
    self.fixture_count = 0
    self.loaded_object_count = 0
    self.fixture_object_count = 0
    self.models = set()

    self.serialization_formats = serializers.get_public_serializer_formats()
    # Forcing binary mode may be revisited after dropping Python 2 support (see #22399)
    self.compression_formats = {
        None: (open, 'rb'),
        'gz': (gzip.GzipFile, 'rb'),
        'zip': (SingleZipReader, 'r'),
    }
    if has_bz2:
        self.compression_formats['bz2'] = (bz2.BZ2File, 'r')

    # Django's test suite repeatedly tries to load initial_data fixtures
    # from apps that don't have any fixtures. Because disabling constraint
    # checks can be expensive on some database (especially MSSQL), bail
    # out early if no fixtures are found.
    for fixture_label in fixture_labels:
        if self.find_fixtures(fixture_label):
            break
    else:
        return

    with connection.constraint_checks_disabled():
        for fixture_label in fixture_labels:
            self.load_label(fixture_label)

    # Since we disabled constraint checks, we must manually check for
    # any invalid keys that might have been added
    table_names = [model._meta.db_table for model in self.models]
    try:
        connection.check_constraints(table_names=table_names)
    except Exception as e:
        # Re-raise with a message identifying this as a fixture problem
        e.args = ("Problem installing fixtures: %s" % e, )
        raise

    # If we found even one object in a fixture, we need to reset the
    # database sequences.
    if self.loaded_object_count > 0:
        sequence_sql = connection.ops.sequence_reset_sql(
            no_style(), self.models)
        if sequence_sql:
            if self.verbosity >= 2:
                self.stdout.write("Resetting sequences\n")
            with connection.cursor() as cursor:
                for line in sequence_sql:
                    cursor.execute(line)

    if self.verbosity >= 1:
        if self.fixture_object_count == self.loaded_object_count:
            self.stdout.write(
                "Installed %d object(s) from %d fixture(s)" %
                (self.loaded_object_count, self.fixture_count))
        else:
            # Some deserialized objects were skipped (e.g. by the router)
            self.stdout.write(
                "Installed %d object(s) (of %d) from %d fixture(s)" %
                (self.loaded_object_count, self.fixture_object_count,
                 self.fixture_count))
def print_diff(self, style=None):
    """
    Print the computed differences to stdout.

    Args:
        style: Optional Django output style object.  Defaults to a fresh
            ``no_style()`` created per call.  (The previous signature used
            ``style=no_style()``, which called the function once at import
            time and shared that single style object across all calls —
            the classic call-in-default-argument pitfall.)
    """
    if style is None:
        style = no_style()
    # SQL output is the default; plain text only when explicitly disabled.
    if self.options.get('sql', True):
        self.print_diff_sql(style)
    else:
        self.print_diff_text(style)
def handle(self, *fixture_labels, **options):
    """
    Main method of a Django command.

    Legacy fixture loader: locates each label in app ``fixtures/``
    directories and FIXTURE_DIRS, deserializes and saves every object,
    optionally removes objects absent from the fixture (--remove), then
    resets PK sequences and commits.
    """
    from django.db.models import get_apps
    from django.core import serializers
    from django.conf import settings

    self.style = no_style()

    verbosity = int(options.get('verbosity', 1))
    show_traceback = options.get('traceback', False)

    # Keep a count of the installed objects and fixtures
    fixture_count = 0
    object_count = 0
    objects_per_fixture = []
    models = set()

    humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database (if
    # it isn't already initialized).
    cursor = connection.cursor()

    app_fixtures = [
        os.path.join(os.path.dirname(app.__file__), 'fixtures')
        for app in get_apps()
    ]
    for fixture_label in fixture_labels:
        parts = fixture_label.split('.')
        if len(parts) == 1:
            # No explicit extension: try every known serializer format
            fixture_name = fixture_label
            formats = serializers.get_public_serializer_formats()
        else:
            fixture_name, format = '.'.join(parts[:-1]), parts[-1]
            if format in serializers.get_public_serializer_formats():
                formats = [format]
            else:
                formats = []

        if formats:
            if verbosity > 1:
                print("Loading '%s' fixtures..." % fixture_name)
        else:
            sys.stderr.write(
                self.style.ERROR(
                    "Problem installing fixture '%s': %s is not a known serialization format."
                    % (fixture_name, format)))
            transaction.rollback()
            return

        if os.path.isabs(fixture_name):
            fixture_dirs = [fixture_name]
        else:
            # '' at the end also makes relative paths from the CWD work
            fixture_dirs = app_fixtures + list(
                settings.FIXTURE_DIRS) + ['']

        for fixture_dir in fixture_dirs:
            if verbosity > 1:
                print("Checking %s for fixtures..." % humanize(fixture_dir))
            label_found = False
            for format in formats:
                if verbosity > 1:
                    print("Trying %s for %s fixture '%s'..." %
                          (humanize(fixture_dir), format, fixture_name))
                try:
                    full_path = os.path.join(
                        fixture_dir, '.'.join([fixture_name, format]))
                    fixture = open(full_path, 'r')
                    if label_found:
                        # The same fixture name matched twice — ambiguous
                        fixture.close()
                        print(
                            self.style.ERROR(
                                "Multiple fixtures named '%s' in %s. Aborting."
                                % (fixture_name, humanize(fixture_dir))))
                        transaction.rollback()
                        return
                    else:
                        fixture_count += 1
                        objects_per_fixture.append(0)
                        if verbosity > 0:
                            print("Installing %s fixture '%s' from %s." %
                                  (format, fixture_name,
                                   humanize(fixture_dir)))
                        try:
                            # Track what we load so --remove can delete
                            # everything not present in the fixture
                            objects_to_keep = {}
                            objects = serializers.deserialize(
                                format, fixture)
                            for obj in objects:
                                object_count += 1
                                objects_per_fixture[-1] += 1
                                class_ = obj.object.__class__
                                if class_ not in objects_to_keep:
                                    objects_to_keep[class_] = set()
                                objects_to_keep[class_].add(obj.object)
                                models.add(class_)
                                obj.save()
                            if options.get('remove'):
                                self.remove_objects_not_in(
                                    objects_to_keep, verbosity)
                            label_found = True
                        except (SystemExit, KeyboardInterrupt):
                            raise
                        except Exception:
                            import traceback
                            fixture.close()
                            transaction.rollback()
                            if show_traceback:
                                traceback.print_exc()
                            else:
                                sys.stderr.write(
                                    self.style.ERROR(
                                        "Problem installing fixture '%s': %s\n"
                                        % (full_path,
                                           traceback.format_exc())))
                            return
                        fixture.close()
                except:
                    # Opening failed — this directory simply lacks the file
                    if verbosity > 1:
                        print(
                            "No %s fixture '%s' in %s." %
                            (format, fixture_name, humanize(fixture_dir)))

    # If any of the fixtures we loaded contain 0 objects, assume that an
    # error was encountered during fixture loading.
    if 0 in objects_per_fixture:
        sys.stderr.write(
            self.style.ERROR(
                "No fixture data found for '%s'. (File format may be invalid.)"
                % fixture_name))
        transaction.rollback()
        return

    # If we found even one object in a fixture, we need to reset the
    # database sequences.
    if object_count > 0:
        sequence_sql = connection.ops.sequence_reset_sql(
            self.style, models)
        if sequence_sql:
            if verbosity > 1:
                print("Resetting sequences")
            for line in sequence_sql:
                cursor.execute(line)

    transaction.commit()

    if object_count == 0:
        if verbosity > 1:
            print("No fixtures found.")
    else:
        if verbosity > 0:
            print("Installed %d object(s) from %d fixture(s)" %
                  (object_count, fixture_count))

    # Close the DB connection. This is required as a workaround for an
    # edge case in MySQL: if the same connection is used to
    # create tables, load data, and query, the query can return
    # incorrect results. See Django #7572, MySQL #37735.
    connection.close()
def test_sql_flush_no_tables(self):
    """Flushing an empty table list must yield no SQL statements."""
    statements = connection.ops.sql_flush(no_style(), [])
    self.assertEqual(statements, [])
def load_ipython_extension(ipython):
    """Push the django-extensions auto-imported objects into IPython's namespace."""
    from django.core.management.color import no_style
    from django_extensions.management.shells import import_objects

    shell_options = {'dont_load': []}
    namespace = import_objects(options=shell_options, style=no_style())
    ipython.push(namespace)
def __init__(self, stdout=None, style=None):
    """Initialize with optional output stream and style.

    When no stream is given, output is swallowed by wrapping os.devnull;
    when no style is given, Django's uncolored style is used.
    """
    self.user = None
    self.devnull = open(os.devnull, "w")
    if stdout:
        self.stdout = stdout
    else:
        self.stdout = OutputWrapper(self.devnull)
    if style:
        self.style = style
    else:
        self.style = no_style()
def check_table(self, target_model_name, target_database):
    """Create the table (plus custom SQL and indexes) for a model in a target DB.

    Looks up ``target_model_name`` in ``self.app_name``; if its table is not
    among the installed models of ``target_database``, emits and executes the
    CREATE TABLE statements (resolving pending FK references), then any custom
    SQL for the model, then its index SQL. Each phase is committed or rolled
    back independently via ``commit_unless_managed``/``rollback_unless_managed``.
    """
    from django.db import connections
    from django.db import transaction
    from django.db.models import get_model
    target_model = get_model(self.app_name, target_model_name)
    target_conn = connections[target_database]
    target_cursor = target_conn.cursor()
    style = no_style()
    # Models whose tables already exist in the target database.
    tables = target_conn.introspection.table_names()
    seen_models = target_conn.introspection.installed_models(tables)
    pending_references = {}
    show_traceback = self.verbosity > 1
    if target_model not in seen_models:
        if self.verbosity >= 2:
            self.stdout.write(
                "The target model {0} is not installed.".format(
                    target_model_name))
            self.stdout.write("Installed objects are: {0}.".format(
                ', '.join(model._meta.object_name for model in seen_models)))
        # CREATE TABLE statements; FK references to not-yet-created tables
        # are deferred into pending_references and flushed once the
        # referenced model is known.
        sql, references = target_conn.creation.sql_create_model(
            target_model, style, seen_models)
        seen_models.add(target_model)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in seen_models:
                sql.extend(
                    target_conn.creation.sql_for_pending_references(
                        refto, style, pending_references))
        sql.extend(
            target_conn.creation.sql_for_pending_references(
                target_model, style, pending_references))
        if self.verbosity >= 1 and sql:
            self.stdout.write("Creating table %s\n" %
                              target_model._meta.db_table)
        for statement in sql:
            target_cursor.execute(statement)
        transaction.commit_unless_managed(using=target_database)
        # Phase 2: model-provided custom SQL (initial data etc.).
        custom_sql = custom_sql_for_model(target_model, style, target_conn)
        if custom_sql:
            if self.verbosity >= 2:
                self.stdout.write(
                    "Installing custom SQL for %s.%s model\n" %
                    (self.app_name, target_model_name))
            try:
                for sql in custom_sql:
                    target_cursor.execute(sql)
            except Exception as e:
                self.stderr.write(
                    "Failed to install custom SQL for %s.%s model: %s\n" %
                    (self.app_name, target_model_name, e))
                if show_traceback:
                    traceback.print_exc()
                transaction.rollback_unless_managed(using=target_database)
            else:
                transaction.commit_unless_managed(using=target_database)
        else:
            if self.verbosity >= 3:
                self.stdout.write("No custom SQL for %s.%s model\n" %
                                  (self.app_name, target_model_name))
        # Phase 3: index creation, again committed/rolled back on its own.
        index_sql = target_conn.creation.sql_indexes_for_model(
            target_model, style)
        if index_sql:
            if self.verbosity >= 2:
                self.stdout.write("Installing index for %s.%s model\n" %
                                  (self.app_name, target_model_name))
            try:
                for sql in index_sql:
                    target_cursor.execute(sql)
            except Exception as e:
                self.stderr.write(
                    "Failed to install index for %s.%s model: %s\n" %
                    (self.app_name, target_model_name, e))
                transaction.rollback_unless_managed(using=target_database)
            else:
                transaction.commit_unless_managed(using=target_database)
def handle(self, *args, **options):
    """Flush the configured database and reload it from the given fixtures.

    Engine-specific wipe (delete sqlite file / DROP+CREATE MySQL database /
    DROP TABLE ... CASCADE on PostgreSQL), then re-run migrations and load
    the fixtures named in ``args``/``options['fixtures']``.
    """
    using = options.get('database', DEFAULT_DB_ALIAS)
    dbname = settings.DATABASES[using]['NAME']
    engine = settings.DATABASES[using]['ENGINE']
    if options.get('interactive'):
        if not confirm("""We are going to flush your database (%s). Are you sure (y/n) ?""" % dbname):
            raise CommandError("User abort.")
    fixtures = options.pop('fixtures', args)
    # print(20160817, fixtures, options)
    options.update(interactive=False)
    # the following log message was useful on Travis 20150104
    if options.get('verbosity', 1) > 0:
        dd.logger.info(
            "`initdb %s` started on database %s.",
            ' '.join(fixtures), dbname)
    if engine == 'django.db.backends.sqlite3':
        # Simplest possible wipe: remove the database file and force a
        # reconnect by dropping the cached connection.
        if dbname != ':memory:' and os.path.isfile(dbname):
            os.remove(dbname)
            del connections[using]
    elif engine == 'django.db.backends.mysql':
        conn = connections[using]
        cursor = conn.cursor()
        cursor.execute("DROP DATABASE %s;" % dbname)
        cursor.execute("CREATE DATABASE %s charset 'utf8';" % dbname)
        # We must now force Django to reconnect, otherwise we get
        # "no database selected" since Django would try to
        # continue on the dropped database:
        del connections[using]
        # now reconnect and set foreign_key_checks to 0
        conn = connections[using]
        cursor = conn.cursor()
        cursor.execute("set foreign_key_checks=0;")
    elif engine == 'django.db.backends.postgresql':
        foralltables(using, "DROP TABLE IF EXISTS {} CASCADE;")
        # cmd = """select 'DROP TABLE "' || tablename || '" IF EXISTS CASCADE;' from pg_tables where schemaname = 'public';"""
        # cursor.execute(cmd)
        # cursor.close()
        del connections[using]
    else:
        raise Exception("Not tested for %r" % engine)
    sql_list = []
    conn = connections[using]
    # adds a "DELETE FROM tablename;" for each table
    # sql = sql_flush(no_style(), conn, only_django=False)
    # sql_list.extend(sql)
    if AFTER17:
        # django.core.management.base.CommandError: App
        # 'sessions' has migrations. Only the sqlmigrate and
        # sqlflush commands can be used when an app has
        # migrations.
        # from django.apps import apps
        # app_list = apps.get_app_configs()
        # for app in app_list:
        #     sql_list.extend(sql_delete(app, no_style(), conn))
        pass
    elif USE_SQLDELETE:
        from django.core.management.sql import sql_delete
        # sql_delete was removed in Django 1.9
        # ~ sql_list = u'\n'.join(sql_reset(app, no_style(), conn)).encode('utf-8')
        app_list = [models.get_app(p.app_label)
                    for p in settings.SITE.installed_plugins]
        for app in app_list:
            # app_label = app.__name__.split('.')[-2]
            sql_list.extend(sql_delete(app, no_style(), conn))
            # print app_label, ':', sql_list
    # ~ print sql_list
    if len(sql_list):
        with conn.constraint_checks_disabled():
            # for sql in sql_list:
            #     cursor.execute(sql)
            # try_sql returns the statements that failed; retry until
            # everything went through (dependency-ordering workaround).
            pending = self.try_sql(conn, sql_list)
            while len(pending):
                pending = self.try_sql(conn, pending)
        transaction.commit_unless_managed()
    settings.SITE._site_config = None  # clear cached instance
    if engine == 'django.db.backends.postgresql':
        # # a first time to create tables of contenttypes. At
        # # least on PostgreSQL this is required because for
        # # some reason the syncdb fails when contenttypes is
        # # not initialized.
        call_command('migrate', **options)
    call_command('makemigrations', interactive=False, verbosity=0)
    call_command('migrate', '--run-syncdb', **options)
    if len(fixtures):
        # if engine == 'django.db.backends.postgresql':
        #     foralltables(using, "ALTER TABLE {} DISABLE TRIGGER ALL;")
        options.pop('interactive')
        call_command('loaddata', *fixtures, **options)
def no_style():
    """Build an uncolored output style.

    Starts from Django's ``color.no_style()`` and attaches the extra
    roles this module needs, each mapped to the pass-through styling
    function.
    """
    plain = color.no_style()
    extra_roles = ('FILTER', 'MODULE_NAME', 'TAG', 'TAGLIB')
    for role_name in extra_roles:
        setattr(plain, role_name, _dummy_style_func)
    return plain
def populate_genders(app_registry, schema_editor):
    """Data migration: seed the hosting.Gender table with 50 fixed rows.

    Rows carry explicit primary keys (English name plus Esperanto ``name``),
    so the PK sequence is reset afterwards via ``sequence_reset_sql`` to keep
    subsequent inserts from colliding with the hard-coded ids.
    """
    Gender = app_registry.get_model('hosting', 'Gender')
    Gender.objects.bulk_create([
        Gender(id=1, name_en='Akava\'ine', name='Akava\'ine'),
        Gender(id=2, name_en='bigender', name='ambaŭgenra'),
        Gender(id=3, name_en='androgynous', name='androgena'),
        Gender(id=4, name_en='Baklâ', name='Baklâ'),
        Gender(id=5, name_en='Bissu', name='Bissu'),
        Gender(id=6, name_en='Calabai', name='Calabai'),
        Gender(id=7, name_en='Calalai', name='Calalai'),
        Gender(id=8, name_en='cisgender woman', name='cisgenra virino'),
        Gender(id=9, name_en='cisgender man', name='cisgenra viro'),
        Gender(id=10, name_en='cis woman', name='cis-ino'),
        Gender(id=11, name_en='cis man', name='cis-viro'),
        Gender(id=12, name_en='pangender', name='ĉiugenra'),
        Gender(id=13, name_en='bi-gender', name='dugenra'),
        Gender(id=14, name_en='two-spirit', name='du-spirita'),
        Gender(id=15, name_en='genderfluid', name='fluidgenra'),
        Gender(id=16, name_en='genderqueer', name='genrokvira'),
        Gender(id=17, name_en='gender nonconforming', name='genro-nekonforma'),
        Gender(id=18, name_en='gender neutral', name='genro-neŭtra'),
        Gender(id=19, name_en='gender questioning', name='genro-priduba'),
        Gender(id=20, name_en='gender variant', name='genro-varia'),
        Gender(id=21, name_en='intersex', name='interseksa'),
        Gender(id=22, name_en='other gender', name='ne-difinanta genron'),
        Gender(id=23, name_en='non-binary gender', name='neduumgenra'),
        Gender(id=24, name_en='gender non-conforming', name='ne-laŭanta genron'),
        Gender(id=25, name_en='Neutrois', name='Neutrois'),
        Gender(id=26, name_en='demiwoman', name='partgenre ina'),
        Gender(id=27, name_en='demiman', name='partgenre vira'),
        Gender(id=28, name_en='agender', name='sengenra'),
        Gender(id=29, name_en='trans*', name='trans*'),
        Gender(id=30, name_en='trans*person', name='trans*persono'),
        Gender(id=31, name_en='trans female', name='transfemala'),
        Gender(id=32, name_en='transgender', name='transgenra'),
        Gender(id=33, name_en='transgender woman', name='transgenra virino'),
        Gender(id=34, name_en='transgender man', name='transgenra viro'),
        Gender(id=35, name_en='trans feminine', name='trans-ineca'),
        Gender(id=36, name_en='trans woman', name='trans-ino'),
        Gender(id=37, name_en='trans male', name='transmaskla'),
        Gender(id=38, name_en='transsexual', name='transseksa'),
        Gender(id=39, name_en='trans masculine', name='trans-vireca'),
        Gender(id=40, name_en='trans man', name='trans-viro'),
        Gender(id=41, name_en='Travesti', name='Travesti'),
        Gender(id=42, name_en='third gender', name='tria-genra'),
        Gender(id=43, name_en='third gender (Chhakka)', name='tria-genra (Chhakka)'),
        Gender(id=44, name_en='third gender (Fa\'afafine)', name='tria-genra (Fa\'afafine)'),
        Gender(id=45, name_en='third gender (Hijra)', name='tria-genra (Hijra)'),
        Gender(id=46, name_en='third gender (Kathoey)', name='tria-genra (Kathoey)'),
        Gender(id=47, name_en='third gender (Khanīth)', name='tria-genra (Khanīth)'),
        Gender(id=48, name_en='third gender (Māhū)', name='tria-genra (Māhū)'),
        Gender(id=49, name_en='third gender (Muxhe)', name='tria-genra (Muxhe)'),
        Gender(id=50, name_en='trigender', name='trigenra'),
    ])
    # Explicit pks bypass the auto-increment sequence; reset it so the next
    # INSERT does not reuse an occupied id.
    with connection.cursor() as cursor:
        for sql in connection.ops.sequence_reset_sql(no_style(), [Gender]):
            cursor.execute(sql)
def handle(self, *fixture_labels, **options):
    """Load the named fixtures into the selected database (Python 2 era).

    Searches app ``fixtures/`` directories, FIXTURE_DIRS and absolute paths,
    trying every serializer format and compression type; everything is loaded
    in a single transaction (when ``commit`` is true) so that references
    resolve, and any error rolls the whole load back.
    """
    using = options.get('database', DEFAULT_DB_ALIAS)
    connection = connections[using]
    self.style = no_style()
    verbosity = int(options.get('verbosity', 1))
    show_traceback = options.get('traceback', False)
    # commit is a stealth option - it isn't really useful as
    # a command line option, but it can be useful when invoking
    # loaddata from within another script.
    # If commit=True, loaddata will use its own transaction;
    # if commit=False, the data load SQL will become part of
    # the transaction in place when loaddata was invoked.
    commit = options.get('commit', True)
    # Keep a count of the installed objects and fixtures
    fixture_count = 0
    loaded_object_count = 0
    fixture_object_count = 0
    models = set()
    humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'
    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database (if
    # it isn't already initialized).
    cursor = connection.cursor()
    # Start transaction management. All fixtures are installed in a
    # single transaction to ensure that all references are resolved.
    if commit:
        transaction.commit_unless_managed(using=using)
        transaction.enter_transaction_management(using=using)
        transaction.managed(True, using=using)

    class SingleZipReader(zipfile.ZipFile):
        # Wrapper treating a one-entry zip archive as a plain fixture file.
        def __init__(self, *args, **kwargs):
            zipfile.ZipFile.__init__(self, *args, **kwargs)
            if settings.DEBUG:
                assert len(
                    self.namelist()
                ) == 1, "Zip-compressed fixtures must contain only one file."

        def read(self):
            return zipfile.ZipFile.read(self, self.namelist()[0])

    compression_types = {
        None: open,
        'gz': gzip.GzipFile,
        'zip': SingleZipReader
    }
    if has_bz2:
        compression_types['bz2'] = bz2.BZ2File
    app_module_paths = []
    for app in get_apps():
        if hasattr(app, '__path__'):
            # It's a 'models/' subpackage
            for path in app.__path__:
                app_module_paths.append(path)
        else:
            # It's a models.py module
            app_module_paths.append(app.__file__)
    app_fixtures = [
        os.path.join(os.path.dirname(path), 'fixtures')
        for path in app_module_paths
    ]
    for fixture_label in fixture_labels:
        # A label may carry a trailing compression and/or format suffix,
        # e.g. "users.json.gz".
        parts = fixture_label.split('.')
        if len(parts) > 1 and parts[-1] in compression_types:
            compression_formats = [parts[-1]]
            parts = parts[:-1]
        else:
            compression_formats = compression_types.keys()
        if len(parts) == 1:
            fixture_name = parts[0]
            formats = serializers.get_public_serializer_formats()
        else:
            fixture_name, format = '.'.join(parts[:-1]), parts[-1]
            if format in serializers.get_public_serializer_formats():
                formats = [format]
            else:
                formats = []
        if formats:
            if verbosity >= 2:
                self.stdout.write("Loading '%s' fixtures...\n" %
                                  fixture_name)
        else:
            self.stderr.write(
                self.style.ERROR(
                    "Problem installing fixture '%s': %s is not a known serialization format.\n"
                    % (fixture_name, format)))
            if commit:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            return
        if os.path.isabs(fixture_name):
            fixture_dirs = [fixture_name]
        else:
            fixture_dirs = app_fixtures + list(
                settings.FIXTURE_DIRS) + ['']
        for fixture_dir in fixture_dirs:
            if verbosity >= 2:
                self.stdout.write("Checking %s for fixtures...\n" %
                                  humanize(fixture_dir))
            label_found = False
            # Candidate file names: optional database suffix, then format
            # and compression extensions.
            for combo in product([using, None], formats,
                                 compression_formats):
                database, format, compression_format = combo
                file_name = '.'.join(
                    p for p in
                    [fixture_name, database, format, compression_format]
                    if p)
                if verbosity >= 3:
                    self.stdout.write("Trying %s for %s fixture '%s'...\n" % \
                        (humanize(fixture_dir), file_name, fixture_name))
                full_path = os.path.join(fixture_dir, file_name)
                open_method = compression_types[compression_format]
                try:
                    fixture = open_method(full_path, 'r')
                    if label_found:
                        # Two matching files for the same label is ambiguous.
                        fixture.close()
                        self.stderr.write(
                            self.style.ERROR(
                                "Multiple fixtures named '%s' in %s. Aborting.\n"
                                % (fixture_name, humanize(fixture_dir))))
                        if commit:
                            transaction.rollback(using=using)
                            transaction.leave_transaction_management(
                                using=using)
                        return
                    else:
                        fixture_count += 1
                        objects_in_fixture = 0
                        loaded_objects_in_fixture = 0
                        if verbosity >= 2:
                            self.stdout.write("Installing %s fixture '%s' from %s.\n" % \
                                (format, fixture_name, humanize(fixture_dir)))
                        try:
                            objects = serializers.deserialize(format,
                                                              fixture,
                                                              using=using)
                            with connection.constraint_checks_disabled():
                                for obj in objects:
                                    objects_in_fixture += 1
                                    # Only save objects routed to this db.
                                    if router.allow_syncdb(
                                            using, obj.object.__class__):
                                        loaded_objects_in_fixture += 1
                                        models.add(obj.object.__class__)
                                        obj.save(using=using)
                            # Since we disabled constraint checks, we must manually check for
                            # any invalid keys that might have been added
                            table_names = [
                                model._meta.db_table for model in models
                            ]
                            connection.check_constraints(
                                table_names=table_names)
                            loaded_object_count += loaded_objects_in_fixture
                            fixture_object_count += objects_in_fixture
                            label_found = True
                        except (SystemExit, KeyboardInterrupt):
                            raise
                        except Exception:
                            import traceback
                            fixture.close()
                            if commit:
                                transaction.rollback(using=using)
                                transaction.leave_transaction_management(
                                    using=using)
                            if show_traceback:
                                traceback.print_exc()
                            else:
                                self.stderr.write(
                                    self.style.ERROR(
                                        "Problem installing fixture '%s': %s\n"
                                        % (full_path, ''.join(
                                            traceback.format_exception(
                                                sys.exc_type, sys.exc_value,
                                                sys.exc_traceback)))))
                            return
                        fixture.close()
                        # If the fixture we loaded contains 0 objects, assume that an
                        # error was encountered during fixture loading.
                        if objects_in_fixture == 0:
                            self.stderr.write(
                                self.style.ERROR(
                                    "No fixture data found for '%s'. (File format may be invalid.)\n"
                                    % (fixture_name)))
                            if commit:
                                transaction.rollback(using=using)
                                transaction.leave_transaction_management(
                                    using=using)
                            return
                except Exception, e:
                    # Candidate file did not open — not an error, just try
                    # the next name combination.
                    if verbosity >= 2:
                        self.stdout.write("No %s fixture '%s' in %s.\n" % \
                            (format, fixture_name, humanize(fixture_dir)))
def execute(self, *args, **options):
    """
    Try to execute this command, performing system checks if needed (as
    controlled by attributes ``self.requires_system_checks`` and
    ``self.requires_model_validation``, except if force-skipped).
    """
    # Honour output-control options before doing any work.
    if options.get('no_color'):
        self.style = no_style()
        self.stderr.style_func = None
    if options.get('stdout'):
        self.stdout = OutputWrapper(options['stdout'])
    if options.get('stderr'):
        self.stderr = OutputWrapper(options.get('stderr'),
                                    self.stderr.style_func)
    if self.can_import_settings:
        from django.conf import settings  # NOQA
    saved_locale = None
    if not self.leave_locale_alone:
        # Only mess with locales if we can assume we have a working
        # settings file, because django.utils.translation requires settings
        # (The final saying about whether the i18n machinery is active will be
        # found in the value of the USE_I18N setting)
        if not self.can_import_settings:
            raise CommandError(
                "Incompatible values of 'leave_locale_alone' "
                "(%s) and 'can_import_settings' (%s) command "
                "options." % (self.leave_locale_alone,
                              self.can_import_settings))
        # Switch to US English, because django-admin creates database
        # content like permissions, and those shouldn't contain any
        # translations.
        from django.utils import translation
        saved_locale = translation.get_language()
        translation.activate('en-us')
    try:
        if (self.requires_system_checks and not options.get(
                'skip_validation'
        ) and  # Remove at the end of deprecation for `skip_validation`.
                not options.get('skip_checks')):
            self.check()
        output = self.handle(*args, **options)
        if output:
            if self.output_transaction:
                # This needs to be imported here, because it relies on
                # settings.
                from django.db import connections, DEFAULT_DB_ALIAS
                connection = connections[options.get(
                    'database', DEFAULT_DB_ALIAS)]
                if connection.ops.start_transaction_sql():
                    self.stdout.write(
                        self.style.SQL_KEYWORD(
                            connection.ops.start_transaction_sql()))
            self.stdout.write(output)
            if self.output_transaction:
                self.stdout.write('\n' + self.style.SQL_KEYWORD(
                    connection.ops.end_transaction_sql()))
    finally:
        # Restore the caller's locale even if handle() raised.
        if saved_locale is not None:
            translation.activate(saved_locale)
def test_sql_all(self):
    # sql_all must refuse to run against an app that has migrations.
    config = apps.get_app_config('commands_sql_migrations')
    default_connection = connections[DEFAULT_DB_ALIAS]
    with self.assertRaises(CommandError):
        sql_all(config, no_style(), default_connection)
def handle(self, *fixture_labels, **options):
    """Load the named fixtures into the selected database (Django 1.4 era).

    Same search strategy as loaddata (app fixtures dirs, FIXTURE_DIRS,
    absolute paths; per-database, per-format, per-compression candidates);
    all loading happens with constraint checks disabled, followed by an
    explicit ``check_constraints`` pass and a sequence reset.
    """
    using = options.get('database')
    connection = connections[using]
    if not len(fixture_labels):
        raise CommandError(
            "No database fixture specified. Please provide the path of at "
            "least one fixture in the command line.")
    verbosity = int(options.get('verbosity'))
    show_traceback = options.get('traceback')
    # commit is a stealth option - it isn't really useful as
    # a command line option, but it can be useful when invoking
    # loaddata from within another script.
    # If commit=True, loaddata will use its own transaction;
    # if commit=False, the data load SQL will become part of
    # the transaction in place when loaddata was invoked.
    commit = options.get('commit', True)
    # Keep a count of the installed objects and fixtures
    fixture_count = 0
    loaded_object_count = 0
    fixture_object_count = 0
    models = set()
    humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'
    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database (if
    # it isn't already initialized).
    cursor = connection.cursor()
    # Start transaction management. All fixtures are installed in a
    # single transaction to ensure that all references are resolved.
    if commit:
        transaction.commit_unless_managed(using=using)
        transaction.enter_transaction_management(using=using)
        transaction.managed(True, using=using)

    class SingleZipReader(zipfile.ZipFile):
        # Wrapper treating a one-entry zip archive as a plain fixture file.
        def __init__(self, *args, **kwargs):
            zipfile.ZipFile.__init__(self, *args, **kwargs)
            if settings.DEBUG:
                assert len(
                    self.namelist()
                ) == 1, "Zip-compressed fixtures must contain only one file."

        def read(self):
            return zipfile.ZipFile.read(self, self.namelist()[0])

    compression_types = {
        None: open,
        'gz': gzip.GzipFile,
        'zip': SingleZipReader
    }
    if has_bz2:
        compression_types['bz2'] = bz2.BZ2File
    app_module_paths = []
    for app in get_apps():
        if hasattr(app, '__path__'):
            # It's a 'models/' subpackage
            for path in app.__path__:
                app_module_paths.append(path)
        else:
            # It's a models.py module
            app_module_paths.append(app.__file__)
    app_fixtures = [
        os.path.join(os.path.dirname(path), 'fixtures')
        for path in app_module_paths
    ]
    try:
        # FK checks are off for the whole load; violations are caught by
        # the explicit check_constraints() below.
        with connection.constraint_checks_disabled():
            for fixture_label in fixture_labels:
                parts = fixture_label.split('.')
                if len(parts) > 1 and parts[-1] in compression_types:
                    compression_formats = [parts[-1]]
                    parts = parts[:-1]
                else:
                    compression_formats = compression_types.keys()
                if len(parts) == 1:
                    fixture_name = parts[0]
                    formats = serializers.get_public_serializer_formats()
                else:
                    fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                    if format in serializers.get_public_serializer_formats(
                    ):
                        formats = [format]
                    else:
                        formats = []
                if formats:
                    if verbosity >= 2:
                        self.stdout.write("Loading '%s' fixtures..." %
                                          fixture_name)
                else:
                    raise CommandError(
                        "Problem installing fixture '%s': %s is not a known serialization format."
                        % (fixture_name, format))
                if os.path.isabs(fixture_name):
                    fixture_dirs = [fixture_name]
                else:
                    fixture_dirs = app_fixtures + list(
                        settings.FIXTURE_DIRS) + ['']
                for fixture_dir in fixture_dirs:
                    if verbosity >= 2:
                        self.stdout.write("Checking %s for fixtures..." %
                                          humanize(fixture_dir))
                    label_found = False
                    for combo in product([using, None], formats,
                                         compression_formats):
                        database, format, compression_format = combo
                        file_name = '.'.join(p for p in [
                            fixture_name, database, format,
                            compression_format
                        ] if p)
                        if verbosity >= 3:
                            self.stdout.write("Trying %s for %s fixture '%s'..." % \
                                (humanize(fixture_dir), file_name, fixture_name))
                        full_path = os.path.join(fixture_dir, file_name)
                        open_method = compression_types[compression_format]
                        try:
                            fixture = open_method(full_path, 'r')
                        except IOError:
                            # Candidate name doesn't exist; keep trying.
                            if verbosity >= 2:
                                self.stdout.write("No %s fixture '%s' in %s." % \
                                    (format, fixture_name, humanize(fixture_dir)))
                        else:
                            try:
                                if label_found:
                                    raise CommandError(
                                        "Multiple fixtures named '%s' in %s. Aborting."
                                        % (fixture_name,
                                           humanize(fixture_dir)))
                                fixture_count += 1
                                objects_in_fixture = 0
                                loaded_objects_in_fixture = 0
                                if verbosity >= 2:
                                    self.stdout.write("Installing %s fixture '%s' from %s." % \
                                        (format, fixture_name, humanize(fixture_dir)))
                                objects = serializers.deserialize(
                                    format, fixture, using=using)
                                for obj in objects:
                                    objects_in_fixture += 1
                                    if router.allow_syncdb(
                                            using, obj.object.__class__):
                                        loaded_objects_in_fixture += 1
                                        models.add(obj.object.__class__)
                                        try:
                                            obj.save(using=using)
                                        except (DatabaseError,
                                                IntegrityError) as e:
                                            # Annotate the error with the
                                            # offending object, then re-raise.
                                            e.args = (
                                                "Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s"
                                                % {
                                                    'app_label':
                                                    obj.object._meta.
                                                    app_label,
                                                    'object_name':
                                                    obj.object._meta.
                                                    object_name,
                                                    'pk':
                                                    obj.object.pk,
                                                    'error_msg':
                                                    force_text(e)
                                                }, )
                                            raise
                                loaded_object_count += loaded_objects_in_fixture
                                fixture_object_count += objects_in_fixture
                                label_found = True
                            except Exception as e:
                                if not isinstance(e, CommandError):
                                    e.args = (
                                        "Problem installing fixture '%s': %s"
                                        % (full_path, e), )
                                raise
                            finally:
                                fixture.close()
                            # If the fixture we loaded contains 0 objects, assume that an
                            # error was encountered during fixture loading.
                            if objects_in_fixture == 0:
                                raise CommandError(
                                    "No fixture data found for '%s'. (File format may be invalid.)"
                                    % (fixture_name))
        # Since we disabled constraint checks, we must manually check for
        # any invalid keys that might have been added
        table_names = [model._meta.db_table for model in models]
        try:
            connection.check_constraints(table_names=table_names)
        except Exception as e:
            e.args = ("Problem installing fixtures: %s" % e, )
            raise
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception as e:
        if commit:
            transaction.rollback(using=using)
            transaction.leave_transaction_management(using=using)
        raise
    # If we found even one object in a fixture, we need to reset the
    # database sequences.
    if loaded_object_count > 0:
        sequence_sql = connection.ops.sequence_reset_sql(
            no_style(), models)
        if sequence_sql:
            if verbosity >= 2:
                self.stdout.write("Resetting sequences\n")
            for line in sequence_sql:
                cursor.execute(line)
    if commit:
        transaction.commit(using=using)
        transaction.leave_transaction_management(using=using)
    if verbosity >= 1:
        if fixture_object_count == loaded_object_count:
            self.stdout.write("Installed %d object(s) from %d fixture(s)" %
                              (loaded_object_count, fixture_count))
        else:
            self.stdout.write(
                "Installed %d object(s) (of %d) from %d fixture(s)" %
                (loaded_object_count, fixture_object_count, fixture_count))
    # Close the DB connection. This is required as a workaround for an
    # edge case in MySQL: if the same connection is used to
    # create tables, load data, and query, the query can return
    # incorrect results. See Django #7572, MySQL #37735.
    if commit:
        connection.close()
def handle(self, *fixture_files, **options):
    """Import sitetree fixtures (trees and items) from JSON files.

    In ``append`` mode pks are cleared so rows get fresh ids; a mapping of
    old ids to new ids (``tree_items_new_indexes``) lets parent links be
    rewritten, with a second pass for items saved before their parent.
    ``--into-tree`` redirects all items into an existing tree.
    """
    using = options.get('database', DEFAULT_DB_ALIAS)
    mode = options.get('mode', 'append')
    items_into_tree = options.get('into_tree', None)
    if items_into_tree is not None:
        try:
            items_into_tree = MODEL_TREE_CLASS.objects.get(
                alias=items_into_tree)
        except ObjectDoesNotExist:
            raise CommandError(
                'Target tree alised by `%s` does not exist. Please create it before import.'
                % items_into_tree)
        else:
            # Importing into an existing tree only makes sense appending.
            mode = 'append'
    connection = connections[using]
    cursor = connection.cursor()
    self.style = no_style()
    # Transaction management API changed across Django versions; the
    # VER_LESS_* flags gate the legacy calls.
    if VER_LESS_17:
        transaction.commit_unless_managed(using=using)
    if VER_LESS_18:
        transaction.enter_transaction_management(using=using)
    if VER_LESS_17:
        transaction.managed(True, using=using)
    loaded_object_count = 0
    if mode == 'replace':
        try:
            MODEL_TREE_CLASS.objects.all().delete()
            MODEL_TREE_ITEM_CLASS.objects.all().delete()
        except ObjectDoesNotExist:
            pass
    for fixture_file in fixture_files:
        self.stdout.write('Loading fixture from `%s` ...\n' % fixture_file)
        fixture = open(fixture_file, 'r')
        try:
            objects = serializers.deserialize('json', fixture, using=using)
        except (SystemExit, KeyboardInterrupt):
            raise
        trees = []
        # tree_id -> items; parent_id -> child ids (for the sort below).
        tree_items = defaultdict(list)
        tree_item_parents = defaultdict(list)
        # old item id -> newly assigned id (append mode).
        tree_items_new_indexes = {}
        try:
            allow_migrate = router.allow_migrate
        except AttributeError:
            # Django < 1.7
            allow_migrate = router.allow_syncdb
        for obj in objects:
            if allow_migrate(using, obj.object.__class__):
                if isinstance(obj.object,
                              (MODEL_TREE_CLASS, MODEL_TREE_ITEM_CLASS)):
                    if isinstance(obj.object, MODEL_TREE_CLASS):
                        trees.append(obj.object)
                    else:
                        if items_into_tree is not None:
                            obj.object.tree_id = items_into_tree.id
                        tree_items[obj.object.tree_id].append(obj.object)
                        tree_item_parents[obj.object.parent_id].append(
                            obj.object.id)
        if items_into_tree is not None:
            trees = [items_into_tree,]
        try:
            for tree in trees:
                self.stdout.write('\nImporting tree `%s` ...\n' % tree.alias)
                orig_tree_id = tree.id
                if items_into_tree is None:
                    if mode == 'append':
                        tree.pk = None
                        tree.id = None
                    tree.save(using=using)
                    loaded_object_count += 1
                parents_ahead = []
                # Parents go first: enough for simple cases.
                tree_items[orig_tree_id].sort(
                    key=lambda item: item.id not in tree_item_parents.keys())
                for tree_item in tree_items[orig_tree_id]:
                    parent_ahead = False
                    self.stdout.write('Importing item `%s` ...\n' %
                                      tree_item.title)
                    tree_item.tree_id = tree.id
                    orig_item_id = tree_item.id
                    if mode == 'append':
                        tree_item.pk = None
                        tree_item.id = None
                        if tree_item.id in tree_items_new_indexes:
                            tree_item.pk = tree_item.id = \
                                tree_items_new_indexes[tree_item.id]
                        if tree_item.parent_id is not None:
                            if tree_item.parent_id in tree_items_new_indexes:
                                tree_item.parent_id = \
                                    tree_items_new_indexes[tree_item.parent_id]
                            else:
                                # Parent not imported yet; fix up later.
                                parent_ahead = True
                    tree_item.save(using=using)
                    loaded_object_count += 1
                    if mode == 'append':
                        tree_items_new_indexes[orig_item_id] = tree_item.id
                        if parent_ahead:
                            parents_ahead.append(tree_item)
                # Second pass is necessary for tree items being imported before their parents.
                for tree_item in parents_ahead:
                    tree_item.parent_id = \
                        tree_items_new_indexes[tree_item.parent_id]
                    tree_item.save(using=using)
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception:
            import traceback
            fixture.close()
            if VER_LESS_18:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            self.stderr.write(
                self.style.ERROR('Fixture `%s` import error: %s\n' % (
                    fixture_file,
                    ''.join(traceback.format_exception(*sys.exc_info()))
                ))
            )
        fixture.close()
    # Reset DB sequences, for DBMS with sequences support.
    if loaded_object_count > 0:
        sequence_sql = connection.ops.sequence_reset_sql(
            self.style, [MODEL_TREE_CLASS, MODEL_TREE_ITEM_CLASS])
        if sequence_sql:
            self.stdout.write('Resetting DB sequences ...\n')
            for line in sequence_sql:
                cursor.execute(line)
    if VER_LESS_18:
        transaction.commit(using=using)
        transaction.leave_transaction_management(using=using)
    connection.close()
def get_style():
    """Return Django's uncolored management-output style object."""
    from django.core.management.color import no_style
    plain_style = no_style()
    return plain_style
def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
    """
    Creates the SQL snippet for a column. Used by add_column and add_table.
    """
    # If the field hasn't already been told its attribute name, do so.
    if not field_prepared:
        field.set_attributes_from_name(field_name)
    # hook for the field to do any resolution prior to it's attributes being queried
    if hasattr(field, 'south_init'):
        field.south_init()
    # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
    field = self._field_sanity(field)
    try:
        sql = field.db_type(connection=self._get_connection())
    except TypeError:
        # Older field API without the connection argument.
        sql = field.db_type()
    if sql:
        # Some callers, like the sqlite stuff, just want the extended type.
        if with_name:
            field_output = [self.quote_name(field.column), sql]
        else:
            field_output = [sql]
        field_output.append('%s' % (not field.null and 'NOT NULL ' or ''))
        if field.primary_key:
            field_output.append('PRIMARY KEY')
        elif field.unique:
            # Just use UNIQUE (no indexes any more, we have delete_unique)
            field_output.append('UNIQUE')
        tablespace = field.db_tablespace or tablespace
        if tablespace and getattr(self._get_connection().features,
                                  "supports_tablespaces",
                                  False) and field.unique:
            # We must specify the index tablespace inline, because we
            # won't be generating a CREATE INDEX statement for this field.
            field_output.append(self._get_connection().ops.tablespace_sql(
                tablespace, inline=True))
        sql = ' '.join(field_output)
        sqlparams = ()
        # if the field is "NOT NULL" and a default value is provided, create the column with it
        # this allows the addition of a NOT NULL field to a table with existing rows
        if not getattr(field, '_suppress_default', False):
            if field.has_default():
                default = field.get_default()
                # If the default is actually None, don't add a default term
                if default is not None:
                    # If the default is a callable, then call it!
                    if callable(default):
                        default = default()
                    default = field.get_db_prep_save(
                        default, connection=self._get_connection())
                    default = self._default_value_workaround(default)
                    # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
                    if isinstance(default, string_types):
                        default = "'%s'" % default.replace("'", "''")
                    # Escape any % signs in the output (bug #317)
                    if isinstance(default, string_types):
                        default = default.replace("%", "%%")
                    # Add it in
                    sql += " DEFAULT %s"
                    # NOTE(review): `(default)` is NOT a tuple — this binds the
                    # bare value. `sql % sqlparams` below still works for the
                    # scalar defaults produced above, but `(default,)` would be
                    # the unambiguous form — confirm before changing.
                    sqlparams = (default)
            elif (not field.null
                  and field.blank) or (field.get_default() == ''):
                if field.empty_strings_allowed and self._get_connection(
                ).features.interprets_empty_strings_as_nulls:
                    sql += " DEFAULT ''"
                # Error here would be nice, but doesn't seem to play fair.
                #else:
                #    raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
        if field.rel and self.supports_foreign_keys:
            # FK DDL is deferred so the referenced table can be created first.
            self.add_deferred_sql(
                self.foreign_key_sql(
                    table_name, field.column, field.rel.to._meta.db_table,
                    field.rel.to._meta.get_field(
                        field.rel.field_name).column))
        # Things like the contrib.gis module fields have this in 1.1 and below
        if hasattr(field, 'post_create_sql'):
            for stmt in field.post_create_sql(no_style(), table_name):
                self.add_deferred_sql(stmt)
        # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
        # This also creates normal indexes in 1.1.
        if hasattr(self._get_connection().creation,
                   "sql_indexes_for_field"):
            # Make a fake model to pass in, with only db_table
            model = self.mock_model("FakeModelForGISCreation", table_name)
            for stmt in self._get_connection().creation.sql_indexes_for_field(
                    model, field, no_style()):
                self.add_deferred_sql(stmt)
    if sql:
        return sql % sqlparams
    else:
        return None
def handle(self, *fixture_labels, **options):
    """
    Load the named fixtures into the default database.

    Each label is either a fixture name or 'name.format'. Every candidate
    directory (each app's fixtures/ dir, settings.FIXTURE_DIRS, and '') is
    searched for each serialization format; all objects are installed inside
    a single transaction, which is rolled back on any error.
    """
    from django.db.models import get_apps
    from django.core import serializers
    from django.db import connection, transaction
    from django.conf import settings

    # no_style() must be provided by this module's imports (not visible here).
    self.style = no_style()

    verbosity = int(options.get('verbosity', 1))
    show_traceback = options.get('traceback', False)

    # Keep a count of the installed objects and fixtures.
    fixture_count = 0
    object_count = 0
    models = set()

    humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database (if
    # it isn't already initialized).
    cursor = connection.cursor()

    # Start transaction management. All fixtures are installed in a
    # single transaction to ensure that all references are resolved.
    transaction.commit_unless_managed()
    transaction.enter_transaction_management()
    transaction.managed(True)

    # Candidate fixture directories: each installed app's fixtures/ folder.
    app_fixtures = [
        os.path.join(os.path.dirname(app.__file__), 'fixtures')
        for app in get_apps()
    ]
    for fixture_label in fixture_labels:
        parts = fixture_label.split('.')
        if len(parts) == 1:
            # Bare name: try every registered public serializer format.
            fixture_name = fixture_label
            formats = serializers.get_public_serializer_formats()
        else:
            # 'name.format': restrict to that format (if it is known).
            fixture_name, format = '.'.join(parts[:-1]), parts[-1]
            if format in serializers.get_public_serializer_formats():
                formats = [format]
            else:
                formats = []

        if verbosity >= 2:
            if formats:
                print "Loading '%s' fixtures..." % fixture_name
            else:
                print "Skipping fixture '%s': %s is not a known serialization format" % (
                    fixture_name, format)

        if os.path.isabs(fixture_name):
            fixture_dirs = [fixture_name]
        else:
            # '' at the end allows a path relative to the working directory.
            fixture_dirs = app_fixtures + list(
                settings.FIXTURE_DIRS) + ['']

        for fixture_dir in fixture_dirs:
            if verbosity > 1:
                print "Checking %s for fixtures..." % humanize(fixture_dir)

            label_found = False
            for format in formats:
                serializer = serializers.get_serializer(format)
                if verbosity > 1:
                    print "Trying %s for %s fixture '%s'..." % \
                        (humanize(fixture_dir), format, fixture_name)
                try:
                    full_path = os.path.join(
                        fixture_dir, '.'.join([fixture_name, format]))
                    fixture = open(full_path, 'r')
                    if label_found:
                        # Same fixture name matched twice in one directory:
                        # ambiguous, so abort and roll everything back.
                        fixture.close()
                        print self.style.ERROR(
                            "Multiple fixtures named '%s' in %s. Aborting." %
                            (fixture_name, humanize(fixture_dir)))
                        transaction.rollback()
                        transaction.leave_transaction_management()
                        return
                    else:
                        fixture_count += 1
                        if verbosity > 0:
                            print "Installing %s fixture '%s' from %s." % \
                                (format, fixture_name, humanize(fixture_dir))
                        try:
                            objects = serializers.deserialize(
                                format, fixture)
                            for obj in objects:
                                object_count += 1
                                models.add(obj.object.__class__)
                                obj.save()
                            label_found = True
                        except Exception, e:
                            # Any deserialization/save error aborts the whole
                            # load and rolls back the transaction.
                            fixture.close()
                            transaction.rollback()
                            transaction.leave_transaction_management()
                            if show_traceback:
                                import traceback
                                traceback.print_exc()
                            else:
                                sys.stderr.write(
                                    self.style.ERROR(
                                        "Problem installing fixture '%s': %s\n" %
                                        (full_path, str(e))))
                            return
                        fixture.close()
                except:
                    # NOTE(review): deliberate bare except used as control
                    # flow — the open() above failing means "no fixture with
                    # this name/format here", which is not an error.
                    if verbosity >= 2:
                        print "No %s fixture '%s' in %s." % \
                            (format, fixture_name, humanize(fixture_dir))

    # We set explicit pks from the fixtures, so reset the database sequences
    # if anything was installed.
    if object_count > 0:
        sequence_sql = connection.ops.sequence_reset_sql(
            self.style, models)
        if sequence_sql:
            if verbosity > 1:
                print "Resetting sequences"
            for line in sequence_sql:
                cursor.execute(line)

    transaction.commit()
    transaction.leave_transaction_management()

    if object_count == 0:
        if verbosity >= 2:
            print "No fixtures found."
    else:
        if verbosity > 0:
            print "Installed %d object(s) from %d fixture(s)" % (
                object_count, fixture_count)
def test_index_together(self): connection = connections[DEFAULT_DB_ALIAS] index_sql = connection.creation.sql_indexes_for_model( Article, no_style()) self.assertEqual(len(index_sql), 1)
def handle_noargs(self, **options):
    """
    Flush the selected database: run the SQL produced by sql_flush() inside
    a transaction, emit the post-syncdb signal, and optionally reinstall the
    initial_data fixture. Prompts for confirmation when interactive.
    """
    database = options.get('database')
    connection = connections[database]
    verbosity = int(options.get('verbosity'))
    interactive = options.get('interactive')
    # The following are stealth options used by Django's internals.
    reset_sequences = options.get('reset_sequences', True)
    allow_cascade = options.get('allow_cascade', False)
    inhibit_post_syncdb = options.get('inhibit_post_syncdb', False)

    self.style = no_style()

    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            import_module('.management', app_name)
        except ImportError:
            # Apps without a management module are fine.
            pass

    # SQL that truncates every Django-managed table (and, optionally,
    # resets sequences / cascades to dependent tables).
    sql_list = sql_flush(self.style, connection, only_django=True,
                         reset_sequences=reset_sequences,
                         allow_cascade=allow_cascade)

    if interactive:
        confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
    else:
        confirm = 'yes'

    if confirm == 'yes':
        try:
            # Only use a savepoint when the backend can roll back DDL;
            # otherwise run the statements in a plain atomic block.
            with transaction.atomic(
                    using=database,
                    savepoint=connection.features.can_rollback_ddl):
                cursor = connection.cursor()
                for sql in sql_list:
                    cursor.execute(sql)
        except Exception as e:
            new_msg = (
                "Database %s couldn't be flushed. Possible reasons:\n"
                "  * The database isn't running or isn't configured correctly.\n"
                "  * At least one of the expected database tables doesn't exist.\n"
                "  * The SQL was invalid.\n"
                "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
                "The full error: %s") % (connection.settings_dict['NAME'], e)
            # Re-raise as CommandError while preserving the original traceback.
            six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

        if not inhibit_post_syncdb:
            self.emit_post_syncdb(verbosity, interactive, database)

        # Reinstall the initial_data fixture (unless suppressed by options).
        if options.get('load_initial_data'):
            call_command('loaddata', 'initial_data', **options)
    else:
        self.stdout.write("Flush cancelled.\n")
def handle(self, *fixture_labels, **options):
    """
    Load the named fixtures (optionally gz/zip/bz2-compressed) into the
    database, with forward-reference checks disabled during the load.

    Each label is 'name[.format][.compression]'. All candidate directories
    (each app's fixtures/ dir, settings.FIXTURE_DIRS, and '') are searched;
    objects are installed inside a single transaction (when commit=True)
    which is rolled back on any error.
    """
    from django.db.models import get_apps
    from django.core import serializers
    from django.db import connection, transaction
    from django.conf import settings

    self.style = no_style()

    verbosity = int(options.get('verbosity', 1))
    show_traceback = options.get('traceback', False)

    # commit is a stealth option - it isn't really useful as
    # a command line option, but it can be useful when invoking
    # loaddata from within another script.
    # If commit=True, loaddata will use its own transaction;
    # if commit=False, the data load SQL will become part of
    # the transaction in place when loaddata was invoked.
    commit = options.get('commit', True)

    # Keep a count of the installed objects and fixtures.
    fixture_count = 0
    object_count = 0
    models = set()

    humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database (if
    # it isn't already initialized).
    cursor = connection.cursor()

    # Start transaction management. All fixtures are installed in a
    # single transaction to ensure that all references are resolved.
    if commit:
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)

    self.disable_forward_ref_checks()

    class SingleZipReader(zipfile.ZipFile):
        # Zip wrapper that exposes read() for the single member file.
        def __init__(self, *args, **kwargs):
            zipfile.ZipFile.__init__(self, *args, **kwargs)
            if settings.DEBUG:
                assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."

        def read(self):
            return zipfile.ZipFile.read(self, self.namelist()[0])

    # Map compression suffix -> file-like opener; None means plain file.
    compression_types = {
        None: file,
        'gz': gzip.GzipFile,
        'zip': SingleZipReader
    }
    if has_bz2:
        compression_types['bz2'] = bz2.BZ2File

    app_fixtures = [os.path.join(os.path.dirname(app.__file__), 'fixtures')
                    for app in get_apps()]
    for fixture_label in fixture_labels:
        parts = fixture_label.split('.')

        # Peel off a trailing compression suffix, if present.
        if len(parts) > 1 and parts[-1] in compression_types:
            compression_formats = [parts[-1]]
            parts = parts[:-1]
        else:
            compression_formats = compression_types.keys()

        if len(parts) == 1:
            fixture_name = parts[0]
            formats = serializers.get_public_serializer_formats()
        else:
            fixture_name, format = '.'.join(parts[:-1]), parts[-1]
            if format in serializers.get_public_serializer_formats():
                formats = [format]
            else:
                formats = []

        if formats:
            if verbosity > 1:
                print("Loading '%s' fixtures..." % fixture_name)
        else:
            # Unknown serialization format: abort the whole load.
            self.enable_forward_ref_checks(cursor)
            sys.stderr.write(
                self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format." %
                    (fixture_name, format)))
            transaction.rollback()
            transaction.leave_transaction_management()
            return

        if os.path.isabs(fixture_name):
            fixture_dirs = [fixture_name]
        else:
            # '' at the end allows a path relative to the working directory.
            fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

        for fixture_dir in fixture_dirs:
            if verbosity > 1:
                print("Checking %s for fixtures..." % humanize(fixture_dir))

            label_found = False
            for format in formats:
                for compression_format in compression_formats:
                    if compression_format:
                        file_name = '.'.join([fixture_name, format,
                                              compression_format])
                    else:
                        file_name = '.'.join([fixture_name, format])

                    if verbosity > 1:
                        print("Trying %s for %s fixture '%s'..." % \
                            (humanize(fixture_dir), file_name, fixture_name))
                    full_path = os.path.join(fixture_dir, file_name)
                    open_method = compression_types[compression_format]
                    try:
                        fixture = open_method(full_path, 'r')
                        if label_found:
                            # Same fixture matched twice: ambiguous, abort.
                            fixture.close()
                            self.enable_forward_ref_checks(cursor)
                            print(self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
                                (fixture_name, humanize(fixture_dir))))
                            transaction.rollback()
                            transaction.leave_transaction_management()
                            return
                        else:
                            fixture_count += 1
                            objects_in_fixture = 0
                            if verbosity > 0:
                                print("Installing %s fixture '%s' from %s." % \
                                    (format, fixture_name, humanize(fixture_dir)))
                            try:
                                objects = serializers.deserialize(format, fixture)
                                for obj in objects:
                                    objects_in_fixture += 1
                                    self.handle_ref_checks(cursor, obj)
                                    models.add(obj.object.__class__)
                                    obj.save()
                                object_count += objects_in_fixture
                                label_found = True
                            except (SystemExit, KeyboardInterrupt):
                                # Never swallow user interrupts.
                                self.enable_forward_ref_checks(cursor)
                                raise
                            except Exception:
                                import traceback
                                fixture.close()
                                self.enable_forward_ref_checks(cursor)
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                if show_traceback:
                                    traceback.print_exc()
                                else:
                                    # BUGFIX: sys.exc_type/exc_value/exc_traceback
                                    # are deprecated, not thread-safe, and removed
                                    # in Python 3; use sys.exc_info() instead.
                                    sys.stderr.write(
                                        self.style.ERROR("Problem installing fixture '%s': %s\n" %
                                             (full_path, ''.join(traceback.format_exception(*sys.exc_info())))))
                                return
                            fixture.close()

                            # If the fixture we loaded contains 0 objects, assume that an
                            # error was encountered during fixture loading.
                            if objects_in_fixture == 0:
                                self.enable_forward_ref_checks(cursor)
                                sys.stderr.write(
                                    self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)" %
                                        (fixture_name)))
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                return
                    except Exception as e:
                        # Opening failed: no fixture with this name/format here.
                        if verbosity > 1:
                            print("No %s fixture '%s' in %s." % \
                                (format, fixture_name, humanize(fixture_dir)))

    self.enable_forward_ref_checks(cursor)

    # If we found even one object in a fixture, we need to reset the
    # database sequences.
    if object_count > 0:
        sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
        if sequence_sql:
            if verbosity > 1:
                print("Resetting sequences")
            for line in sequence_sql:
                cursor.execute(line)

    if commit:
        transaction.commit()
        transaction.leave_transaction_management()

    if object_count == 0:
        if verbosity > 1:
            print("No fixtures found.")
    else:
        if verbosity > 0:
            print("Installed %d object(s) from %d fixture(s)" %
                  (object_count, fixture_count))

    # Close the DB connection. This is required as a workaround for an
    # edge case in MySQL: if the same connection is used to
    # create tables, load data, and query, the query can return
    # incorrect results. See Django #7572, MySQL #37735.
    if commit:
        connection.close()