def handle(self, *args, **options):
    if len(args) > 1:
        raise CommandError("need exactly one or zero arguments for username")
    if args:
        username, = args
    else:
        username = getpass.getuser()

    try:
        u = User.objects.using(options.get('database')).get(username=username)
    except User.DoesNotExist:
        raise CommandError("user '%s' does not exist" % username)

    self.stdout.write("Changing password for user '%s'\n" % u.username)

    MAX_TRIES = 3
    count = 0
    p1, p2 = 1, 2  # To make them initially mismatch.
    while p1 != p2 and count < MAX_TRIES:
        p1 = self._get_pass()
        p2 = self._get_pass("Password (again): ")
        if p1 != p2:
            self.stdout.write("Passwords do not match. Please try again.\n")
            count = count + 1

    if count == MAX_TRIES:
        raise CommandError("Aborting password change for user '%s' after %s attempts" %
                           (username, count))

    u.set_password(p1)
    u.save()

    return "Password changed successfully for user '%s'" % u.username
def handle(self, app_or_project, name, target=None, **options):
    self.app_or_project = app_or_project
    self.paths_to_remove = []
    self.verbosity = int(options.get('verbosity'))

    # If it's not a valid directory name.
    if not re.search(r'^[_a-zA-Z]\w*$', name):
        # Provide a smart error message, depending on the error.
        if not re.search(r'^[_a-zA-Z]', name):
            message = ('make sure the name begins '
                       'with a letter or underscore')
        else:
            message = 'use only numbers, letters and underscores'
        raise CommandError("%r is not a valid %s name. Please %s." %
                           (name, app_or_project, message))

    # if some directory is given, make sure it's nicely expanded
    if target is None:
        top_dir = path.join(os.getcwd(), name)
        try:
            os.makedirs(top_dir)
        except OSError, e:
            if e.errno == errno.EEXIST:
                message = "'%s' already exists" % top_dir
            else:
                message = e
            raise CommandError(message)
def handle(self, addrport='', *args, **options):
    self.use_ipv6 = options.get('use_ipv6')
    if self.use_ipv6 and not socket.has_ipv6:
        raise CommandError('Your Python does not support IPv6.')
    if args:
        raise CommandError('Usage is runserver %s' % self.args)
    self._raw_ipv6 = False
    if not addrport:
        self.addr = ''
        self.port = DEFAULT_PORT
    else:
        m = re.match(naiveip_re, addrport)
        if m is None:
            raise CommandError('"%s" is not a valid port number '
                               'or address:port pair.' % addrport)
        self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
        if not self.port.isdigit():
            raise CommandError("%r is not a valid port number." % self.port)
        if self.addr:
            if _ipv6:
                self.addr = self.addr[1:-1]
                self.use_ipv6 = True
                self._raw_ipv6 = True
            elif self.use_ipv6 and not _fqdn:
                raise CommandError('"%s" is not a valid IPv6 address.' % self.addr)
    if not self.addr:
        self.addr = self.use_ipv6 and '::1' or '127.0.0.1'
        self._raw_ipv6 = bool(self.use_ipv6)
    self.run(*args, **options)
def collect(self):
    """
    Perform the bulk of the work of collectstatic.

    Split off from handle_noargs() to facilitate testing.
    """
    if self.symlink:
        if sys.platform == 'win32':
            raise CommandError("Symlinking is not supported by this "
                               "platform (%s)." % sys.platform)
        if not self.local:
            raise CommandError("Can't symlink to a remote destination.")

    if self.clear:
        self.clear_dir('')

    if self.symlink:
        handler = self.link_file
    else:
        handler = self.copy_file

    found_files = SortedDict()
    for finder in finders.get_finders():
        for path, storage in finder.list(self.ignore_patterns):
            # Prefix the relative path if the source storage contains it
            if getattr(storage, 'prefix', None):
                prefixed_path = os.path.join(storage.prefix, path)
            else:
                prefixed_path = path
            if prefixed_path not in found_files:
                found_files[prefixed_path] = (storage, path)
                handler(path, prefixed_path, storage)

    # Here we check if the storage backend has a post_process
    # method and pass it the list of modified files.
    if self.post_process and hasattr(self.storage, 'post_process'):
        processor = self.storage.post_process(found_files,
                                              dry_run=self.dry_run)
        for original_path, processed_path, processed in processor:
            if processed:
                self.log(u"Post-processed '%s' as '%s'" %
                         (original_path, processed_path), level=1)
                self.post_processed_files.append(original_path)
            else:
                self.log(u"Skipped post-processing '%s'" % original_path)

    return {
        'modified': self.copied_files + self.symlinked_files,
        'unmodified': self.unmodified_files,
        'post_processed': self.post_processed_files,
    }
def handle(self, app_name=None, target=None, **options):
    if app_name is None:
        raise CommandError("you must provide an app name")

    # Check that the app_name cannot be imported.
    try:
        import_module(app_name)
    except ImportError:
        pass
    else:
        raise CommandError("%r conflicts with the name of an existing "
                           "Python module and cannot be used as an app "
                           "name. Please try another name." % app_name)

    super(Command, self).handle('app', app_name, target, **options)
def download(self, url):
    """
    Downloads the given URL and returns the file name.
    """
    def cleanup_url(url):
        tmp = url.rstrip('/')
        filename = tmp.split('/')[-1]
        if url.endswith('/'):
            display_url = tmp + '/'
        else:
            display_url = url
        return filename, display_url

    prefix = 'django_%s_template_' % self.app_or_project
    tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download')
    self.paths_to_remove.append(tempdir)
    filename, display_url = cleanup_url(url)

    if self.verbosity >= 2:
        self.stdout.write("Downloading %s\n" % display_url)
    try:
        the_path, info = urllib.urlretrieve(url, path.join(tempdir, filename))
    except IOError, e:
        raise CommandError("couldn't download URL %s to %s: %s" %
                           (url, filename, e))
def call_command(name, *args, **options):
    """
    Calls the given command, with the given options and args/kwargs.

    This is the primary API you should use for calling specific commands.

    Some examples:
        call_command('syncdb')
        call_command('shell', plain=True)
        call_command('sqlall', 'myapp')
    """
    # Load the command object.
    try:
        app_name = get_commands()[name]
        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            klass = app_name
        else:
            klass = load_command_class(app_name, name)
    except KeyError:
        raise CommandError("Unknown command: %r" % name)

    # Grab out a list of defaults from the options. optparse does this for us
    # when the script runs from the command line, but since call_command can
    # be called programmatically, we need to simulate the loading and handling
    # of defaults (see #10080 for details).
    defaults = {}
    for opt in klass.option_list:
        if opt.default is NO_DEFAULT:
            defaults[opt.dest] = None
        else:
            defaults[opt.dest] = opt.default
    defaults.update(options)

    return klass.execute(*args, **defaults)
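As a brief illustration of the defaults handling described in the comment above, here is a hedged sketch of programmatic use; the command names and option values are only examples and assume a configured settings module for this Django tree.

from my_django.core.management import call_command

# Options the caller omits (e.g. verbosity) are filled in from each
# command's option_list defaults, just as optparse would supply them on
# the command line -- the point of the #10080 handling above.
call_command('syncdb', interactive=False)
call_command('sqlall', 'myapp', database='default')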
def handle_template(self, template, subdir):
    """
    Determines where the app or project templates are.

    Use my_django.__path__[0] as the default because we don't know into
    which directory Django has been installed.
    """
    if template is None:
        return path.join(my_django.__path__[0], 'conf', subdir)
    else:
        if template.startswith('file://'):
            template = template[7:]
        expanded_template = path.expanduser(template)
        expanded_template = path.normpath(expanded_template)
        if path.isdir(expanded_template):
            return expanded_template
        if self.is_url(template):
            # downloads the file and returns the path
            absolute_path = self.download(template)
        else:
            absolute_path = path.abspath(expanded_template)
        if path.exists(absolute_path):
            return self.extract(absolute_path)

    raise CommandError("couldn't handle %s template %s." %
                       (self.app_or_project, template))
def handle_noargs(self, **options):
    try:
        for line in self.handle_inspection(options):
            self.stdout.write("%s\n" % line)
    except NotImplementedError:
        raise CommandError("Database inspection isn't supported for the "
                           "currently selected database backend.")
def handle(self, *args, **options):
    try:
        data_source, model_name = args
    except ValueError:
        raise CommandError('Invalid arguments, must provide: %s' % self.args)

    if not gdal.HAS_GDAL:
        raise CommandError('GDAL is required to inspect geospatial data sources.')

    # Removing options with `None` values.
    options = dict([(k, v) for k, v in options.items() if not v is None])

    # Getting the OGR DataSource from the string parameter.
    try:
        ds = gdal.DataSource(data_source)
    except gdal.OGRException, msg:
        raise CommandError(msg)
def handle(self, *args, **options):
    if len(args) != 1:
        raise CommandError(
            'One argument is required -- the index type: "all" or "unread"')
    index_type = args[0]
    if index_type not in ('all', 'unread'):
        raise CommandError(
            u'Unknown index type: "%s", must be "all" or "unread"' % index_type)

    from cicero.models import Topic, Article, Profile

    cicero_search = Profile.objects.get(user__username='******')
    if index_type == 'unread':
        for topic in cicero_search.unread_topics():
            print format_document(topic_dict(topic))
    elif index_type == 'all':
        for topic in Topic.objects.all():
            print format_document(topic_dict(topic))
        cicero_search.add_read_articles(Article.objects.all())
        cicero_search.save()
def write_po_file(pofile, potfile, domain, locale, verbosity, stdout,
                  copy_pforms, wrap, location, no_obsolete):
    """
    Creates or updates the :param pofile: PO file for :param domain: and
    :param locale:.  Uses contents of the existing :param potfile:.

    Uses msguniq, msgmerge, and msgattrib GNU gettext utilities.
    """
    msgs, errors = _popen('msguniq %s %s --to-code=utf-8 "%s"' %
                          (wrap, location, potfile))
    if errors:
        os.unlink(potfile)
        raise CommandError("errors happened while running msguniq\n%s" % errors)

    if os.path.exists(pofile):
        f = open(potfile, 'w')
        try:
            f.write(msgs)
        finally:
            f.close()
        msgs, errors = _popen('msgmerge %s %s -q "%s" "%s"' %
                              (wrap, location, pofile, potfile))
        if errors:
            os.unlink(potfile)
            raise CommandError("errors happened while running msgmerge\n%s" % errors)
    elif copy_pforms:
        msgs = copy_plural_forms(msgs, locale, domain, verbosity, stdout)
    msgs = msgs.replace(
        "#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % domain, "")
    f = open(pofile, 'wb')
    try:
        f.write(msgs)
    finally:
        f.close()
    os.unlink(potfile)
    if no_obsolete:
        msgs, errors = _popen('msgattrib %s %s -o "%s" --no-obsolete "%s"' %
                              (wrap, location, pofile, pofile))
        if errors:
            raise CommandError("errors happened while running msgattrib\n%s" % errors)
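A hedged usage sketch for write_po_file(), grounded only in the signature above; the paths are placeholders, and wrap/location take the same '--no-wrap'/'--no-location'-or-empty values its caller computes.

import sys

# Placeholder paths; assumes the msguniq/msgmerge/msgattrib binaries are
# on PATH and that the .pot file was already produced by xgettext.
write_po_file(pofile='locale/de/LC_MESSAGES/my_django.po',
              potfile='locale/de/LC_MESSAGES/my_django.pot',
              domain='my_django', locale='de', verbosity=1, stdout=sys.stdout,
              copy_pforms=True, wrap='', location='', no_obsolete=False)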
def handle(self, project_name=None, target=None, *args, **options):
    if project_name is None:
        raise CommandError("you must provide a project name")

    # Check that the project_name cannot be imported.
    try:
        import_module(project_name)
    except ImportError:
        pass
    else:
        raise CommandError("%r conflicts with the name of an existing "
                           "Python module and cannot be used as a "
                           "project name. Please try another name." % project_name)

    # Create a random SECRET_KEY hash to put it in the main settings.
    chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
    options['secret_key'] = get_random_string(50, chars)

    super(Command, self).handle('project', project_name, target, **options)
def handle(self, **options):
    connection = connections[options.get('database')]
    try:
        connection.client.runshell()
    except OSError:
        # Note that we're assuming OSError means that the client program
        # isn't installed. There's a possibility OSError would be raised
        # for some other reason, in which case this error message would be
        # inaccurate. Still, this message catches the common case.
        raise CommandError('You appear not to have the %r program installed or on your path.' %
                           connection.client.executable_name)
def handle_noargs(self, **options):
    self.set_options(**options)
    # Warn before doing anything more.
    if (isinstance(self.storage, FileSystemStorage) and
            self.storage.location):
        destination_path = self.storage.location
        destination_display = ':\n\n %s' % destination_path
    else:
        destination_path = None
        destination_display = '.'

    if self.clear:
        clear_display = 'This will DELETE EXISTING FILES!'
    else:
        clear_display = 'This will overwrite existing files!'

    if self.interactive:
        confirm = raw_input(u"""
You have requested to collect static files at the destination
location as specified in your settings%s

%s

Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """ % (destination_display, clear_display))
        if confirm != 'yes':
            raise CommandError("Collecting static files cancelled.")

    collected = self.collect()
    modified_count = len(collected['modified'])
    unmodified_count = len(collected['unmodified'])
    post_processed_count = len(collected['post_processed'])

    if self.verbosity >= 1:
        template = ("\n%(modified_count)s %(identifier)s %(action)s"
                    "%(destination)s%(unmodified)s%(post_processed)s.\n")
        summary = template % {
            'modified_count': modified_count,
            'identifier': 'static file' + (modified_count != 1 and 's' or ''),
            'action': self.symlink and 'symlinked' or 'copied',
            'destination': (destination_path and
                            " to '%s'" % destination_path or ''),
            'unmodified': (collected['unmodified'] and
                           ', %s unmodified' % unmodified_count or ''),
            'post_processed': (collected['post_processed'] and
                               ', %s post-processed' % post_processed_count or ''),
        }
        self.stdout.write(smart_str(summary))
def compile_messages(stderr, locale=None):
    basedirs = [os.path.join('conf', 'locale'), 'locale']
    if os.environ.get('DJANGO_SETTINGS_MODULE'):
        from my_django.conf import settings
        basedirs.extend(settings.LOCALE_PATHS)

    # Gather existing directories.
    basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))

    if not basedirs:
        raise CommandError(
            "This script should be run from the Django SVN tree or your "
            "project or app tree, or with the settings module specified.")

    for basedir in basedirs:
        if locale:
            basedir = os.path.join(basedir, locale, 'LC_MESSAGES')
        for dirpath, dirnames, filenames in os.walk(basedir):
            for f in filenames:
                if f.endswith('.po'):
                    stderr.write('processing file %s in %s\n' % (f, dirpath))
                    fn = os.path.join(dirpath, f)
                    if has_bom(fn):
                        raise CommandError(
                            "The %s file has a BOM (Byte Order Mark). Django "
                            "only supports .po files encoded in UTF-8 and "
                            "without any BOM." % fn)
                    pf = os.path.splitext(fn)[0]
                    # Store the names of the .mo and .po files in an environment
                    # variable, rather than doing a string replacement into the
                    # command, so that we can take advantage of shell quoting, to
                    # quote any malicious characters/escaping.
                    # See http://cyberelk.net/tim/articles/cmdline/ar01s02.html
                    os.environ['djangocompilemo'] = pf + '.mo'
                    os.environ['djangocompilepo'] = pf + '.po'
                    if sys.platform == 'win32':
                        # Different shell-variable syntax
                        cmd = 'msgfmt --check-format -o "%djangocompilemo%" "%djangocompilepo%"'
                    else:
                        cmd = 'msgfmt --check-format -o "$djangocompilemo" "$djangocompilepo"'
                    os.system(cmd)
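A minimal, hedged sketch of calling compile_messages() directly, based only on the signature above; 'de' is a placeholder locale, msgfmt is assumed to be installed, and the current directory is assumed to contain a locale/ (or conf/locale/) tree.

import sys

# Compile the .po catalogs for a single locale; pass locale=None to
# walk every locale directory found under basedirs.
compile_messages(sys.stderr, locale='de')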
def extract(self, filename):
    """
    Extracts the given file to a temporary directory and returns
    the path of the directory with the extracted content.
    """
    prefix = 'django_%s_template_' % self.app_or_project
    tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract')
    self.paths_to_remove.append(tempdir)
    if self.verbosity >= 2:
        self.stdout.write("Extracting %s\n" % filename)
    try:
        archive.extract(filename, tempdir)
        return tempdir
    except (archive.ArchiveException, IOError), e:
        raise CommandError("couldn't extract file %s to %s: %s" %
                           (filename, tempdir, e))
def handle_app(self, app, **options):
    # This command breaks a lot and should be deprecated
    import warnings
    warnings.warn(
        'This command has been deprecated. The command ``flush`` can be used '
        'to delete everything. You can also use ALTER TABLE or DROP TABLE '
        'statements manually.',
        DeprecationWarning)
    using = options.get('database')
    connection = connections[using]
    app_name = app.__name__.split('.')[-2]
    self.style = no_style()

    sql_list = sql_reset(app, self.style, connection)

    if options.get('interactive'):
        confirm = raw_input("""
You have requested a database reset.
This will IRREVERSIBLY DESTROY any data for
the "%s" application in the database "%s".
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """ % (app_name, connection.settings_dict['NAME']))
    else:
        confirm = 'yes'

    if confirm == 'yes':
        try:
            cursor = connection.cursor()
            for sql in sql_list:
                cursor.execute(sql)
        except Exception, e:
            transaction.rollback_unless_managed()
            raise CommandError("""Error: %s couldn't be reset. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlreset %s'. That's the SQL this command wasn't able to run.
The full error: %s""" % (app_name, app_name, e))
        transaction.commit_unless_managed()
def handle_noargs(self, **options):
    db = options.get('database')
    connection = connections[db]
    verbosity = int(options.get('verbosity'))
    interactive = options.get('interactive')

    self.style = no_style()

    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            import_module('.management', app_name)
        except ImportError:
            pass

    sql_list = sql_flush(self.style, connection, only_django=True)

    if interactive:
        confirm = raw_input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
    else:
        confirm = 'yes'

    if confirm == 'yes':
        try:
            cursor = connection.cursor()
            for sql in sql_list:
                cursor.execute(sql)
        except Exception, e:
            transaction.rollback_unless_managed(using=db)
            raise CommandError("""Database %s couldn't be flushed. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the expected database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
The full error: %s""" % (connection.settings_dict['NAME'], e))
        transaction.commit_unless_managed(using=db)

        # Emit the post sync signal. This allows individual
        # applications to respond as if the database had been
        # sync'd from scratch.
        all_models = []
        for app in models.get_apps():
            all_models.extend([
                m for m in models.get_models(app, include_auto_created=True)
                if router.allow_syncdb(db, m)
            ])
        emit_post_sync_signal(set(all_models), verbosity, interactive, db)

        # Reinstall the initial_data fixture.
        kwargs = options.copy()
        kwargs['database'] = db
        call_command('loaddata', 'initial_data', **kwargs)
# if some directory is given, make sure it's nicely expanded
if target is None:
    top_dir = path.join(os.getcwd(), name)
    try:
        os.makedirs(top_dir)
    except OSError, e:
        if e.errno == errno.EEXIST:
            message = "'%s' already exists" % top_dir
        else:
            message = e
        raise CommandError(message)
else:
    top_dir = os.path.abspath(path.expanduser(target))
    if not os.path.exists(top_dir):
        raise CommandError("Destination directory '%s' does not "
                           "exist, please create it first." % top_dir)

extensions = tuple(handle_extensions(options.get('extensions'), ignored=()))
extra_files = []
for file in options.get('files'):
    extra_files.extend(map(lambda x: x.strip(), file.split(',')))
if self.verbosity >= 2:
    self.stdout.write("Rendering %s template files with "
                      "extensions: %s\n" %
                      (app_or_project, ', '.join(extensions)))
    self.stdout.write("Rendering %s template files with "
                      "filenames: %s\n" %
                      (app_or_project, ', '.join(extra_files)))

base_name = '%s_name' % app_or_project
def make_messages(locale=None, domain='my_django', verbosity=1, all=False,
                  extensions=None, symlinks=False, ignore_patterns=None,
                  no_wrap=False, no_location=False, no_obsolete=False,
                  stdout=sys.stdout):
    """
    Uses the ``locale/`` directory from the Django SVN tree or an
    application/project to process all files with translatable literals for
    the :param domain: domain and :param locale: locale.
    """
    # Need to ensure that the i18n framework is enabled
    from my_django.conf import settings
    if settings.configured:
        settings.USE_I18N = True
    else:
        settings.configure(USE_I18N=True)

    if ignore_patterns is None:
        ignore_patterns = []

    invoked_for_django = False
    if os.path.isdir(os.path.join('conf', 'locale')):
        localedir = os.path.abspath(os.path.join('conf', 'locale'))
        invoked_for_django = True
        # Ignoring all contrib apps
        ignore_patterns += ['contrib/*']
    elif os.path.isdir('locale'):
        localedir = os.path.abspath('locale')
    else:
        raise CommandError(
            "This script should be run from the Django SVN "
            "tree or your project or app tree. If you did indeed run it "
            "from the SVN checkout or your project or application, "
            "maybe you are just missing the conf/locale (in the django "
            "tree) or locale (for project and application) directory? It "
            "is not created automatically, you have to create it by hand "
            "if you want to enable i18n for your project or application.")

    if domain not in ('my_django', 'djangojs'):
        raise CommandError(
            "currently makemessages only supports domains 'my_django' and 'djangojs'")

    if (locale is None and not all) or domain is None:
        message = "Type '%s help %s' for usage information." % (
            os.path.basename(sys.argv[0]), sys.argv[1])
        raise CommandError(message)

    # We require gettext version 0.15 or newer.
    output = _popen('xgettext --version')[0]
    match = re.search(r'(?P<major>\d+)\.(?P<minor>\d+)', output)
    if match:
        xversion = (int(match.group('major')), int(match.group('minor')))
        if xversion < (0, 15):
            raise CommandError(
                "Django internationalization requires GNU "
                "gettext 0.15 or newer. You are using version %s, please "
                "upgrade your gettext toolset." % match.group())

    locales = []
    if locale is not None:
        locales.append(locale)
    elif all:
        locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % localedir))
        locales = [os.path.basename(l) for l in locale_dirs]

    wrap = '--no-wrap' if no_wrap else ''
    location = '--no-location' if no_location else ''

    for locale in locales:
        if verbosity > 0:
            stdout.write("processing language %s\n" % locale)
        basedir = os.path.join(localedir, locale, 'LC_MESSAGES')
        if not os.path.isdir(basedir):
            os.makedirs(basedir)

        pofile = os.path.join(basedir, '%s.po' % domain)
        potfile = os.path.join(basedir, '%s.pot' % domain)

        if os.path.exists(potfile):
            os.unlink(potfile)

        for dirpath, file in find_files(".", ignore_patterns, verbosity,
                                        stdout, symlinks=symlinks):
            process_file(file, dirpath, potfile, domain, verbosity,
                         extensions, wrap, location, stdout)

        if os.path.exists(potfile):
            write_po_file(pofile, potfile, domain, locale, verbosity, stdout,
                          not invoked_for_django, wrap, location, no_obsolete)
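A hedged example of invoking make_messages() directly with explicit keyword arguments, matching the signature above; every value is a placeholder, and the call assumes it runs from a project or app root that already contains a locale/ directory and has xgettext installed.

import sys

make_messages(locale='de', domain='my_django', verbosity=2,
              extensions=['.html', '.txt'], symlinks=False,
              ignore_patterns=['env/*'], no_wrap=False,
              no_location=False, no_obsolete=True, stdout=sys.stdout)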
def process_file(file, dirpath, potfile, domain, verbosity,
                 extensions, wrap, location, stdout=sys.stdout):
    """
    Extract translatable literals from :param file: for :param domain:,
    creating or updating the :param potfile: POT file.

    Uses the xgettext GNU gettext utility.
    """
    from my_django.utils.translation import templatize

    if verbosity > 1:
        stdout.write('processing file %s in %s\n' % (file, dirpath))
    _, file_ext = os.path.splitext(file)
    if domain == 'djangojs' and file_ext in extensions:
        is_templatized = True
        orig_file = os.path.join(dirpath, file)
        src_data = open(orig_file).read()
        src_data = prepare_js_for_gettext(src_data)
        thefile = '%s.c' % file
        work_file = os.path.join(dirpath, thefile)
        f = open(work_file, "w")
        try:
            f.write(src_data)
        finally:
            f.close()
        cmd = ('xgettext -d %s -L C %s %s --keyword=gettext_noop '
               '--keyword=gettext_lazy --keyword=ngettext_lazy:1,2 '
               '--keyword=pgettext:1c,2 --keyword=npgettext:1c,2,3 '
               '--from-code UTF-8 --add-comments=Translators -o - "%s"' %
               (domain, wrap, location, work_file))
    elif domain == 'my_django' and (file_ext == '.py' or file_ext in extensions):
        thefile = file
        orig_file = os.path.join(dirpath, file)
        is_templatized = file_ext in extensions
        if is_templatized:
            src_data = open(orig_file, "rU").read()
            thefile = '%s.py' % file
            content = templatize(src_data, orig_file[2:])
            f = open(os.path.join(dirpath, thefile), "w")
            try:
                f.write(content)
            finally:
                f.close()
        work_file = os.path.join(dirpath, thefile)
        cmd = ('xgettext -d %s -L Python %s %s --keyword=gettext_noop '
               '--keyword=gettext_lazy --keyword=ngettext_lazy:1,2 '
               '--keyword=ugettext_noop --keyword=ugettext_lazy '
               '--keyword=ungettext_lazy:1,2 --keyword=pgettext:1c,2 '
               '--keyword=npgettext:1c,2,3 --keyword=pgettext_lazy:1c,2 '
               '--keyword=npgettext_lazy:1c,2,3 --from-code UTF-8 '
               '--add-comments=Translators -o - "%s"' %
               (domain, wrap, location, work_file))
    else:
        return
    msgs, errors = _popen(cmd)
    if errors:
        if is_templatized:
            os.unlink(work_file)
        if os.path.exists(potfile):
            os.unlink(potfile)
        raise CommandError("errors happened while running xgettext on %s\n%s" %
                           (file, errors))
    if msgs:
        write_pot_file(potfile, msgs, orig_file, work_file, is_templatized)
    if is_templatized:
        os.unlink(work_file)
def _get_pass(self, prompt="Password: "):
    # Prompt without echoing; an empty response aborts the operation.
    p = getpass.getpass(prompt=prompt)
    if not p:
        raise CommandError("aborted")
    return p
def handle(self, *app_labels, **options):
    from my_django.db.models import get_app, get_apps, get_model

    format = options.get('format')
    indent = options.get('indent')
    using = options.get('database')
    excludes = options.get('exclude')
    show_traceback = options.get('traceback')
    use_natural_keys = options.get('use_natural_keys')
    use_base_manager = options.get('use_base_manager')

    excluded_apps = set()
    excluded_models = set()
    for exclude in excludes:
        if '.' in exclude:
            app_label, model_name = exclude.split('.', 1)
            model_obj = get_model(app_label, model_name)
            if not model_obj:
                raise CommandError('Unknown model in excludes: %s' % exclude)
            excluded_models.add(model_obj)
        else:
            try:
                app_obj = get_app(exclude)
                excluded_apps.add(app_obj)
            except ImproperlyConfigured:
                raise CommandError('Unknown app in excludes: %s' % exclude)

    if len(app_labels) == 0:
        app_list = SortedDict((app, None) for app in get_apps()
                              if app not in excluded_apps)
    else:
        app_list = SortedDict()
        for label in app_labels:
            try:
                app_label, model_label = label.split('.')
                try:
                    app = get_app(app_label)
                except ImproperlyConfigured:
                    raise CommandError("Unknown application: %s" % app_label)
                if app in excluded_apps:
                    continue
                model = get_model(app_label, model_label)
                if model is None:
                    raise CommandError("Unknown model: %s.%s" %
                                       (app_label, model_label))

                if app in app_list.keys():
                    if app_list[app] and model not in app_list[app]:
                        app_list[app].append(model)
                else:
                    app_list[app] = [model]
            except ValueError:
                # This is just an app - no model qualifier
                app_label = label
                try:
                    app = get_app(app_label)
                except ImproperlyConfigured:
                    raise CommandError("Unknown application: %s" % app_label)
                if app in excluded_apps:
                    continue
                app_list[app] = None

    # Check that the serialization format exists; this is a shortcut to
    # avoid collating all the objects and _then_ failing.
    if format not in serializers.get_public_serializer_formats():
        raise CommandError("Unknown serialization format: %s" % format)

    try:
        serializers.get_serializer(format)
    except KeyError:
        raise CommandError("Unknown serialization format: %s" % format)

    # Now collate the objects to be serialized.
    objects = []
    for model in sort_dependencies(app_list.items()):
        if model in excluded_models:
            continue
        if not model._meta.proxy and router.allow_syncdb(using, model):
            if use_base_manager:
                objects.extend(model._base_manager.using(using).all())
            else:
                objects.extend(model._default_manager.using(using).all())

    try:
        return serializers.serialize(format, objects, indent=indent,
                                     use_natural_keys=use_natural_keys)
    except Exception, e:
        if show_traceback:
            raise
        raise CommandError("Unable to serialize database: %s" % e)
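For context, a hedged sketch of how this handler is typically reached through call_command; the app and model labels are hypothetical, and the keyword names mirror the options read at the top of handle().

from my_django.core.management import call_command

# An exclude entry containing a dot ('otherapp.SomeModel') removes a single
# model; one without a dot ('sessions') removes the whole app, per the
# parsing above. Labels here are placeholders.
call_command('dumpdata', 'myapp.MyModel', format='json', indent=2,
             exclude=['sessions', 'otherapp.SomeModel'],
             use_natural_keys=True)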
def sort_dependencies(app_list):
    """Sort a list of app,modellist pairs into a single list of models.

    The single list of models is sorted so that any model with a natural key
    is serialized before a normal model, and any model with a natural key
    dependency has its dependencies serialized first.
    """
    from my_django.db.models import get_model, get_models

    # Process the list of models, and get the list of dependencies
    model_dependencies = []
    models = set()
    for app, model_list in app_list:
        if model_list is None:
            model_list = get_models(app)

        for model in model_list:
            models.add(model)

            # Add any explicitly defined dependencies
            if hasattr(model, 'natural_key'):
                deps = getattr(model.natural_key, 'dependencies', [])
                if deps:
                    deps = [get_model(*d.split('.')) for d in deps]
            else:
                deps = []

            # Now add a dependency for any FK or M2M relation with
            # a model that defines a natural key
            for field in model._meta.fields:
                if hasattr(field.rel, 'to'):
                    rel_model = field.rel.to
                    if hasattr(rel_model, 'natural_key'):
                        deps.append(rel_model)
            for field in model._meta.many_to_many:
                rel_model = field.rel.to
                if hasattr(rel_model, 'natural_key'):
                    deps.append(rel_model)
            model_dependencies.append((model, deps))

    model_dependencies.reverse()

    # Now sort the models to ensure that dependencies are met. This
    # is done by repeatedly iterating over the input list of models.
    # If all the dependencies of a given model are in the final list,
    # that model is promoted to the end of the final list. This process
    # continues until the input list is empty, or we do a full iteration
    # over the input models without promoting a model to the final list.
    # If we do a full iteration without a promotion, that means there are
    # circular dependencies in the list.
    model_list = []
    while model_dependencies:
        skipped = []
        changed = False
        while model_dependencies:
            model, deps = model_dependencies.pop()

            # If all of the models in the dependency list are either already
            # on the final model list, or not on the original serialization
            # list, then we've found another model with all its dependencies
            # satisfied.
            found = True
            for candidate in ((d not in models or d in model_list) for d in deps):
                if not candidate:
                    found = False
            if found:
                model_list.append(model)
                changed = True
            else:
                skipped.append((model, deps))
        if not changed:
            raise CommandError(
                "Can't resolve dependencies for %s in serialized app list." %
                ', '.join('%s.%s' % (model._meta.app_label, model._meta.object_name)
                          for model, deps in sorted(
                              skipped, key=lambda obj: obj[0].__name__)))
        model_dependencies = skipped

    return model_list
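The promotion loop above is a plain dependency-ordering pass. The toy sketch below reproduces the same idea on strings instead of model classes (all names are made up) to show that an item is only emitted once everything it depends on is already in the output, and that a full pass with no promotion signals a cycle.

def toy_sort(pairs):
    # pairs: list of (name, [dependency names]); mirrors model_dependencies.
    pairs = list(reversed(pairs))  # pop() then walks items in input order
    ordered = []
    names = set(n for n, _ in pairs)
    while pairs:
        skipped = []
        changed = False
        while pairs:
            name, deps = pairs.pop()
            # Promote only if every dependency is already ordered or not
            # part of the input at all.
            if all(d not in names or d in ordered for d in deps):
                ordered.append(name)
                changed = True
            else:
                skipped.append((name, deps))
        if not changed:
            raise ValueError("circular dependency among %s" %
                             ', '.join(n for n, _ in skipped))
        pairs = skipped
    return ordered

# 'Book' depends on 'Person' (think: FK to a model with a natural key), so
# 'Person' comes first: ['Person', 'Book', 'Library']
print toy_sort([('Book', ['Person']), ('Person', []), ('Library', ['Book'])])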
def sql_create(app, style, connection):
    "Returns a list of the CREATE TABLE SQL statements for the given app."

    if connection.settings_dict['ENGINE'] == 'my_django.db.backends.dummy':
        # This must be the "dummy" database backend, which means the user
        # hasn't set ENGINE for the database.
        raise CommandError(
            "Django doesn't know which syntax to use for your SQL statements,\n"
            "because you haven't specified the ENGINE setting for the database.\n"
            "Edit your settings file and change DATABASES['default']['ENGINE'] to something like\n"
            "'my_django.db.backends.postgresql' or 'my_django.db.backends.mysql'.")

    # Get installed models, so we generate REFERENCES right.
    # We trim models from the current app so that the sqlreset command does not
    # generate invalid SQL (leaving models out of known_models is harmless, so
    # we can be conservative).
    app_models = models.get_models(app, include_auto_created=True)
    final_output = []
    tables = connection.introspection.table_names()
    known_models = set([model for model in
                        connection.introspection.installed_models(tables)
                        if model not in app_models])
    pending_references = {}

    for model in app_models:
        output, references = connection.creation.sql_create_model(
            model, style, known_models)
        final_output.extend(output)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in known_models:
                final_output.extend(
                    connection.creation.sql_for_pending_references(
                        refto, style, pending_references))
        final_output.extend(
            connection.creation.sql_for_pending_references(
                model, style, pending_references))
        # Keep track of the fact that we've created the table for this model.
        known_models.add(model)

    # Handle references to tables that are from other apps
    # but don't exist physically.
    not_installed_models = set(pending_references.keys())
    if not_installed_models:
        alter_sql = []
        for model in not_installed_models:
            alter_sql.extend([
                '-- ' + sql for sql in
                connection.creation.sql_for_pending_references(
                    model, style, pending_references)
            ])
        if alter_sql:
            final_output.append(
                '-- The following references should be added but depend on non-existent tables:')
            final_output.extend(alter_sql)

    return final_output
def handle(self, *apps, **options):
    raise CommandError("This command has been renamed. Use the 'sqlcustom' command instead.")