Example #1
    def delete(self, key, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
        transaction.commit_unless_managed(using=db)
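Most of these snippets follow the same pattern: pick the write database for the cache model via the router, quote the table name with the backend's quote_name, run a parameterized statement, and finish with commit_unless_managed so the change is committed whenever the caller has not already opened a managed transaction. A hedged usage sketch showing how this delete is reached through the cache API (the cache alias 'db_cache' is an assumption, not taken from the snippet):

# Usage sketch for Django <= 1.5; assumes a CACHES entry named 'db_cache'
# backed by django.core.cache.backends.db.DatabaseCache.
from django.core.cache import get_cache

cache = get_cache('db_cache')
cache.set('greeting', 'hello', 30)  # handled by _base_set, see Example #6
cache.delete('greeting')            # runs the DELETE above, then commits it
                                    # via commit_unless_managed()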
Example #2
def method_set_order(ordered_obj, self, id_list, using=None):
    if using is None:
        using = DEFAULT_DB_ALIAS
    rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.rel.field_name)
    order_name = ordered_obj._meta.order_with_respect_to.name
    # FIXME: It would be nice if there was an "update many" version of update
    # for situations like this.
    for i, j in enumerate(id_list):
        ordered_obj.objects.filter(**{"pk": j, order_name: rel_val}).update(_order=i)
    transaction.commit_unless_managed(using=using)
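For context, Django attaches a bound version of method_set_order to the related model as set_<modelname>_order when a model declares order_with_respect_to. A sketch with illustrative model names (not taken from the snippet):

# Illustrative models only. With order_with_respect_to, Django generates
# question.get_answer_order() and question.set_answer_order(id_list); the
# setter is the method_set_order function shown above.
from django.db import models

class Question(models.Model):
    text = models.CharField(max_length=200)

class Answer(models.Model):
    question = models.ForeignKey(Question)
    text = models.CharField(max_length=200)

    class Meta:
        order_with_respect_to = 'question'

# question.set_answer_order([3, 1, 2]) rewrites _order row by row and then
# calls transaction.commit_unless_managed(using=...).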
Example #3
 def decorated(self, *args, **kwargs):
     if not transaction.is_managed(using=self.using):
         transaction.enter_transaction_management(using=self.using)
         forced_managed = True
     else:
         forced_managed = False
     try:
         func(self, *args, **kwargs)
         if forced_managed:
             transaction.commit(using=self.using)
         else:
             transaction.commit_unless_managed(using=self.using)
     finally:
         if forced_managed:
             transaction.leave_transaction_management(using=self.using)
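The snippet above is only the inner function of a decorator; the enclosing factory is not shown. A minimal reconstruction of how it could be defined and applied, with the factory name force_managed and the Collector class being assumptions and the inner body elided:

# Hedged reconstruction; only `decorated` appears in Example #3.
from functools import wraps
from django.db import transaction

def force_managed(func):
    @wraps(func)
    def decorated(self, *args, **kwargs):
        ...  # body exactly as in Example #3
    return decorated

class Collector(object):
    using = 'default'

    @force_managed
    def delete(self):
        ...  # runs inside an explicit transaction when not already managed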
Example #4
 def _reset_sequences(self, db_name):
     conn = connections[db_name]
     if conn.features.supports_sequence_reset:
         sql_list = \
             conn.ops.sequence_reset_by_name_sql(no_style(),
                                                 conn.introspection.sequence_list())
         if sql_list:
             try:
                 cursor = conn.cursor()
                 for sql in sql_list:
                     cursor.execute(sql)
             except Exception:
                 transaction.rollback_unless_managed(using=db_name)
                 raise
             transaction.commit_unless_managed(using=db_name)
Example #5
 def handle_label(self, tablename, **options):
     db = options.get('database')
     cache = BaseDatabaseCache(tablename, {})
     if not router.allow_syncdb(db, cache.cache_model_class):
         return
     connection = connections[db]
     fields = (
         # "key" is a reserved word in MySQL, so use "cache_key" instead.
         models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
         models.TextField(name='value'),
         models.DateTimeField(name='expires', db_index=True),
     )
     table_output = []
     index_output = []
     qn = connection.ops.quote_name
     for f in fields:
         field_output = [qn(f.name), f.db_type(connection=connection)]
         field_output.append("%sNULL" % (not f.null and "NOT " or ""))
         if f.primary_key:
             field_output.append("PRIMARY KEY")
         elif f.unique:
             field_output.append("UNIQUE")
         if f.db_index:
             unique = f.unique and "UNIQUE " or ""
             index_output.append("CREATE %sINDEX %s ON %s (%s);" % \
                 (unique, qn('%s_%s' % (tablename, f.name)), qn(tablename),
                 qn(f.name)))
         table_output.append(" ".join(field_output))
     full_statement = ["CREATE TABLE %s (" % qn(tablename)]
     for i, line in enumerate(table_output):
         full_statement.append('    %s%s' % (line, i < len(table_output)-1 and ',' or ''))
     full_statement.append(');')
     curs = connection.cursor()
     try:
         curs.execute("\n".join(full_statement))
     except DatabaseError as e:
         transaction.rollback_unless_managed(using=db)
         raise CommandError(
             "Cache table '%s' could not be created.\nThe error was: %s." %
                 (tablename, force_text(e)))
     for statement in index_output:
         curs.execute(statement)
     transaction.commit_unless_managed(using=db)
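A usage sketch for the command above: the table is created once per cache database and is then used by the database cache backend shown in Examples #1, #6 and #7 (the table name 'my_cache_table' and the database alias are assumptions):

# Hedged sketch; createcachetable is a LabelCommand, so the table name is
# passed as the label and the target database via the 'database' option.
from django.core.management import call_command

call_command('createcachetable', 'my_cache_table', database='default')

# Afterwards, point a CACHES entry at the new table, e.g.
# CACHES = {'db_cache': {'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
#                        'LOCATION': 'my_cache_table'}}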
Example #6
    def _base_set(self, mode, key, value, timeout=None):
        if timeout is None:
            timeout = self.default_timeout
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("SELECT COUNT(*) FROM %s" % table)
        num = cursor.fetchone()[0]
        now = timezone.now()
        now = now.replace(microsecond=0)
        if settings.USE_TZ:
            exp = datetime.utcfromtimestamp(time.time() + timeout)
        else:
            exp = datetime.fromtimestamp(time.time() + timeout)
        exp = exp.replace(microsecond=0)
        if num > self._max_entries:
            self._cull(db, cursor, now)
        pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
        encoded = base64.b64encode(pickled).strip()
        cursor.execute("SELECT cache_key, expires FROM %s "
                       "WHERE cache_key = %%s" % table, [key])
        try:
            result = cursor.fetchone()
            if result and (mode == 'set' or
                    (mode == 'add' and result[1] < now)):
                cursor.execute("UPDATE %s SET value = %%s, expires = %%s "
                               "WHERE cache_key = %%s" % table,
                               [encoded, connections[db].ops.value_to_db_datetime(exp), key])
            else:
                cursor.execute("INSERT INTO %s (cache_key, value, expires) "
                               "VALUES (%%s, %%s, %%s)" % table,
                               [key, encoded, connections[db].ops.value_to_db_datetime(exp)])
        except DatabaseError:
            # To be threadsafe, updates/inserts are allowed to fail silently
            transaction.rollback_unless_managed(using=db)
            return False
        else:
            transaction.commit_unless_managed(using=db)
            return True
Example #7
    def get(self, key, default=None, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)
        db = router.db_for_read(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        cursor = connections[db].cursor()

        cursor.execute("SELECT cache_key, value, expires FROM %s "
                       "WHERE cache_key = %%s" % table, [key])
        row = cursor.fetchone()
        if row is None:
            return default
        now = timezone.now()
        if row[2] < now:
            db = router.db_for_write(self.cache_model_class)
            cursor = connections[db].cursor()
            cursor.execute("DELETE FROM %s "
                           "WHERE cache_key = %%s" % table, [key])
            transaction.commit_unless_managed(using=db)
            return default
        value = connections[db].ops.process_clob(row[1])
        return pickle.loads(base64.b64decode(force_bytes(value)))
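Examples #6 and #7 store values as base64-encoded pickles in the value column; the round trip of that encoding uses only the standard library and can be shown in isolation:

# Standalone demonstration of the encoding used by _base_set and get above:
# pickle then base64 on the way in, base64-decode then unpickle on the way out.
import base64
import pickle

value = {'hits': 3, 'tags': ['a', 'b']}
encoded = base64.b64encode(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)).strip()
decoded = pickle.loads(base64.b64decode(encoded))
assert decoded == value  # what get() returns equals what set() stored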
Example #8
    def handle_noargs(self, **options):

        verbosity = int(options.get("verbosity"))
        interactive = options.get("interactive")
        show_traceback = options.get("traceback")
        load_initial_data = options.get("load_initial_data")

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module(".management", app_name)
            except ImportError as exc:
                # This is slightly hackish. We want to ignore ImportErrors
                # if the "management" module itself is missing -- but we don't
                # want to ignore the exception if the management module exists
                # but raises an ImportError for some reason. The only way we
                # can do this is to check the text of the exception. Note that
                # we're a bit broad in how we check the text, because different
                # Python implementations may not use the same text.
                # CPython uses the text "No module named management"
                # PyPy uses "No module named myproject.myapp.management"
                msg = exc.args[0]
                if not msg.startswith("No module named") or "management" not in msg:
                    raise

        db = options.get("database")
        connection = connections[db]
        cursor = connection.cursor()

        # Get a list of already installed *models* so that references work right.
        tables = connection.introspection.table_names()
        seen_models = connection.introspection.installed_models(tables)
        created_models = set()
        pending_references = {}

        # Build the manifest of apps and models that are to be synchronized
        all_models = [
            (
                app.__name__.split(".")[-2],
                [m for m in models.get_models(app, include_auto_created=True) if router.allow_syncdb(db, m)],
            )
            for app in models.get_apps()
        ]

        def model_installed(model):
            opts = model._meta
            converter = connection.introspection.table_name_converter
            return not (
                (converter(opts.db_table) in tables)
                or (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables)
            )

        manifest = SortedDict(
            (app_name, list(filter(model_installed, model_list))) for app_name, model_list in all_models
        )

        # Create the tables for each model
        if verbosity >= 1:
            self.stdout.write("Creating tables ...\n")
        for app_name, model_list in manifest.items():
            for model in model_list:
                # Create the model's database table, if it doesn't already exist.
                if verbosity >= 3:
                    self.stdout.write("Processing %s.%s model\n" % (app_name, model._meta.object_name))
                sql, references = connection.creation.sql_create_model(model, self.style, seen_models)
                seen_models.add(model)
                created_models.add(model)
                for refto, refs in references.items():
                    pending_references.setdefault(refto, []).extend(refs)
                    if refto in seen_models:
                        sql.extend(
                            connection.creation.sql_for_pending_references(refto, self.style, pending_references)
                        )
                sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references))
                if verbosity >= 1 and sql:
                    self.stdout.write("Creating table %s\n" % model._meta.db_table)
                for statement in sql:
                    cursor.execute(statement)
                tables.append(connection.introspection.table_name_converter(model._meta.db_table))

        transaction.commit_unless_managed(using=db)

        # Send the post_syncdb signal, so individual apps can do whatever they need
        # to do at this point.
        emit_post_sync_signal(created_models, verbosity, interactive, db)

        # The connection may have been closed by a syncdb handler.
        cursor = connection.cursor()

        # Install custom SQL for the app (but only if this
        # is a model we've just created)
        if verbosity >= 1:
            self.stdout.write("Installing custom SQL ...\n")
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    custom_sql = custom_sql_for_model(model, self.style, connection)
                    if custom_sql:
                        if verbosity >= 2:
                            self.stdout.write(
                                "Installing custom SQL for %s.%s model\n" % (app_name, model._meta.object_name)
                            )
                        try:
                            for sql in custom_sql:
                                cursor.execute(sql)
                        except Exception as e:
                            self.stderr.write(
                                "Failed to install custom SQL for %s.%s model: %s\n"
                                % (app_name, model._meta.object_name, e)
                            )
                            if show_traceback:
                                traceback.print_exc()
                            transaction.rollback_unless_managed(using=db)
                        else:
                            transaction.commit_unless_managed(using=db)
                    else:
                        if verbosity >= 3:
                            self.stdout.write("No custom SQL for %s.%s model\n" % (app_name, model._meta.object_name))

        if verbosity >= 1:
            self.stdout.write("Installing indexes ...\n")
        # Install SQL indices for all newly created models
        for app_name, model_list in manifest.items():
            for model in model_list:
                if model in created_models:
                    index_sql = connection.creation.sql_indexes_for_model(model, self.style)
                    if index_sql:
                        if verbosity >= 2:
                            self.stdout.write(
                                "Installing index for %s.%s model\n" % (app_name, model._meta.object_name)
                            )
                        try:
                            for sql in index_sql:
                                cursor.execute(sql)
                        except Exception as e:
                            self.stderr.write(
                                "Failed to install index for %s.%s model: %s\n" % (app_name, model._meta.object_name, e)
                            )
                            transaction.rollback_unless_managed(using=db)
                        else:
                            transaction.commit_unless_managed(using=db)

        # Load initial_data fixtures (unless that has been disabled)
        if load_initial_data:
            call_command("loaddata", "initial_data", verbosity=verbosity, database=db, skip_validation=True)
Example #9
    def handle_noargs(self, **options):
        db = options.get('database')
        connection = connections[db]
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        # 'reset_sequences' is a stealth option
        reset_sequences = options.get('reset_sequences', True)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True, reset_sequences=reset_sequences)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                cursor = connection.cursor()
                for sql in sql_list:
                    cursor.execute(sql)
            except Exception as e:
                transaction.rollback_unless_managed(using=db)
                raise CommandError("""Database %s couldn't be flushed. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the expected database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
The full error: %s""" % (connection.settings_dict['NAME'], e))
            transaction.commit_unless_managed(using=db)

            # Emit the post sync signal. This allows individual
            # applications to respond as if the database had been
            # sync'd from scratch.
            all_models = []
            for app in models.get_apps():
                all_models.extend([
                    m for m in models.get_models(app, include_auto_created=True)
                    if router.allow_syncdb(db, m)
                ])
            emit_post_sync_signal(set(all_models), verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            kwargs = options.copy()
            kwargs['database'] = db
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture.
                call_command('loaddata', 'initial_data', **options)

        else:
            self.stdout.write("Flush cancelled.\n")
Example #10
    def handle(self, *fixture_labels, **options):
        using = options.get('database')

        connection = connections[using]

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path of at "
                "least one fixture in the command line."
            )

        verbosity = int(options.get('verbosity'))
        show_traceback = options.get('traceback')

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None:   open,
            'gz':   gzip.GzipFile,
            'zip':  SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(path)
            else:
                # It's a models.py module
                app_module_paths.append(app.__file__)

        app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]

        try:
            with connection.constraint_checks_disabled():
                for fixture_label in fixture_labels:
                    parts = fixture_label.split('.')

                    if len(parts) > 1 and parts[-1] in compression_types:
                        compression_formats = [parts[-1]]
                        parts = parts[:-1]
                    else:
                        compression_formats = compression_types.keys()

                    if len(parts) == 1:
                        fixture_name = parts[0]
                        formats = serializers.get_public_serializer_formats()
                    else:
                        fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                        if format in serializers.get_public_serializer_formats():
                            formats = [format]
                        else:
                            formats = []

                    if formats:
                        if verbosity >= 2:
                            self.stdout.write("Loading '%s' fixtures..." % fixture_name)
                    else:
                        raise CommandError(
                            "Problem installing fixture '%s': %s is not a known serialization format." %
                                (fixture_name, format))

                    if os.path.isabs(fixture_name):
                        fixture_dirs = [fixture_name]
                    else:
                        fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

                    for fixture_dir in fixture_dirs:
                        if verbosity >= 2:
                            self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))

                        label_found = False
                        for combo in product([using, None], formats, compression_formats):
                            database, format, compression_format = combo
                            file_name = '.'.join(
                                p for p in [
                                    fixture_name, database, format, compression_format
                                ]
                                if p
                            )

                            if verbosity >= 3:
                                self.stdout.write("Trying %s for %s fixture '%s'..." % \
                                    (humanize(fixture_dir), file_name, fixture_name))
                            full_path = os.path.join(fixture_dir, file_name)
                            open_method = compression_types[compression_format]
                            try:
                                fixture = open_method(full_path, 'r')
                            except IOError:
                                if verbosity >= 2:
                                    self.stdout.write("No %s fixture '%s' in %s." % \
                                        (format, fixture_name, humanize(fixture_dir)))
                            else:
                                try:
                                    if label_found:
                                        raise CommandError("Multiple fixtures named '%s' in %s. Aborting." %
                                            (fixture_name, humanize(fixture_dir)))

                                    fixture_count += 1
                                    objects_in_fixture = 0
                                    loaded_objects_in_fixture = 0
                                    if verbosity >= 2:
                                        self.stdout.write("Installing %s fixture '%s' from %s." % \
                                            (format, fixture_name, humanize(fixture_dir)))

                                    objects = serializers.deserialize(format, fixture, using=using)

                                    for obj in objects:
                                        objects_in_fixture += 1
                                        if router.allow_syncdb(using, obj.object.__class__):
                                            loaded_objects_in_fixture += 1
                                            models.add(obj.object.__class__)
                                            try:
                                                obj.save(using=using)
                                            except (DatabaseError, IntegrityError) as e:
                                                e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                                        'app_label': obj.object._meta.app_label,
                                                        'object_name': obj.object._meta.object_name,
                                                        'pk': obj.object.pk,
                                                        'error_msg': force_text(e)
                                                    },)
                                                raise

                                    loaded_object_count += loaded_objects_in_fixture
                                    fixture_object_count += objects_in_fixture
                                    label_found = True
                                except Exception as e:
                                    if not isinstance(e, CommandError):
                                        e.args = ("Problem installing fixture '%s': %s" % (full_path, e),)
                                    raise
                                finally:
                                    fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    raise CommandError(
                                        "No fixture data found for '%s'. (File format may be invalid.)" %
                                            (fixture_name))

            # Since we disabled constraint checks, we must manually check for
            # any invalid keys that might have been added
            table_names = [model._meta.db_table for model in models]
            try:
                connection.check_constraints(table_names=table_names)
            except Exception as e:
                e.args = ("Problem installing fixtures: %s" % e,)
                raise

        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as e:
            if commit:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(no_style(), models)
            if sequence_sql:
                if verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)

        if verbosity >= 1:
            if fixture_object_count == loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" % (
                    loaded_object_count, fixture_count))
            else:
                self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" % (
                    loaded_object_count, fixture_object_count, fixture_count))

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
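The comment block near the top of this command describes a stealth commit option for callers that want the fixture load to join an existing transaction. A hedged sketch of such a caller, using the pre-1.6 transaction API that all of these examples target:

# Hedged sketch, Django <= 1.5 assumed (enter_transaction_management, managed
# and leave_transaction_management were removed in later releases). The caller
# owns the transaction, so loaddata is invoked with commit=False.
from django.core.management import call_command
from django.db import transaction

using = 'default'
transaction.enter_transaction_management(using=using)
transaction.managed(True, using=using)
try:
    call_command('loaddata', 'initial_data', database=using, commit=False)
    # ... more work in the same transaction ...
    transaction.commit(using=using)
except Exception:
    transaction.rollback(using=using)
    raise
finally:
    transaction.leave_transaction_management(using=using)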
Example #11
    def save_base(
        self, raw=False, cls=None, origin=None, force_insert=False, force_update=False, using=None, update_fields=None
    ):
        """
        Does the heavy-lifting involved in saving. Subclasses shouldn't need to
        override this method. It's separate from save() in order to hide the
        need for overrides of save() to pass around internal-only parameters
        ('raw', 'cls', and 'origin').
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        assert not (force_insert and (force_update or update_fields))
        assert update_fields is None or len(update_fields) > 0
        if cls is None:
            cls = self.__class__
            meta = cls._meta
            if not meta.proxy:
                origin = cls
        else:
            meta = cls._meta

        if origin and not meta.auto_created:
            signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields)

        # If we are in a raw save, save the object exactly as presented.
        # That means that we don't try to be smart about saving attributes
        # that might have come from the parent class - we just save the
        # attributes we have been given to the class we have been given.
        # We also go through this process to defer the save of proxy objects
        # to their actual underlying model.
        if not raw or meta.proxy:
            if meta.proxy:
                org = cls
            else:
                org = None
            for parent, field in meta.parents.items():
                # At this point, parent's primary key field may be unknown
                # (for example, from administration form which doesn't fill
                # this field). If so, fill it.
                if (
                    field
                    and getattr(self, parent._meta.pk.attname) is None
                    and getattr(self, field.attname) is not None
                ):
                    setattr(self, parent._meta.pk.attname, getattr(self, field.attname))

                self.save_base(cls=parent, origin=org, using=using, update_fields=update_fields)

                if field:
                    setattr(self, field.attname, self._get_pk_val(parent._meta))
            if meta.proxy:
                return

        if not meta.proxy:
            non_pks = [f for f in meta.local_fields if not f.primary_key]

            if update_fields:
                non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields]

            # First, try an UPDATE. If that doesn't update anything, do an INSERT.
            pk_val = self._get_pk_val(meta)
            pk_set = pk_val is not None
            record_exists = True
            manager = cls._base_manager
            if pk_set:
                # Determine if we should do an update (pk already exists, forced update,
                # no force_insert)
                if (force_update or update_fields) or (
                    not force_insert and manager.using(using).filter(pk=pk_val).exists()
                ):
                    if force_update or non_pks:
                        values = [
                            (f, None, (raw and getattr(self, f.attname) or f.pre_save(self, False))) for f in non_pks
                        ]
                        if values:
                            rows = manager.using(using).filter(pk=pk_val)._update(values)
                            if force_update and not rows:
                                raise DatabaseError("Forced update did not affect any rows.")
                            if update_fields and not rows:
                                raise DatabaseError("Save with update_fields did not affect any rows.")
                else:
                    record_exists = False
            if not pk_set or not record_exists:
                if meta.order_with_respect_to:
                    # If this is a model with an order_with_respect_to
                    # autopopulate the _order field
                    field = meta.order_with_respect_to
                    order_value = manager.using(using).filter(**{field.name: getattr(self, field.attname)}).count()
                    self._order = order_value

                fields = meta.local_fields
                if not pk_set:
                    if force_update or update_fields:
                        raise ValueError("Cannot force an update in save() with no primary key.")
                    fields = [f for f in fields if not isinstance(f, AutoField)]

                record_exists = False

                update_pk = bool(meta.has_auto_field and not pk_set)
                result = manager._insert([self], fields=fields, return_id=update_pk, using=using, raw=raw)

                if update_pk:
                    setattr(self, meta.pk.attname, result)
            transaction.commit_unless_managed(using=using)

        # Store the database on which the object was saved
        self._state.db = using
        # Once saved, this is no longer a to-be-added instance.
        self._state.adding = False

        # Signal that the save is complete
        if origin and not meta.auto_created:
            signals.post_save.send(
                sender=origin,
                instance=self,
                created=(not record_exists),
                update_fields=update_fields,
                raw=raw,
                using=using,
            )
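A short usage sketch of the update-vs-insert decision above (the Article model is illustrative, not taken from the snippet): with a primary key already set an UPDATE is attempted first, and force_update turns a zero-row UPDATE into an error instead of falling back to an INSERT:

# Illustrative only; Article is an assumed model with a title CharField.
article = Article(title='draft')
article.save()                   # no pk yet -> INSERT, then commit_unless_managed
article.title = 'final'
article.save()                   # pk set -> UPDATE path in save_base

ghost = Article(pk=999999, title='missing')
ghost.save(force_update=True)    # zero rows updated -> DatabaseError is raised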
Example #12
 def handle_noargs(self, **options):
     from djangocg.db import transaction
     from djangocg.contrib.sessions.models import Session
     Session.objects.filter(expire_date__lt=timezone.now()).delete()
     transaction.commit_unless_managed()