Example #1
    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)

        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)
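This fragment shows only the constructor: it wires one instance of each backend component (features, operations, client, creation, introspection, validation) onto the wrapper. As a minimal usage sketch (not part of the original example), the snippet below shows how those components are reached through Django's shared connection object once the backend is configured; the ENGINE dotted path is a hypothetical placeholder, and running it assumes Jython with the SQLite JDBC driver on the classpath.

# Hypothetical settings entry; the ENGINE path is a placeholder for wherever
# the DatabaseWrapper module above is actually importable from.
DATABASES = {
    'default': {
        'ENGINE': 'myproject.backends.sqlite3_jdbc',  # placeholder, not a real package
        'NAME': ':memory:',
    }
}

# Elsewhere, the components attached in __init__ are reached through the
# connection handler rather than instantiated directly.
from django.db import connection

connection.ensure_connection()                   # lazily opens the JDBC connection
print(connection.vendor)                         # 'sqlite'
print(connection.ops.quote_name('auth_user'))    # delegated to DatabaseOperations
print(connection.features.uses_savepoints)       # delegated to DatabaseFeatures
cursor = connection.cursor()                     # opens a cursor on that connection
cursor.execute("SELECT 1")
print(cursor.fetchone())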
Example #2
class DatabaseWrapper(BaseDatabaseWrapper):
    vendor = 'sqlite'
    jdbc_driver_class_name = 'org.sqlite.JDBC'
    jdbc_connection_url_pattern = 'jdbc:sqlite://%(NAME)s'
    jdbc_default_host = ''
    jdbc_default_port = 0
    jdbc_default_name = ':memory:'
    # SQLite doesn't actually support most of these types, but it "does the right
    # thing" given more verbose field definitions, so leave them as is so that
    # schema inspection is more useful.
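    # For example, a CharField(max_length=50) ends up as varchar(50): each
    # template below is filled in with the field's attributes when the column
    # definition is generated.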
    data_types = {
        'AutoField': 'integer',
        'BinaryField': 'text',  # SQLite JDBC driver causes problems with blob
        'BooleanField': 'bool',
        'CharField': 'varchar(%(max_length)s)',
        'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
        'DateField': 'date',
        'DateTimeField': 'datetime',
        'DecimalField': 'decimal',
        'FileField': 'varchar(%(max_length)s)',
        'FilePathField': 'varchar(%(max_length)s)',
        'FloatField': 'real',
        'IntegerField': 'integer',
        'BigIntegerField': 'bigint',
        'IPAddressField': 'char(15)',
        'GenericIPAddressField': 'char(39)',
        'NullBooleanField': 'bool',
        'OneToOneField': 'integer',
        'PositiveIntegerField': 'integer unsigned',
        'PositiveSmallIntegerField': 'smallint unsigned',
        'SlugField': 'varchar(%(max_length)s)',
        'SmallIntegerField': 'smallint',
        'TextField': 'text',
        'TimeField': 'time',
    }
    data_types_suffix = {
        'AutoField': 'AUTOINCREMENT',
    }
    # SQLite requires LIKE statements to include an ESCAPE clause if the value
    # being escaped has a percent or underscore in it.
    # See http://www.sqlite.org/lang_expr.html for an explanation.
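    # For example, a contains lookup on the value 50% is rendered roughly as
    # name LIKE '%50\%%' ESCAPE '\', so the literal percent sign in the value
    # is not treated as a wildcard.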
    operators = {
        'exact': '= %s',
        'iexact': "LIKE %s ESCAPE '\\'",
        'contains': "LIKE %s ESCAPE '\\'",
        'icontains': "LIKE %s ESCAPE '\\'",
        'regex': 'REGEXP %s',
        'iregex': "REGEXP '(?i)' || %s",
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': "LIKE %s ESCAPE '\\'",
        'endswith': "LIKE %s ESCAPE '\\'",
        'istartswith': "LIKE %s ESCAPE '\\'",
        'iendswith': "LIKE %s ESCAPE '\\'",
    }
    pattern_ops = {
        'startswith': "LIKE %s || '%%%%'",
        'istartswith': "LIKE UPPER(%s) || '%%%%'",
    }

    class Database(BaseDatabaseWrapper.Database):

        @staticmethod
        def Binary(value):
            """
            The SQLite JDBC driver does not support binary fields,
            so binary values are stored as text instead.
            """
            return bytes(value)

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)

        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)

    def get_new_connection(self, conn_params):
        conn = super(DatabaseWrapper, self).get_new_connection(conn_params)
        # Register the custom SQL functions that DatabaseOperations relies on:
        # date/datetime extraction and truncation, REGEXP, timedelta
        # formatting and the power operator.
        create_function(conn.__connection__, "django_date_extract", 2,
                        _sqlite_date_extract)
        create_function(conn.__connection__, "django_date_trunc", 2,
                        _sqlite_date_trunc)
        create_function(conn.__connection__, "django_datetime_extract", 3,
                        _sqlite_datetime_extract)
        create_function(conn.__connection__, "django_datetime_trunc", 3,
                        _sqlite_datetime_trunc)
        create_function(conn.__connection__, "regexp", 2, _sqlite_regexp)
        create_function(conn.__connection__, "django_format_dtdelta", 5,
                        _sqlite_format_dtdelta)
        create_function(conn.__connection__, "django_power", 2, _sqlite_power)
        return conn

    def init_connection_state(self):
        pass

    def create_cursor(self):
        return SQLiteCursorWrapper(self.connection.cursor())

    def close(self):
        self.validate_thread_sharing()
        # If database is in memory, closing the connection destroys the
        # database. To prevent accidental data loss, ignore close requests on
        # an in-memory db.
        if self.settings_dict['NAME'] != ":memory:":
            BaseDatabaseWrapper.close(self)

    def _savepoint_allowed(self):
        # Two conditions are required here:
        # - A sufficiently recent version of SQLite to support savepoints,
        # - Being in a transaction, which can only happen inside 'atomic'.

        # When 'isolation_level' is not None, sqlite3 commits before each
        # savepoint; it's a bug. When it is None, savepoints don't make sense
        # because autocommit is enabled. The only exception is inside 'atomic'
        # blocks. To work around that bug, on SQLite, 'atomic' starts a
        # transaction explicitly rather than simply disabling autocommit.
        return self.features.uses_savepoints and self.in_atomic_block

    def _commit(self):
        if self.connection is not None and not self.connection.autocommit:
            with self.wrap_database_errors:
                return self.connection.commit()

    def check_constraints(self, table_names=None):
        """
        Checks each table name in `table_names` for rows with invalid foreign key references. This method is
        intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to
        determine if rows with invalid references were entered while constraint checks were off.

        Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides
        detailed information about the invalid reference in the error message.

        Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS
        ALL IMMEDIATE")
        """
        cursor = self.cursor()
        if table_names is None:
            table_names = self.introspection.table_names(cursor)
        for table_name in table_names:
            primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
            if not primary_key_column_name:
                continue
            key_columns = self.introspection.get_key_columns(cursor, table_name)
            for column_name, referenced_table_name, referenced_column_name in key_columns:
                cursor.execute("""
                    SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
                    LEFT JOIN `%s` as REFERRED
                    ON (REFERRING.`%s` = REFERRED.`%s`)
                    WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
                    % (primary_key_column_name, column_name, table_name, referenced_table_name,
                    column_name, referenced_column_name, column_name, referenced_column_name))
                for bad_row in cursor.fetchall():
                    raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
                        "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
                        % (table_name, bad_row[0], table_name, column_name, bad_row[1],
                        referenced_table_name, referenced_column_name))

    def is_usable(self):
        return True

    def schema_editor(self, *args, **kwargs):
        """
        Returns a new instance of this backend's SchemaEditor
        """
        return DatabaseSchemaEditor(self, *args, **kwargs)

    @staticmethod
    def _set_default_isolation_level(connection):
        """
        Make transactions transparent to all cursors. Must be called by
        zxJDBC backends after instantiating a connection.

        :param connection: zxJDBC connection
        """
        jdbc_connection = connection.__connection__
        jdbc_connection.setTransactionIsolation(
            JDBCConnection.TRANSACTION_SERIALIZABLE)

    def is_in_memory_db(self, name):
        return name == ":memory:" or "mode=memory" in force_text(name)
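check_constraints() above is the hook that pairs with disable_constraint_checking() and enable_constraint_checking(); Django's loaddata command relies on this trio to suspend foreign key enforcement while fixtures are loaded and to verify the result afterwards. A minimal usage sketch, assuming a configured connection and hypothetical app_book / app_author tables where app_book.author_id references app_author.id:

from django.db import connection, IntegrityError

with connection.constraint_checks_disabled():
    cursor = connection.cursor()
    # Insert a row whose foreign key points at a non-existent author while
    # enforcement is off.
    cursor.execute(
        "INSERT INTO app_book (id, title, author_id) VALUES (1, 'Orphan', 999)")

try:
    # Walks the key columns of the given tables and raises on the first
    # dangling reference it finds.
    connection.check_constraints(table_names=['app_book'])
except IntegrityError as exc:
    print("Invalid reference detected:", exc)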