def test_load_backend_invalid_name(self): msg = ("'foo' isn't an available database backend.\n" "Try using 'django.db.backends.XXX', where XXX is one of:\n" " 'mysql', 'oracle', 'postgresql', 'sqlite3'") with self.assertRaisesMessage(ImproperlyConfigured, msg) as cm: load_backend('foo') self.assertEqual(str(cm.exception.__cause__), "No module named 'foo'")
def test_load_backend_invalid_name(self): msg = ("'foo' isn't an available database backend.\n" "Try using 'django.db.backends.XXX', where XXX is one of:\n" " 'mysql', 'oracle', 'postgresql', 'sqlite3'\n" "Error was: No module named %s" ) % "foo.base" if six.PY2 else "'foo'" with self.assertRaisesMessage(ImproperlyConfigured, msg): load_backend('foo')
def test_load_backend_invalid_name(self): msg = ( "'foo' isn't an available database backend or couldn't be " "imported. Check the above exception. To use one of the built-in " "backends, use 'django.db.backends.XXX', where XXX is one of:\n" " 'mysql', 'oracle', 'postgresql', 'sqlite3'") with self.assertRaisesMessage(ImproperlyConfigured, msg) as cm: load_backend("foo") self.assertEqual(str(cm.exception.__cause__), "No module named 'foo'")
def handle(self, db_engine, db_name, db_user, db_password, db_host, db_port, *args, **options):
    verbosity = int(options.get('verbosity', 1))
    backend = load_backend(db_engine)
    conn = backend.DatabaseWrapper({
        'NAME': db_name,
        'ENGINE': db_engine,
        'USER': db_user,
        'PASSWORD': db_password,
        'HOST': db_host,
        'PORT': db_port,
        'OPTIONS': {},
    }, 'test_connection')
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM organizations;")
        cursor.execute("SELECT COUNT(*) FROM organizations;")
        cursor.execute("SELECT COUNT(*) FROM epic_geo;")
        cursor.execute("SELECT COUNT(*) FROM epic_geo;")
        cursor.execute("SELECT COUNT(*) FROM epic_persons;")
        cursor.execute("SELECT COUNT(*) FROM epic_persons;")
    except Exception, err:
        if verbosity >= 2:
            print err

def allow_migrate(self, db, app_label, model_name=None, **hints):
    # the imports below need to be done here else django <1.5 goes crazy
    # https://code.djangoproject.com/ticket/20704
    from django.db import connection
    from tenant_schemas.utils import get_public_schema_name, app_labels
    from tenant_schemas.postgresql_backend.base import DatabaseWrapper as TenantDbWrapper

    db_engine = settings.DATABASES[db]["ENGINE"]
    if not (
        db_engine == "tenant_schemas.postgresql_backend"
        or issubclass(getattr(load_backend(db_engine), "DatabaseWrapper"), TenantDbWrapper)
    ):
        return None

    if isinstance(app_label, ModelBase):
        # In django <1.7 the `app_label` parameter is actually `model`
        app_label = app_label._meta.app_label

    if connection.schema_name == get_public_schema_name():
        if app_label not in app_labels(settings.SHARED_APPS):
            return False
    else:
        if app_label not in app_labels(settings.TENANT_APPS):
            return False

    return None

def allow_migrate(self, db, app_label, model_name=None, **hints):
    # the imports below need to be done here else django <1.5 goes crazy
    # https://code.djangoproject.com/ticket/20704
    from django.db import connection
    from wagtail.wagtailtenant.utils import get_public_schema_name, app_labels
    from wagtail.wagtailtenant.postgresql_backend.base import DatabaseWrapper as TenantDbWrapper

    db_engine = settings.DATABASES[db]['ENGINE']
    if not (db_engine == 'wagtailtenant.postgresql_backend' or issubclass(
            getattr(load_backend(db_engine), 'DatabaseWrapper'), TenantDbWrapper)):
        return None

    if isinstance(app_label, ModelBase):
        # In django <1.7 the `app_label` parameter is actually `model`
        app_label = app_label._meta.app_label

    if connection.schema_name == get_public_schema_name():
        if app_label not in app_labels(settings.SHARED_APPS):
            return False
    else:
        if app_label not in app_labels(settings.TENANT_APPS):
            return False

    return None

def destroy_test_db(self, old_database_name, verbosity=1):
    """
    Destroy a test database, prompting the user for confirmation if the
    database already exists.
    """
    self.connection.close()
    test_database_name = self.connection.settings_dict['NAME']
    if verbosity >= 1:
        test_db_repr = ''
        if verbosity >= 2:
            test_db_repr = " ('%s')" % test_database_name
        print("Destroying test database for alias '%s'%s..." % (
            self.connection.alias, test_db_repr))

    # Temporarily use a new connection and a copy of the settings dict.
    # This prevents the production database from being exposed to potential
    # child threads while (or after) the test database is destroyed.
    # Refs #10868 and #17786.
    settings_dict = self.connection.settings_dict.copy()
    settings_dict['NAME'] = old_database_name
    backend = load_backend(settings_dict['ENGINE'])
    new_connection = backend.DatabaseWrapper(
        settings_dict, alias='__destroy_test_db__', allow_thread_sharing=False)
    new_connection.creation._destroy_test_db(test_database_name, verbosity)

def db_test(request):
    user = os.getenv('TEST_DB_USERNAME')
    pwd = os.getenv('TEST_DB_PASSWORD')
    host = os.getenv('TEST_DB_HOST')
    port = os.getenv('TEST_DB_PORT')
    db_name = os.getenv('TEST_DB_NAME')
    db = {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': db_name,
        'USER': user,
        'PASSWORD': pwd,
        'HOST': host,
        'PORT': port,
        'TIME_ZONE': None,
        'CONN_MAX_AGE': 500,
        'AUTOCOMMIT': None,
        'OPTIONS': {}
    }
    backend = load_backend(db['ENGINE'])
    conn = backend.DatabaseWrapper(db, "remote postgres")
    c = conn.cursor()
    c.execute('select * from a;')
    data = c.fetchall()
    conn.close()
    return JsonResponse({'data': data}, safe=False)

def _validate_database_url(self, question, url):
    # Ensure django is setup
    django.setup()
    try:
        db = dj_database_url.parse(url)
    except KeyError:
        return False, 'Failed to parse database URL.\n'
    options = {}
    if 'sqlite' not in db['ENGINE']:
        options['connect_timeout'] = 3
    db['OPTIONS'] = options
    # These are django defaults - the cursor() call craps out if these are missing.
    db['AUTOCOMMIT'] = True
    db['TIME_ZONE'] = None
    try:
        engine = load_backend(db['ENGINE']).DatabaseWrapper(db)
        engine.cursor()
    except KeyError:
        return False, 'Invalid database URL provided.\n'
    except Exception as e:
        return False, '{0}\n'.format(str(e))
    return True, ''

def _create_connection(db):
    connection = connections[db]
    db_settings = connection.settings_dict
    db_engine = db_settings["ENGINE"]
    backend = load_backend(db_engine)
    db_engine = db_engine.split(".")[-1]
    if db_engine == "sqlite3":
        import sqlite3
        return sqlite3.connect(db_settings["NAME"])
    elif "postgres" in db_engine:
        return backend.psycopg2.connect(
            "host='%s' dbname='postgres' user='******' password='******'"
            % (db_settings["HOST"], db_settings["USER"], db_settings["PASSWORD"])
        )
    elif db_engine == "pyodbc":
        return backend.Database.connect(
            "DRIVER={SQL Server Native Client 11.0};DATABASE=master;Server=%s;UID=%s;PWD=%s"
            % (db_settings["HOST"], db_settings["USER"], db_settings["PASSWORD"])
        )
    elif db_engine == "oracle":
        import cx_Oracle
        conn = cx_Oracle.connect("SYSTEM", db_settings["PASSWORD"], "localhost/master")
        return conn

def createConnection(self):
    """Create new database connection."""
    db = connections.databases[self.alias]
    backend = load_backend(db["ENGINE"])
    return backend.DatabaseWrapper(db, self.alias, allow_thread_sharing=True)

def check_database_engine():
    all_psql = True
    postgresql_psycopg2_engine = load_backend('django.db.backends.postgresql_psycopg2')
    if getattr(settings, 'DATABASES', None):
        # Django 1.2+ style
        for db in settings.DATABASES.values():
            engine = load_backend(db['ENGINE'])
            if not issubclass(engine.DatabaseWrapper,
                              postgresql_psycopg2_engine.DatabaseWrapper):
                all_psql = False
    else:
        # Django -1.1 style
        engine = load_backend(settings.DATABASE_ENGINE)
        if not issubclass(engine.DatabaseWrapper,
                          postgresql_psycopg2_engine.DatabaseWrapper):
            all_psql = False
    if not all_psql:
        raise CommandError(
            'Only the postgresql_psycopg2 database engine is supported.')

def test_load_backend(self):
    try:
        from django.db import connections
    except ImportError:
        # Django <1.2
        return  # :(
    self.assertTrue('mongodb' in connections)
    from django.db.utils import load_backend
    backend = load_backend('django_mongokit.mongodb')
    self.assertTrue(backend is not None)

def _root_db_wrapper(self, db):
    from django.db.utils import load_backend
    backend = load_backend(db['ENGINE'])
    db_root = dict(db)
    db_creation_overide = db.get('DATABASE_CREATION', {})
    if 'NAME' not in db_creation_overide:
        db_creation_overide['NAME'] = 'postgres'
    db_root.update(db_creation_overide)
    return backend.DatabaseWrapper(db_root)

def setup_databases(self):
    for alias, db in self.databases.items():
        backend = load_backend(db['ENGINE'])
        conn = backend.DatabaseWrapper(db, alias)
        if django.VERSION >= (1, 9):
            connections[alias].creation.set_as_test_mirror(
                conn.settings_dict,
            )
        else:
            test_db_name = conn.settings_dict['NAME']
            connections[alias].settings_dict['NAME'] = test_db_name

def test_load_backend(self):
    try:
        from django.db import connections
        from django_mongokit.shortcut import DATABASE_CONF
    except ImportError:
        # Django <1.2
        return  # :(
    self.assertTrue(DATABASE_CONF in connections)
    from django.db.utils import load_backend
    backend = load_backend('django_mongokit.mongodbkit')
    self.assertTrue(backend is not None)

def test_reconnect_failed(unknown_host):
    """For an unknown host, connection has to be retried.

    Behavior validation is done by intercepting the logging messages
    """
    io = StringIO()
    LOGGER.addHandler(StreamHandler(io))
    with pytest.raises(OperationalError):
        backend = load_backend(unknown_host["ENGINE"])
        conn = backend.DatabaseWrapper(unknown_host, "unknown_host")
        conn.ensure_connection()
    # test that retrying has taken place (DNS errors might have been fixed)
    assert "trial 0" in io.getvalue()

def test_connect_failed_missing_db(unknown_db):
    """An unknown database must not trigger retrying, it should just fail.

    Behavior validation is done by intercepting the logging messages
    """
    io = StringIO()
    LOGGER.addHandler(StreamHandler(io))
    with pytest.raises(OperationalError):
        backend = load_backend(unknown_db["ENGINE"])
        conn = backend.DatabaseWrapper(unknown_db, "unknown_db")
        conn.ensure_connection()
    # test that retrying has NOT taken place (there's no point)
    assert "trial 0" not in io.getvalue()

def get_slave_connections():
    slave_connections = {}
    for alias, db in settings.REPLICA_DATABASES_ALL.items():
        try:
            slave_connection = connections[alias]
        except ConnectionDoesNotExist:
            db.setdefault('TIME_ZONE', None)
            db.setdefault('CONN_MAX_AGE', 0)
            db.setdefault('OPTIONS', {})
            db.setdefault('AUTOCOMMIT', True)
            backend = load_backend(db['ENGINE'])
            slave_connection = backend.DatabaseWrapper(db, alias)
        slave_connections[alias] = slave_connection
    return slave_connections

def DatabaseWrapper(settings, *args, **kwargs):
    """
    This is just evil - the caller thinks he is instantiating
    base.DatabaseWrapper. But not true! He is actually calling this factory
    method - we are pretending to be a class here. Hopefully nobody is doing
    isinstance checks against DatabaseWrapper... :)

    Returns a dynamically created wrapper for the settings.wrapped connection.
    """
    settings = copy.deepcopy(settings)
    settings['ENGINE'] = wraps = settings['OPTIONS'].pop('WRAPS', None)
    if wraps is None:
        raise RuntimeError('You must define OPTIONS["WRAPS"] in settings '
                           'for alias %s.' % args[0])
    pool_opts = dict(ON_CONNECT=settings['OPTIONS'].pop('ON_CONNECT', 'select 1'))
    if wraps in dyn_wrap_cache:
        dynwrap = dyn_wrap_cache[wraps]
    else:
        dbwrapper = load_backend(wraps).DatabaseWrapper

        # Methods we are going to add to the dynamically created wrapper.
        def get_new_connection(self, conn_params):
            key = PoolKey(self.alias, self.settings_dict)
            pooled = pool.acquire_connection(key)
            if pooled:
                self.pool_releaser.pool_object = pooled
                return pooled.connection
            conn = super(self.own_class, self).get_new_connection(conn_params)
            self.pool_releaser.pool_object = PoolObject(key, conn, self.pool_opts)
            pool.add_connection(self.pool_releaser.pool_object)
            return conn

        def close(self):
            self.validate_thread_sharing()
            if self.connection:
                self.connection = None
                self.pool_releaser.release()

        dynwrap = type('Pooled' + dbwrapper.__name__, (dbwrapper,),
                       {'get_new_connection': get_new_connection,
                        'close': close,
                        'pool_opts': pool_opts})
        dynwrap.own_class = dynwrap
        dyn_wrap_cache[wraps] = dynwrap
    conn = dynwrap(settings, *args, **kwargs)
    conn.pool_releaser = PoolReleaser()
    conn.creation = CreationWrapper(conn.creation)
    return conn

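# A minimal configuration sketch (assumed, not taken from the original project) for the
# pooling factory above: 'ENGINE' points at the module exposing this DatabaseWrapper
# factory (the 'myproject.pooled_backend' path is hypothetical), OPTIONS['WRAPS'] names
# the real backend that load_backend() imports, and OPTIONS['ON_CONNECT'] overrides the
# default 'select 1' liveness query.
DATABASES = {
    'default': {
        'ENGINE': 'myproject.pooled_backend',  # hypothetical module containing the factory
        'NAME': 'mydb',
        'OPTIONS': {
            'WRAPS': 'django.db.backends.postgresql_psycopg2',
            'ON_CONNECT': 'SELECT 1',
        },
    },
}
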
def _nodb_connection(self):
    """
    Alternative connection to be used when there is no need to access
    the main database, specifically for test db creation/deletion.
    This also prevents the production database from being exposed to
    potential child threads while (or after) the test database is destroyed.
    Refs #10868, #17786, #16969.
    """
    settings_dict = self.connection.settings_dict.copy()
    settings_dict['NAME'] = None
    backend = load_backend(settings_dict['ENGINE'])
    nodb_connection = backend.DatabaseWrapper(
        settings_dict, alias=NO_DB_ALIAS, allow_thread_sharing=False)
    return nodb_connection

def __getitem__(self, alias):
    if hasattr(self._connections, alias):
        return getattr(self._connections, alias)
    self.ensure_defaults(alias)
    db = self.databases[alias]
    backend = load_backend(db['ENGINE'])
    # this allows system to create a different type of SQL
    if backend.__name__.split(".")[3] == "postgresql_psycopg2" and \
            self.user_server_side_cursor:
        conn = pgServerSideCurorDBWrapper(db, alias)
    else:
        conn = backend.DatabaseWrapper(db, alias)
    setattr(self._connections, alias, conn)
    return conn

def init_pool():
    if not globals().get('pool_initialized', False):
        global pool_initialized
        pool_initialized = True
        try:
            backendname = settings.DATABASES['default']['ENGINE']
            backend = load_backend(backendname)
            # replace the database object with a proxy.
            backend.Database = pool.manage(backend.Database)
            backend.DatabaseError = backend.Database.DatabaseError
            backend.IntegrityError = backend.Database.IntegrityError
            logging.info("Connection Pool initialized")
        except:
            logging.exception("Connection Pool initialization error")

def init_pool():
    """From http://blog.bootstraptoday.com/2012/07/11/django-connection-pooling-using-sqlalchemy-connection-pool/"""
    if not globals().get('pool_initialized', False):
        global pool_initialized
        pool_initialized = True
        try:
            backendname = settings.DATABASES['default']['ENGINE']
            backend = load_backend(backendname)
            # replace the database object with a proxy.
            backend.Database = pool.manage(backend.Database,
                                           pool_size=settings.DB_POOL_SIZE,
                                           max_overflow=-1)
            backend.DatabaseError = backend.Database.DatabaseError
            backend.IntegrityError = backend.Database.IntegrityError
            logging.info("Connection Pool initialized")
        except:
            logging.exception("Connection Pool initialization error")

def init_pool():
    if not globals().get('pool_initialized', False):
        global pool_initialized
        pool_initialized = True
        try:
            backendname = settings.DATABASES['default']['ENGINE']
            backend = load_backend(backendname)
            # replace the database object with a proxy.
            backend.Database = ManagerProxy(pool.manage(backend.Database, **POOL_SETTINGS))
            backend.DatabaseError = backend.Database.DatabaseError
            backend.IntegrityError = backend.Database.IntegrityError
            logger.debug("Initialized Connection Pool")
        except Exception, e:
            import traceback
            traceback.print_exc()
            pass

def test_app_config_install_patch(unknown_host):
    """Tests whether the Django app config properly installs the retrier
    and retries connection failures."""
    try:
        patch = DBConnectionRetrierConfig.ready(None)
        io = StringIO()
        LOGGER.addHandler(StreamHandler(io))
        with pytest.raises(OperationalError):
            backend = load_backend(unknown_host["ENGINE"])
            conn = backend.DatabaseWrapper(unknown_host, "unknown_host")
            conn.ensure_connection()
        # test that retrying has taken place (DNS errors might have been fixed)
        assert "trial 0" in io.getvalue()
    finally:
        patch.rollback()

def handle(self, *args, **options):
    zamboni = options['zamboni']
    if not zamboni:
        print 'No path to zamboni given.'
        return

    db_settings = json.load(open(zamboni, 'r'))
    backend = utils.load_backend(db_settings['ENGINE'])
    wrapper = backend.DatabaseWrapper(db_settings)
    cursor = wrapper.cursor()

    start = 0
    limit = 1000  # The number of records to do at a time.
    count = {'imported': 0, 'skipped': 0, 'errors': 0}

    sql = 'SELECT * FROM global_stats ORDER BY id LIMIT %s,%s'
    cursor.execute(sql, (start, limit))
    results = cursor.fetchall()
    while results:
        for row in results:
            uuid = 'global_stats:%s' % row[0]
            if not Metric.objects.filter(uuid=uuid).exists():
                try:
                    obj = Metric.objects.create(uuid=uuid, date=row[3],
                                                name=fix_name(row[1]),
                                                value=row[2])
                    count['imported'] += 1
                    obj.index()
                except:
                    count['errors'] += 1
                    if options['raise']:
                        raise
            else:
                count['skipped'] += 1

        # Bump our start and re-fetch.
        start += limit
        cursor.execute(sql, (start, limit))
        results = cursor.fetchall()

    print 'Import completed.'
    print 'Processed:', sum(count.values())
    for k, v in count.items():
        print '%s:' % k.title(), v

def _create_connection(database):
    connection = connections[database]
    db_settings = connection.settings_dict
    db_engine = db_settings['ENGINE']
    backend = load_backend(db_engine)
    db_engine = db_engine.split('.')[-1]
    if db_engine == 'sqlite3':
        import sqlite3
        return sqlite3.connect(db_settings['NAME'])
    elif db_engine.startswith('postgres'):
        return backend.psycopg2.connect(
            "host='%s' dbname='postgres' user='******' password='******'"
            % (db_settings['HOST'], db_settings['USER'], db_settings['PASSWORD']))
    elif db_engine == 'pyodbc':
        return backend.Database.connect(
            "DRIVER={SQL Server Native Client 11.0};DATABASE=master;Server=%s;UID=%s;PWD=%s"
            % (db_settings['HOST'], db_settings['USER'], db_settings['PASSWORD']))
    elif db_engine == 'oracle':
        import cx_Oracle
        conn = cx_Oracle.connect('SYSTEM', db_settings['PASSWORD'], 'localhost/master')
        return conn

def make_wrapper(settings_dict, alias, **kwargs):
    """Create a wrapper for a shareddb.backends.shareddb engine.

    Generates a delegating wrapper at call-time.
    """
    if not settings_dict.get('INNER_ENGINE'):
        raise ImproperlyConfigured(
            "The shareddb.backends.shareddb database engine requires a 'INNER_ENGINE' setting.")
    delegate = make_delegate(settings_dict, alias)

    # Load the inner module
    inner_engine_module_name = settings_dict['INNER_ENGINE']
    inner_engine_module = utils.load_backend(inner_engine_module_name)

    class InnerDatabaseWrapper(DelegatingDatabaseWrapper, inner_engine_module.DatabaseWrapper):
        pass

    return delegate.execute(InnerDatabaseWrapper, delegate, settings_dict, alias, **kwargs)

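# A minimal settings sketch (assumed) for make_wrapper() above: the outer ENGINE is the
# shareddb backend named in the error message, and 'INNER_ENGINE' is the real backend that
# utils.load_backend() imports and mixes into InnerDatabaseWrapper at call time. The sqlite3
# inner engine and ':memory:' name are illustrative choices only.
DATABASES = {
    'default': {
        'ENGINE': 'shareddb.backends.shareddb',
        'INNER_ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    },
}
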
def _validate_db_dsn(self, question, dsn):
    from django.conf import settings
    if not settings.configured:
        settings.configure()
    from django.db.utils import load_backend
    from dj_database_url import parse
    db = parse(dsn)
    db['OPTIONS'] = {'connect_timeout': 3}
    # These are django defaults - there seems to be a bug in dj-database-url that causes an
    # exception if these keys are missing
    db['CONN_MAX_AGE'] = 0
    db['AUTOCOMMIT'] = True
    try:
        engine = load_backend(db['ENGINE']).DatabaseWrapper(db)
        engine.cursor()
    except KeyError:
        return False, 'Invalid DSN provided.\n'
    except Exception as e:
        return False, '{0}\n'.format(str(e))
    return True, ''

def conn(self):
    db = self.get_config()
    backend = load_backend(db['ENGINE'])
    return backend.DatabaseWrapper(db, "remote postgres")

def load_connection(self, alias, settings_dict):
    self.ensure_defaults(settings_dict)
    backend = load_backend(settings_dict['ENGINE'])
    conn = backend.DatabaseWrapper(settings_dict, alias)
    return conn

def create_connection(alias=DEFAULT_DB_ALIAS):
    connections.ensure_defaults(alias)
    connections.prepare_test_settings(alias)
    db = connections.databases[alias]
    backend = load_backend(db['ENGINE'])
    return backend.DatabaseWrapper(db, alias)

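# A hedged usage sketch for create_connection() above (assumes Django settings are already
# configured): build a throwaway wrapper for the default alias, run a trivial query, and
# close the connection again so it is not leaked.
conn = create_connection()
try:
    with conn.cursor() as cursor:
        cursor.execute('SELECT 1')
        print(cursor.fetchone())
finally:
    conn.close()
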
def mock_load_backend(name):
    if name == 'custom_backend':
        backend = Mock()
        backend.DatabaseWrapper = CustomDatabaseWrapper
        return backend
    return load_backend(name)

connections = ConnectionHandler(settings.DATABASES)

router = ConnectionRouter(settings.DATABASE_ROUTERS)

# `connection`, `DatabaseError` and `IntegrityError` are convenient aliases
# for backend bits.

# DatabaseWrapper.__init__() takes a dictionary, not a settings module, so
# we manually create the dictionary from the settings, passing only the
# settings that the database backends care about. Note that TIME_ZONE is used
# by the PostgreSQL backends.

# we load all these up for backwards compatibility, you should use
# connections['default'] instead.
connection = connections[DEFAULT_DB_ALIAS]
backend = load_backend(connection.settings_dict['ENGINE'])


# Register an event that closes the database connection
# when a Django request is finished.
def close_connection(**kwargs):
    for conn in connections.all():
        conn.close()
signals.request_finished.connect(close_connection)


# Register an event that resets connection.queries
# when a Django request is started.
def reset_queries(**kwargs):
    for conn in connections.all():
        conn.queries = []
signals.request_started.connect(reset_queries)

def reload_settings(settings, databases=None):
    """Special routine to reload django settings.

    Including: urlconf module, context processor, templatetags settings,
    database settings.
    """
    if databases:
        settings.DATABASES.update(databases)

    # check if there's settings to reload
    if hasattr(settings, "ROOT_URLCONF"):
        if settings.ROOT_URLCONF in sys.modules:
            imp.reload(sys.modules[settings.ROOT_URLCONF])
        import django
        if hasattr(django, "setup"):
            django.setup()
        import_module(settings.ROOT_URLCONF)
        set_urlconf(settings.ROOT_URLCONF)

    settings.LANGUAGE_CODE = "en"  # all tests should be run with English by default

    # Make the ConnectionHandler use the new settings, otherwise the
    # ConnectionHandler will have old configuration.
    from django.db.utils import ConnectionHandler
    import django.db
    from django.db.utils import load_backend
    import django.db.transaction
    import django.db.models
    import django.db.models.sql.query
    import django.core.management.commands.syncdb
    import django.db.models.sql.compiler
    import django.db.backends
    import django.db.backends.mysql.base
    import django.core.management.commands.loaddata

    # all modules which imported django.db.connections should be changed to get the new ConnectionHandler
    django.db.models.sql.compiler.connections = \
        django.db.models.connections = \
        django.core.management.commands.loaddata.connections = \
        django.db.backends.connections = \
        django.db.backends.mysql.base.connections = \
        django.core.management.commands.syncdb.connections = \
        django.db.transaction.connections = \
        django.db.connections = \
        django.db.models.base.connections = \
        django.db.models.sql.query.connections = \
        ConnectionHandler(settings.DATABASES)

    # default django connection and backend should also be changed
    django.db.connection = django.db.connections[django.db.DEFAULT_DB_ALIAS]
    django.db.backend = load_backend(django.db.connection.settings_dict["ENGINE"])

    import django.core.cache
    django.core.cache.cache = django.core.cache.get_cache(django.core.cache.DEFAULT_CACHE_ALIAS)

    # clear django urls cache
    clear_url_caches()
    # clear django contextprocessors cache
    context._standard_context_processors = None
    # clear django templatetags cache
    base.templatetags_modules = None
    # reload translation files
    imp.reload(translation)
    imp.reload(trans_real)
    # clear django template loaders cache
    loader.template_source_loaders = None
    from django.template.loaders import app_directories
    imp.reload(app_directories)

from django.db.utils import load_backend
from django.core.exceptions import ImproperlyConfigured

from db_multitenant.threadlocal import MultiTenantThreadlocal

WRAPPED_BACKEND = load_backend('django.db.backends.postgresql_psycopg2')


class DatabaseWrapper(WRAPPED_BACKEND.DatabaseWrapper):
    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        self.threadlocal = MultiTenantThreadlocal()
        self.search_path_set = False

    def close(self):
        self.search_path_set = False
        super(DatabaseWrapper, self).close()

    def rollback(self):
        super(DatabaseWrapper, self).rollback()
        # Django's rollback clears the search path so we have to set it again
        # the next time.
        self.search_path_set = False

    def get_threadlocal(self):
        return self.threadlocal

    def _cursor(self, name=None):
        """Supplies a cursor, selecting the schema if required.

        Ideally we'd override a get_new_connection DatabaseWrapper function,
        but _cursor() is as close as it gets.
        """

def _backend(self):
    warnings.warn("Accessing django.db.backend is deprecated.",
                  DeprecationWarning, stacklevel=2)
    return load_backend(connections[DEFAULT_DB_ALIAS].settings_dict['ENGINE'])

def _backend(self):
    warnings.warn("Accessing django.db.backend is deprecated.",
                  RemovedInDjango18Warning, stacklevel=2)
    return load_backend(connections[DEFAULT_DB_ALIAS].settings_dict['ENGINE'])