def close(self):
    """Close the connection, except for in-memory SQLite databases.

    Closing the connection to an in-memory SQLite database destroys the
    database itself, so close requests are deliberately ignored in that
    case to prevent accidental data loss.
    """
    self.validate_thread_sharing()
    # Guard clause: keep in-memory databases alive by refusing to close.
    if self.is_in_memory_db():
        return
    BaseDatabaseWrapper.close(self)
def __init__(self, *args, **kwargs):
    """Initialize the wrapper and wire up its backend component objects."""
    BaseDatabaseWrapper.__init__(self, *args, **kwargs)
    # Resolved lazily elsewhere; start unknown.
    self.server_version = None
    # Each backend component receives this wrapper as its connection.
    for attr_name, component_cls in (
        ('features', DatabaseFeatures),
        ('ops', DatabaseOperations),
        ('client', DatabaseClient),
        ('creation', DatabaseCreation),
        ('introspection', DatabaseIntrospection),
        ('validation', DatabaseValidation),
    ):
        setattr(self, attr_name, component_cls(self))
def create_cloned_sqlite_db(queries):
    """Load one SQL dump per configured database into fresh connections.

    Each entry in ``queries`` is the full SQL text (e.g. from
    ``connection.iterdump()``) for the database at the same position in
    ``connections.all()``.

    Magic. Inspired by:
    http://stackoverflow.com/questions/8045602/how-can-i-copy-an-in-memory-sqlite-database-to-another-in-memory-sqlite-database
    http://stackoverflow.com/questions/8242837/django-multiprocessing-and-database-connections

    :param queries: iterable of SQL script strings, one per database.
    """
    for query_list, database_wrapper in zip(queries, connections.all()):
        # Work around :memory: in django/db/backends/sqlite3/base.py
        BaseDatabaseWrapper.close(database_wrapper)
        # cursor() forces the wrapper to (re)open the underlying connection.
        database_wrapper.cursor()
        # executescript() parses the dump correctly, unlike the previous
        # split(';') approach, which broke statements containing semicolons
        # inside string literals and executed a dangling ';' for the
        # trailing empty fragment of the split.
        database_wrapper.connection.executescript(query_list)
        database_wrapper.connection.commit()
def _rollback(self):
    """Roll back the current transaction, ignoring backends that cannot.

    Some database configurations raise NotSupportedError on rollback;
    treat that as a harmless no-op rather than propagating it.
    """
    try:
        BaseDatabaseWrapper._rollback(self)
    except NotSupportedError:
        # Rollback unsupported by this backend/connection: nothing to undo.
        pass
def run_tests(self, test_labels, extra_tests=None, **kwargs):
    """Run the given test labels across multiple worker processes.

    Builds one suite per label, prepares the test databases (either by
    reusing saved SQL dumps when --ramdb matches a previous run, or by
    running migrations and saving the dumps), fans the suites out to
    worker processes, then merges and prints the per-label results.

    :param test_labels: app/test labels to run; discovered when empty.
    :param extra_tests: optional extra suite, queued under 'extra_tests'.
    """
    # One extra queue slot when an extra_tests suite is present.
    extra = 1 if extra_tests else 0
    start = time.time()
    if not test_labels:
        # If no test labels were provided, provide them
        # and remove our custom exclusions
        test_labels = [
            # Don't double-discover tests?
            app.__name__.replace('.models', '')
            for app in self.get_apps_after_exclusions()
        ]
    # Hide most of the test output so we can focus on failures,
    # unless the user wanted to see the full output per app.
    if self.verbosity == 1:
        self.verbosity = 0
    self.setup_test_environment()
    # Prepare (often many) test suites to be run across multiple processes
    # suite = self.build_suite(test_labels, extra_tests)
    processes = []
    # Work queue of (label, suite) pairs consumed by the workers.
    source_queue = Queue(maxsize=len(test_labels) + extra)
    for label in test_labels:
        suite = self.build_suite([label])
        source_queue.put((label, suite))
    if extra_tests:
        source_queue.put(
            ('extra_tests', self.build_suite(None, extra_tests))
        )
    if self.ramdb and self.db_files_exist():
        # Have run before, reuse the RAM DB.
        in_files = self.db_file_paths()
        print('Reusing database files: \n{}'.format('\n'.join(in_files)))
        # One saved SQL dump per database; workers replay these.
        queries = []
        for in_file_name in in_files:
            with open(in_file_name) as infile:
                queries.append('\n'.join(infile.readlines()))
        # Django >= 1.7 needs the patched setup_databases variant.
        if DJANGO_VERSION[1] >= 7:
            hijack_setup_databases(self.verbosity, self.interactive)
        else:
            self.setup_databases()
    else:
        # No reusable dumps: run migrations and save the resulting SQL.
        start = time.time()
        tag_hash = self.get_source_control_tag_hash()
        if tag_hash == self.DEFAULT_TAG_HASH:
            print('git or hg source control not found, '
                  'only most recent migration saved')
        print('Running (often slow) migrations... \n'
              'Hint: Use --ramdb={} to reuse the final stored SQL later.'
              .format(tag_hash))
        # Saved dumps live in a directory named after the VCS tag hash.
        tag_hash = os.path.join(self.ramdb_saves, tag_hash)
        if not os.path.exists(tag_hash):
            os.makedirs(tag_hash)
        # Only run the slow migrations if --ramdb is not specified,
        # or running for first time
        old_config = self.setup_databases()
        queries = []
        for database_wrapper in connections.all():
            connection = database_wrapper.connection
            # Serialize the freshly-migrated database to SQL.
            sql = '\n'.join(line for line in connection.iterdump())
            queries.append(sql)
            # Work around :memory: in django/db/backends/sqlite3/base.py
            BaseDatabaseWrapper.close(database_wrapper)
            mem, db_name = database_wrapper.creation.test_db_signature()
            # Persist the dump so later runs can reuse it via --ramdb.
            with open(self.get_db_path(db_name, tag_hash), 'w') as outfile:
                outfile.write(sql)
        self.teardown_databases(old_config)
        msg = 'Setup, migrations, ... completed in {:.3f} seconds'.format(
            time.time() - start
        )
        print(msg)
    # Workers push (label, result-dict) pairs here.
    result_queue = Queue(maxsize=len(test_labels) + extra)
    process_args = (self, source_queue, result_queue, queries)
    for _ in range(min(self.concurrency, len(test_labels) + extra)):
        p = Process(target=multi_proc_run_tests, args=process_args)
        p.start()
        processes.append(p)
    else:
        # Concurrency == 0 - run in same process
        # NOTE(review): this is a for/else, so this branch ALSO runs after
        # the loop completes normally when concurrency > 0 (the main
        # process then drains the queue alongside the workers) — confirm
        # this is intended rather than a missing `if self.concurrency:`.
        multi_proc_run_tests(*process_args)
    for p in processes:
        p.join()
    results = []
    retrieved_labels = []
    # Drain all worker results; assumes workers have finished (joined).
    while not result_queue.empty():
        retrieved_label, result = result_queue.get()
        results.append(result)
        retrieved_labels.append(retrieved_label)
    # Labels that produced no result (e.g. a worker died) are reported.
    not_covered = set(test_labels) - set(retrieved_labels)
    if not_covered:
        msg = (
            'Tests that did not return results under --concurrency={} '
            '(try running separately, or with --concurrency=0): {}'.format(
                self.concurrency,
                ' '.join(sorted(not_covered)),
            ))
        print(msg)
    # Labels with failures/errors ("MARS") ...
    mars = [
        r['test_label'] for r in results
        if r['fail_count'] or r['error_count']
    ]
    # ... and labels that only skipped tests.
    skippy = [
        r['test_label'] for r in results
        if r['skip_count'] and r['test_label'] not in mars
    ]
    if mars or skippy:
        line = ''.join((
            '---Copy/Paste-after-manage-py-test---',
            colored('Skipped', 'yellow'),
            '-or-',
            colored('MARS', 'red'),
            '-' * 28,
        ))
        print(line)
        if skippy:
            print(colored(' '.join(skippy), 'yellow'))
        if mars:
            print(colored(' '.join(mars), 'red'))
    # Aggregate the per-label counters into one overall summary record.
    merged = {
        'test_label': 'OVERALL',
        'run': sum([r['run'] for r in results]),
        'fail_count': sum([r['fail_count'] for r in results]),
        'error_count': sum([r['error_count'] for r in results]),
        'skip_count': sum([r['skip_count'] for r in results]),
        'expected_fail_count': sum([r['expected_fail_count'] for r in results]),
        'unexpected_success_count': sum([r['unexpected_success_count'] for r in results]),
    }
    merged['short_summary'] = build_short_summary(merged)
    end = time.time()
    merged['took'] = end - start
    # ASCII-art banner for the final overall result.
    final_result = ''.join((
        '_ \~ ', '-meep-meep', '-' * 64, '\n',
        ' `=/ ', build_message(merged), '\n',
        '~` `~ ', '-' * 74,
    ))
    msg = colored(final_result, color=get_colour(merged), attrs=['bold'])
    print(msg)
    self.teardown_test_environment()
def _rollback(self):
    """Roll back the current transaction, ignoring backends that cannot.

    Some database drivers raise Database.NotSupportedError on rollback;
    treat that as a harmless no-op rather than propagating it.
    """
    try:
        BaseDatabaseWrapper._rollback(self)
    except Database.NotSupportedError:
        # Rollback unsupported by this driver/connection: nothing to undo.
        pass
def test_get_database_version(self):
    """The base wrapper must raise NotImplementedError for the version hook."""
    expected_message = (
        "subclasses of BaseDatabaseWrapper may require a "
        "get_database_version() method."
    )
    # Stub out __init__ so the abstract wrapper can be instantiated
    # without any database configuration.
    with patch.object(BaseDatabaseWrapper, "__init__", return_value=None):
        with self.assertRaisesMessage(NotImplementedError, expected_message):
            BaseDatabaseWrapper().get_database_version()
def fix_connection(sender: type, *, dbwrapper: BaseDatabaseWrapper, **kwargs: Any) -> None:
    """Signal handler that swaps the wrapper's connect for its s_connect."""
    # Rebind so future connect() calls go through the saved s_connect.
    setattr(dbwrapper, 'connect', dbwrapper.s_connect)
"""