def test_setup_test_database_aliases(self):
    """
    The default database must be the first because data migrations use the
    default alias by default.
    """
    # Both aliases point at the same physical database; 'other' is listed
    # first on purpose so the ordering assertion below is meaningful.
    dummy_settings = {'ENGINE': 'django.db.backends.dummy', 'NAME': 'dbname'}
    tested_connections = db.ConnectionHandler({
        'other': dict(dummy_settings),
        'default': dict(dummy_settings),
    })
    with mock.patch('django.test.utils.connections', new=tested_connections):
        test_databases, _ = get_unique_databases_and_mirrors()
    expected = {
        ('', '', 'django.db.backends.dummy', 'test_dbname'): (
            'dbname',
            ['default', 'other'],
        ),
    }
    self.assertEqual(test_databases, expected)
def test_setup_test_database_aliases(self):
    """
    The default database must be the first because data migrations use the
    default alias by default.
    """
    # 'other' is deliberately registered before 'default' so the test proves
    # that ordering in settings does not dictate setup order.
    databases = {}
    for alias in ("other", "default"):
        databases[alias] = {
            "ENGINE": "django.db.backends.dummy",
            "NAME": "dbname",
        }
    tested_connections = db.ConnectionHandler(databases)
    with mock.patch("django.test.utils.connections", new=tested_connections):
        test_databases, _ = get_unique_databases_and_mirrors()
        signature = ("", "", "django.db.backends.dummy", "test_dbname")
        self.assertEqual(
            test_databases,
            {signature: ("dbname", ["default", "other"])},
        )
def setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, aliases=None, **kwargs):
    """Create the test databases.

    This function is a copy of the Django setup_databases with one addition:
    a Tenant object is created and saved when setting up the database.

    Args:
        verbosity (int): forwarded to the Django creation backend.
        interactive (bool): when False, existing test DBs are clobbered
            without prompting (``autoclobber=not interactive``).
        keepdb (bool): reuse an existing test database if present.
        debug_sql (bool): turn on ``force_debug_cursor`` for every connection.
        parallel (int): number of per-worker clones to create when > 1.
        aliases: optional subset of connection aliases to set up.

    Returns:
        list: ``(connection, original db name, created-here flag)`` triples,
        where the flag is True only for the alias that actually created the
        test database (the others are configured as mirrors).
    """
    test_databases, mirrored_aliases = get_unique_databases_and_mirrors(aliases)
    old_names = []
    # NOTE(review): the loop variable below shadows the `aliases` parameter;
    # the parameter is already consumed above, so this is safe but confusing.
    for db_name, aliases in test_databases.values():
        first_alias = None
        for alias in aliases:
            connection = connections[alias]
            old_names.append((connection, db_name, first_alias is None))
            # Actually create the database for the first connection
            if first_alias is None:
                first_alias = alias
                connection.creation.create_test_db(
                    verbosity=verbosity,
                    autoclobber=not interactive,
                    keepdb=keepdb,
                    serialize=connection.settings_dict.get('TEST', {}).get(
                        'SERIALIZE', True),
                )
                # Local addition: seed the freshly created test DB with the
                # test-runner's tenant.
                tenant = Tenant.objects.get_or_create(
                    schema_name=KokuTestRunner.schema)[0]
                tenant.save()
                if parallel > 1:
                    # One clone per parallel worker (suffixes 1..parallel).
                    for index in range(parallel):
                        connection.creation.clone_test_db(
                            suffix=str(index + 1),
                            verbosity=verbosity,
                            keepdb=keepdb,
                        )
            # Configure all other connections as mirrors of the first one
            else:
                connections[alias].creation.set_as_test_mirror(
                    connections[first_alias].settings_dict)
    # Configure the test mirrors.
    for alias, mirror_alias in mirrored_aliases.items():
        connections[alias].creation.set_as_test_mirror(
            connections[mirror_alias].settings_dict)
    if debug_sql:
        for alias in connections:
            connections[alias].force_debug_cursor = True
    return old_names
def _setup_databases(self, verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, **kwargs):
    """Create the test databases. Except those with BYPASS_CREATION.

    Mirrors Django's setup_databases(), with one twist: any connection whose
    settings carry ``BYPASS_CREATION`` set to something other than ``'no'``
    is skipped entirely — no test DB is created for it and it is not recorded
    in the returned teardown list.

    Returns:
        list: ``(connection, original db name, created-here flag)`` triples
        for every non-bypassed alias.
    """
    print('MyTestRunner: setup_databases ...')
    test_databases, mirrored_aliases = get_unique_databases_and_mirrors()
    old_names = []
    for signature, (db_name, aliases) in test_databases.items():
        first_alias = None
        for alias in aliases:
            connection = connections[alias]
            # 'no' is the default: creation happens unless the settings
            # explicitly opt out.
            if connection.settings_dict.get('BYPASS_CREATION', 'no') == 'no':
                print('MyTestRunner: call create DB ...', alias)
                old_names.append(
                    (connection, db_name, first_alias is None))
                # Actually create the database for the first connection
                if first_alias is None:
                    first_alias = alias
                    connection.creation.create_test_db(
                        verbosity=verbosity,
                        autoclobber=not interactive,
                        keepdb=keepdb,
                        serialize=connection.settings_dict.get(
                            'TEST', {}).get('SERIALIZE', True),
                    )
                    if parallel > 1:
                        # One clone per parallel worker (suffixes 1..N).
                        for index in range(parallel):
                            connection.creation.clone_test_db(
                                suffix=str(index + 1),
                                verbosity=verbosity,
                                keepdb=keepdb,
                            )
                # Configure all other connections as mirrors of the first one
                else:
                    connections[alias].creation.set_as_test_mirror(
                        connections[first_alias].settings_dict)
            else:
                print('MyTestRunner: skipped create DB for', alias)
    # Configure the test mirrors.
    for alias, mirror_alias in mirrored_aliases.items():
        connections[alias].creation.set_as_test_mirror(
            connections[mirror_alias].settings_dict)
    if debug_sql:
        for alias in connections:
            connections[alias].force_debug_cursor = True
    return old_names
def _setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, aliases=None, **kwargs):
    """Create the test databases.

    Args:
        verbosity (int): forwarded to the Django creation backend.
        interactive (bool): when False, existing test DBs are clobbered
            without prompting (``autoclobber=not interactive``).
        keepdb (bool): reuse an existing test database if present.
        debug_sql (bool): turn on ``force_debug_cursor`` on every connection.
        parallel (int): number of per-worker clones to create when > 1.
        aliases: optional subset of connection aliases to set up.

    Returns:
        list: ``(connection, original db name, created-here flag)`` triples.
    """
    # Fix: removed the dead "overriding the test database with localsetup"
    # block (a hard-coded ``mirrored_aliases = {}`` plus an OrderedDict of
    # db.sqlite3 paths) — both values were unconditionally overwritten by the
    # call below before ever being read, so dropping them changes nothing.
    test_databases, mirrored_aliases = get_unique_databases_and_mirrors(aliases)
    old_names = []
    # NOTE: the loop variable shadows the `aliases` parameter, which is
    # already consumed above.
    for db_name, aliases in test_databases.values():
        first_alias = None
        for alias in aliases:
            connection = connections[alias]
            old_names.append((connection, db_name, first_alias is None))
            # Actually create the database for the first connection
            if first_alias is None:
                first_alias = alias
                connection.creation.create_test_db(
                    verbosity=verbosity,
                    autoclobber=not interactive,
                    keepdb=keepdb,
                    serialize=connection.settings_dict.get('TEST', {}).get(
                        'SERIALIZE', True),
                )
                if parallel > 1:
                    # One clone per parallel worker (suffixes 1..parallel).
                    for index in range(parallel):
                        connection.creation.clone_test_db(
                            suffix=str(index + 1),
                            verbosity=verbosity,
                            keepdb=keepdb,
                        )
            # Configure all other connections as mirrors of the first one
            else:
                connections[alias].creation.set_as_test_mirror(
                    connections[first_alias].settings_dict)
    # Configure the test mirrors.
    for alias, mirror_alias in mirrored_aliases.items():
        connections[alias].creation.set_as_test_mirror(
            connections[mirror_alias].settings_dict)
    if debug_sql:
        for alias in connections:
            connections[alias].force_debug_cursor = True
    return old_names
def _get_databases(self):
    """Return one ``(connection, db_name, True)`` triple per unique test DB.

    Only the first alias of each unique database is used; mirrored aliases
    are rejected outright via the assertion below.
    """
    from django.db import connections

    test_databases, mirrored_aliases = get_unique_databases_and_mirrors()
    assert not mirrored_aliases, "DB mirrors not supported"
    return [
        (connections[next(iter(aliases))], db_name, True)
        for db_name, aliases in test_databases.values()
    ]
def _get_databases(self):
    """Collect ``(connection, db_name, True)`` for each unique test database.

    Asserts that no mirrored aliases are configured, then pairs each unique
    database name with the connection of its first alias.
    """
    from django.db import connections

    collected = []
    test_databases, mirrored_aliases = get_unique_databases_and_mirrors()
    assert not mirrored_aliases, "DB mirrors not supported"
    for _signature, entry in test_databases.items():
        db_name, aliases = entry
        primary_alias = list(aliases)[0]
        collected.append((connections[primary_alias], db_name, True))
    return collected
def setup_databases(verbosity, keepdb=False, debug_sql=False, parallel=0, **kwargs):  # pylint: disable=too-many-locals, unused-argument
    """Create the test databases.

    Variant of Django's setup_databases: instead of calling
    ``connection.creation.create_test_db()``, the first alias of each unique
    database is populated via ``SqliteToSqlManager().write_sql_to_test_db()``
    (see the marked local modification below).

    Returns:
        list: ``(connection, original db name, created-here flag)`` triples.
    """
    test_databases, mirrored_aliases = get_unique_databases_and_mirrors()
    old_names = []
    for _, (db_name, aliases) in test_databases.items():
        first_alias = None
        for alias in aliases:
            connection = connections[alias]
            old_names.append((connection, db_name, first_alias is None))
            # Actually create the database for the first connection
            if first_alias is None:
                first_alias = alias
                # --- start of local modification ---
                # (replaces the stock create_test_db() call; the test DB is
                # written from SQL instead)
                SqliteToSqlManager().write_sql_to_test_db()
                # --- end of local modification ---
                if parallel > 1:
                    # NOTE(review): clone_test_db is invoked even though
                    # create_test_db was never called here — presumably
                    # write_sql_to_test_db() produces the DB to clone; confirm.
                    for index in range(parallel):
                        connection.creation.clone_test_db(
                            suffix=str(index + 1),
                            verbosity=verbosity,
                            keepdb=keepdb,
                        )
            # Configure all other connections as mirrors of the first one
            else:
                connections[alias].creation.set_as_test_mirror(
                    connections[first_alias].settings_dict)
    # Configure the test mirrors.
    for alias, mirror_alias in mirrored_aliases.items():
        connections[alias].creation.set_as_test_mirror(
            connections[mirror_alias].settings_dict)
    if debug_sql:
        for alias in connections:
            connections[alias].force_debug_cursor = True
    return old_names
def setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, aliases=None, **kwargs):
    """Create the test databases.

    This function is a copy of the Django setup_databases with one addition:
    a Tenant object is created and saved when setting up the database, and
    static test data is loaded for the koku test schema.

    Returns:
        list: ``(connection, original db name, created-here flag)`` triples.
    """
    test_databases, mirrored_aliases = get_unique_databases_and_mirrors(aliases)
    old_names = []
    # NOTE(review): the loop variable shadows the `aliases` parameter, which
    # is already consumed above.
    for db_name, aliases in test_databases.values():
        first_alias = None
        for alias in aliases:
            connection = connections[alias]
            old_names.append((connection, db_name, first_alias is None))
            # Actually create the database for the first connection
            if first_alias is None:
                first_alias = alias
                connection.creation.create_test_db(
                    verbosity=verbosity,
                    autoclobber=not interactive,
                    keepdb=keepdb,
                    serialize=connection.settings_dict.get("TEST", {}).get("SERIALIZE", True),
                )
                try:
                    # Seed the primary test tenant/customer for the runner's
                    # schema.
                    tenant = Tenant.objects.get_or_create(schema_name=KokuTestRunner.schema)[0]
                    tenant.save()
                    customer, __ = Customer.objects.get_or_create(
                        account_id=KokuTestRunner.account, schema_name=KokuTestRunner.schema
                    )
                    with tenant_context(tenant):
                        for tag_key in OCP_ENABLED_TAGS:
                            OCPEnabledTagKeys.objects.get_or_create(key=tag_key)
                    # Load static nise-generated test data for each provider
                    # type.
                    data_loader = NiseDataLoader(KokuTestRunner.schema)
                    data_loader.load_openshift_data(customer, "ocp_aws_static_data.yml", "OCP-on-AWS")
                    data_loader.load_openshift_data(customer, "ocp_azure_static_data.yml", "OCP-on-Azure")
                    data_loader.load_aws_data(customer, "aws_static_data.yml")
                    data_loader.load_azure_data(customer, "azure_static_data.yml")
                    # Additional empty tenants/customers used by tests.
                    for account in [("10002", "acct10002"), ("12345", "acct12345")]:
                        tenant = Tenant.objects.get_or_create(schema_name=account[1])[0]
                        tenant.save()
                        Customer.objects.get_or_create(account_id=account[0], schema_name=account[1])
                except Exception:
                    # NOTE(review): any failure during tenant/data seeding is
                    # silently swallowed here, so tests may run against an
                    # unseeded database with confusing downstream failures —
                    # consider logging (or re-raising) instead.
                    pass
                if parallel > 1:
                    # One clone per parallel worker (suffixes 1..parallel).
                    for index in range(parallel):
                        connection.creation.clone_test_db(suffix=str(index + 1), verbosity=verbosity, keepdb=keepdb)
            # Configure all other connections as mirrors of the first one
            else:
                connections[alias].creation.set_as_test_mirror(connections[first_alias].settings_dict)
    # Configure the test mirrors.
    for alias, mirror_alias in mirrored_aliases.items():
        connections[alias].creation.set_as_test_mirror(connections[mirror_alias].settings_dict)
    if debug_sql:
        for alias in connections:
            connections[alias].force_debug_cursor = True
    return old_names
#!/usr/bin/env python
"""Drop, recreate, and seed every unique Django test database.

For each unique test database discovered from the Django settings, the
database is dropped and recreated through the alias's no-db connection, then
seeded by running ``init_test_db.sql`` with the ``sqlcmd`` command-line
client (SQL Server).
"""
import os

# Point Django at the project settings BEFORE importing anything that might
# resolve them.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'openIMIS.settings')

from django.conf import settings
from django.db import connections
from django.test.utils import get_unique_databases_and_mirrors

# Absolute path to the seed script so the tool works from any working
# directory.
init_test_db_file = os.path.join(os.path.dirname(__file__), 'init_test_db.sql')

test_databases, mirrored_aliases = get_unique_databases_and_mirrors()
for (db_host, db_port, db_engine, db_name), (name, cfgs) in test_databases.items():
    for cfg in cfgs:
        # _nodb_connection connects without selecting a database, which is
        # required in order to DROP/CREATE the database itself.
        with connections[cfg]._nodb_connection.cursor() as c:
            # NOTE(review): db_name comes from trusted settings, but this is
            # still string-built SQL — keep untrusted input away from it.
            c.execute("DROP DATABASE IF EXISTS %s" % db_name)
            c.execute("CREATE DATABASE %s" % db_name)
        # Bug fix: previously the command hard-coded the bare relative name
        # 'init_test_db.sql' (ignoring init_test_db_file computed above),
        # which failed when the script was invoked from another directory.
        os.system("sqlcmd -S %s,%s -U %s -P '%s' -d %s -i %s" % (
            db_host,
            db_port,
            settings.DATABASES[cfg]['USER'],
            settings.DATABASES[cfg]['PASSWORD'],
            db_name,
            init_test_db_file,
        ))
def setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, aliases=None, **kwargs):
    """Create the test databases.

    This function is a copy of the Django setup_databases with one addition:
    a Tenant object is created and saved when setting up the database, and a
    large set of static/bakery-generated test data is loaded into the koku
    test schema.

    Returns:
        list: ``(connection, original db name, created-here flag)`` triples.
    """
    test_databases, mirrored_aliases = get_unique_databases_and_mirrors(
        aliases)
    old_names = []
    # NOTE(review): the loop variable shadows the `aliases` parameter, which
    # is already consumed above.
    for db_name, aliases in test_databases.values():
        first_alias = None
        for alias in aliases:
            connection = connections[alias]
            old_names.append((connection, db_name, first_alias is None))
            # Actually create the database for the first connection
            if first_alias is None:
                first_alias = alias
                # NOTE(review): test_db_name is assigned but never used below.
                test_db_name = connection.creation.create_test_db(
                    verbosity=verbosity,
                    autoclobber=not interactive,
                    keepdb=keepdb,
                    serialize=connection.settings_dict.get("TEST", {}).get(
                        "SERIALIZE", True),
                )
                try:
                    # Template tenant schema first, then the runner's tenant;
                    # schemas are only created for newly inserted rows.
                    tenant, created = Tenant.objects.get_or_create(
                        schema_name=Tenant._TEMPLATE_SCHEMA)
                    if created:
                        tenant.save()
                        tenant.create_schema()
                    tenant, created = Tenant.objects.get_or_create(
                        schema_name=KokuTestRunner.schema)
                    if created:
                        tenant.save()
                        tenant.create_schema()
                    customer, __ = Customer.objects.get_or_create(
                        account_id=KokuTestRunner.account,
                        schema_name=KokuTestRunner.schema)
                    ######### TODO: remove after azure has been converted ########
                    with tenant_context(tenant):
                        for tag_key in OCP_ENABLED_TAGS:
                            OCPEnabledTagKeys.objects.get_or_create(
                                key=tag_key)
                    ##############################################################
                    data_loader = NiseDataLoader(KokuTestRunner.schema, customer)
                    # Obtain the day_list from yaml
                    read_yaml = UploadAwsTree(None, None, None, None)
                    tree_yaml = read_yaml.import_yaml(
                        yaml_file_path="dev/scripts/aws_org_tree.yml")
                    day_list = tree_yaml["account_structure"]["days"]
                    # Load data
                    # TODO: COST-444: This NiseDataLoader to be removed and replaced with the commented baker_data_loaders below.
                    # NOTE(review): this reassignment duplicates the identical
                    # NiseDataLoader construction a few lines above.
                    data_loader = NiseDataLoader(KokuTestRunner.schema, customer)
                    data_loader.load_openshift_data(
                        customer, "ocp_azure_static_data.yml", "OCP-on-Azure")
                    data_loader.load_azure_data(customer, "azure_static_data.yml")
                    # Bakery-based loaders for the remaining provider types.
                    bakery_data_loader = ModelBakeryDataLoader(
                        KokuTestRunner.schema, customer)
                    ocp_on_aws_cluster_id = "OCP-on-AWS"
                    ocp_on_azure_cluster_id = "OCP-on-Azure"
                    ocp_on_gcp_cluster_id = "OCP-on-GCP"
                    ocp_on_prem_cluster_id = "OCP-on-Prem"
                    ocp_on_aws_ocp_provider, ocp_on_aws_report_periods = bakery_data_loader.load_openshift_data(
                        ocp_on_aws_cluster_id, on_cloud=True)
                    # TODO: COST-444: uncomment these when the above NISE data_loader is removed
                    # ocp_on_azure_ocp_provider, ocp_on_azure_report_periods = bakery_data_loader.load_openshift_data(
                    #     ocp_on_azure_cluster_id, on_cloud=True
                    # )
                    ocp_on_gcp_ocp_provider, ocp_on_gcp_report_periods = bakery_data_loader.load_openshift_data(
                        ocp_on_gcp_cluster_id, on_cloud=True)
                    bakery_data_loader.load_openshift_data(
                        ocp_on_prem_cluster_id, on_cloud=False)
                    aws_bills = bakery_data_loader.load_aws_data(
                        linked_openshift_provider=ocp_on_aws_ocp_provider,
                        day_list=day_list)
                    # TODO: COST-444: uncomment these when the above NISE data_loader is removed
                    # azure_bills = bakery_data_loader.load_azure_data(
                    #     linked_openshift_provider=ocp_on_azure_ocp_provider
                    # )
                    gcp_bills = bakery_data_loader.load_gcp_data(
                        linked_openshift_provider=ocp_on_gcp_ocp_provider)
                    bakery_data_loader.load_openshift_on_cloud_data(
                        Provider.PROVIDER_AWS_LOCAL, ocp_on_aws_cluster_id,
                        aws_bills, ocp_on_aws_report_periods)
                    # bakery_data_loader.load_openshift_on_cloud_data(
                    #     Provider.PROVIDER_AZURE_LOCAL,
                    #     ocp_on_azure_cluster_id,
                    #     azure_bills,
                    #     ocp_on_azure_report_periods,
                    # )
                    bakery_data_loader.load_openshift_on_cloud_data(
                        Provider.PROVIDER_GCP_LOCAL, ocp_on_gcp_cluster_id,
                        gcp_bills, ocp_on_gcp_report_periods)
                    # Additional empty tenants/customers used by tests.
                    for account in [("10002", "acct10002"), ("12345", "acct12345")]:
                        tenant = Tenant.objects.get_or_create(
                            schema_name=account[1])[0]
                        tenant.save()
                        tenant.create_schema()
                        Customer.objects.get_or_create(
                            account_id=account[0], schema_name=account[1])
                except Exception as err:
                    # Seeding failures are logged and fatal — an unseeded test
                    # DB is worse than a failed setup.
                    LOG.error(err)
                    raise err
                if parallel > 1:
                    # One clone per parallel worker (suffixes 1..parallel).
                    for index in range(parallel):
                        connection.creation.clone_test_db(suffix=str(index + 1), verbosity=verbosity, keepdb=keepdb)
            # Configure all other connections as mirrors of the first one.
            else:
                connection.creation.set_as_test_mirror(
                    connections[first_alias].settings_dict)
    # Configure the test mirrors.
    for alias, mirror_alias in mirrored_aliases.items():
        connections[alias].creation.set_as_test_mirror(
            connections[mirror_alias].settings_dict)
    if debug_sql:
        for alias in connections:
            connections[alias].force_debug_cursor = True
    return old_names