def crawl_account_hierarchy(request):
    """Queue (GET) or seed (POST) an AWS account-hierarchy crawl.

    Both methods require a ``provider_uuid`` query parameter. GET validates
    that the provider exists, queues the async ``crawl_hierarchy`` task and
    returns its task id. POST reads a tree description from the request body
    and inserts it into the given schema, echoing the body back on success.
    Every validation failure returns a 400 with an ``Error`` key.
    """
    provider_uuid = request.query_params.get("provider_uuid")
    if provider_uuid is None:
        return Response(
            {"Error": "provider_uuid is a required parameter."},
            status=status.HTTP_400_BAD_REQUEST,
        )
    if request.method == "GET":
        # Orchestrator.get_accounts returns every account when the uuid is
        # unknown, so existence must be checked explicitly before queueing.
        with ProviderCollector() as collector:
            uuid_map = collector.get_provider_uuid_map()
        if not uuid_map.get(str(provider_uuid)):
            return Response(
                {"Error": f"The provider_uuid {provider_uuid} does not exist."},
                status=status.HTTP_400_BAD_REQUEST,
            )
        task = crawl_hierarchy.delay(provider_uuid=provider_uuid)
        return Response({"Crawl Account Hierarchy Task ID": str(task)})
    if request.method == "POST":
        data = request.data
        schema_name = data.get("schema")
        if schema_name is None:
            return Response(
                {"Error": "schema is a required parameter."},
                status=status.HTTP_400_BAD_REQUEST,
            )
        days_list = data.get("account_structure", {}).get("days")
        if days_list is None:
            return Response(
                {"Error": "Unexpected json structure. Can not find days key."},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # start_date is optional; only forward it when the caller supplied one
        # so InsertAwsOrgTree's own default applies otherwise.
        tree_kwargs = {"schema": schema_name, "provider_uuid": provider_uuid}
        if data.get("start_date"):
            tree_kwargs["start_date"] = data.get("start_date")
        InsertAwsOrgTree(**tree_kwargs).insert_tree(day_list=days_list)
        return Response(data)
def load_aws_data(self, customer, static_data_file, account_id=None, role_arn=None, day_list=None):
    """Load AWS data into the database.

    Renders the nise static-report template for each date range in
    ``self.dates``, runs nise to generate report files, processes every
    generated (non-manifest) report file, then builds the summary tables.

    Args:
        customer: customer the generated Provider is attached to.
        static_data_file: name of the nise static data yaml template.
        account_id (str): AWS account id used in rendered reports;
            defaults to a dummy test id.
        role_arn (str): AWS role ARN for the provider credentials;
            defaults to a dummy test ARN.
        day_list: optional AWS org-tree day structure; when given, the
            org unit tree is inserted before report processing.
    """
    provider_type = Provider.PROVIDER_AWS_LOCAL
    if account_id is None:
        account_id = "9999999999999"
    if role_arn is None:
        role_arn = "arn:aws:iam::999999999999:role/CostManagement"
    # nise expects the non-"-local" provider type name (e.g. "AWS").
    nise_provider_type = provider_type.replace("-local", "")
    report_name = "Test"
    credentials = {"role_arn": role_arn}
    data_source = {"bucket": "test-bucket"}
    # Disable auto ingest so creating the Provider does not trigger downloads.
    with patch.object(settings, "AUTO_DATA_INGEST", False):
        provider = baker.make(
            "Provider",
            type=provider_type,
            authentication__credentials=credentials,
            billing_source__data_source=data_source,
            customer=customer,
        )
    # chicken/egg problem. I need the provider_uuid to upload aws org unit tree
    # but the tree needs to be created first in order to populate the org unit
    # foreign key on the daily summary table.
    if day_list:
        org_tree_obj = InsertAwsOrgTree(schema=self.schema, provider_uuid=provider.uuid, start_date=self.dates[0][0])
        org_tree_obj.insert_tree(day_list=day_list)
    template, static_data_path = self.prepare_template(provider_type, static_data_file)
    options = {
        "static_report_file": static_data_path,
        "aws_report_name": report_name,
        "aws_bucket_name": self.nise_data_path,
    }
    base_path = f"{self.nise_data_path}/{report_name}"
    with schema_context(self.schema):
        baker.make("AWSAccountAlias", account_id=account_id, account_alias="Test Account")
    # One manifest + nise run + report-processing pass per billing period.
    for start_date, end_date, bill_date in self.dates:
        manifest = baker.make(
            "CostUsageReportManifest",
            _fill_optional=True,
            provider=provider,
            billing_period_start_datetime=bill_date,
        )
        # Re-render the static data file for this period's date window.
        with open(static_data_path, "w") as f:
            f.write(template.render(start_date=start_date, end_date=end_date, account_id=account_id))
        run(nise_provider_type.lower(), options)
        report_path = self.build_report_path(provider_type, bill_date, base_path)
        for report in os.scandir(report_path):
            # Only descend into subdirectories; files directly under
            # report_path are not processed here.
            if os.path.isdir(report):
                # NOTE: the inner loop deliberately rebinds `report` to the
                # file paths inside the directory entry found above.
                for report in [f.path for f in os.scandir(f"{report_path}/{report.name}")]:
                    if os.path.isdir(report):
                        continue
                    elif "manifest" in report.lower():
                        continue
                    self.process_report(report, "GZIP", provider_type, provider, manifest)
        # Patch out the celery chain (and auto ingest) so summarization runs
        # synchronously inside the test process.
        with patch("masu.processor.tasks.chain"), patch.object(settings, "AUTO_DATA_INGEST", False):
            update_summary_tables(
                self.schema,
                provider_type,
                provider.uuid,
                start_date,
                end_date,
                manifest_id=manifest.id,
                synchronous=True,
            )
    # Run the remaining summary tasks eagerly (.s(...).apply() executes locally).
    update_cost_model_costs.s(self.schema, provider.uuid, self.dh.last_month_start, self.dh.today, synchronous=True).apply()
    refresh_materialized_views.s(self.schema, provider_type, provider_uuid=provider.uuid, synchronous=True).apply()
    # Best-effort cleanup of the generated nise report files.
    shutil.rmtree(base_path, ignore_errors=True)
def setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, parallel=0, aliases=None, **kwargs):
    """Create the test databases.

    This function is a copy of the Django setup_databases with one addition:
    after the first test database is created, a Tenant and Customer are
    created and the test data (OCP enabled tags, AWS org tree, nise static
    data for several provider types, extra tenant accounts) is loaded.

    Args:
        verbosity (int): Django verbosity level passed through to creation.
        interactive (bool): when False, existing test DBs are clobbered.
        keepdb (bool): reuse an existing test database if present.
        debug_sql (bool): force debug cursors on every connection.
        parallel (int): number of parallel test DB clones to create.
        aliases: database aliases to set up (None means all).

    Returns:
        list: ``(connection, old_name, destroy)`` tuples for teardown.
    """
    test_databases, mirrored_aliases = get_unique_databases_and_mirrors(aliases)
    old_names = []
    for db_name, aliases in test_databases.values():
        first_alias = None
        for alias in aliases:
            connection = connections[alias]
            old_names.append((connection, db_name, first_alias is None))
            # Actually create the database for the first connection
            if first_alias is None:
                first_alias = alias
                connection.creation.create_test_db(
                    verbosity=verbosity,
                    autoclobber=not interactive,
                    keepdb=keepdb,
                    serialize=connection.settings_dict.get("TEST", {}).get("SERIALIZE", True),
                )
                try:
                    tenant, created = Tenant.objects.get_or_create(schema_name=KokuTestRunner.schema)
                    # get_or_create already returns the new row; save it once
                    # instead of issuing a redundant second get_or_create.
                    if created:
                        tenant.save()
                    customer, __ = Customer.objects.get_or_create(
                        account_id=KokuTestRunner.account, schema_name=KokuTestRunner.schema
                    )
                    with tenant_context(tenant):
                        for tag_key in OCP_ENABLED_TAGS:
                            OCPEnabledTagKeys.objects.get_or_create(key=tag_key)
                    data_loader = NiseDataLoader(KokuTestRunner.schema)
                    # grab the dates to get the start date
                    dates = data_loader.dates
                    # Obtain the day_list from yaml
                    read_yaml = UploadAwsTree(None, None, None, None)
                    tree_yaml = read_yaml.import_yaml(yaml_file_path="scripts/aws_org_tree.yml")
                    day_list = tree_yaml["account_structure"]["days"]
                    # Insert the tree
                    org_tree_obj = InsertAwsOrgTree(schema=KokuTestRunner.schema, start_date=dates[0][0])
                    org_tree_obj.insert_tree(day_list=day_list)
                    data_loader.load_openshift_data(customer, "ocp_aws_static_data.yml", "OCP-on-AWS")
                    data_loader.load_openshift_data(customer, "ocp_azure_static_data.yml", "OCP-on-Azure")
                    data_loader.load_aws_data(customer, "aws_static_data.yml")
                    data_loader.load_azure_data(customer, "azure_static_data.yml")
                    # Extra tenants/customers used by multi-account tests.
                    for account in [("10002", "acct10002"), ("12345", "acct12345")]:
                        tenant = Tenant.objects.get_or_create(schema_name=account[1])[0]
                        tenant.save()
                        Customer.objects.get_or_create(account_id=account[0], schema_name=account[1])
                except Exception as err:
                    LOG.error(err)
                    # bare raise preserves the original traceback
                    raise
                if parallel > 1:
                    for index in range(parallel):
                        connection.creation.clone_test_db(suffix=str(index + 1), verbosity=verbosity, keepdb=keepdb)
            # Configure all other connections as mirrors of the first one
            else:
                connections[alias].creation.set_as_test_mirror(connections[first_alias].settings_dict)
    # Configure the test mirrors.
    for alias, mirror_alias in mirrored_aliases.items():
        connections[alias].creation.set_as_test_mirror(connections[mirror_alias].settings_dict)
    if debug_sql:
        for alias in connections:
            connections[alias].force_debug_cursor = True
    return old_names
def load_aws_data(self, linked_openshift_provider=None, day_list=None):
    """Load AWS data for tests.

    Creates an AWS-local Provider, optionally inserts an org-unit tree,
    then for each date range in ``self.dates`` fabricates a manifest, bill,
    cost entry and per-day daily-summary rows spread across the account
    aliases/org units, and finally populates the tag and UI summary tables.

    Args:
        linked_openshift_provider: optional OCP provider to link the new
            AWS provider to.
        day_list: optional AWS org-tree day structure; when given, the org
            unit tree is inserted before summary rows are created.

    Returns:
        list: the created bill objects, one per date range.
    """
    bills = []
    provider_type = Provider.PROVIDER_AWS_LOCAL
    role_arn = "arn:aws:iam::999999999999:role/CostManagement"
    credentials = {"role_arn": role_arn}
    billing_source = {"bucket": "test-bucket"}
    payer_account_id = "9999999999999"
    provider = self.create_provider(
        provider_type, credentials, billing_source, "test-aws", linked_openshift_provider=linked_openshift_provider
    )
    # The org tree must exist before summary rows so the organizational_unit
    # foreign keys below can resolve.
    if day_list:
        org_tree_obj = InsertAwsOrgTree(schema=self.schema, provider_uuid=provider.uuid, start_date=self.dates[0][0])
        org_tree_obj.insert_tree(day_list=day_list)
    with schema_context(self.schema):
        main_alias = baker.make("AWSAccountAlias", account_id=payer_account_id, account_alias="Test Account")
    for start_date, end_date, bill_date in self.dates:
        LOG.info(f"load aws data for start: {start_date}, end: {end_date}")
        with schema_context(self.schema):
            # Shuffle org units so rows are distributed pseudo-randomly, then
            # build aliases in the SAME order so alias[i] matches org_units[i].
            org_units = list(AWSOrganizationalUnit.objects.filter(account_alias_id__isnull=False))
            random.shuffle(org_units)
            aliases = [main_alias] + [
                AWSAccountAlias.objects.get(id=org_unit.account_alias_id) for org_unit in org_units
            ]
            # Slot None in front so the payer alias pairs with no org unit.
            org_units.insert(0, None)
            usage_account_ids = [alias.account_id for alias in aliases]
            self.create_manifest(provider, bill_date)
            bill = self.create_bill(provider_type, provider, bill_date, payer_account_id=payer_account_id)
            bills.append(bill)
            self.create_cost_entry(bill_date, bill)
            days = (end_date - start_date).days + 1
            # One batch of summary rows per day in the billing period; cycle()
            # round-robins accounts/aliases/org units across the _quantity rows.
            for i in range(days):
                baker.make_recipe(
                    "api.report.test.util.aws_daily_summary",
                    cost_entry_bill=bill,
                    usage_account_id=cycle(usage_account_ids),
                    account_alias=cycle(aliases),
                    organizational_unit=cycle(org_units),
                    currency_code=self.currency,
                    usage_start=start_date + timedelta(i),
                    usage_end=start_date + timedelta(i),
                    tags=cycle(self.tags),
                    source_uuid=provider.uuid,
                    # enough rows to cover every alias at least once
                    _quantity=max(AWS_CONSTANTS.length, len(aliases)),
                )
    bill_ids = [bill.id for bill in bills]
    with AWSReportDBAccessor(self.schema) as accessor:
        accessor.populate_tags_summary_table(bill_ids, self.first_start_date, self.last_end_date)
        accessor.populate_ui_summary_tables(self.first_start_date, self.last_end_date, provider.uuid)
    return bills