def _check_cost_report_access(credential_name, credentials, region="us-east-1", bucket=None):
    """Check for provider cost and usage report access.

    Args:
        credential_name (str): credential identifier (e.g. role ARN) used in error text.
        credentials (dict): keyword credentials forwarded to the boto3 CUR client.
        region (str): AWS region for the Cost & Usage Report client.
        bucket (str): optional S3 bucket name used to narrow report definitions.

    Raises:
        serializers.ValidationError: if report definitions cannot be fetched, or a
            matching report uses an unsupported compression / omits Resource IDs.
    """
    cur_client = boto3.client("cur", region_name=region, **credentials)
    reports = None
    try:
        response = cur_client.describe_report_definitions()
        reports = response.get("ReportDefinitions")
    except (ClientError, BotoConnectionError) as boto_error:
        key = ProviderErrors.AWS_NO_REPORT_FOUND
        message = f"Unable to obtain cost and usage report definition data with {credential_name}."
        # Logger.warn is a deprecated alias; warning is the supported method.
        LOG.warning(msg=message, exc_info=boto_error)
        raise serializers.ValidationError(error_obj(key, message))
    if reports and bucket:
        # filter report definitions to reports with a matching S3 bucket name.
        bucket_matched = list(filter(lambda rep: bucket in rep.get("S3Bucket"), reports))
        for report in bucket_matched:
            if report.get("Compression") not in ALLOWED_COMPRESSIONS:
                key = ProviderErrors.AWS_COMPRESSION_REPORT_CONFIG
                internal_msg = (
                    f"{report.get('Compression')} compression is not supported. "
                    f"Reports must use GZIP compression format."
                )
                raise serializers.ValidationError(error_obj(key, internal_msg))
            if "RESOURCES" not in report.get("AdditionalSchemaElements"):
                key = ProviderErrors.AWS_REPORT_CONFIG
                msg = f"Required Resource IDs are not included in report {report.get('ReportName')}"
                raise serializers.ValidationError(error_obj(key, msg))
def _verify_patch_entries(self, subscription_id, resource_group, storage_account): """Raise Validation Error for missing.""" if subscription_id and not (resource_group and storage_account): key = ProviderErrors.AZURE_MISSING_PATCH message = ProviderErrors.AZURE_MISSING_RESOURCE_GROUP_AND_STORAGE_ACCOUNT_MESSAGE raise ValidationError(error_obj(key, message)) if subscription_id and resource_group and not storage_account: key = ProviderErrors.AZURE_MISSING_PATCH message = ProviderErrors.AZURE_MISSING_STORAGE_ACCOUNT_MESSAGE raise ValidationError(error_obj(key, message)) if subscription_id and storage_account and not resource_group: key = ProviderErrors.AZURE_MISSING_PATCH message = ProviderErrors.AZURE_MISSING_RESOURCE_GROUP_MESSAGE raise ValidationError(error_obj(key, message)) if storage_account and resource_group and not subscription_id: key = ProviderErrors.AZURE_MISSING_PATCH message = ProviderErrors.AZURE_MISSING_SUBSCRIPTION_ID_MESSAGE raise ValidationError(error_obj(key, message)) if not resource_group and not storage_account and not subscription_id: key = ProviderErrors.AZURE_MISSING_PATCH message = ProviderErrors.AZURE_MISSING_ALL_PATCH_VALUES_MESSAGE raise ValidationError(error_obj(key, message))
def _detect_billing_export_table(self, data_source, credentials):
    """Verify that dataset and billing export table exists.

    Looks up the BigQuery billing-export table for the configured dataset and
    stores its id on ``data_source`` (persisting via
    ``update_source_data_source``). Raises ``SkipStatusPush`` while the table
    has not materialized yet, and ``ValidationError`` for bad dataset input.
    """
    proj_table = self._format_dataset_id(data_source, credentials)
    try:
        bigquery_table_id = self.get_table_id(proj_table)
        if bigquery_table_id:
            # Table found: cache its id on the data source and persist.
            data_source["table_id"] = bigquery_table_id
            self.update_source_data_source(credentials, data_source)
        else:
            raise SkipStatusPush("Table ID not ready.")
    except NotFound as e:
        # Dataset vanished or is wrong: clear any stale table id before failing.
        data_source.pop("table_id", None)
        self.update_source_data_source(credentials, data_source)
        key = "billing_source.dataset"
        LOG.info(error_obj(key, e.message))
        message = (
            f"Unable to find dataset: {data_source.get('dataset')} in project: {credentials.get('project_id')}"
        )
        raise serializers.ValidationError(error_obj(key, message))
    except BadRequest as e:
        # Malformed dataset id rejected by the BigQuery API.
        LOG.warning(str(e))
        key = "billing_source"
        message = f"Invalid Dataset ID: {str(data_source.get('dataset'))}"
        raise serializers.ValidationError(error_obj(key, message))
    except ValueError:
        # Dataset id could not even be parsed client-side.
        key = "billing_source.dataset"
        message = f"Invalid Dataset ID: {str(data_source.get('dataset'))}"
        raise serializers.ValidationError(error_obj(key, message))
def cost_usage_source_is_reachable(self, credential_name, storage_resource_name):
    """Verify that the S3 bucket exists and is reachable.

    Runs a chain of guard checks: ARN present, STS credentials obtainable,
    bucket name present, bucket reachable, then CUR and org-access probes.
    """
    if not credential_name or credential_name.isspace():
        raise serializers.ValidationError(
            error_obj(ProviderErrors.AWS_MISSING_RESOURCE_NAME, ProviderErrors.AWS_MISSING_RESOURCE_NAME_MESSAGE)
        )

    creds = _get_sts_access(credential_name)
    # Any falsy credential value means the STS assume-role call failed.
    if any(not value for value in creds.values()):
        internal_message = f"Unable to access account resources with ARN {credential_name}."
        raise serializers.ValidationError(
            error_obj(ProviderErrors.AWS_RESOURCE_NAME_UNREACHABLE, internal_message)
        )

    if not storage_resource_name or storage_resource_name.isspace():
        raise serializers.ValidationError(
            error_obj(ProviderErrors.AWS_BUCKET_MISSING, ProviderErrors.AWS_BUCKET_MISSING_MESSAGE)
        )

    if not _check_s3_access(storage_resource_name, creds):
        internal_message = f"Bucket {storage_resource_name} could not be found with {credential_name}."
        raise serializers.ValidationError(
            error_obj(ProviderErrors.AWS_BILLING_SOURCE_NOT_FOUND, internal_message)
        )

    _check_cost_report_access(credential_name, creds, bucket=storage_resource_name)

    # Organization access is optional; log and continue without it.
    if not _check_org_access(creds):
        LOG.info(f"Unable to obtain organization data with {credential_name}.")
    return True
def cost_usage_source_is_reachable(self, credentials, data_source):
    """
    Verify that the GCP bucket exists and is reachable.

    Checks the project's IAM permissions against REQUIRED_IAM_PERMISSIONS via
    the Cloud Resource Manager API, then resolves the BigQuery billing table
    id if it is not already cached on ``data_source``.

    Args:
        credentials (dict): a dictionary containing project_id
        data_source (dict): dict with ``dataset`` and optionally ``table_id``

    Raises:
        serializers.ValidationError: on missing permissions or API errors.
        SkipStatusPush: when the billing table has not materialized yet.
    """
    try:
        project = credentials.get("project_id")
        gcp_credentials, _ = google.auth.default()
        # cache_discovery=False avoids a noisy oauth2client warning:
        # https://github.com/googleapis/google-api-python-client/issues/299
        service = discovery.build("cloudresourcemanager", "v1", credentials=gcp_credentials, cache_discovery=False)
        check_permissions = {"permissions": REQUIRED_IAM_PERMISSIONS}
        request = service.projects().testIamPermissions(resource=project, body=check_permissions)
        response = request.execute()
        # testIamPermissions echoes back only the permissions actually held.
        permissions = response.get("permissions", [])
        for required_permission in REQUIRED_IAM_PERMISSIONS:
            if required_permission not in permissions:
                key = ProviderErrors.GCP_INCORRECT_IAM_PERMISSIONS
                internal_message = f"Improper IAM permissions: {permissions}."
                LOG.warning(internal_message)
                message = f"Incorrect IAM permissions for project {project}"
                raise serializers.ValidationError(error_obj(key, message))
    except GoogleCloudError as e:
        key = "authentication.project_id"
        raise serializers.ValidationError(error_obj(key, e.message))
    except HttpError as err:
        # _get_reason extracts the human-readable cause from the HTTP error.
        reason = err._get_reason()
        key = "authentication.project_id"
        LOG.info(error_obj(key, reason))
        raise serializers.ValidationError(error_obj(key, reason))
    if not data_source.get("table_id"):
        proj_table = f"{credentials.get('project_id')}.{data_source.get('dataset')}"
        bigquery_table_id = self.get_table_id(proj_table)
        if bigquery_table_id:
            data_source["table_id"] = bigquery_table_id
            self.update_source_data_source(credentials, data_source)
        else:
            raise SkipStatusPush("Table ID not ready.")
    return True
def cost_usage_source_is_reachable(self, credentials, data_source):
    """Verify that IBM local enterprise id is given."""
    if data_source:
        return True
    # Without a data source there is no enterprise id to report against.
    raise serializers.ValidationError(
        error_obj("data_source.enterprise_id", "Enterprise ID is a required parameter for IBM local.")
    )
def cost_usage_source_is_reachable(self, credentials, data_source):
    """Verify that GCP local bucket name is given."""
    if data_source:
        return True
    # An empty data source means the required project information is missing.
    raise serializers.ValidationError(
        error_obj("table_id", "Project id is a required parameter for GCP.")
    )
def _check_cost_report_access(customer_tenancy):
    """Check for provider cost and usage report access.

    Builds an OCI SDK config from environment variables and lists the cost and
    usage report objects in Oracle's "bling" namespace for the tenancy.

    Args:
        customer_tenancy (str): the customer tenancy OCID; reports live in a
            bucket named after it.

    Returns:
        tuple: (config dict, tenancy) in an auth-friendly format.

    Raises:
        serializers.ValidationError: when report objects cannot be listed.
    """
    # CUR bucket is made from customers tenancy name
    reporting_bucket = customer_tenancy
    # The Object Storage namespace used for the reports is bling; the bucket name is the tenancy OCID.
    reporting_namespace = "bling"
    # An empty prefix ("") will download both usage and cost files.
    prefix_file = ""
    # Get the list of reports
    # https://docs.oracle.com/en-us/iaas/Content/API/SDKDocs/clienvironmentvariables.htm
    config = {
        "user": os.environ["OCI_CLI_USER"],
        "key_file": os.environ["OCI_CLI_KEY_FILE"],
        "fingerprint": os.environ["OCI_CLI_FINGERPRINT"],
        "tenancy": os.environ["OCI_CLI_TENANCY"],
        "region": "uk-london-1",
    }
    object_storage = oci.object_storage.ObjectStorageClient(config)
    try:
        oci.pagination.list_call_get_all_results(
            object_storage.list_objects, reporting_namespace, reporting_bucket, prefix=prefix_file
        )
    except (ClientError, OciConnectionError) as oci_error:
        key = ProviderErrors.OCI_NO_REPORT_FOUND
        message = f"Unable to obtain cost and usage reports with tenant/bucket: {customer_tenancy}."
        # Logger.warn is a deprecated alias; warning is the supported method.
        LOG.warning(msg=message, exc_info=oci_error)
        raise serializers.ValidationError(error_obj(key, message))
    # return a auth friendly format
    return config, customer_tenancy
def validate(self, data):
    """Validate data_source field."""
    source = data.get("data_source")

    # A bucket name is mandatory.
    if not source.get("bucket", ""):
        raise serializers.ValidationError(
            error_obj("data_source.bucket", "This field is required.")
        )

    # The optional report prefix is length-limited.
    prefix = source.get("report_prefix", "")
    if prefix and len(prefix) > REPORT_PREFIX_MAX_LENGTH:
        message = f"Ensure this field has no more than {REPORT_PREFIX_MAX_LENGTH} characters."
        raise serializers.ValidationError(error_obj("data_source.report_prefix", message))

    return data
def test_aws_errors(self):
    """Test AWS error types.

    Each case maps an internal error key/message pair through
    SourcesErrorMessage and asserts the customer-facing message.
    """
    test_matrix = [
        {
            "key": ProviderErrors.AWS_ROLE_ARN_UNREACHABLE,
            "internal_message": "internal resource name message string",
            "expected_message": ProviderErrors.AWS_ROLE_ARN_UNREACHABLE_MESSAGE,
        },
        {
            "key": ProviderErrors.AWS_BILLING_SOURCE_NOT_FOUND,
            "internal_message": "internal billing source message string",
            "expected_message": ProviderErrors.AWS_BILLING_SOURCE_NOT_FOUND_MESSAGE,
        },
        {
            "key": ProviderErrors.AWS_COMPRESSION_REPORT_CONFIG,
            "internal_message": "internal compression error message",
            "expected_message": ProviderErrors.AWS_COMPRESSION_REPORT_CONFIG_MESSAGE,
        },
        {
            "key": ProviderErrors.AWS_BUCKET_MISSING,
            "internal_message": ProviderErrors.AWS_BUCKET_MISSING_MESSAGE,
            "expected_message": ProviderErrors.AWS_BUCKET_MISSING_MESSAGE,
        },
    ]
    for test in test_matrix:
        with self.subTest(test=test):
            key = test.get("key")
            message = test.get("internal_message")
            error = ValidationError(error_obj(key, message))
            message_obj = SourcesErrorMessage(error)
            # assertEquals is a deprecated alias; assertEqual is canonical.
            self.assertEqual(message_obj.display(source_id=1), test.get("expected_message"))
def cost_usage_source_is_reachable(self, credential_name, storage_resource_name):
    """Verify that the cost usage source exists and is reachable."""
    if storage_resource_name:
        return True
    # No bucket supplied: reject with the standard AWS bucket-missing error.
    raise serializers.ValidationError(
        error_obj(ProviderErrors.AWS_BUCKET_MISSING, ProviderErrors.AWS_BUCKET_MISSING_MESSAGE)
    )
def validate_source_type(self, source_type):
    """Validate credentials field."""
    normalized = source_type.lower()
    if normalized not in LCASE_PROVIDER_CHOICE_LIST:
        # Unknown provider type: report it back to the caller.
        message = f"Invalid source_type, {source_type}, provided."
        raise serializers.ValidationError(error_obj("source_type", message))
    return Provider.PROVIDER_CASE_MAPPING.get(normalized)
def cost_usage_source_is_reachable(self, credential_name, storage_resource_name):
    """Verify that GCP local bucket name is given."""
    if storage_resource_name:
        return True
    # Local GCP sources must name the bucket to read from.
    raise serializers.ValidationError(
        error_obj("bucket", "Bucket is a required parameter for GCP.")
    )
def __init__(self, instance=None, data=empty, **kwargs):
    """Initialize the Provider Serializer.

    Here we ensure we use the appropriate serializer to validate the
    authentication and billing_source parameters, swapping the generic
    field serializers for provider-specific ones when a valid ``type``
    is present in the incoming data.
    """
    super().__init__(instance, data, **kwargs)
    provider_type = None
    if data and data != empty:
        provider_type = data.get("type")
    # Reject unknown provider types up front, before field selection.
    if provider_type and provider_type.lower() not in LCASE_PROVIDER_CHOICE_LIST:
        key = "type"
        message = f"{provider_type} is not a valid source type."
        raise serializers.ValidationError(error_obj(key, message))
    if provider_type:
        provider_type = provider_type.lower()
        # Instantiate the provider-specific auth/billing serializers.
        self.fields["authentication"] = AUTHENTICATION_SERIALIZERS.get(
            Provider.PROVIDER_CASE_MAPPING.get(provider_type))()
        self.fields["billing_source"] = BILLING_SOURCE_SERIALIZERS.get(
            Provider.PROVIDER_CASE_MAPPING.get(provider_type))()
    else:
        # No type supplied (e.g. reads): fall back to the generic serializers.
        self.fields["authentication"] = ProviderAuthenticationSerializer()
        self.fields["billing_source"] = ProviderBillingSourceSerializer()
def validate_field(data, valid_fields, key):
    """Validate that *data* contains every field listed in *valid_fields*."""
    missing = set(valid_fields) - set(data)
    if missing:
        # Report the full required-field list rather than just the missing ones.
        message = f"One or more required fields is invalid/missing. Required fields are {valid_fields}"
        raise serializers.ValidationError(error_obj(key, message))
    return data
def create(self, validated_data):
    """Create a provider from validated data.

    Pops authentication/billing data off the payload, verifies the cost and
    usage source is reachable (unless the account is a demo account), rejects
    duplicate auth/billing combinations, then persists the Provider.

    Bug fix: ``bucket`` was only bound in the ``if "billing_source"`` branch,
    so the fallback call to ``cost_usage_source_ready`` could raise NameError
    when no billing_source was supplied; it now defaults to None.
    """
    user, customer = self.get_request_info()
    if "billing_source" in validated_data:
        billing_source = validated_data.pop("billing_source")
        data_source = billing_source.get("data_source", {})
        bucket = data_source.get("bucket")
    else:
        # Because of a unique together constraint, this is done
        # to allow for this field to be non-required for OCP
        # but will still have a blank no-op entry in the DB
        billing_source = {"bucket": "", "data_source": {}}
        data_source = None
        bucket = None
    authentication = validated_data.pop("authentication")
    credentials = authentication.get("credentials")
    provider_resource_name = credentials.get("provider_resource_name")
    provider_type = validated_data["type"]
    provider_type = Provider.PROVIDER_CASE_MAPPING.get(provider_type)
    validated_data["type"] = provider_type
    interface = ProviderAccessor(provider_type)
    if customer.account_id not in settings.DEMO_ACCOUNTS:
        if credentials and data_source and provider_type not in [Provider.PROVIDER_AWS, Provider.PROVIDER_OCP]:
            interface.cost_usage_source_ready(credentials, data_source)
        else:
            interface.cost_usage_source_ready(provider_resource_name, bucket)
    bill, __ = ProviderBillingSource.objects.get_or_create(**billing_source)
    auth, __ = ProviderAuthentication.objects.get_or_create(**authentication)
    # We can re-use a billing source or a auth, but not the same combination.
    dup_queryset = (
        Provider.objects.filter(authentication=auth).filter(billing_source=bill).filter(customer=customer)
    )
    if dup_queryset.count() != 0:
        conflict_provider = dup_queryset.first()
        message = (
            f"Cost management does not allow duplicate accounts. "
            f"{conflict_provider.name} already exists. Edit source settings to configure a new source."
        )
        # Logger.warn is a deprecated alias; warning is the supported method.
        LOG.warning(message)
        raise serializers.ValidationError(error_obj(ProviderErrors.DUPLICATE_AUTH, message))
    provider = Provider.objects.create(**validated_data)
    provider.customer = customer
    provider.created_by = user
    provider.authentication = auth
    provider.billing_source = bill
    provider.active = True
    provider.save()
    return provider
def update(self, instance, validated_data):
    """Update a Provider instance from validated data.

    Verifies reachability (skipped for demo accounts), deactivates the
    provider when validation fails, then atomically swaps auth/billing while
    rejecting duplicate combinations.

    Fixes: local variable typo ``conflict_provder`` and deprecated
    ``LOG.warn``.
    """
    _, customer = self.get_request_info()
    provider_type = validated_data["type"].lower()
    provider_type = Provider.PROVIDER_CASE_MAPPING.get(provider_type)
    validated_data["type"] = provider_type
    interface = ProviderAccessor(provider_type)
    authentication = validated_data.pop("authentication")
    credentials = authentication.get("credentials")
    billing_source = validated_data.pop("billing_source")
    data_source = billing_source.get("data_source")

    # updating `paused` must happen regardless of Provider availabilty
    instance.paused = validated_data.pop("paused", instance.paused)

    try:
        if self._is_demo_account(provider_type, credentials):
            LOG.info("Customer account is a DEMO account. Skipping cost_usage_source_ready check.")
        else:
            interface.cost_usage_source_ready(credentials, data_source)
    except serializers.ValidationError as validation_error:
        # Mark the provider inactive so downstream processing stops.
        instance.active = False
        instance.save()
        raise validation_error

    with transaction.atomic():
        bill, __ = ProviderBillingSource.objects.get_or_create(**billing_source)
        auth, __ = ProviderAuthentication.objects.get_or_create(**authentication)
        if instance.billing_source != bill or instance.authentication != auth:
            dup_queryset = (
                Provider.objects.filter(authentication=auth).filter(billing_source=bill).filter(customer=customer)
            )
            if dup_queryset.count() != 0:
                conflict_provider = dup_queryset.first()
                message = (
                    f"Cost management does not allow duplicate accounts. "
                    f"{conflict_provider.name} already exists. Edit source settings to configure a new source."
                )
                LOG.warning(message)
                raise serializers.ValidationError(error_obj(ProviderErrors.DUPLICATE_AUTH, message))

        for key in validated_data.keys():
            setattr(instance, key, validated_data[key])
        instance.authentication = auth
        instance.billing_source = bill
        instance.active = True
        instance.save()
        customer.date_updated = DateHelper().now_utc
        customer.save()
        return instance
def cost_usage_source_is_reachable(self, credentials, _):
    """Verify that the cost usage source exists and is reachable."""
    tenancy = credentials.get("tenant")
    # A blank or whitespace-only tenancy is treated as missing.
    if tenancy and not tenancy.isspace():
        return True
    raise serializers.ValidationError(
        error_obj(ProviderErrors.OCI_MISSING_TENANCY, ProviderErrors.OCI_MISSING_TENANCY_MESSAGE)
    )
def create(self, validated_data):
    """Create the cost model object in the database."""
    # Rename the incoming source_uuids key to the manager's provider_uuids.
    validated_data["provider_uuids"] = validated_data.pop("source_uuids", [])
    try:
        return CostModelManager().create(**validated_data)
    except CostModelException as error:
        raise serializers.ValidationError(error_obj("cost-models", str(error)))
def update(self, instance, validated_data):
    """Update a Provider instance from validated data.

    Bug fixes:
    - ``message`` was only assigned when a duplicate provider was detected,
      so the ``IntegrityError`` handler could raise ``NameError`` instead of
      a ``ValidationError``; a default message is now always bound.
    - local variable typo ``conflict_provder`` and deprecated ``LOG.warn``.
    """
    _, customer = self.get_request_info()
    provider_type = validated_data["type"].lower()
    provider_type = Provider.PROVIDER_CASE_MAPPING.get(provider_type)
    validated_data["type"] = provider_type
    interface = ProviderAccessor(provider_type)
    authentication = validated_data.pop("authentication")
    credentials = authentication.get("credentials")
    provider_resource_name = credentials.get("provider_resource_name")
    billing_source = validated_data.pop("billing_source")
    data_source = billing_source.get("data_source")
    bucket = billing_source.get("bucket")

    try:
        if customer.account_id not in settings.DEMO_ACCOUNTS:
            if credentials and data_source and provider_type not in [Provider.PROVIDER_AWS, Provider.PROVIDER_OCP]:
                interface.cost_usage_source_ready(credentials, data_source)
            else:
                interface.cost_usage_source_ready(provider_resource_name, bucket)
    except serializers.ValidationError as validation_error:
        # Mark the provider inactive so downstream processing stops.
        instance.active = False
        instance.save()
        raise validation_error

    with transaction.atomic():
        bill, __ = ProviderBillingSource.objects.get_or_create(**billing_source)
        auth, __ = ProviderAuthentication.objects.get_or_create(**authentication)
        # Default duplicate message; refined below when the conflict is known.
        message = "Cost management does not allow duplicate accounts."
        dup_queryset = (
            Provider.objects.filter(authentication=auth).filter(billing_source=bill).filter(customer=customer)
        )
        if dup_queryset.count() != 0:
            conflict_provider = dup_queryset.first()
            message = (
                f"Cost management does not allow duplicate accounts. "
                f"{conflict_provider.name} already exists. Edit source settings to configure a new source."
            )
            LOG.warning(message)

        for key in validated_data.keys():
            setattr(instance, key, validated_data[key])
        instance.authentication = auth
        instance.billing_source = bill
        instance.active = True
        try:
            instance.save()
        except IntegrityError:
            raise serializers.ValidationError(error_obj(ProviderErrors.DUPLICATE_AUTH, message))
        return instance
def _tag_key_handler(self, settings):
    """
    Handle setting results

    For each provider present in *settings*, validates the requested enabled
    tag keys against the available keys, then reconciles the enabled-key table
    (AWS via update_enabled_keys; others by direct ORM add/remove) and
    invalidates the tenant view cache for any provider that changed.

    Args:
        (String) name - unique name for switch.

    Returns:
        (Bool) - True, if a setting had an effect, False otherwise
    """
    updated = [False] * len(obtainTagKeysProvidersParams)
    for ix, providerName in enumerate(obtainTagKeysProvidersParams):
        provider_in_settings = settings.get(providerName)
        # Provider absent from the payload: nothing to reconcile.
        if provider_in_settings is None:
            continue
        enabled_tags = provider_in_settings.get("enabled", [])
        tag_view = obtainTagKeysProvidersParams[providerName]["tag_view"]
        query_handler = obtainTagKeysProvidersParams[providerName]["query_handler"]
        enabled_tag_keys = obtainTagKeysProvidersParams[providerName]["enabled_tag_keys"]
        provider = obtainTagKeysProvidersParams[providerName]["provider"]
        available, _ = self._obtain_tag_keys(tag_view, query_handler, enabled_tag_keys)
        # Requested keys must be a subset of the keys actually available.
        invalid_keys = [tag_key for tag_key in enabled_tags if tag_key not in available]
        if invalid_keys:
            key = "settings"
            message = f"Invalid tag keys provided: {', '.join(invalid_keys)}."
            raise ValidationError(error_obj(key, message))
        if "aws" in providerName:
            # AWS has a dedicated bulk-update helper.
            updated[ix] = update_enabled_keys(self.schema, enabled_tag_keys, enabled_tags)
        else:
            # Other providers: diff the existing rows against the request.
            remove_tags = []
            with schema_context(self.schema):
                existing_enabled_tags = enabled_tag_keys.objects.all()
                for existing_tag in existing_enabled_tags:
                    if existing_tag.key in enabled_tags:
                        # Already enabled; drop from the to-create list.
                        enabled_tags.remove(existing_tag.key)
                    else:
                        # No longer requested; schedule for deletion.
                        remove_tags.append(existing_tag)
                        updated[ix] = True
                for rm_tag in remove_tags:
                    rm_tag.delete()
                for new_tag in enabled_tags:
                    enabled_tag_keys.objects.create(key=new_tag)
                    updated[ix] = True
        if updated[ix]:
            # Cached tag views are stale once enabled keys change.
            invalidate_view_cache_for_tenant_and_source_type(self.schema, provider)
    return any(updated)
def check_service(self):
    """
    Checks if the service is valid or raises an error.

    Raises:
        ValidationError
    """
    if self.service is not None:
        return
    raise ValidationError(
        error_obj(ProviderErrors.INVALID_SOURCE_TYPE, ProviderErrors.INVALID_SOURCE_TYPE_MESSAGE)
    )
def cost_usage_source_is_reachable(self, credential, data_source):
    """Verify that the cost usage source exists and is reachable."""
    cluster_id = credential.get("cluster_id")

    # OCP requires a cluster id and must NOT carry a billing bucket.
    if not cluster_id or len(cluster_id) == 0:
        message = "Provider resource name is a required parameter for OCP."
        LOG.info(message)
        raise serializers.ValidationError(error_obj("authentication.cluster_id", message))
    if data_source:
        message = "Bucket is an invalid parameter for OCP."
        LOG.error(message)
        raise serializers.ValidationError(error_obj("billing_source.bucket", message))

    # TODO: Add data_source existance check once Insights integration is complete.
    LOG.info(f"Stub to verify that OCP report for cluster {cluster_id} is accessible.")
    return True
def cost_usage_source_is_reachable(self, credentials, data_source):
    """
    Verify that the cost usage report source is reachable by Koku.

    Implemented by provider specific class.  An account validation and
    connectivity check is to be done.

    Args:
        credentials (dict): Azure credentials dict

        example: {'subscription_id': 'f695f74f-36a4-4112-9fe6-74415fac75a2',
                  'tenant_id': '319d4d72-7ddc-45d0-9d63-a2db0a36e048',
                  'client_id': 'ce26bd50-2e5a-4eb7-9504-a05a79568e25',
                  'client_secret': 'abc123' }

        data_source (dict): Identifier of the cost usage report source

        example: { 'resource_group': 'My Resource Group 1',
                   'storage_account': 'My Storage Account 2'

    Returns:
        None

    Raises:
        ValidationError: Error string

    """
    key = "azure.error"

    # Both payloads must be dictionaries before anything else is checked.
    if not (isinstance(credentials, dict) and isinstance(data_source, dict)):
        raise ValidationError(error_obj(key, "Resource group and/or Storage account must be a dict"))

    resource_group = data_source.get("resource_group")
    storage_account = data_source.get("storage_account")
    if not (resource_group and storage_account):
        raise ValidationError(
            error_obj(key, ProviderErrors.AZURE_MISSING_RESOURCE_GROUP_AND_STORAGE_ACCOUNT_MESSAGE)
        )

    return True
def get_request_info(self):
    """Obtain request information like user and customer context."""
    user = self.context.get("user")
    customer = self.context.get("customer")
    # Fast path: both already present in the serializer context.
    if user and customer:
        return user, customer

    request = self.context.get("request")
    if not (request and hasattr(request, "user")):
        # No authenticated request available at all.
        raise serializers.ValidationError(
            error_obj("created_by", "Requesting user could not be found.")
        )

    user = request.user
    if not user.customer:
        raise serializers.ValidationError(
            error_obj("customer", "Customer for requesting user could not be found.")
        )
    customer = user.customer
    return user, customer
def validate_data_source(self, data_source):
    """Validate data_source field."""
    # Required keys are enforced by the shared field validator.
    data = validate_field(data_source, ["dataset"], "provider.data_source")

    # The optional report prefix is length-limited.
    prefix = data_source.get("report_prefix", "")
    if prefix and len(prefix) > REPORT_PREFIX_MAX_LENGTH:
        message = f"Ensure this field has no more than {REPORT_PREFIX_MAX_LENGTH} characters."
        raise serializers.ValidationError(error_obj("data_source.report_prefix", message))

    return data
def update(self, instance, validated_data, *args, **kwargs):
    """Update the rate object in the database."""
    source_uuids = validated_data.pop("source_uuids", [])
    # Resolve each requested uuid to the canonical uuid stored on the Provider.
    new_providers_for_instance = [
        str(Provider.objects.filter(uuid=uuid).first().uuid) for uuid in source_uuids
    ]
    try:
        manager = CostModelManager(cost_model_uuid=instance.uuid)
        manager.update_provider_uuids(new_providers_for_instance)
        manager.update(**validated_data)
    except CostModelException as error:
        raise serializers.ValidationError(error_obj("cost-models", str(error)))
    return manager.instance
def create(self, validated_data):
    """Create a provider from validated data.

    Normalizes the provider type, verifies the source is reachable (skipped
    for demo accounts), rejects duplicate auth/billing combinations, then
    persists the Provider and stamps the customer record.

    Fix: deprecated ``LOG.warn`` replaced with ``LOG.warning``.
    """
    user, customer = self.get_request_info()
    provider_type = validated_data["type"].lower()
    provider_type = Provider.PROVIDER_CASE_MAPPING.get(provider_type)
    validated_data["type"] = provider_type
    interface = ProviderAccessor(provider_type)
    authentication = validated_data.pop("authentication")
    credentials = authentication.get("credentials")
    billing_source = validated_data.pop("billing_source")
    data_source = billing_source.get("data_source")

    if self._is_demo_account(provider_type, credentials):
        LOG.info("Customer account is a DEMO account. Skipping cost_usage_source_ready check.")
    else:
        interface.cost_usage_source_ready(credentials, data_source)

    bill, __ = ProviderBillingSource.objects.get_or_create(**billing_source)
    auth, __ = ProviderAuthentication.objects.get_or_create(**authentication)

    # We can re-use a billing source or a auth, but not the same combination.
    dup_queryset = (
        Provider.objects.filter(authentication=auth).filter(billing_source=bill).filter(customer=customer)
    )
    if dup_queryset.count() != 0:
        conflict_provider = dup_queryset.first()
        message = (
            f"Cost management does not allow duplicate accounts. "
            f"{conflict_provider.name} already exists. Edit source settings to configure a new source."
        )
        LOG.warning(message)
        raise serializers.ValidationError(error_obj(ProviderErrors.DUPLICATE_AUTH, message))

    provider = Provider.objects.create(**validated_data)
    provider.customer = customer
    provider.created_by = user
    provider.authentication = auth
    provider.billing_source = bill
    provider.active = True
    provider.save()
    customer.date_updated = DateHelper().now_utc
    customer.save()
    return provider
def cost_usage_source_is_reachable(self, credentials, data_source):
    """
    Verify that the GCP bucket exists and is reachable.

    Args:
        credentials (object): not used; only present for interface compatibility
        data_source (dict): dict containing name of GCP storage bucket

    """
    storage_client = storage.Client()
    bucket = data_source.get("bucket")
    key = "billing_source.bucket"
    try:
        # lookup_bucket returns None (rather than raising) for a missing bucket.
        if not storage_client.lookup_bucket(bucket):
            message = f"The provided GCP bucket {bucket} does not exist"
            raise serializers.ValidationError(error_obj(key, message))
    except GoogleCloudError as e:
        raise serializers.ValidationError(error_obj(key, e.message))
    return True
def cost_usage_source_is_reachable(self, credentials, data_source):
    """Verify that the IBM Cloud is reachable."""
    authenticator = IAMAuthenticator(credentials.get("iam_token", ""))
    enterprise_id = data_source.get("enterprise_id", "")
    try:
        # A one-row report fetch doubles as an auth + enterprise-id probe.
        service = EnterpriseUsageReportsV1(authenticator=authenticator)
        service.get_resource_usage_report(enterprise_id=enterprise_id, children=True, limit=1)
    except ApiException as e:
        # Attribute the failure to the field the API complains about.
        key = ""
        if "enterprise" in e.message:
            key = "data_source.enterprise_id"
        if "API key" in e.message:
            key = "credentials.iam_token"
        raise serializers.ValidationError(error_obj(key, e.message))
    return True