def get_missing_concepts(self, client, tables):
        """
        Create or replace the concept suppression lookup table from the
        distinct concept_ids found in the given tables.

        Columns containing 'source_concept_id' are deliberately excluded
        from the scan.

        :param client: BigQuery client
        :param tables: iterable of table names to scan for concept_id fields
        :return: None
        :raises GoogleCloudError: if the lookup-table query job reports errors
        """
        queries = []
        union_distinct = "\nUNION DISTINCT\n"
        for table in tables:
            # Only plain *_concept_id columns; skip *_source_concept_id.
            concept_id_fields = [
                field for field in get_concept_id_fields(table)
                if 'source_concept_id' not in field
            ]
            for concept_id_field in concept_id_fields:
                query = MISSING_CONCEPTS_QUERY.render(
                    project_id=self.project_id,
                    dataset_id=self.dataset_id,
                    tablename=table,
                    concept_id_field=concept_id_field)
                queries.append(query)

        unioned_queries = union_distinct.join(queries)
        concept_suppression_lookup_query = CREATE_OR_REPLACE_CLAUSE.render(
            project_id=self.project_id,
            sandbox_id=self.sandbox_dataset_id,
            concept_suppression_lookup_table=self.
            concept_suppression_lookup_table,
            query=unioned_queries)

        query_job = client.query(concept_suppression_lookup_query)
        query_job.result()  # block until the job finishes

        # Errors are reported on the QueryJob, not on the row iterator that
        # result() returns; the previous hasattr(result, 'errors') guard
        # could therefore never fire.
        if query_job.errors or query_job.error_result:
            LOGGER.error(
                f"Error running job {query_job.job_id}: {query_job.errors}")
            raise GoogleCloudError(
                f"Error running job {query_job.job_id}: {query_job.errors}")
# Example 2
 def test_bucket_access_exception(self, mock_storage):
     """A GoogleCloudError during bucket lookup must surface as ValidationError."""
     creds = {'project_id': FAKE.word()}
     resource_name = {'bucket': FAKE.word()}
     storage_client = mock_storage.return_value
     storage_client.lookup_bucket.side_effect = GoogleCloudError('GCP Error')
     with self.assertRaises(ValidationError):
         GCPProvider().cost_usage_source_is_reachable(creds, resource_name)
# Example 3
def google_bucket_error_mock():
    """Return a mock bucket whose blob uploads fail twice, then succeed."""
    blob = mock.MagicMock()
    # First two upload attempts raise; the third returns None (success).
    blob.upload_from_filename.side_effect = [
        GoogleCloudError("error 1"),
        GoogleCloudError("error 2"),
        None,
    ]

    def named_blob(name):
        # Record the requested name on the shared blob mock before handing it back.
        type(blob).name = name
        return blob

    bucket = mock.MagicMock()
    bucket.blob.side_effect = named_blob
    bucket.get_blob.return_value = blob
    return bucket
# Example 4
 def test_download_file_query_client_error(self, mock_open):
     """download_file must wrap BigQuery client failures in GCPReportDownloaderError."""
     report_key = "202011_1234_2020-12-05:2020-12-08.csv"
     expected_msg = "GCP Error"
     gcp_downloader = self.create_gcp_downloader_with_mocked_values()
     with patch("masu.external.downloader.gcp.gcp_report_downloader.bigquery") as mock_bigquery:
         mock_bigquery.Client.side_effect = GoogleCloudError(expected_msg)
         with self.assertRaisesRegexp(GCPReportDownloaderError, expected_msg):
             gcp_downloader.download_file(report_key)
# Example 5
 def test_cost_usage_source_raise_google_cloud_error(self, mock_auth, mock_discovery):
     """A GoogleCloudError from discovery.build must surface as ValidationError."""
     expected_msg = "GCP Error"
     mock_auth.return_value = (MagicMock(), MagicMock())
     mock_discovery.build.side_effect = GoogleCloudError(expected_msg)
     billing = {"dataset": FAKE.word(), "table_id": FAKE.word()}
     creds = {"project_id": FAKE.word()}
     with self.assertRaisesRegexp(ValidationError, expected_msg):
         GCPProvider().cost_usage_source_is_reachable(creds, billing)
# Example 6
    def test_gcp_upload_error(self, mock_storage):
        """upload_to_gcp_storage must return False when the bucket lookup raises."""
        storage_client = mock_storage.return_value
        storage_client.get_bucket.side_effect = GoogleCloudError("GCP Error")

        target_bucket = fake.slug()
        src_path = fake.file_path()
        dst_path = fake.file_path()
        result = upload_to_gcp_storage(target_bucket, src_path, dst_path)

        self.assertFalse(result)
# Example 7
    def create_suppression_lookup_table(self, client):
        """
        Build the cancer concept suppression lookup table in the sandbox.

        :param client: BigQuery client
        :return: None
        :raises GoogleCloudError: if the query job reports errors
        """
        concept_suppression_lookup_query = CANCER_CONCEPT_QUERY.render(
            project_id=self.project_id,
            dataset_id=self.dataset_id,
            sandbox_id=self.sandbox_dataset_id,
            concept_suppression_lookup_table=self.
            concept_suppression_lookup_table)
        query_job = client.query(concept_suppression_lookup_query)
        query_job.result()  # wait for completion

        # Check the job itself: the row iterator returned by result() has no
        # 'errors' attribute, so the old hasattr() guard was always False.
        if query_job.errors or query_job.error_result:
            LOGGER.error(
                f"Error running job {query_job.job_id}: {query_job.errors}")
            raise GoogleCloudError(
                f"Error running job {query_job.job_id}: {query_job.errors}")
# Example 8
 def test_generate_etag_big_query_client_error(self, gcp_provider):
     """Downloader construction must wrap BigQuery client errors in GCPReportDownloaderError."""
     expected_msg = "GCP Error"
     source = {"table_id": FAKE.slug(), "dataset": FAKE.slug()}
     creds = {"project_id": FAKE.slug()}
     with patch("masu.external.downloader.gcp.gcp_report_downloader.bigquery") as mock_bigquery:
         mock_bigquery.Client.side_effect = GoogleCloudError(expected_msg)
         with self.assertRaisesRegexp(GCPReportDownloaderError, expected_msg):
             GCPReportDownloader(
                 customer_name=FAKE.name(),
                 data_source=source,
                 provider_uuid=uuid4(),
                 credentials=creds,
             )
    def create_suppression_lookup_table(self, client):
        """
        Build the geo-location concept suppression lookup table.

        :param client: BigQuery client
        :return: None
        :raises GoogleCloudError: if the query job reports errors
        """
        concept_suppression_lookup_query = GEO_LOCATION_CONCEPT_SUPPRESSION_LOOKUP_QUERY.render(
            project_id=self.project_id,
            dataset_id=self.dataset_id,
            sandbox_dataset_id=self.sandbox_dataset_id,
            lookup_table=self.concept_suppression_lookup_table)
        query_job = client.query(concept_suppression_lookup_query)
        query_job.result()  # wait for completion

        # Errors are reported on the QueryJob; result() returns a row
        # iterator with no 'errors' attribute, so the old check was dead.
        if query_job.errors or query_job.error_result:
            LOGGER.error(
                f"Error running job {query_job.job_id}: {query_job.errors}")
            raise GoogleCloudError(
                f"Error running job {query_job.job_id}: {query_job.errors}")
# Example 10
    def create_suppression_lookup_table(self, client):
        """
        Build the motor-vehicle-accident concept suppression lookup table.

        :param client: BigQuery client
        :return: None
        :raises GoogleCloudError: if the query job reports errors
        """
        concept_suppression_lookup_query = MOTOR_VEHICLE_ACCIDENT_CONCEPT_QUERY.render(
            project=self.project_id,
            dataset=self.dataset_id,
            sandbox_dataset=self.sandbox_dataset_id,
            concept_suppression_lookup=self.concept_suppression_lookup_table)
        query_job = client.query(concept_suppression_lookup_query)
        query_job.result()  # wait for completion

        # Errors are reported on the QueryJob; result() returns a row
        # iterator with no 'errors' attribute, so the old check was dead.
        if query_job.errors or query_job.error_result:
            LOGGER.error(
                f"Error running job {query_job.job_id}: {query_job.errors}")
            raise GoogleCloudError(
                f"Error running job {query_job.job_id}: {query_job.errors}")
    def create_suppression_lookup_table(self, client):
        """
        Build the section-participation concept suppression lookup table.

        :param client: BigQuery client
        :return: None
        :raises GoogleCloudError: if the query job reports errors
        """
        concept_suppression_lookup_query = SECTION_PARTICIPATION_CONCEPT_QUERY.render(
            project_id=self.project_id,
            dataset_id=self.dataset_id,
            sandbox_id=self.sandbox_dataset_id,
            concept_suppression_lookup_table=self.
            concept_suppression_lookup_table)
        query_job = client.query(concept_suppression_lookup_query)
        query_job.result()  # wait for completion

        # Errors are reported on the QueryJob; result() returns a row
        # iterator with no 'errors' attribute, so the old check was dead.
        if query_job.errors or query_job.error_result:
            LOGGER.error(
                f"Error running job {query_job.job_id}: {query_job.errors}")
            raise GoogleCloudError(
                f"Error running job {query_job.job_id}: {query_job.errors}")
# Example 12
    def create_suppression_lookup_table(self, client):
        """
        Build the free-text concept suppression lookup table.

        :param client: BigQuery client
        :return: None
        :raises GoogleCloudError: if the query job reports errors
        """
        concept_suppression_lookup_query = FREE_TEXT_CONCEPT_QUERY.render(
            project_id=self.project_id,
            dataset_id=self.dataset_id,
            sandbox_dataset=self.sandbox_dataset_id,
            concept_suppression_table=self.concept_suppression_lookup_table)
        query_job = client.query(concept_suppression_lookup_query)
        query_job.result()  # wait for completion

        # Report via the QueryJob: the row iterator from result() has no
        # job_id/errors attributes, so the old messages would have raised
        # AttributeError whenever this branch actually fired.
        if query_job.errors or query_job.error_result:
            LOGGER.error(
                f"Error running job {query_job.job_id}: {query_job.errors}")
            raise GoogleCloudError(
                f"Error running job {query_job.job_id}: {query_job.errors}")
def download_to_filename(filename):
    """Stub download: succeed only for the known-good blob path, raise otherwise."""
    if filename != valid_blob_path:
        raise GoogleCloudError("Error")
def upload_from_filename(filename):
    """Stub upload: succeed only for the known-good blob path, raise otherwise."""
    if filename != valid_blob_path:
        raise GoogleCloudError('Error')