Example #1
def setUp(self):
    self.app_id = app_identity.get_application_id()
    self.dataset_id = bq_utils.get_dataset_id()
    self.bucket = gcs_utils.get_drc_bucket()
    test_util.empty_bucket(self.bucket)
    test_util.delete_all_tables(self.dataset_id)
    self.load_test_data(hpo_id=HPO_NYC)
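
All of the snippets on this page call test_util.empty_bucket without showing its body. Purely as an illustration, here is a minimal sketch of such a helper, assuming the google-cloud-storage client library; the project's actual test_util may well be built on its own gcs_utils wrappers instead.

from google.cloud import storage


def empty_bucket(bucket_name):
    """Delete every object in the given GCS bucket so tests start clean."""
    client = storage.Client()
    for blob in client.list_blobs(bucket_name):
        blob.delete()
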
Example #2
    def test_target_bucket_upload(self):
        bucket_nyc = gcs_utils.get_hpo_bucket('nyc')
        folder_prefix = 'test-folder-fake/'
        test_util.empty_bucket(bucket_nyc)

        main._upload_achilles_files(hpo_id=None,
                                    folder_prefix=folder_prefix,
                                    target_bucket=bucket_nyc)
        actual_bucket_files = set(
            [item['name'] for item in gcs_utils.list_bucket(bucket_nyc)])
        expected_bucket_files = set([
            'test-folder-fake/' + item
            for item in resources.ALL_ACHILLES_INDEX_FILES
        ])
        self.assertSetEqual(expected_bucket_files, actual_bucket_files)
Example #3
    def setUp(self):
        self.hpo_bucket = gcs_utils.get_hpo_bucket(FAKE_HPO_ID)
        self.project_id = app_identity.get_application_id()
        self.dataset_id = bq_utils.get_dataset_id()
        self.rdr_dataset_id = bq_utils.get_rdr_dataset_id()
        self.folder_prefix = '2019-01-01/'
        test_util.delete_all_tables(self.dataset_id)
        test_util.empty_bucket(self.hpo_bucket)

        mock_get_hpo_name = mock.patch('validation.main.get_hpo_name')

        self.mock_get_hpo_name = mock_get_hpo_name.start()
        self.mock_get_hpo_name.return_value = 'Fake HPO'
        self.addCleanup(mock_get_hpo_name.stop)

        self._load_data()
Example #4
def load_dataset_from_files(dataset_id, path, mappings=False):
    bucket = gcs_utils.get_hpo_bucket(test_util.FAKE_HPO_ID)
    test_util.empty_bucket(bucket)
    job_ids = []
    for table in resources.CDM_TABLES:
        job_ids.append(
            CombineEhrRdrTest._upload_file_to_bucket(
                bucket, dataset_id, path, table))
        # Domain tables also get a companion _mapping_<table> load.
        if mappings and table in DOMAIN_TABLES:
            mapping_table = '_mapping_{table}'.format(table=table)
            job_ids.append(
                CombineEhrRdrTest._upload_file_to_bucket(
                    bucket, dataset_id, path, mapping_table))
    # Block until every load job finishes; fail fast if any did not.
    incomplete_jobs = bq_utils.wait_on_jobs(job_ids)
    if len(incomplete_jobs) > 0:
        message = "Job id(s) %s failed to complete" % incomplete_jobs
        raise RuntimeError(message)
    test_util.empty_bucket(bucket)
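
_upload_file_to_bucket itself is not included in this listing. Purely as a hypothetical sketch of its upload step, assuming the test data lives at <path>/<table>.csv and using the google-cloud-storage client directly; creating the BigQuery load job and returning its job id is project-specific and omitted here.

import os

from google.cloud import storage


def upload_table_csv(bucket_name, path, table):
    """Copy <path>/<table>.csv into the bucket under the table's name."""
    client = storage.Client()
    blob = client.bucket(bucket_name).blob('{0}.csv'.format(table))
    blob.upload_from_filename(os.path.join(path, '{0}.csv'.format(table)))
    return blob.name
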
Example #5
def tearDown(self):
    self._empty_bucket()
    bucket_nyc = gcs_utils.get_hpo_bucket('nyc')
    test_util.empty_bucket(bucket_nyc)
    test_util.empty_bucket(gcs_utils.get_drc_bucket())
    test_util.delete_all_tables(self.bigquery_dataset_id)
Example #6
def _empty_hpo_buckets(self):
    for hpo_id in self.hpo_ids:
        bucket = gcs_utils.get_hpo_bucket(hpo_id)
        test_util.empty_bucket(bucket)
Example #7
def tearDown(self):
    self._empty_bucket()
    test_util.empty_bucket(self.bucket)
Example #8
def tearDown(self):
    test_util.delete_all_tables(bq_utils.get_dataset_id())
    test_util.empty_bucket(self.hpo_bucket)
Example #9
def setUp(self):
    self.hpo_bucket = gcs_utils.get_hpo_bucket(test_util.FAKE_HPO_ID)
    test_util.empty_bucket(self.hpo_bucket)
    test_util.delete_all_tables(bq_utils.get_dataset_id())
Example #10
def tearDown(self):
    self._empty_bucket()
    bucket_nyc = gcs_utils.get_hpo_bucket('nyc')
    test_util.empty_bucket(bucket_nyc)
Example #11
def tearDown(self):
    test_util.empty_bucket(self.bucket)
    test_util.delete_all_tables(self.dataset_id)
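
Taken together, the setUp and tearDown examples above share one pattern: empty the HPO bucket and drop all tables before a test runs and again after it, so no test depends on leftover objects or tables. Below is a condensed sketch of that pattern, assuming the flat module names (gcs_utils, bq_utils, test_util) used throughout the snippets are importable as-is.

import unittest

import bq_utils
import gcs_utils
import test_util


class CleanupPatternTest(unittest.TestCase):

    def setUp(self):
        # Resolve the fake HPO's bucket and the working dataset, then wipe both.
        self.hpo_bucket = gcs_utils.get_hpo_bucket(test_util.FAKE_HPO_ID)
        self.dataset_id = bq_utils.get_dataset_id()
        test_util.empty_bucket(self.hpo_bucket)
        test_util.delete_all_tables(self.dataset_id)

    def tearDown(self):
        # Leave the environment as clean as it was found.
        test_util.empty_bucket(self.hpo_bucket)
        test_util.delete_all_tables(self.dataset_id)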