def test_run_export(self):
    folder_prefix = 'dummy-prefix-2018-03-24/'
    main._upload_achilles_files(test_util.FAKE_HPO_ID, folder_prefix)
    main.run_export(hpo_id=test_util.FAKE_HPO_ID, folder_prefix=folder_prefix)
    bucket_objects = gcs_utils.list_bucket(self.hpo_bucket)
    actual_object_names = [obj['name'] for obj in bucket_objects]
    for report in common.ALL_REPORT_FILES:
        prefix = folder_prefix + common.ACHILLES_EXPORT_PREFIX_STRING + test_util.FAKE_HPO_ID + '/'
        expected_object_name = prefix + report
        self.assertIn(expected_object_name, actual_object_names)
    datasources_json_path = folder_prefix + common.ACHILLES_EXPORT_DATASOURCES_JSON
    self.assertIn(datasources_json_path, actual_object_names)
    datasources_json = gcs_utils.get_object(self.hpo_bucket, datasources_json_path)
    datasources_actual = json.loads(datasources_json)
    datasources_expected = {
        'datasources': [{
            'name': test_util.FAKE_HPO_ID,
            'folder': test_util.FAKE_HPO_ID,
            'cdmVersion': 5
        }]
    }
    self.assertDictEqual(datasources_expected, datasources_actual)

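# The assertions above encode the export layout: each report lands at
#   <folder_prefix><ACHILLES_EXPORT_PREFIX_STRING><hpo_id>/<report>
# and a datasources manifest lands at
#   <folder_prefix><ACHILLES_EXPORT_DATASOURCES_JSON>
# (path shapes read off the assertions, not off the run_export implementation).
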
def test_run_export_with_target_bucket(self):
    folder_prefix = 'dummy-prefix-2018-03-24/'
    bucket_nyc = gcs_utils.get_hpo_bucket('nyc')
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(self.hpo_bucket, hpo_id=None)
    main.run_export(folder_prefix=folder_prefix, target_bucket=bucket_nyc)
    bucket_objects = gcs_utils.list_bucket(bucket_nyc)
    actual_object_names = [obj['name'] for obj in bucket_objects]
    for report in common.ALL_REPORT_FILES:
        expected_object_name = (folder_prefix +
                                common.ACHILLES_EXPORT_PREFIX_STRING +
                                'default/' + report)
        self.assertIn(expected_object_name, actual_object_names)
    datasources_json_path = folder_prefix + common.ACHILLES_EXPORT_DATASOURCES_JSON
    self.assertIn(datasources_json_path, actual_object_names)
    datasources_json = gcs_utils.get_object(bucket_nyc, datasources_json_path)
    datasources_actual = json.loads(datasources_json)
    datasources_expected = {
        'datasources': [{
            'name': 'default',
            'folder': 'default',
            'cdmVersion': 5
        }]
    }
    self.assertDictEqual(datasources_expected, datasources_actual)

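# With no datasource/HPO id supplied, the manifest is expected to serialize as
#   {"datasources": [{"name": "default", "folder": "default", "cdmVersion": 5}]}
# i.e. run_export falls back to a 'default' datasource name and folder
# (inferred from the assertions above).
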
# The mock_is_hpo_id parameter implies a mock.patch decorator that was dropped
# from this excerpt; the patch target below is an assumption (mock assumed imported).
@mock.patch('validation.main.is_hpo_id')
def test_run_export_with_target_bucket_and_datasource_id(self, mock_is_hpo_id):
    # validation/main.py INTEGRATION TEST
    mock_is_hpo_id.return_value = True
    folder_prefix = 'dummy-prefix-2018-03-24/'
    bucket_nyc = gcs_utils.get_hpo_bucket('nyc')
    main.run_export(datasource_id=FAKE_HPO_ID,
                    folder_prefix=folder_prefix,
                    target_bucket=bucket_nyc)
    bucket_objects = gcs_utils.list_bucket(bucket_nyc)
    actual_object_names = [obj['name'] for obj in bucket_objects]
    for report in common.ALL_REPORT_FILES:
        prefix = folder_prefix + common.ACHILLES_EXPORT_PREFIX_STRING + FAKE_HPO_ID + '/'
        expected_object_name = prefix + report
        self.assertIn(expected_object_name, actual_object_names)
    datasources_json_path = folder_prefix + common.ACHILLES_EXPORT_DATASOURCES_JSON
    self.assertIn(datasources_json_path, actual_object_names)
    datasources_json = gcs_utils.get_object(bucket_nyc, datasources_json_path)
    datasources_actual = json.loads(datasources_json)
    datasources_expected = {
        'datasources': [{
            'name': FAKE_HPO_ID,
            'folder': FAKE_HPO_ID,
            'cdmVersion': 5
        }]
    }
    self.assertDictEqual(datasources_expected, datasources_actual)

# Same assumed mock.patch decorator as above (missing from the excerpt).
@mock.patch('validation.main.is_hpo_id')
def test_run_export(self, mock_is_hpo_id):
    # validation/main.py INTEGRATION TEST
    mock_is_hpo_id.return_value = True
    folder_prefix: str = 'dummy-prefix-2018-03-24/'
    main._upload_achilles_files(FAKE_HPO_ID, folder_prefix)
    main.run_export(datasource_id=FAKE_HPO_ID, folder_prefix=folder_prefix)
    storage_bucket = self.storage_client.get_bucket(self.hpo_bucket)
    bucket_objects = storage_bucket.list_blobs()
    actual_object_names: list = [obj.name for obj in bucket_objects]
    for report in common.ALL_REPORT_FILES:
        prefix: str = f'{folder_prefix}{common.ACHILLES_EXPORT_PREFIX_STRING}{FAKE_HPO_ID}/'
        expected_object_name: str = f'{prefix}{report}'
        self.assertIn(expected_object_name, actual_object_names)
    datasources_json_path: str = folder_prefix + common.ACHILLES_EXPORT_DATASOURCES_JSON
    self.assertIn(datasources_json_path, actual_object_names)
    datasources_blob = storage_bucket.blob(datasources_json_path)
    datasources_json: str = datasources_blob.download_as_bytes().decode()
    datasources_actual: dict = json.loads(datasources_json)
    datasources_expected: dict = {
        'datasources': [{
            'name': FAKE_HPO_ID,
            'folder': FAKE_HPO_ID,
            'cdmVersion': 5
        }]
    }
    self.assertDictEqual(datasources_expected, datasources_actual)

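# Note: google-cloud-storage also offers Blob.download_as_text(), which reads
# the object straight to str (UTF-8 by default) and is equivalent to the
# download_as_bytes().decode() pair used above.
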
def test_run_export(self):
    folder_prefix = 'dummy-prefix-2018-03-24/'
    main._upload_achilles_files(test_util.FAKE_HPO_ID, folder_prefix)
    main.run_export(test_util.FAKE_HPO_ID, folder_prefix)
    for report in common.ALL_REPORT_FILES:
        _reports_prefix = folder_prefix + common.ACHILLES_EXPORT_PREFIX_STRING + test_util.FAKE_HPO_ID + '/'
        # A non-None metadata result is taken as proof the report object exists.
        _exist_check = gcs_utils.get_metadata(self.hpo_bucket, _reports_prefix + report)
        self.assertIsNotNone(_exist_check)

def test_run_export_without_datasource_id(self):
    # validation/main.py INTEGRATION TEST
    with self.assertRaises(RuntimeError):
        main.run_export(datasource_id=None, target_bucket=None)

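# A minimal sketch of the guard the last test exercises, assuming run_export
# validates its inputs up front (names and message are illustrative, not the
# actual validation/main.py code):
#
#     def run_export(datasource_id=None, folder_prefix='', target_bucket=None):
#         if datasource_id is None and target_bucket is None:
#             raise RuntimeError(
#                 'run_export requires a datasource_id or a target_bucket')
#         ...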