def get_full_result_log():
    """Collect and combine result CSV rows from every HPO bucket.

    For each HPO listed in the HPO CSV, looks up the site's bucket and, if a
    result CSV object exists there, parses it and converts each row into a
    log object via hpo_log_item_to_obj.

    Returns:
        list: result log objects aggregated across all reachable HPO buckets.
    """
    full_log = []
    for hpo in resources.hpo_csv():
        hpo_id = hpo['hpo_id']
        hpo_bucket = gcs_utils.get_hpo_bucket(hpo_id)
        try:
            # TODO : figure out possible errors and catch specific bucket inexistence error
            obj_metadata = gcs_utils.get_metadata(hpo_bucket, RESULT_CSV)
        except Exception:
            # Was a bare `except:` (which also traps SystemExit/KeyboardInterrupt)
            # and logged the whole row dict; log just the hpo_id instead.
            logging.warning(
                'skipping hpo {}. bucket does not exist.'.format(hpo_id))
            continue
        if obj_metadata is None:
            # Lazy %-args: the message is only formatted if this level is emitted.
            logging.info('%s was not found in %s', RESULT_CSV, hpo_bucket)
        else:
            hpo_result = gcs_utils.get_object(hpo_bucket, RESULT_CSV)
            hpo_result_file = StringIO.StringIO(hpo_result)
            hpo_result_items = resources._csv_file_to_list(hpo_result_file)
            # List comprehension instead of map+lambda; same resulting list.
            result_objects = [
                hpo_log_item_to_obj(hpo_id, item) for item in hpo_result_items
            ]
            full_log.extend(result_objects)
    return full_log
def test_get_metadata_on_existing_file(self):
    """Upload a known object, then verify get_metadata finds it by name."""
    file_name = 'person.csv'
    with open(FIVE_PERSONS_PERSON_CSV, 'rb') as person_fp:
        gcs_utils.upload_object(self.hpo_bucket, file_name, person_fp)
    found = gcs_utils.get_metadata(self.hpo_bucket, file_name)
    self.assertIsNotNone(found)
    self.assertEqual(found['name'], file_name)
def test_run_export(self):
    """After running the export, every report file should exist in the bucket."""
    folder_prefix = 'dummy-prefix-2018-03-24/'
    main._upload_achilles_files(test_util.FAKE_HPO_ID, folder_prefix)
    main.run_export(test_util.FAKE_HPO_ID, folder_prefix)
    # The reports prefix does not depend on the loop variable; compute it once
    # instead of rebuilding the same string on every iteration.
    reports_prefix = (folder_prefix + common.ACHILLES_EXPORT_PREFIX_STRING +
                      test_util.FAKE_HPO_ID + '/')
    for report in common.ALL_REPORT_FILES:
        exist_check = gcs_utils.get_metadata(self.hpo_bucket,
                                             reports_prefix + report)
        self.assertIsNotNone(exist_check)
def _validation_done(bucket, folder):
    """Return True if the processed marker object exists in the given folder.

    Args:
        bucket: name of the GCS bucket to check.
        folder: folder prefix within the bucket (expected to end with '/').

    Returns:
        bool: True when the PROCESSED_TXT marker is present, False otherwise.
    """
    # get_metadata returns None when the object is absent, so the presence
    # check collapses to a single boolean expression (was if/return True/False).
    return gcs_utils.get_metadata(
        bucket=bucket, name=folder + common.PROCESSED_TXT) is not None
def test_get_metadata_on_not_existing_file(self):
    """get_metadata should hand back the supplied default for a missing object."""
    default_value = 100
    result = gcs_utils.get_metadata(self.hpo_bucket,
                                    'this_file_does_not_exist',
                                    default_value)
    self.assertEqual(default_value, result)