def run_export(hpo_id=None, folder_prefix="", target_bucket=None):
    """
    Run export queries for an HPO and store JSON payloads in specified folder
    in (optional) target bucket.

    :param hpo_id: ID of the HPO to run export for. This is the data source
        name in the report. If None, target_bucket must be provided.
    :param folder_prefix: Relative base path to store report. Empty by default.
    :param target_bucket: Bucket to save report. If None, use bucket
        associated with hpo_id.
    :return: list of upload results, one per report plus one for
        datasources.json
    :raises RuntimeError: if both hpo_id and target_bucket are None
    """
    results = []
    # Using separate var rather than hpo_id here because hpo_id None needed in calls below
    datasource_name = 'default'
    if hpo_id is None:
        if target_bucket is None:
            raise RuntimeError(
                'Cannot export if neither hpo_id or target_bucket is specified.'
            )
    else:
        datasource_name = hpo_id
        if target_bucket is None:
            # Fall back to the bucket associated with this HPO
            target_bucket = gcs_utils.get_hpo_bucket(hpo_id)
    logging.info('Exporting %s report to bucket %s', datasource_name,
                 target_bucket)

    # Run export queries and store json payloads in specified folder in the target bucket
    reports_prefix = (folder_prefix + ACHILLES_EXPORT_PREFIX_STRING +
                      datasource_name + '/')
    for export_name in common.ALL_REPORTS:
        sql_path = os.path.join(export.EXPORT_PATH, export_name)
        result = export.export_from_path(sql_path, hpo_id)
        content = json.dumps(result)
        fp = StringIO(content)
        result = gcs_utils.upload_object(
            target_bucket, reports_prefix + export_name + '.json', fp)
        results.append(result)
    # datasources.json indexes the exported data sources for the report UI
    result = save_datasources_json(hpo_id=hpo_id,
                                   folder_prefix=folder_prefix,
                                   target_bucket=target_bucket)
    results.append(result)
    return results
def _test_report_export(self, report):
    """Export the given report for the fake HPO and return its payload."""
    sql_path = os.path.join(export.EXPORT_PATH, report)
    return export.export_from_path(sql_path, FAKE_HPO_ID)
def _test_report_export(self, report):
    """Populate the HPO bucket with synpuf fixtures, then export *report*
    for the fake HPO and return its payload."""
    # Load fixture data before running the export
    test_util.get_synpuf_results_files()
    test_util.populate_achilles(self.hpo_bucket)
    query_path = os.path.join(export.EXPORT_PATH, report)
    return export.export_from_path(query_path, FAKE_HPO_ID)