def main(args):
    """Run achilles, export its results, and upload the achilles files.

    :param args: parsed arguments; must provide ``folder`` (report folder
        name) and ``bucket`` (target GCS bucket).
    """
    folder = args.folder
    target_bucket = args.bucket
    # Downstream helpers expect a trailing-slash prefix, not a bare folder name.
    folder_prefix = folder + '/'
    _run_achilles()
    _run_export(folder_prefix=folder_prefix, target_bucket=target_bucket)
    _upload_achilles_files(folder_prefix=folder_prefix, target_bucket=target_bucket)
def main(args):
    """Run achilles, export results for the current dataset, and upload files.

    :param args: parsed arguments; must provide ``folder`` (report folder
        name) and ``bucket`` (target GCS bucket).
    """
    # Dataset comes from the environment/config helper, not from args.
    dataset_id = get_dataset_id()
    target_bucket = args.bucket
    # Downstream helpers expect a trailing-slash prefix, not a bare folder name.
    folder_prefix = args.folder + '/'
    _run_achilles()
    _run_export(datasource_id=dataset_id,
                folder_prefix=folder_prefix,
                target_bucket=target_bucket)
    _upload_achilles_files(folder_prefix=folder_prefix, target_bucket=target_bucket)
def main(args): hpo_id = args.hpo_id for table_name in common.CDM_TABLES: table_id = hpo_id + '_' + table_name if bq_utils.table_exists(table_id): print table_id, ' exists' else: print table_id, ' being created' bq_utils.create_standard_table(table_name, table_id, False) _run_achilles(hpo_id) _run_export(hpo_id)
def main(args): hpo_id = args.hpo_id folder = args.folder folder_prefix = folder + '/' for table_name in common.CDM_TABLES: table_id = hpo_id + '_' + table_name if bq_utils.table_exists(table_id): print table_id, ' exists' else: print table_id, ' being created' bq_utils.create_standard_table(table_name, table_id, False) _run_achilles(hpo_id) _run_export(hpo_id, folder_prefix) _upload_achilles_files(hpo_id, folder_prefix)
def main(args): folder = args.folder target_bucket = args.bucket folder_prefix = folder + '/' for table_name in common.CDM_TABLES: table_id = table_name if bq_utils.table_exists(table_id): print table_id, ' exists' else: print table_id, ' being created' bq_utils.create_standard_table(table_name, table_id, False) _run_achilles() _run_export(folder_prefix=folder_prefix, target_bucket=target_bucket) _upload_achilles_files(folder_prefix=folder_prefix, target_bucket=target_bucket)