def publish_v3_main_table() -> Response:
    """Rebuild and republish the zipped V3 main table.

    Downloads the individual global CSV tables from the production bucket
    into a scratch directory, merges them into a single CSV stored inside
    a zip archive, and uploads the archive back to the production bucket.

    Returns:
        Response: plain "OK" with HTTP status 200 on success.
    """
    with temporary_directory() as workdir:
        # Scratch folders for the downloaded tables and the merged output
        input_folder = workdir / "input"
        output_folder = workdir / "output"
        for folder in (input_folder, output_folder):
            folder.mkdir(parents=True, exist_ok=True)

        # Only top-level CSV tables are wanted; skip nested paths and any
        # previously-built main table (names containing "main.").
        def _wanted(blob_path) -> bool:
            if blob_path.suffix != ".csv":
                return False
            return not any(token in str(blob_path) for token in ("/", "main."))

        # Download all the global tables into our local storage
        download_folder(GCS_BUCKET_PROD, "v3", input_folder, _wanted)

        # Merge every table into one CSV written straight into a zip entry
        file_name = "covid-19-open-data.csv"
        archive_path = output_folder / f"{file_name}.zip"
        with ZipFile(archive_path, mode="w", compression=ZIP_DEFLATED) as zip_archive:
            with zip_archive.open(file_name, "w") as output_file:
                # The zip entry is a binary stream; wrap it for text output
                merge_output_tables_sqlite(
                    input_folder, TextIOWrapper(output_file), use_table_names=V3_TABLE_LIST
                )

        # Upload the results to the prod bucket
        upload_folder(GCS_BUCKET_PROD, "v3", output_folder)

    return Response("OK", status=200)
def test_make_main_table_sqlite(self):
    """Merging the published test tables should produce a valid main table."""
    with temporary_directory() as workdir:
        # Stage the fixture tables into a scratch directory
        publish_global_tables(SRC / "test" / "data", workdir, use_table_names=V3_TABLE_LIST)

        # Merge the staged tables into a single main table, then validate it
        output_path = workdir / "main.csv"
        merge_output_tables_sqlite(workdir, output_path, use_table_names=V3_TABLE_LIST)
        self._test_make_main_table_helper(output_path, OUTPUT_COLUMN_ADAPTER)