def generate_file_b_custom_account_download_export_query(source, inner_sql, export_options):
    """
    Wrap a custom account File B (object class program activity) download query in the
    roll-up SQL requested by DEV-3997: drop excess $0 sum rows and merge missing
    direct/reimbursable rows where possible. The ORM could not express this cleanly,
    so the inner download query is embedded into a SQL template read from disk.

    A random suffix is appended to the temp table name to avoid collisions — creating
    the same temp table twice on one connection at the same time can cause blocking.
    """
    # Federal-account downloads roll up by federal symbol; everything else uses TAS.
    if source.file_type == "federal_account":
        symbol_column = "federal_account_symbol"
    else:
        symbol_column = "treasury_account_symbol"

    template_path = settings.APP_DIR / "download" / "filestreaming" / "file_b_custom_account_download_export.sql"
    template_sql = template_path.read_text()

    randomized_temp_table = f"temp_file_b_custom_account_download_{generate_random_string(10)}"

    return template_sql.format(
        tas_or_fas=symbol_column,
        inner_sql=inner_sql,
        export_options=export_options,
        temp_table=randomized_temp_table,
    )
def _generate_index_name(cls):
    """Build a unique test index name from the current UTC timestamp plus a random suffix."""
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%d-%H-%M-%S-%f")
    return f"test-{timestamp}-{generate_random_string()}"
# NOTE(review): this chunk is whitespace-mangled — several units are collapsed onto one
# physical line and the enclosing definition of the leading statements is cut off above.
# It appears to contain: (1) the tail of a pytest fixture that patches the `sleep` used by
# the ETL helpers/rapidloader with a ~1ms `_sleep` stand-in (presumably `baby_sleeps`,
# referenced by the test below — TODO confirm against the full file); (2) a module-level
# `config` dict for the ES award loader (fixed processing/start datetimes, 2008-2020
# fiscal years, full non-incremental load, timestamp+random test index name); and
# (3) `test_es_award_loader_class`, which instantiates an Elasticsearch client, builds a
# `Rapidloader`, asserts its class name, and runs its load steps. Left byte-identical
# because the true nesting/indentation cannot be recovered from this view.
sleep(0.001) monkeypatch.setattr("usaspending_api.etl.es_etl_helpers.sleep", _sleep) monkeypatch.setattr("usaspending_api.etl.rapidloader.sleep", _sleep) config = { "root_index": "award-query", "processing_start_datetime": datetime(2019, 12, 13, 16, 10, 33, 729108, tzinfo=timezone.utc), "verbose": False, "load_type": "awards", "process_deletes": False, "directory": Path(__file__).resolve().parent, "skip_counts": False, "index_name": "test-{}-{}".format( datetime.now(timezone.utc).strftime("%Y-%m-%d-%H-%M-%S-%f"), generate_random_string() ), "create_new_index": True, "snapshot": False, "fiscal_years": [2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020], "starting_date": datetime(2007, 10, 1, 0, 0, tzinfo=timezone.utc), "max_query_size": 10000, "is_incremental_load": False, } def test_es_award_loader_class(award_data_fixture, elasticsearch_award_index, baby_sleeps): elasticsearch_client = instantiate_elasticsearch_client() loader = Rapidloader(config, elasticsearch_client) assert loader.__class__.__name__ == "Rapidloader" loader.run_load_steps()
# NOTE(review): incomplete fragment — this is the interior of a dict literal whose opening
# brace and assignment target are outside the visible chunk (it looks like a duplicate /
# chunk-overlap of the `config` dict above: same keys and values, only formatting differs —
# TODO confirm against the full file). Left byte-identical because it cannot be safely
# reformatted or rewritten without its enclosing assignment.
"processing_start_datetime": datetime(2019, 12, 13, 16, 10, 33, 729108, tzinfo=timezone.utc), "verbose": False, "load_type": "awards", "process_deletes": False, "directory": Path(__file__).resolve().parent, "skip_counts": False, "index_name": "test-{}-{}".format( datetime.now(timezone.utc).strftime("%Y-%m-%d-%H-%M-%S-%f"), generate_random_string()), "create_new_index": True, "snapshot": False, "fiscal_years": [ 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 ], "starting_date": datetime(2007, 10, 1, 0, 0, tzinfo=timezone.utc), "max_query_size": 10000, "is_incremental_load": False, }