def get_all_award_ids_in_idv_hierarchy(root_idv_award_id):
    """
    Return the internal (surrogate, integer) award id of every award in the
    hierarchy rooted at the provided IDV award id, including the root IDV
    itself.

    The IDV parent/child linkage cannot be traversed cleanly with the Django
    ORM, so this drops down to raw SQL against the parent_award bridge table.
    """
    sql = """
        with cte as (
            select    award_id
            from      parent_award
            where     award_id = %(root_idv_award_id)s
            union all
            select    cpa.award_id
            from      parent_award ppa
                      inner join parent_award cpa on cpa.parent_award_id = ppa.award_id
            where     ppa.award_id = %(root_idv_award_id)s
        )
        select  ca.id
        from    cte
                inner join awards pa on pa.id = cte.award_id
                inner join awards ca on ca.parent_award_piid = pa.piid and
                        ca.fpds_parent_agency_id = pa.fpds_agency_id
        union all
        select  %(root_idv_award_id)s
    """
    with get_connection().cursor() as cursor:
        cursor.execute(sql, {"root_idv_award_id": root_idv_award_id})
        rows = cursor.fetchall()
    return [award_id for (award_id,) in rows]
def _import_raw_agencies(self):
    """
    Bulk insert the parsed agency rows (self.agencies) into the raw agency
    staging table in a single round trip and return the inserted row count.
    """
    insert_sql = """
        insert into temp_load_agencies_raw_agency (
            row_number, cgac_agency_code, agency_name, agency_abbreviation, frec,
            frec_entity_description, frec_abbreviation, subtier_code, subtier_name,
            subtier_abbreviation, toptier_flag, is_frec, user_selectable, mission,
            website, congressional_justification, icon_filename
        ) values %s
    """
    connection = get_connection(read_only=False)
    with connection.cursor() as cursor:
        # page_size=len(...) keeps everything in one INSERT statement.
        execute_values(cursor.cursor, insert_sql, self.agencies, page_size=len(self.agencies))
        return cursor.rowcount
def get_descendant_award_ids(root_idv_award_id, include_child_idvs):
    """
    Return the award id of every child and grandchild award of the provided
    IDV award id and, when include_child_idvs is True, child IDVs as well.

    The IDV hierarchy is not traversable through the Django ORM, hence the
    raw SQL against the parent_award bridge table.
    """
    # '%%' is the literal '%' once psycopg2 applies parameter escaping.
    idv_filter = "" if include_child_idvs else " and ca.type not like 'IDV%%'"
    sql = """
        with cte as (
            select    award_id
            from      parent_award
            where     award_id = %(root_idv_award_id)s
            union all
            select    cpa.award_id
            from      parent_award ppa
                      inner join parent_award cpa on cpa.parent_award_id = ppa.award_id
            where     ppa.award_id = %(root_idv_award_id)s
        )
        select  ca.id
        from    cte
                inner join awards pa on pa.id = cte.award_id
                inner join awards ca on ca.parent_award_piid = pa.piid and
                        ca.fpds_parent_agency_id = pa.fpds_agency_id
    """ + idv_filter
    with get_connection().cursor() as cursor:
        cursor.execute(sql, {"root_idv_award_id": root_idv_award_id})
        rows = cursor.fetchall()
    return [award_id for (award_id,) in rows]
def test_run_script(setup_test_data):
    """ Test that the populate_reporting_agency_tas script acts as expected """
    connection = get_connection(read_only=False)
    sql_path = settings.APP_DIR / "reporting" / "management" / "sql" / "populate_reporting_agency_tas.sql"
    test_sql = sql_path.read_text()

    # Executing the SQL and testing the entry with only one record for the period/fiscal year/tas per table
    with connection.cursor() as cursor:
        cursor.execute(test_sql)
    results = ReportingAgencyTas.objects.filter(
        fiscal_year=2019, fiscal_period=3, tas_rendering_label="tas-1").all()
    assert len(results) == 1
    assert results[0].appropriation_obligated_amount == 50
    assert results[0].object_class_pa_obligated_amount == 20.5
    assert results[0].diff_approp_ocpa_obligated_amounts == 29.5

    # Testing an entry with multiple rows that roll up into a single period/fiscal year/tas
    results = ReportingAgencyTas.objects.filter(
        fiscal_year=2019, fiscal_period=3, tas_rendering_label="tas-2").all()
    assert len(results) == 1
    assert results[0].appropriation_obligated_amount == 41
    assert results[0].object_class_pa_obligated_amount == Decimal("42.30")
    assert results[0].diff_approp_ocpa_obligated_amounts == Decimal("-1.30")

    # Making sure that 2 different agencies under the same year/period don't get rolled up together
    # NOTE(review): these positional asserts rely on the queryset's default
    # ordering; if the model declares no ordering, row order is not guaranteed
    # by Postgres — consider adding an explicit order_by. TODO confirm.
    results = ReportingAgencyTas.objects.filter(fiscal_year=2019, fiscal_period=3).all()
    assert len(results) == 3
    assert results[0].diff_approp_ocpa_obligated_amounts == 29.5
    assert results[1].diff_approp_ocpa_obligated_amounts == Decimal("-1.30")
    assert results[2].diff_approp_ocpa_obligated_amounts == 20.5
def operations_fixture():
    """
    Create and populate the two small composite-key tables (t1/t2) that the
    operations tests compare against each other.  The rows are seeded so the
    tables share one identical row, one row differing only in non-key columns,
    and one row unique to each table.
    """
    setup_sql = """
        create table t1 (id1 int, id2 int, name text, description text, primary key (id1, id2));
        insert into t1 values (1, 2, 'three', 'four');
        insert into t1 values (4, 5, 'six', 'seven');
        insert into t1 values (8, 8, 'eight', 'eight');
        create table t2 (id1 int, id2 int, name text, description text, primary key (id1, id2));
        insert into t2 values (1, 2, 'three', 'four');
        insert into t2 values (4, 5, 'not six', 'not seven');
        insert into t2 values (9, 9, 'nine', 'nine');
    """
    connection = get_connection(read_only=False)
    with connection.cursor() as cursor:
        cursor.execute(setup_sql)
def test_get_connection():
    """
    get_connection should hand back a usable connection for the default call,
    a model-based call, and an explicit writable call.
    """

    def _assert_connection_works(connection):
        # Each connection must be able to run RAW_SQL and yield the known rows.
        with connection.cursor() as cursor:
            cursor.execute(RAW_SQL)
            assert fetchall_to_ordered_dictionary(cursor) == EXPECTED_RESPONSE_ORDERED_DICTIONARY

    _assert_connection_works(get_connection())
    _assert_connection_works(get_connection(User, True))
    _assert_connection_works(get_connection(read_only=False))
def test_get_connection():
    """
    Every supported way of requesting a connection — default, model-based,
    and writable — must return a connection that can execute RAW_SQL and
    produce the expected rows.
    """
    call_variants = [
        ((), {}),                        # default (read-only) connection
        ((User, True), {}),              # model + read_only positional form
        ((), {"read_only": False}),      # explicit writable connection
    ]
    for args, kwargs in call_variants:
        connection = get_connection(*args, **kwargs)
        with connection.cursor() as cursor:
            cursor.execute(RAW_SQL)
            result = fetchall_to_ordered_dictionary(cursor)
            assert result == EXPECTED_RESPONSE_ORDERED_DICTIONARY
def _import_raw_agencies(self):
    """
    Load the parsed agency rows (self.agencies) into the temp table using the
    insert_into.sql template and return the number of rows inserted.
    """
    sql_file = Path(self.etl_dml_sql_directory) / "insert_into.sql"
    insert_sql = sql_file.read_text().format(temp_table=TEMP_TABLE_NAME)
    connection = get_connection(read_only=False)
    with connection.cursor() as cursor:
        # One INSERT statement for all rows (page_size spans the whole list).
        execute_values(cursor.cursor, insert_sql, self.agencies, page_size=len(self.agencies))
        return cursor.rowcount
def make_a_table():
    """Create the composite-primary-key scratch table used by these tests."""
    ddl = """
        create table totally_tubular_testing_table_for_two (
            id1         int,
            id2         int,
            name        text,
            description text,
            primary key (id1, id2)
        )
    """
    with get_connection(read_only=False).cursor() as cursor:
        cursor.execute(ddl)
def _stage_table_mock(source, destination, staging):
    """
    Stand-in for the real staging step: rebuild the broker_subaward temp table
    empty (structure copied from broker_subaward) and insert SAMPLE_DATA into
    it row by row.  Returns the number of rows staged.
    """
    insert_statement = "insert into temp_load_subawards_broker_subaward (%s) values %s"
    connection = get_connection(read_only=False)
    with connection.cursor() as cursor:
        cursor.execute("drop table if exists temp_load_subawards_broker_subaward")
        # "where 0 = 1" copies the column layout without copying any rows.
        cursor.execute("create table temp_load_subawards_broker_subaward as select * from broker_subaward where 0 = 1")
        for record in SAMPLE_DATA:
            column_names = record.keys()
            row_values = tuple(record[name] for name in column_names)
            # mogrify renders the column list (AsIs, unquoted) and the values
            # (properly escaped) into a concrete INSERT statement.
            rendered = cursor.cursor.mogrify(insert_statement, (AsIs(", ".join(column_names)), row_values))
            cursor.execute(rendered)
    return len(SAMPLE_DATA)
def _execute_sql(self, sql):
    """ Pretty straightforward.  Executes some SQL against a writable connection. """
    if self.log_sql:
        logger.info(sql)
    with get_connection(read_only=False).cursor() as cursor:
        cursor.execute(sql)
        affected = cursor.rowcount
    # rowcount is -1 for statements that report no row count (e.g. DDL).
    if affected > -1:
        logger.info("{:,} rows affected".format(affected))
def delete_rows(self, removed_records: dict) -> None:
    """
    Delete previously loaded rows that no longer exist upstream.

    Args:
        removed_records: mapping of date -> iterable of primary-key values to
            delete; dates with no ids are logged and skipped.

    The id values are passed as a bound parameter (psycopg2 adapts a Python
    tuple to a SQL IN list, including the single-element case) instead of
    being string-formatted into the statement, which removes the old
    trailing-comma tuple hack and any injection surface from the values.
    Table and column names come from self and cannot be parameterized.
    """
    delete_template = "DELETE FROM {table} WHERE {key} IN %s"
    sql = delete_template.format(table=self.destination_table_name, key=self.shared_pk)
    connection = get_connection(read_only=False)
    with connection.cursor() as cursor:
        for date, ids in removed_records.items():
            if len(ids) == 0:
                logger.warning(f"No records to delete for '{date}'")
                continue
            cursor.execute(sql, (tuple(ids),))
            logger.info(f"Removed {cursor.rowcount} rows previous to '{date}'")
def _get_maxes(self):
    """
    Get some values from the broker_subaward table that we can use to
    identify new/updated records in Broker.  Returns the single row
    (max id, max created_at, max updated_at).
    """
    sql = "select max(id), max(created_at), max(updated_at) from broker_subaward"
    if self.log_sql:
        logger.info(sql)
    with Timer("Retrieve incremental values from broker_subaward"):
        with get_connection().cursor() as cursor:
            cursor.execute(sql)
            # Aggregate query always yields exactly one row.
            return cursor.fetchone()
def delete_rows(self, removed_records):
    """
    Delete rows matched by primary key that were last updated before the
    given date.

    Args:
        removed_records: mapping of date -> iterable of primary-key values.

    Fixes over the previous version:
    - ids and date are bound parameters rather than string-formatted into the
      SQL (psycopg2 adapts a tuple to an IN list, which also makes the
      single-element trailing-comma hack unnecessary, and the date can no
      longer break or inject into the statement).
    - an empty id collection is skipped with a warning instead of producing
      the invalid SQL "IN ()".
    Table and column names come from self and cannot be parameterized.
    """
    delete_template = "DELETE FROM {table} WHERE {key} IN %s AND updated_at < %s::date"
    sql = delete_template.format(table=self.destination_table_name, key=self.shared_pk)
    connection = get_connection(read_only=False)
    with connection.cursor() as cursor:
        for date, ids in removed_records.items():
            if len(ids) == 0:
                logger.warning(f"No records to delete for '{date}'")
                continue
            cursor.execute(sql, (tuple(ids), date))
            logger.info(
                f"Removed {cursor.rowcount} rows from {date} delete records"
            )
def handle(self, *args, **options):
    """
    Management-command entry point: read and validate the agency file, then
    load it inside a single transaction so any failure rolls everything back.
    """
    self.agency_file = options["agency_file"]

    logger.info("AGENCY FILE: {}".format(self.agency_file))

    # Writable connection is stored on self for use by the _execute/_import helpers.
    self.connection = get_connection(read_only=False)

    try:
        with Timer("Import agencies"):
            # Read/validate happen outside the transaction — they touch no tables.
            self._read_agencies()
            self._validate_agencies()
            with transaction.atomic():
                self._execute_sql_file("create_temp_table.sql")
                self._import_agencies()
                self._execute_sql_file("populate_dependent_tables.sql")
            # VACUUM cannot run inside a transaction block, so this is after commit.
            self._vacuum_tables()
    except Exception:
        logger.error("ALL CHANGES WERE ROLLED BACK DUE TO EXCEPTION")
        raise
def _import_def_codes(self):
    """
    Bulk insert the parsed DEF code rows (self.def_codes) into the temp
    staging table in one statement and return the inserted row count.
    """
    insert_sql = """
        insert into temp_load_disaster_emergency_fund_codes (
            row_number, code, public_law, title, group_name, urls
        ) values %s
    """
    connection = get_connection(read_only=False)
    with connection.cursor() as cursor:
        execute_values(cursor.cursor, insert_sql, self.def_codes, page_size=len(self.def_codes))
        return cursor.rowcount
def _import_object_classes(self):
    """
    Bulk insert the expanded object class rows (self.full_object_classes)
    into the temp staging table in one statement; returns the row count.
    """
    insert_sql = """
        insert into temp_load_object_classes (
            row_number, major_object_class, major_object_class_name, object_class,
            object_class_name, direct_reimbursable, direct_reimbursable_name
        ) values %s
    """
    connection = get_connection(read_only=False)
    with connection.cursor() as cursor:
        execute_values(
            cursor.cursor,
            insert_sql,
            self.full_object_classes,
            page_size=len(self.full_object_classes),
        )
        return cursor.rowcount
def test_run_script(setup_test_data):
    """ Test that the populate_reporting_agency_tas script acts as expected """
    connection = get_connection(read_only=False)
    sql_path = settings.APP_DIR / "reporting" / "management" / "sql" / "populate_reporting_agency_overview.sql"
    test_sql = sql_path.read_text()

    # Executing the SQL and testing the entry with only one record for the period/fiscal year in reporting_agency_tas
    with connection.cursor() as cursor:
        cursor.execute(test_sql)
    results = ReportingAgencyOverview.objects.filter(fiscal_year=2019, fiscal_period=3, toptier_code="987").all()
    assert len(results) == 1
    assert results[0].total_dollars_obligated_gtas == Decimal("-3.2")
    assert results[0].total_budgetary_resources == 15.5
    assert results[0].total_diff_approp_ocpa_obligated_amounts == 20.5

    # Testing an agency with multiple rows in reporting_agency_tas that roll up into a single period/fiscal year
    results = ReportingAgencyOverview.objects.filter(fiscal_year=2019, fiscal_period=3, toptier_code="123").all()
    assert len(results) == 1
    assert results[0].total_dollars_obligated_gtas == Decimal("23.54")
    assert results[0].total_budgetary_resources == 91
    assert results[0].total_diff_approp_ocpa_obligated_amounts == Decimal("28.2")
def test_build_composable_order_by():
    """
    Exercise build_composable_order_by: single and multi column inputs,
    per-column sort direction and nulls placement, empty inputs, invalid
    inputs (ValueError), and finally a live query using the generated clause.
    """
    connection = get_connection()
    with connection.cursor() as cursor:

        def _build_composable_order_by(*args, **kwargs):
            # Render the psycopg2 Composable to a plain string for comparison.
            result = build_composable_order_by(*args, **kwargs)
            return result.as_string(cursor.connection)

        # Single column, with and without direction / nulls handling.
        assert _build_composable_order_by('column') == 'order by "column"'
        assert _build_composable_order_by(
            'this.column') == 'order by "this"."column"'
        assert _build_composable_order_by('column', 'asc') == 'order by "column" asc'
        assert _build_composable_order_by(
            'column', sort_nulls='first') == 'order by "column" nulls first'
        assert _build_composable_order_by(
            'column', 'asc', 'first') == 'order by "column" asc nulls first'

        # Multiple columns: scalar direction/nulls apply to every column,
        # list forms apply positionally.
        assert (_build_composable_order_by(
            ['column1', 'column2']) == 'order by "column1", "column2"')
        assert (_build_composable_order_by(
            ['column1', 'column2'], 'desc') == 'order by "column1" desc, "column2" desc')
        assert (_build_composable_order_by(['column1', 'column2'], sort_nulls='last') ==
                'order by "column1" nulls last, "column2" nulls last')
        assert (
            _build_composable_order_by(['column1', 'column2'], 'desc', 'last') ==
            'order by "column1" desc nulls last, "column2" desc nulls last'
        )
        assert (_build_composable_order_by(
            ['column1', 'column2'], ['asc', 'desc']) == 'order by "column1" asc, "column2" desc')
        assert (_build_composable_order_by(
            ['column1', 'column2'], sort_nulls=[
                'first', 'last'
            ]) == 'order by "column1" nulls first, "column2" nulls last')
        assert (
            _build_composable_order_by(['column1', 'column2'], ['asc', 'desc'], ['first', 'last']) ==
            'order by "column1" asc nulls first, "column2" desc nulls last'
        )

        # Empty inputs produce an empty clause rather than raising.
        assert _build_composable_order_by(None) == ''
        assert _build_composable_order_by('') == ''
        assert _build_composable_order_by([]) == ''

        # Invalid column types, directions, nulls keywords, and mismatched
        # list lengths must all raise ValueError.
        with pytest.raises(ValueError):
            _build_composable_order_by([1, 2, 3])
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], 'NOPE')
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], sort_nulls='NOPE')
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], ['asc', 'NOPE'])
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], sort_nulls=['first', 'NOPE'])
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], ['asc', 'asc', 'asc'])
        with pytest.raises(ValueError):
            _build_composable_order_by(
                ['column1', 'column2'], sort_nulls=['first', 'first', 'first'])

        # End-to-end: the generated clause actually orders a real query.
        _sql = (SQL('select id, latest_transaction_id from awards a ') + SQL(
            _build_composable_order_by(['a.id', 'a.latest_transaction_id'],
                                       ['desc', 'asc'], ['first', 'last'])))
        assert execute_sql_to_ordered_dictionary(_sql) == [
            OrderedDict((('id', 5), ('latest_transaction_id', 5))),
            OrderedDict((('id', 4), ('latest_transaction_id', 4))),
            OrderedDict((('id', 3), ('latest_transaction_id', 3))),
            OrderedDict((('id', 2), ('latest_transaction_id', 2))),
            OrderedDict((('id', 1), ('latest_transaction_id', 1))),
        ]
def generate_where_clause(queryset):
    """Returns the SQL and params from a queryset all ready to be plugged into an extra method."""
    query = queryset.query
    compiler = query.get_compiler(get_connection().alias)
    return query.where.as_sql(compiler, compiler.connection)
def test_build_composable_order_by():
    """
    Exercise build_composable_order_by: single and multi column inputs,
    per-column sort direction and nulls placement, empty inputs, invalid
    inputs (ValueError), and finally a live query using the generated clause.
    """
    connection = get_connection()
    with connection.cursor() as cursor:

        def _build_composable_order_by(*args, **kwargs):
            # Render the psycopg2 Composable to a plain string for comparison.
            result = build_composable_order_by(*args, **kwargs)
            return result.as_string(cursor.connection)

        # Single column, with and without direction / nulls handling.
        assert _build_composable_order_by("column") == 'order by "column"'
        assert _build_composable_order_by(
            "this.column") == 'order by "this"."column"'
        assert _build_composable_order_by("column", "asc") == 'order by "column" asc'
        assert _build_composable_order_by(
            "column", sort_nulls="first") == 'order by "column" nulls first'
        assert _build_composable_order_by(
            "column", "asc", "first") == 'order by "column" asc nulls first'

        # Multiple columns: scalar direction/nulls apply to every column,
        # list forms apply positionally.
        assert _build_composable_order_by(
            ["column1", "column2"]) == 'order by "column1", "column2"'
        assert (_build_composable_order_by(
            ["column1", "column2"], "desc") == 'order by "column1" desc, "column2" desc')
        assert (_build_composable_order_by(["column1", "column2"], sort_nulls="last") ==
                'order by "column1" nulls last, "column2" nulls last')
        assert (
            _build_composable_order_by(["column1", "column2"], "desc", "last") ==
            'order by "column1" desc nulls last, "column2" desc nulls last'
        )
        assert (_build_composable_order_by(
            ["column1", "column2"], ["asc", "desc"]) == 'order by "column1" asc, "column2" desc')
        assert (_build_composable_order_by(
            ["column1", "column2"], sort_nulls=[
                "first", "last"
            ]) == 'order by "column1" nulls first, "column2" nulls last')
        assert (
            _build_composable_order_by(["column1", "column2"], ["asc", "desc"], ["first", "last"]) ==
            'order by "column1" asc nulls first, "column2" desc nulls last'
        )

        # Empty inputs produce an empty clause rather than raising.
        assert _build_composable_order_by(None) == ""
        assert _build_composable_order_by("") == ""
        assert _build_composable_order_by([]) == ""

        # Invalid column types, directions, nulls keywords, and mismatched
        # list lengths must all raise ValueError.
        with pytest.raises(ValueError):
            _build_composable_order_by([1, 2, 3])
        with pytest.raises(ValueError):
            _build_composable_order_by(["column1", "column2"], "NOPE")
        with pytest.raises(ValueError):
            _build_composable_order_by(["column1", "column2"], sort_nulls="NOPE")
        with pytest.raises(ValueError):
            _build_composable_order_by(["column1", "column2"], ["asc", "NOPE"])
        with pytest.raises(ValueError):
            _build_composable_order_by(["column1", "column2"], sort_nulls=["first", "NOPE"])
        with pytest.raises(ValueError):
            _build_composable_order_by(["column1", "column2"], ["asc", "asc", "asc"])
        with pytest.raises(ValueError):
            _build_composable_order_by(
                ["column1", "column2"], sort_nulls=["first", "first", "first"])

        # End-to-end: the generated clause actually orders a real query.
        _sql = SQL("select id, latest_transaction_id from awards a ") + SQL(
            _build_composable_order_by(["a.id", "a.latest_transaction_id"],
                                       ["desc", "asc"], ["first", "last"]))
        assert execute_sql_to_ordered_dictionary(_sql) == [
            OrderedDict((("id", 5), ("latest_transaction_id", 5))),
            OrderedDict((("id", 4), ("latest_transaction_id", 4))),
            OrderedDict((("id", 3), ("latest_transaction_id", 3))),
            OrderedDict((("id", 2), ("latest_transaction_id", 2))),
            OrderedDict((("id", 1), ("latest_transaction_id", 1))),
        ]
def test_build_composable_order_by():
    """
    Exercise build_composable_order_by (this variant uses the `nulls=`
    keyword): single and multi column inputs, per-column direction and nulls
    placement, empty inputs, invalid inputs (ValueError), and finally a live
    query using the generated clause.
    """
    connection = get_connection()
    with connection.cursor() as cursor:

        def _build_composable_order_by(*args, **kwargs):
            # Render the psycopg2 Composable to a plain string for comparison.
            result = build_composable_order_by(*args, **kwargs)
            return result.as_string(cursor.connection)

        # Single column, with and without direction / nulls handling.
        assert _build_composable_order_by('column') == 'order by "column"'
        assert _build_composable_order_by('this.column') == 'order by "this"."column"'
        assert _build_composable_order_by('column', 'asc') == 'order by "column" asc'
        assert _build_composable_order_by('column', nulls='first') == 'order by "column" nulls first'
        assert _build_composable_order_by('column', 'asc', 'first') == 'order by "column" asc nulls first'

        # Multiple columns: scalar direction/nulls apply to every column,
        # list forms apply positionally.
        assert (
            _build_composable_order_by(['column1', 'column2']) ==
            'order by "column1", "column2"'
        )
        assert (
            _build_composable_order_by(['column1', 'column2'], 'desc') ==
            'order by "column1" desc, "column2" desc'
        )
        assert (
            _build_composable_order_by(['column1', 'column2'], nulls='last') ==
            'order by "column1" nulls last, "column2" nulls last'
        )
        assert (
            _build_composable_order_by(['column1', 'column2'], 'desc', 'last') ==
            'order by "column1" desc nulls last, "column2" desc nulls last'
        )
        assert (
            _build_composable_order_by(['column1', 'column2'], ['asc', 'desc']) ==
            'order by "column1" asc, "column2" desc'
        )
        assert (
            _build_composable_order_by(['column1', 'column2'], nulls=['first', 'last']) ==
            'order by "column1" nulls first, "column2" nulls last'
        )
        assert (
            _build_composable_order_by(['column1', 'column2'], ['asc', 'desc'], ['first', 'last']) ==
            'order by "column1" asc nulls first, "column2" desc nulls last'
        )

        # Empty inputs produce an empty clause rather than raising.
        assert _build_composable_order_by(None) == ''
        assert _build_composable_order_by('') == ''
        assert _build_composable_order_by([]) == ''

        # Invalid column types, directions, nulls keywords, and mismatched
        # list lengths must all raise ValueError.
        with pytest.raises(ValueError):
            _build_composable_order_by([1, 2, 3])
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], 'NOPE')
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], nulls='NOPE')
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], ['asc', 'NOPE'])
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], nulls=['first', 'NOPE'])
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], ['asc', 'asc', 'asc'])
        with pytest.raises(ValueError):
            _build_composable_order_by(['column1', 'column2'], nulls=['first', 'first', 'first'])

        # End-to-end: the generated clause actually orders a real query.
        _sql = (
            SQL('select id, latest_transaction_id from awards a ') +
            SQL(_build_composable_order_by(['a.id', 'a.latest_transaction_id'], ['desc', 'asc'], ['first', 'last']))
        )
        assert execute_sql_to_ordered_dictionary(_sql) == [
            OrderedDict((('id', 5), ('latest_transaction_id', 5))),
            OrderedDict((('id', 4), ('latest_transaction_id', 4))),
            OrderedDict((('id', 3), ('latest_transaction_id', 3))),
            OrderedDict((('id', 2), ('latest_transaction_id', 2))),
            OrderedDict((('id', 1), ('latest_transaction_id', 1))),
        ]