def test_txn_assistance_get_or_create():
    """Test TransactionFABS.get_or_create_2 method."""

    agency1 = mommy.make("references.Agency")
    awd1 = mommy.make("awards.Award", awarding_agency=agency1)
    txn1 = mommy.make(
        "awards.TransactionNormalized",
        award=awd1,
        modification_number="1",
        awarding_agency=agency1,
        last_modified_date=date(2012, 3, 1),
    )
    mommy.make(
        "awards.TransactionFABS",
        transaction=txn1,
        business_funds_indicator="a",
        record_type=1,
        total_funding_amount=1000.00,
    )
    assert TransactionFABS.objects.all().count() == 1

    # an updated transaction should update the existing TransactionFABS
    # data, not insert a new record
    ta_dict = {
        "business_funds_indicator": "c",
        "record_type": 2,
        "total_funding_amount": 2000
    }
    ta2 = TransactionFABS.get_or_create_2(txn1, **ta_dict)
    ta2.save()
    assert TransactionFABS.objects.all().count() == 1
    t = TransactionNormalized.objects.get(id=txn1.id)
    assert t.assistance_data.business_funds_indicator == "c"
    assert t.assistance_data.record_type == 2
    assert t.assistance_data.total_funding_amount == "2000"

    # a new transaction gets a new TransactionFABS record
    ta_dict = {
        "business_funds_indicator": "z",
        "record_type": 5,
        "total_funding_amount": 8000
    }
    ta3 = TransactionFABS.get_or_create_2(
        mommy.make("awards.TransactionNormalized"), **ta_dict)
    ta3.save()
    assert TransactionFABS.objects.all().count() == 2
Example #4
    def load_transaction_fabs(self, fabs_broker_data, total_rows):
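        # Assumptions about the enclosing loader: `transaction_normalized_bulk`
        # holds the TransactionNormalized rows created earlier, in the same
        # order as `fabs_broker_data`; `fabs_bulk` is the list that collects
        # the TransactionFABS instances for the bulk_create below; and
        # BATCH_SIZE is a module-level constant.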
        logger.info('Starting bulk loading for FABS data')

        start_time = datetime.now()
        for index, row in enumerate(fabs_broker_data, 1):
            if not (index % 10000):
                logger.info(
                    'Transaction FABS: Loading row {} of {} ({})'.format(
                        str(index), str(total_rows),
                        datetime.now() - start_time))

            fabs_instance_data = load_data_into_model(
                TransactionFABS(),  # thrown away
                row,
                as_dict=True)

            fabs_instance = TransactionFABS(**fabs_instance_data)
            fabs_instance.transaction = transaction_normalized_bulk[index - 1]
            fabs_bulk.append(fabs_instance)

        logger.info(
            'Bulk creating Transaction FABS (batch_size: {})...'.format(
                BATCH_SIZE))
        TransactionFABS.objects.bulk_create(fabs_bulk, batch_size=BATCH_SIZE)
Example #5
    def insert_new_fabs(self, to_insert):
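        # For each broker FABS row: build the recipient location/LegalEntity
        # and place-of-performance location, find or create the summary Award,
        # then update the existing TransactionNormalized/TransactionFABS pair
        # (matched on afa_generated_unique) or insert a new one.
        # AWARD_UPDATE_ID_LIST is assumed to be a module-level list.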
        place_of_performance_field_map = {
            "location_country_code": "place_of_perform_country_c",
            "country_name": "place_of_perform_country_n",
            "state_code": "place_of_perfor_state_code",
            "state_name": "place_of_perform_state_nam",
            "city_name": "place_of_performance_city",
            "county_name": "place_of_perform_county_na",
            "county_code": "place_of_perform_county_co",
            "foreign_location_description": "place_of_performance_forei",
            "zip_4a": "place_of_performance_zip4a",
            "congressional_code": "place_of_performance_congr",
            "performance_code": "place_of_performance_code",
            "zip_last4": "place_of_perform_zip_last4",
            "zip5": "place_of_performance_zip5",
        }

        legal_entity_location_field_map = {
            "location_country_code": "legal_entity_country_code",
            "country_name": "legal_entity_country_name",
            "state_code": "legal_entity_state_code",
            "state_name": "legal_entity_state_name",
            "city_name": "legal_entity_city_name",
            "city_code": "legal_entity_city_code",
            "county_name": "legal_entity_county_name",
            "county_code": "legal_entity_county_code",
            "address_line1": "legal_entity_address_line1",
            "address_line2": "legal_entity_address_line2",
            "address_line3": "legal_entity_address_line3",
            "foreign_location_description": "legal_entity_foreign_descr",
            "congressional_code": "legal_entity_congressional",
            "zip_last4": "legal_entity_zip_last4",
            "zip5": "legal_entity_zip5",
            "foreign_postal_code": "legal_entity_foreign_posta",
            "foreign_province": "legal_entity_foreign_provi",
            "foreign_city_name": "legal_entity_foreign_city",
        }

        for row in to_insert:
            upper_case_dict_values(row)

            # Create new LegalEntityLocation and LegalEntity from the row data
            legal_entity_location = create_location(
                legal_entity_location_field_map, row, {"recipient_flag": True})
            recipient_name = row['awardee_or_recipient_legal']
            legal_entity = LegalEntity.objects.create(
                recipient_unique_id=row['awardee_or_recipient_uniqu'],
                recipient_name=recipient_name
                if recipient_name is not None else "",
                parent_recipient_unique_id=row['ultimate_parent_unique_ide'],
            )
            legal_entity_value_map = {
                "location": legal_entity_location,
                "business_categories": get_business_categories(row=row, data_type='fabs'),
                "business_types_description": row['business_types_desc'],
            }
            legal_entity = load_data_into_model(
                legal_entity, row, value_map=legal_entity_value_map, save=True)

            # Create the place of performance location
            pop_location = create_location(place_of_performance_field_map, row,
                                           {"place_of_performance_flag": True})

            # Find the toptier awards from the subtier awards
            awarding_agency = Agency.get_by_subtier_only(
                row["awarding_sub_tier_agency_c"])
            funding_agency = Agency.get_by_subtier_only(
                row["funding_sub_tier_agency_co"])

            # Generate the unique Award ID
            # "ASST_AW_" + awarding_sub_tier_agency_c + fain + uri

            # this will raise an exception if the cast to an int fails, that's ok since we don't want to process
            # non-numeric record type values
            record_type_int = int(row['record_type'])
            if record_type_int == 1:
                uri = row['uri'] if row['uri'] else '-NONE-'
                fain = '-NONE-'
            elif record_type_int in (2, 3):
                uri = '-NONE-'
                fain = row['fain'] if row['fain'] else '-NONE-'
            else:
                msg = "Invalid record type encountered for the following afa_generated_unique record: {}"
                raise Exception(msg.format(row['afa_generated_unique']))

            astac = row["awarding_sub_tier_agency_c"] if row["awarding_sub_tier_agency_c"] else "-NONE-"
            generated_unique_id = "ASST_AW_{}_{}_{}".format(astac, fain, uri)

            # Create the summary Award
            (created, award) = Award.get_or_create_summary_award(
                generated_unique_award_id=generated_unique_id,
                fain=row['fain'],
                uri=row['uri'],
                record_type=row['record_type'],
            )
            award.save()

            # Append row to list of Awards updated
            AWARD_UPDATE_ID_LIST.append(award.id)

            try:
                last_mod_date = datetime.strptime(str(
                    row['modified_at']), "%Y-%m-%d %H:%M:%S.%f").date()
            except ValueError:
                last_mod_date = datetime.strptime(str(row['modified_at']),
                                                  "%Y-%m-%d %H:%M:%S").date()
            parent_txn_value_map = {
                "award": award,
                "awarding_agency": awarding_agency,
                "funding_agency": funding_agency,
                "recipient": legal_entity,
                "place_of_performance": pop_location,
                "period_of_performance_start_date": format_date(row['period_of_performance_star']),
                "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
                "action_date": format_date(row['action_date']),
                "last_modified_date": last_mod_date,
                "type_description": row['assistance_type_desc'],
                "transaction_unique_id": row['afa_generated_unique'],
                "generated_unique_award_id": generated_unique_id,
            }

            fad_field_map = {
                "type": "assistance_type",
                "description": "award_description",
                "funding_amount": "total_funding_amount",
            }

            transaction_normalized_dict = load_data_into_model(
                TransactionNormalized(),  # thrown away
                row,
                field_map=fad_field_map,
                value_map=parent_txn_value_map,
                as_dict=True,
            )

            financial_assistance_data = load_data_into_model(
                TransactionFABS(), row, as_dict=True)  # thrown away

            afa_generated_unique = financial_assistance_data[
                'afa_generated_unique']
            unique_fabs = TransactionFABS.objects.filter(
                afa_generated_unique=afa_generated_unique)

            if unique_fabs.first():
                transaction_normalized_dict["update_date"] = datetime.now(
                    timezone.utc)
                transaction_normalized_dict["fiscal_year"] = fy(
                    transaction_normalized_dict["action_date"])

                # Update TransactionNormalized
                TransactionNormalized.objects.filter(
                    id=unique_fabs.first().transaction.id).update(
                        **transaction_normalized_dict)

                # Update TransactionFABS
                unique_fabs.update(**financial_assistance_data)
            else:
                # Create TransactionNormalized
                transaction = TransactionNormalized(
                    **transaction_normalized_dict)
                transaction.save()

                # Create TransactionFABS
                transaction_fabs = TransactionFABS(transaction=transaction,
                                                   **financial_assistance_data)
                transaction_fabs.save()

            # Update legal entity to map back to transaction
            legal_entity.transaction_unique_id = afa_generated_unique
            legal_entity.save()
    def update_transaction_assistance(db_cursor,
                                      fiscal_year=None,
                                      page=1,
                                      limit=500000):
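        # Pages through the Broker's published_award_financial_assistance
        # table (optionally limited to one fiscal year) and bulk-creates the
        # matching Location, LegalEntity, Award, TransactionNormalized, and
        # TransactionFABS rows. The agency lookup maps (subtier_agency_map,
        # subtier_to_agency_map, toptier_agency_map) and award_update_id_list
        # are assumed to be defined in the enclosing scope.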

        # logger.info("Getting IDs for what's currently in the DB...")
        # current_ids = TransactionFABS.objects
        #
        # if fiscal_year:
        #     current_ids = current_ids.filter(action_date__fy=fiscal_year)
        #
        # current_ids = current_ids.values_list('published_award_financial_assistance_id', flat=True)

        query = "SELECT * FROM published_award_financial_assistance"
        arguments = []

        if fiscal_year:
            # Compute the fiscal year date bounds only when a fiscal year was
            # supplied; doing it unconditionally would raise a TypeError on
            # `fiscal_year - 1` when fiscal_year is None.
            fy_begin = '10/01/' + str(fiscal_year - 1)
            fy_end = '09/30/' + str(fiscal_year)
            if arguments:
                query += " AND"
            else:
                query += " WHERE"
            query += ' action_date::Date BETWEEN %s AND %s'
            arguments += [fy_begin, fy_end]
        query += ' ORDER BY published_award_financial_assistance_id LIMIT %s OFFSET %s'
        arguments += [limit, (page - 1) * limit]

        logger.info("Executing query on Broker DB => " + query %
                    (arguments[0], arguments[1], arguments[2], arguments[3]))

        db_cursor.execute(query, arguments)

        logger.info("Running dictfetchall on db_cursor")
        award_financial_assistance_data = dictfetchall(db_cursor)

        legal_entity_location_field_map = {
            "address_line1": "legal_entity_address_line1",
            "address_line2": "legal_entity_address_line2",
            "address_line3": "legal_entity_address_line3",
            "city_name": "legal_entity_city_name",
            "congressional_code": "legal_entity_congressional",
            "county_code": "legal_entity_county_code",
            "county_name": "legal_entity_county_name",
            "foreign_city_name": "legal_entity_foreign_city",
            "foreign_postal_code": "legal_entity_foreign_posta",
            "foreign_province": "legal_entity_foreign_provi",
            "state_code": "legal_entity_state_code",
            "state_name": "legal_entity_state_name",
            "zip5": "legal_entity_zip5",
            "zip_last4": "legal_entity_zip_last4",
            "location_country_code": "legal_entity_country_code"
        }

        place_of_performance_field_map = {
            "city_name": "place_of_performance_city",
            "performance_code": "place_of_performance_code",
            "congressional_code": "place_of_performance_congr",
            "county_name": "place_of_perform_county_na",
            "foreign_location_description": "place_of_performance_forei",
            "state_name": "place_of_perform_state_nam",
            "zip4": "place_of_performance_zip4a",
            "location_country_code": "place_of_perform_country_c"
        }

        fad_field_map = {
            "type": "assistance_type",
            "description": "award_description",
        }

        logger.info("Getting total rows")
        # rows_loaded = len(current_ids)
        total_rows = len(award_financial_assistance_data)  # - rows_loaded

        logger.info("Processing " + str(total_rows) +
                    " rows of assistance data")

        # skip_count = 0

        # ROW ITERATION STARTS HERE

        lel_bulk = []
        pop_bulk = []
        legal_entity_bulk = []
        award_bulk = []

        transaction_assistance_bulk = []
        transaction_normalized_bulk = []

        logger.info(
            'Getting legal entity location objects for {} rows...'.format(
                len(award_financial_assistance_data)))
        for index, row in enumerate(award_financial_assistance_data, 1):

            # Recipient flag is true for LeL
            legal_entity_location = get_or_create_location(
                legal_entity_location_field_map,
                row, {"recipient_flag": True},
                save=False)

            lel_bulk.append(legal_entity_location)

        logger.info('Bulk creating {} legal entity location rows...'.format(
            len(lel_bulk)))
        try:
            Location.objects.bulk_create(lel_bulk)
        except IntegrityError:
            logger.info('!!! DUPLICATES FOUND. Continuing... ')

        logger.info(
            'Getting place of performance objects for {} rows...'.format(
                len(award_financial_assistance_data)))
        for index, row in enumerate(award_financial_assistance_data, 1):

            # Place of Performance flag is true for PoP
            pop_location = get_or_create_location(
                place_of_performance_field_map,
                row, {"place_of_performance_flag": True},
                save=False)

            pop_bulk.append(pop_location)

        logger.info('Bulk creating {} place of performance rows...'.format(
            len(pop_bulk)))
        try:
            Location.objects.bulk_create(pop_bulk)
        except IntegrityError:
            logger.info('!!! DUPLICATES FOUND. Continuing... ')

        logger.info('Getting legal entity objects for {} rows...'.format(
            len(award_financial_assistance_data)))
        for index, row in enumerate(award_financial_assistance_data, 1):

            recipient_name = row.get('awardee_or_recipient_legal', '')

            legal_entity = LegalEntity.objects.filter(
                recipient_unique_id=row['awardee_or_recipient_uniqu'],
                recipient_name=recipient_name).first()

            if legal_entity is None:
                legal_entity = LegalEntity(
                    recipient_unique_id=row['awardee_or_recipient_uniqu'],
                    recipient_name=recipient_name)
                legal_entity_value_map = {
                    "location": lel_bulk[index - 1],
                }
                legal_entity = load_data_into_model(
                    legal_entity,
                    row,
                    value_map=legal_entity_value_map,
                    save=False)

            legal_entity_bulk.append(legal_entity)

        logger.info('Bulk creating {} legal entity rows...'.format(
            len(legal_entity_bulk)))
        try:
            LegalEntity.objects.bulk_create(legal_entity_bulk)
        except IntegrityError:
            logger.info('!!! DUPLICATES FOUND. Continuing... ')

        awarding_agency_list = []
        funding_agency_list = []

        logger.info('Getting award objects for {} rows...'.format(
            len(award_financial_assistance_data)))
        for index, row in enumerate(award_financial_assistance_data, 1):
            # If awarding toptier agency code (aka CGAC) is not supplied on the D2 record,
            # use the sub tier code to look it up. This code assumes that all incoming
            # records will supply an awarding subtier agency code
            if row['awarding_agency_code'] is None or len(
                    row['awarding_agency_code'].strip()) < 1:
                awarding_subtier_agency_id = subtier_agency_map[
                    row["awarding_sub_tier_agency_c"]]
                awarding_toptier_agency_id = subtier_to_agency_map[
                    awarding_subtier_agency_id]['toptier_agency_id']
                awarding_cgac_code = toptier_agency_map[
                    awarding_toptier_agency_id]
                row['awarding_agency_code'] = awarding_cgac_code

            # If funding toptier agency code (aka CGAC) is empty, try using the sub
            # tier funding code to look it up. Unlike the awarding agency, we can't
            # assume that the funding agency subtier code will always be present.
            if row['funding_agency_code'] is None or len(
                    row['funding_agency_code'].strip()) < 1:
                funding_subtier_agency_id = subtier_agency_map.get(
                    row["funding_sub_tier_agency_co"])
                if funding_subtier_agency_id is not None:
                    funding_toptier_agency_id = \
                        subtier_to_agency_map[funding_subtier_agency_id]['toptier_agency_id']
                    funding_cgac_code = toptier_agency_map[
                        funding_toptier_agency_id]
                else:
                    funding_cgac_code = None
                row['funding_agency_code'] = funding_cgac_code

            # Find the award that this award transaction belongs to. If it doesn't exist, create it.
            awarding_agency = Agency.get_by_toptier_subtier(
                row['awarding_agency_code'], row["awarding_sub_tier_agency_c"])
            funding_agency = Agency.get_by_toptier_subtier(
                row['funding_agency_code'], row["funding_sub_tier_agency_co"])

            awarding_agency_list.append(awarding_agency)
            funding_agency_list.append(funding_agency)

            # award.save() is called in Award.get_or_create_summary_award by default
            created, award = Award.get_or_create_summary_award(
                awarding_agency=awarding_agency,
                fain=row.get('fain'),
                uri=row.get('uri'),
                save=False)

            award_bulk.append(award)
            award_update_id_list.append(award.id)

        logger.info('Bulk creating {} award rows...'.format(len(award_bulk)))
        try:
            Award.objects.bulk_create(award_bulk)
        except IntegrityError:
            logger.info('!!! DUPLICATES FOUND. Continuing... ')

        logger.info('Getting transaction_normalized for {} rows...'.format(
            len(award_financial_assistance_data)))
        for index, row in enumerate(award_financial_assistance_data, 1):

            parent_txn_value_map = {
                "award": award_bulk[index - 1],
                "awarding_agency": awarding_agency_list[index - 1],
                "funding_agency": funding_agency_list[index - 1],
                "recipient": legal_entity_bulk[index - 1],
                "place_of_performance": pop_bulk[index - 1],
                "period_of_performance_start_date": format_date(row['period_of_performance_star']),
                "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
                "action_date": format_date(row['action_date']),
            }

            transaction_dict = load_data_into_model(
                TransactionNormalized(),  # thrown away
                row,
                field_map=fad_field_map,
                value_map=parent_txn_value_map,
                as_dict=True)

            transaction_normalized = TransactionNormalized.get_or_create_transaction(
                **transaction_dict)
            transaction_normalized.fiscal_year = fy(
                transaction_normalized.action_date)
            transaction_normalized_bulk.append(transaction_normalized)

        logger.info('Bulk creating {} TransactionNormalized rows...'.format(
            len(transaction_normalized_bulk)))
        try:
            TransactionNormalized.objects.bulk_create(
                transaction_normalized_bulk)
        except IntegrityError:
            logger.info(
                'Tried and failed to insert duplicate transaction_normalized row. Continuing... '
            )

        for index, row in enumerate(award_financial_assistance_data, 1):
            financial_assistance_data = load_data_into_model(
                TransactionFABS(),  # thrown away
                row,
                as_dict=True)

            transaction_assistance = TransactionFABS(
                transaction=transaction_normalized_bulk[index - 1],
                **financial_assistance_data)
            transaction_assistance_bulk.append(transaction_assistance)

        logger.info('Bulk creating TransactionFABS rows...')
        try:
            TransactionFABS.objects.bulk_create(transaction_assistance_bulk)
        except IntegrityError:
            logger.info('!!! DUPLICATES FOUND. Continuing... ')
Example #7
def insert_new_fabs(to_insert):
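    # This revision relies on the broker-supplied unique_award_key for the
    # summary Award and upserts transactions keyed on afa_generated_unique;
    # it returns the ids of the Awards it created or updated.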
    fabs_normalized_field_map = {
        "type": "assistance_type",
        "description": "award_description",
        "funding_amount": "total_funding_amount",
    }

    fabs_field_map = {
        "officer_1_name": "high_comp_officer1_full_na",
        "officer_1_amount": "high_comp_officer1_amount",
        "officer_2_name": "high_comp_officer2_full_na",
        "officer_2_amount": "high_comp_officer2_amount",
        "officer_3_name": "high_comp_officer3_full_na",
        "officer_3_amount": "high_comp_officer3_amount",
        "officer_4_name": "high_comp_officer4_full_na",
        "officer_4_amount": "high_comp_officer4_amount",
        "officer_5_name": "high_comp_officer5_full_na",
        "officer_5_amount": "high_comp_officer5_amount",
    }

    update_award_ids = []
    for row in to_insert:
        upper_case_dict_values(row)

        # Find the toptier awards from the subtier awards
        awarding_agency = Agency.get_by_subtier_only(
            row["awarding_sub_tier_agency_c"])
        funding_agency = Agency.get_by_subtier_only(
            row["funding_sub_tier_agency_co"])

        # Create the summary Award
        (created, award) = Award.get_or_create_summary_award(
            generated_unique_award_id=row["unique_award_key"],
            fain=row["fain"],
            uri=row["uri"],
            record_type=row["record_type"],
        )
        award.save()

        # Append row to list of Awards updated
        update_award_ids.append(award.id)

        try:
            last_mod_date = datetime.strptime(str(row["modified_at"]),
                                              "%Y-%m-%d %H:%M:%S.%f").date()
        except ValueError:
            last_mod_date = datetime.strptime(str(row["modified_at"]),
                                              "%Y-%m-%d %H:%M:%S").date()

        parent_txn_value_map = {
            "award": award,
            "awarding_agency": awarding_agency,
            "funding_agency": funding_agency,
            "period_of_performance_start_date": format_date(row["period_of_performance_star"]),
            "period_of_performance_current_end_date": format_date(row["period_of_performance_curr"]),
            "action_date": format_date(row["action_date"]),
            "last_modified_date": last_mod_date,
            "type_description": row["assistance_type_desc"],
            "transaction_unique_id": row["afa_generated_unique"],
            "business_categories": get_business_categories(row=row, data_type="fabs"),
        }

        transaction_normalized_dict = load_data_into_model(
            TransactionNormalized(),  # thrown away
            row,
            field_map=fabs_normalized_field_map,
            value_map=parent_txn_value_map,
            as_dict=True,
        )

        financial_assistance_data = load_data_into_model(
            TransactionFABS(),
            row,
            field_map=fabs_field_map,
            as_dict=True  # thrown away
        )

        # Hack to cut back on the number of warnings dumped to the log.
        financial_assistance_data["updated_at"] = cast_datetime_to_utc(
            financial_assistance_data["updated_at"])
        financial_assistance_data["created_at"] = cast_datetime_to_utc(
            financial_assistance_data["created_at"])
        financial_assistance_data["modified_at"] = cast_datetime_to_utc(
            financial_assistance_data["modified_at"])

        afa_generated_unique = financial_assistance_data[
            "afa_generated_unique"]
        unique_fabs = TransactionFABS.objects.filter(
            afa_generated_unique=afa_generated_unique)

        if unique_fabs.first():
            transaction_normalized_dict["update_date"] = datetime.now(
                timezone.utc)
            transaction_normalized_dict["fiscal_year"] = fy(
                transaction_normalized_dict["action_date"])

            # Update TransactionNormalized
            TransactionNormalized.objects.filter(
                id=unique_fabs.first().transaction.id).update(
                    **transaction_normalized_dict)

            # Update TransactionFABS
            unique_fabs.update(**financial_assistance_data)
        else:
            # Create TransactionNormalized
            transaction_normalized = TransactionNormalized(
                **transaction_normalized_dict)
            transaction_normalized.save()

            # Create TransactionFABS
            transaction_fabs = TransactionFABS(
                transaction=transaction_normalized,
                **financial_assistance_data)
            transaction_fabs.save()

    return update_award_ids
def insert_new_fabs(to_insert):
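    # Like the variant above, but this revision also creates the recipient
    # LegalEntityLocation/LegalEntity and place-of-performance Location for
    # each row before updating or inserting the transaction records.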
    place_of_performance_field_map = {
        "location_country_code": "place_of_perform_country_c",
        "country_name": "place_of_perform_country_n",
        "state_code": "place_of_perfor_state_code",
        "state_name": "place_of_perform_state_nam",
        "city_name": "place_of_performance_city",
        "county_name": "place_of_perform_county_na",
        "county_code": "place_of_perform_county_co",
        "foreign_location_description": "place_of_performance_forei",
        "zip_4a": "place_of_performance_zip4a",
        "congressional_code": "place_of_performance_congr",
        "performance_code": "place_of_performance_code",
        "zip_last4": "place_of_perform_zip_last4",
        "zip5": "place_of_performance_zip5",
    }

    legal_entity_location_field_map = {
        "location_country_code": "legal_entity_country_code",
        "country_name": "legal_entity_country_name",
        "state_code": "legal_entity_state_code",
        "state_name": "legal_entity_state_name",
        "city_name": "legal_entity_city_name",
        "city_code": "legal_entity_city_code",
        "county_name": "legal_entity_county_name",
        "county_code": "legal_entity_county_code",
        "address_line1": "legal_entity_address_line1",
        "address_line2": "legal_entity_address_line2",
        "address_line3": "legal_entity_address_line3",
        "foreign_location_description": "legal_entity_foreign_descr",
        "congressional_code": "legal_entity_congressional",
        "zip_last4": "legal_entity_zip_last4",
        "zip5": "legal_entity_zip5",
        "foreign_postal_code": "legal_entity_foreign_posta",
        "foreign_province": "legal_entity_foreign_provi",
        "foreign_city_name": "legal_entity_foreign_city",
    }

    fabs_normalized_field_map = {
        "type": "assistance_type",
        "description": "award_description",
        "funding_amount": "total_funding_amount",
    }

    fabs_field_map = {
        "officer_1_name": "high_comp_officer1_full_na",
        "officer_1_amount": "high_comp_officer1_amount",
        "officer_2_name": "high_comp_officer2_full_na",
        "officer_2_amount": "high_comp_officer2_amount",
        "officer_3_name": "high_comp_officer3_full_na",
        "officer_3_amount": "high_comp_officer3_amount",
        "officer_4_name": "high_comp_officer4_full_na",
        "officer_4_amount": "high_comp_officer4_amount",
        "officer_5_name": "high_comp_officer5_full_na",
        "officer_5_amount": "high_comp_officer5_amount",
    }

    update_award_ids = []
    for row in to_insert:
        upper_case_dict_values(row)

        # Create new LegalEntityLocation and LegalEntity from the row data
        legal_entity_location = create_location(
            legal_entity_location_field_map, row, {"recipient_flag": True})
        recipient_name = row['awardee_or_recipient_legal']
        legal_entity = LegalEntity.objects.create(
            recipient_unique_id=row['awardee_or_recipient_uniqu'],
            recipient_name=recipient_name
            if recipient_name is not None else "",
            parent_recipient_unique_id=row['ultimate_parent_unique_ide'],
        )
        legal_entity_value_map = {
            "location": legal_entity_location,
            "business_categories": get_business_categories(row=row, data_type='fabs'),
            "business_types_description": row['business_types_desc'],
        }
        legal_entity = load_data_into_model(legal_entity,
                                            row,
                                            value_map=legal_entity_value_map,
                                            save=True)

        # Create the place of performance location
        pop_location = create_location(place_of_performance_field_map, row,
                                       {"place_of_performance_flag": True})

        # Find the toptier awards from the subtier awards
        awarding_agency = Agency.get_by_subtier_only(
            row["awarding_sub_tier_agency_c"])
        funding_agency = Agency.get_by_subtier_only(
            row["funding_sub_tier_agency_co"])

        # Create the summary Award
        (created, award) = Award.get_or_create_summary_award(
            generated_unique_award_id=row['unique_award_key'],
            fain=row['fain'],
            uri=row['uri'],
            record_type=row['record_type'],
        )
        award.save()

        # Append row to list of Awards updated
        update_award_ids.append(award.id)

        try:
            last_mod_date = datetime.strptime(str(row['modified_at']),
                                              "%Y-%m-%d %H:%M:%S.%f").date()
        except ValueError:
            last_mod_date = datetime.strptime(str(row['modified_at']),
                                              "%Y-%m-%d %H:%M:%S").date()

        parent_txn_value_map = {
            "award": award,
            "awarding_agency": awarding_agency,
            "funding_agency": funding_agency,
            "recipient": legal_entity,
            "place_of_performance": pop_location,
            "period_of_performance_start_date": format_date(row['period_of_performance_star']),
            "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
            "action_date": format_date(row['action_date']),
            "last_modified_date": last_mod_date,
            "type_description": row['assistance_type_desc'],
            "transaction_unique_id": row['afa_generated_unique'],
        }

        transaction_normalized_dict = load_data_into_model(
            TransactionNormalized(),  # thrown away
            row,
            field_map=fabs_normalized_field_map,
            value_map=parent_txn_value_map,
            as_dict=True,
        )

        financial_assistance_data = load_data_into_model(
            TransactionFABS(),  # thrown away
            row,
            field_map=fabs_field_map,
            as_dict=True)

        # Hack to cut back on the number of warnings dumped to the log.
        financial_assistance_data['updated_at'] = cast_datetime_to_utc(
            financial_assistance_data['updated_at'])
        financial_assistance_data['created_at'] = cast_datetime_to_utc(
            financial_assistance_data['created_at'])
        financial_assistance_data['modified_at'] = cast_datetime_to_utc(
            financial_assistance_data['modified_at'])

        afa_generated_unique = financial_assistance_data[
            'afa_generated_unique']
        unique_fabs = TransactionFABS.objects.filter(
            afa_generated_unique=afa_generated_unique)

        if unique_fabs.first():
            transaction_normalized_dict["update_date"] = datetime.now(
                timezone.utc)
            transaction_normalized_dict["fiscal_year"] = fy(
                transaction_normalized_dict["action_date"])

            # Update TransactionNormalized
            TransactionNormalized.objects.filter(
                id=unique_fabs.first().transaction.id).update(
                    **transaction_normalized_dict)

            # Update TransactionFABS
            unique_fabs.update(**financial_assistance_data)
        else:
            # Create TransactionNormalized
            transaction_normalized = TransactionNormalized(
                **transaction_normalized_dict)
            transaction_normalized.save()

            # Create TransactionFABS
            transaction_fabs = TransactionFABS(
                transaction=transaction_normalized,
                **financial_assistance_data)
            transaction_fabs.save()

        # Update legal entity to map back to transaction
        legal_entity.transaction_unique_id = afa_generated_unique
        legal_entity.save()

    return update_award_ids
Example #9
def load_file_d2(submission_attributes,
                 award_financial_assistance_data,
                 db_cursor,
                 quick,
                 row_preprocessor=no_preprocessing):
    """
    Process and load file D2 broker data (financial assistance award txns).
    """

    d_start_time = time.time()

    if quick:
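        # "Quick" mode skips the row-by-row ORM path: it calls
        # setup_broker_fdw() (presumably a foreign data wrapper to the Broker
        # database) and loads file D2 with a single SQL script.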
        setup_broker_fdw()

        parameters = {
            'broker_submission_id': submission_attributes.broker_submission_id
        }
        run_sql_file('usaspending_api/etl/management/load_file_d2.sql',
                     parameters)
        logger.info('\n\n\n\nFile D2 time elapsed: {}'.format(time.time() -
                                                              d_start_time))
        return

    legal_entity_location_field_map = {
        "address_line1": "legal_entity_address_line1",
        "address_line2": "legal_entity_address_line2",
        "address_line3": "legal_entity_address_line3",
        "city_code": "legal_entity_city_code",
        "city_name": "legal_entity_city_name",
        "congressional_code": "legal_entity_congressional",
        "county_code": "legal_entity_county_code",
        "county_name": "legal_entity_county_name",
        "foreign_city_name": "legal_entity_foreign_city",
        "foreign_postal_code": "legal_entity_foreign_posta",
        "foreign_province": "legal_entity_foreign_provi",
        "state_code": "legal_entity_state_code",
        "state_name": "legal_entity_state_name",
        "zip5": "legal_entity_zip5",
        "zip_last4": "legal_entity_zip_last4",
        "location_country_code": "legal_entity_country_code"
    }

    place_of_performance_field_map = {
        "city_name": "place_of_performance_city",
        "performance_code": "place_of_performance_code",
        "congressional_code": "place_of_performance_congr",
        "county_name": "place_of_perform_county_na",
        "foreign_location_description": "place_of_performance_forei",
        "state_name": "place_of_perform_state_nam",
        "zip4": "place_of_performance_zip4a",
        "location_country_code": "place_of_perform_country_c"
    }

    legal_entity_location_value_map = {"recipient_flag": True}

    place_of_performance_value_map = {"place_of_performance_flag": True}

    fad_field_map = {
        "type": "assistance_type",
        "description": "award_description",
    }

    total_rows = len(award_financial_assistance_data)

    start_time = datetime.now()
    for index, row in enumerate(award_financial_assistance_data, 1):
        if not (index % 100):
            logger.info('D2 File Load: Loading row {} of {} ({})'.format(
                str(index), str(total_rows),
                datetime.now() - start_time))

        row = row_preprocessor(row)

        legal_entity_location, created = get_or_create_location(
            legal_entity_location_field_map, row,
            legal_entity_location_value_map)

        recipient_name = row['awardee_or_recipient_legal']
        if recipient_name is None:
            recipient_name = ""

        # Create the legal entity if it doesn't exist
        legal_entity, created = LegalEntity.objects.get_or_create(
            recipient_unique_id=row['awardee_or_recipient_uniqu'],
            recipient_name=recipient_name)

        if created:
            legal_entity_value_map = {
                "location": legal_entity_location,
            }
            legal_entity = load_data_into_model(
                legal_entity, row, value_map=legal_entity_value_map, save=True)

        # Create the place of performance location
        pop_location, created = get_or_create_location(
            place_of_performance_field_map, row,
            place_of_performance_value_map)

        # If awarding toptier agency code (aka CGAC) is not supplied on the D2 record,
        # use the sub tier code to look it up. This code assumes that all incoming
        # records will supply an awarding subtier agency code
        if row['awarding_agency_code'] is None or len(
                row['awarding_agency_code'].strip()) < 1:
            row['awarding_agency_code'] = Agency.get_by_subtier(
                row["awarding_sub_tier_agency_c"]).toptier_agency.cgac_code
        # If funding toptier agency code (aka CGAC) is empty, try using the sub
        # tier funding code to look it up. Unlike the awarding agency, we can't
        # assume that the funding agency subtier code will always be present.
        if row['funding_agency_code'] is None or len(
                row['funding_agency_code'].strip()) < 1:
            funding_agency = Agency.get_by_subtier(
                row["funding_sub_tier_agency_co"])
            row['funding_agency_code'] = (
                funding_agency.toptier_agency.cgac_code
                if funding_agency is not None else None)

        # Find the award that this award transaction belongs to. If it doesn't exist, create it.
        awarding_agency = Agency.get_by_toptier_subtier(
            row['awarding_agency_code'], row["awarding_sub_tier_agency_c"])
        created, award = Award.get_or_create_summary_award(
            awarding_agency=awarding_agency,
            piid=row.get('piid'),
            fain=row.get('fain'),
            uri=row.get('uri'))
        award.save()

        award_update_id_list.append(award.id)

        parent_txn_value_map = {
            "award": award,
            "awarding_agency": awarding_agency,
            "funding_agency": Agency.get_by_toptier_subtier(row['funding_agency_code'],
                                                            row["funding_sub_tier_agency_co"]),
            "recipient": legal_entity,
            "place_of_performance": pop_location,
            "submission": submission_attributes,
            "period_of_performance_start_date": format_date(row['period_of_performance_star']),
            "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
            "action_date": format_date(row['action_date']),
        }

        transaction_dict = load_data_into_model(
            TransactionNormalized(),  # thrown away
            row,
            field_map=fad_field_map,
            value_map=parent_txn_value_map,
            as_dict=True)

        transaction = TransactionNormalized.get_or_create_transaction(
            **transaction_dict)
        transaction.save()

        fad_value_map = {
            "submission": submission_attributes,
            "cfda": Cfda.objects.filter(program_number=row['cfda_number']).first(),
            "reporting_period_start": submission_attributes.reporting_period_start,
            "reporting_period_end": submission_attributes.reporting_period_end,
            "period_of_performance_start_date": format_date(row['period_of_performance_star']),
            "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
        }

        financial_assistance_data = load_data_into_model(
            TransactionFABS(),  # thrown away
            row,
            field_map=fad_field_map,
            value_map=fad_value_map,
            as_dict=True)

        transaction_assistance = TransactionFABS.get_or_create_2(
            transaction=transaction, **financial_assistance_data)
        transaction_assistance.save()

    logger.info('\n\n\n\nFile D2 time elapsed: {}'.format(time.time() -
                                                          d_start_time))
Example #10
    def insert_new_fabs(self, to_insert, total_rows):
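        # Older revision: the unique award id is generated here from the
        # awarding subtier agency code, fain, and uri ("ASST_AW_..."), and
        # only record types 1 and 2 are accepted; other revisions in this
        # listing also accept record type 3 or use the broker's
        # unique_award_key.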
        logger.info('Starting insertion of new FABS data')

        place_of_performance_field_map = {
            "location_country_code": "place_of_perform_country_c",
            "country_name": "place_of_perform_country_n",
            "state_code": "place_of_perfor_state_code",
            "state_name": "place_of_perform_state_nam",
            "city_name": "place_of_performance_city",
            "county_name": "place_of_perform_county_na",
            "county_code": "place_of_perform_county_co",
            "foreign_location_description": "place_of_performance_forei",
            "zip_4a": "place_of_performance_zip4a",
            "congressional_code": "place_of_performance_congr",
            "performance_code": "place_of_performance_code",
            "zip_last4": "place_of_perform_zip_last4",
            "zip5": "place_of_performance_zip5"
        }

        legal_entity_location_field_map = {
            "location_country_code": "legal_entity_country_code",
            "country_name": "legal_entity_country_name",
            "state_code": "legal_entity_state_code",
            "state_name": "legal_entity_state_name",
            "city_name": "legal_entity_city_name",
            "city_code": "legal_entity_city_code",
            "county_name": "legal_entity_county_name",
            "county_code": "legal_entity_county_code",
            "address_line1": "legal_entity_address_line1",
            "address_line2": "legal_entity_address_line2",
            "address_line3": "legal_entity_address_line3",
            "foreign_location_description": "legal_entity_foreign_descr",
            "congressional_code": "legal_entity_congressional",
            "zip_last4": "legal_entity_zip_last4",
            "zip5": "legal_entity_zip5",
            "foreign_postal_code": "legal_entity_foreign_posta",
            "foreign_province": "legal_entity_foreign_provi",
            "foreign_city_name": "legal_entity_foreign_city"
        }

        start_time = datetime.now()

        for index, row in enumerate(to_insert, 1):
            if not (index % 1000):
                logger.info('Inserting Stale FABS: Inserting row {} of {} ({})'.format(str(index), str(total_rows),
                                                                                       datetime.now() - start_time))

            for key in row:
                if isinstance(row[key], str):
                    row[key] = row[key].upper()

            # Create new LegalEntityLocation and LegalEntity from the row data
            legal_entity_location = create_location(legal_entity_location_field_map, row, {"recipient_flag": True})
            recipient_name = row['awardee_or_recipient_legal']
            legal_entity = LegalEntity.objects.create(
                recipient_unique_id=row['awardee_or_recipient_uniqu'],
                recipient_name=recipient_name if recipient_name is not None else ""
            )
            legal_entity_value_map = {
                "location": legal_entity_location,
                "business_categories": get_business_categories(row=row, data_type='fabs'),
                "business_types_description": get_business_type_description(row['business_types'])
            }
            legal_entity = load_data_into_model(legal_entity, row, value_map=legal_entity_value_map, save=True)

            # Create the place of performance location
            pop_location = create_location(place_of_performance_field_map, row, {"place_of_performance_flag": True})

            # Find the toptier awards from the subtier awards
            awarding_agency = Agency.get_by_subtier_only(row["awarding_sub_tier_agency_c"])
            funding_agency = Agency.get_by_subtier_only(row["funding_sub_tier_agency_co"])

            # Generate the unique Award ID
            # "ASST_AW_" + awarding_sub_tier_agency_c + fain + uri

            # this will raise an exception if the cast to an int fails, that's ok since we don't want to process
            # non-numeric record type values
            record_type_int = int(row['record_type'])
            if record_type_int == 1:
                uri = row['uri'] if row['uri'] else '-NONE-'
                fain = '-NONE-'
            elif record_type_int == 2:
                uri = '-NONE-'
                fain = row['fain'] if row['fain'] else '-NONE-'
            else:
                raise Exception('Invalid record type encountered for the following afa_generated_unique record: %s' %
                                row['afa_generated_unique'])

            generated_unique_id = 'ASST_AW_' +\
                (row['awarding_sub_tier_agency_c'] if row['awarding_sub_tier_agency_c'] else '-NONE-') + '_' + \
                fain + '_' + uri

            # Create the summary Award
            (created, award) = Award.get_or_create_summary_award(generated_unique_award_id=generated_unique_id,
                                                                 fain=row['fain'],
                                                                 uri=row['uri'],
                                                                 record_type=row['record_type'])
            award.save()

            # Append row to list of Awards updated
            award_update_id_list.append(award.id)

            try:
                last_mod_date = datetime.strptime(str(row['modified_at']), "%Y-%m-%d %H:%M:%S.%f").date()
            except ValueError:
                last_mod_date = datetime.strptime(str(row['modified_at']), "%Y-%m-%d %H:%M:%S").date()
            parent_txn_value_map = {
                "award": award,
                "awarding_agency": awarding_agency,
                "funding_agency": funding_agency,
                "recipient": legal_entity,
                "place_of_performance": pop_location,
                "period_of_performance_start_date": format_date(row['period_of_performance_star']),
                "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
                "action_date": format_date(row['action_date']),
                "last_modified_date": last_mod_date,
                "type_description": get_assistance_type_description(row['assistance_type']),
                "transaction_unique_id": row['afa_generated_unique'],
                "generated_unique_award_id": generated_unique_id
            }

            fad_field_map = {
                "type": "assistance_type",
                "description": "award_description",
            }

            transaction_normalized_dict = load_data_into_model(
                TransactionNormalized(),  # thrown away
                row,
                field_map=fad_field_map,
                value_map=parent_txn_value_map,
                as_dict=True)

            financial_assistance_data = load_data_into_model(
                TransactionFABS(),  # thrown away
                row,
                as_dict=True)

            afa_generated_unique = financial_assistance_data['afa_generated_unique']
            unique_fabs = TransactionFABS.objects.filter(afa_generated_unique=afa_generated_unique)

            if unique_fabs.first():
                transaction_normalized_dict["update_date"] = datetime.utcnow()
                transaction_normalized_dict["fiscal_year"] = fy(transaction_normalized_dict["action_date"])

                # Update TransactionNormalized
                TransactionNormalized.objects.filter(id=unique_fabs.first().transaction.id).\
                    update(**transaction_normalized_dict)

                # Update TransactionFABS
                unique_fabs.update(**financial_assistance_data)
            else:
                # Create TransactionNormalized
                transaction = TransactionNormalized(**transaction_normalized_dict)
                transaction.save()

                # Create TransactionFABS
                transaction_fabs = TransactionFABS(transaction=transaction, **financial_assistance_data)
                transaction_fabs.save()
def insert_new_fabs(to_insert):
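    # Module-level revision of the loader: builds the recipient and
    # place-of-performance locations and derives the ASST_AW_ unique award id
    # from the record type before loading the transaction data.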
    place_of_performance_field_map = {
        "location_country_code": "place_of_perform_country_c",
        "country_name": "place_of_perform_country_n",
        "state_code": "place_of_perfor_state_code",
        "state_name": "place_of_perform_state_nam",
        "city_name": "place_of_performance_city",
        "county_name": "place_of_perform_county_na",
        "county_code": "place_of_perform_county_co",
        "foreign_location_description": "place_of_performance_forei",
        "zip_4a": "place_of_performance_zip4a",
        "congressional_code": "place_of_performance_congr",
        "performance_code": "place_of_performance_code",
        "zip_last4": "place_of_perform_zip_last4",
        "zip5": "place_of_performance_zip5",
    }

    legal_entity_location_field_map = {
        "location_country_code": "legal_entity_country_code",
        "country_name": "legal_entity_country_name",
        "state_code": "legal_entity_state_code",
        "state_name": "legal_entity_state_name",
        "city_name": "legal_entity_city_name",
        "city_code": "legal_entity_city_code",
        "county_name": "legal_entity_county_name",
        "county_code": "legal_entity_county_code",
        "address_line1": "legal_entity_address_line1",
        "address_line2": "legal_entity_address_line2",
        "address_line3": "legal_entity_address_line3",
        "foreign_location_description": "legal_entity_foreign_descr",
        "congressional_code": "legal_entity_congressional",
        "zip_last4": "legal_entity_zip_last4",
        "zip5": "legal_entity_zip5",
        "foreign_postal_code": "legal_entity_foreign_posta",
        "foreign_province": "legal_entity_foreign_provi",
        "foreign_city_name": "legal_entity_foreign_city",
    }

    update_award_ids = []
    for row in to_insert:
        upper_case_dict_values(row)

        # Create new LegalEntityLocation and LegalEntity from the row data
        legal_entity_location = create_location(legal_entity_location_field_map, row, {"recipient_flag": True})
        recipient_name = row['awardee_or_recipient_legal']
        legal_entity = LegalEntity.objects.create(
            recipient_unique_id=row['awardee_or_recipient_uniqu'],
            recipient_name=recipient_name if recipient_name is not None else "",
            parent_recipient_unique_id=row['ultimate_parent_unique_ide'],
        )
        legal_entity_value_map = {
            "location": legal_entity_location,
            "business_categories": get_business_categories(row=row, data_type='fabs'),
            "business_types_description": row['business_types_desc'],
        }
        legal_entity = load_data_into_model(legal_entity, row, value_map=legal_entity_value_map, save=True)

        # Create the place of performance location
        pop_location = create_location(place_of_performance_field_map, row, {"place_of_performance_flag": True})

        # Find the toptier awards from the subtier awards
        awarding_agency = Agency.get_by_subtier_only(row["awarding_sub_tier_agency_c"])
        funding_agency = Agency.get_by_subtier_only(row["funding_sub_tier_agency_co"])

        # Generate the unique Award ID
        # "ASST_AW_" + awarding_sub_tier_agency_c + fain + uri

        # This will raise an exception if the cast to int fails; that's OK because we don't
        # want to process non-numeric record type values.
        record_type_int = int(row['record_type'])
        if record_type_int == 1:
            uri = row['uri'] if row['uri'] else '-NONE-'
            fain = '-NONE-'
        elif record_type_int in (2, 3):
            uri = '-NONE-'
            fain = row['fain'] if row['fain'] else '-NONE-'
        else:
            msg = "Invalid record type encountered for the following afa_generated_unique record: {}"
            raise Exception(msg.format(row['afa_generated_unique']))

        astac = row["awarding_sub_tier_agency_c"] if row["awarding_sub_tier_agency_c"] else "-NONE-"
        generated_unique_id = "ASST_AW_{}_{}_{}".format(astac, fain, uri)
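        # For illustration (hypothetical values): a record_type 2 row with
        # awarding_sub_tier_agency_c="1234" and fain="ABC123" (and no uri) yields
        # generated_unique_id == "ASST_AW_1234_ABC123_-NONE-".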

        # Create the summary Award
        (created, award) = Award.get_or_create_summary_award(
            generated_unique_award_id=generated_unique_id,
            fain=row['fain'],
            uri=row['uri'],
            record_type=row['record_type'],
        )
        award.save()

        # Append row to list of Awards updated
        update_award_ids.append(award.id)

        try:
            last_mod_date = datetime.strptime(str(row['modified_at']), "%Y-%m-%d %H:%M:%S.%f").date()
        except ValueError:
            last_mod_date = datetime.strptime(str(row['modified_at']), "%Y-%m-%d %H:%M:%S").date()
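        # For example (hypothetical value), both "2017-06-01 12:00:00.123456" and
        # "2017-06-01 12:00:00" parse here to date(2017, 6, 1).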
        parent_txn_value_map = {
            "award": award,
            "awarding_agency": awarding_agency,
            "funding_agency": funding_agency,
            "recipient": legal_entity,
            "place_of_performance": pop_location,
            "period_of_performance_start_date": format_date(row['period_of_performance_star']),
            "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
            "action_date": format_date(row['action_date']),
            "last_modified_date": last_mod_date,
            "type_description": row['assistance_type_desc'],
            "transaction_unique_id": row['afa_generated_unique'],
            "generated_unique_award_id": generated_unique_id,
        }

        fad_field_map = {
            "type": "assistance_type",
            "description": "award_description",
            "funding_amount": "total_funding_amount",
        }

        transaction_normalized_dict = load_data_into_model(
            TransactionNormalized(),  # thrown away
            row,
            field_map=fad_field_map,
            value_map=parent_txn_value_map,
            as_dict=True,
        )

        financial_assistance_data = load_data_into_model(TransactionFABS(), row, as_dict=True)  # thrown away

        # Hack to cut back on the number of warnings dumped to the log.
        financial_assistance_data['updated_at'] = cast_datetime_to_utc(financial_assistance_data['updated_at'])
        financial_assistance_data['created_at'] = cast_datetime_to_utc(financial_assistance_data['created_at'])
        financial_assistance_data['modified_at'] = cast_datetime_to_utc(financial_assistance_data['modified_at'])

        afa_generated_unique = financial_assistance_data['afa_generated_unique']
        unique_fabs = TransactionFABS.objects.filter(afa_generated_unique=afa_generated_unique)

        if unique_fabs.first():
            transaction_normalized_dict["update_date"] = datetime.now(timezone.utc)
            transaction_normalized_dict["fiscal_year"] = fy(transaction_normalized_dict["action_date"])

            # Update TransactionNormalized
            TransactionNormalized.objects.filter(id=unique_fabs.first().transaction.id).update(
                **transaction_normalized_dict
            )

            # Update TransactionFABS
            unique_fabs.update(**financial_assistance_data)
        else:
            # Create TransactionNormalized
            transaction_normalized = TransactionNormalized(**transaction_normalized_dict)
            transaction_normalized.save()

            # Create TransactionFABS
            transaction_fabs = TransactionFABS(transaction=transaction_normalized, **financial_assistance_data)
            transaction_fabs.save()

        # Update legal entity to map back to transaction
        legal_entity.transaction_unique_id = afa_generated_unique
        legal_entity.save()

    return update_award_ids
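
A minimal call-site sketch (hypothetical): insert_new_fabs expects a list of Broker row dicts keyed by the published_award_financial_assistance column names. Only a handful of representative columns are shown below; a real row would carry every column the loader reads.

# Hypothetical call site for insert_new_fabs; not part of the original loader.
# A real Broker row would also include created_at, updated_at, the officer fields,
# and the remaining location columns, omitted here for brevity.
sample_row = {
    "afa_generated_unique": "1234_-NONE-_ABC123_-NONE-_02",
    "awardee_or_recipient_legal": "Example Recipient",
    "awardee_or_recipient_uniqu": "123456789",
    "ultimate_parent_unique_ide": None,
    "business_types_desc": "Nonprofit",
    "awarding_sub_tier_agency_c": "1234",
    "funding_sub_tier_agency_co": "1234",
    "record_type": "2",
    "fain": "ABC123",
    "uri": None,
    "modified_at": "2017-06-01 12:00:00",
    "period_of_performance_star": "2017-01-01",
    "period_of_performance_curr": "2017-12-31",
    "action_date": "2017-01-15",
    "assistance_type_desc": "Project Grant",
}
updated_award_ids = insert_new_fabs([sample_row])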
    def update_transaction_assistance(db_cursor,
                                      fiscal_year=None,
                                      page=1,
                                      limit=500000):

        query = "SELECT * FROM published_award_financial_assistance"
        arguments = []

        if fiscal_year:
            # Only build the fiscal year bounds when a fiscal year was supplied;
            # computing them unconditionally would raise a TypeError for fiscal_year=None.
            fy_begin = "10/01/" + str(fiscal_year - 1)
            fy_end = "09/30/" + str(fiscal_year)
            if arguments:
                query += " AND"
            else:
                query += " WHERE"
            query += " action_date::Date BETWEEN %s AND %s"
            arguments += [fy_begin]
            arguments += [fy_end]
        query += " ORDER BY published_award_financial_assistance_id LIMIT %s OFFSET %s"
        arguments += [limit, (page - 1) * limit]

        logger.info("Executing query on Broker DB => " + query %
                    (arguments[0], arguments[1], arguments[2], arguments[3]))
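        # For illustration (hypothetical values): fiscal_year=2017, page=1, limit=500000
        # produces, once psycopg2 substitutes the parameters, effectively:
        #   SELECT * FROM published_award_financial_assistance
        #   WHERE action_date::Date BETWEEN '10/01/2016' AND '09/30/2017'
        #   ORDER BY published_award_financial_assistance_id LIMIT 500000 OFFSET 0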

        db_cursor.execute(query, arguments)

        logger.info("Running dictfetchall on db_cursor")
        award_financial_assistance_data = dictfetchall(db_cursor)

        fabs_normalized_field_map = {
            "type": "assistance_type",
            "description": "award_description",
            "funding_amount": "total_funding_amount",
        }

        fabs_field_map = {
            "officer_1_name": "high_comp_officer1_full_na",
            "officer_1_amount": "high_comp_officer1_amount",
            "officer_2_name": "high_comp_officer2_full_na",
            "officer_2_amount": "high_comp_officer2_amount",
            "officer_3_name": "high_comp_officer3_full_na",
            "officer_3_amount": "high_comp_officer3_amount",
            "officer_4_name": "high_comp_officer4_full_na",
            "officer_4_amount": "high_comp_officer4_amount",
            "officer_5_name": "high_comp_officer5_full_na",
            "officer_5_amount": "high_comp_officer5_amount",
        }

        logger.info("Getting total rows")

        total_rows = len(award_financial_assistance_data)  # - rows_loaded

        logger.info("Processing " + str(total_rows) +
                    " rows of assistance data")

        # ROW ITERATION STARTS HERE

        award_bulk = []

        transaction_assistance_bulk = []
        transaction_normalized_bulk = []

        awarding_agency_list = []
        funding_agency_list = []

        logger.info("Getting award objects for {} rows...".format(
            len(award_financial_assistance_data)))
        for index, row in enumerate(award_financial_assistance_data, 1):
            # If awarding toptier agency code (aka CGAC) is not supplied on the D2 record,
            # use the sub tier code to look it up. This code assumes that all incoming
            # records will supply an awarding subtier agency code
            if row["awarding_agency_code"] is None or len(
                    row["awarding_agency_code"].strip()) < 1:
                awarding_subtier_agency_id = subtier_agency_map[
                    row["awarding_sub_tier_agency_c"]]
                awarding_toptier_agency_id = subtier_to_agency_map[
                    awarding_subtier_agency_id]["toptier_agency_id"]
                awarding_toptier_code = toptier_agency_map[
                    awarding_toptier_agency_id]
                row["awarding_agency_code"] = awarding_toptier_code

            # If funding toptier agency code (aka CGAC) is empty, try using the sub
            # tier funding code to look it up. Unlike the awarding agency, we can't
            # assume that the funding agency subtier code will always be present.
            if row["funding_agency_code"] is None or len(
                    row["funding_agency_code"].strip()) < 1:
                funding_subtier_agency_id = subtier_agency_map.get(
                    row["funding_sub_tier_agency_co"])
                if funding_subtier_agency_id is not None:
                    funding_toptier_agency_id = subtier_to_agency_map[
                        funding_subtier_agency_id]["toptier_agency_id"]
                    funding_toptier_code = toptier_agency_map[
                        funding_toptier_agency_id]
                else:
                    funding_toptier_code = None
                row["funding_agency_code"] = funding_toptier_code

            # Find the award that this award transaction belongs to. If it doesn't exist, create it.
            awarding_agency = Agency.get_by_toptier_subtier(
                row["awarding_agency_code"], row["awarding_sub_tier_agency_c"])
            funding_agency = Agency.get_by_toptier_subtier(
                row["funding_agency_code"], row["funding_sub_tier_agency_co"])

            awarding_agency_list.append(awarding_agency)
            funding_agency_list.append(funding_agency)

            # award.save() is called in Award.get_or_create_summary_award by default
            created, award = Award.get_or_create_summary_award(
                awarding_agency=awarding_agency,
                fain=row.get("fain"),
                uri=row.get("uri"),
                generated_unique_award_id=row.get("unique_award_key"),
                save=False,
            )

            award_bulk.append(award)
            award_update_id_list.append(award.id)
            award_assistance_update_id_list.append(award.id)

        logger.info("Bulk creating {} award rows...".format(len(award_bulk)))
        try:
            Award.objects.bulk_create(award_bulk)
        except IntegrityError:
            logger.info("!!! DUPLICATES FOUND. Continuing... ")

        logger.info("Getting transaction_normalized for {} rows...".format(
            len(award_financial_assistance_data)))
        for index, row in enumerate(award_financial_assistance_data, 1):

            parent_txn_value_map = {
                "award": award_bulk[index - 1],
                "awarding_agency": awarding_agency_list[index - 1],
                "funding_agency": funding_agency_list[index - 1],
                "period_of_performance_start_date": format_date(row["period_of_performance_star"]),
                "period_of_performance_current_end_date": format_date(row["period_of_performance_curr"]),
                "action_date": format_date(row["action_date"]),
            }

            transaction_dict = load_data_into_model(
                TransactionNormalized(),  # thrown away
                row,
                field_map=fabs_normalized_field_map,
                value_map=parent_txn_value_map,
                as_dict=True,
            )

            transaction_normalized = TransactionNormalized.get_or_create_transaction(
                **transaction_dict)
            transaction_normalized.fiscal_year = fy(
                transaction_normalized.action_date)
            transaction_normalized_bulk.append(transaction_normalized)

        logger.info("Bulk creating {} TransactionNormalized rows...".format(
            len(transaction_normalized_bulk)))
        try:
            TransactionNormalized.objects.bulk_create(
                transaction_normalized_bulk)
        except IntegrityError:
            logger.info(
                "Tried and failed to insert duplicate transaction_normalized row. Continuing... "
            )

        for index, row in enumerate(award_financial_assistance_data, 1):
            financial_assistance_data = load_data_into_model(
                TransactionFABS(),  # thrown away
                row,
                field_map=fabs_field_map,
                as_dict=True,
            )

            transaction_assistance = TransactionFABS(
                transaction=transaction_normalized_bulk[index - 1],
                **financial_assistance_data)
            transaction_assistance_bulk.append(transaction_assistance)

        logger.info("Bulk creating TransactionFABS rows...")
        try:
            TransactionFABS.objects.bulk_create(transaction_assistance_bulk)
        except IntegrityError:
            logger.info("!!! DUPLICATES FOUND. Continuing... ")
def load_file_d2(
        submission_attributes, award_financial_assistance_data, db_cursor, quick, row_preprocessor=no_preprocessing
):
    """
    Process and load file D2 broker data (financial assistance award txns).
    """

    d_start_time = time.time()

    if quick:
        setup_broker_fdw()

        parameters = {'broker_submission_id': submission_attributes.broker_submission_id}
        run_sql_file('usaspending_api/etl/management/load_file_d2.sql', parameters)
        logger.info('\n\n\n\nFile D2 time elapsed: {}'.format(time.time() - d_start_time))
        return

    legal_entity_location_field_map = {
        "address_line1": "legal_entity_address_line1",
        "address_line2": "legal_entity_address_line2",
        "address_line3": "legal_entity_address_line3",
        "city_code": "legal_entity_city_code",
        "city_name": "legal_entity_city_name",
        "congressional_code": "legal_entity_congressional",
        "county_code": "legal_entity_county_code",
        "county_name": "legal_entity_county_name",
        "foreign_city_name": "legal_entity_foreign_city",
        "foreign_postal_code": "legal_entity_foreign_posta",
        "foreign_province": "legal_entity_foreign_provi",
        "state_code": "legal_entity_state_code",
        "state_name": "legal_entity_state_name",
        "zip5": "legal_entity_zip5",
        "zip_last4": "legal_entity_zip_last4",
        "location_country_code": "legal_entity_country_code"
    }

    place_of_performance_field_map = {
        "city_name": "place_of_performance_city",
        "performance_code": "place_of_performance_code",
        "congressional_code": "place_of_performance_congr",
        "county_name": "place_of_perform_county_na",
        "foreign_location_description": "place_of_performance_forei",
        "state_name": "place_of_perform_state_nam",
        "zip4": "place_of_performance_zip4a",
        "location_country_code": "place_of_perform_country_c",
    }

    legal_entity_location_value_map = {
        "recipient_flag": True
    }

    place_of_performance_value_map = {
        "place_of_performance_flag": True
    }

    fad_field_map = {
        "type": "assistance_type",
        "description": "award_description",
    }

    total_rows = len(award_financial_assistance_data)

    start_time = datetime.now()
    for index, row in enumerate(award_financial_assistance_data, 1):
        if not (index % 100):
            logger.info('D2 File Load: Loading row {} of {} ({})'.format(str(index),
                                                                         str(total_rows),
                                                                         datetime.now() - start_time))

        row = row_preprocessor(row)

        legal_entity_location, created = get_or_create_location(
            legal_entity_location_field_map, row, legal_entity_location_value_map
        )

        recipient_name = row['awardee_or_recipient_legal']
        if recipient_name is None:
            recipient_name = ""

        # Create the legal entity if it doesn't exist
        legal_entity, created = LegalEntity.objects.get_or_create(
            recipient_unique_id=row['awardee_or_recipient_uniqu'],
            recipient_name=recipient_name
        )

        if created:
            legal_entity_value_map = {
                "location": legal_entity_location,
            }
            legal_entity = load_data_into_model(legal_entity, row, value_map=legal_entity_value_map, save=True)

        # Create the place of performance location
        pop_location, created = get_or_create_location(
            place_of_performance_field_map, row, place_of_performance_value_map
        )

        # If awarding toptier agency code (aka CGAC) is not supplied on the D2 record,
        # use the sub tier code to look it up. This code assumes that all incoming
        # records will supply an awarding subtier agency code
        if row['awarding_agency_code'] is None or len(row['awarding_agency_code'].strip()) < 1:
            row['awarding_agency_code'] = Agency.get_by_subtier(
                row["awarding_sub_tier_agency_c"]).toptier_agency.cgac_code
        # If funding toptier agency code (aka CGAC) is empty, try using the sub
        # tier funding code to look it up. Unlike the awarding agency, we can't
        # assume that the funding agency subtier code will always be present.
        if row['funding_agency_code'] is None or len(row['funding_agency_code'].strip()) < 1:
            funding_agency = Agency.get_by_subtier(row["funding_sub_tier_agency_co"])
            row['funding_agency_code'] = (
                funding_agency.toptier_agency.cgac_code if funding_agency is not None
                else None)

        # Find the award that this award transaction belongs to. If it doesn't exist, create it.
        awarding_agency = Agency.get_by_toptier_subtier(
            row['awarding_agency_code'],
            row["awarding_sub_tier_agency_c"]
        )
        created, award = Award.get_or_create_summary_award(
            awarding_agency=awarding_agency,
            piid=row.get('piid'),
            fain=row.get('fain'),
            uri=row.get('uri'))
        award.save()

        award_update_id_list.append(award.id)

        parent_txn_value_map = {
            "award": award,
            "awarding_agency": awarding_agency,
            "funding_agency": Agency.get_by_toptier_subtier(row['funding_agency_code'],
                                                            row["funding_sub_tier_agency_co"]),
            "recipient": legal_entity,
            "place_of_performance": pop_location,
            'submission': submission_attributes,
            "period_of_performance_start_date": format_date(row['period_of_performance_star']),
            "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
            "action_date": format_date(row['action_date']),
        }

        transaction_dict = load_data_into_model(
            TransactionNormalized(),  # thrown away
            row,
            field_map=fad_field_map,
            value_map=parent_txn_value_map,
            as_dict=True)

        transaction = TransactionNormalized.get_or_create_transaction(**transaction_dict)
        transaction.save()

        fad_value_map = {
            "submission": submission_attributes,
            "cfda": Cfda.objects.filter(program_number=row['cfda_number']).first(),
            'reporting_period_start': submission_attributes.reporting_period_start,
            'reporting_period_end': submission_attributes.reporting_period_end,
            "period_of_performance_start_date": format_date(row['period_of_performance_star']),
            "period_of_performance_current_end_date": format_date(row['period_of_performance_curr']),
        }

        financial_assistance_data = load_data_into_model(
            TransactionFABS(),  # thrown away
            row,
            field_map=fad_field_map,
            value_map=fad_value_map,
            as_dict=True)

        transaction_assistance = TransactionFABS.get_or_create_2(transaction=transaction, **financial_assistance_data)
        transaction_assistance.save()

    logger.info('\n\n\n\nFile D2 time elapsed: {}'.format(time.time() - d_start_time))
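
A minimal call-site sketch for the non-quick path (hypothetical; assumes a SubmissionAttributes row and a Broker cursor are already in hand, and that the Broker table can be filtered by submission_id):

# Hypothetical call site for load_file_d2; not part of the original loader.
# The submission_id filter below is an assumption about the Broker table's columns.
db_cursor.execute(
    "SELECT * FROM published_award_financial_assistance WHERE submission_id = %s",
    [submission_attributes.broker_submission_id],
)
d2_rows = dictfetchall(db_cursor)
load_file_d2(submission_attributes, d2_rows, db_cursor, quick=False)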