Example #1
    def handle(self, *args, **options):
        processing_start_datetime = datetime.now(timezone.utc)

        logger.info("Starting FABS data load script...")

        # "Reload all" supersedes all other processing options.
        reload_all = options["reload_all"]
        if reload_all:
            ids = None
            afa_ids = None
            start_datetime = None
            end_datetime = None
        else:
            ids = options["ids"]
            afa_ids = set(options["afa_ids"])
            if options["afa_id_file"]:
                afa_ids = tuple(
                    afa_ids | read_afa_ids_from_file(options["afa_id_file"]))
            start_datetime = options["start_datetime"]
            end_datetime = options["end_datetime"]

        # If no other processing options were provided then this is an incremental load.
        is_incremental_load = not any(
            (reload_all, ids, afa_ids, start_datetime, end_datetime))

        if is_incremental_load:
            start_datetime = get_incremental_load_start_datetime()
            logger.info(
                f"Processing data for FABS starting from {start_datetime} (includes offset)"
            )

            # We only perform deletes with incremental loads.
            with timer("obtaining delete records", logger.info):
                delete_records = retrieve_deleted_fabs_transactions(
                    start_datetime, end_datetime)
                ids_to_delete = [
                    item for sublist in delete_records.values()
                    for item in sublist if item
                ]
                ids_to_delete = get_delete_pks_for_afa_keys(ids_to_delete)
            logger.info(f"{len(ids_to_delete):,} delete ids found in total")

        with timer("retrieving IDs of FABS to process", logger.info):
            ids_to_upsert = get_fabs_transaction_ids(
                ids, afa_ids, start_datetime, end_datetime)

        if is_incremental_load:
            update_award_ids = delete_fabs_transactions(ids_to_delete)
        else:
            update_award_ids = []
        upsert_fabs_transactions(ids_to_upsert, update_award_ids)

        if is_incremental_load:
            logger.info(
                f"Storing {processing_start_datetime} for the next incremental run"
            )
            update_last_load_date("fabs", processing_start_datetime)

        logger.info("FABS UPDATE FINISHED!")
Example #2
    def handle(self, *args, **options):
        processing_start_datetime = datetime.now(timezone.utc)

        logger.info("Starting FABS data load script...")

        do_not_log_deletions = options["do_not_log_deletions"]

        # "Reload all" supersedes all other processing options.
        reload_all = options["reload_all"]
        if reload_all:
            submission_ids = None
            afa_ids = None
            start_datetime = None
            end_datetime = None
        else:
            submission_ids = tuple(options["submission_ids"]) if options["submission_ids"] else None
            afa_ids = read_afa_ids_from_file(options["afa_id_file"]) if options["afa_id_file"] else None
            start_datetime = options["start_datetime"]
            end_datetime = options["end_datetime"]

        # If no other processing options were provided then this is an incremental load.
        is_incremental_load = not any((reload_all, submission_ids, afa_ids,
                                       start_datetime, end_datetime))

        if is_incremental_load:
            last_load_date = get_last_load_date()
            submission_ids = get_new_submission_ids(last_load_date)
            logger.info("Processing data for FABS starting from %s" %
                        last_load_date)

        if is_incremental_load and not submission_ids:
            logger.info("No new submissions. Exiting.")
        else:
            with timer("obtaining delete records", logger.info):
                ids_to_delete = get_fabs_records_to_delete(
                    submission_ids, afa_ids, start_datetime, end_datetime)

            with timer("retrieving/diff-ing FABS Data", logger.info):
                ids_to_upsert = get_fabs_transaction_ids(
                    submission_ids, afa_ids, start_datetime, end_datetime)

            update_award_ids = delete_fabs_transactions(
                ids_to_delete, do_not_log_deletions)
            upsert_fabs_transactions(ids_to_upsert, update_award_ids)

        if is_incremental_load:
            update_last_load_date("fabs", processing_start_datetime)

        logger.info("FABS UPDATE FINISHED!")
Example #3
    def delete_and_add_fabs_transaction_records():
        from usaspending_api.broker.helpers.delete_fabs_transactions import delete_fabs_transactions
        from usaspending_api.broker.helpers.upsert_fabs_transactions import upsert_fabs_transactions

        with Timer("Insert/delete FABS transactions"):
            # Ids to remove/insert are read from temp tables staged upstream.
            delete_ids = get_ids(TEMP_TRANSACTION_FABS_DELETE_IDS_TABLE)
            add_ids = get_ids(TEMP_TRANSACTION_FABS_ADD_IDS_TABLE)
            if not delete_ids and not add_ids:
                logger.info("No FABS transaction records to add or delete")
                return

            # Deletes run first; they return the award ids the upsert must update.
            update_award_ids = delete_fabs_transactions(delete_ids)
            upsert_fabs_transactions(add_ids, update_award_ids)
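get_ids and the two TEMP_TRANSACTION_FABS_* table-name constants are defined elsewhere in the module this example comes from. Assuming each temp table is staged upstream with one transaction id per row (the column name and schema are assumptions), a sketch of such a helper could be:

    from django.db import connection

    def get_ids(temp_table):
        # Hypothetical sketch: assumes a single integer "id" column, and that
        # temp_table is a trusted module constant rather than user input.
        with connection.cursor() as cursor:
            cursor.execute(f"SELECT id FROM {temp_table}")
            return [row[0] for row in cursor.fetchall()]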
Example #4
    def handle(self, *args, **options):
        processing_start_datetime = datetime.now(timezone.utc)

        logger.info("Starting FABS data load script...")

        # "Reload all" supersedes all other processing options.
        reload_all = options["reload_all"]
        if reload_all:
            afa_ids = None
            start_datetime = None
            end_datetime = None
        else:
            afa_ids = read_afa_ids_from_file(options["afa_id_file"]) if options["afa_id_file"] else None
            start_datetime = options["start_datetime"]
            end_datetime = options["end_datetime"]

        # If no other processing options were provided then this is an incremental load.
        is_incremental_load = not any(
            (reload_all, afa_ids, start_datetime, end_datetime))

        if is_incremental_load:
            start_datetime = get_last_load_date()
            logger.info("Processing data for FABS starting from %s" %
                        start_datetime)

        with timer("obtaining delete records", logger.info):
            delete_records = retrieve_deleted_fabs_transactions(
                start_datetime, end_datetime)
            ids_to_delete = [
                item for sublist in delete_records.values() for item in sublist
                if item
            ]

        with timer("retrieving/diff-ing FABS Data", logger.info):
            ids_to_upsert = get_fabs_transaction_ids(
                afa_ids, start_datetime, end_datetime)

        update_award_ids = delete_fabs_transactions(ids_to_delete)
        upsert_fabs_transactions(ids_to_upsert, update_award_ids)

        if is_incremental_load:
            update_last_load_date("fabs", processing_start_datetime)

        logger.info("FABS UPDATE FINISHED!")