Example #1
def test_generate_d2_file_query(mock_broker_config_paths, database):
    """ A CSV with fields in the right order should be written to the file system """
    sess = database.session
    pafa = PublishedAwardFinancialAssistanceFactory
    # Active records for agency 123 within the job's date range (01/01/2017 - 01/31/2017);
    # these should appear in the generated file
    pafa_1 = pafa(awarding_agency_code='123',
                  action_date='20170101',
                  afa_generated_unique='unique1',
                  is_active=True)
    pafa_2 = pafa(awarding_agency_code='123',
                  action_date='20170131',
                  afa_generated_unique='unique2',
                  is_active=True)
    # Records outside the job's date range
    pafa_3 = pafa(awarding_agency_code='123',
                  action_date='20161231',
                  afa_generated_unique='unique3',
                  is_active=True)
    pafa_4 = pafa(awarding_agency_code='123',
                  action_date='20170201',
                  afa_generated_unique='unique4',
                  is_active=True)
    # Inactive record within the date range
    pafa_5 = pafa(awarding_agency_code='123',
                  action_date='20170115',
                  afa_generated_unique='unique5',
                  is_active=False)
    # Record for a different awarding agency
    pafa_6 = pafa(awarding_agency_code='234',
                  action_date='20170115',
                  afa_generated_unique='unique6',
                  is_active=True)
    sess.add_all([pafa_1, pafa_2, pafa_3, pafa_4, pafa_5, pafa_6])

    file_path = str(
        mock_broker_config_paths['d_file_storage_path'].join('d2_test'))
    job = JobFactory(job_status_id=JOB_STATUS_DICT['running'],
                     job_type_id=JOB_TYPE_DICT['file_upload'],
                     file_type_id=FILE_TYPE_DICT['award'],
                     filename=file_path,
                     original_filename='d2_test',
                     start_date='01/01/2017',
                     end_date='01/31/2017')
    sess.add(job)
    sess.commit()

    file_gen_manager = FileGenerationManager(job, '123', 'awarding',
                                             CONFIG_BROKER['local'])
    file_gen_manager.generate_d_file()

    # check headers
    file_rows = read_file_rows(file_path)
    assert file_rows[0] == [
        key for key in file_generation_manager.fileD2.mapping
    ]

    # check body
    pafa1 = sess.query(PublishedAwardFinancialAssistance).filter_by(
        afa_generated_unique='unique1').first()
    pafa2 = sess.query(PublishedAwardFinancialAssistance).filter_by(
        afa_generated_unique='unique2').first()
    expected1, expected2 = [], []
    for value in file_generation_manager.fileD2.db_columns:
        # build the expected row, stripping hyphens from date columns to match the file's YYYYMMDD format
        if value in [
                'period_of_performance_star', 'period_of_performance_curr',
                'modified_at', 'action_date'
        ]:
            expected1.append(
                re.sub(r"[-]", r"", str(pafa1.__dict__[value]))[0:8])
            expected2.append(
                re.sub(r"[-]", r"", str(pafa2.__dict__[value]))[0:8])
        else:
            expected1.append(str(pafa1.__dict__[value]))
            expected2.append(str(pafa2.__dict__[value]))

    assert expected1 in file_rows
    assert expected2 in file_rows
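
Both examples are excerpts from the broker's file-generation test module and omit their imports. Below is a minimal sketch of the import block they appear to rely on, assuming the usual dataactbroker/dataactcore package layout; the exact module paths are an assumption and may differ in the actual repository.

import re

# Assumed module paths -- verify against the actual repository layout.
from dataactbroker.handlers import file_generation_manager
from dataactbroker.handlers.file_generation_manager import FileGenerationManager
from dataactcore.config import CONFIG_BROKER
from dataactcore.models.lookups import FILE_TYPE_DICT, JOB_STATUS_DICT, JOB_TYPE_DICT
from dataactcore.models.stagingModels import (DetachedAwardProcurement,
                                              PublishedAwardFinancialAssistance)
from tests.unit.dataactcore.factories.job import JobFactory
from tests.unit.dataactcore.factories.staging import (DetachedAwardProcurementFactory,
                                                      PublishedAwardFinancialAssistanceFactory)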
Example #2
def test_generate_d1_file_query(mock_broker_config_paths, database):
    """ A CSV with fields in the right order should be written to the file system """
    sess = database.session
    dap_model = DetachedAwardProcurementFactory
    # Records for agency 123 within the job's date range (01/01/2017 - 01/31/2017);
    # these should appear in the generated file
    dap_1 = dap_model(awarding_agency_code='123',
                      action_date='20170101',
                      detached_award_proc_unique='unique1')
    dap_2 = dap_model(awarding_agency_code='123',
                      action_date='20170131',
                      detached_award_proc_unique='unique2')
    # Records outside the job's date range
    dap_3 = dap_model(awarding_agency_code='123',
                      action_date='20170201',
                      detached_award_proc_unique='unique3')
    dap_4 = dap_model(awarding_agency_code='123',
                      action_date='20161231',
                      detached_award_proc_unique='unique4')
    # Record for a different awarding agency
    dap_5 = dap_model(awarding_agency_code='234',
                      action_date='20170115',
                      detached_award_proc_unique='unique5')
    sess.add_all([dap_1, dap_2, dap_3, dap_4, dap_5])

    file_path = str(
        mock_broker_config_paths['d_file_storage_path'].join('d1_test'))
    job = JobFactory(job_status_id=JOB_STATUS_DICT['running'],
                     job_type_id=JOB_TYPE_DICT['file_upload'],
                     file_type_id=FILE_TYPE_DICT['award_procurement'],
                     filename=file_path,
                     original_filename='d1_test',
                     start_date='01/01/2017',
                     end_date='01/31/2017')
    sess.add(job)
    sess.commit()

    file_gen_manager = FileGenerationManager(job, '123', 'awarding',
                                             CONFIG_BROKER['local'])
    file_gen_manager.generate_d_file()

    # check headers
    file_rows = read_file_rows(file_path)
    assert file_rows[0] == [
        key for key in file_generation_manager.fileD1.mapping
    ]

    # check body
    dap_one = sess.query(DetachedAwardProcurement).filter_by(
        detached_award_proc_unique='unique1').first()
    dap_two = sess.query(DetachedAwardProcurement).filter_by(
        detached_award_proc_unique='unique2').first()
    expected1, expected2 = [], []
    for value in file_generation_manager.fileD1.db_columns:
        # build the expected row, stripping hyphens from date columns to match the file's YYYYMMDD format
        if value in [
                'period_of_performance_star', 'period_of_performance_curr',
                'period_of_perf_potential_e', 'ordering_period_end_date',
                'action_date', 'last_modified'
        ]:
            expected1.append(
                re.sub(r"[-]", r"", str(dap_one.__dict__[value]))[0:8])
            expected2.append(
                re.sub(r"[-]", r"", str(dap_two.__dict__[value]))[0:8])
        else:
            expected1.append(str(dap_one.__dict__[value]))
            expected2.append(str(dap_two.__dict__[value]))

    assert expected1 in file_rows
    assert expected2 in file_rows
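
Both tests also call read_file_rows, a helper that is not shown in these excerpts. Here is a minimal sketch of what it presumably does, assuming it simply loads the generated CSV back into a list of rows; the name matches the calls above, but the body is an assumption, not the repository's definition.

import csv
import os


def read_file_rows(file_path):
    # Assumed helper: confirm the file was written, then return its rows as lists of strings.
    assert os.path.isfile(file_path)
    with open(file_path) as f:
        return [row for row in csv.reader(f)]

With a helper like this, file_rows[0] is the header row and each subsequent entry is one award record, which is why the tests compare whole lists against the mapping keys and the expected column values.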