def test_start_d_generation_submission_change_request(database, monkeypatch):
    """ In-submission generations that change their requested start or end dates must actually generate files based on
        the new dates.
    """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(submission_id=1000,
                                   reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31',
                                   cgac_code='123',
                                   frec_code=None,
                                   is_quarter_format=False,
                                   publishable=False,
                                   reporting_fiscal_year='2017')
    file_gen = FileGenerationFactory(request_date=datetime.now().date(),
                                     start_date='2017-01-01',
                                     end_date='2017-01-31',
                                     file_type='D1',
                                     agency_code='123',
                                     agency_type='awarding',
                                     is_cached_file=True,
                                     file_path=file_path,
                                     file_generation_id=1000,
                                     file_format='csv')
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                        error_message=None,
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        file_type_id=FILE_TYPE_DICT['award_procurement'],
                        filename=None,
                        submission_id=submission.submission_id,
                        file_generation_id=file_gen.file_generation_id,
                        original_filename=original_filename)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         error_message=None,
                         file_type_id=FILE_TYPE_DICT['award_procurement'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         filename=None,
                         submission_id=submission.submission_id,
                         original_filename=original_filename)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(
        generation_helper, 'g',
        Mock(return_value={'is_local': CONFIG_BROKER['local']}))
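    # The requested end date (01/30) and agency type ('funding') differ from the cached
    # FileGeneration above, so a new generation must be created instead of reusing the cache.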
    start_d_generation(up_job, '01/01/2017', '01/30/2017', 'funding')

    assert up_job.file_generation_id != file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 30)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(
        up_job.submission_id, original_filename)

    # The paired validation job must be regenerated with the same new dates and file as the upload job.
    assert val_job.start_date == date(2017, 1, 1)
    assert val_job.end_date == date(2017, 1, 30)
    assert val_job.original_filename == up_job.original_filename
    assert val_job.filename == up_job.filename

def test_retrieve_cached_file_generation_not_cached(database):
    """ Should return no FileGeneration """
    sess = database.session
    job = JobFactory(start_date='2017-01-01',
                     end_date='2017-01-31',
                     job_status_id=JOB_STATUS_DICT['waiting'],
                     error_message=None,
                     file_type_id=FILE_TYPE_DICT['award'],
                     job_type_id=JOB_TYPE_DICT['file_upload'],
                     filename=None,
                     original_filename=None,
                     file_generation_id=None)
    file_gen = FileGenerationFactory(request_date=datetime.now().date(),
                                     start_date='2017-01-01',
                                     end_date='2017-01-31',
                                     file_type='D2',
                                     agency_code='123',
                                     agency_type='awarding',
                                     is_cached_file=False,
                                     file_format='csv')
    sess.add_all([job, file_gen])
    sess.commit()
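    # The only candidate FileGeneration has is_cached_file=False, so the lookup should find nothing.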

    file_generation = retrieve_cached_file_generation(job, 'awarding', '123',
                                                      'csv')
    assert file_generation is None

def test_copy_file_generation_to_job(monkeypatch, database):
    """ Copying FileGeneration data onto an upload Job should finish the Job and point it at the cached file. """
    sess = database.session
    original_filename = 'new_filename.csv'
    file_path = gen_file_path_from_submission('None', original_filename)

    job = JobFactory(job_status_id=JOB_STATUS_DICT['running'],
                     job_type_id=JOB_TYPE_DICT['file_upload'],
                     file_type_id=FILE_TYPE_DICT['award'])
    file_gen = FileGenerationFactory(file_type='D1',
                                     file_path=file_path,
                                     file_format='csv')
    sess.add_all([job, file_gen])
    sess.commit()

    monkeypatch.setattr(
        generation_helper, 'g',
        Mock(return_value={'is_local': CONFIG_BROKER['local']}))
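    # Copying should finish the job and attach the cached generation's file to it.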
    copy_file_generation_to_job(job, file_gen, True)
    sess.refresh(job)
    sess.refresh(file_gen)

    assert job.job_status.name == 'finished'
    assert job.filename == gen_file_path_from_submission(
        job.submission_id, original_filename)
    assert job.original_filename == original_filename
    assert job.number_of_errors == 0
    assert job.number_of_warnings == 0
    assert job.file_generation_id == file_gen.file_generation_id
def test_start_d_generation_submission_cached(database, monkeypatch):
    """ Cached D files must update the upload Job with the FileGeneration data. """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(submission_id=1000,
                                   reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31',
                                   frec_code='1234',
                                   cgac_code=None,
                                   is_quarter_format=False,
                                   publishable=False,
                                   reporting_fiscal_year='2017')
    file_gen = FileGenerationFactory(request_date=datetime.now().date(),
                                     start_date='2017-01-01',
                                     end_date='2017-01-31',
                                     file_type='D2',
                                     agency_code='1234',
                                     agency_type='awarding',
                                     is_cached_file=True,
                                     file_path=file_path)
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                        file_type_id=FILE_TYPE_DICT['award'],
                        error_message=None,
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        filename=None,
                        original_filename=None,
                        submission_id=submission.submission_id)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         error_message=None,
                         file_type_id=FILE_TYPE_DICT['award'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         filename=None,
                         original_filename=None,
                         submission_id=submission.submission_id)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(
        generation_helper, 'g',
        Mock(return_value={'is_local': CONFIG_BROKER['local']}))
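    # The requested dates, agency, and agency type all match the cached FileGeneration,
    # so its file should simply be attached to the upload job.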
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    assert up_job.file_generation_id == file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename == original_filename
    assert up_job.filename == gen_file_path_from_submission(
        up_job.submission_id, original_filename)
    assert up_job.job_status_id == JOB_STATUS_DICT['finished']

    # The paired validation job should be updated from the cached generation as well.
    assert val_job.start_date == date(2017, 1, 1)
    assert val_job.end_date == date(2017, 1, 31)
    assert val_job.original_filename == original_filename
    assert val_job.filename == gen_file_path_from_submission(
        up_job.submission_id, original_filename)
    assert val_job.job_status_id != JOB_STATUS_DICT['waiting']
def test_copy_file_generation_to_job_attached(monkeypatch, database):
    """ Copying FileGeneration data onto Jobs attached to a submission should rename the file using the submission's
        naming convention and update the validation Job as well.
    """
    sess = database.session
    original_filename = 'new_filename.csv'

    submission = SubmissionFactory(submission_id=1,
                                   reporting_fiscal_year='2022',
                                   reporting_fiscal_period='4',
                                   is_quarter_format=False)
    file_path = gen_file_path_from_submission(submission.submission_id,
                                              original_filename)
    up_job = JobFactory(submission=submission,
                        job_status_id=JOB_STATUS_DICT['running'],
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        file_type_id=FILE_TYPE_DICT['award_procurement'])
    val_job = JobFactory(submission=submission,
                         job_status_id=JOB_STATUS_DICT['running'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         file_type_id=FILE_TYPE_DICT['award_procurement'])
    file_gen = FileGenerationFactory(file_type='D1',
                                     file_path=file_path,
                                     file_format='csv',
                                     agency_type='awarding',
                                     start_date='2022-01-01',
                                     end_date='2022-01-31')
    sess.add_all([submission, up_job, val_job, file_gen])
    sess.commit()

    monkeypatch.setattr(
        generation_helper, 'g',
        Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    monkeypatch.setattr(generation_helper, 'get_timestamp',
                        Mock(return_value='123456789'))
    monkeypatch.setattr(shutil, 'copyfile', Mock())
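    # With a submission attached, the copied file is renamed using the submission ID, file letter,
    # fiscal period, date range, agency type, and timestamp.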
    copy_file_generation_to_job(up_job, file_gen, True)
    sess.refresh(up_job)
    sess.refresh(file_gen)

    expected_filename = 'SubID-1_File-D1_FY22P04_20220101_20220131_awarding_123456789.csv'
    assert up_job.job_status.name == 'finished'
    assert up_job.filename == gen_file_path_from_submission(
        up_job.submission_id, expected_filename)
    assert up_job.original_filename == expected_filename
    assert up_job.number_of_errors == 0
    assert up_job.number_of_warnings == 0
    assert up_job.file_generation_id == file_gen.file_generation_id

    assert val_job.filename == gen_file_path_from_submission(
        up_job.submission_id, expected_filename)
    assert val_job.original_filename == expected_filename
def test_generate_file_updates_jobs(monkeypatch, mock_broker_config_paths, database):
    """ Generating a file should finish every upload Job that references the FileGeneration and fill in its filenames.
    """
    sess = database.session
    job1 = JobFactory(job_status_id=JOB_STATUS_DICT['running'], job_type_id=JOB_TYPE_DICT['file_upload'],
                      file_type_id=FILE_TYPE_DICT['award_procurement'], filename=None, original_filename=None,
                      start_date='01/01/2017', end_date='01/31/2017')
    job2 = JobFactory(job_status_id=JOB_STATUS_DICT['running'], job_type_id=JOB_TYPE_DICT['file_upload'],
                      file_type_id=FILE_TYPE_DICT['award_procurement'], filename=None, original_filename=None,
                      start_date='01/01/2017', end_date='01/31/2017')
    job3 = JobFactory(job_status_id=JOB_STATUS_DICT['running'], job_type_id=JOB_TYPE_DICT['file_upload'],
                      file_type_id=FILE_TYPE_DICT['award_procurement'], filename=None, original_filename=None,
                      start_date='01/01/2017', end_date='01/31/2017')
    file_gen = FileGenerationFactory(request_date=datetime.now().date(), start_date='01/01/2017',
                                     end_date='01/31/2017', file_type='D1', agency_code='123',
                                     agency_type='awarding', is_cached_file=True, file_path=None)
    sess.add_all([job1, job2, job3, file_gen])
    sess.commit()
    job1.file_generation_id = file_gen.file_generation_id
    job2.file_generation_id = file_gen.file_generation_id
    job3.file_generation_id = file_gen.file_generation_id
    sess.commit()
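    # All three upload jobs now reference the same FileGeneration, so a single generation run should update them all.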

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    file_gen_manager = FileGenerationManager(sess, CONFIG_BROKER['local'], file_generation=file_gen)
    file_gen_manager.generate_file()
    sess.refresh(file_gen)

    original_filename = file_gen.file_path.split('/')[-1]

    assert job1.job_status_id == JOB_STATUS_DICT['finished']
    assert job1.original_filename == original_filename
    assert job1.filename == '{}{}'.format(
        CONFIG_BROKER['broker_files'] if CONFIG_BROKER['local'] else job1.submission_id + '/', original_filename)

    assert job2.job_status_id == JOB_STATUS_DICT['finished']
    assert job2.original_filename == original_filename
    assert job2.filename == '{}{}'.format(
        CONFIG_BROKER['broker_files'] if CONFIG_BROKER['local'] else job2.submission_id + '/', original_filename)

    assert job3.job_status_id == JOB_STATUS_DICT['finished']
    assert job3.original_filename == original_filename
    assert job3.filename == '{}{}'.format(
        CONFIG_BROKER['broker_files'] if CONFIG_BROKER['local'] else job3.submission_id + '/', original_filename)
def test_generate_funding_d2(mock_broker_config_paths, database):
    """ Generated funding D2 files should only contain active financial assistance records for the requested funding
        agency within the requested date range.
    """
    sess = database.session
    pafa = PublishedAwardFinancialAssistanceFactory
    pafa_1 = pafa(funding_agency_code='123', action_date='20170101', afa_generated_unique='unique1', is_active=True)
    pafa_2 = pafa(funding_agency_code='123', action_date='20170131', afa_generated_unique='unique2', is_active=True)
    pafa_3 = pafa(funding_agency_code='123', action_date='20161231', afa_generated_unique='unique3', is_active=True)
    pafa_4 = pafa(funding_agency_code='123', action_date='20170201', afa_generated_unique='unique4', is_active=True)
    pafa_5 = pafa(funding_agency_code='123', action_date='20170115', afa_generated_unique='unique5', is_active=False)
    pafa_6 = pafa(funding_agency_code='234', action_date='20170115', afa_generated_unique='unique6', is_active=True)
    file_gen = FileGenerationFactory(request_date=datetime.now().date(), start_date='01/01/2017', end_date='01/31/2017',
                                     file_type='D2', agency_code='123', agency_type='funding', is_cached_file=True,
                                     file_path=None)
    sess.add_all([pafa_1, pafa_2, pafa_3, pafa_4, pafa_5, pafa_6, file_gen])
    sess.commit()
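    # Only unique1 and unique2 are active, belong to funding agency 123, and fall inside the 01/01-01/31 window;
    # the others are out of range, inactive, or belong to a different agency.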

    file_gen_manager = FileGenerationManager(sess, CONFIG_BROKER['local'], file_generation=file_gen)
    file_gen_manager.generate_file()

    assert file_gen.file_path is not None

    # check headers
    file_rows = read_file_rows(file_gen.file_path)
    assert file_rows[0] == [key for key in file_generation_manager.fileD2.mapping]

    # check body
    pafa1 = sess.query(PublishedAwardFinancialAssistance).filter_by(afa_generated_unique='unique1').first()
    pafa2 = sess.query(PublishedAwardFinancialAssistance).filter_by(afa_generated_unique='unique2').first()
    expected1, expected2 = [], []
    for value in file_generation_manager.fileD2.db_columns:
        # loop through all values and format date columns
        if value in ['period_of_performance_star', 'period_of_performance_curr', 'modified_at', 'action_date']:
            expected1.append(re.sub(r"[-]", r"", str(pafa1.__dict__[value]))[0:8])
            expected2.append(re.sub(r"[-]", r"", str(pafa2.__dict__[value]))[0:8])
        else:
            expected1.append(str(pafa1.__dict__[value]))
            expected2.append(str(pafa2.__dict__[value]))

    assert expected1 in file_rows
    assert expected2 in file_rows
def test_generate_funding_d1(mock_broker_config_paths, database):
    """ Generated funding D1 files should only contain procurement records for the requested funding agency within the
        requested date range.
    """
    sess = database.session
    dap_model = DetachedAwardProcurementFactory
    dap_1 = dap_model(funding_agency_code='123', action_date='20170101', detached_award_proc_unique='unique1')
    dap_2 = dap_model(funding_agency_code='123', action_date='20170131', detached_award_proc_unique='unique2')
    dap_3 = dap_model(funding_agency_code='123', action_date='20170201', detached_award_proc_unique='unique3')
    dap_4 = dap_model(funding_agency_code='123', action_date='20161231', detached_award_proc_unique='unique4')
    dap_5 = dap_model(funding_agency_code='234', action_date='20170115', detached_award_proc_unique='unique5')
    file_gen = FileGenerationFactory(request_date=datetime.now().date(), start_date='01/01/2017', end_date='01/31/2017',
                                     file_type='D1', agency_code='123', agency_type='funding', is_cached_file=True,
                                     file_path=None)
    sess.add_all([dap_1, dap_2, dap_3, dap_4, dap_5, file_gen])
    sess.commit()
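    # unique1 and unique2 fall inside the 01/01-01/31 window for funding agency 123; unique3 and unique4 are
    # outside the window and unique5 belongs to a different agency.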

    file_gen_manager = FileGenerationManager(sess, CONFIG_BROKER['local'], file_generation=file_gen)
    file_gen_manager.generate_file()

    assert file_gen.file_path is not None

    # check headers
    file_rows = read_file_rows(file_gen.file_path)
    assert file_rows[0] == [key for key in file_generation_manager.fileD1.mapping]

    # check body
    dap_one = sess.query(DetachedAwardProcurement).filter_by(detached_award_proc_unique='unique1').first()
    dap_two = sess.query(DetachedAwardProcurement).filter_by(detached_award_proc_unique='unique2').first()
    expected1, expected2 = [], []
    for value in file_generation_manager.fileD1.db_columns:
        # loop through all values and format date columns
        if value in ['period_of_performance_star', 'period_of_performance_curr', 'period_of_perf_potential_e',
                     'ordering_period_end_date', 'action_date', 'last_modified', 'solicitation_date']:
            expected1.append(re.sub(r"[-]", r"", str(dap_one.__dict__[value]))[0:8])
            expected2.append(re.sub(r"[-]", r"", str(dap_two.__dict__[value]))[0:8])
        else:
            expected1.append(str(dap_one.__dict__[value]))
            expected2.append(str(dap_two.__dict__[value]))

    assert expected1 in file_rows
    assert expected2 in file_rows
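

# ---------------------------------------------------------------------------
# Shared setup assumed by the excerpts above. This is a reconstruction based on
# how the names are used in these tests and the usual data-act-broker-backend
# layout, not a verbatim copy of the project's test modules: the import paths,
# the `database` / `mock_broker_config_paths` pytest fixtures (provided by the
# project's conftest), and the two helpers sketched here may differ in detail.
# ---------------------------------------------------------------------------
import csv
import re
import shutil
from datetime import date, datetime
from unittest.mock import Mock

from dataactbroker.helpers import generation_helper                       # assumed module path
from dataactbroker.helpers.generation_helper import (                     # assumed module path
    copy_file_generation_to_job, retrieve_cached_file_generation, start_d_generation)
from dataactcore.config import CONFIG_BROKER                              # assumed module path
from dataactcore.models.lookups import FILE_TYPE_DICT, JOB_STATUS_DICT, JOB_TYPE_DICT
from dataactcore.models.stagingModels import (                            # assumed module path
    DetachedAwardProcurement, PublishedAwardFinancialAssistance)
from dataactvalidator.validation_handlers import file_generation_manager  # assumed module path
from dataactvalidator.validation_handlers.file_generation_manager import FileGenerationManager
from tests.unit.dataactcore.factories.job import (                        # assumed module path
    FileGenerationFactory, JobFactory, SubmissionFactory)
from tests.unit.dataactcore.factories.staging import (                    # assumed module path
    DetachedAwardProcurementFactory, PublishedAwardFinancialAssistanceFactory)


def gen_file_path_from_submission(submission, filename):
    """ Assumed helper: expected path of a generated file (local broker_files dir or a submission prefix). """
    base = CONFIG_BROKER['broker_files'] if CONFIG_BROKER['local'] else '{}/'.format(submission)
    return ''.join([base, str(filename)])


def read_file_rows(file_path):
    """ Assumed helper: read a generated CSV back as a list of rows for header/body checks. """
    with open(file_path) as f:
        return [row for row in csv.reader(f)]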