def test_list_submissions_failure(database, monkeypatch):
    """ Submissions with validation errors, failed jobs, or invalid jobs must report failure statuses. """
    # Scenario 1: a submission that recorded validation errors
    owner = UserFactory(user_id=1)
    submission = SubmissionFactory(user_id=1, submission_id=1, number_of_errors=1, publish_status_id=1)
    add_models(database, [owner, submission])

    monkeypatch.setattr(fileHandler, 'g', Mock(user=owner))
    listing = list_submissions_result()
    assert listing['total'] == 1
    assert listing['submissions'][0]['status'] == "validation_errors"
    delete_models(database, [owner, submission])

    # Scenario 2: a validation job that failed outright
    owner = UserFactory(user_id=1)
    submission = SubmissionFactory(user_id=1, submission_id=1, publish_status_id=1)
    failed_job = JobFactory(submission_id=1, job_status_id=JOB_STATUS_DICT['failed'],
                            job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                            file_type_id=FILE_TYPE_DICT['award'])
    add_models(database, [owner, submission, failed_job])

    listing = list_submissions_result()
    assert listing['total'] == 1
    assert listing['submissions'][0]['status'] == "failed"
    delete_models(database, [owner, submission, failed_job])

    # Scenario 3: a validation job marked invalid (file-level errors)
    owner = UserFactory(user_id=1)
    submission = SubmissionFactory(user_id=1, submission_id=1, publish_status_id=1)
    invalid_job = JobFactory(submission_id=1, job_status_id=JOB_STATUS_DICT['invalid'],
                             job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                             file_type_id=FILE_TYPE_DICT['award'])
    add_models(database, [owner, submission, invalid_job])

    listing = list_submissions_result()
    assert listing['total'] == 1
    assert listing['submissions'][0]['status'] == "file_errors"
    delete_models(database, [owner, submission, invalid_job])
# Esempio n. 2
# 0
def test_check_job_dependencies_has_unfinished_dependencies(database):
    """ Tests check_job_dependencies with a job that isn't finished """
    sess = database.session
    submission = SubmissionFactory(submission_id=1)
    finished_job = JobFactory(submission_id=submission.submission_id,
                              job_status_id=JOB_STATUS_DICT['finished'],
                              job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                              file_type_id=FILE_TYPE_DICT['award'],
                              number_of_errors=0)
    dependent_job = JobFactory(submission_id=submission.submission_id,
                               job_status_id=JOB_STATUS_DICT['waiting'],
                               job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                               file_type_id=FILE_TYPE_DICT['award'])
    unfinished_prereq = JobFactory(submission_id=submission.submission_id,
                                   job_status_id=JOB_STATUS_DICT['waiting'],
                                   job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                                   file_type_id=FILE_TYPE_DICT['award'],
                                   number_of_errors=0)
    sess.add_all([submission, finished_job, dependent_job, unfinished_prereq])
    sess.commit()

    # finished_job is done and is a prerequisite for dependent_job (waiting);
    # unfinished_prereq is ALSO a prerequisite but not done, so dependent_job
    # must remain in "waiting"
    dependencies = [
        JobDependency(job_id=dependent_job.job_id, prerequisite_id=finished_job.job_id),
        JobDependency(job_id=dependent_job.job_id, prerequisite_id=unfinished_prereq.job_id),
    ]
    sess.add_all(dependencies)
    sess.commit()

    check_job_dependencies(finished_job.job_id)

    assert dependent_job.job_status_id == JOB_STATUS_DICT['waiting']
def test_get_status_fabs(database):
    """ Test get status function for a fabs submission """
    sess = database.session

    # Hoist lookups shared between the two jobs
    finished_status = sess.query(JobStatus).filter_by(name='finished').one()
    fabs_file_type = sess.query(FileType).filter_by(name='fabs').one()

    sub = SubmissionFactory(submission_id=1, d2_submission=True)
    upload_job = JobFactory(
        submission_id=sub.submission_id,
        job_type=sess.query(JobType).filter_by(name='file_upload').one(),
        file_type=fabs_file_type,
        job_status=finished_status,
        number_of_errors=0,
        number_of_warnings=0)
    validation_job = JobFactory(
        submission_id=sub.submission_id,
        job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
        file_type=fabs_file_type,
        job_status=finished_status,
        number_of_errors=0,
        number_of_warnings=4)

    sess.add_all([sub, upload_job, validation_job])
    sess.commit()

    response = fileHandler.get_status(sub)
    assert response.status_code == 200
    payload = json.loads(response.get_data().decode('UTF-8'))
    # The validation job has warnings but no errors, so only has_warnings is True
    assert payload['fabs'] == {
        'status': 'finished',
        'has_errors': False,
        'has_warnings': True,
        'message': ''
    }
# Esempio n. 4
# 0
def test_copy_parent_file_request_data(database):
    """ Copying a finished parent job's file request data onto a child job must mirror
        its status, filenames, and error/warning counts, and mark the child as cached.
    """
    sess = database.session

    # Parent: a finished D-file upload job
    job_one = JobFactory(
        job_status=sess.query(JobStatus).filter_by(name='finished').one(),
        job_type=sess.query(JobType).filter_by(name='file_upload').one(),
        file_type=sess.query(FileType).filter_by(name='award').one(),
    )
    # Child: still running, with its own provisional filename under 'job_id/'
    job_two = JobFactory(
        job_status=sess.query(JobStatus).filter_by(name='running').one(),
        job_type=sess.query(JobType).filter_by(name='file_upload').one(),
        file_type=sess.query(FileType).filter_by(name='award').one(),
        filename='job_id/new_filename'
    )
    sess.add_all([job_one, job_two])
    sess.commit()

    # Copy parent (job_one) onto child (job_two); the True flag presumably marks the
    # copy as coming from cache -- TODO confirm against the handler's signature
    file_generation_handler.copy_parent_file_request_data(sess, job_two, job_one, True)
    sess.refresh(job_one)
    sess.refresh(job_two)

    # Child keeps its 'job_id/' path prefix but adopts the parent's original filename
    assert job_two.job_status_id == job_one.job_status_id
    assert job_two.filename == 'job_id/{}'.format(job_one.original_filename)
    assert job_two.original_filename == job_one.original_filename
    assert job_two.number_of_errors == job_one.number_of_errors
    assert job_two.number_of_warnings == job_one.number_of_warnings
    assert job_two.from_cached is True
def test_check_job_dependencies_prior_dependency_has_errors(database):
    """ Tests check_job_dependencies with a job that is finished but has errors """
    sess = database.session
    submission = SubmissionFactory(submission_id=1)

    # Both jobs share a type and file type; look them up once
    validation_type = sess.query(JobType).filter_by(name='csv_record_validation').one()
    award_type = sess.query(FileType).filter_by(name='award').one()

    errored_job = JobFactory(
        submission_id=submission.submission_id,
        job_status=sess.query(JobStatus).filter_by(name='finished').one(),
        job_type=validation_type,
        file_type=award_type,
        number_of_errors=3)
    waiting_job = JobFactory(
        submission_id=submission.submission_id,
        job_status=sess.query(JobStatus).filter_by(name='waiting').one(),
        job_type=validation_type,
        file_type=award_type)
    sess.add_all([submission, errored_job, waiting_job])
    sess.commit()

    # The prerequisite finished, but its errors must keep the dependent job waiting
    dependency = JobDependency(job_id=waiting_job.job_id, prerequisite_id=errored_job.job_id)
    sess.add(dependency)
    sess.commit()

    check_job_dependencies(errored_job.job_id)

    assert waiting_job.job_status_id == JOB_STATUS_DICT['waiting']
def test_check_generation_prereqs_d_has_errors(database):
    """ Tests a set of conditions that has an error in one of the A,B,C files, prevent D file generation. """
    sess = database.session

    sub = SubmissionFactory(submission_id=1, d2_submission=False)
    # Appropriations (file A) finished with one error; B and C finished clean.
    # The single error must block D-file generation.
    scenarios = [('appropriations', 1), ('program_activity', 0), ('award_financial', 0)]
    jobs = [
        JobFactory(submission_id=sub.submission_id,
                   job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                   file_type_id=FILE_TYPE_DICT[file_type_name],
                   job_status_id=JOB_STATUS_DICT['finished'],
                   number_of_errors=error_count,
                   number_of_warnings=0,
                   error_message=None)
        for file_type_name, error_count in scenarios
    ]
    sess.add_all([sub] + jobs)
    sess.commit()

    can_generate = check_generation_prereqs(sub.submission_id, 'D1')
    assert can_generate is False
# Esempio n. 7
# 0
def test_copy_parent_file_request_data(database):
    """ Copying a finished parent job onto a child job replicates its status, filenames,
        and counts, and flags the child as served from cache.
    """
    sess = database.session

    parent_job = JobFactory(job_status_id=JOB_STATUS_DICT['finished'],
                            job_type_id=JOB_TYPE_DICT['file_upload'],
                            file_type_id=FILE_TYPE_DICT['award'])
    child_job = JobFactory(job_status_id=JOB_STATUS_DICT['running'],
                           job_type_id=JOB_TYPE_DICT['file_upload'],
                           file_type_id=FILE_TYPE_DICT['award'],
                           filename='None/new_filename.csv')
    sess.add_all([parent_job, child_job])
    sess.commit()

    copy_parent_file_request_data(child_job, parent_job, True)
    sess.refresh(parent_job)
    sess.refresh(child_job)

    assert child_job.job_status_id == parent_job.job_status_id
    # Locally, files live in the broker_files directory; remotely they are
    # prefixed with the submission ID
    if CONFIG_BROKER['local']:
        filepath = CONFIG_BROKER['broker_files']
    else:
        filepath = "{}/".format(str(child_job.submission_id))
    assert child_job.filename == '{}{}'.format(filepath,
                                               parent_job.original_filename)
    assert child_job.original_filename == parent_job.original_filename
    assert child_job.number_of_errors == parent_job.number_of_errors
    assert child_job.number_of_warnings == parent_job.number_of_warnings
    assert child_job.from_cached is True
# Esempio n. 8
# 0
def test_check_generation_prereqs_d_valid(database):
    """ Tests a set of conditions that passes the prerequisite checks to allow D files to be generated. Show that
        warnings do not prevent generation.
    """
    sess = database.session

    sub = SubmissionFactory(submission_id=1, is_fabs=False)
    # All three A/B/C validation jobs finished without errors; award_financial
    # carries a warning, which must NOT block D-file generation.
    scenarios = [('appropriations', 0), ('program_activity', 0), ('award_financial', 1)]
    jobs = [
        JobFactory(submission_id=sub.submission_id,
                   job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                   file_type_id=FILE_TYPE_DICT[file_type_name],
                   job_status_id=JOB_STATUS_DICT['finished'],
                   number_of_errors=0,
                   number_of_warnings=warning_count,
                   error_message=None)
        for file_type_name, warning_count in scenarios
    ]
    sess.add_all([sub] + jobs)
    sess.commit()

    can_generate = check_generation_prereqs(sub.submission_id, 'D1')
    assert can_generate is True
# Esempio n. 9
# 0
def test_get_status_fabs(database):
    """ Test get status function for a fabs submission """
    sess = database.session

    # d2_submission=True marks this as a FABS (D2) submission
    sub = SubmissionFactory(submission_id=1, d2_submission=True)
    # Upload job finished cleanly
    job_up = JobFactory(submission_id=sub.submission_id,
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        file_type_id=FILE_TYPE_DICT['fabs'],
                        job_status_id=JOB_STATUS_DICT['finished'],
                        number_of_errors=0,
                        number_of_warnings=0)
    # Validation job finished with warnings but no errors
    job_val = JobFactory(submission_id=sub.submission_id,
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         file_type_id=FILE_TYPE_DICT['fabs'],
                         job_status_id=JOB_STATUS_DICT['finished'],
                         number_of_errors=0,
                         number_of_warnings=4)

    sess.add_all([sub, job_up, job_val])
    sess.commit()

    json_response = fileHandler.get_status(sub)
    assert json_response.status_code == 200
    json_content = json.loads(json_response.get_data().decode('UTF-8'))
    # Warnings surfaced, errors absent: only has_warnings is True
    assert json_content['fabs'] == {
        'status': 'finished',
        'has_errors': False,
        'has_warnings': True,
        'message': ''
    }
# Esempio n. 10
# 0
def test_check_job_dependencies_ready(mock_sqs_queue, database):
    """ Tests check_job_dependencies with a job that can be set to ready """
    # Always hand back the mock queue during the test
    mock_sqs_queue.return_value = SQSMockQueue
    sess = database.session
    submission = SubmissionFactory(submission_id=1)
    prereq_job = JobFactory(submission_id=submission.submission_id,
                            job_status_id=JOB_STATUS_DICT['finished'],
                            job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                            file_type_id=FILE_TYPE_DICT['award'],
                            number_of_errors=0)
    dependent_job = JobFactory(submission_id=submission.submission_id,
                               job_status_id=JOB_STATUS_DICT['waiting'],
                               job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                               file_type_id=FILE_TYPE_DICT['award'])
    sess.add_all([submission, prereq_job, dependent_job])
    sess.commit()

    # The prerequisite finished WITHOUT errors, so the waiting job can advance
    dependency = JobDependency(job_id=dependent_job.job_id, prerequisite_id=prereq_job.job_id)
    sess.add(dependency)
    sess.commit()

    check_job_dependencies(prereq_job.job_id)

    assert dependent_job.job_status_id == JOB_STATUS_DICT['ready']
def test_start_d_generation_submission_change_request(database, monkeypatch):
    """ In-submission generations that change their requested start or end dates must actually generate files based on
        the new dates.
    """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(submission_id=1000,
                                   reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31',
                                   cgac_code='123',
                                   frec_code=None,
                                   is_quarter_format=False,
                                   publishable=False,
                                   reporting_fiscal_year='2017')
    # Cached generation for the submission's ORIGINAL date range and agency type
    file_gen = FileGenerationFactory(request_date=datetime.now().date(),
                                     start_date='2017-01-01',
                                     end_date='2017-01-31',
                                     file_type='D1',
                                     agency_code='123',
                                     agency_type='awarding',
                                     is_cached_file=True,
                                     file_path=file_path,
                                     file_generation_id=1000,
                                     file_format='csv')
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                        error_message=None,
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        file_type_id=FILE_TYPE_DICT['award_procurement'],
                        filename=None,
                        submission_id=submission.submission_id,
                        file_generation_id=file_gen.file_generation_id,
                        original_filename=original_filename)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         error_message=None,
                         file_type_id=FILE_TYPE_DICT['award_procurement'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         filename=None,
                         submission_id=submission.submission_id,
                         original_filename=original_filename)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(
        generation_helper, 'g',
        Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    # Request a different end date (01/30 vs cached 01/31) and agency type
    # ('funding' vs cached 'awarding')
    start_d_generation(up_job, '01/01/2017', '01/30/2017', 'funding')

    # The cached FileGeneration must NOT be reused: the job gets a brand-new
    # generation, the requested dates, and a freshly generated file.
    # (Removed the original trailing asserts: two were exact duplicates of the
    # date checks below, and two compared up_job attributes to themselves,
    # which could never fail.)
    assert up_job.file_generation_id != file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 30)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(
        up_job.submission_id, original_filename)
# Esempio n. 12
# 0
def test_start_d_generation_submission_cached(database, monkeypatch):
    """ Cached D files must update the upload Job with the FileGeneration data. """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(submission_id=1000,
                                   reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31',
                                   frec_code='1234',
                                   cgac_code=None,
                                   is_quarter_format=False,
                                   publishable=False,
                                   reporting_fiscal_year='2017')
    # Cached generation matching the exact dates/agency the request will ask for
    file_gen = FileGenerationFactory(request_date=datetime.now().date(),
                                     start_date='2017-01-01',
                                     end_date='2017-01-31',
                                     file_type='D2',
                                     agency_code='1234',
                                     agency_type='awarding',
                                     is_cached_file=True,
                                     file_path=file_path)
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                        file_type_id=FILE_TYPE_DICT['award'],
                        error_message=None,
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        filename=None,
                        original_filename=None,
                        submission_id=submission.submission_id)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         error_message=None,
                         file_type_id=FILE_TYPE_DICT['award'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         filename=None,
                         original_filename=None,
                         submission_id=submission.submission_id)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(
        generation_helper, 'g',
        Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # The request matches the cached FileGeneration exactly, so the upload job
    # must adopt the cached generation's data and complete immediately.
    # (Removed the original trailing asserts, which repeated this block
    # verbatim; the final `!= waiting` check is implied by `== finished`.)
    assert up_job.file_generation_id == file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename == original_filename
    assert up_job.filename == gen_file_path_from_submission(
        up_job.submission_id, original_filename)
    assert up_job.job_status_id == JOB_STATUS_DICT['finished']
# Esempio n. 13
# 0
def test_generate_new_d1_file_keep_old_job_files_success(
        monkeypatch, mock_broker_config_paths, database):
    """ Testing that when a new file is generated by a child job, the parent job's files stay the same """
    sess = database.session
    # Parent job: waiting, not cached, with a pre-existing 'original' file
    original_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'],
        job_type_id=JOB_TYPE_DICT['file_upload'],
        file_type_id=FILE_TYPE_DICT['award_procurement'],
        filename=str(
            mock_broker_config_paths['d_file_storage_path'].join('original')),
        start_date='01/01/2017',
        end_date='01/31/2017',
        original_filename='original',
        from_cached=False)
    # Child job: already finished, pointing at the same file
    new_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['finished'],
        job_type_id=JOB_TYPE_DICT['file_upload'],
        file_type_id=FILE_TYPE_DICT['award_procurement'],
        filename=str(
            mock_broker_config_paths['d_file_storage_path'].join('original')),
        start_date='01/01/2017',
        end_date='01/31/2017',
        original_filename='original',
        from_cached=False)
    sess.add_all([original_job, new_job])
    sess.commit()

    # File requests link the jobs: fr_2's parent_job_id makes new_job a child
    # of original_job; neither request is cached
    fr = FileRequestFactory(job=original_job,
                            parent_job_id=None,
                            is_cached_file=False,
                            agency_code='123',
                            agency_type='awarding',
                            start_date=original_job.start_date,
                            end_date=original_job.end_date,
                            file_type='D1',
                            request_date=datetime.now().date())
    fr_2 = FileRequestFactory(job=new_job,
                              parent_job_id=original_job.job_id,
                              is_cached_file=False,
                              agency_code='123',
                              agency_type='awarding',
                              start_date=new_job.start_date,
                              end_date=new_job.end_date,
                              file_type='D1',
                              request_date=datetime.now().date())
    sess.add_all([fr, fr_2])
    sess.commit()

    # Generate for the PARENT job only
    file_gen_manager = FileGenerationManager(original_job, '123', 'awarding',
                                             CONFIG_BROKER['local'])
    file_gen_manager.generate_from_job()

    sess.refresh(original_job)
    sess.refresh(new_job)

    # The parent received a newly generated file...
    assert original_job.original_filename != 'original'

    # ...while the child job's file was left untouched
    assert new_job.original_filename == 'original'
# Esempio n. 14
# 0
def test_uncache_new_d1_file_fpds_success(monkeypatch,
                                          mock_broker_config_paths, database,
                                          job_constants):
    """Testing that a new file is not generated if another job has already has a successfully generated file"""
    sess = database.session
    # A previously finished, cached generation job whose file already exists
    original_job = JobFactory(
        job_status=sess.query(JobStatus).filter_by(name='finished').one(),
        job_type=sess.query(JobType).filter_by(name='file_upload').one(),
        file_type=sess.query(FileType).filter_by(
            name='award_procurement').one(),
        filename=str(
            mock_broker_config_paths['d_file_storage_path'].join('original')),
        start_date='01/01/2017',
        end_date='01/31/2017',
        original_filename='original',
        from_cached=True,
    )
    sess.add(original_job)
    sess.commit()

    # The cached request is a day old; a fresh request for the same parameters
    # should supersede it rather than reuse the stale cache
    file_request = FileRequestFactory(
        job=original_job,
        is_cached_file=True,
        agency_code='123',
        start_date='01/01/2017',
        end_date='01/31/2017',
        file_type='D1',
        request_date=(datetime.now().date() - timedelta(1)),
    )
    new_job = JobFactory(
        job_status=sess.query(JobStatus).filter_by(name='waiting').one(),
        job_type=sess.query(JobType).filter_by(name='file_upload').one(),
        file_type=sess.query(FileType).filter_by(
            name='award_procurement').one(),
        start_date='01/01/2017',
        end_date='01/31/2017',
    )
    sess.add_all([file_request, new_job])
    sess.commit()

    # Bypass the real job-context lookup so the manager operates on new_job
    monkeypatch.setattr(file_generation_handler, 'retrieve_job_context_data',
                        Mock(return_value=(sess, new_job)))
    FileGenerationManager().generate_from_job(new_job.job_id, '123')

    sess.refresh(new_job)
    # A fresh FileRequest must exist for new_job, dated today and marked cached
    file_request = sess.query(FileRequest).filter(
        FileRequest.job_id == new_job.job_id).one_or_none()
    assert file_request is not None
    assert file_request.is_cached_file is True
    assert file_request.start_date == new_job.start_date
    assert file_request.end_date == new_job.end_date
    assert file_request.agency_code == '123'
    assert file_request.request_date == datetime.now().date()

    # The job generated a brand-new file instead of serving the stale cache
    assert new_job.original_filename != 'original'
    assert new_job.from_cached is False
    assert new_job.job_status_id == sess.query(JobStatus).filter_by(
        name='finished').one().job_status_id
# Esempio n. 15
# 0
def test_generate_new_d1_file_different_dates_success(monkeypatch,
                                                      mock_broker_config_paths,
                                                      database):
    """ Testing that a new D1 file is generated using the same data except for the dates """
    sess = database.session
    # A cached D1 generation job for the original 01/01-01/31 window
    job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'],
        job_type_id=JOB_TYPE_DICT['file_upload'],
        file_type_id=FILE_TYPE_DICT['award_procurement'],
        filename=str(
            mock_broker_config_paths['d_file_storage_path'].join('original')),
        start_date='01/01/2017',
        end_date='01/31/2017',
        original_filename='original',
        from_cached=True)
    sess.add(job)
    sess.commit()

    # Cached file request matching the job's ORIGINAL dates
    fr = FileRequestFactory(job=job,
                            is_cached_file=True,
                            agency_code='123',
                            agency_type='awarding',
                            start_date=job.start_date,
                            end_date=job.end_date,
                            file_type='D1',
                            request_date=datetime.now().date())
    sess.add(fr)
    sess.commit()

    # Change the job start date so it no longer matches the cached request
    old_start_date = job.start_date
    job.start_date = '01/02/2017'
    sess.commit()

    file_gen_manager = FileGenerationManager(job, '123', 'awarding',
                                             CONFIG_BROKER['local'])
    file_gen_manager.generate_from_job()

    sess.refresh(job)
    # A new cached request must exist for the job's NEW dates...
    new_file_request = sess.query(FileRequest).filter(
        FileRequest.job_id == job.job_id,
        FileRequest.is_cached_file.is_(True)).one_or_none()
    assert new_file_request is not None
    assert new_file_request.is_cached_file is True
    assert new_file_request.start_date == job.start_date
    assert new_file_request.end_date == job.end_date
    assert new_file_request.agency_code == '123'
    assert new_file_request.agency_type == 'awarding'

    # ...while the old request is demoted to non-cached but keeps its old dates
    old_file_request = sess.query(FileRequest).filter(
        FileRequest.job_id == job.job_id,
        FileRequest.is_cached_file.is_(False)).one_or_none()
    assert old_file_request is not None
    assert old_file_request.is_cached_file is False
    assert old_file_request.start_date == old_start_date
    assert old_file_request.end_date == job.end_date
    assert old_file_request.agency_code == '123'
    assert old_file_request.agency_type == 'awarding'
def test_start_d_generation_submission_new(database, monkeypatch):
    """ A new file generation must update the upload Job and create a new FileGeneration object. """
    sess = database.session
    original_filename = 'D2_test_gen.csv'

    submission = SubmissionFactory(submission_id=1000,
                                   reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31',
                                   cgac_code='123',
                                   frec_code=None,
                                   is_quarter_format=False,
                                   publishable=False,
                                   reporting_fiscal_year='2017')
    # No pre-existing FileGeneration: file_generation_id starts out None
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                        error_message=None,
                        file_type_id=FILE_TYPE_DICT['award'],
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        filename=None,
                        submission_id=submission.submission_id,
                        original_filename=original_filename,
                        file_generation_id=None)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         error_message=None,
                         file_type_id=FILE_TYPE_DICT['award'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         filename=None,
                         submission_id=submission.submission_id,
                         original_filename=original_filename)
    sess.add_all([submission, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(
        generation_helper, 'g',
        Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # The upload job must be linked to a newly created generation and point at
    # a freshly generated file rather than the placeholder name.
    # (Removed the original trailing asserts: two duplicated the date checks
    # below and two compared up_job attributes to themselves, which could
    # never fail.)
    assert up_job.file_generation_id is not None
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(
        up_job.submission_id, original_filename)

    # A matching FileGeneration row must have been created for today
    file_gen = sess.query(FileGeneration).filter_by(
        file_generation_id=up_job.file_generation_id).one_or_none()
    assert file_gen is not None
    assert file_gen.request_date == datetime.now().date()
    assert file_gen.start_date == date(2017, 1, 1)
    assert file_gen.end_date == date(2017, 1, 31)
    assert file_gen.file_type == 'D2'
    assert file_gen.file_path != gen_file_path_from_submission(
        'None', original_filename)
def test_move_certified_data(database):
    """ Tests the move_certified_data function """
    with Flask('test-app').app_context():
        sess = database.session

        # Create 2 submissions
        sub_1 = SubmissionFactory()
        sub_2 = SubmissionFactory()
        sess.add_all([sub_1, sub_2])
        sess.commit()

        # Create jobs so we can put a job ID into the tables
        job_1 = JobFactory(submission_id=sub_1.submission_id)
        job_2 = JobFactory(submission_id=sub_2.submission_id)
        sess.add_all([job_1, job_2])
        sess.commit()

        # Create Appropriation entries, 1 per submission, and one of each other kind
        approp_1 = Appropriation(submission_id=sub_1.submission_id, job_id=job_1.job_id, row_number=1,
                                 spending_authority_from_of_cpe=2)
        approp_2 = Appropriation(submission_id=sub_2.submission_id, job_id=job_2.job_id, row_number=1,
                                 spending_authority_from_of_cpe=2)
        ocpa = ObjectClassProgramActivity(submission_id=sub_1.submission_id, job_id=job_1.job_id, row_number=1)
        award_fin = AwardFinancial(submission_id=sub_1.submission_id, job_id=job_1.job_id, row_number=1)
        error_1 = ErrorMetadata(job_id=job_1.job_id)
        error_2 = ErrorMetadata(job_id=job_2.job_id)
        sess.add_all([approp_1, approp_2, ocpa, award_fin, error_1, error_2])
        sess.commit()

        # Certify only sub_1's data
        move_certified_data(sess, sub_1.submission_id)

        # There are 2 entries, we only want to move the 1 with the submission ID that matches
        approp_query = sess.query(CertifiedAppropriation).filter_by(submission_id=sub_1.submission_id).all()
        assert len(approp_query) == 1
        assert approp_query[0].spending_authority_from_of_cpe == 2

        # Make sure the others got moved as well
        ocpa_query = sess.query(CertifiedObjectClassProgramActivity).filter_by(submission_id=sub_1.submission_id).all()
        award_query = sess.query(CertifiedAwardFinancial).filter_by(submission_id=sub_1.submission_id).all()
        # Query all job IDs but only one result should show up
        error_query = sess.query(CertifiedErrorMetadata).\
            filter(CertifiedErrorMetadata.job_id.in_([job_1.job_id, job_2.job_id])).all()
        assert len(ocpa_query) == 1
        assert len(award_query) == 1
        assert len(error_query) == 1

        # Change the Appropriation data
        # NOTE(review): refresh() immediately after an uncommitted attribute change
        # re-reads the row; whether the 5 survives depends on autoflush behavior.
        # The assertion below expects 2, i.e. the certified copy is NOT updated
        # with the 5 — confirm this is the intended semantics of recertification.
        approp_1.spending_authority_from_of_cpe = 5
        sess.refresh(approp_1)

        # Move the data again (recertify) and make sure we didn't add extras, just adjusted the one we had
        move_certified_data(sess, sub_1.submission_id)
        approp_query = sess.query(CertifiedAppropriation).filter_by(submission_id=sub_1.submission_id).all()
        assert len(approp_query) == 1
        assert approp_query[0].spending_authority_from_of_cpe == 2
def test_list_submissions_failure(database, job_constants, monkeypatch):
    """ Error-bearing submissions should be listed with the matching failure status. """
    fh = fileHandler.FileHandler(Mock())

    # Make the handler believe user 1 is logged in
    login_session = Mock()
    login_session.getName.return_value = 1
    monkeypatch.setattr(fileHandler, 'LoginSession', login_session)

    def submissions_payload():
        # Call the route under test and decode its JSON body
        response = fh.list_submissions(PAGE, LIMIT, CERTIFIED)
        return json.loads(response.get_data().decode("utf-8"))

    # A submission with row-level validation errors reports "validation_errors"
    user = UserFactory(user_id=1, cgac_code='cgac')
    sub = SubmissionFactory(user_id=1, submission_id=1, number_of_errors=1, cgac_code='cgac')
    add_models(database, [user, sub])

    payload = submissions_payload()
    assert payload['total'] == 1
    assert payload['submissions'][0]['status'] == "validation_errors"
    delete_models(database, [user, sub])

    sess = database.session

    # A failed csv_record_validation job reports "failed"
    user = UserFactory(user_id=1, cgac_code='cgac')
    sub = SubmissionFactory(user_id=1, submission_id=1, cgac_code='cgac')
    job = JobFactory(submission_id=1,
                     job_status=sess.query(JobStatus).filter_by(name='failed').one(),
                     job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
                     file_type=sess.query(FileType).filter_by(name='award').one())
    add_models(database, [user, sub, job])

    payload = submissions_payload()
    assert payload['total'] == 1
    assert payload['submissions'][0]['status'] == "failed"
    delete_models(database, [user, sub, job])

    # An invalid csv_record_validation job reports "file_errors"
    user = UserFactory(user_id=1, cgac_code='cgac')
    sub = SubmissionFactory(user_id=1, submission_id=1, cgac_code='cgac')
    job = JobFactory(submission_id=1,
                     job_status=sess.query(JobStatus).filter_by(name='invalid').one(),
                     job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
                     file_type=sess.query(FileType).filter_by(name='award').one())
    add_models(database, [user, sub, job])

    payload = submissions_payload()
    assert payload['total'] == 1
    assert payload['submissions'][0]['status'] == "file_errors"
    delete_models(database, [user, sub, job])
Esempio n. 19
0
def test_get_submission_metadata_quarterly_dabs_cgac(database):
    """ Tests the get_submission_metadata function for quarterly dabs submissions

        Builds one quarterly, 'updated' CGAC-owned DABS submission with two finished
        validation jobs and checks every field of the returned metadata dict.
    """
    sess = database.session

    # Two distinct timestamps so created_on vs last_updated/last_validated can be told apart
    now = datetime.datetime.utcnow()
    now_plus_10 = now + datetime.timedelta(minutes=10)
    # FREC agency rows are added alongside the CGAC agency; the expected output shows the
    # CGAC submission takes its agency info from the CGAC row only (frec_code stays None)
    cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
    frec_cgac = CGACFactory(cgac_code='999', agency_name='FREC CGAC')
    frec = FRECFactory(frec_code='0001', agency_name='FREC Agency', cgac=frec_cgac)

    sub = SubmissionFactory(submission_id=1, created_at=now, updated_at=now_plus_10, cgac_code=cgac.cgac_code,
                            reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                            publish_status_id=PUBLISH_STATUS_DICT['updated'], d2_submission=False, number_of_errors=40,
                            number_of_warnings=200)
    # Job for submission
    job = JobFactory(submission_id=sub.submission_id, last_validated=now_plus_10,
                     job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
                     job_status=sess.query(JobStatus).filter_by(name='finished').one(),
                     file_type=sess.query(FileType).filter_by(name='appropriations').one(),
                     number_of_rows=3,
                     file_size=7655)
    job_2 = JobFactory(submission_id=sub.submission_id, last_validated=now_plus_10,
                       job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
                       job_status=sess.query(JobStatus).filter_by(name='finished').one(),
                       file_type=sess.query(FileType).filter_by(name='program_activity').one(),
                       number_of_rows=7,
                       file_size=12345)

    sess.add_all([cgac, frec_cgac, frec, sub, job, job_2])
    sess.commit()

    # Test for Quarterly, updated DABS cgac submission
    # number_of_rows (3 + 7) and total_size (7655 + 12345) are summed across both jobs;
    # fiscal period 3 of 2017 renders as reporting_period 'Q1/2017'
    expected_results = {
        'cgac_code': cgac.cgac_code,
        'frec_code': None,
        'agency_name': cgac.agency_name,
        'number_of_errors': 40,
        'number_of_warnings': 200,
        'number_of_rows': 10,
        'total_size': 20000,
        'created_on': now.strftime('%m/%d/%Y'),
        'last_updated': now_plus_10.strftime("%Y-%m-%dT%H:%M:%S"),
        'last_validated': now_plus_10.strftime('%m/%d/%Y'),
        'reporting_period': 'Q1/2017',
        'publish_status': 'updated',
        'quarterly_submission': True,
        'fabs_submission': False,
        'fabs_meta': None
    }

    results = get_submission_metadata(sub)
    assert results == expected_results
def test_certify_dabs_submission(database, monkeypatch):
    """ Tests the certify_dabs_submission function

        A publishable, error-free quarterly DABS submission inside an open revalidation
        window should certify: publish status flips to 'published', the certifying user
        is recorded, and CertifyHistory / CertifiedComment / CertifiedFlexField rows are
        written.
    """
    with Flask('test-app').app_context():
        now = datetime.datetime.utcnow()
        sess = database.session

        user = UserFactory()
        cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        submission = SubmissionFactory(created_at=now, updated_at=now, cgac_code=cgac.cgac_code,
                                       reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                                       publishable=True, publish_status_id=PUBLISH_STATUS_DICT['unpublished'],
                                       d2_submission=False, number_of_errors=0, number_of_warnings=200,
                                       certifying_user_id=None)
        # Window opened yesterday, so the certification window is already open
        quarter_reval = QuarterlyRevalidationThresholdFactory(year=2017, quarter=1,
                                                              window_start=now - datetime.timedelta(days=1))
        sess.add_all([user, cgac, submission, quarter_reval])
        sess.commit()

        # Commit above first: the comment and jobs need the generated submission_id
        comment = CommentFactory(file_type_id=FILE_TYPE_DICT['appropriations'], comment='Test',
                                 submission_id=submission.submission_id)
        job_1 = JobFactory(submission_id=submission.submission_id, last_validated=now,
                           job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        job_2 = JobFactory(submission_id=submission.submission_id, last_validated=now + datetime.timedelta(days=1),
                           job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add_all([job_1, job_2, comment])
        sess.commit()

        # Flex field references job_1, so it gets its own commit after the jobs exist
        flex_field = FlexField(file_type_id=FILE_TYPE_DICT['appropriations'], header='flex_test', job_id=job_1.job_id,
                               submission_id=submission.submission_id, row_number=2, cell=None)
        sess.add(flex_field)
        sess.commit()

        # certify_dabs_submission reads the current user from the flask `g` global
        g.user = user
        file_handler = fileHandler.FileHandler({}, is_local=True)
        # Skip real file copies and point GlobalDB at the test database
        monkeypatch.setattr(file_handler, 'move_certified_files', Mock(return_value=True))
        monkeypatch.setattr(fileHandler.GlobalDB, 'db', Mock(return_value=database))

        certify_dabs_submission(submission, file_handler)

        sess.refresh(submission)
        certify_history = sess.query(CertifyHistory).filter_by(submission_id=submission.submission_id).one_or_none()
        assert certify_history is not None
        assert submission.certifying_user_id == user.user_id
        assert submission.publish_status_id == PUBLISH_STATUS_DICT['published']

        # Make sure certified comments are created
        certified_comment = sess.query(CertifiedComment).filter_by(submission_id=submission.submission_id).one_or_none()
        assert certified_comment is not None

        # Make sure certified flex fields are created
        certified_flex = sess.query(CertifiedFlexField).filter_by(submission_id=submission.submission_id).one_or_none()
        assert certified_flex is not None
Esempio n. 21
0
def test_uncache_new_d1_file_fpds_success(monkeypatch,
                                          mock_broker_config_paths, database):
    """Testing that a new D1 file is generated when the only cached request is stale.

    The pre-existing FileRequest was made on a previous day (request_date = yesterday),
    so the new job must NOT reuse it: the assertions require a fresh file
    (original_filename changes, from_cached is False) and a new request dated today.
    NOTE(review): the previous docstring claimed the cached file would be reused, which
    contradicts these assertions — confirm intent against FileGenerationManager.
    """
    sess = database.session
    # A finished upload job that already produced a (now day-old) cached file
    original_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['finished'],
        job_type_id=JOB_TYPE_DICT['file_upload'],
        file_type_id=FILE_TYPE_DICT['award_procurement'],
        filename=str(
            mock_broker_config_paths['d_file_storage_path'].join('original')),
        start_date='01/01/2017',
        end_date='01/31/2017',
        original_filename='original',
        from_cached=True)
    sess.add(original_job)
    sess.commit()

    # The cached request is dated yesterday, making it stale for today's generation
    file_request = FileRequestFactory(job=original_job,
                                      is_cached_file=True,
                                      agency_code='123',
                                      agency_type='awarding',
                                      start_date='01/01/2017',
                                      end_date='01/31/2017',
                                      file_type='D1',
                                      request_date=(datetime.now().date() -
                                                    timedelta(1)))
    # New job covering the same agency/date range, waiting to be generated
    new_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         job_type_id=JOB_TYPE_DICT['file_upload'],
                         file_type_id=FILE_TYPE_DICT['award_procurement'],
                         start_date='01/01/2017',
                         end_date='01/31/2017')
    sess.add_all([file_request, new_job])
    sess.commit()

    file_gen_manager = FileGenerationManager(new_job, '123', 'awarding',
                                             CONFIG_BROKER['local'])
    file_gen_manager.generate_from_job()

    # A brand-new cached request should exist for the new job, dated today
    sess.refresh(new_job)
    file_request = sess.query(FileRequest).filter(
        FileRequest.job_id == new_job.job_id).one_or_none()
    assert file_request is not None
    assert file_request.is_cached_file is True
    assert file_request.start_date == new_job.start_date
    assert file_request.end_date == new_job.end_date
    assert file_request.agency_code == '123'
    assert file_request.request_date == datetime.now().date()

    # The new job produced its own file instead of reusing the stale cached one
    assert new_job.original_filename != 'original'
    assert new_job.from_cached is False
    assert new_job.job_status_id == JOB_STATUS_DICT['finished']
Esempio n. 22
0
def test_copy_file_generation_to_job_attached(monkeypatch, database):
    """ Copying a FileGeneration onto a submission-attached upload job should finish the
        job, rename the file to the submission-specific convention, and mirror the
        filename onto the paired validation job.
    """
    sess = database.session
    original_filename = 'new_filename.csv'

    # Monthly submission for FY2022 period 4 — these values feed the expected filename
    submission = SubmissionFactory(submission_id=1,
                                   reporting_fiscal_year='2022',
                                   reporting_fiscal_period='4',
                                   is_quarter_format=False)
    file_path = gen_file_path_from_submission(submission.submission_id,
                                              original_filename)
    # Paired upload + validation jobs for the same D1 file type
    up_job = JobFactory(submission=submission,
                        job_status_id=JOB_STATUS_DICT['running'],
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        file_type_id=FILE_TYPE_DICT['award_procurement'])
    val_job = JobFactory(submission=submission,
                         job_status_id=JOB_STATUS_DICT['running'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         file_type_id=FILE_TYPE_DICT['award_procurement'])
    file_gen = FileGenerationFactory(file_type='D1',
                                     file_path=file_path,
                                     file_format='csv',
                                     agency_type='awarding',
                                     start_date='2022-01-01',
                                     end_date='2022-01-31')
    sess.add_all([submission, up_job, val_job, file_gen])
    sess.commit()

    monkeypatch.setattr(
        generation_helper, 'g',
        Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    # Freeze the timestamp so the generated filename is deterministic
    monkeypatch.setattr(generation_helper, 'get_timestamp',
                        Mock(return_value='123456789'))
    # Don't actually copy any file on disk
    monkeypatch.setattr(shutil, 'copyfile', Mock())
    copy_file_generation_to_job(up_job, file_gen, True)
    sess.refresh(up_job)
    sess.refresh(file_gen)

    # Filename encodes submission ID, file letter, FY/period, date range, agency type,
    # and the mocked timestamp
    expected_filename = 'SubID-1_File-D1_FY22P04_20220101_20220131_awarding_123456789.csv'
    assert up_job.job_status.name == 'finished'
    assert up_job.filename == gen_file_path_from_submission(
        up_job.submission_id, expected_filename)
    assert up_job.original_filename == expected_filename
    assert up_job.number_of_errors == 0
    assert up_job.number_of_warnings == 0
    assert up_job.file_generation_id == file_gen.file_generation_id

    # The validation job picks up the same file info
    assert val_job.filename == gen_file_path_from_submission(
        up_job.submission_id, expected_filename)
    assert val_job.original_filename == expected_filename
def test_retrieve_cached_file_generation_not_cached(database):
    """ Should return no FileGeneration """
    sess = database.session

    # Upload job with no file attached yet
    job = JobFactory(start_date='2017-01-01', end_date='2017-01-31',
                     job_status_id=JOB_STATUS_DICT['waiting'], error_message=None,
                     file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
                     filename=None, original_filename=None, file_generation_id=None)
    # A generation matching the job's parameters exists, but is flagged as not cached
    uncached_generation = FileGenerationFactory(request_date=datetime.now().date(),
                                                start_date='2017-01-01', end_date='2017-01-31',
                                                file_type='D2', agency_code='123',
                                                agency_type='awarding', is_cached_file=False,
                                                file_format='csv')
    sess.add_all([job, uncached_generation])
    sess.commit()

    # Lookup must not return the un-cached generation
    assert retrieve_cached_file_generation(job, 'awarding', '123', 'csv') is None
def test_certify_dabs_submission_quarterly_revalidation_not_in_db(database):
    """ Tests that a DABS submission whose year/quarter has no revalidation window row
        in the system cannot be certified.
    """
    with Flask('test-app').app_context():
        sess = database.session
        now = datetime.datetime.utcnow()

        user = UserFactory()
        cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        submission = SubmissionFactory(created_at=now, updated_at=now, cgac_code=cgac.cgac_code,
                                       reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                                       publishable=True, publish_status_id=PUBLISH_STATUS_DICT['unpublished'],
                                       d2_submission=False, number_of_errors=0, number_of_warnings=200,
                                       certifying_user_id=None)
        # Deliberately NO QuarterlyRevalidationThreshold row for 2017/Q1
        sess.add_all([user, cgac, submission])
        sess.commit()

        validation_job = JobFactory(submission_id=submission.submission_id, last_validated=now,
                                    job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add(validation_job)
        sess.commit()

        g.user = user
        handler = fileHandler.FileHandler({}, is_local=True)
        response = certify_dabs_submission(submission, handler)
        payload = json.loads(response.data.decode('UTF-8'))
        assert response.status_code == 400
        assert payload['message'] == ("No submission window for this year and quarter was found. If this is an "
                                      "error, please contact the Service Desk.")
def test_certify_dabs_submission_quarterly_revalidation_too_early(database):
    """ Tests that a DABS submission that was last validated before the window start cannot be certified. """
    with Flask('test-app').app_context():
        sess = database.session
        now = datetime.datetime.utcnow()
        earlier = now - datetime.timedelta(days=1)

        user = UserFactory()
        cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        submission = SubmissionFactory(created_at=earlier, updated_at=earlier, cgac_code=cgac.cgac_code,
                                       reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                                       publishable=True, publish_status_id=PUBLISH_STATUS_DICT['unpublished'],
                                       d2_submission=False, number_of_errors=0, number_of_warnings=200,
                                       certifying_user_id=None)
        # The window opens "now", but the submission's validation ran a day earlier
        quarter_reval = QuarterlyRevalidationThresholdFactory(year=2017, quarter=1, window_start=now)
        sess.add_all([user, cgac, submission, quarter_reval])
        sess.commit()

        validation_job = JobFactory(submission_id=submission.submission_id, last_validated=earlier,
                                    job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add(validation_job)
        sess.commit()

        g.user = user
        handler = fileHandler.FileHandler({}, is_local=True)
        response = certify_dabs_submission(submission, handler)
        payload = json.loads(response.data.decode('UTF-8'))
        assert response.status_code == 400
        expected_message = ("This submission was last validated or its D files generated before the start of the "
                            "submission window ({}). Please revalidate before certifying."
                            .format(quarter_reval.window_start.strftime('%m/%d/%Y')))
        assert payload['message'] == expected_message
Esempio n. 26
0
def test_generate_new_d1_file_success(monkeypatch, mock_broker_config_paths,
                                      database):
    """ Testing that a new D1 file is generated

        The job starts out pointing at a stale 'original' file marked from_cached; after
        generation it must hold a new filename, be marked not-from-cache, and have a
        FileRequest cached with today's date.
    """
    sess = database.session
    # Waiting upload job whose filename/from_cached values are leftovers to be replaced
    job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'],
        job_type_id=JOB_TYPE_DICT['file_upload'],
        file_type_id=FILE_TYPE_DICT['award_procurement'],
        filename=str(
            mock_broker_config_paths['d_file_storage_path'].join('original')),
        start_date='01/01/2017',
        end_date='01/31/2017',
        original_filename='original',
        from_cached=True)
    sess.add(job)
    sess.commit()

    file_gen_manager = FileGenerationManager(job, '123', 'awarding',
                                             CONFIG_BROKER['local'])
    file_gen_manager.generate_from_job()

    # A new cached FileRequest should exist for this job, dated today
    sess.refresh(job)
    file_request = sess.query(FileRequest).filter(
        FileRequest.job_id == job.job_id).one_or_none()
    assert file_request is not None
    assert file_request.is_cached_file is True
    assert file_request.start_date == job.start_date
    assert file_request.end_date == job.end_date
    assert file_request.agency_code == '123'
    assert file_request.request_date == datetime.now().date()

    # The stale file info was replaced by the fresh generation
    assert job.original_filename != 'original'
    assert job.from_cached is False
    assert job.job_status_id == JOB_STATUS_DICT['finished']
def test_get_upload_file_url_s3(database, monkeypatch):
    """ Test getting the url of the uploaded file non-locally. """
    # Force the non-local (S3) code path and stub out the signed-URL generator
    monkeypatch.setattr(fileHandler, 'CONFIG_BROKER', {'local': False})
    s3_handler_mock = Mock()
    s3_handler_mock.return_value.get_signed_url.return_value = 'some/url/here.csv'
    monkeypatch.setattr(fileHandler, 'S3Handler', s3_handler_mock)

    # create and insert submission/job
    sess = database.session
    submission = SubmissionFactory(submission_id=1, d2_submission=False)
    upload_job = JobFactory(
        submission_id=1,
        job_status=sess.query(JobStatus).filter_by(name='finished').one(),
        job_type=sess.query(JobType).filter_by(name='file_upload').one(),
        file_type=sess.query(FileType).filter_by(name='appropriations').one(),
        filename='1/some_file.csv')
    add_models(database, [submission, upload_job])

    response = fileHandler.get_upload_file_url(submission, 'A')
    payload = json.loads(response.get_data().decode('utf-8'))
    assert payload['url'] == 'some/url/here.csv'
    # The signed URL must be requested for the stored path split into folder + name
    expected_call = (('1', 'some_file.csv'), {'method': 'GET'})
    assert s3_handler_mock.return_value.get_signed_url.call_args == expected_call
Esempio n. 28
0
def test_relevant_flex_data(database):
    """Verify that we can retrieve multiple flex fields from our data"""
    sess = database.session
    submissions = [SubmissionFactory() for _ in range(3)]
    sess.add_all(submissions)
    sess.commit()
    # Three jobs per submission
    jobs = []
    for submission in submissions:
        jobs.extend(JobFactory(submission_id=submission.submission_id) for _ in range(3))
    sess.add_all(jobs)
    sess.commit()
    # Ten rows of three headers ('0'..'2') per job; cell content scales with the row
    flex_fields = []
    for job in jobs:
        for header_idx in range(3):
            for row in range(1, 11):
                flex_fields.append(FlexField(
                    submission_id=job.submission_id, job_id=job.job_id,
                    row_number=row, header=str(header_idx), cell="cell" * row))
    sess.add_all(flex_fields)
    sess.commit()

    # Only the failing rows of the first job should come back, keyed by row number
    failures = [{'row_number': 3}, {'row_number': 7}]
    result = validator.relevant_flex_data(failures, jobs[0].job_id)
    assert set(result.keys()) == {3, 7}
    assert len(result[3]) == 3
    # spot check some of the values
    assert result[3][0].header == '0'
    assert result[3][1].cell == 'cell' * 3
    assert result[3][2].job_id == jobs[0].job_id
    assert result[7][1].header == '1'
    assert result[7][0].cell == 'cell' * 7
def test_certify_dabs_submission_quarterly_revalidation_multiple_thresholds(database):
    """ Tests that a DABS submission is not affected by a different quarterly revalidation threshold than the one that
        matches its reporting_start_date.
    """
    with Flask('test-app').app_context():
        sess = database.session
        now = datetime.datetime.utcnow()
        earlier = now - datetime.timedelta(days=1)

        user = UserFactory()
        cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        submission = SubmissionFactory(created_at=earlier, updated_at=earlier, cgac_code=cgac.cgac_code,
                                       reporting_fiscal_period=3, reporting_fiscal_year=2017,
                                       reporting_start_date='2016-10-01', is_quarter_format=True, publishable=True,
                                       publish_status_id=PUBLISH_STATUS_DICT['unpublished'], d2_submission=False,
                                       number_of_errors=0, number_of_warnings=200, certifying_user_id=None)
        # Q1's window is already open; Q2's opens far in the future and must be ignored
        # because the submission's reporting_start_date falls in Q1
        open_threshold = QuarterlyRevalidationThresholdFactory(year=2017, quarter=1, window_start=earlier)
        future_threshold = QuarterlyRevalidationThresholdFactory(year=2017, quarter=2,
                                                                 window_start=now + datetime.timedelta(days=10))
        sess.add_all([user, cgac, submission, open_threshold, future_threshold])
        sess.commit()

        validation_job = JobFactory(submission_id=submission.submission_id, last_validated=now,
                                    job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add(validation_job)
        sess.commit()

        g.user = user
        handler = fileHandler.FileHandler({}, is_local=True)
        response = certify_dabs_submission(submission, handler)
        assert response.status_code == 200
def test_copy_file_generation_to_job(monkeypatch, database):
    """ Copying a completed FileGeneration onto a detached upload job should mark the
        job finished and attach the generated file's info to it.
    """
    sess = database.session
    original_filename = 'new_filename.csv'
    generated_path = gen_file_path_from_submission('None', original_filename)

    upload_job = JobFactory(job_status_id=JOB_STATUS_DICT['running'],
                            job_type_id=JOB_TYPE_DICT['file_upload'],
                            file_type_id=FILE_TYPE_DICT['award'])
    file_gen = FileGenerationFactory(file_type='D1', file_path=generated_path, file_format='csv')
    sess.add_all([upload_job, file_gen])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    copy_file_generation_to_job(upload_job, file_gen, True)
    sess.refresh(upload_job)
    sess.refresh(file_gen)

    assert upload_job.job_status.name == 'finished'
    assert upload_job.filename == gen_file_path_from_submission(upload_job.submission_id, original_filename)
    assert upload_job.original_filename == original_filename
    assert upload_job.number_of_errors == 0
    assert upload_job.number_of_warnings == 0
    assert upload_job.file_generation_id == file_gen.file_generation_id
def test_check_detached_d_file_generation(database):
    """ Job statuses should return the correct status and error message to the user

        Walks a single detached (no submission) D2 upload job through each job status
        and checks the status/message pair reported by check_file_generation.
    """
    sess = database.session

    # Detached D2 generation waiting to be picked up by the Validator
    job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], job_type_id=JOB_TYPE_DICT['file_upload'],
                     file_type_id=FILE_TYPE_DICT['award'], error_message='', filename='job_id/file.csv',
                     original_filename='file.csv')
    sess.add(job)
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # Detached D2 generation running in the Validator — still surfaced as 'waiting'
    job.job_status_id = JOB_STATUS_DICT['running']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # Detached D2 generation completed by the Validator
    job.job_status_id = JOB_STATUS_DICT['finished']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'finished'
    assert response_dict['message'] == ''

    # Detached D2 generation with an unknown error — a default message is substituted
    job.job_status_id = JOB_STATUS_DICT['failed']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Upload job failed without error message'

    # Detached D2 generation with a known error — the job's own message is passed through
    job.error_message = 'FABS upload error message'
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'FABS upload error message'
def test_generate_file_updates_jobs(monkeypatch, mock_broker_config_paths, database):
    """ Generating a cached file should mark every job attached to the FileGeneration as
        finished and point each of them at the generated file.

        The original test triplicated identical job setup and assertions; they are
        folded into loops with behavior unchanged.
    """
    sess = database.session
    # Three identical running upload jobs that will share one FileGeneration
    jobs = [JobFactory(job_status_id=JOB_STATUS_DICT['running'], job_type_id=JOB_TYPE_DICT['file_upload'],
                       file_type_id=FILE_TYPE_DICT['award_procurement'], filename=None, original_filename=None,
                       start_date='01/01/2017', end_date='01/31/2017')
            for _ in range(3)]
    file_gen = FileGenerationFactory(request_date=datetime.now().date(), start_date='01/01/2017',
                                     end_date='01/31/2017', file_type='D1', agency_code='123',
                                     agency_type='awarding', is_cached_file=True, file_path=None)
    sess.add_all(jobs + [file_gen])
    sess.commit()
    # file_generation_id only exists after the first commit, so link the jobs second
    for job in jobs:
        job.file_generation_id = file_gen.file_generation_id
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    file_gen_manager = FileGenerationManager(sess, CONFIG_BROKER['local'], file_generation=file_gen)
    file_gen_manager.generate_file()
    sess.refresh(file_gen)

    original_filename = file_gen.file_path.split('/')[-1]

    # Every attached job must be finished and point at the same generated file
    for job in jobs:
        assert job.job_status_id == JOB_STATUS_DICT['finished']
        assert job.original_filename == original_filename
        assert job.filename == '{}{}'.format(
            CONFIG_BROKER['broker_files'] if CONFIG_BROKER['local'] else job.submission_id + '/', original_filename)
def test_check_submission_d_file_generation(database):
    """ Job statuses should return the correct status and error message to the user

        Walks a submission's D1 upload job and its paired validation job through every
        status combination and checks the user-facing status/message for each step.
        Fixes from review: removed a dead `val_job.error_message = ''` that was
        immediately overwritten, and corrected two misleading scenario comments.
    """
    sess = database.session
    sub = SubmissionFactory()
    sess.add(sub)

    # D1 generation waiting to be picked up by the Validator
    job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], job_type_id=JOB_TYPE_DICT['file_upload'],
                     file_type_id=FILE_TYPE_DICT['award_procurement'], submission=sub, error_message='',
                     filename='job_id/file.csv', original_filename='file.csv')
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         file_type_id=FILE_TYPE_DICT['award_procurement'], submission=sub, error_message='',
                         number_of_errors=0)
    sess.add_all([job, val_job])
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # D1 generation running in the Validator — still surfaced as 'waiting'
    job.job_status_id = JOB_STATUS_DICT['running']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # D1 generation with an unknown error — a default message is substituted
    job.job_status_id = JOB_STATUS_DICT['failed']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Upload job failed without error message'

    # D1 generation with a known error — the job's own message is passed through
    job.error_message = 'D1 upload error message'
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'D1 upload error message'

    # D1 generation completed by the Validator; validation waiting to be picked up
    job.error_message = ''
    job.job_status_id = JOB_STATUS_DICT['finished']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # D1 generation completed; validation running in the Validator
    val_job.job_status_id = JOB_STATUS_DICT['running']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'waiting'

    # D1 generation completed; validation completed by the Validator
    val_job.job_status_id = JOB_STATUS_DICT['finished']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'finished'

    # D1 generation completed; validation finished but found row-level errors
    val_job.number_of_errors = 10
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Validation completed but row-level errors were found'

    # D1 generation completed; validation with an unknown error
    job.error_message = ''
    val_job.error_message = ''
    val_job.job_status_id = JOB_STATUS_DICT['failed']
    val_job.number_of_errors = 0
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Validation job had an internal error'

    # D1 generation completed; validation with a known error
    job.error_message = ''
    val_job.error_message = 'D1 upload error message'
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'D1 upload error message'

    # D1 generation completed; validation marked invalid (file-level errors)
    job.error_message = ''
    val_job.error_message = ''
    val_job.job_status_id = JOB_STATUS_DICT['invalid']
    sess.commit()
    response_dict = check_file_generation(job.job_id)
    assert response_dict['status'] == 'failed'
    assert response_dict['message'] == 'Generated file had file-level errors'
def test_current_page(file_app, database):
    """Test the route to check what the current progress of the submission is at
    the correct page
    """

    # Build a website-admin user tied to a CGAC and make them the request user
    cgac = CGACFactory()
    user = UserFactory.with_cgacs(cgac)
    user.user_id = 1
    user.name = 'Oliver Queen'
    user.website_admin = True
    database.session.add(user)
    database.session.commit()
    g.user = user

    sub = SubmissionFactory(user_id=1, cgac_code=cgac.cgac_code)
    database.session.add(sub)
    database.session.commit()

    # Cache frequently used job type/status ids for readability below
    csv_validation = JOB_TYPE_DICT['csv_record_validation']
    upload = JOB_TYPE_DICT['file_upload']
    validation = JOB_TYPE_DICT['validation']
    finished_job = JOB_STATUS_DICT['finished']
    waiting = JOB_STATUS_DICT['waiting']

    # One finished, error-free job per file type (A-F plus cross-file) so the
    # submission starts out fully complete
    job_a = JobFactory(submission_id=sub.submission_id, file_type_id=FILE_TYPE_DICT['appropriations'],
                       job_type_id=csv_validation, number_of_errors=0, file_size=123, job_status_id=finished_job)
    job_b = JobFactory(submission_id=sub.submission_id, file_type_id=FILE_TYPE_DICT['program_activity'],
                       job_type_id=csv_validation, number_of_errors=0, file_size=123, job_status_id=finished_job)
    job_c = JobFactory(submission_id=sub.submission_id, file_type_id=FILE_TYPE_DICT['award_financial'],
                       job_type_id=csv_validation, number_of_errors=0, file_size=123, job_status_id=finished_job)
    job_d1 = JobFactory(submission_id=sub.submission_id, file_type_id=FILE_TYPE_DICT['award_procurement'],
                        job_type_id=csv_validation, number_of_errors=0, file_size=123, job_status_id=finished_job)
    job_d2 = JobFactory(submission_id=sub.submission_id, file_type_id=FILE_TYPE_DICT['award'],
                        job_type_id=csv_validation, number_of_errors=0, file_size=123, job_status_id=finished_job)
    job_e = JobFactory(submission_id=sub.submission_id, file_type_id=FILE_TYPE_DICT['executive_compensation'],
                       job_type_id=upload, number_of_errors=0, file_size=123, job_status_id=finished_job)
    job_f = JobFactory(submission_id=sub.submission_id, file_type_id=FILE_TYPE_DICT['sub_award'], job_type_id=upload,
                       number_of_errors=0, file_size=123, job_status_id=finished_job)
    job_cross_file = JobFactory(submission_id=sub.submission_id, file_type_id=None, job_type_id=validation,
                                number_of_errors=0, file_size=123, job_status_id=finished_job)

    database.session.add_all([job_a, job_b, job_c, job_d1, job_d2, job_e, job_f, job_cross_file])
    database.session.commit()

    # Everything ok
    response = file_app.get("/v1/check_current_page/?submission_id=" + str(sub.submission_id))
    response_json = json.loads(response.data.decode('UTF-8'))
    assert response_json['step'] == '5'

    # NOTE(review): magic number — presumably JOB_STATUS_DICT['failed'];
    # prefer the dict lookup used elsewhere in this test
    job_e.job_status_id = 6
    database.session.commit()
    # E or F failed
    response = file_app.get("/v1/check_current_page/?submission_id=" + str(sub.submission_id))
    response_json = json.loads(response.data.decode('UTF-8'))
    assert response_json['step'] == '4'

    # NOTE(review): magic number — presumably JOB_STATUS_DICT['finished']
    # (i.e. finished_job); prefer the named constant
    job_e.job_status_id = 4
    job_cross_file.number_of_errors = 6
    database.session.commit()

    # Restore job_e and create errors for cross_file
    response = file_app.get("/v1/check_current_page/?submission_id=" + str(sub.submission_id))
    response_json = json.loads(response.data.decode('UTF-8'))
    assert response_json['step'] == '3'

    job_d1.number_of_errors = 6
    database.session.commit()
    # D file has errors
    response = file_app.get("/v1/check_current_page/?submission_id=" + str(sub.submission_id))
    response_json = json.loads(response.data.decode('UTF-8'))
    assert response_json['step'] == '2'

    job_c.number_of_errors = 6
    database.session.commit()
    # Fail C file validation
    response = file_app.get("/v1/check_current_page/?submission_id=" + str(sub.submission_id))
    response_json = json.loads(response.data.decode('UTF-8'))
    assert response_json['step'] == '1'

    job_cross_file.job_status_id = waiting
    job_d1.number_of_errors = 0
    database.session.commit()
    # E and F generated with C file errors
    response = file_app.get("/v1/check_current_page/?submission_id=" + str(sub.submission_id))
    response_json = json.loads(response.data.decode('UTF-8'))
    assert response_json['step'] == '1'