def test_list_submissions_permissions(database, monkeypatch):
    """Verify that the user must be in the same CGAC group, the submission's
    owner, or website admin to see the submission"""
    cgac1, cgac2 = CGACFactory(), CGACFactory()
    user1, user2 = UserFactory.with_cgacs(cgac1), UserFactory()
    database.session.add_all([cgac1, cgac2, user1, user2])
    database.session.commit()

    # Submission belongs to user2 and cgac2; user1 starts with neither
    sub = SubmissionFactory(user_id=user2.user_id, cgac_code=cgac2.cgac_code, publish_status_id=1)
    database.session.add(sub)
    database.session.commit()

    monkeypatch.setattr(fileHandler, 'g', Mock(user=user1))
    assert list_submissions_result()['total'] == 0

    # Visible once user1 is affiliated with the submission's CGAC
    user1.affiliations[0].cgac = cgac2
    database.session.commit()
    assert list_submissions_result()['total'] == 1

    # Hidden again after the affiliation is removed
    user1.affiliations = []
    database.session.commit()
    assert list_submissions_result()['total'] == 0

    # Visible when user1 owns the submission
    sub.user_id = user1.user_id
    database.session.commit()
    assert list_submissions_result()['total'] == 1

    sub.user_id = user2.user_id
    database.session.commit()
    assert list_submissions_result()['total'] == 0

    # Website admins see everything
    user1.website_admin = True
    database.session.commit()
    assert list_submissions_result()['total'] == 1
def test_current_user_can_on_submission(monkeypatch, database):
    """current_user_can_on_submission defers to current_user_can, except that
    the submission's owner is always allowed."""
    submission = SubmissionFactory()
    user = UserFactory()
    database.session.add_all([submission, user])
    database.session.commit()

    can_mock = Mock()
    monkeypatch.setattr(permissions, 'g', Mock(user=user))
    monkeypatch.setattr(permissions, 'current_user_can', can_mock)

    # Result tracks the mocked permission check
    can_mock.return_value = True
    assert permissions.current_user_can_on_submission('reader', submission)
    can_mock.return_value = False
    assert not permissions.current_user_can_on_submission('reader', submission)

    # Ownership grants access even when the permission check fails
    submission.user_id = user.user_id
    assert permissions.current_user_can_on_submission('reader', submission)
def test_list_certifications(database):
    """list_certifications returns one entry per certify history record, each
    carrying the certified files attached to it."""
    # set up submission
    sub = SubmissionFactory()
    database.session.add(sub)
    database.session.commit()

    # set up certify history, make sure the empty one comes last in the list
    cert_hist_empty = CertifyHistoryFactory(submission=sub, created_at=datetime.utcnow() - timedelta(days=1))
    cert_hist = CertifyHistoryFactory(submission=sub)
    database.session.add_all([cert_hist_empty, cert_hist])
    database.session.commit()

    # add some data to certified_files_history for the cert_history ID
    history_id = cert_hist.certify_history_id
    sub_id = sub.submission_id
    file_hist_1 = CertifiedFilesHistoryFactory(
        certify_history_id=history_id, submission_id=sub_id, filename="/path/to/file_a.csv",
        warning_filename="/path/to/warning_file_a.csv", narrative="A has a narrative",
        file_type_id=FILE_TYPE_DICT['appropriations'])
    file_hist_2 = CertifiedFilesHistoryFactory(
        certify_history_id=history_id, submission_id=sub_id, filename="/path/to/file_d2.csv",
        warning_filename=None, file_type_id=FILE_TYPE_DICT['award'])
    file_hist_3 = CertifiedFilesHistoryFactory(
        certify_history_id=history_id, submission_id=sub_id, filename=None,
        warning_filename="/path/to/warning_file_cross_test.csv", file_type_id=None)
    database.session.add_all([file_hist_1, file_hist_2, file_hist_3])
    database.session.commit()

    json_response = fileHandler.list_certifications(sub)
    response_dict = json.loads(json_response.get_data().decode('utf-8'))
    assert len(response_dict["certifications"]) == 2

    has_file_list = response_dict["certifications"][0]
    empty_file_list = response_dict["certifications"][1]

    # asserts for certification with files associated
    assert len(has_file_list["certified_files"]) == 4
    assert has_file_list["certified_files"][0]["is_warning"] is False
    assert has_file_list["certified_files"][0]["filename"] == "file_a.csv"
    assert has_file_list["certified_files"][0]["narrative"] == "A has a narrative"
    assert has_file_list["certified_files"][1]["is_warning"]
    assert has_file_list["certified_files"][1]["narrative"] is None

    # asserts for certification without files associated
    assert len(empty_file_list["certified_files"]) == 0
def test_list_submissions_failure(database, monkeypatch):
    """Submissions with errors or failed/invalid jobs report the matching
    status string from list_submissions."""
    # Submission with row-level validation errors
    user = UserFactory(user_id=1)
    sub = SubmissionFactory(user_id=1, submission_id=1, number_of_errors=1, publish_status_id=1)
    add_models(database, [user, sub])

    monkeypatch.setattr(fileHandler, 'g', Mock(user=user))
    result = list_submissions_result()
    assert result['total'] == 1
    assert result['submissions'][0]['status'] == "validation_errors"
    delete_models(database, [user, sub])

    # Submission whose validation job failed outright
    sess = database.session
    user = UserFactory(user_id=1)
    sub = SubmissionFactory(user_id=1, submission_id=1, publish_status_id=1)
    job = JobFactory(
        submission_id=1,
        job_status=sess.query(JobStatus).filter_by(name='failed').one(),
        job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
        file_type=sess.query(FileType).filter_by(name='award').one())
    add_models(database, [user, sub, job])

    result = list_submissions_result()
    assert result['total'] == 1
    assert result['submissions'][0]['status'] == "failed"
    delete_models(database, [user, sub, job])

    # Submission whose validation job found the file invalid
    sess = database.session
    user = UserFactory(user_id=1)
    sub = SubmissionFactory(user_id=1, submission_id=1, publish_status_id=1)
    job = JobFactory(
        submission_id=1,
        job_status=sess.query(JobStatus).filter_by(name='invalid').one(),
        job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
        file_type=sess.query(FileType).filter_by(name='award').one())
    add_models(database, [user, sub, job])

    result = list_submissions_result()
    assert result['total'] == 1
    assert result['submissions'][0]['status'] == "file_errors"
    delete_models(database, [user, sub, job])
def test_generate_e_file_csv(monkeypatch, mock_broker_config_paths, database):
    """ Verify that an appropriate CSV is written, based on fileE.Row's structure """
    # Create an award so that we have _a_ duns
    sub = SubmissionFactory()
    database.session.add(sub)
    database.session.commit()
    ap = AwardProcurementFactory(submission_id=sub.submission_id)
    database.session.add(ap)
    database.session.commit()

    # Upload job pointing at the temp file path
    file_path = str(mock_broker_config_paths['broker_files'].join('e_test1'))
    job = JobFactory(job_status_id=JOB_STATUS_DICT['running'], job_type_id=JOB_TYPE_DICT['file_upload'],
                     file_type_id=FILE_TYPE_DICT['executive_compensation'], filename=file_path,
                     original_filename='e_test1', submission_id=sub.submission_id)
    database.session.add(job)
    database.session.commit()

    # Stub out row retrieval so the generator writes two known rows
    monkeypatch.setattr(file_generation_manager.fileE, 'row_to_dict', Mock())
    file_generation_manager.fileE.row_to_dict.return_value = {}
    monkeypatch.setattr(file_generation_manager.fileE, 'retrieve_rows', Mock())
    file_generation_manager.fileE.retrieve_rows.return_value = [
        fileE.Row('a', 'b', 'c', 'd', '1a', '1b', '2a', '2b', '3a', '3b', '4a', '4b', '5a', '5b'),
        fileE.Row('A', 'B', 'C', 'D', '1A', '1B', '2A', '2B', '3A', '3B', '4A', '4B', '5A', '5B')
    ]
    monkeypatch.setattr(file_generation_manager, 'mark_job_status', Mock())

    file_gen_manager = FileGenerationManager(job, None, None, CONFIG_BROKER['local'])
    file_gen_manager.generate_e_file()

    # Header row followed by the two stubbed data rows, in order
    expected = [[
        'AwardeeOrRecipientUniqueIdentifier',
        'AwardeeOrRecipientLegalEntityName',
        'UltimateParentUniqueIdentifier',
        'UltimateParentLegalEntityName',
        'HighCompOfficer1FullName',
        'HighCompOfficer1Amount',
        'HighCompOfficer2FullName',
        'HighCompOfficer2Amount',
        'HighCompOfficer3FullName',
        'HighCompOfficer3Amount',
        'HighCompOfficer4FullName',
        'HighCompOfficer4Amount',
        'HighCompOfficer5FullName',
        'HighCompOfficer5Amount'
    ], [
        'a', 'b', 'c', 'd', '1a', '1b', '2a', '2b', '3a', '3b', '4a', '4b', '5a', '5b'
    ], [
        'A', 'B', 'C', 'D', '1A', '1B', '2A', '2B', '3A', '3B', '4A', '4B', '5A', '5B'
    ]]
    assert read_file_rows(file_path) == expected
def test_value_present(database):
    """budget_authority_unobligat_fyb populated does not require a previous
    submission"""
    populate_publish_status(database)

    sub_new = SubmissionFactory()
    ap_new = AppropriationFactory(submission_id=sub_new.submission_id)

    assert number_of_errors(_FILE, database, submission=sub_new, models=[ap_new]) == 0
def test_start_d_generation_submission_new(database, monkeypatch):
    """ A new file generation must update the upload Job and create a new
    FileGeneration object.

    Fix: removed the tautological assertions
    (`up_job.original_filename == up_job.original_filename`,
    `up_job.filename == up_job.filename`) and the duplicated
    start_date/end_date assertions, which asserted nothing.
    """
    sess = database.session
    original_filename = 'D2_test_gen.csv'

    submission = SubmissionFactory(submission_id=1000, reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31', cgac_code='123', frec_code=None,
                                   is_quarter_format=False, publishable=False, reporting_fiscal_year='2017')
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], error_message=None,
                        file_type_id=FILE_TYPE_DICT['award'], job_type_id=JOB_TYPE_DICT['file_upload'],
                        filename=None, submission_id=submission.submission_id,
                        original_filename=original_filename, file_generation_id=None)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], error_message=None,
                         file_type_id=FILE_TYPE_DICT['award'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'], filename=None,
                         submission_id=submission.submission_id, original_filename=original_filename)
    sess.add_all([submission, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # Upload job should now reference a fresh generation with new file names
    assert up_job.file_generation_id is not None
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(up_job.submission_id, original_filename)

    # A matching FileGeneration row must exist
    file_gen = sess.query(FileGeneration).filter_by(
        file_generation_id=up_job.file_generation_id).one_or_none()
    assert file_gen is not None
    assert file_gen.request_date == datetime.now().date()
    assert file_gen.start_date == date(2017, 1, 1)
    assert file_gen.end_date == date(2017, 1, 31)
    assert file_gen.file_type == 'D2'
    assert file_gen.file_path != gen_file_path_from_submission('None', original_filename)
def test_list_submissions_detached(database, monkeypatch):
    """list_submissions separates DABS and detached (d2/FABS) submissions."""
    user = UserFactory(user_id=1)
    dabs_sub = SubmissionFactory(user_id=1, submission_id=1, publish_status_id=1)
    fabs_sub = SubmissionFactory(user_id=1, submission_id=2, d2_submission=True, publish_status_id=1)
    add_models(database, [user, dabs_sub, fabs_sub])

    monkeypatch.setattr(fileHandler, 'g', Mock(user=user))
    result = list_submissions_result()
    d2_result = list_submissions_result(d2_submission=True)

    # Each listing only contains the submission of its own kind
    assert result['total'] == 1
    assert result['submissions'][0]['submission_id'] == dabs_sub.submission_id
    assert d2_result['total'] == 1
    assert d2_result['submissions'][0]['submission_id'] == fabs_sub.submission_id

    delete_models(database, [user, dabs_sub, fabs_sub])
def test_previous_unpublished(database):
    """ previous submission exists but is unpublished and has not been marked
    publishable """
    populate_publish_status(database)

    prior_sub = SubmissionFactory(publish_status_id=PUBLISH_STATUS_DICT['unpublished'], publishable=False)
    insert_submission(database, prior_sub)

    # New submission for the same agency/year, with the FYB fields blank
    new_sub = SubmissionFactory(cgac_code=prior_sub.cgac_code,
                                reporting_fiscal_year=prior_sub.reporting_fiscal_year)
    new_ocpa = ObjectClassProgramActivityFactory(submission_id=new_sub.submission_id,
                                                 ussgl480100_undelivered_or_fyb=None,
                                                 ussgl490800_undelivered_or_fyb=None)

    # The unpublished, non-publishable prior submission doesn't count, so one error
    assert number_of_errors(_FILE, database, submission=new_sub, models=[new_ocpa]) == 1
def test_check_generation_prereqs_bad_type(database):
    """ Tests that check_generation_prereqs raises an error if an invalid type
    is provided. """
    sess = database.session
    sub = SubmissionFactory()
    sess.add(sub)
    sess.commit()

    # 'A' is not a valid generation file type
    with pytest.raises(ResponseException):
        check_generation_prereqs(sub.submission_id, 'A')
def test_get_time_period(database):
    """ Tests get_time_period with subs """
    sess = database.session
    quart_sub = SubmissionFactory(submission_id=1, reporting_fiscal_year=2020, reporting_fiscal_period=6,
                                  d2_submission=False, is_quarter_format=True)
    month_sub = SubmissionFactory(submission_id=2,
                                  reporting_start_date=datetime.datetime(2020, 9, 10),
                                  d2_submission=False, is_quarter_format=False)
    sess.add_all([quart_sub, month_sub])

    # Pass cases: quarterly format vs monthly format labels
    assert get_time_period(quart_sub) == 'FY 20 / Q2'
    assert get_time_period(month_sub) == '09 / 2020'
def test_failure_success_ignore_recertification(database):
    """ Testing invalid program activity, ignored since FY2017 Q2 or Q3

    Fix: removed a duplicated `populate_publish_status(database)` call and
    corrected the docstring typo "ingored".
    """
    populate_publish_status(database)

    # Row whose program activity values don't match the reference data
    op = ObjectClassProgramActivityFactory(row_number=1, submission_id=1, agency_identifier='test2',
                                           main_account_code='test2', program_activity_name='test2',
                                           program_activity_code='test2')
    pa = ProgramActivityFactory(fiscal_year_quarter='FY14Q1', agency_id='test', allocation_transfer_id='test',
                                account_number='test', program_activity_name='test',
                                program_activity_code='test')

    # Test with published submission
    submission = SubmissionFactory(submission_id=1, reporting_fiscal_year='2017', reporting_fiscal_period=6,
                                   publish_status_id=PUBLISH_STATUS_DICT['updated'])
    assert number_of_errors(_FILE, database, models=[op, pa], submission=submission) == 0

    # Test with unpublished submission
    submission = SubmissionFactory(submission_id=2, reporting_fiscal_year='2017', reporting_fiscal_period=6,
                                   publish_status_id=PUBLISH_STATUS_DICT['unpublished'])
    assert number_of_errors(_FILE, database, models=[op, pa], submission=submission) == 0
def test_obligation_stats_for_submission_zero(database):
    """A submission with no financial data reports all-zero obligation totals."""
    submission = SubmissionFactory()  # no financials in db
    database.session.add(submission)
    database.session.commit()

    expected = {
        "total_obligations": 0,
        "total_procurement_obligations": 0,
        "total_assistance_obligations": 0
    }
    assert get_submission_stats(submission.submission_id) == expected
def test_submission_to_dict_for_status(database):
    """submission_to_dict_for_status carries through the CGAC code, agency
    name, and error count."""
    cgac = CGACFactory(cgac_code='abcdef', agency_name='Age')
    sub = SubmissionFactory(cgac_code='abcdef', number_of_errors=1234, publish_status_id=1)
    database.session.add_all([cgac, sub])
    database.session.commit()

    result = fileHandler.submission_to_dict_for_status(sub)
    assert result['cgac_code'] == 'abcdef'
    assert result['agency_name'] == 'Age'
    assert result['number_of_errors'] == 1234
def test_get_obligations(self):
    """The get_obligations endpoint responds 200 with an obligations total."""
    submission = SubmissionFactory()
    self.session.add(submission)
    self.session.commit()

    response = self.app.post_json(
        "/v1/get_obligations/",
        {"submission_id": submission.submission_id},
        headers={"x-session-id": self.session_id})

    assert response.status_code == 200
    assert "total_obligations" in response.json
def test_list_submission_users_frec_affil(database):
    """ Test listing users based on frec affiliations """
    cgacs = [CGACFactory(cgac_code='000'), CGACFactory(cgac_code='111')]
    frecs = [
        FRECFactory(frec_code='0000', cgac=cgacs[0]),
        FRECFactory(frec_code='1111', cgac=cgacs[1])
    ]
    first_user = UserFactory.with_cgacs(cgacs[0], name='Test User 1', email='*****@*****.**')
    other_user = UserFactory.with_cgacs(cgacs[1], name='Test User', email='*****@*****.**')
    # third_user's only affiliation is with the first FREC
    third_user = UserFactory(name='Frec User', email='*****@*****.**')
    third_user.affiliations = [
        UserAffiliation(frec=frecs[0], user_id=third_user.user_id,
                        permission_type_id=PERMISSION_TYPE_DICT['reader'])
    ]
    database.session.add_all(cgacs + frecs + [first_user, other_user])
    database.session.commit()

    sub_1 = SubmissionFactory(frec_code=frecs[0].frec_code, user_id=first_user.user_id, d2_submission=False)
    sub_2 = SubmissionFactory(cgac_code=cgacs[1].cgac_code, user_id=other_user.user_id, d2_submission=False)
    sub_3 = SubmissionFactory(frec_code=frecs[1].frec_code, user_id=other_user.user_id, d2_submission=False)
    database.session.add_all([sub_1, sub_2, sub_3])
    database.session.commit()

    g.user = third_user
    response = list_submission_users(False)
    user_response = json.loads(response.data.decode('UTF-8'))['users']

    # List the first user because they have a submission with that frec
    assert len(user_response) == 1
    assert user_response[0]['user_id'] == first_user.user_id
    assert user_response[0]['name'] == first_user.name
    assert user_response[0]['email'] == first_user.email
def test_list_submission_users_cgac_affil(database):
    """ Test listing users based on cgac affiliations """
    cgacs = [CGACFactory(cgac_code='000'), CGACFactory(cgac_code='111')]
    first_user = UserFactory.with_cgacs(cgacs[0], name='Test User 1', email='*****@*****.**')
    other_user = UserFactory.with_cgacs(cgacs[1], name='Test User', email='*****@*****.**')
    database.session.add_all(cgacs + [first_user, other_user])
    database.session.commit()

    # Both submissions belong to the first CGAC
    sub_1 = SubmissionFactory(cgac_code=cgacs[0].cgac_code, user_id=first_user.user_id, d2_submission=False)
    sub_2 = SubmissionFactory(cgac_code=cgacs[0].cgac_code, user_id=other_user.user_id, d2_submission=False)
    database.session.add_all([sub_1, sub_2])
    database.session.commit()

    g.user = first_user
    response = list_submission_users(False)
    user_response = json.loads(response.data.decode('UTF-8'))['users']

    # List both users because each has a submission with the cgac
    assert len(user_response) == 2
    assert {user_response[0]['user_id'], user_response[1]['user_id']} == \
        {first_user.user_id, other_user.user_id}
    assert {user_response[0]['name'], user_response[1]['name']} == {first_user.name, other_user.name}
    assert {user_response[0]['email'], user_response[1]['email']} == {first_user.email, other_user.email}

    g.user = other_user
    response = list_submission_users(False)
    user_response = json.loads(response.data.decode('UTF-8'))['users']

    # List only the submissions this user is part of because they have no
    # cgac/frec affiliations with either submission
    assert len(user_response) == 1
    assert user_response[0]['user_id'] == other_user.user_id
    assert user_response[0]['name'] == other_user.name
    assert user_response[0]['email'] == other_user.email
def test_value_present(database):
    """ gross_outlays_delivered_or_fyb populated does not require a previous
    submission """
    populate_publish_status(database)

    sub_new = SubmissionFactory()
    ocpa_new = ObjectClassProgramActivityFactory(submission_id=sub_new.submission_id)

    assert number_of_errors(_FILE, database, submission=sub_new, models=[ocpa_new]) == 0
def test_no_previous_submission(database):
    """ No previous submission and null budget_authority_unobligat_fyb"""
    populate_publish_status(database)

    sub_new = SubmissionFactory()
    ap_new = AppropriationFactory(submission_id=sub_new.submission_id,
                                  budget_authority_unobligat_fyb=None)

    # Missing FYB value with no prior submission is an error
    assert number_of_errors(_FILE, database, submission=sub_new, models=[ap_new]) == 1
def test_get_upload_file_url_invalid_for_type(database):
    """ Test that a proper error is thrown when a file type that doesn't match
    the submission is provided to get_upload_file_url. """
    dabs_sub = SubmissionFactory(submission_id=1, d2_submission=False)
    fabs_sub = SubmissionFactory(submission_id=2, d2_submission=True)
    add_models(database, [dabs_sub, fabs_sub])

    # check invalid type for FABS
    json_response = fileHandler.get_upload_file_url(fabs_sub, 'A')
    assert json_response.status_code == 400
    response = json.loads(json_response.get_data().decode('utf-8'))
    assert response['message'] == 'Invalid file type for this submission'

    # check invalid type for DABS
    json_response = fileHandler.get_upload_file_url(dabs_sub, 'FABS')
    assert json_response.status_code == 400
    response = json.loads(json_response.get_data().decode('utf-8'))
    assert response['message'] == 'Invalid file type for this submission'
def test_start_d_generation_submission_different_format(database, monkeypatch):
    """ Cached D files must update the upload Job with the FileGeneration
    data. """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(submission_id=1000, reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31', frec_code='1234', cgac_code=None,
                                   is_quarter_format=False, publishable=False, reporting_fiscal_year='2017')
    # Cached generation is in csv format; the request below asks for txt
    file_gen = FileGenerationFactory(request_date=datetime.now().date(), start_date='2017-01-01',
                                     end_date='2017-01-31', file_type='D2', agency_code='1234',
                                     agency_type='awarding', is_cached_file=True, file_path=file_path,
                                     file_format='csv')
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], file_type_id=FILE_TYPE_DICT['award'],
                        error_message=None, job_type_id=JOB_TYPE_DICT['file_upload'], filename=None,
                        original_filename=None, submission_id=submission.submission_id)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'], error_message=None,
                         file_type_id=FILE_TYPE_DICT['award'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'], filename=None,
                         original_filename=None, submission_id=submission.submission_id)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding', file_format='txt')

    # A different file format means the cached generation cannot be reused
    assert up_job.file_generation_id != file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(up_job.submission_id, original_filename)
def test_narratives(database):
    """Verify that we can add, retrieve, and update submission narratives. Not
    quite a unit test as it covers a few functions in sequence"""
    sub1, sub2 = SubmissionFactory(), SubmissionFactory()
    database.session.add_all([sub1, sub2])
    database.session.commit()

    # Write some narratives
    result = fileHandler.update_narratives(sub1, {'B': 'BBBBBB', 'E': 'EEEEEE', 'FABS': 'This wont show up'})
    assert result.status_code == 200
    result = fileHandler.update_narratives(sub2, {'A': 'Submission2'})
    assert result.status_code == 200

    # Check the narratives
    result = fileHandler.narratives_for_submission(sub1)
    result = json.loads(result.get_data().decode('UTF-8'))
    assert result == {
        'A': '',
        'B': 'BBBBBB',
        'C': '',
        'D1': '',
        'D2': '',
        'E': 'EEEEEE',
        'F': ''
    }

    # Replace the narratives
    result = fileHandler.update_narratives(sub1, {'A': 'AAAAAA', 'E': 'E2E2E2'})
    assert result.status_code == 200

    # Verify the change worked
    result = fileHandler.narratives_for_submission(sub1)
    result = json.loads(result.get_data().decode('UTF-8'))
    assert result == {
        'A': 'AAAAAA',
        'B': '',
        'C': '',
        'D1': '',
        'D2': '',
        'E': 'E2E2E2',
        'F': ''
    }
def test_success_ignore_old_fy2017(database):
    """ Testing invalid program_activity, ignored since FY2017Q2 or FY2017Q3 """
    populate_publish_status(database)

    # Financial row whose program activity doesn't match the reference record
    af = AwardFinancialFactory(row_number=1, submission_id=1, agency_identifier='test',
                               main_account_code='test', program_activity_name='test',
                               program_activity_code='test')
    pa = ProgramActivityFactory(fiscal_year_quarter='FY17Q3', agency_id='test2',
                                allocation_transfer_id='test2', account_number='test2',
                                program_activity_name='test2', program_activity_code='test2')

    # Test with published submission
    submission = SubmissionFactory(submission_id=1, reporting_fiscal_year='2017', reporting_fiscal_period=9,
                                   publish_status_id=PUBLISH_STATUS_DICT['published'])
    assert number_of_errors(_FILE, database, models=[af, pa], submission=submission) == 0

    # Test with unpublished submission
    submission = SubmissionFactory(submission_id=2, reporting_fiscal_year='2017', reporting_fiscal_period=9,
                                   publish_status_id=PUBLISH_STATUS_DICT['unpublished'])
    assert number_of_errors(_FILE, database, models=[af, pa], submission=submission) == 0
def test_generate_e_file_query(monkeypatch, mock_broker_config_paths, database):
    """ Verify that generate_e_file makes an appropriate query (matching both
    D1 and D2 entries) """
    # Generate several file D1 entries, largely with the same submission_id, and with two overlapping DUNS. Generate
    # several D2 entries with the same submission_id as well
    sess = database.session
    sub = SubmissionFactory()
    sub_2 = SubmissionFactory()
    sess.add_all([sub, sub_2])
    sess.commit()

    file_path = str(mock_broker_config_paths['broker_files'].join('e_test1'))
    job = JobFactory(
        job_status=database.session.query(JobStatus).filter_by(name='running').one(),
        job_type=database.session.query(JobType).filter_by(name='file_upload').one(),
        file_type=database.session.query(FileType).filter_by(name='executive_compensation').one(),
        filename=file_path,
        original_filename='e_test1',
        submission_id=sub.submission_id,
    )
    database.session.add(job)
    database.session.commit()

    model = AwardProcurementFactory(submission_id=sub.submission_id)
    aps = [AwardProcurementFactory(submission_id=sub.submission_id) for _ in range(4)]
    afas = [AwardFinancialAssistanceFactory(submission_id=sub.submission_id) for _ in range(5)]
    # Duplicate DUNS within the submission should not appear twice in the query
    same_duns = AwardProcurementFactory(submission_id=sub.submission_id,
                                        awardee_or_recipient_uniqu=model.awardee_or_recipient_uniqu)
    # Rows from another submission must be excluded entirely
    unrelated = AwardProcurementFactory(submission_id=sub_2.submission_id)
    sess.add_all(aps + afas + [model, same_duns, unrelated])
    sess.commit()

    monkeypatch.setattr(file_generation_handler, 'mark_job_status', Mock())
    monkeypatch.setattr(file_generation_handler.fileE, 'retrieve_rows', Mock(return_value=[]))

    with Flask(__name__).app_context():
        file_generation_handler.generate_e_file(database.session, job, is_local=True)

    # [0][0] gives us the first, non-keyword args
    call_args = file_generation_handler.fileE.retrieve_rows.call_args[0][0]
    expected = [ap.awardee_or_recipient_uniqu for ap in aps]
    expected.append(model.awardee_or_recipient_uniqu)
    expected.extend(afa.awardee_or_recipient_uniqu for afa in afas)
    assert list(sorted(call_args)) == list(sorted(expected))
def test_no_previous_submission(database):
    """ No previous submission and null gross_outlays_delivered_or_fyb """
    populate_publish_status(database)

    sub_new = SubmissionFactory()
    ocpa_new = ObjectClassProgramActivityFactory(submission_id=sub_new.submission_id,
                                                 gross_outlays_delivered_or_fyb=None)

    # Missing FYB value with no prior submission is an error
    assert number_of_errors(_FILE, database, submission=sub_new, models=[ocpa_new]) == 1
def test_submission_report_url_local(monkeypatch, tmpdir):
    """In local mode, submission_report_url builds a path under broker_files."""
    file_path = str(tmpdir) + os.path.sep
    monkeypatch.setattr(fileHandler, 'CONFIG_BROKER', {
        'local': True,
        'broker_files': file_path
    })

    json_response = fileHandler.submission_report_url(
        SubmissionFactory(submission_id=4), True, 'some_file', 'another_file')
    url = json.loads(json_response.get_data().decode('utf-8'))['url']

    # Cross-file warning report name embeds both file identifiers
    assert url == os.path.join(file_path, 'submission_4_cross_warning_some_file_another_file.csv')
def test_get_submission_metadata_test_submission(database):
    """ Tests the get_submission_metadata function for an unpublished (test)
    DABS submission alongside a certified one. """
    sess = database.session
    now = datetime.datetime.utcnow()

    cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
    # A certified (updated) submission and an unpublished one for the same agency/period
    sub1 = SubmissionFactory(submission_id=1, created_at=now, updated_at=now, cgac_code=cgac.cgac_code,
                             reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                             publish_status_id=PUBLISH_STATUS_DICT['updated'], d2_submission=False,
                             number_of_errors=40, number_of_warnings=200)
    sub2 = SubmissionFactory(submission_id=2, created_at=now, updated_at=now, cgac_code=cgac.cgac_code,
                             reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                             publish_status_id=PUBLISH_STATUS_DICT['unpublished'], d2_submission=False,
                             number_of_errors=40, number_of_warnings=200)
    sess.add_all([cgac, sub1, sub2])
    sess.commit()

    # Test for test submission
    expected_results = {
        'cgac_code': cgac.cgac_code,
        'frec_code': None,
        'agency_name': cgac.agency_name,
        'number_of_errors': 40,
        'number_of_warnings': 200,
        'number_of_rows': 0,
        'total_size': 0,
        'created_on': now.strftime('%m/%d/%Y'),
        'last_updated': now.strftime("%Y-%m-%dT%H:%M:%S"),
        'last_validated': '',
        'reporting_period': 'Q1/2017',
        'publish_status': 'unpublished',
        'quarterly_submission': True,
        'certified_submission': 1,
        'fabs_submission': False,
        'fabs_meta': None
    }
    results = get_submission_metadata(sub2)
    assert results == expected_results
def test_requires_submission_perm_no_submission(database, test_app):
    """If no submission exists, we should see an exception"""
    sub = SubmissionFactory(user=UserFactory())
    database.session.add(sub)
    database.session.commit()
    g.user = sub.user

    wrapped = permissions.requires_submission_perms('writer')(Mock())
    # Does not raise exception
    wrapped(sub.submission_id)

    # different submission id
    with pytest.raises(ResponseException):
        wrapped(sub.submission_id + 1)
def test_submission_report_url_s3(monkeypatch):
    """In non-local mode, submission_report_url signs an S3 URL for the
    error report."""
    monkeypatch.setattr(fileHandler, 'CONFIG_BROKER',
                        {'local': False, 'submission_bucket_mapping': 'test/path'})
    s3_url_handler = Mock()
    s3_url_handler.return_value.get_signed_url.return_value = 'some/url/here.csv'
    monkeypatch.setattr(fileHandler, 'S3Handler', s3_url_handler)

    json_response = fileHandler.submission_report_url(
        SubmissionFactory(submission_id=2), False, 'some_file', None)
    url = json.loads(json_response.get_data().decode('utf-8'))['url']

    assert url == 'some/url/here.csv'
    # The handler must be asked for the correct report object
    assert s3_url_handler.return_value.get_signed_url.call_args == (
        ('errors', 'submission_2_some_file_error_report.csv'),
        {'method': 'get_object', 'url_mapping': 'test/path'}
    )
def test_get_submission_metadata_quarterly_dabs_cgac(database):
    """ Tests the get_submission_metadata function for quarterly dabs
    submissions """
    sess = database.session
    now = datetime.datetime.utcnow()
    now_plus_10 = now + datetime.timedelta(minutes=10)

    cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
    frec_cgac = CGACFactory(cgac_code='999', agency_name='FREC CGAC')
    frec = FRECFactory(frec_code='0001', agency_name='FREC Agency', cgac=frec_cgac)
    sub = SubmissionFactory(submission_id=1, created_at=now, updated_at=now_plus_10, cgac_code=cgac.cgac_code,
                            reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                            publish_status_id=PUBLISH_STATUS_DICT['updated'], d2_submission=False,
                            number_of_errors=40, number_of_warnings=200)

    # Job for submission
    job = JobFactory(submission_id=sub.submission_id, last_validated=now_plus_10,
                     job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
                     job_status=sess.query(JobStatus).filter_by(name='finished').one(),
                     file_type=sess.query(FileType).filter_by(name='appropriations').one(),
                     number_of_rows=3, file_size=7655)
    job_2 = JobFactory(submission_id=sub.submission_id, last_validated=now_plus_10,
                       job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
                       job_status=sess.query(JobStatus).filter_by(name='finished').one(),
                       file_type=sess.query(FileType).filter_by(name='program_activity').one(),
                       number_of_rows=7, file_size=12345)
    sess.add_all([cgac, frec_cgac, frec, sub, job, job_2])
    sess.commit()

    # Test for Quarterly, updated DABS cgac submission
    expected_results = {
        'cgac_code': cgac.cgac_code,
        'frec_code': None,
        'agency_name': cgac.agency_name,
        'number_of_errors': 40,
        'number_of_warnings': 200,
        'number_of_rows': 10,
        'total_size': 20000,
        'created_on': now.strftime('%m/%d/%Y'),
        'last_updated': now_plus_10.strftime("%Y-%m-%dT%H:%M:%S"),
        'last_validated': now_plus_10.strftime('%m/%d/%Y'),
        'reporting_period': 'Q1/2017',
        'publish_status': 'updated',
        'quarterly_submission': True,
        'fabs_submission': False,
        'fabs_meta': None
    }
    results = get_submission_metadata(sub)
    assert results == expected_results
def test_success(database):
    """ Test Prior to FY22, if the DisasterEmergencyFundCode element has a
    valid COVID-19 related code and the row is a balance row, then
    GrossOutlayAmountByAward_CPE cannot be blank. Beginning in FY22, if the
    row is a balance row, then GrossOutlayAmountByAward_CPE cannot be blank.
    """
    # gross_outlay_amount_by_awa_cpe populated
    op1 = AwardFinancialFactory(disaster_emergency_fund_code='l', transaction_obligated_amou=None,
                                gross_outlay_amount_by_awa_cpe=2)
    # 0 in either field is still populated
    op2 = AwardFinancialFactory(disaster_emergency_fund_code='m', transaction_obligated_amou=0,
                                gross_outlay_amount_by_awa_cpe=None)
    op3 = AwardFinancialFactory(disaster_emergency_fund_code='m', transaction_obligated_amou=None,
                                gross_outlay_amount_by_awa_cpe=0)
    # wrong DEFC
    op4 = AwardFinancialFactory(disaster_emergency_fund_code='z', transaction_obligated_amou=None,
                                gross_outlay_amount_by_awa_cpe=None)
    # DEFC but not COVID
    op5 = AwardFinancialFactory(disaster_emergency_fund_code='a', transaction_obligated_amou=None,
                                gross_outlay_amount_by_awa_cpe=None)
    # populated TOA
    op6 = AwardFinancialFactory(disaster_emergency_fund_code='n', transaction_obligated_amou=1,
                                gross_outlay_amount_by_awa_cpe=None)

    defc1 = DEFCFactory(code='L', group='covid_19')
    defc2 = DEFCFactory(code='M', group='covid_19')
    defc3 = DEFCFactory(code='N', group='covid_19')
    defc4 = DEFCFactory(code='A')

    errors = number_of_errors(_FILE, database,
                              models=[op1, op2, op3, op4, op5, op6, defc1, defc2, defc3, defc4])
    assert errors == 0

    # Testing for a submission after 2022
    sub = SubmissionFactory(submission_id=2, reporting_fiscal_period=9, reporting_fiscal_year=2022,
                            cgac_code='TEST', frec_code=None)
    op1 = AwardFinancialFactory(disaster_emergency_fund_code='p', transaction_obligated_amou=None,
                                gross_outlay_amount_by_awa_cpe=2, submission_id=2)
    errors = number_of_errors(_FILE, database, models=[op1], submission=sub)
    assert errors == 0
def test_certify_dabs_submission(database, monkeypatch):
    """ Tests the certify_dabs_submission function """
    with Flask('test-app').app_context():
        now = datetime.datetime.utcnow()
        sess = database.session

        # Certifiable submission: publishable, no errors, inside the
        # quarterly revalidation window
        user = UserFactory()
        cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        submission = SubmissionFactory(created_at=now, updated_at=now, cgac_code=cgac.cgac_code,
                                       reporting_fiscal_period=3, reporting_fiscal_year=2017,
                                       is_quarter_format=True, publishable=True,
                                       publish_status_id=PUBLISH_STATUS_DICT['unpublished'],
                                       d2_submission=False, number_of_errors=0, number_of_warnings=200,
                                       certifying_user_id=None)
        quarter_reval = QuarterlyRevalidationThresholdFactory(
            year=2017, quarter=1, window_start=now - datetime.timedelta(days=1))
        sess.add_all([user, cgac, submission, quarter_reval])
        sess.commit()

        # Comment and validation jobs attached to the submission
        comment = CommentFactory(file_type_id=FILE_TYPE_DICT['appropriations'], comment='Test',
                                 submission_id=submission.submission_id)
        job_1 = JobFactory(submission_id=submission.submission_id, last_validated=now,
                           job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        job_2 = JobFactory(submission_id=submission.submission_id,
                           last_validated=now + datetime.timedelta(days=1),
                           job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add_all([job_1, job_2, comment])
        sess.commit()

        # Flex field that should be copied into the certified tables
        flex_field = FlexField(file_type_id=FILE_TYPE_DICT['appropriations'], header='flex_test',
                               job_id=job_1.job_id, submission_id=submission.submission_id,
                               row_number=2, cell=None)
        sess.add(flex_field)
        sess.commit()

        g.user = user
        file_handler = fileHandler.FileHandler({}, is_local=True)
        monkeypatch.setattr(file_handler, 'move_certified_files', Mock(return_value=True))
        monkeypatch.setattr(fileHandler.GlobalDB, 'db', Mock(return_value=database))
        certify_dabs_submission(submission, file_handler)
        sess.refresh(submission)

        # Certify history recorded and submission marked certified/published
        certify_history = sess.query(CertifyHistory).filter_by(
            submission_id=submission.submission_id).one_or_none()
        assert certify_history is not None
        assert submission.certifying_user_id == user.user_id
        assert submission.publish_status_id == PUBLISH_STATUS_DICT['published']

        # Make sure certified comments are created
        certified_comment = sess.query(CertifiedComment).filter_by(
            submission_id=submission.submission_id).one_or_none()
        assert certified_comment is not None

        # Make sure certified flex fields are created
        certified_flex = sess.query(CertifiedFlexField).filter_by(
            submission_id=submission.submission_id).one_or_none()
        assert certified_flex is not None