def test_certify_dabs_submission(database, monkeypatch):
    """ Tests the certify_dabs_submission function """
    # NOTE(review): a second test with this exact name is defined later in this file.
    # Pytest keeps only the later definition, so this version is shadowed and never
    # runs. Unlike the later copy it creates no QuarterlyRevalidationThreshold, so it
    # appears to be an obsolete predecessor — confirm and remove or rename.
    with Flask('test-app').app_context():
        now = datetime.datetime.utcnow()
        sess = database.session
        user = UserFactory()
        cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        # Quarterly submission with no errors, eligible for certification
        submission = SubmissionFactory(created_at=now, updated_at=now, cgac_code=cgac.cgac_code,
                                       reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                                       publishable=True, publish_status_id=PUBLISH_STATUS_DICT['unpublished'],
                                       d2_submission=False, number_of_errors=0, number_of_warnings=200,
                                       certifying_user_id=None)
        sess.add_all([user, cgac, submission])
        sess.commit()

        g.user = user
        file_handler = fileHandler.FileHandler({}, is_local=True)
        # Stub out file movement and the DB accessor so only certification logic runs
        monkeypatch.setattr(file_handler, 'move_certified_files', Mock(return_value=True))
        monkeypatch.setattr(fileHandler.GlobalDB, 'db', Mock(return_value=database))
        certify_dabs_submission(submission, file_handler)

        # Certification should record history and mark the submission published
        sess.refresh(submission)
        certify_history = sess.query(CertifyHistory).filter_by(submission_id=submission.submission_id).one_or_none()
        assert certify_history is not None
        assert submission.certifying_user_id == user.user_id
        assert submission.publish_status_id == PUBLISH_STATUS_DICT['published']
def test_certify_dabs_submission_quarterly_revalidation_too_early(database):
    """ Tests that a DABS submission that was last validated before the window start cannot be certified. """
    with Flask('test-app').app_context():
        window_start = datetime.datetime.utcnow()
        day_before = window_start - datetime.timedelta(days=1)
        sess = database.session

        certifier = UserFactory()
        agency = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        submission = SubmissionFactory(
            created_at=day_before, updated_at=day_before, cgac_code=agency.cgac_code,
            reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True, publishable=True,
            publish_status_id=PUBLISH_STATUS_DICT['unpublished'], d2_submission=False,
            number_of_errors=0, number_of_warnings=200, certifying_user_id=None)
        threshold = QuarterlyRevalidationThresholdFactory(year=2017, quarter=1, window_start=window_start)
        sess.add_all([certifier, agency, submission, threshold])
        sess.commit()

        # The validation job finished before the window opened, so certification must be rejected
        stale_job = JobFactory(submission_id=submission.submission_id, last_validated=day_before,
                               job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add(stale_job)
        sess.commit()

        g.user = certifier
        response = certify_dabs_submission(submission, fileHandler.FileHandler({}, is_local=True))
        payload = json.loads(response.data.decode('UTF-8'))

        assert response.status_code == 400
        expected = ("This submission was last validated or its D files generated before the start of the "
                    "submission window ({}). Please revalidate before certifying."
                    .format(threshold.window_start.strftime('%m/%d/%Y')))
        assert payload['message'] == expected
def test_certify_dabs_submission_quarterly_revalidation_multiple_thresholds(database):
    """ Tests that a DABS submission is not affected by a different quarterly revalidation threshold than the one
        that matches its reporting_start_date.
    """
    with Flask('test-app').app_context():
        now = datetime.datetime.utcnow()
        earlier = now - datetime.timedelta(days=1)
        sess = database.session
        user = UserFactory()
        cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        # FY2017 Q1 submission (reporting_start_date in Oct 2016)
        submission = SubmissionFactory(created_at=earlier, updated_at=earlier, cgac_code=cgac.cgac_code,
                                       reporting_fiscal_period=3, reporting_fiscal_year=2017,
                                       reporting_start_date='2016-10-01', is_quarter_format=True, publishable=True,
                                       publish_status_id=PUBLISH_STATUS_DICT['unpublished'], d2_submission=False,
                                       number_of_errors=0, number_of_warnings=200, certifying_user_id=None)
        # Q1 window is already open; the Q2 window opens in the future and must not apply here
        quarter_reval = QuarterlyRevalidationThresholdFactory(year=2017, quarter=1, window_start=earlier)
        quarter_reval_2 = QuarterlyRevalidationThresholdFactory(year=2017, quarter=2,
                                                                window_start=now + datetime.timedelta(days=10))
        sess.add_all([user, cgac, submission, quarter_reval, quarter_reval_2])
        sess.commit()
        # Validation happened after the Q1 window start, so certification should succeed
        job = JobFactory(submission_id=submission.submission_id, last_validated=now,
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add(job)
        sess.commit()

        g.user = user
        file_handler = fileHandler.FileHandler({}, is_local=True)
        response = certify_dabs_submission(submission, file_handler)
        assert response.status_code == 200
def test_certify_dabs_submission_revalidation_needed(database):
    """ Tests the certify_dabs_submission function preventing certification when revalidation threshold isn't met """
    with Flask('test-app').app_context():
        threshold_date = datetime.datetime.utcnow()
        day_before = threshold_date - datetime.timedelta(days=1)
        sess = database.session

        certifier = UserFactory()
        agency = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        submission = SubmissionFactory(
            created_at=day_before, updated_at=day_before, cgac_code=agency.cgac_code,
            reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True, publishable=True,
            publish_status_id=PUBLISH_STATUS_DICT['unpublished'], d2_submission=False,
            number_of_errors=0, number_of_warnings=200, certifying_user_id=None)
        # Global revalidation threshold falls after the submission's last validation
        threshold = RevalidationThresholdFactory(revalidation_date=threshold_date)
        sess.add_all([certifier, agency, submission, threshold])
        sess.commit()

        stale_job = JobFactory(submission_id=submission.submission_id, last_validated=day_before,
                               job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add(stale_job)
        sess.commit()

        g.user = certifier
        response = certify_dabs_submission(submission, fileHandler.FileHandler({}, is_local=True))
        payload = json.loads(response.data.decode('UTF-8'))

        assert response.status_code == 400
        expected = ("This submission has not been validated since before the revalidation threshold ({}), "
                    "it must be revalidated before certifying."
                    .format(threshold_date.strftime('%Y-%m-%d %H:%M:%S')))
        assert payload['message'] == expected
def test_certify_dabs_submission_quarterly_revalidation_not_in_db(database):
    """ Tests that a DABS submission that doesnt have its year/quarter in the system won't be able to certify. """
    with Flask('test-app').app_context():
        now = datetime.datetime.utcnow()
        sess = database.session

        certifier = UserFactory()
        agency = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        submission = SubmissionFactory(
            created_at=now, updated_at=now, cgac_code=agency.cgac_code,
            reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True, publishable=True,
            publish_status_id=PUBLISH_STATUS_DICT['unpublished'], d2_submission=False,
            number_of_errors=0, number_of_warnings=200, certifying_user_id=None)
        # Deliberately no QuarterlyRevalidationThreshold row for this year/quarter
        sess.add_all([certifier, agency, submission])
        sess.commit()

        validated_job = JobFactory(submission_id=submission.submission_id, last_validated=now,
                                   job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add(validated_job)
        sess.commit()

        g.user = certifier
        response = certify_dabs_submission(submission, fileHandler.FileHandler({}, is_local=True))
        payload = json.loads(response.data.decode('UTF-8'))

        assert response.status_code == 400
        assert payload['message'] == ("No submission window for this year and quarter was found. If this is an "
                                      "error, please contact the Service Desk.")
def test_build_file_map_file(monkeypatch):
    """ build_file_map should give each uploaded file object a timestamped name prefixed by the submission id. """
    monkeypatch.setattr(fileHandler, 'CONFIG_BROKER', {'local': False})
    monkeypatch.setattr(S3Handler, 'get_timestamped_filename', Mock(side_effect=lambda x: "123_" + x))

    def make_upload(filename):
        # Minimal stand-in for a werkzeug upload: a stream carrying a filename attribute
        stream = io.BytesIO(b"something")
        stream.filename = filename
        return stream

    file_dict = {
        "fabs": make_upload('fabs.csv'),
        "award_financial": make_upload('award.csv'),
        "program_activity": make_upload('pa.csv'),
        "appropriations": make_upload('approp.csv'),
    }
    file_type_list = ["fabs", "appropriations", "award_financial", "program_activity"]
    upload_files = []

    handler = fileHandler.FileHandler({})
    handler.build_file_map(file_dict, file_type_list, upload_files, SubmissionFactory(submission_id=3))

    for uploaded in upload_files:
        assert uploaded.upload_name == "3/123_" + uploaded.file_name
def test_submission_bad_dates(start_date, end_date, quarter_flag, submission):
    """Verify that submission date checks fail on bad input"""
    # Dates must be in mm/yyyy format; quarterly submissions may span a single
    # quarter only and must end with month 3, 6, 9, or 12.
    handler = fileHandler.FileHandler(Mock())
    with pytest.raises(ResponseException):
        handler.check_submission_dates(start_date, end_date, quarter_flag, submission)
def test_list_submissions_failure(database, job_constants, monkeypatch):
    """ list_submissions should report an error status for problem submissions.

    Three scenarios are checked, each with exactly one submission in the DB:
      * submission with recorded errors           -> status "validation_errors"
      * validation job in the 'failed' job status -> status "failed"
      * validation job in the 'invalid' job status-> status "file_errors"

    The original copy-pasted each scenario (and parsed each response twice);
    the shared steps are factored into local helpers, behavior unchanged.
    """
    fh = fileHandler.FileHandler(Mock())
    mock_value = Mock()
    mock_value.getName.return_value = 1
    monkeypatch.setattr(fileHandler, 'LoginSession', mock_value)
    sess = database.session

    def assert_single_submission_status(models, expected_status):
        # Add the models, list submissions, check the lone result's status, then clean up.
        add_models(database, models)
        response = json.loads(fh.list_submissions(PAGE, LIMIT, CERTIFIED).get_data().decode("utf-8"))
        assert response['total'] == 1
        assert response['submissions'][0]['status'] == expected_status
        delete_models(database, models)

    def validation_job(job_status_name):
        # Build a csv_record_validation job for submission 1 in the given job status.
        return JobFactory(
            submission_id=1,
            job_status=sess.query(JobStatus).filter_by(name=job_status_name).one(),
            job_type=sess.query(JobType).filter_by(name='csv_record_validation').one(),
            file_type=sess.query(FileType).filter_by(name='award').one())

    # Submission with errors recorded directly on it
    user = UserFactory(user_id=1, cgac_code='cgac')
    sub = SubmissionFactory(user_id=1, submission_id=1, number_of_errors=1, cgac_code='cgac')
    assert_single_submission_status([user, sub], "validation_errors")

    # Submission whose validation job failed
    user = UserFactory(user_id=1, cgac_code='cgac')
    sub = SubmissionFactory(user_id=1, submission_id=1, cgac_code='cgac')
    assert_single_submission_status([user, sub, validation_job('failed')], "failed")

    # Submission whose validation job found the file invalid
    user = UserFactory(user_id=1, cgac_code='cgac')
    sub = SubmissionFactory(user_id=1, submission_id=1, cgac_code='cgac')
    assert_single_submission_status([user, sub, validation_job('invalid')], "file_errors")
def test_certify_dabs_submission(database, monkeypatch):
    """ Tests the certify_dabs_submission function """
    with Flask('test-app').app_context():
        now = datetime.datetime.utcnow()
        sess = database.session
        user = UserFactory()
        cgac = CGACFactory(cgac_code='001', agency_name='CGAC Agency')
        # Quarterly submission with no errors, eligible for certification
        submission = SubmissionFactory(created_at=now, updated_at=now, cgac_code=cgac.cgac_code,
                                       reporting_fiscal_period=3, reporting_fiscal_year=2017, is_quarter_format=True,
                                       publishable=True, publish_status_id=PUBLISH_STATUS_DICT['unpublished'],
                                       d2_submission=False, number_of_errors=0, number_of_warnings=200,
                                       certifying_user_id=None)
        # Submission window already open (started yesterday)
        quarter_reval = QuarterlyRevalidationThresholdFactory(year=2017, quarter=1,
                                                              window_start=now - datetime.timedelta(days=1))
        sess.add_all([user, cgac, submission, quarter_reval])
        sess.commit()
        # A comment and two validation jobs (both validated after the window start)
        comment = CommentFactory(file_type_id=FILE_TYPE_DICT['appropriations'], comment='Test',
                                 submission_id=submission.submission_id)
        job_1 = JobFactory(submission_id=submission.submission_id, last_validated=now,
                           job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        job_2 = JobFactory(submission_id=submission.submission_id, last_validated=now + datetime.timedelta(days=1),
                           job_type_id=JOB_TYPE_DICT['csv_record_validation'])
        sess.add_all([job_1, job_2, comment])
        sess.commit()
        # A flex field attached to the first validation job
        flex_field = FlexField(file_type_id=FILE_TYPE_DICT['appropriations'], header='flex_test', job_id=job_1.job_id,
                               submission_id=submission.submission_id, row_number=2, cell=None)
        sess.add(flex_field)
        sess.commit()

        g.user = user
        file_handler = fileHandler.FileHandler({}, is_local=True)
        # Stub out file movement and the DB accessor so only certification logic runs
        monkeypatch.setattr(file_handler, 'move_certified_files', Mock(return_value=True))
        monkeypatch.setattr(fileHandler.GlobalDB, 'db', Mock(return_value=database))
        certify_dabs_submission(submission, file_handler)

        # Certification should record history and mark the submission published
        sess.refresh(submission)
        certify_history = sess.query(CertifyHistory).filter_by(submission_id=submission.submission_id).one_or_none()
        assert certify_history is not None
        assert submission.certifying_user_id == user.user_id
        assert submission.publish_status_id == PUBLISH_STATUS_DICT['published']

        # Make sure certified comments are created
        certified_comment = sess.query(CertifiedComment).filter_by(submission_id=submission.submission_id).one_or_none()
        assert certified_comment is not None

        # Make sure certified flex fields are created
        certified_flex = sess.query(CertifiedFlexField).filter_by(submission_id=submission.submission_id).one_or_none()
        assert certified_flex is not None
def test_revert_submission_fabs_submission(database):
    """ Tests reverting an updated DABS certification failure for FABS submission """
    sess = database.session
    fabs_sub = Submission(d2_submission=True)
    sess.add(fabs_sub)
    sess.commit()

    handler = fileHandler.FileHandler({}, is_local=True)
    # FABS (d2) submissions cannot be reverted; expect a 400 ResponseException
    with pytest.raises(ResponseException) as resp_except:
        revert_to_certified(fabs_sub, handler)

    assert resp_except.value.status == 400
    assert str(resp_except.value) == 'Submission must be a DABS submission.'
def test_build_file_map_string(monkeypatch):
    """ build_file_map should timestamp filename strings and prefix them with the submission id. """
    monkeypatch.setattr(fileHandler, 'CONFIG_BROKER', {'local': False})
    monkeypatch.setattr(S3Handler, 'get_timestamped_filename', Mock(side_effect=lambda x: "123_" + x))

    file_type_list = ["fabs", "appropriations", "award_financial", "program_activity"]
    file_dict = {
        "fabs": "fabs_file.csv",
        "appropriations": "appropriations.csv",
        "award_financial": "award_financial.csv",
        "program_activity": "program_activity.csv",
    }
    upload_files = []

    handler = fileHandler.FileHandler({})
    handler.build_file_map(file_dict, file_type_list, upload_files, SubmissionFactory(submission_id=3))

    for uploaded in upload_files:
        assert uploaded.upload_name == "3/123_" + uploaded.file_name
def test_submission_good_dates(start_date, end_date, quarter_flag, submission):
    """ Verify that check_submission_dates parses valid mm/yyyy dates, and that explicitly-supplied
        dates override the reporting dates already on the submission.
    """
    fh = fileHandler.FileHandler(Mock())
    date_format = '%m/%Y'
    output_start_date, output_end_date = fh.check_submission_dates(
        start_date, end_date, quarter_flag, submission)
    assert isinstance(output_start_date, date)
    assert isinstance(output_end_date, date)
    # if we explicitly give a submission beginning or end date, those dates should
    # override the ones on the existing submission
    if start_date is None:
        assert output_start_date == submission.reporting_start_date
    else:
        # Fix: elsewhere in this file `datetime` is the module (datetime.datetime.utcnow()),
        # so strptime must be reached through the datetime.datetime class; the bare
        # `datetime.strptime` would raise AttributeError.
        assert output_start_date == datetime.datetime.strptime(start_date, date_format).date()
    if end_date is None:
        assert output_end_date == submission.reporting_end_date
    else:
        assert output_end_date == datetime.datetime.strptime(end_date, date_format).date()
def test_get_signed_url_for_submission_file_local(database, monkeypatch):
    """ In local mode the signed URL is simply the server path plus the requested file name. """
    submission = SubmissionFactory()
    database.session.add(submission)
    database.session.commit()

    # NOTE: this handler is built with the legacy camelCase kwargs (isLocal/serverPath)
    file_handler = fileHandler.FileHandler(Mock(), isLocal=True, serverPath="/test/server/path/")
    monkeypatch.setattr(fileHandler, 'user_agency_matches', Mock(return_value=True))

    # The request supplies the file name first, then the submission id
    request_mock = Mock()
    request_mock.return_value.getValue.side_effect = ['file_name', str(submission.submission_id)]
    monkeypatch.setattr(fileHandler, 'RequestDictionary', request_mock)

    response = file_handler.get_signed_url_for_submission_file()
    assert json.loads(response.get_data().decode("utf-8"))['url'] == "/test/server/path/file_name.csv"
def test_revert_submission_not_updated_submission(database):
    """ Tests reverting an updated DABS certification failure for non-updated submission """
    sess = database.session
    certified_sub = Submission(publish_status_id=PUBLISH_STATUS_DICT['published'], d2_submission=False)
    uncertified_sub = Submission(publish_status_id=PUBLISH_STATUS_DICT['unpublished'], d2_submission=False)
    sess.add_all([certified_sub, uncertified_sub])
    sess.commit()

    handler = fileHandler.FileHandler({}, is_local=True)
    expected_message = 'Submission has not been certified or has not been updated since certification.'

    # Neither a plain certified submission nor an uncertified one can be reverted
    for sub in (certified_sub, uncertified_sub):
        with pytest.raises(ResponseException) as resp_except:
            revert_to_certified(sub, handler)
        assert resp_except.value.status == 400
        assert str(resp_except.value) == expected_message
def test_get_signed_url_for_submission_file_s3(database, monkeypatch):
    """ In S3 mode the handler returns whatever signed URL the s3 URL handler produces. """
    submission = SubmissionFactory()
    database.session.add(submission)
    database.session.commit()

    file_handler = fileHandler.FileHandler(Mock(), isLocal=False)
    monkeypatch.setattr(fileHandler, 'user_agency_matches', Mock(return_value=True))

    # The request supplies the file name first, then the submission id
    request_mock = Mock()
    request_mock.return_value.getValue.side_effect = ['file_name', str(submission.submission_id)]
    monkeypatch.setattr(fileHandler, 'RequestDictionary', request_mock)

    # Stub the S3 URL handler so no real AWS call is made
    url_handler_mock = Mock()
    url_handler_mock.return_value.getSignedUrl.return_value = '/signed/url/path/file_name.csv'
    monkeypatch.setattr(fileHandler, 's3UrlHandler', url_handler_mock)
    file_handler.s3manager.getSignedUrl = Mock()

    response = file_handler.get_signed_url_for_submission_file()
    assert json.loads(response.get_data().decode("utf-8"))['url'] == '/signed/url/path/file_name.csv'
def test_move_certified_files(database, monkeypatch):
    """ move_certified_files should record a CertifiedFilesHistory row per file (plus cross-file warning
        reports), using local paths when is_local and agency/year/quarter-prefixed paths otherwise.
    """
    # set up cgac and submission
    cgac = CGACFactory(cgac_code='zyxwv', agency_name='Test')
    sub = SubmissionFactory(cgac_code='zyxwv', number_of_errors=0, publish_status_id=1,
                            reporting_fiscal_year=2017, reporting_fiscal_period=6)
    database.session.add_all([cgac, sub])
    database.session.commit()

    # set up certify history and jobs based on submission
    sess = database.session
    cert_hist_local = CertifyHistoryFactory(submission_id=sub.submission_id)
    cert_hist_remote = CertifyHistoryFactory(submission_id=sub.submission_id)
    finished_job = sess.query(JobStatus).filter_by(name='finished').one()
    upload_job = sess.query(JobType).filter_by(name='file_upload').one()
    # One finished upload job per file type (A, B, C, D1, D2, E, F)
    appropriations_job = JobFactory(
        submission=sub, filename="/path/to/appropriations/file_a.csv",
        file_type=sess.query(FileType).filter_by(name='appropriations').one(),
        job_type=upload_job, job_status=finished_job)
    prog_act_job = JobFactory(
        submission=sub, filename="/path/to/prog/act/file_b.csv",
        file_type=sess.query(FileType).filter_by(name='program_activity').one(),
        job_type=upload_job, job_status=finished_job)
    award_fin_job = JobFactory(
        submission=sub, filename="/path/to/award/fin/file_c.csv",
        file_type=sess.query(FileType).filter_by(name='award_financial').one(),
        job_type=upload_job, job_status=finished_job)
    award_proc_job = JobFactory(
        submission=sub, filename="/path/to/award/proc/file_d1.csv",
        file_type=sess.query(FileType).filter_by(name='award_procurement').one(),
        job_type=upload_job, job_status=finished_job)
    award_job = JobFactory(
        submission=sub, filename="/path/to/award/file_d2.csv",
        file_type=sess.query(FileType).filter_by(name='award').one(),
        job_type=upload_job, job_status=finished_job)
    exec_comp_job = JobFactory(
        submission=sub, filename="/path/to/exec/comp/file_e.csv",
        file_type=sess.query(FileType).filter_by(name='executive_compensation').one(),
        job_type=upload_job, job_status=finished_job)
    sub_award_job = JobFactory(
        submission=sub, filename="/path/to/sub/award/file_f.csv",
        file_type=sess.query(FileType).filter_by(name='sub_award').one(),
        job_type=upload_job, job_status=finished_job)
    # Narrative attached to the award_financial (C) file
    award_fin_narr = SubmissionNarrativeFactory(
        submission=sub, narrative="Test narrative",
        file_type=sess.query(FileType).filter_by(name='award_financial').one())
    database.session.add_all([
        cert_hist_local, cert_hist_remote, appropriations_job, prog_act_job, award_fin_job, award_proc_job,
        award_job, exec_comp_job, sub_award_job, award_fin_narr
    ])
    database.session.commit()

    # Stub out S3 and point config at known bucket/report paths
    s3_url_handler = Mock()
    monkeypatch.setattr(fileHandler, 'S3Handler', s3_url_handler)
    monkeypatch.setattr(fileHandler, 'CONFIG_BROKER', {
        'aws_bucket': 'original_bucket',
        'certified_bucket': 'cert_bucket'
    })
    monkeypatch.setattr(fileHandler, 'CONFIG_SERVICES', {'error_report_path': '/path/to/error/reports/'})
    fh = fileHandler.FileHandler(Mock())

    # test local certification
    fh.move_certified_files(sub, cert_hist_local, True)
    local_id = cert_hist_local.certify_history_id

    # make sure we have the right number of history entries
    # (7 file jobs + 4 cross-file warning reports = 11)
    all_local_certs = sess.query(CertifiedFilesHistory).filter_by(
        certify_history_id=local_id).all()
    assert len(all_local_certs) == 11

    # The C-file entry keeps its local path, warning report path, and narrative
    c_cert_hist = sess.query(CertifiedFilesHistory).\
        filter_by(certify_history_id=local_id,
                  file_type_id=sess.query(FileType).filter_by(name='award_financial').one().file_type_id).one()
    assert c_cert_hist.filename == "/path/to/award/fin/file_c.csv"
    assert c_cert_hist.warning_filename == "/path/to/error/reports/submission_{}_award_financial_warning_report.csv".\
        format(sub.submission_id)
    assert c_cert_hist.narrative == "Test narrative"

    # cross-file warnings
    warning_cert_hist = sess.query(CertifiedFilesHistory).filter_by(
        certify_history_id=local_id, file_type=None).all()
    assert len(warning_cert_hist) == 4
    assert warning_cert_hist[0].narrative is None
    warning_cert_hist_files = [
        hist.warning_filename for hist in warning_cert_hist
    ]
    assert "/path/to/error/reports/submission_{}_cross_warning_appropriations_program_activity.csv".\
        format(sub.submission_id) in warning_cert_hist_files

    # test remote certification
    fh.move_certified_files(sub, cert_hist_remote, False)
    remote_id = cert_hist_remote.certify_history_id
    # Remote paths are prefixed cgac/fiscal_year/quarter/certify_history_id
    c_cert_hist = sess.query(CertifiedFilesHistory). \
        filter_by(certify_history_id=remote_id,
                  file_type_id=sess.query(FileType).filter_by(name='award_financial').one().file_type_id).one()
    assert c_cert_hist.filename == "zyxwv/2017/2/{}/file_c.csv".format(
        remote_id)
    assert c_cert_hist.warning_filename == "zyxwv/2017/2/{}/submission_{}_award_financial_warning_report.csv". \
        format(remote_id, sub.submission_id)
def test_revert_submission(database, monkeypatch):
    """ Tests reverting an updated DABS certification """
    sess = database.session
    # Submission currently in 'updated' state with post-certification errors/warnings
    sub = Submission(publish_status_id=PUBLISH_STATUS_DICT['updated'], is_quarter_format=True, d2_submission=False,
                     publishable=False, number_of_errors=20, number_of_warnings=15)
    sess.add(sub)
    sess.commit()

    job = Job(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['finished'],
              job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['appropriations'],
              number_of_warnings=0, number_of_errors=10, filename='new/test/file.csv', number_of_rows=5,
              number_of_rows_valid=0)
    cert_history = CertifyHistory(submission_id=sub.submission_id)
    sess.add_all([job, cert_history])
    sess.commit()

    # Certified copy of the row (value 2) differs from the current row (value 15);
    # reverting should restore the certified value
    cert_approp = CertifiedAppropriation(submission_id=sub.submission_id, job_id=job.job_id, row_number=1,
                                         spending_authority_from_of_cpe=2, tas='test')
    approp = Appropriation(submission_id=sub.submission_id, job_id=job.job_id, row_number=1,
                           spending_authority_from_of_cpe=15, tas='test')
    cert_files = CertifiedFilesHistory(certify_history_id=cert_history.certify_history_id,
                                       submission_id=sub.submission_id, filename='old/test/file2.csv',
                                       file_type_id=FILE_TYPE_DICT['appropriations'],
                                       warning_filename='a/warning.csv')
    # Two certified error-metadata rows; their occurrences (15 + 10) become the warning count
    cert_meta1 = CertifiedErrorMetadata(job_id=job.job_id, file_type_id=FILE_TYPE_DICT['appropriations'],
                                        target_file_type_id=None, occurrences=15)
    cert_meta2 = CertifiedErrorMetadata(job_id=job.job_id, file_type_id=FILE_TYPE_DICT['appropriations'],
                                        target_file_type_id=None, occurrences=10)
    file_entry = File(file_id=FILE_TYPE_DICT['appropriations'], job_id=job.job_id,
                      file_status_id=FILE_STATUS_DICT['incomplete'], headers_missing='something')
    sess.add_all([cert_approp, approp, cert_files, cert_meta1, cert_meta2, file_entry])
    sess.commit()

    file_handler = fileHandler.FileHandler({}, is_local=True)
    # Skip actual error-file restoration; only the DB-side revert is under test
    monkeypatch.setattr(file_handler, 'revert_certified_error_files', Mock())
    revert_to_certified(sub, file_handler)

    # Test that certified data is moved back
    approp_query = sess.query(Appropriation).filter_by(submission_id=sub.submission_id).all()
    assert len(approp_query) == 1
    assert approp_query[0].spending_authority_from_of_cpe == 2

    # Test that the job got updated
    job_query = sess.query(Job).filter_by(submission_id=sub.submission_id).all()
    assert len(job_query) == 1
    assert job_query[0].filename == CONFIG_BROKER['broker_files'] + 'file2.csv'
    assert job_query[0].number_of_warnings == 25
    assert job_query[0].number_of_errors == 0
    assert job_query[0].number_of_rows == 2
    assert job_query[0].number_of_rows_valid == 1

    # Test that File got updated
    file_query = sess.query(File).filter_by(job_id=job.job_id).all()
    assert len(file_query) == 1
    assert file_query[0].headers_missing is None
    assert file_query[0].file_status_id == FILE_STATUS_DICT['complete']

    # Make sure submission got updated
    sub_query = sess.query(Submission).filter_by(submission_id=sub.submission_id).all()
    assert len(sub_query) == 1
    assert sub_query[0].publishable is True
    assert sub_query[0].number_of_errors == 0
    assert sub_query[0].number_of_warnings == 25