def setup_published_submission(cls, sess, submission_id, date='01/01/2000', is_fabs=False):
    """ Create certify/publish history rows and the published-file records for a submission.

        Args:
            sess: the database session to use
            submission_id: ID of the submission to set up
            date: publication date string for the file history rows
            is_fabs: whether this submission is FABS (single file) rather than DABS (A-F set)

        Returns:
            A (certify_history_id, publish_history_id) tuple for the rows created.
    """
    certify_hist = CertifyHistory(user_id=cls.submission_user_id, submission_id=submission_id)
    publish_hist = PublishHistory(user_id=cls.submission_user_id, submission_id=submission_id)
    sess.add_all([certify_hist, publish_hist])
    sess.commit()

    # FABS submissions publish a single file; DABS submissions publish the full A-F set
    if is_fabs:
        letters = ['FABS']
    else:
        letters = ['A', 'B', 'C', 'D1', 'D2', 'E', 'F']

    for letter in letters:
        cls.insert_published_files_history(
            sess, certify_hist.certify_history_id, publish_hist.publish_history_id, submission_id, date,
            FILE_TYPE_DICT_LETTER_ID[letter], 'path/to/file_{}.csv'.format(letter), None, None)

    # DABS submissions additionally carry a comments file recorded with no file type
    if not is_fabs:
        cls.insert_published_files_history(sess, certify_hist.certify_history_id, publish_hist.publish_history_id,
                                           submission_id, date, None, 'path/to/comments.csv', None, None)

    return certify_hist.certify_history_id, publish_hist.publish_history_id
def certify_submission(submission):
    """ Certify a DABS submission after checking it is eligible.

        Args:
            submission: the submission to be certified

        Returns:
            The JsonResponse from find_existing_submissions_in_period (certification proceeds only
            when its status is OK), or a JsonResponse error describing why certification was refused.
    """
    # Eligibility guards: must be publishable, quarterly, and not already published
    if not submission.publishable:
        return JsonResponse.error(ValueError("Submission cannot be certified due to critical errors"),
                                  StatusCode.CLIENT_ERROR)
    if not submission.is_quarter_format:
        return JsonResponse.error(ValueError("Monthly submissions cannot be certified"), StatusCode.CLIENT_ERROR)
    if submission.publish_status_id == PUBLISH_STATUS_DICT['published']:
        return JsonResponse.error(ValueError("Submission has already been certified"), StatusCode.CLIENT_ERROR)

    # Certification may be blocked during certain submission windows
    windows = get_window()
    for window in windows:
        if window.block_certification:
            return JsonResponse.error(ValueError(window.message), StatusCode.CLIENT_ERROR)

    sess = GlobalDB.db().session
    response = find_existing_submissions_in_period(sess, submission.cgac_code, submission.frec_code,
                                                   submission.reporting_fiscal_year,
                                                   submission.reporting_fiscal_period, submission.submission_id)

    if response.status_code == StatusCode.OK:
        # create the certify_history entry
        # (bug fix: removed a redundant re-binding of sess to GlobalDB.db().session here —
        # the session obtained above is the same one and is already in scope)
        certify_history = CertifyHistory(created_at=datetime.utcnow(), user_id=g.user.user_id,
                                         submission_id=submission.submission_id)
        sess.add(certify_history)
        sess.commit()

        # re-query the certify_history entry to pick up its generated PK
        certify_history = sess.query(CertifyHistory).filter_by(submission_id=submission.submission_id).\
            order_by(CertifyHistory.created_at.desc()).first()

        # move files (locally we don't move but we still need to populate the certified_files_history table)
        file_manager = FileHandler(request, is_local=is_local, server_path=server_path)
        file_manager.move_certified_files(submission, certify_history, is_local)

        # set submission contents
        submission.certifying_user_id = g.user.user_id
        submission.publish_status_id = PUBLISH_STATUS_DICT['published']
        sess.commit()
    return response
def certify_dabs_submission(submission, file_manager):
    """ Certify a DABS submission

        Args:
            submission: the submission to be certified
            file_manager: a FileHandler object to be used to call move_certified_files

        Returns:
            Nothing if successful, JsonResponse error containing the details of the error if something
            went wrong
    """
    current_user_id = g.user.user_id

    # Eligibility guards: must be publishable, quarterly, and not already published
    if not submission.publishable:
        return JsonResponse.error(ValueError("Submission cannot be certified due to critical errors"),
                                  StatusCode.CLIENT_ERROR)
    if not submission.is_quarter_format:
        return JsonResponse.error(ValueError("Monthly submissions cannot be certified"), StatusCode.CLIENT_ERROR)
    if submission.publish_status_id == PUBLISH_STATUS_DICT['published']:
        return JsonResponse.error(ValueError("Submission has already been certified"), StatusCode.CLIENT_ERROR)

    # Certification may be blocked during certain submission windows
    windows = get_windows()
    for window in windows:
        if window.block_certification:
            return JsonResponse.error(ValueError(window.message), StatusCode.CLIENT_ERROR)

    response = find_existing_submissions_in_period(submission.cgac_code, submission.frec_code,
                                                   submission.reporting_fiscal_year,
                                                   submission.reporting_fiscal_period, submission.submission_id)

    if response.status_code == StatusCode.OK:
        sess = GlobalDB.db().session

        # create the certify_history entry
        certify_history = CertifyHistory(created_at=datetime.utcnow(), user_id=current_user_id,
                                         submission_id=submission.submission_id)
        sess.add(certify_history)
        sess.commit()

        # get the certify_history entry including the PK
        certify_history = sess.query(CertifyHistory).filter_by(submission_id=submission.submission_id).\
            order_by(CertifyHistory.created_at.desc()).first()

        # move files (locally we don't move but we still need to populate the certified_files_history table)
        file_manager.move_certified_files(submission, certify_history, file_manager.is_local)

        # set submission contents
        submission.certifying_user_id = current_user_id
        submission.publish_status_id = PUBLISH_STATUS_DICT['published']
        sess.commit()
    # Bug fix: removed a leftover debug statement `print(submission.__dict__)` that dumped the
    # ORM object's internal state to stdout on every certification.
def test_revert_submission(database, monkeypatch):
    """ Tests reverting an updated DABS certification """
    sess = database.session

    # Submission in 'updated' status with deliberately wrong error/warning counts;
    # revert_to_certified is expected to restore the certified values asserted below.
    sub = Submission(publish_status_id=PUBLISH_STATUS_DICT['updated'], is_quarter_format=True, d2_submission=False,
                     publishable=False, number_of_errors=20, number_of_warnings=15)
    sess.add(sub)
    sess.commit()

    # A finished validation job pointing at the "new" (post-update) file
    job = Job(submission_id=sub.submission_id, job_status_id=JOB_STATUS_DICT['finished'],
              job_type_id=JOB_TYPE_DICT['csv_record_validation'], file_type_id=FILE_TYPE_DICT['appropriations'],
              number_of_warnings=0, number_of_errors=10, filename='new/test/file.csv', number_of_rows=5,
              number_of_rows_valid=0)
    cert_history = CertifyHistory(submission_id=sub.submission_id)
    sess.add_all([job, cert_history])
    sess.commit()

    # Certified row (value 2) vs. current row (value 15): the revert should bring back the certified value
    cert_approp = CertifiedAppropriation(submission_id=sub.submission_id, job_id=job.job_id, row_number=1,
                                         spending_authority_from_of_cpe=2, tas='test')
    approp = Appropriation(submission_id=sub.submission_id, job_id=job.job_id, row_number=1,
                           spending_authority_from_of_cpe=15, tas='test')

    # File history from the certification records the "old" filename the job should be reset to
    cert_files = CertifiedFilesHistory(certify_history_id=cert_history.certify_history_id,
                                       submission_id=sub.submission_id, filename='old/test/file2.csv',
                                       file_type_id=FILE_TYPE_DICT['appropriations'],
                                       warning_filename='a/warning.csv')

    # Two certified error-metadata rows; their occurrences (15 + 10) should sum into the
    # reverted warning counts asserted below
    cert_meta1 = CertifiedErrorMetadata(job_id=job.job_id, file_type_id=FILE_TYPE_DICT['appropriations'],
                                        target_file_type_id=None, occurrences=15)
    cert_meta2 = CertifiedErrorMetadata(job_id=job.job_id, file_type_id=FILE_TYPE_DICT['appropriations'],
                                        target_file_type_id=None, occurrences=10)

    # File row in a failed-ish state; the revert should clear headers_missing and mark it complete
    file_entry = File(file_id=FILE_TYPE_DICT['appropriations'], job_id=job.job_id,
                      file_status_id=FILE_STATUS_DICT['incomplete'], headers_missing='something')
    sess.add_all([cert_approp, approp, cert_files, cert_meta1, cert_meta2, file_entry])
    sess.commit()

    # Stub out the error-file restore so no file I/O happens in this test
    file_handler = fileHandler.FileHandler({}, is_local=True)
    monkeypatch.setattr(file_handler, 'revert_certified_error_files', Mock())

    revert_to_certified(sub, file_handler)

    # Test that certified data is moved back
    approp_query = sess.query(Appropriation).filter_by(submission_id=sub.submission_id).all()
    assert len(approp_query) == 1
    assert approp_query[0].spending_authority_from_of_cpe == 2

    # Test that the job got updated
    job_query = sess.query(Job).filter_by(submission_id=sub.submission_id).all()
    assert len(job_query) == 1
    # Filename is rebuilt from the local broker_files dir plus the certified file's basename
    assert job_query[0].filename == CONFIG_BROKER['broker_files'] + 'file2.csv'
    assert job_query[0].number_of_warnings == 25
    assert job_query[0].number_of_errors == 0
    assert job_query[0].number_of_rows == 2
    assert job_query[0].number_of_rows_valid == 1

    # Test that File got updated
    file_query = sess.query(File).filter_by(job_id=job.job_id).all()
    assert len(file_query) == 1
    assert file_query[0].headers_missing is None
    assert file_query[0].file_status_id == FILE_STATUS_DICT['complete']

    # Make sure submission got updated
    sub_query = sess.query(Submission).filter_by(submission_id=sub.submission_id).all()
    assert len(sub_query) == 1
    assert sub_query[0].publishable is True
    assert sub_query[0].number_of_errors == 0
    assert sub_query[0].number_of_warnings == 25
def certify_dabs_submission(submission, file_manager):
    """ Certify a DABS submission

        Args:
            submission: the submission to be certified
            file_manager: a FileHandler object to be used to call move_certified_files

        Returns:
            A JsonResponse containing the message "success" if successful, JsonResponse error containing the
            details of the error if something went wrong
    """
    current_user_id = g.user.user_id

    # Eligibility guards: must be publishable, quarterly, and not already published
    if not submission.publishable:
        return JsonResponse.error(ValueError("Submission cannot be certified due to critical errors"),
                                  StatusCode.CLIENT_ERROR)
    if not submission.is_quarter_format:
        return JsonResponse.error(ValueError("Monthly submissions cannot be certified"), StatusCode.CLIENT_ERROR)
    if submission.publish_status_id == PUBLISH_STATUS_DICT['published']:
        return JsonResponse.error(ValueError("Submission has already been certified"), StatusCode.CLIENT_ERROR)

    # Certification may be blocked during certain submission windows
    windows = get_windows()
    for window in windows:
        if window.block_certification:
            return JsonResponse.error(ValueError(window.message), StatusCode.CLIENT_ERROR)

    sess = GlobalDB.db().session

    # Check revalidation threshold
    last_validated = get_last_validated_date(submission.submission_id)
    reval_thresh = get_revalidation_threshold()['revalidation_threshold']
    # NOTE(review): this is a lexicographic comparison of timestamp strings; it is only correct if both
    # sides share the same 'YYYY-MM-DDTHH:MM:SS' format (strptime below implies last_validated does —
    # confirm get_revalidation_threshold returns the same format).
    if reval_thresh and reval_thresh >= last_validated:
        return JsonResponse.error(ValueError("This submission has not been validated since before the revalidation "
                                             "threshold ({}), it must be revalidated before certifying.".
                                             format(reval_thresh.replace('T', ' '))), StatusCode.CLIENT_ERROR)

    # Get the year/quarter of the submission and filter by them
    # (quarterly submissions report fiscal periods in multiples of 3, so period // 3 gives the quarter 1-4)
    sub_quarter = submission.reporting_fiscal_period // 3
    sub_year = submission.reporting_fiscal_year
    quarter_reval = sess.query(QuarterlyRevalidationThreshold).filter_by(year=sub_year, quarter=sub_quarter).\
        one_or_none()

    # If we don't have a quarterly revalidation threshold for this year/quarter, they can't submit
    if not quarter_reval:
        return JsonResponse.error(ValueError("No submission window for this year and quarter was found. If this is an "
                                             "error, please contact the Service Desk."), StatusCode.CLIENT_ERROR)

    # Make sure everything was last validated after the start of the submission window
    last_validated = datetime.strptime(last_validated, '%Y-%m-%dT%H:%M:%S')
    if last_validated < quarter_reval.window_start:
        return JsonResponse.error(ValueError("This submission was last validated or its D files generated before the "
                                             "start of the submission window ({}). Please revalidate before "
                                             "certifying.".format(quarter_reval.window_start.strftime('%m/%d/%Y'))),
                                  StatusCode.CLIENT_ERROR)

    # Refuse certification if another submission already covers this agency/period
    response = find_existing_submissions_in_period(submission.cgac_code, submission.frec_code,
                                                   submission.reporting_fiscal_year,
                                                   submission.reporting_fiscal_period, submission.submission_id)

    if response.status_code == StatusCode.OK:
        # create the certify_history entry
        certify_history = CertifyHistory(created_at=datetime.utcnow(), user_id=current_user_id,
                                         submission_id=submission.submission_id)
        sess.add(certify_history)
        sess.commit()

        # get the certify_history entry including the PK
        certify_history = sess.query(CertifyHistory).filter_by(submission_id=submission.submission_id).\
            order_by(CertifyHistory.created_at.desc()).first()

        # Move the data to the certified table, deleting any old certified data in the process
        move_certified_data(sess, submission.submission_id)

        # move files (locally we don't move but we still need to populate the certified_files_history table)
        file_manager.move_certified_files(submission, certify_history, file_manager.is_local)

        # set submission contents
        submission.certifying_user_id = current_user_id
        submission.publish_status_id = PUBLISH_STATUS_DICT['published']
        sess.commit()
    return response