def validate_job(self, request):
    """ Gets file for job, validates each row, and sends valid rows to a staging table.

        Args:
            request: HTTP request containing the job_id to validate

        Returns:
            Http response object (JsonResponse with a success message)

        Raises:
            ResponseException: if no job ID was supplied, the job does not exist,
                its prerequisites are incomplete, or it is not a validation job
    """
    # Create connection to job tracker database
    sess = GlobalDB.db().session

    requestDict = RequestDictionary(request)
    if requestDict.exists('job_id'):
        job_id = requestDict.getValue('job_id')
    else:
        # Request does not have a job ID, can't validate
        validation_error_type = ValidationError.jobError
        raise ResponseException('No job ID specified in request', StatusCode.CLIENT_ERROR, None,
                                validation_error_type)

    # Get the job
    job = sess.query(Job).filter_by(job_id=job_id).one_or_none()
    if job is None:
        # Record the failure against the job before raising so the error is visible in file-level reporting
        validation_error_type = ValidationError.jobError
        writeFileError(job_id, None, validation_error_type)
        raise ResponseException(
            'Job ID {} not found in database'.format(job_id),
            StatusCode.CLIENT_ERROR, None, validation_error_type)

    # Make sure job's prerequisites are complete
    if not run_job_checks(job_id):
        validation_error_type = ValidationError.jobError
        writeFileError(job_id, None, validation_error_type)
        raise ResponseException(
            'Prerequisites for Job ID {} are not complete'.format(job_id),
            StatusCode.CLIENT_ERROR, None, validation_error_type)

    # Make sure this is a validation job
    if job.job_type.name in ('csv_record_validation', 'validation'):
        job_type_name = job.job_type.name
    else:
        validation_error_type = ValidationError.jobError
        writeFileError(job_id, None, validation_error_type)
        raise ResponseException(
            'Job ID {} is not a validation job (job type is {})'.format(
                job_id, job.job_type.name),
            StatusCode.CLIENT_ERROR, None, validation_error_type)

    # set job status to running and do validations
    mark_job_status(job_id, "running")
    if job_type_name == 'csv_record_validation':
        # Single-file row-level validation
        self.runValidation(job)
    elif job_type_name == 'validation':
        # Cross-file validation across the submission
        self.runCrossValidation(job)
    else:
        # Unreachable given the membership check above; kept as a defensive guard
        raise ResponseException("Bad job type for validator", StatusCode.INTERNAL_ERROR)
    return JsonResponse.create(StatusCode.OK,
                               {"message": "Validation complete"})
def set_new_password(self, session):
    """ Set a new password for a user; the request must contain keys "user_email" and "password".

        Args:
            session: the Flask session, used to look up and invalidate the email token

        Returns:
            JsonResponse with a success message, or an error response if the
            request is missing keys or the password fails validation
    """
    sess = GlobalDB.db().session
    request_dict = RequestDictionary.derive(self.request)
    required = ('user_email', 'password')
    try:
        if any(field not in request_dict for field in required):
            # Don't have the keys we need in request
            raise ResponseException(
                "Set password route requires keys user_email and password",
                StatusCode.CLIENT_ERROR
            )
        if not self.checkPassword(request_dict['password']):
            raise ResponseException(
                "Invalid Password", StatusCode.CLIENT_ERROR)
    except ResponseException as exc:
        return JsonResponse.error(exc, exc.status)
    # Get user from email (case-insensitive match)
    # NOTE(review): .one() raises if no user matches — presumably guaranteed by the token flow; confirm
    user = sess.query(User).filter(
        func.lower(User.email) == func.lower(request_dict["user_email"])
    ).one()
    # Set new password
    set_user_password(user, request_dict["password"], self.bcrypt)
    # Invalidate token so it cannot be reused
    oldToken = sess.query(EmailToken).filter(EmailToken.token == session["token"]).one()
    sess.delete(oldToken)
    sess.commit()
    session["reset"] = None
    # Return success message
    return JsonResponse.create(StatusCode.OK, {"message": "Password successfully changed"})
def get_next_record(self):
    """ Read the next record into a dict and return it.

        Returns:
            pair of (dictionary of expected fields, list of FlexFields)

        Raises:
            ResponseException: when the row has the wrong number of fields
    """
    record = {}
    flex_cells = []
    row = self._get_line()
    if len(row) != self.column_count:
        raise ResponseException(
            "Wrong number of fields in this row, expected %s got %s" % (
                self.column_count, len(row)),
            StatusCode.CLIENT_ERROR, ValueError, ValidationError.readError)
    for position, value in enumerate(row):
        # Defensive guard; unreachable given the length check above
        if position >= self.column_count:
            raise ResponseException("Record contains too many fields",
                                    StatusCode.CLIENT_ERROR, ValueError,
                                    ValidationError.readError)
        # sqlalchemy prefers None over empty strings
        value = None if value == "" else value
        # self.expected_headers uses the short, machine-readable column names
        expected = self.expected_headers[position]
        if expected is not None:
            record[expected] = value
        elif self.flex_headers[position] is not None:
            flex_cells.append(
                FlexField(header=self.flex_headers[position], cell=value))
        # Columns that are neither expected nor flex are silently dropped
    return record, flex_cells
def listUsersWithStatus(self):
    """ List all users whose status matches the request body's 'status' key.

        Returns:
            JsonResponse holding a list of user info dicts, or an error
            response when 'status' is missing or invalid
    """
    request_data = RequestDictionary(self.request)
    if not request_data.exists("status"):
        # Missing a required field, return 400
        failure = ResponseException("Request body must include status",
                                    StatusCode.CLIENT_ERROR)
        return JsonResponse.error(failure, failure.status)
    try:
        matched_users = self.interfaces.userDb.getUsersByStatus(
            request_data.getValue("status"))
    except ValueError as err:
        # Client provided a bad status
        failure = ResponseException(str(err), StatusCode.CLIENT_ERROR,
                                    ValueError)
        return JsonResponse.error(failure, failure.status)
    user_info = [
        {
            "name": user.name,
            "title": user.title,
            "agency": user.agency,
            "email": user.email,
            "id": user.user_id
        }
        for user in matched_users
    ]
    return JsonResponse.create(StatusCode.OK, {"users": user_info})
def setSkipGuide(self, session):
    """ Set the current user's skip_guide flag.

        The request body must include "skip_guide", either as a boolean or as
        the strings "true"/"false" (case-insensitive).

        Args:
            session: the session dict; "name" holds the current user's UID

        Returns:
            JsonResponse with the new skip_guide value, or an error response
            when the parameter is missing or not a recognizable boolean
    """
    uid = session["name"]
    userDb = self.interfaces.userDb
    user = userDb.getUserByUID(uid)
    requestDict = RequestDictionary(self.request)
    if not requestDict.exists("skip_guide"):
        exc = ResponseException("Must include skip_guide parameter",
                                StatusCode.CLIENT_ERROR)
        return JsonResponse.error(exc, exc.status)
    skipGuide = requestDict.getValue("skip_guide")
    # Fixed: use isinstance() instead of the `type(x) == type(True)` anti-pattern
    if isinstance(skipGuide, bool):
        # param is a bool
        user.skip_guide = skipGuide
    elif isinstance(skipGuide, str):
        # param is a string, allow "true" or "false"
        if skipGuide.lower() == "true":
            user.skip_guide = True
        elif skipGuide.lower() == "false":
            user.skip_guide = False
        else:
            exc = ResponseException("skip_guide must be true or false",
                                    StatusCode.CLIENT_ERROR)
            return JsonResponse.error(exc, exc.status)
    else:
        exc = ResponseException("skip_guide must be a boolean",
                                StatusCode.CLIENT_ERROR)
        return JsonResponse.error(exc, exc.status)
    userDb.session.commit()
    return JsonResponse.create(StatusCode.OK, {
        "message": "skip_guide set successfully",
        "skip_guide": skipGuide
    })
def validate_threaded():
    """Start the validation process on a new thread.

    Returns:
        JsonResponse identifying the started job's table, or an error
        response describing why the job could not be started.
    """
    @copy_current_request_context
    def ThreadedFunction(arg):
        """The new thread: runs the actual validation for the given job ID."""
        threadedManager = ValidationManager(local, error_report_path)
        threadedManager.threadedValidateJob(arg)

    def _log_error(message):
        # Append to the error log; 'with' guarantees the handle is closed.
        # (Fixed: the original used open(...).write(...), leaking file handles.)
        with open("errorLog", "a") as error_log:
            error_log.write(message + "\n")

    # Connect to the job tracker database
    try:
        interfaces = InterfaceHolder()
        jobTracker = interfaces.jobDb
    except ResponseException as e:
        _log_error(str(e))
        return JsonResponse.error(e, e.status, table="cannot connect to job database")
    except Exception as e:
        _log_error(str(e))
        exc = ResponseException(str(e), StatusCode.INTERNAL_ERROR, type(e))
        return JsonResponse.error(exc, exc.status, table="cannot connect to job database")

    jobId = None
    manager = ValidationManager(local, error_report_path)

    # Determine which job the request refers to
    try:
        jobId = manager.getJobID(request)
    except ResponseException as e:
        manager.markJob(jobId, jobTracker, "invalid", interfaces.errorDb, manager.filename)
        CloudLogger.logError(str(e), e, traceback.extract_tb(sys.exc_info()[2]))
        return JsonResponse.error(e, e.status, table="")
    except Exception as e:
        exc = ResponseException(str(e), StatusCode.CLIENT_ERROR, type(e))
        manager.markJob(jobId, jobTracker, "invalid", interfaces.errorDb, manager.filename)
        CloudLogger.logError(str(e), exc, traceback.extract_tb(sys.exc_info()[2]))
        return JsonResponse.error(exc, exc.status, table="")

    # Verify the job is ready to run
    try:
        manager.testJobID(jobId, interfaces)
    except ResponseException as e:
        _log_error(str(e))
        # Job is not ready to run according to job tracker, do not change status of job in job tracker
        interfaces.errorDb.writeFileError(jobId, manager.filename, ValidationError.jobError)
        return JsonResponse.error(e, e.status, table="")
    except Exception as e:
        _log_error(str(e))
        exc = ResponseException(str(e), StatusCode.CLIENT_ERROR, type(e))
        interfaces.errorDb.writeFileError(jobId, manager.filename, ValidationError.jobError)
        return JsonResponse.error(exc, exc.status, table="")

    thread = Thread(target=ThreadedFunction, args=(jobId,))

    # Mark the job running before launching the worker thread
    try:
        jobTracker.markJobStatus(jobId, "running")
    except Exception as e:
        _log_error(str(e))
        exc = ResponseException(str(e), StatusCode.INTERNAL_ERROR, type(e))
        return JsonResponse.error(exc, exc.status, table="could not start job")

    interfaces.close()
    thread.start()
    return JsonResponse.create(StatusCode.OK, {"table": "job" + str(jobId)})
def loadSubmitParams(cls, requestDict):
    """ Load params from request; return a dict of provided values mapped to
        submission fields, plus the existing submission ID (or None).

        Metadata fields are required for brand-new submissions but optional
        when an existing_submission_id is supplied.
    """
    # Existing submission ID is optional
    updating_existing = False
    existing_submission_id = None
    if requestDict.exists("existing_submission_id"):
        updating_existing = True
        existing_submission_id = requestDict.getValue("existing_submission_id")

    submission_data = {}
    date_keys = ("reporting_period_start_date", "reporting_period_end_date")
    for key in cls.metaDataFieldMap:
        if not requestDict.exists(key):
            # Required fields may only be omitted when updating an existing submission
            if not updating_existing:
                raise ResponseException(key + " is required",
                                        StatusCode.CLIENT_ERROR, ValueError)
            continue
        value = requestDict.getValue(key)
        if key in date_keys:
            # Parse a formatted date string, assuming "MM/DD/YYYY"
            try:
                value = JobHandler.createDate(value)
            except Exception as e:
                raise ResponseException(
                    "Submission dates must be formatted as MM/DD/YYYY, hit error: " + str(e),
                    StatusCode.CLIENT_ERROR, type(e))
        submission_data[cls.metaDataFieldMap[key]] = value
    return submission_data, existing_submission_id
def getNextRecord(self):
    """ Read the next record into a dict and return it.

        Returns:
            dictionary representing this record, keyed by the short
            machine-readable column names

        Raises:
            ResponseException: when the row has the wrong number of fields
    """
    record = {}
    raw_line = self._getLine()
    for parsed_row in csv.reader([raw_line], dialect='excel',
                                 delimiter=self.delimiter):
        if len(parsed_row) != self.columnCount:
            raise ResponseException("Wrong number of fields in this row",
                                    StatusCode.CLIENT_ERROR, ValueError,
                                    ValidationError.readError)
        for position, value in enumerate(parsed_row):
            # Defensive guard; unreachable given the length check above
            if position >= self.columnCount:
                raise ResponseException("Record contains too many fields",
                                        StatusCode.CLIENT_ERROR, ValueError,
                                        ValidationError.readError)
            # self.headerDictionary uses the short, machine-readable column names
            header = self.headerDictionary[position]
            if header is None:
                # Skip this column as it is unknown
                continue
            # sqlalchemy prefers None over empty strings
            record[header] = value if value != "" else None
    return record
def set_skip_guide(self):
    """ Set the current user's skip_guide flag.

        Returns:
            JsonResponse object containing results of setting the skip guide
            or details of the error that occurred. Possible errors include
            the request not containing a skip_guide parameter or it not being
            a boolean value
    """
    sess = GlobalDB.db().session
    request_dict = RequestDictionary.derive(self.request)
    try:
        if 'skip_guide' not in request_dict:
            raise ResponseException("Must include skip_guide parameter",
                                    StatusCode.CLIENT_ERROR)
        # Normalize booleans and strings alike to "true"/"false"
        skip_guide = str(request_dict['skip_guide']).lower()
        if skip_guide not in ("true", "false"):
            raise ResponseException("skip_guide must be true or false",
                                    StatusCode.CLIENT_ERROR)
    except ResponseException as exc:
        return JsonResponse.error(exc, exc.status)
    g.user.skip_guide = skip_guide == "true"
    sess.commit()
    return JsonResponse.create(StatusCode.OK, {
        "message": "skip_guide set successfully",
        "skip_guide": skip_guide
    })
def year_period_to_dates(year, period):
    """ Converts a year and period to the real-life start and end dates they represent.

        Args:
            year: integer representing the year to use
            period: integer representing the period (month of the fiscal year) to use

        Returns:
            Strings (MM/DD/YYYY) representing the start and end dates of the
            given period

        Raises:
            ResponseException: if the year is not 4 digits or the period is
                not an integer 2-12
    """
    # Make sure year is in the proper format
    # Fixed: raw string for the regex ('\d' in a plain string is an invalid
    # escape sequence and a SyntaxWarning on modern Python)
    if not year or not re.match(r'^\d{4}$', str(year)):
        raise ResponseException('Year must be in YYYY format.', StatusCode.CLIENT_ERROR)
    # Make sure period is a number 2-12
    if not period or period not in list(range(2, 13)):
        raise ResponseException('Period must be an integer 2-12.', StatusCode.CLIENT_ERROR)

    # Set the actual month, add 12 if it's negative so it loops around and adjusts the year
    month = period - 3
    if month < 1:
        month += 12
        year -= 1
    # Get the last day of the month
    last_day_of_month = calendar.monthrange(year, month)[1]
    start = str(month).zfill(2) + '/01/' + str(year)
    end = str(month).zfill(2) + '/' + str(last_day_of_month) + '/' + str(year)
    return start, end
def check_existing_submission_perms(perm, submission_id):
    """ Checks the current user's permissions against the submission with the
        ID of submission_id.

        Args:
            perm: the type of permission we are checking for
            submission_id: the ID of the Submission that the user input

        Raises:
            ResponseException: If the user doesn't have permission to access
                the submission at the level requested or no valid submission
                ID was provided.
    """
    sess = GlobalDB.db().session
    matching = sess.query(Submission).filter(
        Submission.submission_id == submission_id)
    submission = matching.one_or_none()

    # The submission must exist before permissions can be checked
    if submission is None:
        raise ResponseException(
            "existing_submission_id must be a valid submission_id",
            StatusCode.CLIENT_ERROR)

    # The user must hold the requested permission level on this submission
    if not active_user_can_on_submission(perm, submission):
        raise ResponseException(
            "User does not have permissions to write to that submission",
            StatusCode.PERMISSION_DENIED)
def finalize(self):
    """ Set upload job in job tracker database to finished, allowing dependent jobs to be started.

        Flask request should include key "upload_id", which holds the job_id for the file_upload job

        Returns:
            A flask response object, if successful just contains key "success" with value True,
            otherwise value is False
    """
    responseDict = {}
    try:
        inputDictionary = RequestDictionary(self.request)
        jobId = inputDictionary.getValue("upload_id")

        # Compare user ID with user who submitted job, if no match return 400
        job = self.jobManager.getJobById(jobId)
        submission = self.jobManager.getSubmissionForJob(job)
        if (submission.user_id != LoginSession.getName(session)):
            # This user cannot finalize this job
            raise ResponseException("Cannot finalize a job created by a different user",
                                    StatusCode.CLIENT_ERROR)
        # Change job status to finished
        if (self.jobManager.checkUploadType(jobId)):
            self.jobManager.changeToFinished(jobId)
            responseDict["success"] = True
            return JsonResponse.create(StatusCode.OK, responseDict)
        else:
            # Route only applies to file_upload jobs
            raise ResponseException("Wrong job type for finalize route", StatusCode.CLIENT_ERROR)
    except (ValueError, TypeError) as e:
        # Bad/missing request values map to a 400
        return JsonResponse.error(e, StatusCode.CLIENT_ERROR)
    except ResponseException as e:
        return JsonResponse.error(e, e.status)
    except Exception as e:
        # Unexpected exception, this is a 500 server error
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)
def uploadFile(self):
    """ Save an uploaded file locally and return the saved path.

        Only valid for local installs; returns an error response otherwise.
    """
    try:
        if not self.isLocal:
            failure = ResponseException("Route Only Valid For Local Installs",
                                        StatusCode.CLIENT_ERROR)
            return JsonResponse.error(failure, failure.status)
        uploaded = request.files['file']
        if not uploaded:
            failure = ResponseException("Failure to read file",
                                        StatusCode.CLIENT_ERROR)
            return JsonResponse.error(failure, failure.status)
        # Prefix with seconds-since-epoch so repeated uploads don't collide
        epoch_seconds = int(
            (datetime.utcnow() - datetime(1970, 1, 1)).total_seconds())
        stored_name = "".join(
            [str(epoch_seconds), "_", secure_filename(uploaded.filename)])
        destination = os.path.join(self.serverPath, stored_name)
        uploaded.save(destination)
        return JsonResponse.create(StatusCode.OK, {"path": destination})
    except (ValueError, TypeError) as e:
        return JsonResponse.error(e, StatusCode.CLIENT_ERROR)
    except ResponseException as e:
        return JsonResponse.error(e, e.status)
    except Exception as e:
        # Unexpected exception, this is a 500 server error
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)
def wrapped(*args, **kwargs):
    """ Validate agency identifiers and permissions before invoking the
        wrapped route.

        Requires one of existing_submission_id, cgac_code, or frec_code in
        the request args; checks the closure's `perm` against the matching
        submission or agency, then delegates to `fn`.
    """
    req_args = webargs_parser.parse({
        'existing_submission_id': webargs_fields.Int(missing=None),
        'cgac_code': webargs_fields.String(missing=None),
        'frec_code': webargs_fields.String(missing=None)
    })
    # At least one identifier must be supplied
    identifiers = (req_args['existing_submission_id'], req_args['cgac_code'],
                   req_args['frec_code'])
    if all(value is None for value in identifiers):
        raise ResponseException(
            'Missing required parameter: cgac_code, frec_code, or existing_submission_id',
            StatusCode.CLIENT_ERROR)

    if req_args['existing_submission_id'] is not None:
        # Permissions derive from the existing Submission when one is given
        check_existing_submission_perms(perm,
                                        req_args['existing_submission_id'])
    elif not active_user_can(perm, cgac_code=req_args['cgac_code'],
                             frec_code=req_args['frec_code']):
        # Otherwise check permissions directly against the agency codes
        raise ResponseException(
            "User does not have permissions to write to that agency",
            StatusCode.PERMISSION_DENIED)
    return fn(*args, **kwargs)
def sendResetPasswordEmail(self, user, system_email, email=None, unlock_user=False):
    """ Email the user a token link to reset their password (or unlock their account).

        Args:
            user: the user whose password is being reset
            system_email: address the email is sent from
            email: destination address; defaults to the user's email
            unlock_user: when True, clears the current password and sends the
                unlock template instead of the reset template

        Raises:
            ResponseException: if the user is not approved, or is locked and
                unlock_user is False
    """
    if email is None:
        email = user.email

    # User must be approved and active to reset password
    if user.user_status_id != self.interfaces.userDb.getUserStatusId(
            "approved"):
        raise ResponseException(
            "User must be approved before resetting password",
            StatusCode.CLIENT_ERROR)
    elif not unlock_user and not user.is_active:
        raise ResponseException("User is locked, cannot reset password",
                                StatusCode.CLIENT_ERROR)

    # If unlocking a user, wipe out current password
    if unlock_user:
        UserHandler().clearPassword(user)
        # NOTE(review): commit assumed to belong to the unlock branch
        # (persisting the cleared password) — confirm original indentation
        self.interfaces.userDb.session.commit()

    # Send email with token
    emailToken = sesEmail.createToken(email, "password_reset")
    link = "".join(
        [AccountHandler.FRONT_END, '#/forgotpassword/', emailToken])
    emailTemplate = {'[URL]': link}
    # Unlock and reset share the same token flow but different templates
    templateType = "unlock_account" if unlock_user else "reset_password"
    newEmail = sesEmail(user.email, system_email, templateType=templateType,
                        parameters=emailTemplate,
                        database=self.interfaces.userDb)
    newEmail.send()
def resetPassword(self, system_email, session):
    """ Remove old password and email user a token to set a new password.

        Request should have key "email".

        Args:
            system_email: (string) email used to send messages
            session: (Session) object from flask
    """
    request_data = RequestDictionary(self.request)
    if not request_data.exists("email"):
        # Don't have the keys we need in request
        failure = ResponseException(
            "Reset password route requires key 'email'",
            StatusCode.CLIENT_ERROR)
        return JsonResponse.error(failure, failure.status)

    # Get user object
    try:
        user = self.interfaces.userDb.getUserByEmail(
            request_data.getValue("email"))
    except Exception:
        failure = ResponseException("Unknown Error", StatusCode.CLIENT_ERROR,
                                    ValueError)
        return JsonResponse.error(failure, failure.status)

    email = request_data.getValue("email")
    # Log the user out before sending the reset token
    LoginSession.logout(session)
    self.sendResetPasswordEmail(user, system_email, email)

    # Return success message
    return JsonResponse.create(StatusCode.OK, {"message": "Password reset"})
def setNewPassword(self, session):
    """ Set a new password for a user.

        Request should have keys "user_email" and "password".
    """
    request_data = RequestDictionary(self.request)
    has_required = (request_data.exists("user_email")
                    and request_data.exists("password"))
    if not has_required:
        # Don't have the keys we need in request
        failure = ResponseException(
            "Set password route requires keys user_email and password",
            StatusCode.CLIENT_ERROR)
        return JsonResponse.error(failure, failure.status)
    if not self.checkPassword(request_data.getValue("password")):
        failure = ResponseException("Invalid Password",
                                    StatusCode.CLIENT_ERROR)
        return JsonResponse.error(failure, failure.status)

    user_db = self.interfaces.userDb
    # Get user from email
    user = user_db.getUserByEmail(request_data.getValue("user_email"))
    # Set new password
    user_db.setPassword(user, request_data.getValue("password"), self.bcrypt)
    # Invalidate token so it cannot be reused
    user_db.deleteToken(session["token"])
    session["reset"] = None
    # Return success message
    return JsonResponse.create(
        StatusCode.OK, {"message": "Password successfully changed"})
def validate_table_properties(page, limit, order, sort, sort_options):
    """ Validate table properties like page, limit, and sort.

        Args:
            page: page number to use in getting the list
            limit: the number of entries per page
            order: order ascending or descending
            sort: the column to order on
            sort_options: the list of valid options for sorting

        Exceptions:
            ResponseException if any property is invalid
    """
    def _positive_int(value):
        # Pagination values must be integers strictly greater than zero
        return isinstance(value, int) and value > 0

    if not _positive_int(page):
        raise ResponseException('Page must be an integer greater than 0',
                                status=StatusCode.CLIENT_ERROR)

    if not _positive_int(limit):
        raise ResponseException('Limit must be an integer greater than 0',
                                status=StatusCode.CLIENT_ERROR)

    if order not in ('asc', 'desc'):
        raise ResponseException('Order must be "asc" or "desc"',
                                status=StatusCode.CLIENT_ERROR)

    if sort not in sort_options:
        raise ResponseException(
            'Sort must be one of: {}'.format(', '.join(sort_options)),
            status=StatusCode.CLIENT_ERROR)
def list_rule_settings(agency_code, file):
    """ Returns a list of prioritized rules for an agency.

        Args:
            agency_code: string of the agency's CGAC/FREC code
            file: the rule's file type

        Returns:
            JsonResponse holding the agency's rules, split into 'warnings'
            and 'errors', each ordered by priority

        Raises:
            ResponseException if invalid agency code or file type
    """
    sess = GlobalDB.db().session

    # Reject unknown file types up front
    if file not in FILE_TYPES:
        raise ResponseException('Invalid file type: {}'.format(file), StatusCode.CLIENT_ERROR)
    # The code must match a known CGAC or FREC agency
    if (sess.query(CGAC).filter(CGAC.cgac_code == agency_code).count() == 0) and \
            (sess.query(FREC).filter(FREC.frec_code == agency_code).count() == 0):
        raise ResponseException('Invalid agency_code: {}'.format(agency_code), StatusCode.CLIENT_ERROR)

    # Get the base query with the file filter
    # Rules join on label + file, with NULL-safe comparison of target files
    rule_settings_query = sess.query(RuleSetting.priority, RuleSql.rule_label, RuleImpact.name,
                                     RuleSql.rule_error_message, RuleSql.rule_severity_id).\
        join(RuleSql, and_(RuleSql.rule_label == RuleSetting.rule_label, RuleSql.file_id == RuleSetting.file_id,
                           is_not_distinct_from(RuleSql.target_file_id, RuleSetting.target_file_id))).\
        join(RuleImpact, RuleImpact.rule_impact_id == RuleSetting.impact_id)
    rule_settings_query = file_filter(rule_settings_query, RuleSetting, [file])

    # Filter settings by agency. If they haven't set theirs, use the defaults.
    if agency_has_settings(sess, agency_code, file):
        agency_filter = (RuleSetting.agency_code == agency_code)
    else:
        # Default settings are stored with a NULL agency_code
        agency_filter = RuleSetting.agency_code.is_(None)
    rule_settings_query = rule_settings_query.filter(agency_filter)

    # Order by priority/significance
    rule_settings_query = rule_settings_query.order_by(RuleSetting.priority)

    errors = []
    warnings = []
    for rule in rule_settings_query.all():
        rule_dict = {
            'label': rule.rule_label,
            'description': rule.rule_error_message,
            'significance': rule.priority,
            'impact': rule.name
        }
        # Bucket each rule by its severity
        if rule.rule_severity_id == RULE_SEVERITY_DICT['warning']:
            warnings.append(rule_dict)
        else:
            errors.append(rule_dict)
    return JsonResponse.create(StatusCode.OK, {
        'warnings': warnings,
        'errors': errors
    })
def startGenerationJob(self, submission_id, file_type):
    """ Initiates a file generation job.

        Args:
            submission_id: ID of submission to start job for
            file_type: Type of file to be generated (D1, D2, E, or F)

        Returns:
            Tuple of boolean indicating successful start, and error response if False
    """
    jobDb = self.interfaces.jobDb
    file_type_name = self.fileTypeMap[file_type]

    if file_type in ["D1", "D2"]:
        # Populate start and end dates, these should be provided in MM/DD/YYYY format,
        # using calendar year (not fiscal year)
        requestDict = RequestDictionary(self.request)
        start_date = requestDict.getValue("start")
        end_date = requestDict.getValue("end")

        if not (StringCleaner.isDate(start_date) and StringCleaner.isDate(end_date)):
            exc = ResponseException("Start or end date cannot be parsed into a date",
                                    StatusCode.CLIENT_ERROR)
            return False, JsonResponse.error(exc, exc.status, start="", end="",
                                             file_type=file_type, status="failed")
    elif file_type not in ["E", "F"]:
        # Only D1, D2, E, and F files can be generated here
        exc = ResponseException("File type must be either D1, D2, E or F",
                                StatusCode.CLIENT_ERROR)
        return False, JsonResponse.error(exc, exc.status, file_type=file_type, status="failed")

    cgac_code = self.jobManager.getSubmissionById(submission_id).cgac_code

    # Generate and upload file to S3
    user_id = LoginSession.getName(session)
    timestamped_name = s3UrlHandler.getTimestampedFilename(
        CONFIG_BROKER["".join([str(file_type_name), "_file_name"])])
    if self.isLocal:
        upload_file_name = "".join([CONFIG_BROKER['broker_files'], timestamped_name])
    else:
        # Remote files are namespaced by the uploading user's ID
        upload_file_name = "".join([str(user_id), "/", timestamped_name])

    # Mark the upload job as running before kicking off generation
    job = jobDb.getJobBySubmissionFileTypeAndJobType(submission_id, file_type_name, "file_upload")
    job.filename = upload_file_name
    job.original_filename = timestamped_name
    job.job_status_id = jobDb.getJobStatusId("running")
    jobDb.session.commit()

    if file_type in ["D1", "D2"]:
        CloudLogger.log("DEBUG: Adding job info for job id of " + str(job.job_id),
                        log_type="debug", file_name=self.debug_file_name)
        return self.addJobInfoForDFile(upload_file_name, timestamped_name, submission_id,
                                       file_type, file_type_name, start_date, end_date,
                                       cgac_code, job)
    elif file_type == 'E':
        # E and F files are generated asynchronously via celery tasks
        generate_e_file.delay(
            submission_id, job.job_id, InterfaceHolder, timestamped_name,
            upload_file_name, self.isLocal)
    elif file_type == 'F':
        generate_f_file.delay(
            submission_id, job.job_id, InterfaceHolder, timestamped_name,
            upload_file_name, self.isLocal)

    return True, None
def changeStatus(self, system_email):
    """ Changes status for specified user. Associated request body should have keys 'uid' and 'new_status'.

        Args:
            system_email: (string) the email address to send emails from

        Returns:
            the response object with a success message, or an error response
    """
    requestDict = RequestDictionary(self.request)
    if (not (requestDict.exists("uid")
             and requestDict.exists("new_status"))):
        # Missing a required field, return 400
        exc = ResponseException(
            "Request body must include uid and new_status",
            StatusCode.CLIENT_ERROR)
        return JsonResponse.error(exc, exc.status)

    # Find user that matches specified uid
    user = self.interfaces.userDb.getUserByUID(
        int(requestDict.getValue("uid")))

    # Status-change notifications require an email on file
    if (user.email == None):
        return JsonResponse.error(
            ResponseException("User does not have a defined email",
                              StatusCode.INTERNAL_ERROR),
            StatusCode.INTERNAL_ERROR)

    # check if the user is waiting; only then do we send notification emails
    if (self.interfaces.userDb.checkStatus(user, "awaiting_approval")):
        if (requestDict.getValue("new_status") == "approved"):
            # Grant agency_user permission to newly approved users
            self.interfaces.userDb.grantPermission(user, "agency_user")
            link = AccountHandler.FRONT_END
            emailTemplate = {'[URL]': link, '[EMAIL]': system_email}
            newEmail = sesEmail(user.email, system_email,
                                templateType="account_approved",
                                parameters=emailTemplate,
                                database=self.interfaces.userDb)
            newEmail.send()
        elif (requestDict.getValue("new_status") == "denied"):
            emailTemplate = {}
            newEmail = sesEmail(user.email, system_email,
                                templateType="account_rejected",
                                parameters=emailTemplate,
                                database=self.interfaces.userDb)
            newEmail.send()

    # Change user's status
    self.interfaces.userDb.changeStatus(user,
                                        requestDict.getValue("new_status"))
    return JsonResponse.create(StatusCode.OK,
                               {"message": "Status change successful"})
def generate_file(self, agency_code=None):
    """ Generates a file based on the FileGeneration object and updates any Jobs referencing it.

        Args:
            agency_code: agency code to use; required only for A file generation

        Raises:
            ResponseException: if an A file is requested without an agency
                code, or there is no FileGeneration/valid file type to act on
    """
    # File name template depends on whether this is a cached D-file generation
    raw_filename = (GEN_FILENAMES[self.file_type]
                    if not self.file_generation
                    else GEN_FILENAMES[self.file_type].format(
                        self.file_generation.agency_type))
    file_name = S3Handler.get_timestamped_filename(raw_filename)
    if self.is_local:
        file_path = "".join([CONFIG_BROKER['broker_files'], file_name])
    else:
        file_path = "".join(["None/", file_name])

    # Generate the file and upload to S3
    log_data = {
        'message': 'Finished file {} generation'.format(self.file_type),
        'message_type': 'ValidatorInfo',
        'file_type': self.file_type,
        'file_path': file_path
    }
    if self.file_generation:
        # D-file generation driven by a FileGeneration record
        self.generate_d_file(file_path)
        log_data.update({
            'agency_code': self.file_generation.agency_code,
            'agency_type': self.file_generation.agency_type,
            'start_date': self.file_generation.start_date,
            'end_date': self.file_generation.end_date,
            'file_generation_id': self.file_generation.file_generation_id
        })
    elif self.job.file_type.letter_name in ['A', 'E', 'F']:
        log_data['job_id'] = self.job.job_id
        mark_job_status(self.job.job_id, 'running')

        if self.job.file_type.letter_name == 'A':
            if not agency_code:
                raise ResponseException(
                    'Agency code not provided for an A file generation')
            self.generate_a_file(agency_code, file_path)
        else:
            # Call self.generate_%s_file() where %s is e or f based on the Job's file_type
            file_type_lower = self.job.file_type.letter_name.lower()
            getattr(self, 'generate_%s_file' % file_type_lower)()

        mark_job_status(self.job.job_id, 'finished')
    else:
        # Neither a FileGeneration nor a supported job file type was found
        e = 'No FileGeneration object for D file generation.' if self.file_type in ['D1', 'D2'] else \
            'Cannot generate file for {} file type.'.format(self.file_type if self.file_type else 'empty')
        raise ResponseException(e)

    logger.info(log_data)
def generate_from_job(self, job_id, agency_code):
    """ Generates a file for a specified job.

        Args:
            job_id: ID of the upload Job
            agency_code: FREC or CGAC code to generate data from; when falsy,
                derived from the job's submission if one exists

        Raises:
            ResponseException: if the job is not a file generation job, or no
                agency code can be determined
    """
    mark_job_status(job_id, 'running')
    with job_context(job_id, self.is_local) as context:
        sess, job = context

        # Ensure this is a file generation job
        if job.job_type.name != 'file_upload':
            raise ResponseException(
                'Job ID {} is not a file generation job (job type is {})'.
                format(job.job_id, job.job_type.name),
                StatusCode.CLIENT_ERROR, None, ValidationError.jobError)

        # Ensure there is an available agency_code
        if not agency_code:
            if job.submission_id:
                # Prefer the submission's FREC code, falling back to CGAC
                agency_code = job.submission.frec_code if job.submission.frec_code else job.submission.cgac_code
            else:
                # Fixed: removed a no-op .format() call on this placeholder-free message
                raise ResponseException(
                    'An agency_code must be provided to generate a file',
                    StatusCode.CLIENT_ERROR, None, ValidationError.jobError)

        # Generate timestamped file names
        old_filename = job.original_filename
        job.original_filename = S3Handler.get_timestamped_filename(
            CONFIG_BROKER["".join([str(job.file_type.name), "_file_name"])])
        if self.is_local:
            job.filename = "".join(
                [CONFIG_BROKER['broker_files'], job.original_filename])
        else:
            job.filename = "".join(
                [str(job.submission_id), "/", job.original_filename])

        # Generate the file and upload to S3
        if job.file_type.letter_name in ['D1', 'D2']:
            # Update the validation Job if necessary
            if job.submission_id:
                self.update_validation_job_info(job)

            generate_d_file(sess, job, agency_code, self.is_local, old_filename)
        elif job.file_type.letter_name == 'E':
            generate_e_file(sess, job, self.is_local)
        else:
            generate_f_file(sess, job, self.is_local)
def generate_from_job(self):
    """ Generates a file for the Job attached to this handler.

        Uses self.job / self.agency_code / self.agency_type / self.is_local, marks the Job
        running, optionally reuses a cached D-file request, generates and uploads the file,
        then marks the Job finished and logs a summary.

        Raises:
            ResponseException: if the Job is not a file generation job or no agency_code is set
    """
    # Mark Job as running
    mark_job_status(self.job.job_id, 'running')

    # Ensure this is a file generation job
    job_type = self.job.job_type.name
    if job_type != 'file_upload':
        raise ResponseException(
            'Job ID {} is not a file generation job (job type is {})'.format(self.job.job_id, job_type),
            StatusCode.CLIENT_ERROR, None, ValidationError.jobError)

    # Ensure there is an available agency_code
    # Fixed: the message has no placeholders, so the stray .format(...) call was a no-op
    # and has been removed
    if not self.agency_code:
        raise ResponseException(
            'An agency_code must be provided to generate a file',
            StatusCode.CLIENT_ERROR, None, ValidationError.jobError)

    # Retrieve any FileRequest that may have started since the Broker sent the request to SQS
    skip_generation = None
    if self.job.file_type.letter_name in ['D1', 'D2']:
        skip_generation = retrieve_cached_file_request(self.job, self.agency_type, self.agency_code, self.is_local)

    if not skip_generation:
        # Generate timestamped file names
        raw_filename = CONFIG_BROKER["".join([str(self.job.file_type.name), "_file_name"])]
        self.job.original_filename = S3Handler.get_timestamped_filename(raw_filename)
        if self.is_local:
            self.job.filename = "".join([CONFIG_BROKER['broker_files'], self.job.original_filename])
        else:
            self.job.filename = "".join([str(self.job.submission_id), "/", self.job.original_filename])
        self.sess.commit()

        # Generate the file, and upload to S3
        if self.job.file_type.letter_name in ['D1', 'D2']:
            # Update the validation Job if necessary
            update_validation_job_info(self.sess, self.job)

            self.generate_d_file()
        elif self.job.file_type.letter_name == 'A':
            self.generate_a_file()
        elif self.job.file_type.letter_name == 'E':
            self.generate_e_file()
        else:
            self.generate_f_file()

        mark_job_status(self.job.job_id, 'finished')

    logger.info({
        'message': 'Finished file {} generation'.format(self.job.file_type.letter_name),
        'message_type': 'ValidatorInfo', 'job_id': self.job.job_id, 'agency_code': self.agency_code,
        'file_type': self.job.file_type.letter_name, 'start_date': self.job.start_date,
        'end_date': self.job.end_date, 'filename': self.job.original_filename
    })
def validate_job(self, job_id):
    """ Gets the file for a job, validates each row, and sends valid rows to a staging table.

        Args:
            job_id: ID of the validation job to run

        Returns:
            JsonResponse object with an OK status and completion message

        Raises:
            ResponseException: if the job is missing, has incomplete prerequisites, or is not
                a validation job (a file-level error is recorded first in each case)
    """
    # Create connection to job tracker database
    sess = GlobalDB.db().session

    def _job_failure(message):
        # Record a job-level file error, then surface it to the caller
        error_type = ValidationError.jobError
        write_file_error(job_id, None, error_type)
        raise ResponseException(message, StatusCode.CLIENT_ERROR, None, error_type)

    # Get the job
    job = sess.query(Job).filter_by(job_id=job_id).one_or_none()
    if job is None:
        _job_failure('Job ID {} not found in database'.format(job_id))

    # Make sure job's prerequisites are complete
    if not run_job_checks(job_id):
        _job_failure('Prerequisites for Job ID {} are not complete'.format(job_id))

    # Make sure this is a validation job
    job_type_name = job.job_type.name
    if job_type_name not in ('csv_record_validation', 'validation'):
        _job_failure('Job ID {} is not a validation job (job type is {})'.format(job_id, job_type_name))

    # set job status to running and do validations
    mark_job_status(job_id, "running")
    if job_type_name == 'csv_record_validation':
        self.run_validation(job)
    elif job_type_name == 'validation':
        self.run_cross_validation(job)
    else:
        raise ResponseException("Bad job type for validator", StatusCode.INTERNAL_ERROR)

    # Update last validated date
    job.last_validated = datetime.utcnow()
    sess.commit()
    return JsonResponse.create(StatusCode.OK, {"message": "Validation complete"})
def addJobInfoForDFile(self, upload_file_name, timestamped_name, submission_id, file_type, file_type_name,
                       start_date, end_date, cgac_code, job):
    """ Populates upload and validation job objects with start and end dates, filenames, and status,
        then kicks off the external D file API call.

        Args:
            upload_file_name: Filename to use on S3
            timestamped_name: Version of filename without user ID
            submission_id: Submission to add D files to
            file_type: File type as either "D1" or "D2"
            file_type_name: Full name of file type
            start_date: Beginning of period for D file (MM/DD/YYYY)
            end_date: End of period for D file (MM/DD/YYYY)
            cgac_code: Agency to generate D file for
            job: Job object for upload job

        Returns:
            Tuple of (success boolean, error JsonResponse or None)
    """
    jobDb = self.interfaces.jobDb
    try:
        valJob = jobDb.getJobBySubmissionFileTypeAndJobType(submission_id, file_type_name,
                                                            "csv_record_validation")
        valJob.filename = upload_file_name
        valJob.original_filename = timestamped_name
        valJob.job_status_id = jobDb.getJobStatusId("waiting")
        job.start_date = datetime.strptime(start_date, "%m/%d/%Y").date()
        job.end_date = datetime.strptime(end_date, "%m/%d/%Y").date()
        valJob.start_date = datetime.strptime(start_date, "%m/%d/%Y").date()
        valJob.end_date = datetime.strptime(end_date, "%m/%d/%Y").date()
        # Generate random uuid and store generation task
        task_key = uuid4()
        task = FileGenerationTask(generation_task_key=task_key, submission_id=submission_id,
                                  file_type_id=jobDb.getFileTypeId(file_type_name), job_id=job.job_id)
        jobDb.session.add(task)
        jobDb.session.commit()
    except ValueError as e:
        # Date was not in expected format
        exc = ResponseException(str(e), StatusCode.CLIENT_ERROR, ValueError)
        return False, JsonResponse.error(exc, exc.status, url="", start="", end="", file_type=file_type)

    # Create file D API URL with dates and callback URL
    callback = "{}://{}:{}/v1/complete_generation/{}/".format(CONFIG_SERVICES["protocol"],
                                                              CONFIG_SERVICES["broker_api_host"],
                                                              CONFIG_SERVICES["broker_api_port"], task_key)
    CloudLogger.log('DEBUG: Callback URL for {}: {}'.format(file_type, callback), log_type='debug',
                    file_name=self.debug_file_name)
    get_url = CONFIG_BROKER["".join([file_type_name, "_url"])].format(cgac_code, start_date, end_date, callback)

    CloudLogger.log("DEBUG: Calling D file API => " + str(get_url), log_type="debug",
                    file_name=self.debug_file_name)
    try:
        if not self.call_d_file_api(get_url):
            self.handleEmptyResponse(job, valJob)
    except Timeout as e:
        exc = ResponseException(str(e), StatusCode.CLIENT_ERROR, Timeout)
        # Fixed: return the wrapped ResponseException (exc) rather than the raw Timeout (e),
        # consistent with the ValueError branch above
        return False, JsonResponse.error(exc, exc.status, url="", start="", end="", file_type=file_type)

    return True, None
def checkUnique(queryResult, noResultMessage, multipleResultMessage):
    """ Verify that a query produced exactly one result.

        Args:
            queryResult: list of rows returned by a query
            noResultMessage: error message to raise when the result is empty
            multipleResultMessage: error message to raise when there are multiple results

        Returns:
            True when exactly one result is present

        Raises:
            ResponseException: wrapping NoResultFound (client error) on zero results, or
                MultipleResultsFound (internal error) on more than one
    """
    result_count = len(queryResult)
    if result_count == 0:
        # Did not get a result for this job, mark as a job error
        raise ResponseException(noResultMessage, StatusCode.CLIENT_ERROR, NoResultFound)
    if result_count > 1:
        # Multiple results for single job ID
        raise ResponseException(multipleResultMessage, StatusCode.INTERNAL_ERROR, MultipleResultsFound)
    return True
def runUniqueQuery(query, noResultMessage, multipleResultMessage):
    """ Run a query expecting exactly one result; wrap failures in a ResponseException.

        Args:
            query: SQLAlchemy query object expected to yield a single row
            noResultMessage: message for the zero-result error, or the literal False to
                re-raise the raw NoResultFound for caller-specific handling
            multipleResultMessage: message for the multiple-result error

        Returns:
            The single result of query.one()

        Raises:
            NoResultFound: re-raised as-is when noResultMessage is False
            ResponseException: on zero results (client error) or multiple results (internal error)
    """
    try:
        return query.one()
    except NoResultFound as e:
        # Fixed: identity check (is False) instead of value equality (== False), which also
        # matched 0 and other falsy-but-unintended sentinel values
        if noResultMessage is False:
            # Raise the exception as is, used for specific handling
            raise e
        raise ResponseException(noResultMessage, StatusCode.CLIENT_ERROR, NoResultFound)
    except MultipleResultsFound as e:
        raise ResponseException(multipleResultMessage, StatusCode.INTERNAL_ERROR, MultipleResultsFound)
def create_email_confirmation(self, system_email):
    """ Creates a user record (if needed) and sends an email confirmation link.

        Args:
            system_email: (string) email address used to send messages

        Returns:
            JsonResponse confirming the email was sent, or an error response for a missing/
            malformed email or an already-registered user
    """
    sess = GlobalDB.db().session
    request_fields = RequestDictionary.derive(self.request)
    try:
        if 'email' not in request_fields:
            raise ResponseException("Request body must include email", StatusCode.CLIENT_ERROR)
        email = request_fields['email']
        # Fixed: raw string for the regex; "\." in a plain string is an invalid escape
        # sequence (SyntaxWarning on modern Python)
        if not re.match(r"[^@]+@[^@]+\.[^@]+", email):
            raise ValueError("Invalid Email Format")
    except (ResponseException, ValueError) as exc:
        return JsonResponse.error(exc, StatusCode.CLIENT_ERROR)

    try:
        user = sess.query(User).filter(
            func.lower(User.email) == func.lower(request_fields['email'])
        ).one()
    except NoResultFound:
        # Create user with specified email if none is found
        user = User(email=email)
        user.user_status_id = USER_STATUS_DICT["awaiting_confirmation"]
        user.permissions = 0
        sess.add(user)
        sess.commit()
    else:
        try:
            # Only users still pending confirmation may request another confirmation email
            good_statuses = (USER_STATUS_DICT["awaiting_confirmation"],
                             USER_STATUS_DICT["email_confirmed"])
            if user.user_status_id not in good_statuses:
                raise ResponseException("User already registered", StatusCode.CLIENT_ERROR)
        except ResponseException as exc:
            return JsonResponse.error(exc, exc.status)

    # Build the registration link and send the templated confirmation email
    email_token = sesEmail.createToken(email, "validate_email")
    link = "".join([AccountHandler.FRONT_END, '#/registration/', email_token])
    email_template = {'[USER]': email, '[URL]': link}
    new_email = sesEmail(email, system_email, templateType="validate_email", parameters=email_template)
    new_email.send()
    return JsonResponse.create(StatusCode.OK, {"message": "Email Sent"})
def run_instance_function(accountManager, accountFunction, getSystemEmail=False, getSession=False, getUser=False,
                          getCredentials=False):
    """ Standard error handling wrapper around each account route.

        Args:
            accountManager: handler whose interfaces get wired up before the call
            accountFunction: route function to invoke
            getSystemEmail: pass the system email as the first argument
            getSession: pass the flask session as an argument
            getUser: pass the logged-in user's name (requires getCredentials)
            getCredentials: pass the create-credentials flag alongside the user

        Returns:
            The route function's response, or a JsonResponse error on failure
    """
    interfaces = InterfaceHolder()
    try:
        accountManager.addInterfaces(interfaces)
        # Dispatch on the flag combination; guard-style returns replace the elif ladder
        if getSystemEmail and getSession:
            return accountFunction(RouteUtils.SYSTEM_EMAIL, session)
        if getSystemEmail:
            return accountFunction(RouteUtils.SYSTEM_EMAIL)
        if getSession:
            return accountFunction(session)
        if getUser:
            if not getCredentials:
                # Currently no functions with user but not credentials flag
                raise ValueError("Invalid combination of flags to run_instance_function")
            return accountFunction(LoginSession.getName(session), RouteUtils.CREATE_CREDENTIALS)
        return accountFunction()
    except ResponseException as e:
        return JsonResponse.error(e, e.status)
    except Exception as e:
        # Convert unexpected failures into a uniform internal-error response
        exc = ResponseException(str(e), StatusCode.INTERNAL_ERROR, type(e))
        return JsonResponse.error(exc, exc.status)
    finally:
        interfaces.close()