def fail_job(message):
    # Log the failure, stamp the job report as failed, persist the job's
    # state, emit the report, then terminate the worker with a non-zero
    # exit code. Relies on module-level globals: log, job, job_report,
    # updateJobDetails.
    log("Failed Job '%s', message is '%s'" % (job.job_name, message))
    report = job_report
    report.end_time = datetime.now()
    report.status = False
    report.message = message
    updateJobDetails()
    report.render()
    sys.exit(-1)
def render_to_email(self):
    """Build and send the job report email.

    Collects alert recipients (global alert groups, job-specific alert
    groups, and the job owner — the set depends on whether the job
    succeeded), renders a plain-text summary and a per-file CSV
    attachment, and hands both to email.send_email().

    NOTE(review): relies on module-level globals `job`, `job_temp`,
    `xsftp` and `email`, presumably set up by the surrounding script —
    confirm before reusing this method elsewhere.
    """
    recipients = list()
    if self.status:
        # Gather the 'alert on success' users
        # 1) Global Alert Users
        global_groups = xsftp.webui.models.Configuration.objects.all()[0].job_success_alert_groups.all()
        # global_groups is now just a list of groups
        # for each of those groups in global_groups, we want to get out all the users
        for group in global_groups:
            for user in group.users.all():
                if user not in recipients:
                    recipients.append(user)
        # 2) Job Specific Groups
        if not job_temp.suppress_group_alerts:
            job_groups = job_temp.alert_groups_on_success.all()
            for group in job_groups:
                for user in group.users.all():
                    if user not in recipients:
                        recipients.append(user)
        # 3) Job Owner
        if job_temp.alert_owner_on_success and job_temp.owner not in recipients:
            recipients.append(job_temp.owner)
    else:
        # Gather the 'alert on fail' users
        # 1) Global Alert Users
        global_groups = xsftp.webui.models.Configuration.objects.all()[0].job_failure_alert_groups.all()
        # global_groups is now just a list of groups
        # for each of those groups in global_groups, we want to get out all the users
        for group in global_groups:
            for user in group.users.all():
                if user not in recipients:
                    recipients.append(user)
        # 2) Job Specific Groups
        if not job_temp.suppress_group_alerts:
            job_groups = job_temp.alert_groups_on_fail.all()
            for group in job_groups:
                for user in group.users.all():
                    if user not in recipients:
                        recipients.append(user)
        # 3) Job Owner
        if job_temp.alert_owner_on_fail and job_temp.owner not in recipients:
            recipients.append(job_temp.owner)
    # Only users with an email address on file can actually receive the report.
    email_addresses = [user.email for user in recipients if user.email]
    # Generate some strings that will be used in the message
    # Duration String
    duration = self.end_time - self.start_time
    total_seconds = duration.seconds + (duration.days * 86400)
    # NOTE: Python 2 integer division — these yield whole hours/minutes/seconds.
    hours = total_seconds / 3600
    minutes = total_seconds % 3600 / 60
    seconds = total_seconds % 3600 % 60
    duration_string = "%002d:%002d:%002d" % (hours, minutes, seconds)
    # Data Transferred string
    if self.source_files:
        # Sum the sizes of all files with a recorded size; the `or [0]`
        # guards reduce() against an empty sequence (every size None).
        total_data = reduce(lambda x, y: x + y, [f.file_size for f in self.source_files if f.file_size is not None] or [0])
    else:
        total_data = 0
    total_data = int(total_data)
    # Pick the largest binary unit with a non-zero integer quotient
    # (Python 2 integer division makes each test a ">= 1 unit" check).
    if total_data / 1024**3:
        data = "%.2f GB" % (float(total_data)/1024**3)
    elif total_data / 1024**2:
        data = "%.2f MB" % (float(total_data)/1024**2)
    elif total_data / 1024:
        data = "%.2f KB" % (float(total_data)/1024)
    else:
        data = "%s bytes" % total_data
    if not self.source_files:
        data = "0 (no source files selected for copy)"
    # PreScript String
    # NOTE(review): reads job.use_pre_script here but job_temp.use_post_script
    # below — confirm the job/job_temp asymmetry is intentional.
    if job.use_pre_script:
        pre_script_string = "\nPrescript: %s" % job.pre_script.script_name
    else:
        pre_script_string = ""
    if job_temp.use_post_script:
        post_script_string = "\nPostscript: %s" % job_temp.post_script.script_name
    else:
        post_script_string = ""
    # generate the message
    # generate message, starting with pre-script and postscript message blocks
    pre_script_block = post_script_block = ""
    if self.pre_script_output:
        # pre/post_script_output is indexed as (return code, stdout, stderr).
        # NOTE(review): pre-script stdout is not defaulted with `or "None"`
        # like the other fields — confirm whether that is deliberate.
        pre_script_block = "\nPre-Script Output\n=================\n***** Return Code:\n%s\n***** Output Data (stdout):\n%s\n***** Output Data (stderr):\n%s\n" % (
            self.pre_script_output[0],
            self.pre_script_output[1],
            (self.pre_script_output[2] or "None"),
        )
    if self.post_script_output:
        post_script_block = "\nPost-Script Output\n==================\n***** Return Code:\n%s\n***** Output Data (stdout):\n%s\n***** Output Data (stderr):\n%s\n" % (
            self.post_script_output[0],
            (self.post_script_output[1] or "None"),
            (self.post_script_output[2] or "None"),
        )
    # generate full message
    message = """
This is an automatic message from Fcombine: %(device_name)s

The Job '%(job_name)s' has %(statusString)s
See attachment for per-file details.

Job Information
===============
Job Name: %(job_name)s
Owner: %(owner)s
Comment: %(comment)s
Run Count: %(count)s%(prescript)s%(postscript)s
%(runnow)s

Transfer Details
================
Source Server Link: %(source)s
Destination Server Link: %(dest)s
Destination Path: %(dest_path)s

Job Results
===========
Status: %(status)s
Message: %(message)s
Start Time: %(start)s
End Time: %(end)s
Duration: %(dur)s
Data Transfer Size: %(data)s
%(pre_script_output)s
%(post_script_output)s
""" % {
        "device_name":xsftp.webui.models.Configuration.objects.all()[0].device_name,
        "job_name":job.job_name,
        "owner":job.owner.username,
        "comment":job.comment,
        "count":job.run_count,
        "prescript":pre_script_string,
        "postscript":post_script_string,
        # self.status is True/False; [True, False].index maps True->0, False->1.
        "status":["Success", "Fail"][[True, False].index(self.status)],
        "statusString": ["COMPLETED SUCCESSFULLY.", "FAILED."][[True, False].index(self.status)],
        "message":self.message,
        "start":self.start_time.ctime(),
        "end":self.end_time.ctime(),
        "dur":duration_string,
        "data":data,
        "source":job_temp.source_server.server_name,
        "dest":job_temp.dest_server.server_name,
        "dest_path":job_temp.dest_path,
        "runnow":["", "This job was invoked manually"][self.runnow],
        "pre_script_output":pre_script_block,
        "post_script_output":post_script_block,
    }
    # generate the attachment content
    attachment_content = """Source Files,Destination File Name,Size (bytes),Attempted,Status,Start Time,End Time,Duration (seconds),Message\n"""
    # Number of leading path characters to strip so file paths are shown
    # relative to the server link mount point (SMP_DIR + server id).
    source_strip = len(xsftp.common.constants.SMP_DIR + str(job_temp.source_server.id))
    dest_strip = len(xsftp.common.constants.SMP_DIR + str(job_temp.dest_server.id))
    for f in self.source_files:
        sourceString = "%s:%s" % (job_temp.source_server.server_name, f.src_path[source_strip:])
        if f.dest_path:
            destString = "%s:%s" % (job_temp.dest_server.server_name, f.dest_path[dest_strip:])
        else:
            destString = "None"
        if f.start_time:
            start_string = f.start_time.ctime()
        else:
            start_string = ""
        if f.end_time:
            end_string = f.end_time.ctime()
        else:
            end_string = ""
        # Per-file duration in whole seconds; None when the file never finished.
        if f.end_time:
            duration = (f.end_time - f.start_time).seconds
        else:
            duration = None
        attachment_content += """%(src)s,%(dest)s,%(size)s,%(attempted)s,%(status)s,%(start)s,%(end)s,%(dur)s,%(msg)s\n""" % {"src":sourceString, "dest":destString, "size":f.file_size, "status":["Pass", "Fail"][[True, False].index(f.status)], "attempted":["Yes", "No"][[True, False].index(f.attempted)], "start":start_string, "end":end_string, "dur":duration, "msg":f.message or ""}
    try:
        email.send_email(subject="Fcombine Job Report for job '%s': %s" % (job.job_name, ["SUCCESS", "FAIL"][[True, False].index(self.status)]), body=message, to=email_addresses, attachments=[('Fcombine_Job_Details.csv', attachment_content, 'text/csv')])
    # Python 2 except syntax; delivery failure is logged, not re-raised.
    except xsftp.webui.constants.Email_Error, e:
        log("Error sending email report for job '%s': %s" % (job.job_name, e))
def render_to_syslog(self):
    # TODO Gen2: log more job detail here — syslog is the only place the
    # report data survives if the email report fails or must be retrieved
    # later.
    # Failure needs no log entry here: fail_job() has already logged it.
    if job_report.status:
        log("Job '%s' completed successfully" % job.job_name)
############################### # CODE STARTS HERE ############################### sys.stdout = sys.stderr = Log() #---------------------------------- if not runNow: # Check if the job has an expiry, and if so check if it has expired, (all jobs) if job.expiry and job.expiry < currentTime: sys.exit(0) # If job is run_once, check that we have the right year if job.schedule_type == 0 and job.run_at.year != currentTime.year: sys.exit(0) log("Job '%s' received start signal from scheduler and is starting..." % job.job_name) else: log("Job '%s' received 'Run Now' signal from user and is starting..." % job.job_name) job_report = JobReport() # Set appropriate details on job_report and job object, and save the job job_report.runnow = runNow job_report.start_time = job.start_time = datetime.now() job.run_count += 1 job.pid = os.getpid() job.running_now = True # Register a handler for a SIGTERM signal.signal(signal.SIGTERM, handleSigTerm) try: job.save() # check if the job is sane if job.errorFlags: