def create_job_details(self, key, job_config, logfile, status):
    """Create a `JobDetails` for a single job

    Parameters
    ----------
    key : str
        Key used to identify this particular job

    job_config : dict
        Dictionary with arguments passed to this particular job

    logfile : str
        Name of the associated log file

    status : int
        Current status of the job

    Returns
    -------
    job_details : `fermipy.jobs.JobDetails`
        Object with the details about a particular job.
    """
    self.update_args(job_config)
    job_details = JobDetails(jobname=self.linkname,
                             jobkey=key,
                             appname=self.appname,
                             logfile=logfile,
                             job_config=job_config,
                             timestamp=get_timestamp(),
                             file_dict=copy.deepcopy(self.files),
                             sub_file_dict=copy.deepcopy(self.sub_files),
                             status=status)
    return job_details
def _create_job_details(self, key, job_config, logfile, status):
    """Create a `JobDetails` for a single job

    Parameters
    ----------
    key : str
        Key used to identify this particular job

    job_config : dict
        Dictionary with arguments passed to this particular job

    logfile : str
        Name of the associated log file

    status : int
        Current status of the job

    Returns
    -------
    job_details : `fermipy.jobs.JobDetails`
        Object with the details about a particular job.
    """
    self.update_args(job_config)
    job_details = JobDetails(jobname=self.full_linkname,
                             jobkey=key,
                             appname=self.appname,
                             logfile=logfile,
                             job_config=job_config,
                             timestamp=get_timestamp(),
                             file_dict=copy.deepcopy(self.files),
                             sub_file_dict=copy.deepcopy(self.sub_files),
                             status=status)
    return job_details
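# Usage sketch (illustrative, not part of fermipy): one way the helper above
# might be driven when registering a single scatter job.  The `link` argument
# and the example key/config values are assumptions; the JobDetails attribute
# `fullkey` and the JobStatus values are taken from the code in this module.
def _example_register_one_job(link):
    """Register a single hypothetical scatter job on `link`."""
    job_config = dict(infile='input.fits', logfile='analyze-roi.log')
    details = link._create_job_details(key='analyze-roi',
                                       job_config=job_config,
                                       logfile=job_config['logfile'],
                                       status=JobStatus.not_ready)
    # Jobs are stored by their full key, which combines jobname and jobkey
    link.jobs[details.fullkey] = details
    return details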
def build_job_dict(self):
    """Build a dictionary of `JobDetails` objects for the internal `Link`"""
    if self.args['dry_run']:
        status = JobStatus.unknown
    else:
        status = JobStatus.not_ready

    # Register a '__top__' job for this link if one does not already exist
    if '__top__' not in self.jobs:
        job_details = JobDetails(jobname=self.linkname,
                                 jobkey='__top__',
                                 appname=self.appname,
                                 logfile="%s_top.log" % self.linkname,
                                 job_config=self.args,
                                 timestamp=get_timestamp(),
                                 file_dict=copy.deepcopy(self.files),
                                 sub_file_dict=copy.deepcopy(self.sub_files),
                                 status=status)
        self.jobs[job_details.fullkey] = job_details

    # Register one scatter job per entry in the job configuration dictionary
    for jobkey, job_config in sorted(self._job_configs.items()):
        full_job_config = self._merge_config(job_config)
        ScatterGather._make_scatter_logfile_name(jobkey, self.linkname,
                                                 full_job_config)
        logfile = full_job_config.get('logfile')
        self._scatter_link.register_job(key=jobkey,
                                        job_config=full_job_config,
                                        logfile=logfile,
                                        status=status)
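# Illustrative sketch (not part of fermipy): after build_job_dict() runs, the
# link's job dictionary holds a '__top__' entry for the overall link, and each
# entry of _job_configs has been registered on the internal scatter link.  The
# `sg` argument below is an assumed, already-configured ScatterGather object.
def _example_inspect_job_dict(sg):
    """Print the status of every job known to `sg` after build_job_dict()."""
    sg.build_job_dict()
    for fullkey, details in sorted(sg.jobs.items()):
        print("%s : %s" % (fullkey, JOB_STATUS_STRINGS[details.status]))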
def write_status_to_log(self, stream=sys.stdout, status=JobStatus.unknown):
    """Write the status of this job to a log stream."""
    stream.write("Timestamp: %i\n" % get_timestamp())
    if status == JobStatus.no_job:
        stream.write("No Job\n")
    else:
        stream.write("%s\n" % JOB_STATUS_STRINGS[status])
def _write_status_to_log(self, return_code, stream=sys.stdout):
    """Write the status of this job to a log stream.

    This is used to check on job completion."""
    stream.write("Timestamp: %i\n" % get_timestamp())
    if return_code == 0:
        stream.write("%s\n" % self._interface.string_successful)
    else:
        stream.write("%s %i\n" % (self._interface.string_exited, return_code))
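# Minimal sketch (illustrative only): recording job status with the two
# helpers above.  The `link` instance and the default log file name are
# assumptions; a return code of 0 is reported with the batch interface's
# "successful" string, anything else with its "exited" string plus the code.
def _example_log_completion(link, return_code, logfile='analyze-roi_top.log'):
    """Append a timestamped status record to `logfile`."""
    with open(logfile, 'a') as logstream:
        # Record the current JobStatus (JobStatus.no_job prints "No Job")
        link.write_status_to_log(stream=logstream, status=JobStatus.unknown)
        # Record completion based on the process return code
        link._write_status_to_log(return_code, stream=logstream)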