def run(self) -> None:
    """Run the job by spawning its process and waiting for completion.

    The spawn is non-blocking (``bg=True``) followed by an explicit
    ``wait()`` so that ``self.proc_handle`` is set while the process is
    still running — this is what allows ``interrupt()`` to kill it.
    On spawn failure the error is logged, the spawn-error flag is set,
    and the method returns without waiting.
    """
    cmd_options = self.cmd_options
    # Do non blocking spawn followed by a wait in order to have
    # self.proc_handle set. This allows support for interrupt.
    cmd_options["bg"] = True

    with self.lock:
        if self.interrupted:  # defensive code
            logger.debug("job %s has been cancelled", self.uid)
            return
        try:
            cmdline = self.cmdline
            assert cmdline is not None, "cmdline cannot be None"
            proc_handle = Run(cmdline, **cmd_options)
            self.proc_handle = proc_handle
        except Exception:
            # Fixed typo in log message: "spawing" -> "spawning".
            logger.exception("error when spawning job %s", self.uid)
            self.__spawn_error = True
            return
    # Wait outside the lock so interrupt() can acquire it and kill us.
    proc_handle.wait()
    logger.debug(
        "job %s status %s (pid:%s)", self.uid, proc_handle.status, proc_handle.pid
    )
class ProcessJob(Job):
    """Specialized version of Job that spawns processes.

    Subclasses must provide the ``cmdline`` property returning the
    command line to spawn; ``cmd_options`` may be overridden to tune
    how the process is launched.
    """

    # NOTE(review): ``__metaclass__`` is the Python 2 way to declare a
    # metaclass; under Python 3 this attribute is ignored and the class
    # is NOT actually abstract. Confirm the intended Python versions
    # before changing to ``metaclass=abc.ABCMeta`` in the class header.
    __metaclass__ = abc.ABCMeta

    def __init__(self, uid, data, notify_end):
        super(ProcessJob, self).__init__(uid, data, notify_end)
        # Handle on the spawned process; None until run() spawns it.
        self.proc_handle = None

    def run(self):
        """Run the job.

        Non-blocking spawn followed by a wait so that self.proc_handle
        is set while the process runs (needed by interrupt()).
        """
        cmd_options = self.cmd_options
        # Do non blocking spawn followed by a wait in order to have
        # self.proc_handle set. This allows support for interrupt.
        cmd_options['bg'] = True
        self.proc_handle = Run(self.cmdline, **cmd_options)
        self.proc_handle.wait()

    # NOTE(review): abc.abstractproperty is deprecated since Python 3.3
    # in favor of stacking @property and @abc.abstractmethod; kept as-is
    # for Python 2 compatibility — confirm target versions.
    @abc.abstractproperty
    def cmdline(self):
        """Return the command line of the process to be spawned.

        :return: the command line
        :rtype: list[str]
        """
        pass

    @property
    def cmd_options(self):
        """Process options.

        Important note: don't use PIPE for output or error parameters
        this can cause locking error in case the process is interrupted.
        The default redirect output and error to the console.

        The pipe behavior can easily be emulated by writing to a file
        and modifying the run method to read the file content when the
        process finish.

        :return: options for e3.os.process.Run as a dict
        :rtype: dict
        """
        return {'output': None}

    def interrupt(self):
        """Kill running process tree."""
        # hasattr guard is defensive in case interrupt() is called
        # before __init__ completed.
        if hasattr(self, 'proc_handle') and \
                self.proc_handle and \
                self.proc_handle.is_running():
            # Fixed typo in log message: "interrrupt" -> "interrupt".
            logger.debug('interrupt job %s', self.uid)
            self.proc_handle.kill(recursive=True)
            # NOTE(review): interrupted is only set when a live process
            # was actually killed — confirm that is intentional.
            self.interrupted = True
def run(self):
    """Run the job.

    Spawns the process in the background and then waits for it, so that
    self.proc_handle is available while the process is running (needed
    by interrupt()). On spawn failure the error is logged, the
    spawn-error flag is set and the method returns without waiting.
    """
    cmd_options = self.cmd_options
    # Do non blocking spawn followed by a wait in order to have
    # self.proc_handle set. This allows support for interrupt.
    cmd_options['bg'] = True

    with self.lock:
        if self.interrupted:  # defensive code
            logger.debug('job %s has been cancelled', self.uid)
            return
        try:
            proc_handle = Run(self.cmdline, **cmd_options)
            self.proc_handle = proc_handle
        except Exception:
            # Fixed typo in log message: "spawing" -> "spawning".
            logger.exception('error when spawning job %s', self.uid)
            self.__spawn_error = True
            return
    # Wait outside the lock so interrupt() can acquire it and kill us.
    proc_handle.wait()
    logger.debug('job %s status %s (pid:%s)',
                 self.uid, proc_handle.status, proc_handle.pid)