def decorated_func(*args, **kwargs):
    # Drive func in an endless poll loop: any failure is traced and
    # suppressed so the background thread never dies, then we wait out
    # the configured sandbox-actions polling interval before retrying.
    while True:
        try:
            func(*args, **kwargs)
        except Exception:
            tracer.log_exception_trace(traceback.format_exc())
        delay = configuration.get_jrds_get_sandbox_actions_pooling_freq()
        time.sleep(delay)
def decorated_func(*args, **kwargs):
    # Invoke func forever on the sandbox-actions polling cadence.
    # Exceptions are logged via the tracer and swallowed so one bad
    # iteration cannot take the polling thread down.
    while True:
        try:
            func(*args, **kwargs)
        except Exception:
            tracer.log_exception_trace(traceback.format_exc())
        interval = configuration.get_jrds_get_sandbox_actions_pooling_freq()
        time.sleep(interval)
def decorated_func(*args, **kwargs):
    # Repeatedly invoke func while the module-level routine_loop flag
    # stays true. On any exception the whole process is torn down: we
    # trace the error, briefly sleep so the trace reaches stdout (traces
    # are emitted on background threads), then exit.
    global routine_loop
    while routine_loop:
        try:
            func(*args, **kwargs)
        except Exception:
            tracer.log_exception_trace(traceback.format_exc())
            # give the trace time to make it to stdout; this works as
            # long as all threads are daemon (daemon threads require 2.6+)
            time.sleep(1)
            sys.exit(1)
        delay = configuration.get_jrds_get_job_actions_pooling_freq()
        time.sleep(delay)
def decorated_func(*args, **kwargs):
    # Poll func on the job-actions cadence until routine_loop is cleared.
    # Unlike the keep-alive wrappers, a failure here is fatal: the trace
    # is flushed to stdout and the process exits with status 1.
    global routine_loop
    while routine_loop:
        try:
            func(*args, **kwargs)
        except Exception:
            tracer.log_exception_trace(traceback.format_exc())
            # allow the trace to make it to stdout (traces run on
            # background threads); relies on all threads being daemon,
            # which is only supported on 2.6+
            time.sleep(1)
            sys.exit(1)
        time.sleep(configuration.get_jrds_get_job_actions_pooling_freq())
def run(self):
    """Monitor the job's subprocess for output (which will be uploaded as streams).

    Reads the runtime process's stdout line by line, classifies each line
    by its prefix (debug/error/verbose/warning, any capitalization), and
    dispatches it to the matching process_*_stream handler. Stops when the
    pipe is exhausted and the process has exited.

    Notes:
    PowerShell stdout : http://stackoverflow.com/questions/22349139/utf-8-output-from-powershell

    IMPORTANT: Do not log streams to cloud.
    """
    # str.startswith accepts a tuple of prefixes — build the case
    # variants (lower / UPPER / Capitalized) once, outside the loop,
    # instead of chaining three startswith calls per line.
    debug_prefixes = (PREFIX_DEBUG.lower(), PREFIX_DEBUG.upper(), PREFIX_DEBUG.capitalize())
    error_prefixes = (PREFIX_ERROR.lower(), PREFIX_ERROR.upper(), PREFIX_ERROR.capitalize())
    verbose_prefixes = (PREFIX_VERBOSE.lower(), PREFIX_VERBOSE.upper(), PREFIX_VERBOSE.capitalize())
    warning_prefixes = (PREFIX_WARNING.lower(), PREFIX_WARNING.upper(), PREFIX_WARNING.capitalize())

    stream_count = 0
    while True:
        try:
            # NOTE(review): getwriter on a readable pipe relies on the codecs
            # wrapper delegating readline() to the wrapped stream; kept as-is
            # to preserve existing decoding behavior.
            output = codecs.getwriter('utf8')(self.runtime_process.stdout).readline()
            if output == '' and self.runtime_process.poll() is not None:
                # empty read + terminated process: the pipe is exhausted
                break
            elif output:
                if output.startswith(debug_prefixes):
                    self.process_debug_stream(stream_count, output)
                elif output.startswith(error_prefixes):
                    self.process_error_stream(stream_count, output)
                elif output.startswith(verbose_prefixes):
                    self.process_verbose_stream(stream_count, output)
                elif output.startswith(warning_prefixes):
                    self.process_warning_stream(stream_count, output)
                else:
                    self.process_output_stream(stream_count, output)
                stream_count += 1
                # leave trace at the end to prevent encoding issue from pushing streams to cloud
                # leave this as debug trace to prevent logging customer streams to automation logs
                tracer.log_debug_trace("STDOUT : " + str(output.strip()))
        except Exception:
            # was a bare except: keep the reader thread alive on errors, but
            # no longer mask SystemExit/KeyboardInterrupt
            tracer.log_exception_trace(traceback.format_exc())
            continue
    tracer.log_debug_trace("Stream processing complete.")
def run(self):
    """Monitor the job's subprocess for output (which will be uploaded as streams).

    Reads the runtime process's stdout line by line, classifies each line
    by its prefix (debug/error/verbose/warning, any capitalization), and
    dispatches it to the matching process_*_stream handler. Stops when the
    pipe is exhausted and the process has exited.

    Notes:
    PowerShell stdout : http://stackoverflow.com/questions/22349139/utf-8-output-from-powershell

    IMPORTANT: Do not log streams to cloud.
    """
    # str.startswith accepts a tuple of prefixes — build the case
    # variants (lower / UPPER / Capitalized) once, outside the loop,
    # instead of chaining three startswith calls per line.
    debug_prefixes = (PREFIX_DEBUG.lower(), PREFIX_DEBUG.upper(), PREFIX_DEBUG.capitalize())
    error_prefixes = (PREFIX_ERROR.lower(), PREFIX_ERROR.upper(), PREFIX_ERROR.capitalize())
    verbose_prefixes = (PREFIX_VERBOSE.lower(), PREFIX_VERBOSE.upper(), PREFIX_VERBOSE.capitalize())
    warning_prefixes = (PREFIX_WARNING.lower(), PREFIX_WARNING.upper(), PREFIX_WARNING.capitalize())

    stream_count = 0
    while True:
        try:
            # NOTE(review): getwriter on a readable pipe relies on the codecs
            # wrapper delegating readline() to the wrapped stream; kept as-is
            # to preserve existing decoding behavior.
            output = codecs.getwriter('utf8')(self.runtime_process.stdout).readline()
            if output == '' and self.runtime_process.poll() is not None:
                # empty read + terminated process: the pipe is exhausted
                break
            elif output:
                if output.startswith(debug_prefixes):
                    self.process_debug_stream(stream_count, output)
                elif output.startswith(error_prefixes):
                    self.process_error_stream(stream_count, output)
                elif output.startswith(verbose_prefixes):
                    self.process_verbose_stream(stream_count, output)
                elif output.startswith(warning_prefixes):
                    self.process_warning_stream(stream_count, output)
                else:
                    self.process_output_stream(stream_count, output)
                stream_count += 1
                # leave trace at the end to prevent encoding issue from pushing streams to cloud
                # leave this as debug trace to prevent logging customer streams to automation logs
                tracer.log_debug_trace("STDOUT : " + str(output.strip()))
        except Exception:
            # was a bare except: keep the reader thread alive on errors, but
            # no longer mask SystemExit/KeyboardInterrupt
            tracer.log_exception_trace(traceback.format_exc())
            continue
    tracer.log_debug_trace("Stream processing complete.")
self.job_id, jobstatus.FAILED, True, exception=e.message) self.unload_job() except InvalidRunbookSignature, e: tracer.log_debug_trace("Runbook signature is invalid.") self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.FAILED, True, exception=e.message) self.unload_job() except Exception: tracer.log_debug_trace("Job runtime unhandled exception.") tracer.log_exception_trace(traceback.format_exc()) self.job_thread_exception_queue.put(sys.exc_info()) def execute_runbook(self): """Executes the job runtime and performs runtime operation (stream upload / status change).""" # set status to running tracer.log_debug_trace("Starting runbook.") self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.RUNNING, False) # create runbook subprocess self.runtime.start_runbook_subprocess() # monitor runbook output for streams stream_handler = StreamHandler(self.job_data, self.runtime.runbook_subprocess,
try: self.load_job() self.initialize_runtime() self.execute_runbook() self.unload_job() except (WorkerUnsupportedRunbookType, OSUnsupportedRunbookType), e: tracer.log_debug_trace("Runbook type not supported.") self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.FAILED, True, exception=e.message) self.unload_job() except InvalidRunbookSignature, e: tracer.log_debug_trace("Runbook signature is invalid.") self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.FAILED, True, exception=e.message) self.unload_job() except Exception: tracer.log_debug_trace("Job runtime unhandled exception.") tracer.log_exception_trace(traceback.format_exc()) self.job_thread_exception_queue.put(sys.exc_info()) def execute_runbook(self): """Executes the job runtime and performs runtime operation (stream upload / status change).""" # set status to running tracer.log_debug_trace("Starting runbook.") self.jrds_client.set_job_status(self.sandbox_id, self.job_id, jobstatus.RUNNING, False) # create runbook subprocess self.runtime.start_runbook_subprocess() # monitor runbook output for streams stream_handler = StreamHandler(self.job_data, self.runtime.runbook_subprocess, self.jrds_client) stream_handler.daemon = True stream_handler.start()