def post(self, **kwargs):
    """
    Start an execution.

    Expects a JSON body containing:
      * service_id (int): id of the service to run the workflow on
      * workflow_name (str): name of the workflow to execute

    Runs the workflow synchronously and returns the execution as a dict
    (timestamps excluded) together with a 201 status code.
    """
    request_dict = rest_utils.get_json_and_verify_params(
        dict(
            service_id={'type': int},
            workflow_name={'type': basestring},
        )
    )
    service = self.model.service.get(request_dict['service_id'])
    executor = process.ProcessExecutor(plugin_manager=self.plugin_manager)
    try:
        compiler = execution_preparer.ExecutionPreparer(
            self.model,
            self.resource,
            self.plugin_manager,
            service,
            request_dict['workflow_name']
        )
        workflow_ctx = compiler.prepare(executor=executor)
        engine_ = engine.Engine(executor)
        engine_.execute(workflow_ctx)
        # Timestamps are dropped from the response payload; only the
        # execution's other fields are serialized.
        return workflow_ctx.execution.to_dict(
            workflow_ctx.execution.fields() -
            {'created_at', 'started_at', 'ended_at'}), \
            201
    finally:
        # The executor spawns worker processes; close it even when the
        # execution fails, matching the try/finally pattern the executor
        # fixtures elsewhere in this codebase use.
        executor.close()
def executor(plugin_manager):
    """Yield a plugin-aware ProcessExecutor; close it on teardown."""
    process_executor = process.ProcessExecutor(
        plugin_manager=plugin_manager,
        python_path=[tests.ROOT_DIR])
    try:
        yield process_executor
    finally:
        # Guarantee worker-process cleanup regardless of how the
        # consuming test exits.
        process_executor.close()
def post(self, execution_id, **kwargs):
    """
    Apply an execution action (cancel, force-cancel) by id.

    Expects a JSON body with an ``action`` key; any other value raises
    BadParametersError.
    """
    request_dict = get_json_and_verify_params({'action'})
    action = request_dict['action']
    valid_actions = ['cancel', 'force-cancel']
    if action not in valid_actions:
        raise manager_exceptions.BadParametersError(
            'Invalid action: {0}, Valid action values are: {1}'.format(
                action, valid_actions))
    if action in ('cancel', 'force-cancel'):
        # BUG FIX: the original read request_dict['workflow_name'], but only
        # 'action' is verified/present in the body, so that lookup always
        # raised KeyError. The workflow name (and service) belong to the
        # execution being cancelled, so read them from the execution record.
        # NOTE(review): assumes the execution model exposes .service and
        # .workflow_name -- confirm against the model definition.
        execution = self.model.execution.get(execution_id)
        executor = process.ProcessExecutor(self.plugin_manager)
        try:
            compiler = execution_preparer.ExecutionPreparer(
                self.model,
                self.resource,
                self.plugin_manager,
                execution.service,
                execution.workflow_name
            )
            workflow_ctx = compiler.prepare(execution_id=execution_id)
            engine_ = engine.Engine(executor)
            engine_.cancel_execution(workflow_ctx)
        finally:
            # Release the executor's subprocess resources even on failure.
            executor.close()
def execute(env, workflow_name):
    """
    Run *workflow_name* on the environment's service with a live log feed.

    The workflow is executed on a background thread while the main thread
    polls model storage for new log records and forwards them to the
    environment's ctx logger at the matching level.

    :param env: environment object providing model_storage, resource_storage,
        plugin_manager, service and ctx_logger
    :param workflow_name: name of the workflow to execute
    :raises AriaWorkflowError: if the execution does not end in SUCCEEDED
    """
    ctx = execution_preparer.ExecutionPreparer(
        env.model_storage,
        env.resource_storage,
        env.plugin_manager,
        env.service,
        workflow_name).prepare()
    # strict_loading=False: presumably relaxes plugin loading for this
    # environment -- TODO confirm against ProcessExecutor's documentation.
    eng = engine.Engine(
        process.ProcessExecutor(env.plugin_manager, strict_loading=False))
    # Since we want a live log feed, we need to execute the workflow
    # while simultaneously printing the logs into the CFY logger. This Thread
    # executes the workflow, while the main process thread writes the logs.
    thread = Thread(target=eng.execute, kwargs=dict(ctx=ctx))
    thread.start()
    log_iterator = logger.ModelLogIterator(env.model_storage, ctx.execution.id)
    while thread.is_alive():
        # Drain any log records that appeared since the last pass.
        for log in log_iterator:
            leveled_log = getattr(env.ctx_logger, log.level.lower())
            leveled_log(log)
            if log.traceback:
                leveled_log(log.traceback)
        # Short join timeout keeps the loop responsive to fresh logs
        # while still noticing workflow completion promptly.
        thread.join(0.1)
    aria_execution = ctx.execution
    if aria_execution.status != aria_execution.SUCCEEDED:
        raise AriaWorkflowError(
            'ARIA workflow {aria_execution.workflow_name} was not successful\n'
            'status: {aria_execution.status}\n'
            'error message: {aria_execution.error}'.format(
                aria_execution=aria_execution))
def executor():
    """
    Yield a ProcessExecutor with the tests root on its python path.

    The original yielded without a try/finally, so ``close()`` was skipped
    whenever the consuming test raised; the identical sibling fixture in this
    file already guards the yield, so this one now matches it.
    """
    result = process.ProcessExecutor(python_path=[tests.ROOT_DIR])
    try:
        yield result
    finally:
        # Always reap the executor's worker processes on teardown.
        result.close()
def executor(self):
    """Yield a freshly constructed ProcessExecutor, closing it on exit."""
    process_executor = process.ProcessExecutor()
    try:
        yield process_executor
    finally:
        # Teardown runs even when the consumer raises.
        process_executor.close()
def executor(self):
    """
    Yield a ProcessExecutor, closing it on teardown.

    The original yielded without a try/finally, so ``close()`` was skipped
    whenever the consumer raised; the identical sibling fixture in this file
    already guards the yield, so this one now matches it.
    """
    result = process.ProcessExecutor()
    try:
        yield result
    finally:
        # Always release the executor's subprocess resources.
        result.close()
def executor():
    """Yield a ProcessExecutor with the tests root importable; close on exit."""
    process_executor = process.ProcessExecutor(python_path=[tests.ROOT_DIR])
    try:
        yield process_executor
    finally:
        # Cleanup is unconditional: runs on normal teardown and on error.
        process_executor.close()
def executor(plugin_manager):
    """
    Yield a plugin-aware ProcessExecutor, closing it on teardown.

    The original yielded without a try/finally, so ``close()`` was skipped
    whenever the consumer raised; the guarded sibling fixtures in this file
    show the intended pattern, so this one now matches them.
    """
    result = process.ProcessExecutor(plugin_manager=plugin_manager)
    try:
        yield result
    finally:
        # Always reap the executor's worker processes on teardown.
        result.close()
def process_executor():
    """Yield a ProcessExecutor configured with the tests root directory."""
    # The original spelled this as ProcessExecutor(**dict(python_path=...)),
    # which is identical to passing the keyword directly.
    # NOTE(review): python_path is a bare string here while sibling fixtures
    # pass a one-element list -- verify ProcessExecutor accepts both forms.
    ex = process.ProcessExecutor(python_path=tests.ROOT_DIR)
    try:
        yield ex
    finally:
        ex.close()