def send_task(self, task_name, queue, deployment_id=None, args=None,
              kwargs=None, timeout=10, execution_env=None):
    """Publish an operation task to *queue* and block for the agent reply.

    The operation context is injected into the task kwargs under the
    ``__cloudify_context`` key before publishing.

    :param task_name: dotted name of the operation to execute
    :param queue: AMQP exchange the target agent listens on
    :param deployment_id: optional deployment the task belongs to
    :param args: unused; kept for interface compatibility
    :param kwargs: operation kwargs; the context key is added to them
    :param timeout: seconds to wait for the agent response
    :param execution_env: optional execution environment variables
    :return: the remote task's result payload
    :raises: the deserialized remote exception if the agent reported one
    """
    cloudify_context = test_utils.op_context(
        task_name,
        task_target=queue,
        plugin_name=PLUGIN_NAME,
        execution_env=execution_env,
        deployment_id=deployment_id)
    kwargs = kwargs or {}
    kwargs['__cloudify_context'] = cloudify_context
    response_handler = amqp_client.BlockingRequestResponseHandler(
        exchange=queue)
    connection = amqp_client.get_client()
    connection.add_handler(response_handler)
    with connection:
        payload = {'cloudify_task': {'kwargs': kwargs}}
        response = response_handler.publish(
            payload, routing_key='operation', timeout=timeout)
        error = response.get('error')
        if error:
            raise deserialize_known_exception(error)
        return response.get('result')
def send_task(self, task_name, queue, deployment_id=None, args=None,
              kwargs=None, timeout=10, execution_env=None):
    """Publish an operation task to *queue* and wait for the agent reply.

    :param task_name: dotted name of the operation to execute
    :param queue: AMQP exchange the target agent listens on
    :param deployment_id: optional deployment the task belongs to
    :param args: unused; kept for interface compatibility
    :param kwargs: operation kwargs (copied; not mutated)
    :param timeout: seconds to wait for the agent response
    :param execution_env: optional execution environment variables
    :return: the remote task's result payload
    :raises: the deserialized remote exception if the agent reported one
    """
    cloudify_context = test_utils.op_context(
        task_name,
        task_target=queue,
        plugin_name=PLUGIN_NAME,
        execution_env=execution_env,
        deployment_id=deployment_id)
    # Copy rather than mutate so the caller's dict is not polluted with
    # the injected '__cloudify_context' key.
    kwargs = dict(kwargs) if kwargs else {}
    kwargs['__cloudify_context'] = cloudify_context
    # Pass the exchange by keyword, consistent with the sibling
    # send_task variant in this file.
    handler = amqp_client.BlockingRequestResponseHandler(exchange=queue)
    client = amqp_client.get_client()
    client.add_handler(handler)
    with client:
        task = {'cloudify_task': {'kwargs': kwargs}}
        result = handler.publish(task, routing_key='operation',
                                 timeout=timeout)
        error = result.get('error')
        if error:
            raise deserialize_known_exception(error)
        else:
            return result.get('result')
def dispatch_to_subprocess(self):
    """Run the task in a child Python process and return its result.

    The task request is serialized to ``input.json`` inside a temporary
    directory; ``cloudify.dispatch`` is executed as a module in a
    subprocess and writes its outcome to ``output.json`` in the same
    directory. The directory is removed unconditionally afterwards.

    :return: the payload of a successful dispatch
    :raises RuntimeError: if an external plugin could not be installed
    :raises exceptions.NonRecoverableError: on an unrecognized output type
    :raises: the deserialized remote exception on an 'error' output
    """
    # inputs.json, output.json and output are written to a temporary
    # directory that only lives during the lifetime of the subprocess
    split = self.cloudify_context['task_name'].split('.')
    dispatch_dir = tempfile.mkdtemp(
        prefix='task-{0}.{1}-'.format(split[0], split[-1]))
    try:
        with open(os.path.join(dispatch_dir, 'input.json'), 'w') as f:
            json.dump(
                {
                    'cloudify_context': self.cloudify_context,
                    'args': self.args,
                    'kwargs': self.kwargs
                }, f)
        # NOTE(review): this sets the flag in the parent's os.environ so
        # it is inherited by the child; it is never unset here — confirm
        # that leaking it into later dispatches is intended.
        if self.cloudify_context.get('bypass_maintenance'):
            os.environ[constants.BYPASS_MAINTENANCE] = 'True'
        env = self._build_subprocess_env()
        if self._uses_external_plugin():
            # Locate the plugin's own virtualenv; install it first if it
            # is not present, then retry the lookup once.
            plugin_dir = self._extract_plugin_dir()
            if plugin_dir is None:
                self._install_plugin()
                plugin_dir = self._extract_plugin_dir()
                if plugin_dir is None:
                    raise RuntimeError('Plugin was not installed: {0}'.format(
                        self.cloudify_context['plugin']))
            # Use the plugin env's python so its dependencies resolve.
            executable = utils.get_python_path(plugin_dir)
        else:
            executable = sys.executable
        # Put the chosen interpreter's bin directory first on PATH.
        env['PATH'] = '{0}:{1}'.format(os.path.dirname(executable),
                                       env['PATH'])
        # -u: unbuffered output so logs stream as the task runs.
        command_args = [
            executable, '-u', '-m', 'cloudify.dispatch', dispatch_dir
        ]
        self.run_subprocess(command_args,
                            env=env,
                            bufsize=1,
                            close_fds=os.name != 'nt')
        with open(os.path.join(dispatch_dir, 'output.json')) as f:
            dispatch_output = json.load(f)
        if dispatch_output['type'] == 'result':
            return dispatch_output['payload']
        elif dispatch_output['type'] == 'error':
            e = dispatch_output['payload']
            error = deserialize_known_exception(e)
            # Keep the child's original message/type/traceback as a
            # cause on the re-raised exception for debuggability.
            error.causes.append({
                'message': e['message'],
                'type': e['exception_type'],
                'traceback': e.get('traceback')
            })
            raise error
        else:
            raise exceptions.NonRecoverableError(
                'Unexpected output type: {0}'.format(
                    dispatch_output['type']))
    finally:
        # Best-effort cleanup of the scratch directory.
        shutil.rmtree(dispatch_dir, ignore_errors=True)
def _send_amqp_task(agent, params, timeout):
    """Send an operation task to *agent* over AMQP and wait for the reply.

    :param agent: agent runtime-properties dict; its 'queue' entry names
        the exchange the agent listens on
    :param params: operation kwargs shipped in the task payload
    :param timeout: seconds to wait for the agent response
    :raises RecoverableError: when the agent does not answer the probe
    :raises: the deserialized remote exception if the agent reported one
    """
    if not _validate_cloudify_amqp(agent):
        raise RecoverableError('Agent is not responding')
    task = {'cloudify_task': {'kwargs': params}}
    # Pass the exchange by keyword, consistent with the sibling
    # _send_amqp_task variant in this file.
    handler = amqp_client.BlockingRequestResponseHandler(
        exchange=agent['queue'])
    with _get_amqp_client(agent) as client:
        client.add_handler(handler)
        with client:
            result = handler.publish(task, routing_key='operation',
                                     timeout=timeout)
    error = result.get('error')
    if error:
        raise deserialize_known_exception(error)
def _send_amqp_task(agent, params, timeout):
    """Publish an operation task to *agent*'s queue and await its reply.

    :param agent: agent runtime-properties dict; its 'queue' entry names
        the exchange the agent listens on
    :param params: operation kwargs shipped in the task payload
    :param timeout: seconds to wait for the agent response
    :raises RecoverableError: when the agent does not answer the probe
    :raises: the deserialized remote exception if the agent reported one
    """
    if not _validate_cloudify_amqp(agent):
        raise RecoverableError('Agent is not responding')
    response_handler = amqp_client.BlockingRequestResponseHandler(
        exchange=agent['queue'])
    message = {'cloudify_task': {'kwargs': params}}
    with _get_amqp_client(agent) as client:
        client.add_handler(response_handler)
        with client:
            response = response_handler.publish(
                message, routing_key='operation', timeout=timeout)
    error = response.get('error')
    if error:
        raise deserialize_known_exception(error)
def dispatch_to_subprocess(self):
    """Run the task in a child Python process and return its result.

    The task request is serialized to ``input.json`` inside a temporary
    directory; ``cloudify.dispatch`` runs as a module in a subprocess
    and writes its outcome to ``output.json`` in the same directory.
    The directory is removed unconditionally afterwards.

    :return: the payload of a successful dispatch
    :raises exceptions.NonRecoverableError: on an unrecognized output type
    :raises: the deserialized remote exception on an 'error' output
    """
    # inputs.json, output.json and output are written to a temporary
    # directory that only lives during the lifetime of the subprocess
    split = self.cloudify_context['task_name'].split('.')
    dispatch_dir = tempfile.mkdtemp(
        prefix='task-{0}.{1}-'.format(split[0], split[-1]))
    try:
        with open(os.path.join(dispatch_dir, 'input.json'), 'w') as f:
            json.dump(
                {
                    'cloudify_context': self.cloudify_context,
                    'args': self.args,
                    'kwargs': self.kwargs
                }, f)
        env = self._build_subprocess_env()
        # -u: unbuffered output so logs stream as the task runs.
        command_args = [
            sys.executable, '-u', '-m', 'cloudify.dispatch', dispatch_dir
        ]
        self.run_subprocess(command_args,
                            env=env,
                            bufsize=1,
                            close_fds=os.name != 'nt')
        with open(os.path.join(dispatch_dir, 'output.json')) as f:
            dispatch_output = json.load(f)
        if dispatch_output['type'] == 'result':
            return dispatch_output['payload']
        elif dispatch_output['type'] == 'error':
            e = dispatch_output['payload']
            error = deserialize_known_exception(e)
            # Keep the child's original message/type/traceback as a
            # cause on the re-raised exception for debuggability.
            # Use .get(): a serialized error may omit 'traceback', and
            # a hard KeyError here would mask the real task failure.
            error.causes.append({
                'message': e['message'],
                'type': e['exception_type'],
                'traceback': e.get('traceback')
            })
            raise error
        else:
            raise exceptions.NonRecoverableError(
                'Unexpected output type: {0}'.format(
                    dispatch_output['type']))
    finally:
        # Best-effort cleanup of the scratch directory.
        shutil.rmtree(dispatch_dir, ignore_errors=True)