def run(fn, *args, **kwargs):
    """Dispatch *fn* as a Cloudify operation task.

    Pops ``__cloudify_context`` out of *kwargs*, tags it with the
    operation type and the fully qualified task name, and hands
    everything off to ``dispatch.dispatch``.
    """
    # Pop so the context key is not forwarded as a task kwarg; the
    # default {} covers callers that pass no context at all.
    context = kwargs.pop('__cloudify_context', {})
    context.update({
        'type': 'operation',
        'task_name': '{0}.{1}'.format(fn.__module__, fn.__name__)
    })
    return dispatch.dispatch(context, *args, **kwargs)
# Example #2
 def _assert_task_runnable(self, task_name,
                           expected_return=None,
                           package_name=None,
                           package_version=None,
                           deployment_id=None):
     """Dispatch *task_name* with an operation context and assert the
     dispatch result equals *expected_return*."""
     op_ctx = test_utils.op_context(
         task_name,
         plugin_name=PLUGIN_NAME,
         package_name=package_name,
         package_version=package_version,
         deployment_id=deployment_id)
     actual = dispatch.dispatch(op_ctx)
     self.assertEqual(actual, expected_return)
# Example #3
 def _assert_task_runnable(self,
                           task_name,
                           expected_return=None,
                           package_name=None,
                           package_version=None,
                           deployment_id=None):
     """Run *task_name* through dispatch and check that it returns
     *expected_return*."""
     context = test_utils.op_context(task_name,
                                     plugin_name=PLUGIN_NAME,
                                     package_name=package_name,
                                     package_version=package_version,
                                     deployment_id=deployment_id)
     self.assertEqual(dispatch.dispatch(context), expected_return)
# Example #4
    def execute(self,
                workflow,
                parameters=None,
                allow_custom_parameters=False,
                task_retries=-1,
                task_retry_interval=30,
                subgraph_retries=0,
                task_thread_pool_size=DEFAULT_LOCAL_TASK_THREAD_POOL_SIZE):
        """Run the named *workflow* of this local environment and return
        its result.

        Raises ``ValueError`` if *workflow* is not declared in the plan.
        Execution start and end (including failures) are recorded in
        ``self.storage``; on failure the original exception is re-raised
        with its traceback preserved.
        """
        workflows = self.plan['workflows']
        workflow_name = workflow
        if workflow_name not in workflows:
            raise ValueError("'{0}' workflow does not exist. "
                             "existing workflows are: [{1}]".format(
                                 workflow_name, ', '.join(workflows)))

        workflow = workflows[workflow_name]
        execution_id = str(uuid.uuid4())
        # Workflow context handed to dispatch; 'local' marks this as an
        # in-process (non-manager) execution.
        ctx = dict(
            type='workflow',
            local=True,
            deployment_id=self.name,
            blueprint_id=self.name,
            execution_id=execution_id,
            workflow_id=workflow_name,
            storage=self.storage,
            task_retries=task_retries,
            task_retry_interval=task_retry_interval,
            subgraph_retries=subgraph_retries,
            local_task_thread_pool_size=task_thread_pool_size,
            task_name=workflow['operation'],
        )

        merged_parameters = _merge_and_validate_execution_parameters(
            workflow, workflow_name, parameters, allow_custom_parameters)
        self.storage.store_execution(execution_id, ctx, merged_parameters)
        try:
            result = dispatch.dispatch(__cloudify_context=ctx,
                                       **merged_parameters)
            self.storage.execution_ended(execution_id)
            return result
        except Exception as e:
            # Record the failure, then re-raise preserving the original
            # traceback (six-style for py2/py3 compatibility).
            self.storage.execution_ended(execution_id, e)
            reraise(e.__class__, e, sys.exc_info()[2])
# Example #5
 def _process(self, channel, method, properties, body):
     """Handle one queued message: run the embedded cloudify task via
     dispatch and publish a JSON result envelope, always acking.

     The envelope is ``{'ok': True, 'id': ..., 'result': ...}`` on
     success, ``{'ok': False, 'id': ..., 'error': repr(e)}`` on failure.
     """
     # NOTE(review): debug tap left in place — appends every raw body to
     # /tmp/bla.  Separator is bytes so the write also works when *body*
     # is a bytes payload on Python 3 (file is opened in binary mode);
     # the original str literal raised TypeError there.
     with open('/tmp/bla', 'ab') as f:
         f.write(body)
         f.write(b'\n########\n')
     parsed_body = json.loads(body)
     logger.info(parsed_body)
     # Pre-seed *result* so the finally block can never hit a NameError,
     # even if building the error envelope itself were to fail.
     result = {'ok': False, 'error': 'unprocessed', 'id': parsed_body.get('id')}
     try:
         task = parsed_body['cloudify_task']
         kwargs = task['kwargs']
         rv = dispatch.dispatch(**kwargs)
         result = {'ok': True, 'id': parsed_body['id'], 'result': rv}
     except Exception as e:
         # logger.warning — logger.warn is a deprecated alias.
         logger.warning('Failed message processing: {0!r}'.format(e))
         logger.warning('Body: {0}\nType: {1}'.format(body, type(body)))
         # .get('id') so a malformed message cannot raise here and skip
         # the publish/ack below.
         result = {'ok': False, 'error': repr(e), 'id': parsed_body.get('id')}
     finally:
         logger.info('response %r', result)
         self.channel.basic_publish(self.result_exchange, '',
                                    json.dumps(result))
         self.channel.basic_ack(method.delivery_tag)
    def execute(self,
                workflow,
                parameters=None,
                allow_custom_parameters=False,
                task_retries=-1,
                task_retry_interval=30,
                subgraph_retries=0,
                task_thread_pool_size=DEFAULT_LOCAL_TASK_THREAD_POOL_SIZE):
        """Run the named *workflow* of this local environment and return
        the dispatch result.

        Raises ``ValueError`` if *workflow* is not declared in the plan.
        """
        workflows = self.plan['workflows']
        workflow_name = workflow
        if workflow_name not in workflows:
            # Join the names explicitly: formatting workflows.keys()
            # directly renders as "dict_keys([...])" on Python 3.  This
            # also matches the message used by the sibling execute().
            raise ValueError("'{0}' workflow does not exist. "
                             "existing workflows are: [{1}]".format(
                                 workflow_name, ', '.join(workflows)))

        workflow = workflows[workflow_name]
        execution_id = str(uuid.uuid4())
        # Workflow context handed to dispatch; 'local' marks this as an
        # in-process (non-manager) execution.
        ctx = {
            'type': 'workflow',
            'local': True,
            'deployment_id': self.name,
            'blueprint_id': self.name,
            'execution_id': execution_id,
            'workflow_id': workflow_name,
            'storage': self.storage,
            'task_retries': task_retries,
            'task_retry_interval': task_retry_interval,
            'subgraph_retries': subgraph_retries,
            'local_task_thread_pool_size': task_thread_pool_size,
            'task_name': workflow['operation']
        }

        merged_parameters = _merge_and_validate_execution_parameters(
            workflow, workflow_name, parameters, allow_custom_parameters)

        return dispatch.dispatch(__cloudify_context=ctx, **merged_parameters)