Example #1
def process_operation(plugins,
                      operation_name,
                      operation_content,
                      error_code,
                      partial_error_message,
                      resource_bases,
                      is_workflows=False):
    payload_field_name = 'parameters' if is_workflows else 'inputs'
    mapping_field_name = 'mapping' if is_workflows else 'implementation'
    operation_mapping = operation_content[mapping_field_name]
    operation_payload = operation_content[payload_field_name]

    # only for node operations
    operation_executor = operation_content.get('executor', None)
    operation_max_retries = operation_content.get('max_retries', None)
    operation_retry_interval = operation_content.get('retry_interval', None)

    if not operation_mapping:
        if is_workflows:
            raise RuntimeError('Illegal state. Workflow mapping should always '
                               'be defined (enforced by schema validation)')
        else:
            return no_op_operation(operation_name=operation_name)

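    # Plugins whose name is a dotted prefix of the mapping,
    # e.g. 'my_plugin' for a mapping of 'my_plugin.tasks.create'.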
    candidate_plugins = [
        p for p in plugins.keys()
        if operation_mapping.startswith('{0}.'.format(p))
    ]
    if candidate_plugins:
        if len(candidate_plugins) > 1:
            raise exceptions.DSLParsingLogicException(
                91, 'Ambiguous operation mapping. [operation={0}, '
                'plugins={1}]'.format(operation_name, candidate_plugins))
        plugin_name = candidate_plugins[0]
        mapping = operation_mapping[len(plugin_name) + 1:]
        if is_workflows:
            return workflow_operation(plugin_name=plugin_name,
                                      workflow_mapping=mapping,
                                      workflow_parameters=operation_payload)
        else:
            if not operation_executor:
                operation_executor = plugins[plugin_name]['executor']
            return operation(name=operation_name,
                             plugin_name=plugin_name,
                             operation_mapping=mapping,
                             operation_inputs=operation_payload,
                             executor=operation_executor,
                             max_retries=operation_max_retries,
                             retry_interval=operation_retry_interval)
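    # No plugin prefix matched; treat the mapping as a path to a script
    # resource that will be executed by the script plugin.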
    elif resource_bases and _resource_exists(resource_bases,
                                             operation_mapping):
        operation_payload = copy.deepcopy(operation_payload or {})
        if constants.SCRIPT_PATH_PROPERTY in operation_payload:
            message = "Cannot define '{0}' property in '{1}' for {2} '{3}'" \
                .format(constants.SCRIPT_PATH_PROPERTY,
                        operation_mapping,
                        'workflow' if is_workflows else 'operation',
                        operation_name)
            raise exceptions.DSLParsingLogicException(60, message)
        script_path = operation_mapping
        if is_workflows:
            operation_mapping = constants.SCRIPT_PLUGIN_EXECUTE_WORKFLOW_TASK
            operation_payload.update({
                constants.SCRIPT_PATH_PROPERTY: {
                    'default': script_path,
                    'description': 'Workflow script executed by the script'
                                   ' plugin'
                }
            })
        else:
            operation_mapping = constants.SCRIPT_PLUGIN_RUN_TASK
            operation_payload.update(
                {constants.SCRIPT_PATH_PROPERTY: script_path})
        if constants.SCRIPT_PLUGIN_NAME not in plugins:
            message = "Script plugin is not defined but it is required for" \
                      " mapping '{0}' of {1} '{2}'" \
                .format(operation_mapping,
                        'workflow' if is_workflows else 'operation',
                        operation_name)
            raise exceptions.DSLParsingLogicException(61, message)

        if is_workflows:
            return workflow_operation(plugin_name=constants.SCRIPT_PLUGIN_NAME,
                                      workflow_mapping=operation_mapping,
                                      workflow_parameters=operation_payload)
        else:
            if not operation_executor:
                operation_executor = plugins[
                    constants.SCRIPT_PLUGIN_NAME]['executor']
            return operation(name=operation_name,
                             plugin_name=constants.SCRIPT_PLUGIN_NAME,
                             operation_mapping=operation_mapping,
                             operation_inputs=operation_payload,
                             executor=operation_executor,
                             max_retries=operation_max_retries,
                             retry_interval=operation_retry_interval)
    else:
        # This is an error for validation done somewhere down the
        # current stack trace
        base_error_message = (
            "Could not extract plugin from {2} "
            "mapping '{0}', which is declared for {2} '{1}'. ".format(
                operation_mapping, operation_name,
                'workflow' if is_workflows else 'operation'))
        error_message = base_error_message + partial_error_message
        raise exceptions.DSLParsingLogicException(error_code, error_message)
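
For orientation, here is a small self-contained sketch (not part of the original source; the plugin and task names below are made up) of the plugin-prefix matching the function above uses to split a mapping into a plugin name and a task path:

plugins = {'script': {}, 'my_plugin': {}}              # hypothetical plugin names
operation_mapping = 'my_plugin.tasks.create'           # hypothetical mapping

candidate_plugins = [
    p for p in plugins
    if operation_mapping.startswith('{0}.'.format(p))
]
plugin_name = candidate_plugins[0]                      # 'my_plugin'
mapping = operation_mapping[len(plugin_name) + 1:]      # 'tasks.create'
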
Example #2
def process_operation(
    plugins,
    operation_name,
    operation_content,
    error_code,
    partial_error_message,
    resource_bases,
    remote_resources_namespaces,
    is_workflows=False,
    is_workflow_cascading=False,
    workflow_availability=None,
):
    payload_field_name = 'parameters' if is_workflows else 'inputs'
    mapping_field_name = 'mapping' if is_workflows else 'implementation'
    operation_mapping = operation_content[mapping_field_name]
    operation_payload = operation_content[payload_field_name]

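    # Declared types of payload entries, taken only from entries that are
    # dicts with an explicit 'type' field.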
    operation_payload_types = {
        k: v['type']
        for k, v in operation_payload.items()
        if isinstance(v, dict) and 'type' in v
    }

    # only for node operations
    operation_executor = operation_content.get('executor', None)
    operation_max_retries = operation_content.get('max_retries', None)
    operation_retry_interval = operation_content.get('retry_interval', None)
    operation_timeout = operation_content.get('timeout', None)
    operation_timeout_recoverable = operation_content.get(
        'timeout_recoverable', None)

    if not operation_mapping:
        if is_workflows:
            raise RuntimeError('Illegal state. Workflow mapping should always '
                               'be defined (enforced by schema validation)')
        else:
            return no_op_operation(operation_name=operation_name)

    candidate_plugins = [
        p for p in plugins if operation_mapping.startswith('{0}.'.format(p))
    ]

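    # Treat the mapping as a script when it is a URL, an existing local
    # blueprint resource, or (with no matching plugin) a remote namespaced
    # script resource.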
    if (utils.is_valid_url(operation_mapping) or
            _is_local_script_resource_exists(resource_bases, operation_mapping)
            or (not candidate_plugins and _is_remote_script_resource(
                operation_mapping, remote_resources_namespaces))):

        operation_payload = copy.deepcopy(operation_payload or {})
        if constants.SCRIPT_PATH_PROPERTY in operation_payload:
            message = "Cannot define '{0}' property in '{1}' for {2} '{3}'" \
                .format(constants.SCRIPT_PATH_PROPERTY,
                        operation_mapping,
                        'workflow' if is_workflows else 'operation',
                        operation_name)
            raise exceptions.DSLParsingLogicException(60, message)
        script_path = operation_mapping
        if is_workflows:
            operation_mapping = constants.SCRIPT_PLUGIN_EXECUTE_WORKFLOW_TASK
            operation_payload.update({
                constants.SCRIPT_PATH_PROPERTY: {
                    'default': script_path,
                    'description': 'Workflow script executed by the script'
                                   ' plugin'
                }
            })
        else:
            operation_mapping = constants.SCRIPT_PLUGIN_RUN_TASK
            operation_payload.update(
                {constants.SCRIPT_PATH_PROPERTY: script_path})

        # There can be more than one script plugin defined in the blueprint
        # when the others are namespaced, but they all point to the same
        # installed plugin.
        script_plugins = utils.find_suffix_matches_in_list(
            constants.SCRIPT_PLUGIN_NAME, plugins)

        if script_plugins:
            script_plugin = script_plugins[0]
        else:
            message = "Script plugin is not defined but it is required for" \
                      " mapping '{0}' of {1} '{2}'" \
                .format(operation_mapping,
                        'workflow' if is_workflows else 'operation',
                        operation_name)
            raise exceptions.DSLParsingLogicException(61, message)

        if is_workflows:
            return workflow_operation(
                plugin_name=script_plugin,
                workflow_mapping=operation_mapping,
                workflow_parameters=operation_payload,
                is_workflow_cascading=is_workflow_cascading,
                workflow_availability=workflow_availability,
            )
        else:
            if not operation_executor:
                operation_executor = plugins[script_plugin]['executor']
            return operation(
                name=operation_name,
                plugin_name=script_plugin,
                operation_mapping=operation_mapping,
                operation_inputs=operation_payload,
                executor=operation_executor,
                max_retries=operation_max_retries,
                retry_interval=operation_retry_interval,
                timeout=operation_timeout,
                timeout_recoverable=operation_timeout_recoverable,
                operation_inputs_types=operation_payload_types,
            )

    if candidate_plugins:
        if len(candidate_plugins) > 1:
            raise exceptions.DSLParsingLogicException(
                91, 'Ambiguous operation mapping. [operation={0}, '
                'plugins={1}]'.format(operation_name, candidate_plugins))
        plugin_name = candidate_plugins[0]
        mapping = operation_mapping[len(plugin_name) + 1:]
        if is_workflows:
            return workflow_operation(
                plugin_name=plugin_name,
                workflow_mapping=mapping,
                workflow_parameters=operation_payload,
                is_workflow_cascading=is_workflow_cascading,
                workflow_availability=workflow_availability,
            )
        else:
            if not operation_executor:
                operation_executor = plugins[plugin_name]['executor']
            return operation(
                name=operation_name,
                plugin_name=plugin_name,
                operation_mapping=mapping,
                operation_inputs=operation_payload,
                executor=operation_executor,
                max_retries=operation_max_retries,
                retry_interval=operation_retry_interval,
                timeout=operation_timeout,
                timeout_recoverable=operation_timeout_recoverable,
                operation_inputs_types=operation_payload_types,
            )

    else:
        base_error_message = (
            "Could not extract plugin or a script resource is not found from "
            "{2} mapping/script-path '{0}', which is declared for {2} '{1}'."
            .format(operation_mapping, operation_name,
                    'workflow' if is_workflows else 'operation'))
        error_message = base_error_message + partial_error_message
        raise exceptions.DSLParsingLogicException(error_code, error_message)
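
As a rough, self-contained illustration of the script-mapping branch above, the sketch below shows how the payload is rewritten for a regular (non-workflow) operation. The constant values are assumptions made for the example; see the constants module referenced above for the real values.

SCRIPT_PATH_PROPERTY = 'script_path'                    # assumed constant value
SCRIPT_PLUGIN_RUN_TASK = 'script_runner.tasks.run'      # assumed constant value

operation_mapping = 'scripts/configure.sh'              # hypothetical script path
operation_payload = {'port': 8080}                      # hypothetical inputs

# The script path is injected into the inputs and the mapping is replaced
# by the script plugin's run task.
script_path = operation_mapping
operation_mapping = SCRIPT_PLUGIN_RUN_TASK
operation_payload.update({SCRIPT_PATH_PROPERTY: script_path})
assert operation_payload == {'port': 8080,
                             'script_path': 'scripts/configure.sh'}
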
Example #3
def process_operation(
        plugins,
        operation_name,
        operation_content,
        error_code,
        partial_error_message,
        resource_bases,
        is_workflows=False):
    payload_field_name = 'parameters' if is_workflows else 'inputs'
    mapping_field_name = 'mapping' if is_workflows else 'implementation'
    operation_mapping = operation_content[mapping_field_name]
    operation_payload = operation_content[payload_field_name]

    # only for node operations
    operation_executor = operation_content.get('executor', None)
    operation_max_retries = operation_content.get('max_retries', None)
    operation_retry_interval = operation_content.get('retry_interval', None)

    if not operation_mapping:
        if is_workflows:
            raise RuntimeError('Illegal state. Workflow mapping should always '
                               'be defined (enforced by schema validation)')
        else:
            return no_op_operation(operation_name=operation_name)

    candidate_plugins = [p for p in plugins.keys()
                         if operation_mapping.startswith('{0}.'.format(p))]
    if candidate_plugins:
        if len(candidate_plugins) > 1:
            raise exceptions.DSLParsingLogicException(
                91, 'Ambiguous operation mapping. [operation={0}, '
                    'plugins={1}]'.format(operation_name, candidate_plugins))
        plugin_name = candidate_plugins[0]
        mapping = operation_mapping[len(plugin_name) + 1:]
        if is_workflows:
            return workflow_operation(
                plugin_name=plugin_name,
                workflow_mapping=mapping,
                workflow_parameters=operation_payload)
        else:
            if not operation_executor:
                operation_executor = plugins[plugin_name]['executor']
            return operation(
                name=operation_name,
                plugin_name=plugin_name,
                operation_mapping=mapping,
                operation_inputs=operation_payload,
                executor=operation_executor,
                max_retries=operation_max_retries,
                retry_interval=operation_retry_interval)
    elif resource_bases and _resource_exists(resource_bases,
                                             operation_mapping):
        operation_payload = copy.deepcopy(operation_payload or {})
        if constants.SCRIPT_PATH_PROPERTY in operation_payload:
            message = "Cannot define '{0}' property in '{1}' for {2} '{3}'" \
                .format(constants.SCRIPT_PATH_PROPERTY,
                        operation_mapping,
                        'workflow' if is_workflows else 'operation',
                        operation_name)
            raise exceptions.DSLParsingLogicException(60, message)
        script_path = operation_mapping
        if is_workflows:
            operation_mapping = constants.SCRIPT_PLUGIN_EXECUTE_WORKFLOW_TASK
            operation_payload.update({
                constants.SCRIPT_PATH_PROPERTY: {
                    'default': script_path,
                    'description': 'Workflow script executed by the script'
                                   ' plugin'
                }
            })
        else:
            operation_mapping = constants.SCRIPT_PLUGIN_RUN_TASK
            operation_payload.update({
                constants.SCRIPT_PATH_PROPERTY: script_path
            })
        if constants.SCRIPT_PLUGIN_NAME not in plugins:
            message = "Script plugin is not defined but it is required for" \
                      " mapping '{0}' of {1} '{2}'" \
                .format(operation_mapping,
                        'workflow' if is_workflows else 'operation',
                        operation_name)
            raise exceptions.DSLParsingLogicException(61, message)

        if is_workflows:
            return workflow_operation(
                plugin_name=constants.SCRIPT_PLUGIN_NAME,
                workflow_mapping=operation_mapping,
                workflow_parameters=operation_payload)
        else:
            if not operation_executor:
                operation_executor = plugins[constants.SCRIPT_PLUGIN_NAME][
                    'executor']
            return operation(
                name=operation_name,
                plugin_name=constants.SCRIPT_PLUGIN_NAME,
                operation_mapping=operation_mapping,
                operation_inputs=operation_payload,
                executor=operation_executor,
                max_retries=operation_max_retries,
                retry_interval=operation_retry_interval)
    else:
        # This is an error for validation done somewhere down the
        # current stack trace
        base_error_message = (
            "Could not extract plugin from {2} "
            "mapping '{0}', which is declared for {2} '{1}'. "
            .format(operation_mapping,
                    operation_name,
                    'workflow' if is_workflows else 'operation'))
        error_message = base_error_message + partial_error_message
        raise exceptions.DSLParsingLogicException(error_code, error_message)