def execute(new_global_elements_map_total_implementation, is_delete, cluster_name, target_parameter=None):
    """
    Generate Ansible artifacts from the given implementation map, run them as
    one local playbook and optionally extract a single fact value.

    :param new_global_elements_map_total_implementation: list of artifact dicts,
        each at least with an 'executor' key (the list is deep-copied, the
        caller's data is not modified)
    :param is_delete: if True nothing is executed and None is returned
    :param cluster_name: cluster identifier used to build temporary directories
    :param target_parameter: dotted parameter path; when given, the value of its
        last component is looked up in the 'matched_object' ansible facts of the
        playbook results
    :return: the extracted value, 'not_found' when missing or any task failed,
        or None when target_parameter is None or is_delete is True
    """
    if not is_delete:
        default_executor = 'ansible'
        configuration_class = get_configuration_tool_class(default_executor)()
        new_ansible_artifacts = copy.deepcopy(new_global_elements_map_total_implementation)
        for i in range(len(new_ansible_artifacts)):
            # Each artifact is tagged with its own tool; note the tool class of
            # the LAST artifact is the one reused for generate_artifacts below
            new_ansible_artifacts[i]['configuration_tool'] = new_ansible_artifacts[i]['executor']
            configuration_class = get_configuration_tool_class(new_ansible_artifacts[i]['configuration_tool'])()
            extension = configuration_class.get_artifact_extension()

            # Randomized name suffix to avoid filename collisions between runs
            seed(time())
            new_ansible_artifacts[i]['name'] = '_'.join(
                [SOURCE, str(randint(ARTIFACT_RANGE_START, ARTIFACT_RANGE_END))]) + extension
        artifacts_with_brackets = utils.replace_brackets(new_ansible_artifacts, False)
        new_ansible_tasks, filename = utils.generate_artifacts(configuration_class, artifacts_with_brackets,
                                                                   configuration_class.initial_artifacts_directory,
                                                                   store=False)
        # Tasks are kept in memory only; the generated file is not needed
        os.remove(filename)

        q = Queue()
        playbook = {
            'hosts': 'localhost',
            'tasks': new_ansible_tasks
        }

        # Make the bundled initial artifacts available in the per-cluster tmp dir
        os.makedirs(os.path.join(utils.get_tmp_clouni_dir(), cluster_name, configuration_class.initial_artifacts_directory), exist_ok=True)
        copy_tree(utils.get_project_root_path() + '/toscatranslator/configuration_tools/ansible/artifacts',
                  os.path.join(utils.get_tmp_clouni_dir(), cluster_name, configuration_class.initial_artifacts_directory))
        configuration_class.parallel_run([playbook], 'artifacts', 'artifacts', q, cluster_name)

        # there is such a problem that if runner cotea was launched in the current process, then all subsequent launches
        # of other playbooks from this process are impossible because the initial playbook will be launched, and
        # even if this process is forked, the effect remains so that something inside cotea
        # is preserved in the context of the process
        results = q.get()
        if target_parameter is not None:
            value = 'not_found'
            if_failed = False
            for result in results:
                if result.is_failed or result.is_unreachable:
                    logging.error("Task %s has failed because of exception: \n%s" %
                                    (result.task_name, result.result.get('exception', '(Unknown reason)')))
                    if_failed = True
                # The fact-gathering tasks store the found object under
                # 'matched_object'; pick the requested attribute from it
                if 'results' in result.result and len(result.result['results']) > 0 and 'ansible_facts' in \
                        result.result['results'][0] and 'matched_object' in result.result['results'][0]['ansible_facts']:
                    value = result.result['results'][0]['ansible_facts']['matched_object'][target_parameter.split('.')[-1]]
            if if_failed:
                # Any failed task invalidates the extracted value
                value = 'not_found'
            return value
    return None
def generate_artifacts(new_artifacts, directory):
    """
    From the info of new artifacts generate files which execute
    :param new_artifacts: list of dicts containing (value, source, parameters, executor, name, configuration_tool)
    :param directory: directory in which the artifact files are created
    :return: list of paths of the generated artifact files
    """
    r_artifacts = []
    for art in new_artifacts:
        filename = os.path.join(directory, art[NAME])
        # BUG FIX: validate the tool class *before* instantiating it; the old
        # code called `get_configuration_tool_class(...)()` first, so an
        # unsupported executor raised TypeError ("NoneType is not callable")
        # instead of reporting UnsupportedExecutorType.
        tool_class = get_configuration_tool_class(art[EXECUTOR])
        if not tool_class:
            ExceptionCollector.appendException(
                UnsupportedExecutorType(what=art[EXECUTOR]))
        configuration_class = tool_class()
        configuration_class.create_artifact(filename, art)
        r_artifacts.append(filename)

    return r_artifacts
def restructure_mapping_facts(elements_map, self, is_delete, cluster_name, extra_elements_map=None, target_parameter=None, source_parameter=None,
                              source_value=None):
    """
    Function is used to restructure mapping values with the case of `facts`, `condition`, `arguments`, `value` keys
    :param elements_map: current (sub)structure of the mapping being transformed
    :param self: context object propagated unchanged to recursive calls
    :param is_delete: flag propagated to fact execution for deletion scripts
    :param cluster_name: cluster identifier propagated to fact execution
    :param extra_elements_map: accumulator of extra mapping elements created on the way
    :param target_parameter: dotted path of the provider parameter being mapped
    :param source_parameter: parameter of the closest enclosing MAP_KEY element
    :param source_value: value of the closest enclosing MAP_KEY element
    :return: tuple (restructured elements, extra_elements_map)
    """

    elements_map = copy.deepcopy(elements_map)
    if not extra_elements_map:
        extra_elements_map = []

    if isinstance(elements_map, dict):
        cur_parameter = elements_map.get(PARAMETER)
        if cur_parameter and isinstance(cur_parameter, str):
            if elements_map.get(MAP_KEY):
                # A MAP_KEY element starts a new source context for children
                source_parameter = cur_parameter
                source_value = elements_map.get(VALUE)
            elif target_parameter:
                target_parameter += SEPARATOR + cur_parameter
            else:
                target_parameter = cur_parameter
        new_elements_map = dict()
        for k, v in elements_map.items():
            cur_elements, extra_elements_map = restructure_mapping_facts(v, self, is_delete, cluster_name, extra_elements_map,
                                                                         target_parameter,
                                                                         source_parameter, source_value)
            new_elements_map.update({k: cur_elements})

        if isinstance(new_elements_map.get(PARAMETER, ''), dict):
            # The parameter itself resolved to a dict: replace it with a
            # relationship operation output and record an extra mapping element
            separated_target_parameter = target_parameter.split(SEPARATOR)
            target_type = None
            target_short_parameter = None
            for i in range(len(separated_target_parameter)):
                if separated_target_parameter[i] in NODE_TEMPLATE_KEYS:
                    target_type = SEPARATOR.join(separated_target_parameter[:i])
                    target_short_parameter = '_'.join(separated_target_parameter[i:])
                    break
            if not target_short_parameter or not target_type:
                logging.critical("Unable to parse the following parameter: %s" % json.dumps(target_parameter))
                sys.exit(1)

            input_parameter = new_elements_map[PARAMETER]
            input_value = new_elements_map[VALUE]
            input_keyname = new_elements_map.get(KEYNAME)

            provider = separated_target_parameter[0]
            target_relationship_type = SEPARATOR.join([provider, RELATIONSHIPS, "DependsOn"])
            relationship_name = "{self[name]}_server_" + utils.snake_case(separated_target_parameter[-1])

            operation_name = 'modify_' + target_short_parameter
            value_name = 'modified_' + target_short_parameter
            interface_name = 'Extra'
            new_elements_map = {
                GET_OPERATION_OUTPUT: [relationship_name, interface_name, operation_name, value_name]
            }

            cur_target_parameter = SEPARATOR.join(
                [target_relationship_type, INTERFACES, interface_name, operation_name])
            cur_extra_element = {
                PARAMETER: source_parameter,
                MAP_KEY: {
                    PARAMETER: cur_target_parameter,
                    KEYNAME: relationship_name,
                    VALUE: {
                        IMPLEMENTATION: {
                            SOURCE: SET_FACT_SOURCE,
                            VALUE: "default_value",
                            EXECUTOR: ANSIBLE,
                            PARAMETERS: {
                                value_name: "\\{\\{ input_parameter: input_value \\}\\}"
                            }
                        },
                        INPUTS: {
                            "input_parameter": input_parameter,
                            "input_value": input_value
                        }
                    }
                },
                VALUE: source_value
            }
            if input_keyname:
                # TODO add keyname to the parameter outside the new_elements_map
                # BUG FIX: the subscript used the *builtin* `map` instead of the
                # MAP_KEY constant, so the keyname was stored under a bogus key
                # and never reached the MAP_KEY sub-dict
                cur_extra_element[MAP_KEY][KEYNAME] = input_keyname
            extra_elements_map.append(cur_extra_element)

        # The element is a "facts structure" only if it carries every key of
        # FACTS_MAPPING_VALUE_STRUCTURE
        if_facts_structure = False
        keys = new_elements_map.keys()
        if len(keys) > 0:
            if_facts_structure = True
            for k in FACTS_MAPPING_VALUE_STRUCTURE:
                if k not in keys:
                    if_facts_structure = False
        if if_facts_structure:
            # NOTE: end of recursion
            assert target_parameter
            condition = new_elements_map[CONDITION]
            fact_name = new_elements_map[FACTS]
            value = new_elements_map[VALUE]
            arguments = new_elements_map[ARGUMENTS]
            executor = new_elements_map[EXECUTOR]
            if not get_configuration_tool_class(executor):
                logging.critical("Unsupported executor name \'%s\'" % json.dumps(executor))
                sys.exit(1)
            new_value = get_source_structure_from_facts(condition, fact_name, value, arguments, executor, source_value, is_delete, cluster_name)
            return new_value, extra_elements_map

        return new_elements_map, extra_elements_map

    if isinstance(elements_map, list):
        new_elements_map = []
        for k in elements_map:
            cur_elements, extra_elements_map = restructure_mapping_facts(k, self, is_delete, cluster_name, extra_elements_map,
                                                                         target_parameter,
                                                                         source_parameter, source_value)
            new_elements_map.append(cur_elements)
        return new_elements_map, extra_elements_map

    return elements_map, extra_elements_map
def restructure_value(mapping_value, self, if_format_str=True, if_upper=True):
    """
    Recursive function which processes the mapping_value to become parameter:value format
    :param mapping_value: the map of non normative parameter:value
    :param self: the function used to store normative parameter, value and other values
    :param if_format_str: if True, string values are resolved via format_value
    :param if_upper: detects if the parameter value must be saved
    :return: dict in the end of recursion, because the first is always (parameter, value) keys
    """
    if isinstance(mapping_value, dict):
        flat_mapping_value = dict()
        for key in MAPPING_VALUE_KEYS:
            mapping_sub_value = mapping_value.get(key)
            if mapping_sub_value is not None:
                restructured_value = restructure_value(mapping_sub_value, self, if_format_str=key != PARAMETER,
                                                       if_upper=False)
                if restructured_value is None:
                    logging.critical("Unable to parse the following parameter: %s" % json.dumps(mapping_value))
                    sys.exit(1)
                flat_mapping_value[key] = restructured_value

        # NOTE: the case when value has keys ERROR and REASON
        if flat_mapping_value.get(ERROR, False):
            logging.error('Unable to use unsupported TOSCA parameter: %s'
                          % flat_mapping_value.get(REASON).format(self=self))
            sys.exit(1)

        # NOTE: the case when value has keys PARAMETER, VALUE, KEYNAME
        parameter = flat_mapping_value.get(PARAMETER)
        value = flat_mapping_value.get(VALUE)
        keyname = flat_mapping_value.get(KEYNAME)
        if value is None:
            # The case when variable is indivisible
            # This case parameter and keyname are None too or they doesn't have sense
            filled_value = dict()
            for k, v in mapping_value.items():
                filled_k = restructure_value(k, self, if_upper=False)
                filled_v = restructure_value(v, self, if_upper=False)
                filled_value[filled_k] = filled_v
            return filled_value
        if parameter is not None:
            if not isinstance(parameter, six.string_types):
                logging.critical("Unable to parse the following parameter: %s" % json.dumps(parameter))
                sys.exit(1)
            if parameter[:6] == '{self[' and parameter[-1] == '}':
                logging.critical("Parameter is format value, but has to be resolved on this stage: %s"
                                 % json.dumps(parameter))
                sys.exit(1)
            r = dict()
            r[parameter] = value

            if if_upper:
                # r[PARAMETER] = parameter
                if keyname:
                    r[KEYNAME] = keyname
            return r

        # NOTE: the case when value has keys SOURCE, PARAMETERS, EXTRA, VALUE, EXECUTOR
        source_name = flat_mapping_value.get(SOURCE)
        parameters_dict = flat_mapping_value.get(PARAMETERS)
        extra_parameters = flat_mapping_value.get(EXTRA)
        executor_name = flat_mapping_value.get(EXECUTOR)
        if source_name is not None and executor_name is not None:
            if executor_name == PYTHON_EXECUTOR:
                return utils.execute_function(PYTHON_SOURCE_DIRECTORY, source_name, parameters_dict)

            # BUG FIX: validate the executor *before* instantiating its tool
            # class; the old guard ran after `get_configuration_tool_class(...)()`
            # and could never fire — the call raised TypeError first.
            if not get_configuration_tool_class(executor_name):
                logging.critical('Unsupported executor/configuration tool name: %s' % executor_name)
                sys.exit(1)

            tool = get_configuration_tool_class(executor_name)()
            extension = tool.get_artifact_extension()

            # Randomized name suffix to avoid artifact filename collisions
            seed(time())
            artifact_name = '_'.join([self[NAME], executor_name, source_name,
                                      str(randint(ARTIFACT_RANGE_START, ARTIFACT_RANGE_END))]) + extension

            flat_mapping_value.update(
                name=artifact_name,
                configuration_tool=executor_name
            )

            if self.get(ARTIFACTS) is None:
                self[ARTIFACTS] = []

            self[ARTIFACTS].append(flat_mapping_value)
            # return the name of artifact
            return artifact_name

        logging.error("Unable to parse the following parameter: %s" % json.dumps(mapping_value))
        sys.exit(1)

    elif isinstance(mapping_value, list):
        return [restructure_value(v, self, if_upper=False) for v in mapping_value]

    if isinstance(mapping_value, str) and if_format_str:
        # NOTE: the process is needed because using only format function makes string from json
        mapping_value, _ = format_value(mapping_value, self, False)
    return mapping_value
# Beispiel #5 — stray separator text from the original example listing,
# commented out so the bare name does not raise NameError at import time.
def translate(template_file,
              validate_only,
              provider,
              configuration_tool,
              cluster_name,
              is_delete=False,
              a_file=True,
              extra=None,
              log_level='info',
              host_ip_parameter='public_address',
              public_key_path='~/.ssh/id_rsa.pub',
              debug=False):
    """
    Main function, is called by different shells, i.e. bash, Ansible module, grpc
    :param template_file: filename of TOSCA template or TOSCA template data if a_file is False
    :param validate_only: boolean, if template should be only validated
    :param provider: key of cloud provider
    :param configuration_tool: key of configuration tool
    :param cluster_name: name to point to desired infrastructure as one component
    :param is_delete: generate dsl scripts for infrastructure deletion
    :param a_file: if template_file is filename
    :param extra: extra for template
    :param log_level: logging level name: debug, info, warning, error, critical
    :param host_ip_parameter: provider attribute used as the host address
    :param public_key_path: path to the public SSH key for the provider template
    :param debug: forwarded to the configuration tool's to_dsl
    :return: string that is a script to deploy or delete infrastructure
    """
    log_map = dict(debug=logging.DEBUG,
                   info=logging.INFO,
                   warning=logging.WARNING,
                   error=logging.ERROR,
                   # BUG FIX: 'critical' used to map to logging.ERROR
                   critical=logging.CRITICAL)

    logging_format = "%(asctime)s %(levelname)s %(message)s"
    logging.basicConfig(filename=os.path.join(os.getenv('HOME'),
                                              '.clouni.log'),
                        filemode='a',
                        level=log_map[log_level],
                        format=logging_format,
                        datefmt='%Y-%m-%d %H:%M:%S')
    logging.info(
        "Started translation of TOSCA template \'%s\' for provider \'%s\' and configuration tool \'%s\'"
        % (template_file if a_file else 'raw', provider, configuration_tool))
    logging.info("Cluster name set to \'%s\'" % cluster_name)
    # BUG FIX: the conditional expression must be parenthesized — without the
    # parentheses it applied to the entire formatted string, so the log line
    # degenerated to just 'creation' whenever is_delete was False
    logging.info("Deploying script for cluster %s will be created" %
                 ('deletion' if is_delete else 'creation'))
    logging.info(
        "Extra parameters to the unit of deployment scripts will be added: %s"
        % json.dumps(extra))
    logging.info("Log level is set to %s" % log_level)

    config = Configuration()
    for sec in REQUIRED_CONFIGURATION_PARAMS:
        if sec not in config.get_section(config.MAIN_SECTION).keys():
            logging.error(
                'Provider configuration parameter "%s" is missing in configuration file'
                % sec)
            sys.exit(1)

    # Read the template from disk, or treat the argument as raw content
    if a_file:
        template_file = os.path.join(os.getcwd(), template_file)
        with open(template_file, 'r') as f:
            template_content = f.read()
    else:
        template_content = template_file

    try:
        template = yaml.load(template_content, Loader=yaml.SafeLoader)
    except yaml.scanner.ScannerError as e:
        logging.error("Error parsing TOSCA template: %s%s" %
                      (e.problem, e.context_mark))
        sys.exit(1)

    def_files = config.get_section(
        config.MAIN_SECTION).get(TOSCA_ELEMENTS_DEFINITION_FILE)
    if isinstance(def_files, six.string_types):
        def_files = [def_files]
    default_import_files = []
    for def_file in def_files:
        default_import_files.append(
            os.path.join(utils.get_project_root_path(), def_file))
    logging.info(
        "Default TOSCA template definition file to be imported \'%s\'" %
        json.dumps(default_import_files))

    # Add default import of normative TOSCA types to the template
    template[IMPORTS] = template.get(IMPORTS, [])
    for i in range(len(template[IMPORTS])):
        if isinstance(template[IMPORTS][i], dict):
            # Dict-style imports are flattened down to their 'file' value
            for import_key, import_value in template[IMPORTS][i].items():
                if isinstance(import_value, six.string_types):
                    template[IMPORTS][i] = import_value
                elif isinstance(import_value, dict):
                    if import_value.get('file', None) is None:
                        logging.error(
                            "Imports %s doesn't contain \'file\' key" %
                            import_key)
                        sys.exit(1)
                    else:
                        template[IMPORTS][i] = import_value['file']
                    if import_value.get('repository', None) is not None:
                        logging.warning(
                            "Clouni doesn't support imports \'repository\'")
    template[IMPORTS].extend(default_import_files)
    for i in range(len(template[IMPORTS])):
        template[IMPORTS][i] = os.path.abspath(template[IMPORTS][i])

    try:
        tosca_parser_template_object = ToscaTemplate(yaml_dict_tpl=template,
                                                     a_file=a_file)
    # BUG FIX: a bare 'except:' also swallowed SystemExit/KeyboardInterrupt
    except Exception:
        logging.exception("Got exception from OpenStack tosca-parser")
        sys.exit(1)

    # After validation, all templates are imported
    if validate_only:
        msg = 'The input "%(template_file)s" successfully passed validation.' \
              % {'template_file': template_file if a_file else 'TOSCA template'}
        return msg

    if not provider:
        logging.error(
            "Provider must be specified unless \'validate-only\' flag is used")
        sys.exit(1)

    map_files = config.get_section(
        config.MAIN_SECTION).get(TOSCA_ELEMENTS_MAP_FILE)
    if isinstance(map_files, six.string_types):
        map_files = [map_files]
    default_map_files = []
    for map_file in map_files:
        default_map_files.append(
            os.path.join(utils.get_project_root_path(), map_file))
    logging.info("Default TOSCA template map file to be used \'%s\'" %
                 json.dumps(default_map_files))

    # Parse and generate new TOSCA service template with only provider specific TOSCA types from normative types
    tosca = ProviderToscaTemplate(tosca_parser_template_object,
                                  provider,
                                  configuration_tool,
                                  cluster_name,
                                  host_ip_parameter,
                                  public_key_path,
                                  is_delete,
                                  common_map_files=default_map_files)

    # Init configuration tool class
    tool = get_configuration_tool_class(configuration_tool)()

    default_artifacts_directory = config.get_section(
        config.MAIN_SECTION).get(DEFAULT_ARTIFACTS_DIRECTORY)

    # Copy used conditions from intermediate service template
    if tosca.used_conditions_set:
        tool.copy_conditions_to_the_directory(tosca.used_conditions_set,
                                              default_artifacts_directory)

    # Manage new artifacts for intermediate template
    # NOTE(review): tosca.artifacts is appended to while being iterated, so
    # newly added entries are visited too — confirm this is intended
    tool_artifacts = []
    for art in tosca.artifacts:
        executor = art.get(EXECUTOR)
        if bool(executor) and executor != configuration_tool:
            art_list = [art]
            configuration_class = get_configuration_tool_class(
                art['executor'])()
            _, new_art = utils.generate_artifacts(configuration_class,
                                                  art_list,
                                                  default_artifacts_directory)
            tosca.artifacts.append(new_art)
        else:
            tool_artifacts.append(art)

    if not extra:
        extra = {}
    extra_full = utils.deep_update_dict(
        extra,
        tosca.extra_configuration_tool_params.get(configuration_tool, {}))

    configuration_content = tool.to_dsl(
        tosca.provider,
        tosca.provider_operations,
        tosca.reversed_provider_operations,
        tosca.cluster_name,
        is_delete,
        artifacts=tool_artifacts,
        target_directory=default_artifacts_directory,
        inputs=tosca.inputs,
        outputs=tosca.outputs,
        extra=extra_full,
        debug=debug)
    return configuration_content
def translate(template_file,
              validate_only,
              provider,
              configuration_tool,
              cluster_name,
              is_delete=False,
              a_file=True,
              extra=None):
    """
    Main function, is called by different shells, i.e. bash, Ansible module, grpc
    :param template_file: filename of TOSCA template or TOSCA template data if a_file is False
    :param validate_only: boolean, if template should be only validated
    :param provider: key of cloud provider
    :param configuration_tool: key of configuration tool
    :param cluster_name: name to point to desired infrastructure as one component
    :param is_delete: generate dsl scripts for infrastructure deletion
    :param a_file: if template_file is filename
    :param extra: extra for template
    :return: string that is a script to deploy or delete infrastructure
    """
    config = Configuration()

    # Fail fast on any missing mandatory configuration parameter
    main_section = config.get_section(config.MAIN_SECTION)
    for required_param in REQUIRED_CONFIGURATION_PARAMS:
        if required_param not in main_section.keys():
            raise ProviderConfigurationParameterError(what=required_param)

    # Obtain the raw template text from disk or directly from the argument
    if a_file:
        template_file = os.path.join(os.getcwd(), template_file)
        with open(template_file, 'r') as template_source:
            template_content = template_source.read()
    else:
        template_content = template_file
    template = yaml_parse(template_content)

    def_file = config.get_section(
        config.MAIN_SECTION).get(TOSCA_DEFINITION_FILE)
    default_import_file = os.path.join(utils.get_project_root_path(), def_file)

    # Always import the normative TOSCA definitions; any user-supplied
    # imports are normalized to absolute paths first
    existing_imports = template.get(IMPORTS)
    if existing_imports:
        for idx, import_path in enumerate(existing_imports):
            existing_imports[idx] = os.path.abspath(import_path)
        existing_imports.append(default_import_file)
    else:
        template[IMPORTS] = [default_import_file]

    tosca_parser_template_object = ToscaTemplate(yaml_dict_tpl=template,
                                                 a_file=a_file)

    if validate_only:
        return 'The input "%(template_file)s" successfully passed validation.' \
               % {'template_file': template_file if a_file else 'template'}

    if not provider:
        ExceptionCollector.appendException(
            UnspecifiedParameter(what=('validate-only', 'provider')))

    # Parse and generate new TOSCA service template with only provider specific TOSCA types from normative types
    tosca = ProviderToscaTemplate(tosca_parser_template_object, provider,
                                  cluster_name)

    # Init configuration tool class
    tool = get_configuration_tool_class(configuration_tool)()

    default_artifacts_directory = config.get_section(
        config.MAIN_SECTION).get(DEFAULT_ARTIFACTS_DIRECTORY)

    # Copy used conditions from intermediate service template
    if tosca.used_conditions_set:
        tool.copy_conditions_to_the_directory(tosca.used_conditions_set,
                                              default_artifacts_directory)

    # Artifacts aimed at a foreign executor are materialized as files;
    # the rest are handed over to the configuration tool itself.
    # NOTE(review): tosca.artifacts is extended while being iterated, so the
    # generated entries are visited too — confirm this is intended
    tool_artifacts = []
    for artifact in tosca.artifacts:
        artifact_executor = artifact.get(EXECUTOR)
        if artifact_executor and artifact_executor != configuration_tool:
            tosca.artifacts.extend(
                generate_artifacts([artifact], default_artifacts_directory))
        else:
            tool_artifacts.append(artifact)

    extra = extra or {}
    extra_full = utils.deep_update_dict(
        extra,
        tosca.extra_configuration_tool_params.get(configuration_tool, {}))

    return tool.to_dsl(tosca.provider,
                       tosca.provider_nodes_queue,
                       tosca.cluster_name,
                       is_delete,
                       tool_artifacts,
                       default_artifacts_directory,
                       inputs=tosca.inputs,
                       outputs=tosca.outputs,
                       extra=extra_full)
def restructure_mapping_facts(elements_map,
                              extra_elements_map=None,
                              target_parameter=None,
                              source_parameter=None,
                              source_value=None):
    """
    Function is used to restructure mapping values with the case of `facts`, `condition`, `arguments`, `value` keys
    :param elements_map: current (sub)structure of the mapping being transformed
    :param extra_elements_map: accumulator of extra mapping elements created on the way
    :param target_parameter: dotted path of the provider parameter being mapped
    :param source_parameter: parameter of the closest enclosing MAP_KEY element
    :param source_value: value of the closest enclosing MAP_KEY element
    :return: tuple (restructured elements, extra_elements_map, conditions)
    """
    conditions = []
    elements_map = copy.deepcopy(elements_map)
    if not extra_elements_map:
        extra_elements_map = []

    if isinstance(elements_map, dict):
        cur_parameter = elements_map.get(PARAMETER)
        if cur_parameter and isinstance(cur_parameter, str):
            if elements_map.get(MAP_KEY):
                # A MAP_KEY element starts a new source context for children
                source_parameter = cur_parameter
                source_value = elements_map.get(VALUE)
            elif target_parameter:
                target_parameter += SEPARATOR + cur_parameter
            else:
                target_parameter = cur_parameter
        new_elements_map = dict()
        for k, v in elements_map.items():
            cur_elements, extra_elements_map, new_conditions = restructure_mapping_facts(
                v, extra_elements_map, target_parameter, source_parameter,
                source_value)
            new_elements_map.update({k: cur_elements})
            conditions.extend(new_conditions)

        if isinstance(new_elements_map.get(PARAMETER, ''), dict):
            # The parameter itself resolved to a dict: replace it with a
            # relationship operation output and record an extra mapping element
            separated_target_parameter = target_parameter.split(SEPARATOR)
            target_type = None
            target_short_parameter = None
            for i in range(len(separated_target_parameter)):
                if separated_target_parameter[i] in NODE_TEMPLATE_KEYS:
                    target_type = SEPARATOR.join(
                        separated_target_parameter[:i])
                    target_short_parameter = '_'.join(
                        separated_target_parameter[i:])
                    break
            if not target_short_parameter or not target_type:
                ExceptionCollector.appendException(
                    ToscaParametersMappingFailed(what=target_parameter))

            input_parameter = new_elements_map[PARAMETER]
            input_value = new_elements_map[VALUE]
            input_keyname = new_elements_map.get(KEYNAME)

            provider = separated_target_parameter[0]
            target_relationship_type = SEPARATOR.join(
                [provider, RELATIONSHIPS, "DependsOn"])
            relationship_name = "{self[name]}_server_" + utils.snake_case(
                separated_target_parameter[-1])

            operation_name = 'modify_' + target_short_parameter
            value_name = 'modified_' + target_short_parameter
            interface_name = 'Extra'
            new_elements_map = {
                GET_OPERATION_OUTPUT: [
                    relationship_name, interface_name, operation_name,
                    value_name
                ]
            }

            cur_target_parameter = SEPARATOR.join([
                target_relationship_type, INTERFACES, interface_name,
                operation_name
            ])
            cur_extra_element = {
                PARAMETER: source_parameter,
                MAP_KEY: {
                    PARAMETER: cur_target_parameter,
                    KEYNAME: relationship_name,
                    VALUE: {
                        IMPLEMENTATION: {
                            SOURCE: SET_FACT_SOURCE,
                            VALUE: "default_value",
                            EXECUTOR: ANSIBLE,
                            PARAMETERS: {
                                value_name:
                                "{{{{ {{ input_parameter: input_value }} }}}}"
                                # so many braces because format
                                # uses braces and replace '{{' with '{'
                            }
                        },
                        INPUTS: {
                            "input_parameter": input_parameter,
                            "input_value": input_value
                        }
                    }
                },
                VALUE: source_value
            }
            if input_keyname:
                # TODO add keyname to the parameter outside the new_elements_map
                # BUG FIX: the subscript used the *builtin* `map` instead of the
                # MAP_KEY constant, so the keyname was stored under a bogus key
                # and never reached the MAP_KEY sub-dict
                cur_extra_element[MAP_KEY][KEYNAME] = input_keyname
            extra_elements_map.append(cur_extra_element)

        # The element is a "facts structure" only if it carries every key of
        # FACTS_MAPPING_VALUE_STRUCTURE
        if_facts_structure = False
        keys = new_elements_map.keys()
        if len(keys) > 0:
            if_facts_structure = True
            for k in FACTS_MAPPING_VALUE_STRUCTURE:
                if k not in keys:
                    if_facts_structure = False
        if if_facts_structure:
            # NOTE: end of recursion
            assert target_parameter

            condition = new_elements_map[CONDITION]
            fact_name = new_elements_map[FACTS]
            value = new_elements_map[VALUE]
            arguments = new_elements_map[ARGUMENTS]
            executor = new_elements_map[EXECUTOR]
            if not get_configuration_tool_class(executor):
                ExceptionCollector.appendException(
                    UnsupportedExecutorType(what=executor))
            new_elements_map, cur_extra_elements = get_source_structure_from_facts(
                condition, fact_name, value, arguments, executor,
                target_parameter, source_parameter, source_value)
            conditions.append(condition)
            extra_elements_map.extend(cur_extra_elements)

        return new_elements_map, extra_elements_map, conditions

    if isinstance(elements_map, list):
        new_elements_map = []
        for k in elements_map:
            cur_elements, extra_elements_map, new_conditions = restructure_mapping_facts(
                k, extra_elements_map, target_parameter, source_parameter,
                source_value)
            new_elements_map.append(cur_elements)
            conditions.extend(new_conditions)
        return new_elements_map, extra_elements_map, conditions

    return elements_map, extra_elements_map, conditions