Example no. 1
    def gather_global_operations(self, element_object):

        interfaces = []
        element_template_name = None
        (_, element_type, _) = tosca_type.parse(element_object.type)
        if element_type == NODES:
            interfaces = self.get_interfaces_from_node(element_object)
            element_template_name = element_object.name
            op_required = self.list_get_operation_outputs(
                element_object.nodetemplate.entity_tpl)
            self.manage_operation_output(op_required, element_template_name)
        elif element_type == RELATIONSHIPS:
            # NOTE: interfaces can't be used directly here as it contains an error
            interfaces = self.get_interfaces_from_relationship(element_object)
            element_template_name = element_object.name

        if not element_template_name:
            return

        operations = {}
        for interface_name, ops in interfaces.items():
            for operation_name, operation_data in ops.items():
                operations['_'.join([interface_name.lower(),
                                     operation_name])] = operation_data

        # Sort operations by dependency
        prev_len = len(operations) + 1
        required_operations = {}
        for op_name, op in operations.items():
            if isinstance(op, six.string_types):
                op = {IMPLEMENTATION: op}
            op_required = self.list_get_operation_outputs(op)
            required_operations[op_name] = op_required
            self.manage_operation_output(op_required, element_template_name)

        while len(operations) > 0 and prev_len > len(operations):
            ops_for_iter = copy.deepcopy(operations)
            prev_len = len(operations)
            for op_name, op in ops_for_iter.items():
                op_required = required_operations[op_name]
                if_executable_now = True
                for i in op_required:
                    if i[0] == SELF:
                        i[0] = element_template_name
                    temp_op_name = '_'.join(i[:3]).lower()
                    if temp_op_name not in self.global_operations_queue:
                        if_executable_now = False
                        break
                if if_executable_now:
                    temp_op_name = '_'.join([element_template_name,
                                             op_name]).lower()
                    self.global_operations_queue.append(temp_op_name)
                    updating_op_info = {temp_op_name: op}
                    utils.deep_update_dict(self.global_operations_info,
                                           updating_op_info)
                    operations.pop(op_name)

        if len(operations) > 0:
            ExceptionCollector.appendException(
                TemplateDependencyError(what=element_template_name))
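The while-loop above is effectively a fixed-point scheduling pass over operation outputs: each iteration appends every operation whose required outputs are already in the queue, and it stops once a pass makes no progress, at which point any leftover operations trigger the TemplateDependencyError. A minimal standalone sketch of the same idea follows; the names and data shapes here are illustrative only, not the project's API.

def order_by_prerequisites(operations, required):
    # operations: {op_name: op_data}; required: {op_name: set of op_names it depends on}
    queue, remaining = [], dict(operations)
    progress = True
    while remaining and progress:
        progress = False
        for name in list(remaining):
            # emit the operation once all its prerequisites are already queued
            if required.get(name, set()) <= set(queue):
                queue.append(name)
                remaining.pop(name)
                progress = True
    # a non-empty 'remaining' signals an unresolved (e.g. cyclic) dependency
    return queue, remaining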
Example no. 2
 def update_relationships(self, new_dependencies, templ_name, direction, rel_name, post_op, banned_ops=None):
     if banned_ops is None:
         banned_ops = []
     utils.deep_update_dict(new_dependencies, {
         templ_name + SEPARATOR + rel_name: {direction + SEPARATOR + post_op}})
     banned_keys = [templ_name + SEPARATOR + x for x in banned_ops]
     for key, value in new_dependencies.items():
         for elem in value:
             if elem == direction + SEPARATOR + post_op and key != templ_name + SEPARATOR + rel_name \
                     and key not in banned_keys:
                 utils.deep_update_dict(new_dependencies,
                                        {key: {templ_name + SEPARATOR + rel_name}})
     return new_dependencies
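All of these snippets lean on utils.deep_update_dict. Its implementation is not shown here; the sketch below only illustrates the recursive-merge behaviour the call sites appear to assume (nested dicts merged key by key, sets unioned, everything else overwritten) and is an assumption, not the project's code.

def deep_update_dict(target, source):
    # recursively merge 'source' into 'target' and return 'target'
    for key, value in source.items():
        if isinstance(value, dict) and isinstance(target.get(key), dict):
            deep_update_dict(target[key], value)        # merge nested dicts in place
        elif isinstance(value, set) and isinstance(target.get(key), set):
            target[key] = target[key] | value           # union sets into a new set
        else:
            target[key] = value                         # overwrite scalars and lists
    return target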
Example no. 3
 def normative_nodes_graph_dependency(self):
     """
         This method generates dict of nodes, sorted by
         dependencies from normative TOSCA templates
     """
     nodes = set(self.node_templates.keys())
     dependencies = {}
     for templ_name in nodes:
         set_intersection = nodes.intersection(self.template_dependencies.get(templ_name, set()))
         utils.deep_update_dict(dependencies, {templ_name: set_intersection})
     return dependencies
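For illustration only (the data below is hypothetical), the method keeps, for every node template, just those dependencies that are themselves node templates:

# hypothetical inputs
# nodes = {'server', 'server_port'}
# self.template_dependencies = {'server': {'server_port', 'keypair'}}
# resulting dependency dict:
# {'server': {'server_port'}, 'server_port': set()}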
Example no. 4
    def manage_operation_output(self, op_required, element_template_name):
        for o in op_required:
            if o[0] == SELF:
                o[0] = element_template_name
            temp_op_name = '_'.join(o[:3]).lower()

            seed(time())
            output_id = o[-1] + '_' + str(
                randint(OUTPUT_ID_RANGE_START, OUTPUT_ID_RANGE_END))
            updating_op_info = {temp_op_name: {OUTPUT_IDS: {o[-1]: output_id}}}
            utils.deep_update_dict(self.global_operations_info,
                                   updating_op_info)

    def get_ansible_tasks_from_interface(self, element_object, target_directory, is_delete, additional_args=None):
        if additional_args is None:
            additional_args = {}
        else:
            additional_args_global = copy.deepcopy(additional_args.get('global', {}))
            additional_args_element = copy.deepcopy(additional_args.get(element_object.name, {}))
            additional_args = utils.deep_update_dict(additional_args_global,
                                                     additional_args_element)
        ansible_tasks = []
        scripts = []
        for interface in element_object.nodetemplate.interfaces:
            if (not is_delete and interface.name == 'create') or (is_delete and interface.name == 'delete'):
                implementations = interface.implementation
                if isinstance(interface.implementation, six.string_types):
                    implementations = [interface.implementation]
                scripts.extend(implementations)
                for script in implementations:
                    import_file = os.path.join(target_directory, script)
                    os.makedirs(os.path.dirname(import_file), exist_ok=True)
                    copyfile(script, import_file)
                    for input_name, input_value in interface.inputs.items():
                        ansible_tasks.append({
                            SET_FACT: {
                                input_name: input_value
                            }
                        })
                    new_ansible_task = {
                        IMPORT_TASKS_MODULE: import_file
                    }
                    new_ansible_task.update(additional_args)
                    ansible_tasks.append(new_ansible_task)

        return ansible_tasks

    def get_ansible_tasks_for_delete(self, element_object, description_by_type, module_by_type, additional_args=None):
        """
        Fill the dict with ansible task arguments to delete infrastructure
        Operations are mentioned in the node or in relationship_template
        :param element_object: ProviderResource
        :return: list of ansible tasks to place in the playbook
        """
        ansible_tasks = []
        if additional_args is None:
            additional_args = {}
        else:
            additional_args_global = copy.deepcopy(additional_args.get('global', {}))
            additional_args_element = {}
            additional_args = utils.deep_update_dict(additional_args_global, additional_args_element)

        task_name = element_object.name.replace('-', '_')
        ansible_task_list = [dict(), dict()]
        for task in ansible_task_list:
            task[NAME] = description_by_type
        ansible_task_list[0][module_by_type] = {
            NAME: self.rap_ansible_variable(task_name + '_delete'), 'state': 'absent'}
        ansible_task_list[1][module_by_type] = {
            NAME: self.rap_ansible_variable('item'), 'state': 'absent'}
        ansible_task_list[0]['when'] = task_name + '_delete' + IS_DEFINED
        ansible_task_list[1]['when'] = task_name + '_ids is defined'
        ansible_task_list[1]['loop'] = self.rap_ansible_variable(task_name + '_ids | flatten(levels=1)')
        for task in ansible_task_list:
            task[REGISTER] = task_name + '_var'
            task.update(additional_args)
            ansible_tasks.append(task)
            ansible_tasks.append(
                {SET_FACT: task_name + '=\'' + self.rap_ansible_variable(task_name + '_var') + '\'',
                 'when': task_name + '_var' + '.changed'})
        return ansible_tasks
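For illustration, assuming the constants resolve to the usual Ansible keys (NAME = 'name', REGISTER = 'register', IS_DEFINED = ' is defined') and rap_ansible_variable wraps its argument in '{{ }}', the first generated delete task for a hypothetical 'os_server' module would look roughly like:

# {'name': description_by_type,
#  'os_server': {'name': '{{ server_delete }}', 'state': 'absent'},
#  'when': 'server_delete is defined',
#  'register': 'server_var'}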
Example no. 7
    def to_configuration_dsl(self,
                             configuration_tool,
                             is_delete,
                             directory=None,
                             extra=None):
        """
        Fulfill configuration_content with functions based on configuration tool from every node
        :return:
        """
        if not directory:
            directory = self.DEFAULT_ARTIFACTS_DIRECTORY
        if not extra:
            extra = dict()

        self.configuration_content = ''
        self.configuration_ready = False
        tool = CONFIGURATION_TOOLS.get(configuration_tool)()
        if bool(self.used_conditions_set):
            tool.copy_conditions_to_the_directory(self.used_conditions_set,
                                                  directory)
        tool_artifacts = []
        for art in self.artifacts:
            executor = art.get(EXECUTOR)
            if bool(executor) and executor != configuration_tool:
                self.generate_artifacts([art], directory)
            else:
                tool_artifacts.append(art)
        extra = deep_update_dict(
            extra,
            self.extra_configuration_tool_params.get(configuration_tool, {}))
        if is_delete:
            self.configuration_content = tool.to_dsl_for_delete(
                self.provider, self.provider_nodes_queue, tool_artifacts,
                directory, self.cluster_name, extra=extra)
        else:
            self.configuration_content = tool.to_dsl_for_create(
                self.provider, self.provider_nodes_queue, tool_artifacts,
                directory, self.cluster_name, extra=extra)
        self.configuration_ready = True
        return self.configuration_content
Example no. 8
    def translate_to_provider(self):
        new_element_templates, new_extra, template_mapping = translate_to_provider(self)

        dict_tpl = {}
        self.template_mapping = template_mapping
        if new_element_templates.get(NODES):
            dict_tpl[NODE_TEMPLATES] = new_element_templates[NODES]
        if new_element_templates.get(RELATIONSHIPS):
            dict_tpl[RELATIONSHIP_TEMPLATES] = new_element_templates[RELATIONSHIPS]
        if new_element_templates.get(OUTPUTS):
            dict_tpl[OUTPUTS] = new_element_templates[OUTPUTS]
        if self.inputs:
            dict_tpl[INPUTS] = self.inputs

        rel_types = {}
        for k, v in self.definitions.items():
            (_, element_type, _) = utils.tosca_type_parse(k)
            if element_type == RELATIONSHIPS:
                rel_types[k] = v

        logging.debug("TOSCA template with non normative types for provider %s was generated: \n%s"
                      % (self.provider, yaml.dump(dict_tpl)))

        try:
            topology_tpl = TopologyTemplate(dict_tpl, self.definitions, rel_types=rel_types)
        except Exception:
            logging.exception("Failed to parse intermidiate non-normative TOSCA template with OpenStack tosca-parser")
            sys.exit(1)

        self.extra_configuration_tool_params = utils.deep_update_dict(self.extra_configuration_tool_params, new_extra)

        self.node_templates = new_element_templates.get(NODES, {})
        self.relationship_templates = new_element_templates.get(RELATIONSHIPS, {})
        self.outputs = new_element_templates.get(OUTPUTS, {})
Example no. 9
    def translate_to_provider(self):
        new_element_templates, new_artifacts, conditions_set, new_extra = translate_to_provider(
            self.tosca_elements_map_to_provider(),
            self.tosca_topology_template)

        self.used_conditions_set = conditions_set
        dict_tpl = copy.deepcopy(self.tosca_topology_template.tpl)
        if new_element_templates.get(NODES):
            dict_tpl[NODE_TEMPLATES] = new_element_templates[NODES]
        if new_element_templates.get(RELATIONSHIPS):
            dict_tpl[RELATIONSHIP_TEMPLATES] = new_element_templates[
                RELATIONSHIPS]

        rel_types = []
        for k, v in self.provider_defs.items():
            (_, element_type, _) = tosca_type.parse(k)
            if element_type == RELATIONSHIP_TYPES:
                rel_types.append(v)

        topology_tpl = TopologyTemplate(dict_tpl, self.full_provider_defs,
                                        rel_types)
        self.artifacts.extend(new_artifacts)
        self.extra_configuration_tool_params = deep_update_dict(
            self.extra_configuration_tool_params, new_extra)

        return topology_tpl
Example no. 10
def retrieve_node_templates(input_dict, input_prefix=None):
    r = dict()
    for k, v in input_dict.items():
        if k in NODE_TEMPLATE_KEYS:
            return {input_prefix: input_dict}
        t = retrieve_node_templates(v, SEPARATOR.join([input_prefix, k]) if input_prefix else k)
        r = utils.deep_update_dict(r, t)
    return r
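As an illustration of retrieve_node_templates: it descends through nested dicts until it reaches a level that contains one of NODE_TEMPLATE_KEYS (template sections such as properties) and keys the result by the SEPARATOR-joined path to that level. The concrete values of SEPARATOR and NODE_TEMPLATE_KEYS below are assumptions made only for the example.

# assuming SEPARATOR = '.' and 'properties' in NODE_TEMPLATE_KEYS
# input_dict = {'server': {'tosca.nodes.Compute': {'properties': {'num_cpus': 2}}}}
# retrieve_node_templates(input_dict) would return
# {'server.tosca.nodes.Compute': {'properties': {'num_cpus': 2}}}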
def translate(tosca_elements_map_to_provider, topology_template, provider):
    """
    Main function of this file, the only which is used outside the file
    :param tosca_elements_map_to_provider: dict from provider specific file
    tosca_elements_map_to_<provider>.yaml
    :param node_templates: input node_templates
    :return: list of new node templates and relationship templates and
    list of artifacts to be used for generating scripts
    """
    node_templates = topology_template.nodetemplates
    relationship_templates = topology_template.relationship_templates
    element_templates = node_templates + relationship_templates

    new_element_templates = {}
    artifacts = []
    conditions = []
    extra = dict()
    self = dict()
    self[ARTIFACTS] = []
    self[EXTRA] = dict()

    for element in element_templates:
        (namespace, _, _) = utils.tosca_type_parse(element.type)
        self[NAME] = element.name
        self[KEYNAME] = element.name
        self[BUFFER] = {}

        if namespace != provider:
            restructured_mapping = restructure_mapping(
                tosca_elements_map_to_provider, element, self)

            restructured_mapping, extra_mappings, new_conditions = restructure_mapping_facts(
                restructured_mapping)
            restructured_mapping.extend(extra_mappings)
            conditions.extend(new_conditions)

            tpl_structure = translate_node_from_tosca(restructured_mapping,
                                                      element.name, self)
            for tpl_name, temp_tpl in tpl_structure.items():
                for node_type, tpl in temp_tpl.items():
                    (_, element_type, _) = utils.tosca_type_parse(node_type)
                    tpl[TYPE] = node_type
                    new_element_templates[
                        element_type] = new_element_templates.get(
                            element_type, {})
                    new_element_templates[element_type].update(
                        {tpl_name: copy.deepcopy(tpl)})
        else:
            new_element = translate_element_from_provider(element)
            new_element_templates = utils.deep_update_dict(
                new_element_templates, new_element)

    conditions = set(conditions)

    return new_element_templates, self[ARTIFACTS], conditions, self[EXTRA]
Example no. 12
def restructure_mapping_buffer(restructured_mapping, self):
    self[BUFFER] = resolve_self_in_buffer(self[BUFFER], self)
    r = []
    for mapping in restructured_mapping:
        parameter = mapping[MAP_KEY][PARAMETER]
        filter = "\{self\[buffer\]"
        if re.search(filter, parameter):
            self[VALUE] = mapping[VALUE]
            self[PARAMETER] = mapping[PARAMETER]
            iter_value, _ = format_value(mapping[MAP_KEY][VALUE], self)
            params_parameter = parameter[6:-2].split('][')
            iter_num = len(params_parameter)
            for i in range(iter_num - 1, 0, -1):
                temp_param = dict()
                temp_param[params_parameter[i]] = iter_value
                iter_value = temp_param
            utils.deep_update_dict(self, {params_parameter[0]: iter_value})
        else:
            r.append(mapping)
    return r
Example no. 13
 def update_node_template(self, template, node_name, update_value,
                          param_type):
     update_value = {
         TOPOLOGY_TEMPLATE: {
             NODE_TEMPLATES: {
                 node_name: {
                     param_type: update_value
                 }
             }
         }
     }
     return deep_update_dict(template, update_value)
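For illustration, assuming TOPOLOGY_TEMPLATE and NODE_TEMPLATES resolve to the standard TOSCA section names, a call like the one below only touches the addressed parameter of the addressed node:

# update_node_template(template, 'server', {'num_cpus': 4}, 'properties')
# deep-merges the following structure into 'template':
# {'topology_template': {'node_templates': {'server': {'properties': {'num_cpus': 4}}}}}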
Example no. 14
def translate(service_tmpl):
    """
    Main function of this file, the only which is used outside the file
    :param tosca_elements_map_to_provider: dict from provider specific file
    tosca_elements_map_to_<provider>.yaml
    :param node_templates: input node_templates
    :return: list of new node templates and relationship templates and
    list of artifacts to be used for generating scripts
    """
    element_templates = copy.copy(service_tmpl.node_templates)
    element_templates.update(copy.copy(service_tmpl.relationship_templates))

    new_element_templates = {}
    template_mapping = {}
    self = dict()
    self[ARTIFACTS] = []
    self[EXTRA] = dict()
    self[PUBLIC_KEY] = service_tmpl.public_key_path

    for tmpl_name, element in element_templates.items():
        (namespace, _, _) = utils.tosca_type_parse(element[TYPE])
        self[NAME] = tmpl_name
        self[KEYNAME] = tmpl_name
        self[BUFFER] = {}

        if namespace != service_tmpl.provider:
            restructured_mapping = restructure_mapping(service_tmpl, element, tmpl_name, self)
            restructured_mapping = sort_host_ip_parameter(restructured_mapping, service_tmpl.host_ip_parameter)
            restructured_mapping = restructure_mapping_buffer(restructured_mapping, self)
            restructured_mapping, extra_mappings = restructure_mapping_facts(restructured_mapping, self, service_tmpl.is_delete, service_tmpl.cluster_name)
            restructured_mapping.extend(extra_mappings)
            restructured_mapping = restructure_get_attribute(restructured_mapping, service_tmpl, self)

            tpl_structure = translate_node_from_tosca(restructured_mapping, tmpl_name, self)
            for tpl_name, temp_tpl in tpl_structure.items():
                for node_type, tpl in temp_tpl.items():
                    (_, element_type, _) = utils.tosca_type_parse(node_type)
                    tpl[TYPE] = node_type
                    if tmpl_name not in template_mapping:
                        template_mapping[tmpl_name] = {tpl_name}
                    else:
                        template_mapping[tmpl_name].add(tpl_name)
                    new_element_templates[element_type] = new_element_templates.get(element_type, {})
                    new_element_templates[element_type].update({tpl_name: copy.deepcopy(tpl)})
        else:
            new_element = translate_element_from_provider(tmpl_name, element)
            new_element_templates = utils.deep_update_dict(new_element_templates, new_element)

    self_extra = utils.replace_brackets(self[EXTRA], False)
    self_artifacts = utils.replace_brackets(self[ARTIFACTS], False)
    execute(self_artifacts, service_tmpl.is_delete, service_tmpl.cluster_name)

    return new_element_templates, self_extra, template_mapping
Example no. 15
def translate_node_from_tosca(restructured_mapping, tpl_name, self):
    """
    Translate TOSCA definitions into provider definitions using rules from element_map_to_provider
    :param restructured_mapping: list of dicts(parameter, map, value)
    :param tpl_name: str
    :return: entity_tpl as dict
    """
    resulted_structure = {}

    for item in restructured_mapping:
        ExceptionCollector.start()
        self[PARAMETER] = item[PARAMETER]
        self[VALUE] = item[VALUE]
        mapped_param = restructure_value(mapping_value=item[MAP_KEY],
                                         self=self)
        ExceptionCollector.stop()
        if ExceptionCollector.exceptionsCaught():
            raise ValidationError(
                message='\nTranslating to provider failed: '.join(
                    ExceptionCollector.getExceptionsReport()))
        structures, keyname = get_structure_of_mapped_param(
            mapped_param, item[VALUE])

        for structure in structures:
            r = retrieve_node_templates(structure)
            for node_type, tpl in r.items():
                if not keyname:
                    (_, _, type_name) = tosca_type.parse(node_type)
                    if not type_name:
                        ExceptionCollector.appendException()
                    keyname = self[KEYNAME] + "_" + snake_case.convert(
                        type_name)
                node_tpl_with_name = {keyname: {node_type: tpl}}
                resulted_structure = deep_update_dict(resulted_structure,
                                                      node_tpl_with_name)

    for keyname, node in resulted_structure.items():
        for node_type, tpl in node.items():
            if tpl.get(REQUIREMENTS):
                reqs = []
                for req_name, req in tpl[REQUIREMENTS].items():
                    reqs.append({req_name: req})
                resulted_structure[keyname][node_type][REQUIREMENTS] = reqs
    return resulted_structure
Example no. 16
    def get_ansible_tasks_for_create(self,
                                     element_object,
                                     target_directory,
                                     node_filter_config,
                                     description_by_type,
                                     module_by_type,
                                     additional_args=None):
        """
        Fulfill the dict with ansible task arguments to create infrastructure
        If the node contains get_operation_output parameters then the operation is executed
        If the operation is not mentioned then it is not executed
        Operations are mentioned in the node or in relationship_template
        :param: node: ProviderResource
        :param additional_args: dict of arguments to add
        :return: string of ansible task to place in playbook
        """

        if additional_args is None:
            additional_args = {}
        else:
            additional_args_global = copy.deepcopy(
                additional_args.get('global', {}))
            additional_args_element = copy.deepcopy(
                additional_args.get(element_object.name, {}))
            additional_args = utils.deep_update_dict(additional_args_global,
                                                     additional_args_element)

        ansible_tasks = []

        configuration_args = {}
        for arg_key, arg in element_object.configuration_args.items():
            configuration_args[arg_key] = arg

        ansible_args = copy.copy(element_object.configuration_args)
        ansible_args[STATE] = 'present'
        task_name = element_object.name.replace('-', '_')
        ansible_task_as_dict = dict()
        ansible_task_as_dict[NAME] = description_by_type
        ansible_task_as_dict[module_by_type] = configuration_args
        ansible_task_as_dict[REGISTER] = task_name
        ansible_task_as_dict.update(additional_args)
        ansible_tasks.append(ansible_task_as_dict)
        return ansible_tasks
Example no. 17
def translate_node_from_tosca(restructured_mapping, tpl_name, self):
    """
    Translate TOSCA definitions into provider definitions using rules from element_map_to_provider
    :param restructured_mapping: list of dicts(parameter, map, value)
    :param tpl_name: str
    :return: entity_tpl as dict
    """
    resulted_structure = {}

    for item in restructured_mapping:
        self[PARAMETER] = item[PARAMETER]
        self[VALUE] = item[VALUE]
        mapped_param = restructure_value(
            mapping_value=item[MAP_KEY],
            self=self
        )

        structures, keyname = get_structure_of_mapped_param(mapped_param, item[VALUE])

        for structure in structures:
            r = retrieve_node_templates(structure)
            for node_type, tpl in r.items():
                if not keyname:
                    keyname = get_keyname_from_type(self[KEYNAME], node_type)
                node_tpl_with_name = {keyname: {node_type: tpl}}
                resulted_structure = utils.deep_update_dict(resulted_structure, node_tpl_with_name)

    for keyname, node in resulted_structure.items():
        for node_type, tpl in node.items():
            if tpl.get(REQUIREMENTS):
                reqs = []
                for req_name, req in tpl[REQUIREMENTS].items():
                    reqs.append({
                        req_name: req
                    })
                resulted_structure[keyname][node_type][REQUIREMENTS] = reqs
    return resulted_structure
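The final loop converts each template's requirements mapping into the list-of-single-key-dicts form used by TOSCA node templates; for illustration (the requirement names and targets below are hypothetical):

# before: {'requirements': {'host': {'node': 'server'}, 'port': {'node': 'server_port'}}}
# after:  {'requirements': [{'host': {'node': 'server'}}, {'port': {'node': 'server_port'}}]}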
Example no. 18
    def _get_full_defintion(self, definition, def_type, ready_set):
        if def_type in ready_set:
            return definition, def_type in self.software_types

        (_, _, def_type_short) = utils.tosca_type_parse(def_type)
        is_software_type = def_type_short == 'SoftwareComponent'
        is_software_parent = False
        parent_def_name = definition.get(DERIVED_FROM, None)
        if parent_def_name is not None:
            if def_type == parent_def_name:
                logging.critical("Invalid type \'%s\' is derived from itself" % def_type)
                sys.exit(1)
            if parent_def_name in ready_set:
                parent_definition = self.definitions[parent_def_name]
                is_software_parent = parent_def_name in self.software_types
            else:
                parent_definition, is_software_parent = \
                    self._get_full_defintion(self.definitions[parent_def_name], parent_def_name, ready_set)
            parent_definition = copy.deepcopy(parent_definition)
            definition = utils.deep_update_dict(parent_definition, definition)
        if is_software_type or is_software_parent:
            self.software_types.add(def_type)
        ready_set.add(def_type)
        return definition, def_type in self.software_types
Example no. 19
def get_resulted_mapping_values(parameter, mapping_value, value, self):
    """
    Manage the case when mapping value has multiple structures in mapping_value
    :param parameter:
    :param mapping_value:
    :param value:
    :return:
    """
    mapping_value = copy.deepcopy(mapping_value)
    if isinstance(mapping_value, six.string_types):
        mapping_value = {
            PARAMETER: mapping_value,
            VALUE: "{self[value]}"
        }
    mapping_value_parameter = mapping_value.get(PARAMETER)
    mapping_value_value = mapping_value.get(VALUE)
    # NOTE: first check whether the mapping parameter is a self[...] (e.g. self[buffer]) reference
    if mapping_value_parameter and mapping_value_parameter[:6] == '{self[' and mapping_value_parameter[-2:] == ']}':
        self[VALUE] = value
        self[PARAMETER] = parameter
        # The case when the value is written into a self parameter.
        # The string can contain more self parameters inside.
        if isinstance(mapping_value_value, dict):
            if mapping_value_value.get(VALUE) and mapping_value_value.get(EXECUTOR) == PYTHON_EXECUTOR and \
                    mapping_value_value.get(SOURCE):
                args, _ = format_value(mapping_value_value[PARAMETERS], self)
                mapping_value_value = utils.execute_function(PYTHON_SOURCE_DIRECTORY, mapping_value_value[SOURCE],
                                                             args)
        params_parameter = mapping_value_parameter[6:-2].split('][')
        iter_value, is_buffer = format_value(mapping_value_value, self)
        if is_buffer:
            return dict(
                parameter=parameter,
                map=dict(
                    parameter=mapping_value_parameter,
                    value=mapping_value_value
                ),
                value=value
            )
        iter_num = len(params_parameter)
        for i in range(iter_num - 1, 0, -1):
            temp_param = dict()
            param_key, _ = format_value(params_parameter[i], self)
            temp_param[param_key] = iter_value
            iter_value = temp_param
        utils.deep_update_dict(self, {params_parameter[0]: iter_value})
        return []
    elif mapping_value_parameter:
        splitted_mapping_value_parameter = mapping_value_parameter.split(SEPARATOR)
        has_section = False
        for v in splitted_mapping_value_parameter:
            if v in NODE_TEMPLATE_KEYS:
                has_section = True
                break
        if not has_section:
            if isinstance(mapping_value_value, list) and len(mapping_value_value) > 1:
                r = []
                for v in mapping_value_value:
                    mapping_value[VALUE] = v
                    item = get_resulted_mapping_values(parameter, mapping_value, value, self)
                    if isinstance(item, list):
                        if len(item) == 1:
                            item = [item]
                    else:
                        item = [item]
                    r.extend(item)
                return r
            if isinstance(mapping_value_value, six.string_types):
                splitted_mapping_value_value = mapping_value_value.split(SEPARATOR)
                for i in range(len(splitted_mapping_value_value)):
                    if splitted_mapping_value_value[i] in NODE_TEMPLATE_KEYS:
                        mapping_value[PARAMETER] = mapping_value_parameter + SEPARATOR + mapping_value_value
                        mapping_value[VALUE] = "{self[value]}"
                        return dict(
                            parameter=parameter,
                            map=mapping_value,
                            value=value
                        )

            if isinstance(mapping_value_value, dict):
                # NOTE the only valid case when the value is parameter-value structure
                mapping_value_value_parameter = mapping_value_value.get(PARAMETER)
                mapping_value_value_value = mapping_value_value.get(VALUE)
                if mapping_value_value_parameter and mapping_value_value_value:
                    mapping_value_value_keyname = mapping_value_value.get(KEYNAME)
                    if mapping_value_value_keyname:
                        mapping_value[KEYNAME] = mapping_value_value_keyname
                    mapping_value[PARAMETER] = mapping_value_parameter + SEPARATOR + mapping_value_value_parameter
                    mapping_value[VALUE] = mapping_value_value_value
                    r = get_resulted_mapping_values(parameter, mapping_value, value, self)
                    return r

            logging.critical("Unable to parse the following parameter: %s" % json.dumps(mapping_value))
            sys.exit(1)

    return dict(
        parameter=parameter,
        map=mapping_value,
        value=value
    )
Example no. 20
    def sort_nodes_and_operations_by_graph_dependency(self):
        """
            This method generates dict fith ProviderTemplates with operation, sorted by
            dependencies from normative and provider TOSCA templates
        """
        nodes = set(self.provider_nodes.keys())
        nodes = nodes.union(set(self.provider_relations.keys()))
        dependencies = {}
        lifecycle = ['configure', 'start', 'stop', 'delete']
        reversed_full_lifecycle = lifecycle[::-1] + ['create']
        # generate only dependencies from nodes
        for templ_name in nodes:
            set_intersection = nodes.intersection(self.template_dependencies.get(templ_name, set()))
            templ = self.provider_nodes.get(templ_name, self.provider_relations.get(templ_name))
            (_, element_type, _) = utils.tosca_type_parse(templ.type)
            if element_type == NODES:
                if 'interfaces' in templ.tmpl and 'Standard' in templ.tmpl['interfaces']:
                    new_operations = ['create']
                    # operation create always exists
                    for elem in lifecycle:
                        if elem in templ.tmpl['interfaces']['Standard']:
                            new_operations.append(elem)
                    # if there are any other operations - add them to new_operations and translate to a dict
                    # in the format {node.op: {node1, node2}}
                    # meaning node requires node1 and node2
                    if len(new_operations) == 1:
                        utils.deep_update_dict(dependencies, {templ_name + SEPARATOR + 'create': set_intersection})
                    else:
                        for i in range(1, len(new_operations)):
                            utils.deep_update_dict(dependencies, {
                                templ_name + SEPARATOR + new_operations[i]: {
                                    templ_name + SEPARATOR + new_operations[i - 1]}})
                        utils.deep_update_dict(dependencies,
                                               {templ_name + SEPARATOR + new_operations[0]: set_intersection})
                else:
                    utils.deep_update_dict(dependencies, {templ_name + SEPARATOR + 'create': set_intersection})
        new_normative_graph = {}

        # getting dependencies for create operations of nodes translated from one normative node
        for key, value in self.normative_nodes_graph.items():
            for elem in value:
                for op in reversed_full_lifecycle:
                    new_oper = elem + SEPARATOR + op
                    if new_oper in dependencies:
                        if key + SEPARATOR + 'create' in new_normative_graph:
                            new_normative_graph[key + SEPARATOR + 'create'].add(new_oper)
                        else:
                            new_normative_graph[key + SEPARATOR + 'create'] = {new_oper}
                        break
                else:
                    logging.error("Operation create not found")
                    sys.exit(1)
        # update dependencies
        dependencies = utils.deep_update_dict(dependencies, new_normative_graph)
        new_dependencies = {}
        # new_dependencies is needed for updating set operations
        # dict must be in format {node.op: {node1, node2}}
        for key, value in dependencies.items():
            new_set = set()
            for elem in value:
                for oper in reversed_full_lifecycle:
                    if elem + SEPARATOR + oper in dependencies:
                        new_set.add(elem + SEPARATOR + oper)
                        break
                    elif elem in dependencies:
                        new_set.add(elem)
                        break
            new_dependencies[key] = new_set

        # adding relationship operations: pre_configure_source after create of the source node,
        # pre_configure_target after create of the target node,
        # add_source in parallel with pre_configure_source but it will be executed on the target,
        # post_configure_target after configure of the target node (if there is no configure then
        # after create - in parallel with pre_configure_target),
        # post_configure_source after configure of the source node (if there is no configure then
        # after create - in parallel with pre_configure_source),
        # other operations are not supported!
        for templ_name in nodes:
            templ = self.provider_nodes.get(templ_name, self.provider_relations.get(templ_name))
            (_, element_type, _) = utils.tosca_type_parse(templ.type)
            if element_type == RELATIONSHIPS:
                if 'interfaces' in templ.tmpl and 'Configure' in templ.tmpl['interfaces']:
                    if 'pre_configure_source' in templ.tmpl['interfaces']['Configure']:
                        new_dependencies = self.update_relationships(new_dependencies, templ.name, templ.source,
                                                                     'pre_configure_source', 'create', ['add_source'])
                    if 'pre_configure_target' in templ.tmpl['interfaces']['Configure']:
                        new_dependencies = self.update_relationships(new_dependencies, templ.name, templ.target,
                                                                     'pre_configure_target', 'create')
                    if 'post_configure_source' in templ.tmpl['interfaces']['Configure']:
                        if templ.source + SEPARATOR + 'configure' in new_dependencies:
                            new_dependencies = self.update_relationships(new_dependencies, templ.name, templ.source,
                                                                         'post_configure_source', 'configure')
                        else:
                            new_dependencies = self.update_relationships(new_dependencies, templ.name, templ.source,
                                                                         'post_configure_source', 'create')
                    if 'post_configure_target' in templ.tmpl['interfaces']['Configure']:
                        if templ.target + SEPARATOR + 'configure' in new_dependencies:
                            new_dependencies = self.update_relationships(new_dependencies, templ.name, templ.target,
                                                                         'post_configure_target', 'configure')
                        else:
                            new_dependencies = self.update_relationships(new_dependencies, templ.name, templ.target,
                                                                         'post_configure_target', 'create')
                    if 'add_source' in templ.tmpl['interfaces']['Configure']:
                        new_dependencies = self.update_relationships(new_dependencies, templ.name, templ.source,
                                                                     'add_source', 'create', ['pre_configure_source'])
                    if 'add_target' in templ.tmpl['interfaces']['Configure']:
                        logging.warning('Operation add_target not supported, it will be skipped')
                    if 'target_changed' in templ.tmpl['interfaces']['Configure']:
                        logging.warning('Operation target_changed not supported, it will be skipped')
                    if 'remove_target' in templ.tmpl['interfaces']['Configure']:
                        logging.warning('Operation remove_target not supported, it will be skipped')
        # mapping strings 'node.op' to provider template of this node with this operation
        templ_mappling = {}
        for elem in new_dependencies:
            templ_name = elem.split(SEPARATOR)[0]
            templ = copy.deepcopy(self.provider_nodes.get(templ_name, self.provider_relations.get(templ_name)))
            templ.operation = elem.split(SEPARATOR)[1]
            templ_mappling[elem] = templ
        templ_dependencies = {}
        reversed_templ_dependencies = {}
        # create a dict where all elements are replaced with the provider template from templ_mappling
        # reversed_templ_dependencies is needed for delete - it is just a reversed version of the graph
        for key, value in new_dependencies.items():
            new_set = set()
            for elem in value:
                new_set.add(templ_mappling[elem])
                if templ_mappling[elem] not in reversed_templ_dependencies:
                    reversed_templ_dependencies[templ_mappling[elem]] = {templ_mappling[key]}
                elif templ_mappling[key] not in reversed_templ_dependencies[templ_mappling[elem]]:
                    reversed_templ_dependencies[templ_mappling[elem]].add(templ_mappling[key])
            templ_dependencies[templ_mappling[key]] = new_set
        return templ_dependencies, reversed_templ_dependencies
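For illustration, before the templ_mappling substitution the operation graph is a plain dict of 'node<SEPARATOR>operation' strings; with a hypothetical SEPARATOR of '.' and two nodes it might look like the sketch below, and templ_dependencies / reversed_templ_dependencies then hold the same edges keyed by the corresponding ProviderTemplate copies (with .operation set) in forward and reverse direction.

# new_dependencies = {
#     'server.create':      {'server_port.create'},
#     'server.configure':   {'server.create'},
#     'server_port.create': set(),
# }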
Example no. 21
def translate(template_file,
              validate_only,
              provider,
              configuration_tool,
              cluster_name,
              is_delete=False,
              a_file=True,
              extra=None,
              log_level='info',
              host_ip_parameter='public_address',
              public_key_path='~/.ssh/id_rsa.pub',
              debug=False):
    """
    Main function, is called by different shells, i.e. bash, Ansible module, grpc
    :param template_file: filename of TOSCA template or TOSCA template data if a_file is False
    :param validate_only: boolean, if template should be only validated
    :param provider: key of cloud provider
    :param configuration_tool: key of configuration tool
    :param cluster_name: name to point to desired infrastructure as one component
    :param is_delete: generate dsl scripts for infrastructure deletion
    :param a_file: if template_file is filename
    :param extra: extra for template
    :return: string that is a script to deploy or delete infrastructure
    """
    log_map = dict(debug=logging.DEBUG,
                   info=logging.INFO,
                   warning=logging.WARNING,
                   error=logging.ERROR,
                   critical=logging.CRITICAL)

    logging_format = "%(asctime)s %(levelname)s %(message)s"
    logging.basicConfig(filename=os.path.join(os.getenv('HOME'),
                                              '.clouni.log'),
                        filemode='a',
                        level=log_map[log_level],
                        format=logging_format,
                        datefmt='%Y-%m-%d %H:%M:%S')
    logging.info(
        "Started translation of TOSCA template \'%s\' for provider \'%s\' and configuration tool \'%s\'"
        % (template_file if a_file else 'raw', provider, configuration_tool))
    logging.info("Cluster name set to \'%s\'" % cluster_name)
    logging.info("Deploying script for cluster %s will be created" %
                 'deletion' if is_delete else 'creation')
    logging.info(
        "Extra parameters to the unit of deployment scripts will be added: %s"
        % json.dumps(extra))
    logging.info("Log level is set to %s" % log_level)

    config = Configuration()
    for sec in REQUIRED_CONFIGURATION_PARAMS:
        if sec not in config.get_section(config.MAIN_SECTION).keys():
            logging.error(
                'Provider configuration parameter "%s" is missing in configuration file'
                % sec)
            sys.exit(1)

    if a_file:
        template_file = os.path.join(os.getcwd(), template_file)
        with open(template_file, 'r') as f:
            template_content = f.read()
    else:
        template_content = template_file

    try:
        template = yaml.load(template_content, Loader=yaml.SafeLoader)
    except yaml.scanner.ScannerError as e:
        logging.error("Error parsing TOSCA template: %s%s" %
                      (e.problem, e.context_mark))
        sys.exit(1)

    def_files = config.get_section(
        config.MAIN_SECTION).get(TOSCA_ELEMENTS_DEFINITION_FILE)
    if isinstance(def_files, six.string_types):
        def_files = [def_files]
    default_import_files = []
    for def_file in def_files:
        default_import_files.append(
            os.path.join(utils.get_project_root_path(), def_file))
    logging.info(
        "Default TOSCA template definition file to be imported \'%s\'" %
        json.dumps(default_import_files))

    # Add default import of normative TOSCA types to the template
    template[IMPORTS] = template.get(IMPORTS, [])
    for i in range(len(template[IMPORTS])):
        if isinstance(template[IMPORTS][i], dict):
            for import_key, import_value in template[IMPORTS][i].items():
                if isinstance(import_value, six.string_types):
                    template[IMPORTS][i] = import_value
                elif isinstance(import_value, dict):
                    if import_value.get('file', None) is None:
                        logging.error(
                            "Imports %s doesn't contain \'file\' key" %
                            import_key)
                        sys.exit(1)
                    else:
                        template[IMPORTS][i] = import_value['file']
                    if import_value.get('repository', None) is not None:
                        logging.warning(
                            "Clouni doesn't support imports \'repository\'")
    template[IMPORTS].extend(default_import_files)
    for i in range(len(template[IMPORTS])):
        template[IMPORTS][i] = os.path.abspath(template[IMPORTS][i])

    try:
        tosca_parser_template_object = ToscaTemplate(yaml_dict_tpl=template,
                                                     a_file=a_file)
    except:
        logging.exception("Got exception from OpenStack tosca-parser")
        sys.exit(1)

    # After validation, all templates are imported
    if validate_only:
        msg = 'The input "%(template_file)s" successfully passed validation.' \
              % {'template_file': template_file if a_file else 'TOSCA template'}
        return msg

    if not provider:
        logging.error(
            "Provider must be specified unless \'validate-only\' flag is used")
        sys.exit(1)

    map_files = config.get_section(
        config.MAIN_SECTION).get(TOSCA_ELEMENTS_MAP_FILE)
    if isinstance(map_files, six.string_types):
        map_files = [map_files]
    default_map_files = []
    for map_file in map_files:
        default_map_files.append(
            os.path.join(utils.get_project_root_path(), map_file))
    logging.info("Default TOSCA template map file to be used \'%s\'" %
                 json.dumps(default_map_files))

    # Parse and generate new TOSCA service template with only provider specific TOSCA types from normative types
    tosca = ProviderToscaTemplate(tosca_parser_template_object,
                                  provider,
                                  configuration_tool,
                                  cluster_name,
                                  host_ip_parameter,
                                  public_key_path,
                                  is_delete,
                                  common_map_files=default_map_files)

    # Init configuration tool class
    tool = get_configuration_tool_class(configuration_tool)()

    default_artifacts_directory = config.get_section(
        config.MAIN_SECTION).get(DEFAULT_ARTIFACTS_DIRECTORY)

    # Copy used conditions from intermediate service template
    if tosca.used_conditions_set:
        tool.copy_conditions_to_the_directory(tosca.used_conditions_set,
                                              default_artifacts_directory)

    # Manage new artifacts for intermediate template
    tool_artifacts = []
    for art in tosca.artifacts:
        executor = art.get(EXECUTOR)
        if bool(executor) and executor != configuration_tool:
            art_list = [art]
            configuration_class = get_configuration_tool_class(
                art['executor'])()
            _, new_art = utils.generate_artifacts(configuration_class,
                                                  art_list,
                                                  default_artifacts_directory)
            tosca.artifacts.append(new_art)
        else:
            tool_artifacts.append(art)

    if not extra:
        extra = {}
    extra_full = utils.deep_update_dict(
        extra,
        tosca.extra_configuration_tool_params.get(configuration_tool, {}))

    configuration_content = tool.to_dsl(
        tosca.provider,
        tosca.provider_operations,
        tosca.reversed_provider_operations,
        tosca.cluster_name,
        is_delete,
        artifacts=tool_artifacts,
        target_directory=default_artifacts_directory,
        inputs=tosca.inputs,
        outputs=tosca.outputs,
        extra=extra_full,
        debug=debug)
    return configuration_content
Example no. 22
    def get_ansible_tasks_for_create(self, element_object, target_directory, node_filter_config, description_by_type,
                                     module_by_type, additional_args=None):
        """
        Fulfill the dict with ansible task arguments to create infrastructure
        If the node contains get_operation_output parameters then the operation is executed
        If the operation is not mentioned then it is not executed
        Operations are mentioned in the node or in relationship_template
        :param: node: ProviderResource
        :param additional_args: dict of arguments to add
        :return: string of ansible task to place in playbook
        """

        if additional_args is None:
            additional_args = {}
        else:
            additional_args_global = copy.deepcopy(additional_args.get('global', {}))
            additional_args_element = copy.deepcopy(additional_args.get(element_object.name, {}))
            additional_args = utils.deep_update_dict(additional_args_global,
                                                     additional_args_element)

        ansible_tasks = []
        if not node_filter_config:
            node_filter_config = {}
        node_filter_source_prefix = node_filter_config.get('node_filter_source_prefix', '')
        node_filter_source_postfix = node_filter_config.get('node_filter_source_postfix', '')
        node_filter_exceptions = node_filter_config.get('node_filter_exceptions', {})
        node_filter_inner_variable = node_filter_config.get('node_filter_inner_variable')

        configuration_args = {}
        for arg_key, arg in element_object.configuration_args.items():
            if isinstance(arg, dict):
                node_filter_key = arg.get(SOURCE, {}).get(NODE_FILTER)
                node_filter_value = arg.get(VALUE)
                node_filter_params = arg.get(PARAMETERS)

                if node_filter_key and node_filter_value and node_filter_params:
                    node_filter_source = node_filter_source_prefix + node_filter_key + node_filter_source_postfix
                    if node_filter_exceptions.get(node_filter_key):
                        node_filter_source = node_filter_exceptions[node_filter_key]

                    node_filter_value_with_id = node_filter_value + '_' + str(
                        utils.get_random_int(OUTPUT_ID_RANGE_START, OUTPUT_ID_RANGE_END))

                    NODE_FILTER_FACTS = 'node_filter_facts'
                    NODE_FILTER_FACTS_REGISTER = NODE_FILTER_FACTS + '_raw'
                    NODE_FILTER_FACTS_VALUE = NODE_FILTER_FACTS_REGISTER
                    if node_filter_inner_variable:
                        if isinstance(node_filter_inner_variable, dict):
                            node_filter_inner_variable = node_filter_inner_variable.get(node_filter_key, '')
                        if isinstance(node_filter_inner_variable, six.string_types):
                            node_filter_inner_variable = [node_filter_inner_variable]
                        if isinstance(node_filter_inner_variable, list):
                            for v in node_filter_inner_variable:
                                NODE_FILTER_FACTS_VALUE += '[\"' + v + '\"]'
                        else:
                            ExceptionCollector.appendException(ProviderConfigurationParameterError(
                                what='ansible.node_filter: node_filter_inner_variable'
                            ))

                    include_path = self.copy_condition_to_the_directory('equals', target_directory)
                    ansible_tasks_temp = [
                        {
                            node_filter_source: {},
                            REGISTER: NODE_FILTER_FACTS_REGISTER
                        },
                        {
                            SET_FACT: {
                                "input_facts": self.rap_ansible_variable(NODE_FILTER_FACTS_VALUE)
                            }
                        },
                        {
                            SET_FACT: {
                                "input_args": node_filter_params
                            }
                        },
                        {
                            IMPORT_TASKS_MODULE: include_path
                        },
                        {
                            SET_FACT: {
                                node_filter_value_with_id: self.rap_ansible_variable(
                                    'matched_object[\"' + node_filter_value + '\"]')
                            }
                        }
                    ]
                    # self.copy_conditions_to_the_directory({'equals'}, target_directory)
                    ansible_tasks.extend(ansible_tasks_temp)
                    arg = self.rap_ansible_variable(node_filter_value_with_id)
            configuration_args[arg_key] = arg

        post_tasks = []
        for i in element_object.nodetemplate.interfaces:
            if i.name == 'preconfigure':
                op_name = '_'.join([element_object.name, 'prepare', 'preconfigure'])
                if not self.global_operations_info.get(op_name, {}).get(OUTPUT_IDS):
                    ansible_tasks.extend(
                        self.get_ansible_tasks_from_operation(op_name, target_directory, True))
            if i.name == 'configure':
                op_name = '_'.join([element_object.name, 'prepare', 'configure'])
                if not self.global_operations_info.get(op_name, {}).get(OUTPUT_IDS):
                    post_tasks.extend(
                        self.get_ansible_tasks_from_operation(op_name, target_directory, True))
        ansible_args = copy.copy(element_object.configuration_args)
        ansible_args[STATE] = 'present'
        task_name = element_object.name.replace('-', '_')
        ansible_task_as_dict = dict()
        ansible_task_as_dict[NAME] = description_by_type
        ansible_task_as_dict[module_by_type] = configuration_args
        ansible_task_as_dict[REGISTER] = task_name
        ansible_task_as_dict.update(additional_args)
        ansible_tasks.append(ansible_task_as_dict)
        ansible_tasks.extend(post_tasks)
        return ansible_tasks
Example no. 23
def get_structure_of_mapped_param(mapped_param,
                                  value,
                                  input_value=None,
                                  indivisible=False,
                                  if_list_type=False):
    if mapped_param is None:
        # NOTE The case when parameter was 'input_value'
        return [], None
    if input_value is None:
        input_value = value

    if isinstance(mapped_param, str):
        splitted_mapped_param = mapped_param.split(SEPARATOR)
        if splitted_mapped_param[-1] in INDIVISIBLE_KEYS:
            indivisible = True
        if not indivisible:
            if isinstance(value, list):
                r = []
                len_v = len(value)
                if len_v == 1:
                    param, _ = get_structure_of_mapped_param(mapped_param,
                                                             value[0],
                                                             input_value,
                                                             if_list_type=True)
                    return param, None

                for v in value:
                    if isinstance(v, str):
                        param, _ = get_structure_of_mapped_param(
                            SEPARATOR.join([mapped_param, v]), input_value,
                            input_value)
                    else:
                        param, _ = get_structure_of_mapped_param(
                            mapped_param, v, input_value)
                    r += param
                return r, None

            if isinstance(value, dict):
                r = dict()
                for k, v in value.items():
                    param, _ = get_structure_of_mapped_param(k, v, input_value)
                    for p in param:
                        r = deep_update_dict(r, p)
                r, _ = get_structure_of_mapped_param(mapped_param,
                                                     r,
                                                     input_value,
                                                     indivisible=True,
                                                     if_list_type=if_list_type)
                return r, None

        # NOTE: end of recursion
        structure = value
        if if_list_type:
            structure = [value]
        for i in range(len(splitted_mapped_param), 0, -1):
            structure = {splitted_mapped_param[i - 1]: structure}
        return [structure], None

    if isinstance(mapped_param, list):
        r = []
        for p in mapped_param:
            param, _ = get_structure_of_mapped_param(p, value, input_value)
            r += param
        return r, None

    if isinstance(mapped_param, dict):
        # NOTE: Assert number of keys! Always start of recursion?
        num_of_keys = len(mapped_param.keys())
        # if mapped_param.get(PARAMETER) and (num_of_keys == 2 or num_of_keys == 3 and KEYNAME in mapped_param.keys()):
        if num_of_keys == 1 or num_of_keys == 2 and KEYNAME in mapped_param.keys():
            for k, v in mapped_param.items():
                if k != PARAMETER and k != KEYNAME:
                    param, _ = get_structure_of_mapped_param(k, v, input_value)
                    if isinstance(param, tuple):
                        (param, keyname) = param
                        if mapped_param.get(KEYNAME):
                            mapped_param[KEYNAME] = keyname
                    return param, mapped_param.get(KEYNAME)
        else:
            # TODO find the cases
            assert False

    ExceptionCollector.appendException(
        ToscaParametersMappingFailed(what=mapped_param))
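A minimal usage sketch of the happy path through this function, assuming SEPARATOR is '.' and that 'name' is not listed in INDIVISIBLE_KEYS; the concrete key names are illustrative only:

# Hypothetical call: map a flat value onto a nested provider structure.
structure, keyname = get_structure_of_mapped_param('properties.flavor.name', 'm1.small')
# Expected result under the assumptions above:
# structure == [{'properties': {'flavor': {'name': 'm1.small'}}}]
# keyname   == None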
Esempio n. 24
0
def get_resulted_mapping_values(parameter, mapping_value, value, self):
    """
    Manage the case when mapping value has multiple structures in mapping_value
    :param parameter:
    :param mapping_value:
    :param value:
    :return:
    """
    mapping_value = copy.deepcopy(mapping_value)
    if isinstance(mapping_value, six.string_types):
        mapping_value = {PARAMETER: mapping_value, VALUE: "{self[value]}"}
    mapping_value_parameter = mapping_value.get(PARAMETER)
    mapping_value_value = mapping_value.get(VALUE)
    # NOTE: first check whether the parameter is a '{self[...]}' buffer reference
    if mapping_value_parameter and mapping_value_parameter[:6] == '{self[' and \
            mapping_value_parameter[-1] == '}':
        self[VALUE] = value
        self[PARAMETER] = parameter
        # The case when the value is written into the 'self' buffer;
        # the string may contain further 'self' references
        format_parameter = mapping_value_parameter[6:-2].format(self=self)
        params_parameter = format_parameter.split('][')
        if isinstance(mapping_value_value, dict):
            if mapping_value_value.get(VALUE) and mapping_value_value.get(EXECUTOR) == PYTHON_EXECUTOR and \
                    mapping_value_value.get(SOURCE):
                mapping_value_value = execute_function(
                    PYTHON_SOURCE_DIRECTORY, mapping_value_value[SOURCE],
                    {'self': self})
        iter_value = format_value(mapping_value_value, self)
        iter_num = len(params_parameter)
        for i in range(iter_num - 1, 0, -1):
            temp_param = dict()
            temp_param[params_parameter[i]] = iter_value
            iter_value = temp_param
        self = deep_update_dict(self, {params_parameter[0]: iter_value})
        return []
    elif mapping_value_parameter:
        splitted_mapping_value_parameter = mapping_value_parameter.split(
            SEPARATOR)
        has_section = False
        for v in splitted_mapping_value_parameter:
            if v in NODE_TEMPLATE_KEYS:
                has_section = True
                break
        if not has_section:
            mapping_value_value = mapping_value.get(VALUE)
            if isinstance(mapping_value_value,
                          list) and len(mapping_value_value) > 1:
                r = []
                for v in mapping_value_value:
                    mapping_value[VALUE] = v
                    item = get_resulted_mapping_values(parameter,
                                                       mapping_value, value,
                                                       self)
                    if isinstance(item, list):
                        if len(item) == 1:
                            item = [item]
                    else:
                        item = [item]
                    r.extend(item)
                return r
            if isinstance(mapping_value_value, six.string_types):
                splitted_mapping_value_value = mapping_value_value.split(
                    SEPARATOR)
                for i in range(len(splitted_mapping_value_value)):
                    if splitted_mapping_value_value[i] in NODE_TEMPLATE_KEYS:
                        # parameter_tag = SEPARATOR.join(splitted_mapping_value_value[:i+1])
                        # value_new = SEPARATOR.join(splitted_mapping_value_value[i-1:])
                        # mapping_value[PARAMETER] = mapping_value_parameter + SEPARATOR + parameter_tag
                        # mapping_value[VALUE] = value_new
                        mapping_value[
                            PARAMETER] = mapping_value_parameter + SEPARATOR + mapping_value_value
                        mapping_value[VALUE] = "{self[value]}"
                        return dict(parameter=parameter,
                                    map=mapping_value,
                                    value=value)
            if isinstance(mapping_value_value, dict):
                # NOTE: the only valid case is when the value is a parameter-value structure
                mapping_value_value_parameter = mapping_value_value.get(
                    PARAMETER)
                mapping_value_value_value = mapping_value_value.get(VALUE)
                if mapping_value_value_parameter and mapping_value_value_value:
                    mapping_value_value_keyname = mapping_value_value.get(
                        KEYNAME)
                    if mapping_value_value_keyname:
                        mapping_value[KEYNAME] = mapping_value_value_keyname
                    mapping_value[
                        PARAMETER] = mapping_value_parameter + SEPARATOR + mapping_value_value_parameter
                    mapping_value[VALUE] = mapping_value_value_value
                    r = get_resulted_mapping_values(parameter, mapping_value,
                                                    value, self)
                    return r

            ExceptionCollector.appendException(
                ToscaParametersMappingFailed(what=mapping_value))

    return dict(parameter=parameter, map=mapping_value, value=value)
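A hedged sketch of the simplest path through this function: a plain string mapping_value that already contains a node-template section is just normalized into a parameter/map/value triple. The constant values assumed here (PARAMETER == 'parameter', VALUE == 'value', SEPARATOR == '.', 'properties' listed in NODE_TEMPLATE_KEYS) and the example strings are illustrative only:

# Hypothetical call; names and keys are illustrative only.
buffer_self = {'buffer': {}, 'name': 'server_master'}
result = get_resulted_mapping_values('public_address',         # TOSCA parameter
                                     'properties.floating_ip',  # provider mapping
                                     '10.0.0.5',                # template value
                                     buffer_self)
# Expected result under the assumptions above:
# {'parameter': 'public_address',
#  'map': {'parameter': 'properties.floating_ip', 'value': '{self[value]}'},
#  'value': '10.0.0.5'}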
Esempio n. 25
0
    def get_ansible_tasks_from_interface(self,
                                         element_object,
                                         target_directory,
                                         is_delete,
                                         operation,
                                         cluster_name,
                                         additional_args=None):
        if additional_args is None:
            additional_args = {}
        else:
            additional_args_global = copy.deepcopy(
                additional_args.get('global', {}))
            additional_args_element = copy.deepcopy(
                additional_args.get(element_object.name, {}))
            additional_args = utils.deep_update_dict(additional_args_global,
                                                     additional_args_element)
        ansible_tasks = []
        scripts = []

        primary = False
        for interface_name, interface in self.get_interfaces_from_node(
                element_object).items():
            interface_operation = interface.get(operation, {})
            if isinstance(interface_operation, six.string_types):
                implementations = interface_operation
                primary = True
            else:
                implementations = interface_operation.get(IMPLEMENTATION)
            (_, element_type, _) = utils.tosca_type_parse(element_object.type)
            if (interface_name == 'Standard' and element_type == NODES
                    or interface_name == 'Configure' and element_type
                    == RELATIONSHIPS) and implementations is not None:
                if isinstance(implementations, six.string_types):
                    implementations = [implementations]
                if isinstance(implementations, dict) and 'primary' in implementations \
                        and isinstance(implementations['primary'], six.string_types):
                    implementations = [implementations['primary']]
                    primary = True
                scripts.extend(implementations)
                for script in implementations:
                    target_filename = os.path.join(utils.get_tmp_clouni_dir(),
                                                   cluster_name,
                                                   target_directory, script)
                    os.makedirs(os.path.dirname(target_filename),
                                exist_ok=True)
                    script_filename_1 = os.path.join(os.getcwd(), script)
                    script_filename_2 = os.path.join(
                        self.get_ansible_artifacts_directory(), script)
                    if os.path.isfile(script_filename_1):
                        copyfile(script_filename_1, target_filename)
                    elif os.path.isfile(script_filename_2):
                        copyfile(script_filename_2, target_filename)
                    else:
                        logging.error(
                            "Artifact filename %s was not found in %s or %s" %
                            (script, script_filename_1, script_filename_2))
                    if not primary and interface_operation.get(INPUTS) is not None:
                        for input_name, input_value in interface_operation[INPUTS].items():
                            ansible_tasks.append(
                                {SET_FACT: {
                                    input_name: input_value
                                }})
                    new_ansible_task = {IMPORT_TASKS_MODULE: target_filename}
                    new_ansible_task.update(additional_args)
                    ansible_tasks.append(new_ansible_task)
        return ansible_tasks
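A hedged usage sketch; the node object, directory and operation names below are assumptions about how the surrounding configuration tool class calls this helper, and IMPORT_TASKS_MODULE is assumed to resolve to Ansible's include_tasks module:

# Hypothetical invocation (node, directory and cluster names are examples only).
tasks = tool.get_ansible_tasks_from_interface(node,
                                              'artifacts',      # target_directory
                                              is_delete=False,
                                              operation='create',
                                              cluster_name='demo-cluster')
# Each returned item is an Ansible task dict, e.g.
# {'include_tasks': '/tmp/clouni/demo-cluster/artifacts/create_server.yaml'}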
Esempio n. 26
0
def get_structure_of_mapped_param(mapped_param, value, input_value=None, indivisible=False, if_list_type=False):
    if mapped_param is None:
        # NOTE The case when parameter was 'input_value'
        return [], None
    if input_value is None:
        input_value = value

    if isinstance(mapped_param, str):
        splitted_mapped_param = mapped_param.split(SEPARATOR)
        if splitted_mapped_param[-1] in INDIVISIBLE_KEYS:
            indivisible = True
        if not indivisible:
            if isinstance(value, list):
                r = []
                len_v = len(value)
                if len_v == 1:
                    param, _ = get_structure_of_mapped_param(mapped_param, value[0], input_value, if_list_type=True)
                    return param, None

                for v in value:
                    if isinstance(v, str):
                        # Error! Gets stuck in recursion if {get_property: []} is used
                        param, _ = get_structure_of_mapped_param(SEPARATOR.join([mapped_param, v]), input_value,
                                                                 input_value)
                    else:
                        param, _ = get_structure_of_mapped_param(mapped_param, v, input_value)
                    r += param
                return r, None

            if isinstance(value, dict):
                r = dict()
                for k, v in value.items():
                    # Error! Gets stuck in recursion if {get_property: []} is used
                    param, _ = get_structure_of_mapped_param(k, v, input_value)
                    for p in param:
                        r = utils.deep_update_dict(r, p)
                r, _ = get_structure_of_mapped_param(mapped_param, r, input_value, indivisible=True,
                                                     if_list_type=if_list_type)
                return r, None

        # NOTE: end of recursion
        structure = value
        if if_list_type:
            structure = [value]
        for i in range(len(splitted_mapped_param), 0, -1):
            structure = {
                splitted_mapped_param[i - 1]: structure
            }
        return [structure], None

    if isinstance(mapped_param, list):
        r = []
        for p in mapped_param:
            param, _ = get_structure_of_mapped_param(p, value, input_value)
            r += param
        return r, None

    if isinstance(mapped_param, dict):
        # NOTE: Assert number of keys! Always start of recursion?
        num_of_keys = len(mapped_param.keys())
        if num_of_keys == 1 or num_of_keys == 2 and KEYNAME in mapped_param.keys():
            for k, v in mapped_param.items():
                if k != PARAMETER and k != KEYNAME:
                    param, _ = get_structure_of_mapped_param(k, v, input_value)
                    if isinstance(param, tuple):
                        (param, keyname) = param
                        if mapped_param.get(KEYNAME):
                            mapped_param[KEYNAME] = keyname
                    return param, mapped_param.get(KEYNAME)
        else:
            # TODO find the cases
            assert False

    logging.critical("Unable to parse the following parameter: %s" % json.dumps(mapped_param))
    sys.exit(1)
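To complement the sketch after Esempio n. 23, a hedged illustration of the dict branch of this variant: the single non-KEYNAME key is expanded into a nested structure and the optional KEYNAME is returned alongside it. SEPARATOR is assumed to be '.', 'name' is assumed not to be in INDIVISIBLE_KEYS, and the key names are illustrative only:

# Hypothetical call illustrating the dict branch.
structure, keyname = get_structure_of_mapped_param(
    {'properties.name': 'master', KEYNAME: 'server_name'},  # mapped_param
    'master')                                               # value (kept as input_value)
# Expected result under the assumptions above:
# structure == [{'properties': {'name': 'master'}}]
# keyname   == 'server_name'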
Esempio n. 27
0
def translate(template_file,
              validate_only,
              provider,
              configuration_tool,
              cluster_name,
              is_delete=False,
              a_file=True,
              extra=None):
    """
    Main function, is called by different shells, i.e. bash, Ansible module, grpc
    :param template_file: filename of TOSCA template or TOSCA template data if a_file is False
    :param validate_only: boolean, if template should be only validated
    :param provider: key of cloud provider
    :param configuration_tool: key of configuration tool
    :param cluster_name: name to point to desired infrastructure as one component
    :param is_delete: generate dsl scripts for infrastructure deletion
    :param a_file: if template_file is filename
    :param extra: extra for template
    :return: string that is a script to deploy or delete infrastructure
    """

    config = Configuration()
    for sec in REQUIRED_CONFIGURATION_PARAMS:
        if sec not in config.get_section(config.MAIN_SECTION).keys():
            raise ProviderConfigurationParameterError(what=sec)

    if a_file:
        template_file = os.path.join(os.getcwd(), template_file)
        with open(template_file, 'r') as f:
            template_content = f.read()
    else:
        template_content = template_file
    template = yaml_parse(template_content)

    def_file = config.get_section(
        config.MAIN_SECTION).get(TOSCA_DEFINITION_FILE)

    default_import_file = os.path.join(utils.get_project_root_path(), def_file)

    # Add default import of normative TOSCA types to the template
    if not template.get(IMPORTS):
        template[IMPORTS] = [default_import_file]
    else:
        for i in range(len(template[IMPORTS])):
            template[IMPORTS][i] = os.path.abspath(template[IMPORTS][i])
        template[IMPORTS].append(default_import_file)
    tosca_parser_template_object = ToscaTemplate(yaml_dict_tpl=template,
                                                 a_file=a_file)

    if validate_only:
        msg = 'The input "%(template_file)s" successfully passed validation.' \
              % {'template_file': template_file if a_file else 'template'}
        return msg

    if not provider:
        ExceptionCollector.appendException(
            UnspecifiedParameter(what=('validate-only', 'provider')))

    # Parse and generate a new TOSCA service template containing only provider-specific TOSCA types derived from the normative types
    tosca = ProviderToscaTemplate(tosca_parser_template_object, provider,
                                  cluster_name)

    # Init configuration tool class
    tool = get_configuration_tool_class(configuration_tool)()

    default_artifacts_directory = config.get_section(
        config.MAIN_SECTION).get(DEFAULT_ARTIFACTS_DIRECTORY)

    # Copy used conditions from intermediate service template
    if tosca.used_conditions_set:
        tool.copy_conditions_to_the_directory(tosca.used_conditions_set,
                                              default_artifacts_directory)

    # Manage new artifacts for intermediate template
    tool_artifacts = []
    for art in tosca.artifacts:
        executor = art.get(EXECUTOR)
        if bool(executor) and executor != configuration_tool:
            art_list = [art]
            new_arts = generate_artifacts(art_list,
                                          default_artifacts_directory)
            tosca.artifacts.extend(new_arts)
        else:
            tool_artifacts.append(art)

    if not extra:
        extra = {}
    extra_full = utils.deep_update_dict(
        extra,
        tosca.extra_configuration_tool_params.get(configuration_tool, {}))

    configuration_content = tool.to_dsl(tosca.provider,
                                        tosca.provider_nodes_queue,
                                        tosca.cluster_name,
                                        is_delete,
                                        tool_artifacts,
                                        default_artifacts_directory,
                                        inputs=tosca.inputs,
                                        outputs=tosca.outputs,
                                        extra=extra_full)

    return configuration_content
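Finally, a hedged sketch of calling translate from Python; the template filename and the provider/configuration tool keys below are illustrative and depend on what a given installation actually supports:

# Hypothetical call; file name, provider and tool keys are examples only.
playbook = translate('examples/tosca-server-example.yaml',
                     validate_only=False,
                     provider='openstack',
                     configuration_tool='ansible',
                     cluster_name='demo')
print(playbook)  # generated deployment script returned by the configuration tool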