Example No. 1
def execute(new_global_elements_map_total_implementation, is_delete, cluster_name, target_parameter=None):
    if not is_delete:
        default_executor = 'ansible'
        configuration_class = get_configuration_tool_class(default_executor)()
        new_ansible_artifacts = copy.deepcopy(new_global_elements_map_total_implementation)
        for i in range(len(new_ansible_artifacts)):
            new_ansible_artifacts[i]['configuration_tool'] = new_ansible_artifacts[i]['executor']
            configuration_class = get_configuration_tool_class(new_ansible_artifacts[i]['configuration_tool'])()
            extension = configuration_class.get_artifact_extension()

            # randomize the artifact name to avoid collisions between runs
            seed(time())
            new_ansible_artifacts[i]['name'] = '_'.join(
                [SOURCE, str(randint(ARTIFACT_RANGE_START, ARTIFACT_RANGE_END))]) + extension
        artifacts_with_brackets = utils.replace_brackets(new_ansible_artifacts, False)
        new_ansible_tasks, filename = utils.generate_artifacts(configuration_class, artifacts_with_brackets,
                                                                   configuration_class.initial_artifacts_directory,
                                                                   store=False)
        os.remove(filename)

        q = Queue()
        playbook = {
            'hosts': 'localhost',
            'tasks': new_ansible_tasks
        }

        os.makedirs(os.path.join(utils.get_tmp_clouni_dir(), cluster_name, configuration_class.initial_artifacts_directory), exist_ok=True)
        copy_tree(utils.get_project_root_path() + '/toscatranslator/configuration_tools/ansible/artifacts',
                  os.path.join(utils.get_tmp_clouni_dir(), cluster_name, configuration_class.initial_artifacts_directory))
        configuration_class.parallel_run([playbook], 'artifacts', 'artifacts', q, cluster_name)

        # Note: once the cotea runner has been launched in the current process, all subsequent
        # launches of other playbooks from this process are impossible, because the initial
        # playbook would be launched again. Even if the process is forked, the effect remains:
        # something inside cotea is preserved in the process context.
        results = q.get()
        if target_parameter is not None:
            value = 'not_found'
            if_failed = False
            for result in results:
                if result.is_failed or result.is_unreachable:
                    logging.error("Task %s has failed because of exception: \n%s" %
                                    (result.task_name, result.result.get('exception', '(Unknown reason)')))
                    if_failed = True
                task_results = result.result.get('results', [])
                if task_results and 'ansible_facts' in task_results[0] and \
                        'matched_object' in task_results[0]['ansible_facts']:
                    value = task_results[0]['ansible_facts']['matched_object'][target_parameter.split('.')[-1]]
            if if_failed:
                value = 'not_found'
            return value
    return None
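
Note: the comment above explains why parallel_run hands the playbook to a child process: the cotea runner leaves state behind in whichever process launched it. A minimal sketch of that process-isolation pattern, using only the standard library (run_playbook below is a hypothetical stand-in for the runner call made inside parallel_run):

from multiprocessing import Process, Queue

def _worker(playbook, q):
    # hypothetical stand-in for the cotea runner call; running it in a child
    # process keeps cotea's process-level state out of the parent
    results = run_playbook(playbook)
    q.put(results)

def run_isolated(playbook):
    q = Queue()
    p = Process(target=_worker, args=(playbook, q))
    p.start()
    results = q.get()  # blocks until the child publishes its results
    p.join()
    return results
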
Example No. 2
    def check_tasks_success(self, tasks):
        correct = True

        with open(os.path.join(utils.get_tmp_clouni_dir(), SUCCESS_CHECK_FILE),
                  "r") as check:
            succ_tasks = yaml.load(check, Loader=Loader)
            self.assertEqual(len(tasks), len(succ_tasks))
            for task in tasks:
                if task not in succ_tasks:
                    correct = False
        self.assertTrue(correct)
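
Note: SUCCESS_CHECK_FILE is loaded as a YAML list of task dictionaries, so the check passes only when tasks has the same length as the file's list and every task appears in it. A self-contained illustration of the same comparison, with made-up task contents:

import yaml

succ_tasks = yaml.safe_load("""
- shell: echo hello
- file: {path: /tmp/x, state: touch}
""")
tasks = [{'shell': 'echo hello'}, {'file': {'path': '/tmp/x', 'state': 'touch'}}]

assert len(tasks) == len(succ_tasks)
assert all(task in succ_tasks for task in tasks)
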
Example No. 3
def run_ansible(ansible_playbook, cluster_name):
    """

    :param ansible_playbook: dict which is equal to Ansible playbook in YAML
    :param cluster_name: name of cluster
    :return: empty
    """
    random_id = get_random_int(1000, 9999)
    os.makedirs(utils.get_tmp_clouni_dir(), exist_ok=True)
    os.makedirs(os.path.join(utils.get_tmp_clouni_dir(), cluster_name), exist_ok=True)
    tmp_current_dir = os.path.join(utils.get_tmp_clouni_dir(), cluster_name)
    playbook_path = os.path.join(tmp_current_dir, str(random_id) + '_ansible_playbook.yaml')
    successful_tasks_path = os.path.join(utils.get_tmp_clouni_dir(), 'successful_tasks.yaml')
    with open(playbook_path, 'w') as playbook_file:
        playbook_file.write(yaml.dump(ansible_playbook, default_flow_style=False, sort_keys=False))
        logging.info("Running ansible playbook from: %s" % playbook_path)

    am = argument_maker()
    am.add_arg("-i", os.path.join(tmp_current_dir, 'hosts.ini'))

    r = runner(playbook_path, am)
    results = []
    with open(successful_tasks_path, "a") as successful_tasks_file:
        while r.has_next_play():
            current_play = r.get_cur_play_name()

            while r.has_next_task():
                task_results = r.run_next_task()
                results.extend(task_results)
                succ_task = r.get_prev_task()
                # record the task as successful only if none of its statuses failed or was unreachable
                task_succeeded = True
                for status in r.get_last_task_result():
                    if status.is_unreachable or status.is_failed:
                        task_succeeded = False
                        break
                if task_succeeded and succ_task is not None and succ_task.get_ds() is not None:
                    if 'meta' not in succ_task.get_ds():
                        d = str(succ_task.get_ds())
                        successful_tasks_file.write(
                            yaml.dump([ast.literal_eval(d)], default_flow_style=False, sort_keys=False))

    r.finish_ansible()
    return results
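
Note: a minimal usage sketch for run_ansible, assuming the hosts.ini inventory for the cluster already exists in the tmp clouni directory (the play itself is made up); the result objects expose the same task_name, is_failed and is_unreachable attributes used in Example No. 1:

playbook = [{
    'name': 'example play',
    'hosts': 'localhost',
    'tasks': [{'shell': 'echo hello'}],
}]
results = run_ansible(playbook, 'my_cluster')
for res in results:
    if res.is_failed or res.is_unreachable:
        logging.error("Task %s failed" % res.task_name)
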
Example No. 4
def prepare_for_run():
    successful_tasks_path = os.path.join(utils.get_tmp_clouni_dir(), 'successful_tasks.yaml')
    if os.path.isfile(successful_tasks_path):
        os.remove(successful_tasks_path)
Example No. 5
    def get_ansible_tasks_from_operation(self,
                                         op_name,
                                         target_directory,
                                         cluster_name,
                                         if_required=False):
        tasks = []

        op_info = self.global_operations_info[op_name]
        if (not if_required and not op_info.get(OUTPUT_IDS)) or not op_info.get(IMPLEMENTATION):
            return []

        import_task_arg = op_info[IMPLEMENTATION]
        if not isinstance(import_task_arg, list):
            import_task_arg = [import_task_arg]

        if op_info.get(INPUTS):
            for k, v in op_info[INPUTS].items():
                arg_v = v

                if isinstance(v, (dict, list, set, tuple)):
                    arg_v = '_'.join([
                        k,
                        str(
                            utils.get_random_int(OUTPUT_ID_RANGE_START,
                                                 OUTPUT_ID_RANGE_END))
                    ])
                    new_task = {SET_FACT: {arg_v: v}}
                    tasks.append(new_task)
                    arg_v = self.rap_ansible_variable(arg_v)
                arg_k = k
                new_task = {SET_FACT: {arg_k: arg_v}}
                tasks.append(new_task)
        for art in import_task_arg:
            if self.artifacts.get(art):
                art_data = self.artifacts[art]
                new_tasks = self.create_artifact_data(art_data)
                tasks.extend(new_tasks)
            else:
                target_filename = os.path.join(utils.get_tmp_clouni_dir(),
                                               target_directory, cluster_name,
                                               art)
                art_filename_1 = os.path.join(os.getcwd(), art)
                art_filename_2 = os.path.join(
                    self.get_ansible_artifacts_directory(), art)
                if os.path.isfile(art_filename_1):
                    copyfile(art_filename_1, target_filename)
                elif os.path.isfile(art_filename_2):
                    copyfile(art_filename_2, target_filename)
                else:
                    logging.error(
                        "Artifact filename %s was not found in %s or %s" %
                        (art, art_filename_1, art_filename_2))
                new_task = {
                    IMPORT_TASKS_MODULE: os.path.join(target_directory, art)
                }
                tasks.append(new_task)
        if op_info.get(OUTPUT_IDS):
            for k, v in op_info[OUTPUT_IDS].items():
                new_task = {SET_FACT: {v: self.rap_ansible_variable(k)}}
                tasks.append(new_task)
        return tasks
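
Note: the INPUTS handling above wraps every non-scalar input value in an intermediate, randomly named fact, so the final set_fact task can reference it as an Ansible variable. A standalone sketch of just that transformation (the inline '{{ ... }}' wrapping stands in for rap_ansible_variable, and the constant value is an assumption):

from random import randint

SET_FACT = 'set_fact'  # assumed value of the SET_FACT constant used above

def tasks_for_input(k, v):
    tasks = []
    arg_v = v
    if isinstance(v, (dict, list, set, tuple)):
        # store the structured value under a unique intermediate name first
        arg_v = '%s_%d' % (k, randint(1000, 9999))
        tasks.append({SET_FACT: {arg_v: v}})
        arg_v = '{{ %s }}' % arg_v  # reference it as an Ansible variable
    tasks.append({SET_FACT: {k: arg_v}})
    return tasks

# tasks_for_input('ports', [22, 80]) yields two set_fact tasks: one defining
# e.g. ports_4242 = [22, 80], one setting ports to "{{ ports_4242 }}"
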
Example No. 6
    def to_dsl(self,
               provider,
               operations_graph,
               reversed_operations_graph,
               cluster_name,
               is_delete,
               artifacts=None,
               target_directory=None,
               inputs=None,
               outputs=None,
               extra=None,
               debug=False):
        if artifacts is None:
            artifacts = []
        if target_directory is None:
            target_directory = self.initial_artifacts_directory

        self.artifacts = {}
        for art in artifacts:
            self.artifacts[art[NAME]] = art

        provider_config = ProviderConfiguration(provider)
        ansible_config = provider_config.get_section(ANSIBLE)
        node_filter_config = provider_config.get_subsection(
            ANSIBLE, NODE_FILTER)

        ids_file_path = os.getcwd() + '/id_vars_' + cluster_name + self.get_artifact_extension()

        self.init_global_variables(inputs)

        operations_graph = self.init_graph(operations_graph)
        # at this point the operations graph is a dictionary of copies of ProviderTemplate objects,
        # of the form Node/Relationship: {the set of operations of Nodes/Relationships it depends on}
        elements = TopologicalSorter(operations_graph)
        # TopologicalSorter yields the operations in dependency order

        if is_delete:
            reversed_operations_graph = self.init_graph(
                reversed_operations_graph)
            elements = TopologicalSorter(reversed_operations_graph)

        elements.prepare()
        # the operations at the top of the graph are now in the 'ready' state

        ansible_playbook = []
        if not debug:
            self.prepare_for_run()
        # initializes the tmp clouni directory for this run
        q = Queue()
        # queue for node names + operations
        active = []
        # list of parallel active operations
        first = True

        while elements.is_active():
            node_name = None
            # try to get a newly finished operation from the queue and find it in the active list;
            # if found, remove it from the active list and mark it done in the graph;
            # each operation that becomes ready is then executed and added to the active list
            try:
                node_name = q.get_nowait()
            except Exception:
                # the queue is empty: no operation has finished yet, so wait and poll again
                time.sleep(1)
            if node_name is not None:
                finished = node_name.split(SEPARATOR)
                for node in active:
                    if node.name == finished[0] and node.operation == finished[1]:
                        active.remove(node)
                        elements.done(node)
            for v in elements.get_ready():
                # in delete mode we skip all operations except delete; a create operation is transformed into delete
                if is_delete:
                    if v.operation == 'create':
                        v.operation = 'delete'
                    else:
                        elements.done(v)
                        continue
                logging.debug("Creating ansible play from operation: %s" %
                              v.name + ':' + v.operation)
                extra_tasks_for_delete = self.get_extra_tasks_for_delete(
                    v.type, v.name.replace('-', '_'), ids_file_path)
                description_prefix, module_prefix = self.get_module_prefixes(
                    is_delete, ansible_config)
                description_by_type = self.ansible_description_by_type(
                    v.type_name, description_prefix)
                module_by_type = self.ansible_module_by_type(
                    v.type_name, module_prefix)
                ansible_play_for_elem = dict(name=description_prefix + ' ' +
                                             provider + ' cluster: ' + v.name +
                                             ':' + v.operation,
                                             hosts=self.default_host,
                                             tasks=[])
                # reload id_vars file
                if not is_delete and first:
                    first = False
                    ansible_play_for_elem['tasks'].append(
                        copy.deepcopy(
                            {FILE: {
                                PATH: ids_file_path,
                                STATE: 'absent'
                            }}))
                    ansible_play_for_elem['tasks'].append(
                        copy.deepcopy(
                            {FILE: {
                                PATH: ids_file_path,
                                STATE: 'touch'
                            }}))
                # create playbook for every operation
                if v.operation == 'delete':
                    if not v.is_software_component:
                        ansible_play_for_elem['tasks'].append(
                            copy.deepcopy({'include_vars': ids_file_path}))
                        ansible_tasks = self.get_ansible_tasks_for_delete(
                            v,
                            description_by_type,
                            module_by_type,
                            additional_args=extra)
                        ansible_tasks.extend(
                            self.get_ansible_tasks_from_interface(
                                v,
                                target_directory,
                                is_delete,
                                v.operation,
                                cluster_name,
                                additional_args=extra))
                        if not any(item == module_by_type
                                   for item in ansible_config.get(
                                       'modules_skipping_delete', [])):
                            ansible_play_for_elem['tasks'].extend(
                                copy.deepcopy(ansible_tasks))
                elif v.operation == 'create':
                    if not v.is_software_component:
                        ansible_play_for_elem['tasks'].extend(
                            copy.deepcopy(
                                self.get_ansible_tasks_for_inputs(inputs)))
                        ansible_tasks = self.get_ansible_tasks_for_create(
                            v,
                            target_directory,
                            node_filter_config,
                            description_by_type,
                            module_by_type,
                            additional_args=extra)
                        ansible_tasks.extend(
                            self.get_ansible_tasks_from_interface(
                                v,
                                target_directory,
                                is_delete,
                                v.operation,
                                cluster_name,
                                additional_args=extra))

                        ansible_play_for_elem['tasks'].extend(
                            copy.deepcopy(ansible_tasks))
                        ansible_play_for_elem['tasks'].extend(
                            copy.deepcopy(extra_tasks_for_delete))

                    else:
                        ansible_play_for_elem['hosts'] = v.host
                        ansible_play_for_elem['tasks'].extend(
                            copy.deepcopy(
                                self.get_ansible_tasks_from_interface(
                                    v,
                                    target_directory,
                                    is_delete,
                                    v.operation,
                                    cluster_name,
                                    additional_args=extra)))
                else:
                    (_, element_type, _) = utils.tosca_type_parse(v.type)
                    if element_type == NODES:
                        if v.is_software_component:
                            ansible_play_for_elem['hosts'] = v.host
                    # relationship operations execute on the target or the source host, depending on the operation
                    elif element_type == RELATIONSHIPS:
                        if v.operation == 'pre_configure_target' or v.operation == 'post_configure_target' or v.operation == 'add_source':
                            for elem in operations_graph:
                                if elem.name == v.target:
                                    if elem.is_software_component:
                                        ansible_play_for_elem[
                                            'hosts'] = elem.host
                                    break
                        elif v.operation == 'pre_configure_source' or v.operation == 'post_configure_source':
                            for elem in operations_graph:
                                if elem.name == v.source:
                                    if elem.is_software_component:
                                        ansible_play_for_elem[
                                            'hosts'] = elem.host
                                    break
                        else:
                            logging.error(
                                "Unsupported operation for relationship in operation graph"
                            )
                            sys.exit(1)
                    else:
                        logging.error(
                            "Unsupported element type in operation graph")
                        sys.exit(1)
                    ansible_play_for_elem['tasks'].extend(
                        copy.deepcopy(
                            self.get_ansible_tasks_from_interface(
                                v,
                                target_directory,
                                is_delete,
                                v.operation,
                                cluster_name,
                                additional_args=extra)))
                ansible_playbook.append(ansible_play_for_elem)
                # run playbooks
                if not debug:
                    self.parallel_run([ansible_play_for_elem], v.name,
                                      v.operation, q, cluster_name)
                    # add element to active list
                    active.append(v)
                else:
                    elements.done(v)
        if is_delete:
            last_play = dict(name='Renew id_vars_example.yaml',
                             hosts=self.default_host,
                             tasks=[])
            last_play['tasks'].append(
                copy.deepcopy({FILE: {
                    PATH: ids_file_path,
                    STATE: 'absent'
                }}))
            if not debug:
                self.parallel_run([last_play], None, None, q, cluster_name)
                done = q.get()
                if done != 'Done':
                    logging.error("Something wrong with multiprocessing queue")
                    sys.exit(1)
            ansible_playbook.append(last_play)
        # delete dir with cluster_name in tmp clouni dir
        if not debug:
            rmtree(os.path.join(utils.get_tmp_clouni_dir(), cluster_name))
        return yaml.dump(ansible_playbook,
                         default_flow_style=False,
                         sort_keys=False)
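
Note: the scheduling loop above is the standard TopologicalSorter protocol from graphlib (Python 3.9+): prepare(), then alternate get_ready() and done() while is_active(). A minimal sequential sketch of the same loop, with plain strings in place of operation objects:

from graphlib import TopologicalSorter

# maps each node to the set of nodes it depends on
graph = {'network': set(), 'server': {'network'}, 'software': {'server'}}

ts = TopologicalSorter(graph)
ts.prepare()
while ts.is_active():
    for node in ts.get_ready():
        print('running', node)  # to_dsl builds and launches a play here
        ts.done(node)           # to_dsl calls done() once the queue reports completion
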
Example No. 7
    def get_ansible_tasks_from_interface(self,
                                         element_object,
                                         target_directory,
                                         is_delete,
                                         operation,
                                         cluster_name,
                                         additional_args=None):
        if additional_args is None:
            additional_args = {}
        else:
            additional_args_global = copy.deepcopy(
                additional_args.get('global', {}))
            additional_args_element = copy.deepcopy(
                additional_args.get(element_object.name, {}))
            additional_args = utils.deep_update_dict(additional_args_global,
                                                     additional_args_element)
        ansible_tasks = []
        scripts = []

        primary = False
        for interface_name, interface in self.get_interfaces_from_node(
                element_object).items():
            interface_operation = interface.get(operation, {})
            if isinstance(interface_operation, six.string_types):
                implementations = interface_operation
                primary = True
            else:
                implementations = interface_operation.get(IMPLEMENTATION)
            (_, element_type, _) = utils.tosca_type_parse(element_object.type)
            if (interface_name == 'Standard' and element_type == NODES
                    or interface_name == 'Configure' and element_type
                    == RELATIONSHIPS) and implementations is not None:
                if isinstance(implementations, six.string_types):
                    implementations = [implementations]
                if isinstance(
                        implementations,
                        dict) and 'primary' in implementations and isinstance(
                            implementations['primary'], six.string_types):
                    implementations = [implementations['primary']]
                    primary = True
                scripts.extend(implementations)
                for script in implementations:
                    target_filename = os.path.join(utils.get_tmp_clouni_dir(),
                                                   cluster_name,
                                                   target_directory, script)
                    os.makedirs(os.path.dirname(target_filename),
                                exist_ok=True)
                    script_filename_1 = os.path.join(os.getcwd(), script)
                    script_filename_2 = os.path.join(
                        self.get_ansible_artifacts_directory(), script)
                    if os.path.isfile(script_filename_1):
                        copyfile(script_filename_1, target_filename)
                    elif os.path.isfile(script_filename_2):
                        copyfile(script_filename_2, target_filename)
                    else:
                        logging.error(
                            "Artifact filename %s was not found in %s or %s" %
                            (script, script_filename_1, script_filename_2))
                    if not primary and interface_operation.get(
                            INPUTS) is not None:
                        for input_name, input_value in interface_operation[
                                INPUTS].items():
                            ansible_tasks.append(
                                {SET_FACT: {
                                    input_name: input_value
                                }})
                    new_ansible_task = {IMPORT_TASKS_MODULE: target_filename}
                    new_ansible_task.update(additional_args)
                    ansible_tasks.append(new_ansible_task)
        return ansible_tasks
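
Note: the branching above has to cope with the several shapes a TOSCA operation can take: a bare string (a primary implementation), a dict whose implementation is a string, a list of scripts, or a dict with a 'primary' script. A condensed sketch of just that normalization, using plain str checks in place of six.string_types (the helper name and the 'implementation' key spelling are assumptions):

def normalize_implementations(interface_operation):
    if isinstance(interface_operation, str):
        return [interface_operation], True  # a bare string means a primary implementation
    impl = interface_operation.get('implementation')
    if isinstance(impl, str):
        return [impl], False
    if isinstance(impl, dict) and isinstance(impl.get('primary'), str):
        return [impl['primary']], True
    return list(impl or []), False
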