Example No. 1
    def run(self):
        workdir = get_workdir()
        create_artifact_dir()

        commands = self._get_commands()

        for filename, content, name, path, dictionary, raw_node, artifacts in commands:
            if self._are_all_artifacts_available(artifacts):
                logging.info(('''Artifacts for command '{}' are already there. '''
                              '''Delete them to regenerate them.'''
                              ).format(name))
            else:
                with tempfile.TemporaryDirectory(dir=workdir) as tmpdir:
                    chown_to_user(tmpdir)
                    require_root = raw_node.get('require_root', False)

                    logging.info(("Running command {} located in "
                                  "{} with dictionary:\n{}"
                                  ).format(name, path,
                                           yaml.dump(remove_passwords(dictionary),
                                                     default_flow_style=False)))

                    command_file = self._flush_command_file(tmpdir, filename, content)
                    self._run_command(command_file, require_root)
                    self._post_process_artifacts(name, artifacts)

        return self._result(commands)
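
Every example on this page passes its dictionary through remove_passwords before logging it. That helper is not part of the listings; the function below is a purely illustrative sketch of what it might do, assuming it masks the values of keys that look like credentials. The real helper may behave differently.

import yaml

def remove_passwords(node):
    # Illustrative only: walk the structure and mask any value whose key
    # looks like a credential so that secrets never end up in log output.
    sensitive = ('password', 'passwd', 'secret', 'token')
    if isinstance(node, dict):
        masked = {}
        for key, value in node.items():
            if isinstance(key, str) and any(word in key.lower() for word in sensitive):
                masked[key] = '********'
            else:
                masked[key] = remove_passwords(value)
        return masked
    if isinstance(node, list):
        return [remove_passwords(item) for item in node]
    return node

print(yaml.dump(remove_passwords({'user': 'edi', 'login_password': 'hunter2'}),
                default_flow_style=False))
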
Example No. 2
    def run_all(self):
        workdir = self.config.get_workdir()

        applied_playbooks = []
        with tempfile.TemporaryDirectory(dir=workdir) as tempdir:
            chown_to_user(tempdir)
            inventory = self._write_inventory_file(tempdir)

            playbook_list = self.config.get_ordered_path_items("playbooks")
            for name, path, extra_vars in playbook_list:
                sfc = SharedFolderCoordinator(self.config)
                extra_vars['edi_shared_folder_mountpoints'] = sfc.get_mountpoints()
                logging.info(("Running playbook {} located in "
                              "{} with extra vars:\n{}"
                              ).format(name, path,
                                       yaml.dump(remove_passwords(extra_vars),
                                                 default_flow_style=False)))

                extra_vars_file = os.path.join(
                    tempdir, "extra_vars_{}".format(name))
                with open(extra_vars_file, encoding='utf-8', mode='w') as f:
                    f.write(yaml.dump(extra_vars))

                ansible_user = extra_vars.get("edi_config_management_user_name")
                self._run_playbook(path, inventory, extra_vars_file, ansible_user)
                applied_playbooks.append(name)

        return applied_playbooks
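
All of the run_all variants follow the same pattern: create a temporary directory below the configured work directory, serialize the extra variables of each playbook into a YAML file there, and hand that file to the playbook run before the directory is cleaned up. Stripped of chown_to_user and the edi configuration objects (which are not shown on this page), the pattern looks like this:

import os
import tempfile

import yaml

def write_extra_vars(workdir, name, extra_vars):
    # Create a throwaway directory below the work directory and dump the
    # extra variables into a per-playbook YAML file, as run_all does above.
    with tempfile.TemporaryDirectory(dir=workdir) as tempdir:
        extra_vars_file = os.path.join(tempdir, 'extra_vars_{}'.format(name))
        with open(extra_vars_file, encoding='utf-8', mode='w') as f:
            f.write(yaml.dump(extra_vars))
        # The file only exists while the context manager is open; the real
        # code runs the playbook here, before the cleanup happens.
        with open(extra_vars_file, encoding='utf-8') as f:
            print(f.read())

write_extra_vars(tempfile.gettempdir(), 'base_system',
                 {'edi_config_management_user_name': 'edi'})
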
Example No. 3
    def run_all(self):
        workdir = get_workdir()

        applied_playbooks = []
        with tempfile.TemporaryDirectory(dir=workdir) as tempdir:
            chown_to_user(tempdir)
            inventory = self._write_inventory_file(tempdir)

            for name, path, extra_vars in self._get_playbooks():
                logging.info(("Running playbook {} located in "
                              "{} with extra vars:\n{}"
                              ).format(name, path,
                                       yaml.dump(remove_passwords(extra_vars),
                                                 default_flow_style=False)))

                extra_vars_file = os.path.join(
                    tempdir, "extra_vars_{}".format(name))
                with open(extra_vars_file, encoding='utf-8', mode='w') as f:
                    f.write(yaml.dump(extra_vars))

                ansible_user = extra_vars.get("edi_config_management_user_name")
                self._run_playbook(path, inventory, extra_vars_file, ansible_user)
                applied_playbooks.append(name)

        return applied_playbooks
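
_run_playbook itself is not included in these listings. One plausible way such a helper could consume the generated inventory and extra-vars files is to call ansible-playbook with its standard options; the sketch below is an assumption about the mechanism, not the project's actual implementation. The flags used (-i for the inventory, --extra-vars with an @file reference, --user) are regular ansible-playbook options.

import subprocess

def run_playbook(playbook, inventory, extra_vars_file, ansible_user=None):
    # --extra-vars accepts "@<file>" to load variables from a YAML or JSON
    # file, which matches the extra_vars_<name> files written above.
    command = ['ansible-playbook', '-i', inventory,
               '--extra-vars', '@{}'.format(extra_vars_file), playbook]
    if ansible_user:
        command += ['--user', ansible_user]
    subprocess.run(command, check=True)
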
Example No. 4
    def _run(self):
        profile_name_list = []

        for profile, name, path, dictionary in self._get_profiles(
                self.include_post_config_profiles):
            logging.info(("Creating profile {} located in "
                          "{} with dictionary:\n{}").format(
                              name, path,
                              yaml.dump(remove_passwords(dictionary),
                                        default_flow_style=False)))

            full_name, new_profile = write_lxc_profile(profile)
            if new_profile:
                print_success("Created lxc profile {}.".format(full_name))
            profile_name_list.append(full_name)

        print_success('The following profiles are now available: {}'.format(
            ', '.join(profile_name_list)))
        return profile_name_list
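
write_lxc_profile returns the final profile name together with a flag that tells whether the profile had to be created. The helper itself is not listed on this page; the sketch below is hypothetical and only illustrates that contract, assuming the name is derived from a hash of the profile text so that repeated runs recognize an already existing profile.

import hashlib

_existing_profiles = set()  # stand-in for the profiles already known to the backend

def write_lxc_profile(profile_text):
    # Hypothetical: derive a stable name from the profile content so that
    # the same text always maps to the same profile name.
    digest = hashlib.sha256(profile_text.encode('utf-8')).hexdigest()[:20]
    full_name = 'edi-{}'.format(digest)
    new_profile = full_name not in _existing_profiles
    if new_profile:
        # The real helper would register the profile with LXD at this point.
        _existing_profiles.add(full_name)
    return full_name, new_profile
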
Example No. 5
    def run(self,
            config_file,
            include_post_config_profiles=False,
            introspection_method=None):
        self._setup_parser(config_file)

        if introspection_method:
            print(introspection_method())
            return []

        profile_list = self.config.get_ordered_path_items("lxc_profiles")
        profile_name_list = []
        for name, path, dictionary in profile_list:
            logging.info(("Creating profile {} located in "
                          "{} with dictionary:\n{}").format(
                              name, path,
                              yaml.dump(remove_passwords(dictionary),
                                        default_flow_style=False)))

            with open(path, encoding="UTF-8", mode="r") as profile_file:
                profile = Template(profile_file.read())
                profile_text = profile.render(dictionary)
                name, new_profile = write_lxc_profile(profile_text)
                if new_profile:
                    print_success("Created lxc profile {}.".format(name))
                profile_name_list.append(name)

        sfc = SharedFolderCoordinator(self.config)
        if include_post_config_profiles:
            sfc_profiles = sfc.get_post_config_profiles()
        else:
            sfc_profiles = sfc.get_pre_config_profiles()

        for profile in sfc_profiles:
            name, new_profile = write_lxc_profile(profile)
            if new_profile:
                print_success("Created lxc profile {}.".format(name))
            profile_name_list.append(name)

        print_success('The following profiles are now available: {}'.format(
            ', '.join(profile_name_list)))
        return profile_name_list
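
The profile files are templates that get rendered with the dictionary taken from the configuration. The import of Template is not shown in the listing; assuming it is Jinja2's Template (which matches the render(dictionary) call), the rendering step in isolation looks like this, with a made-up profile as input:

from jinja2 import Template

profile_template = """\
name: {{ edi_profile_name }}
config:
  security.privileged: "{{ edi_privileged }}"
"""

dictionary = {'edi_profile_name': 'example_privileged', 'edi_privileged': 'true'}

# Substitute the dictionary values into the template placeholders.
profile_text = Template(profile_template).render(dictionary)
print(profile_text)
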
Example No. 6
    def _write_container_metadata(self, imagedir):
        metadata = {}
        # we build this container for the host architecture
        # (QEMU makes sure that the binaries of a foreign architecture can also run)
        metadata["architecture"] = get_debian_architecture()
        metadata["creation_date"] = calendar.timegm(time.gmtime())

        template_node = {}
        template_list = self.config.get_ordered_path_items("lxc_templates")

        if template_list:
            templates_dest = os.path.join(imagedir, "templates")
            os.mkdir(templates_dest)

        for name, path, dictionary in template_list:
            logging.info(("Loading template {} located in "
                          "{} with dictionary:\n{}").format(
                              name, path,
                              yaml.dump(remove_passwords(dictionary),
                                        default_flow_style=False)))

            with open(path, encoding="UTF-8", mode="r") as template_file:
                template = Template(template_file.read())
                sub_node = yaml.safe_load(template.render(dictionary))

            template_node = dict(template_node, **sub_node)

            templates_src = os.path.dirname(path)

            tpl_files = glob.iglob(os.path.join(templates_src, "*.tpl"))
            for tpl_file in tpl_files:
                if os.path.isfile(tpl_file):
                    shutil.copy(tpl_file, templates_dest)

        if template_node:
            metadata["templates"] = template_node

        metadatafile = os.path.join(imagedir, "metadata.yaml")

        with open(metadatafile, encoding='utf-8', mode='w') as f:
            f.write(yaml.dump(metadata))
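
Each rendered template contributes a small YAML fragment, and the fragments are folded into one templates node with dict(template_node, **sub_node). That idiom builds a new dictionary from template_node and then applies sub_node on top of it, so on a key conflict the fragment loaded later wins:

template_node = {'hostname': {'template': 'hostname.tpl'}}
sub_node = {'hosts': {'template': 'hosts.tpl'},
            'hostname': {'template': 'hostname-override.tpl'}}

# Entries from sub_node replace entries with the same key in template_node.
template_node = dict(template_node, **sub_node)
print(template_node)
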
Example No. 7
    def run_all(self):
        self.fetch_artifact_setup()

        workdir = get_workdir()
        applied_documentation_steps = []
        with tempfile.TemporaryDirectory(dir=workdir) as tempdir:
            temp_output_file_paths = set()
            documentation_steps = self._get_documentation_steps()
            for name, path, parameters, raw_node in documentation_steps:
                output_file = self._get_output_file(name, raw_node)
                temp_output_path = os.path.join(tempdir, output_file)
                temp_output_file_paths.add(temp_output_path)
                with open(temp_output_path, encoding="UTF-8",
                          mode="a") as output:
                    augmented_parameters = self.augment_step_parameters(
                        parameters)

                    logging.info(
                        ("Running documentation step {} located in "
                         "{} with parameters:\n{}\n"
                         "Writing output to {}.").format(
                             name, path,
                             yaml.dump(remove_passwords(augmented_parameters),
                                       default_flow_style=False),
                             os.path.join(self.rendered_output, output_file)))

                    self._run_documentation_step(path, augmented_parameters,
                                                 output)
                    applied_documentation_steps.append(name)

            for temp_output_file_path in temp_output_file_paths:
                shutil.move(
                    temp_output_file_path,
                    os.path.join(self.rendered_output,
                                 os.path.basename(temp_output_file_path)))

        return applied_documentation_steps
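
The documentation steps append their output to files inside a temporary directory and only move the finished files into rendered_output once every step has run, so a failing step does not leave partial files in the output directory. The same pattern in isolation:

import os
import shutil
import tempfile

rendered_output = tempfile.mkdtemp()  # stand-in for the real output directory

with tempfile.TemporaryDirectory() as tempdir:
    temp_output_path = os.path.join(tempdir, 'index.rst')
    with open(temp_output_path, encoding='utf-8', mode='a') as output:
        output.write('generated content\n')

    # Only after all steps have succeeded are the files moved to their
    # final location, mirroring the loop at the end of run_all above.
    shutil.move(temp_output_path,
                os.path.join(rendered_output,
                             os.path.basename(temp_output_path)))

print(os.listdir(rendered_output))
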