def run(self):
    """Execute all configured commands, skipping those whose artifacts already exist.

    Returns the overall result derived from the command list.
    """
    work_directory = get_workdir()
    create_artifact_dir()
    command_list = self._get_commands()
    for filename, content, name, path, dictionary, raw_node, artifacts in command_list:
        # Skip commands whose outputs are already present; the user deletes
        # the artifacts to force a rerun.
        if self._are_all_artifacts_available(artifacts):
            logging.info(('''Artifacts for command '{}' are already there. '''
                          '''Delete them to regenerate them.'''
                          ).format(name))
            continue

        with tempfile.TemporaryDirectory(dir=work_directory) as tmpdir:
            chown_to_user(tmpdir)
            needs_root = raw_node.get('require_root', False)
            rendered_dictionary = yaml.dump(remove_passwords(dictionary),
                                            default_flow_style=False)
            logging.info(("Running command {} located in "
                          "{} with dictionary:\n{}"
                          ).format(name, path, rendered_dictionary))
            command_file = self._flush_command_file(tmpdir, filename, content)
            self._run_command(command_file, needs_root)
            self._post_process_artifacts(name, artifacts)

    return self._result(command_list)
def _run(self):
    """Upgrade the bootstrap image to an lxc image and return the result path."""
    # Reuse an existing artifact instead of rebuilding it.
    if os.path.isfile(self._result()):
        logging.info(("{0} is already there. "
                      "Delete it to regenerate it."
                      ).format(self._result()))
        return self._result()

    self._require_sudo()

    # This command is based upon the output of the bootstrap command.
    bootstrap_result = Bootstrap().run(self.config.get_base_config_file())

    print("Going to upgrade the bootstrap image to a lxc image.")

    with tempfile.TemporaryDirectory(dir=get_workdir()) as scratch_dir:
        chown_to_user(scratch_dir)
        image_dir = os.path.join(scratch_dir, "lxcimage")
        self._unpack_image(bootstrap_result, image_dir)
        self._write_container_metadata(image_dir)
        archive = self._pack_image(scratch_dir, image_dir)
        chown_to_user(archive)
        create_artifact_dir()
        # Move the archive out while the temporary directory still exists.
        shutil.move(archive, self._result())

    print_success("Created lxc image {}.".format(self._result()))
    return self._result()
def _run(self):
    """Bootstrap the initial root file system image and return the result path."""
    # Reuse an existing artifact instead of rebuilding it.
    if os.path.isfile(self._result()):
        logging.info(("{0} is already there. "
                      "Delete it to regenerate it.").format(self._result()))
        return self._result()

    self._require_sudo()

    qemu_executable = Fetch().run(self.config.get_base_config_file())

    print("Going to bootstrap initial image - be patient.")

    if self.config.get_bootstrap_tool() != "debootstrap":
        raise FatalError(("At the moment only debootstrap "
                          "is supported for bootstrapping!"))

    with tempfile.TemporaryDirectory(dir=get_workdir()) as scratch_dir:
        chown_to_user(scratch_dir)
        key_data = fetch_repository_key(
            self.config.get_bootstrap_repository_key())
        keyring_file = build_keyring(scratch_dir, "temp_keyring.gpg", key_data)
        rootfs = self._run_debootstrap(scratch_dir, keyring_file, qemu_executable)
        self._postprocess_rootfs(rootfs, key_data)
        archive = self._pack_image(scratch_dir, rootfs)
        chown_to_user(archive)
        create_artifact_dir()
        # Move the archive out while the temporary directory still exists.
        shutil.move(archive, self._result())

    print_success("Bootstrapped initial image {}.".format(self._result()))
    return self._result()
def run_all(self):
    """Run every configured playbook and return the names of the applied ones."""
    applied = []
    with tempfile.TemporaryDirectory(dir=get_workdir()) as scratch_dir:
        chown_to_user(scratch_dir)
        inventory = self._write_inventory_file(scratch_dir)
        for name, path, extra_vars in self._get_playbooks():
            logging.info(("Running playbook {} located in "
                          "{} with extra vars:\n{}"
                          ).format(name, path,
                                   yaml.dump(remove_passwords(extra_vars),
                                             default_flow_style=False)))
            extra_vars_file = os.path.join(scratch_dir,
                                           ("extra_vars_{}").format(name))
            with open(extra_vars_file, encoding='utf-8', mode='w') as file:
                file.write(yaml.dump(extra_vars))
            ansible_user = extra_vars.get("edi_config_management_user_name")
            self._run_playbook(path, inventory, extra_vars_file, ansible_user)
            applied.append(name)
    return applied
def _get_load_time_dictionary(self):
    """Assemble the dictionary that is available at configuration load time."""
    dictionary = get_base_dictionary()
    dictionary.update({
        "edi_work_directory": get_workdir(),
        "edi_project_directory": self.project_directory,
        "edi_project_plugin_directory": self.get_project_plugin_directory(),
        'edi_log_level': logging.getLevelName(
            logging.getLogger().getEffectiveLevel()),
        'edi_configuration_name': self.get_configuration_name(),
    })
    # Command context entries take precedence over the values above.
    dictionary.update(ConfigurationParser.command_context)
    return dictionary
def clean(self):
    """Remove all output artifacts produced by the configured commands.

    Raises:
        FatalError: if an artifact lies outside the current work directory.
    """
    # Safety guard baseline: everything we delete must live below the
    # work directory. Note: the previous substring test would wrongly
    # accept paths such as '<workdir>-backup/...'; compare normalized
    # absolute paths instead.
    workdir = os.path.abspath(str(get_workdir()))
    commands = self._get_commands()
    for filename, content, name, path, dictionary, raw_node, artifacts in commands:
        for _, artifact in artifacts.items():
            artifact_path = os.path.abspath(str(artifact))
            if os.path.commonpath([workdir, artifact_path]) != workdir:
                raise FatalError(('Output artifact {} is not within the current working directory!'
                                  ).format(artifact))
            if os.path.isfile(artifact):
                logging.info("Removing '{}'.".format(artifact))
                os.remove(artifact)
                print_success("Removed image file artifact {}.".format(artifact))
            elif os.path.isdir(artifact):
                # Directory artifacts may contain root-owned files; delegate
                # to the helper that can escalate via sudo when required.
                safely_remove_artifacts_folder(
                    artifact, sudo=raw_node.get('require_root', False))
                print_success("Removed image directory artifact {}.".format(artifact))
def _run(self):
    """Fetch the qemu Debian package and extract the emulator binary.

    Returns the path to the fetched binary, or None if qemu is not needed.
    """
    if not self._needs_qemu():
        return None

    # Reuse an existing artifact instead of fetching it again.
    if os.path.isfile(self._result()):
        logging.info(("{0} is already there. "
                      "Delete it to re-fetch it."
                      ).format(self._result()))
        return self._result()

    package_name = self.config.get_qemu_package_name()
    print("Going to fetch qemu Debian package ({}).".format(package_name))

    with tempfile.TemporaryDirectory(dir=get_workdir()) as download_dir:
        chown_to_user(download_dir)
        repository = self.config.get_qemu_repository()
        if repository:
            repository_key = self.config.get_qemu_repository_key()
        else:
            # No dedicated qemu repository configured: fall back to the
            # bootstrap repository and its key.
            repository = self.config.get_bootstrap_repository()
            repository_key = self.config.get_bootstrap_repository_key()
        downloader = PackageDownloader(repository=repository,
                                       repository_key=repository_key,
                                       architectures=[get_debian_architecture()])
        package_file = downloader.download(package_name=package_name,
                                           dest=download_dir)
        apt_inst.DebFile(package_file).data.extractall(download_dir)
        binary_path = os.path.join(download_dir, 'usr', 'bin',
                                   self._get_qemu_binary_name())
        chown_to_user(binary_path)
        create_artifact_dir()
        if not os.path.isdir(self._result_folder()):
            os.mkdir(self._result_folder())
            chown_to_user(self._result_folder())
        # Move the binary out while the temporary directory still exists.
        shutil.move(binary_path, self._result())

    print_success("Fetched qemu binary {}.".format(self._result()))
    return self._result()
def run_all(self):
    """Render every documentation step and publish the results.

    Output is first staged in a temporary directory and then moved into
    the rendered output directory. Returns the names of the applied steps.
    """
    self.fetch_artifact_setup()
    applied_steps = []
    with tempfile.TemporaryDirectory(dir=get_workdir()) as scratch_dir:
        staged_files = set()
        for name, path, parameters, raw_node in self._get_documentation_steps():
            output_file = self._get_output_file(name, raw_node)
            staged_path = os.path.join(scratch_dir, output_file)
            staged_files.add(staged_path)
            # Append mode lets multiple steps contribute to one output file;
            # the set keeps each staged file listed only once for the move.
            with open(staged_path, encoding="UTF-8", mode="a") as output:
                augmented_parameters = self.augment_step_parameters(parameters)
                logging.info(("Running documentation step {} located in "
                              "{} with parameters:\n{}\n"
                              "Writing output to {}.").format(
                                  name, path,
                                  yaml.dump(remove_passwords(augmented_parameters),
                                            default_flow_style=False),
                                  os.path.join(self.rendered_output, output_file)))
                self._run_documentation_step(path, augmented_parameters, output)
                applied_steps.append(name)
        for staged_path in staged_files:
            shutil.move(staged_path,
                        os.path.join(self.rendered_output,
                                     os.path.basename(staged_path)))
    return applied_steps