def command_line(self):
    """
    The complete SSH client command including the remote command.

    This is a list of strings with the SSH client command to connect to the
    remote host and execute :attr:`~.ExternalCommand.command`.
    """
    ssh_command = list(self.ssh_command)
    if self.identity_file:
        ssh_command.extend(('-i', self.identity_file))
    if self.ssh_user:
        ssh_command.extend(('-l', self.ssh_user))
    if self.port:
        ssh_command.extend(('-p', '%i' % self.port))
    ssh_command.extend(('-o', 'BatchMode=%s' % ('yes' if self.batch_mode else 'no')))
    ssh_command.extend(('-o', 'ConnectTimeout=%i' % self.connect_timeout))
    ssh_command.extend(('-o', 'LogLevel=%s' % self.log_level))
    if self.strict_host_key_checking in ('yes', 'no', 'ask'):
        ssh_command.extend(('-o', 'StrictHostKeyChecking=%s' % self.strict_host_key_checking))
    else:
        ssh_command.extend(('-o', 'StrictHostKeyChecking=%s' % ('yes' if self.strict_host_key_checking else 'no')))
    ssh_command.extend(('-o', 'UserKnownHostsFile=%s' % self.known_hosts_file))
    if self.tty:
        ssh_command.append('-t')
    ssh_command.append(self.ssh_alias)
    remote_command = quote(super(RemoteCommand, self).command_line)
    if remote_command:
        if self.remote_directory != DEFAULT_WORKING_DIRECTORY:
            cd_command = 'cd %s' % quote(self.remote_directory)
            remote_command = quote(self.prefix_shell_command(cd_command, remote_command))
        ssh_command.append(remote_command)
    return ssh_command

def get_delete_branch_command(self, branch_name, message, author):
    """Get the command to delete or close a branch in the local repository."""
    tokens = ['hg update --rev=%s && hg commit' % quote(branch_name)]
    if author:
        tokens.append('--user=%s' % quote(author.combined))
    tokens.append('--message=%s' % quote(message))
    tokens.append('--close-branch')
    return [' '.join(tokens)]

def error_message(self):
    """A user friendly explanation of how the remote command failed (a string or :data:`None`)."""
    if self.error_type is RemoteConnectFailed:
        return format("SSH connection to %s failed! (SSH command: %s)",
                      self.ssh_alias, quote(self.command_line))
    elif self.error_type is RemoteCommandNotFound:
        return format("External command on %s isn't available! (SSH command: %s)",
                      self.ssh_alias, quote(self.command_line))
    elif self.error_type is RemoteCommandFailed:
        return format("External command on %s failed with exit code %s! (SSH command: %s)",
                      self.ssh_alias, self.returncode, quote(self.command_line))

def get_commit_command(self, message, author=None):
    """
    Get the command to commit changes to tracked files in the working tree.

    This method uses the ``hg remove --after`` command to match the semantics
    of ``git commit --all`` (which is _not_ the same as ``hg commit
    --addremove``), however ``hg remove --after`` is _very_ verbose (it
    comments on every existing file in the repository) and it ignores the
    ``--quiet`` option. This explains why I've decided to silence the
    standard error stream (though I feel I may regret this later).
    """
    tokens = ['hg remove --after 2>/dev/null; hg commit']
    if author:
        tokens.append('--user=%s' % quote(author.combined))
    tokens.append('--message=%s' % quote(message))
    return [' '.join(tokens)]

def command_line(self):
    """
    The complete `chroot` command including the command to run inside the chroot.

    This is a list of strings with the `chroot` command line to enter the
    requested chroot and execute :attr:`~.ExternalCommand.command`.
    """
    chroot_command = list(self.chroot_command)
    # Check if we have superuser privileges on _the host system_ (via super()).
    if not super(ChangeRootCommand, self).have_superuser_privileges:
        # The chroot() system call requires superuser privileges on the host system.
        chroot_command.insert(0, 'sudo')
    chroot_command.append('--userspec=%s:%s' % (self.chroot_user, self.chroot_group))
    chroot_command.append(self.chroot)
    # Get the command to be executed inside the chroot.
    command_inside_chroot = list(super(ChangeRootCommand, self).command_line)
    # Check if we need to change the working directory inside the chroot.
    if self.chroot_directory and not command_inside_chroot:
        # We need to change the working directory but we don't have a
        # command to execute. In this case we assume that an interactive
        # shell was intended (inspired by how chroot, schroot and ssh work
        # when used interactively).
        command_inside_chroot = [DEFAULT_SHELL, '-i']
    if command_inside_chroot:
        # Check if we need to change the working directory inside the chroot.
        if self.chroot_directory:
            # The chroot program doesn't have an option to set the working
            # directory so as a workaround we use a shell to do this.
            cd_command = 'cd %s' % quote(self.chroot_directory)
            chroot_command.extend(self.prefix_shell_command(cd_command, command_inside_chroot))
        else:
            # If we don't need to change the working directory then
            # we don't need to quote the user's command any further.
            chroot_command.extend(command_inside_chroot)
    return chroot_command

def create_snapshot(self):
    """
    Create a snapshot of the destination directory.

    :raises: The following exceptions can be raised:

             - :exc:`.DestinationContextUnavailable`, refer
               to :attr:`destination_context` for details.
             - :exc:`.ParentDirectoryUnavailable`, refer
               to :attr:`.parent_directory` for details.
             - :exc:`~executor.ExternalCommandFailed` when
               the ``cp`` command reports an error.
    """
    # Compose the `cp' command needed to create a snapshot.
    snapshot = os.path.join(self.destination.parent_directory,
                            time.strftime('%Y-%m-%d %H:%M:%S'))
    cp_command = [
        'cp', '--archive', '--link',
        self.destination.directory, snapshot,
    ]
    # Execute the `cp' command?
    if self.dry_run:
        logger.info("Snapshot command: %s", quote(cp_command))
    else:
        timer = Timer()
        logger.info("Creating snapshot: %s", snapshot)
        self.destination_context.execute(*cp_command, ionice=self.ionice)
        logger.info("Took %s to create snapshot.", timer)

def wait_for_processes(processes):
    """
    Wait for the given processes to end.

    Prints an overview of running processes to the terminal once a second so
    the user knows what they are waiting for.

    This function is not specific to :mod:`proc.cron` at all (it doesn't even
    need to know what cron jobs are), it just waits until all of the given
    processes have ended.

    :param processes: A list of :class:`~proc.tree.ProcessNode` objects.
    """
    wait_timer = Timer()
    running_processes = list(processes)
    for process in running_processes:
        logger.info("Waiting for process %i: %s (runtime is %s)",
                    process.pid, quote(process.cmdline),
                    format_timespan(round(process.runtime)))
    with Spinner(timer=wait_timer) as spinner:
        while True:
            for process in list(running_processes):
                if not process.is_alive:
                    running_processes.remove(process)
            if not running_processes:
                break
            num_processes = pluralize(len(running_processes), "process", "processes")
            process_ids = concatenate(str(p.pid) for p in running_processes)
            spinner.step(label="Waiting for %s: %s" % (num_processes, process_ids))
            spinner.sleep()
    logger.info("All processes have finished, we're done waiting (took %s).",
                wait_timer.rounded)

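# A minimal usage sketch for wait_for_processes(), not taken from the original
# source: it assumes the `proc` package is installed and simply waits for any
# running 'sleep' processes (an arbitrary example chosen for illustration).
from proc.core import find_processes

sleepers = [p for p in find_processes() if p.comm == 'sleep']
if sleepers:
    wait_for_processes(sleepers)
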
def get_checkout_command(self, revision, clean=False):
    """Get the command to update the working tree of the local repository."""
    if clean:
        # This looks a bit obscure but it does the right thing: We give our
        # superclass a shell command that chains together two git commands.
        return ['git checkout . && git checkout %s' % quote(revision)]
    else:
        return ['git', 'checkout', revision]

def generate_screenshots():
    """Generate screenshots from shell scripts."""
    this_script = os.path.abspath(__file__)
    this_directory = os.path.dirname(this_script)
    repository = os.path.join(this_directory, os.pardir)
    examples_directory = os.path.join(repository, 'docs', 'examples')
    images_directory = os.path.join(repository, 'docs', 'images')
    for shell_script in sorted(glob.glob(os.path.join(examples_directory, '*.sh'))):
        basename, extension = os.path.splitext(os.path.basename(shell_script))
        image_file = os.path.join(images_directory, '%s.png' % basename)
        logger.info("Generating %s by running %s ..",
                    format_path(image_file),
                    format_path(shell_script))
        command_line = [sys.executable, __file__, shell_script]
        random_title = random_string(25)
        # Generate the urxvt command line.
        urxvt_command = [
            'urxvt',
            # Enforce a default geometry.
            '-geometry', '98x30',
            # Set the text and background color.
            '-fg', TEXT_COLOR,
            '-bg', BACKGROUND_COLOR,
            # Set the font name and pixel size.
            '-fn', 'xft:%s:pixelsize=%i' % (FONT_NAME, FONT_SIZE),
            # Set the window title.
            '-title', random_title,
            # Hide scrollbars.
            '+sb',
        ]
        if which('qtile-run'):
            # I've been using tiling window managers for years now, at the
            # moment 'qtile' is my window manager of choice. It requires the
            # following special handling to enable the 'urxvt' window to float,
            # which in turn enables it to respect the '--geometry' option.
            urxvt_command.insert(0, 'qtile-run')
            urxvt_command.insert(1, '-f')
        # Apply the Ubuntu color scheme to urxvt.
        for index, css_color in enumerate(EIGHT_COLOR_PALETTE):
            urxvt_command.extend(('--color%i' % index, css_color))
        # Add the command that should run inside the terminal.
        urxvt_command.extend(('-e', 'sh', '-c', 'setterm -cursor off; %s' % quote(command_line)))
        # Launch urxvt.
        execute(*urxvt_command, asynchronous=True)
        # Make sure we close the urxvt window.
        try:
            # Wait for urxvt to start up. If I were to improve this I could
            # instead wait for the creation of a file by interpret_script().
            time.sleep(10)
            # Take a screen shot of the window using ImageMagick.
            execute('import', '-window', random_title, image_file)
            # Auto-trim the screen shot, then give it a 5px border.
            execute('convert', image_file, '-trim',
                    '-bordercolor', BACKGROUND_COLOR, '-border', '5',
                    image_file)
        finally:
            execute('wmctrl', '-c', random_title)

def error_message(self):
    """A user friendly explanation of how the remote command failed (a string or :data:`None`)."""
    messages = {
        RemoteCommandFailed: "External command on {a} failed with exit code {n}!",
        RemoteCommandNotFound: "External command on {a} isn't available!",
        RemoteConnectFailed: "SSH connection to {a} failed!",
    }
    if self.error_type in messages:
        return self.format_error_message(
            "\n\n".join([
                messages[self.error_type],
                "SSH command:\n{c}",
            ]),
            a=self.ssh_alias,
            n=self.returncode,
            c=quote(self.command_line),
        )

def command_line(self):
    """
    The complete SSH client command including the remote command.

    This is a list of strings with the SSH client command to connect to the
    remote host and execute :attr:`~.ExternalCommand.command`.
    """
    ssh_command = list(self.ssh_command)
    if self.identity_file:
        ssh_command.extend(('-i', self.identity_file))
    if self.ssh_user:
        ssh_command.extend(('-l', self.ssh_user))
    if self.port:
        ssh_command.extend(('-p', '%i' % self.port))
    ssh_command.extend(('-o', 'BatchMode=%s' % ('yes' if self.batch_mode else 'no')))
    ssh_command.extend(('-o', 'ConnectTimeout=%i' % self.connect_timeout))
    ssh_command.extend(('-o', 'LogLevel=%s' % self.log_level))
    if self.strict_host_key_checking in ('yes', 'no', 'ask'):
        ssh_command.extend(('-o', 'StrictHostKeyChecking=%s' % self.strict_host_key_checking))
    else:
        ssh_command.extend(('-o', 'StrictHostKeyChecking=%s' % ('yes' if self.strict_host_key_checking else 'no')))
    ssh_command.extend(('-o', 'UserKnownHostsFile=%s' % self.known_hosts_file))
    if self.compression:
        ssh_command.append('-C')
    if self.tty:
        ssh_command.append('-t')
    ssh_command.append(self.ssh_alias)
    remote_command = quote(super(RemoteCommand, self).command_line)
    if remote_command:
        if self.remote_directory != DEFAULT_WORKING_DIRECTORY:
            cd_command = 'cd %s' % quote(self.remote_directory)
            remote_command = quote(self.prefix_shell_command(cd_command, remote_command))
        ssh_command.append(remote_command)
    return ssh_command

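# A hedged sketch (not from the original source) of how the composed SSH
# command line can be inspected; the host alias 'server-1' and the 'uptime'
# command are made up for illustration.
from executor.ssh.client import RemoteCommand

cmd = RemoteCommand('server-1', 'uptime', batch_mode=True, connect_timeout=10)
print(cmd.command_line)
# Expected shape (the exact options depend on the configured defaults):
# ['ssh', '-o', 'BatchMode=yes', '-o', 'ConnectTimeout=10', ..., 'server-1', 'uptime']
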
def pre_context(self):
    """
    The command execution context inside the pre-boot environment.

    The computed value of this property is a command execution context
    created by :mod:`executor.contexts`, more specifically it's a
    :class:`~executor.contexts.RemoteContext` object.
    """
    # Prepare the remote context options.
    options = dict(
        identity_file=self.pre_boot.identity_file,
        port=self.pre_boot.port_number,
        shell=False,
        ssh_alias=self.pre_boot.hostname,
        ssh_command=[
            SSH_PROGRAM_NAME,
            '-o', 'ControlMaster=auto',
            '-o', 'ControlPersist=60',
            '-o', format('ControlPath={d}/%r@%h:%p', d=self.control_directory),
        ],
        ssh_user=self.pre_boot.username,
        tty=False,
    )
    # Use the configured SSH proxy?
    if self.ssh_proxy:
        options['ssh_command'].extend((
            '-o', format('ProxyCommand=ssh %s -W %s:%i',
                         quote(self.ssh_proxy),
                         quote(self.pre_boot.hostname),
                         self.pre_boot.port_number),
        ))
    # Decide what to do about the `known_hosts' file.
    if self.known_hosts_file:
        options['known_hosts_file'] = self.known_hosts_file
    else:
        options['ignore_known_hosts'] = True
    # Create the remote context object.
    return RemoteContext(**options)

def write_file(self, filename, contents):
    """
    Write a file in the initial ram disk of the pre-boot environment.

    :param filename: The pathname of the file to write (a string).
    :param contents: The contents to write to the file (a string).

    This method writes a file in the initial ram disk by running a remote
    'sh' shell that redirects its standard input to the given filename.
    """
    self.pre_context.execute('sh', '-c', 'cat > %s' % quote(filename), input=contents)

def create_image_file(filename, size, context=None):
    r"""
    Create an image file filled with bytes containing zero (``\0``).

    :param filename: The pathname of the image file (a string).
    :param size: How large the image file should be (see :func:`.coerce_size()`).
    :param context: See :func:`.coerce_context()` for details.
    :raises: :exc:`~exceptions.ValueError` when `size` is invalid,
             :exc:`~executor.ExternalCommandFailed` when the command fails.
    """
    context = coerce_context(context)
    size = coerce_size(size)
    logger.debug("Creating image file of %i bytes: %s", size, filename)
    head_command = 'head --bytes=%i /dev/zero > %s'
    context.execute(head_command % (size, quote(filename)), shell=True, tty=False)

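# A hedged usage example for create_image_file(); the pathname and size below
# are made-up values and the default (local) execution context is used.
create_image_file('/tmp/demo.img', '10 MiB')
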
def __init__(self, command, timeout):
    """
    Initialize a :class:`CommandTimedOut` object.

    :param command: The command that timed out (an
                    :class:`~executor.ExternalCommand` object).
    :param timeout: The timeout that was exceeded (a number).
    """
    super(CommandTimedOut, self).__init__(
        command=command,
        error_message=format(
            "External command exceeded timeout of %s: %s",
            format_timespan(timeout),
            quote(command.command_line),
        ),
    )

def install_with_npm_cache(self, directory, silent=False):
    """
    Use npm-cache_ to install dependencies.

    :param directory: The pathname of a directory with a ``package.json`` file (a string).
    :param silent: Used to set :attr:`~executor.ExternalCommand.silent`.
    :raises: Any exceptions raised by the :mod:`executor.contexts` module.

    If the ``npm-cache`` command isn't already installed (globally) it will
    be installed (locally).

    .. warning:: When I tried out npm-cache_ for the second time I found out
                 that it unconditionally includes both production
                 dependencies_ and devDependencies_ in the cache keys that it
                 calculates, thereby opening the door for 'cache poisoning'.
                 For more details please refer to `npm-cache issue 74`_.
                 Currently npm-accel does not work around this problem, so
                 consider yourself warned ;-).

    .. _npm-cache: https://www.npmjs.com/package/npm-cache
    .. _dependencies: https://docs.npmjs.com/files/package.json#dependencies
    .. _devDependencies: https://docs.npmjs.com/files/package.json#devdependencies
    .. _npm-cache issue 74: https://github.com/swarajban/npm-cache/issues/74
    """
    timer = Timer()
    program_name = 'npm-cache'
    if not self.context.test('which', program_name):
        program_name = os.path.join(directory, 'node_modules', '.bin', 'npm-cache')
        if not self.context.exists(program_name):
            logger.verbose("Installing npm-cache locally (because it's not globally installed) ..")
            self.context.execute('npm', 'install', 'npm-cache', directory=directory, silent=silent)
    install_command = [program_name, 'install', 'npm', self.production_option]
    logger.info("Running command: %s", quote(install_command))
    self.context.execute(*install_command, directory=directory, silent=silent)
    logger.verbose("Took %s to install with npm-cache.", timer)

def prune_dependencies(self, directory, silent=False):
    """
    Remove extraneous packages using `npm prune`_.

    :param directory: The pathname of a directory with a ``package.json`` file (a string).
    :param silent: Used to set :attr:`~executor.ExternalCommand.silent`.
    :raises: Any exceptions raised by the :mod:`executor.contexts` module.

    .. _npm prune: https://docs.npmjs.com/cli/prune
    """
    timer = Timer()
    prune_command = ['npm', 'prune', self.production_option]
    logger.info("Running command: %s", quote(prune_command))
    self.context.execute(*prune_command, directory=directory, silent=silent)
    logger.verbose("Took %s to run 'npm prune'.", timer)

def install_with_npm(self, directory, silent=False):
    """
    Use `npm install`_ to install dependencies.

    :param directory: The pathname of a directory with a ``package.json`` file (a string).
    :param silent: Used to set :attr:`~executor.ExternalCommand.silent`.
    :raises: Any exceptions raised by the :mod:`executor.contexts` module.

    .. _npm install: https://docs.npmjs.com/cli/install
    """
    timer = Timer()
    install_command = ['npm', 'install', self.production_option]
    logger.info("Running command: %s", quote(install_command))
    self.context.execute(*install_command, directory=directory, silent=silent)
    logger.verbose("Took %s to install with npm.", timer)

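# A hedged usage sketch of the install_with_npm() method above: it assumes
# that `NpmAccel` is the class defining these install_* methods and that it
# accepts an executor context via its constructor; the project directory is a
# made-up value.
from executor.contexts import LocalContext

accel = NpmAccel(context=LocalContext())  # assumed class name and constructor
accel.install_with_npm('/home/user/projects/my-web-app')
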
def install_sources_file(self):
    """Install a 'package resource list' that points ``apt`` to the NodeSource repository."""
    logger.info("Installing package resource list (%s) ..", self.sources_file)
    sources_list = dedent('''
        # {filename}:
        # Get NodeJS binaries from the NodeSource repository.
        deb https://deb.nodesource.com/{version} {codename} main
        deb-src https://deb.nodesource.com/{version} {codename} main
    ''', filename=self.sources_file,
         version=self.nodejs_version,
         codename=self.distribution_codename)
    # TODO It would be nicer if context.write_file() accepted sudo=True!
    self.context.execute('cat > %s' % quote(self.sources_file), input=sources_list, sudo=True)

def write_file(self, filename, contents):
    """
    Change the contents of a file.

    :param filename: The pathname of the file to write (a string).
    :param contents: The contents to write to the file (a byte string).

    This method uses a combination of cat_ and `output redirection`_ to
    change the contents of files so that options like
    :attr:`~.ExternalCommand.sudo` are respected (regardless of whether we're
    dealing with a :class:`LocalContext` or :class:`RemoteContext`). Due to
    the use of cat_ this method will create files that don't exist yet,
    assuming the directory containing the file already exists and the context
    provides permission to write to the directory.

    .. _output redirection: https://en.wikipedia.org/wiki/Redirection_(computing)
    """
    return self.execute('cat > %s' % quote(filename), shell=True, input=contents)

def create_key_script(self):
    """
    Create a key script in the pre-boot environment (initial ram disk).

    This method creates a minimal key script (a shell script containing a
    single ``echo`` command) in the pre-boot environment and modifies the
    :attr:`cryptroot_config` file so that the key script is used to unlock
    the root disk.
    """
    logger.info("Creating key script: %s", self.key_script)
    self.write_file(self.key_script, 'echo -n %s\n' % quote(self.password))
    self.pre_context.execute('chmod', '700', self.key_script)
    logger.info("Updating configuration file: %s", self.cryptroot_config)
    contents = self.pre_context.read_file(self.cryptroot_config)
    self.write_file(self.cryptroot_config, ''.join(
        '%s,keyscript=%s\n' % (line.strip(), self.key_script)
        for line in contents.splitlines()
    ))

def launch_program(command, is_running=None):
    """
    Start a program if it's not already running.

    This function makes it easy to turn any program into a single instance
    program. If the default "Is the program already running?" check fails to
    work you can redefine the way this check is done.

    :param command: The shell command used to launch the application (a string).
    :param is_running: The shell command used to check whether the application
                       is already running (a string, optional).
    :returns: One of the values from the :class:`LaunchStatus` enumeration.

    Examples of custom "is running" checks:

    .. code-block:: python

       # Chromium uses a wrapper script, so we need to match the absolute
       # pathname of the executable.
       launch_program('chromium-browser',
                      is_running='pidof /usr/lib/chromium-browser/chromium-browser')

       # Dropbox does the same thing as Chromium, but the absolute pathname of
       # the executable contains a version number that I don't want to hard
       # code in my ~/.dwimrc profile :-)
       launch_program('dropbox start',
                      is_running='pgrep -f "$HOME/.dropbox-dist/*/dropbox"')
    """
    try:
        pathname = resolve_program(extract_program(command))
        if not is_running:
            is_running = 'pidof %s' % quote(pathname)
        logger.verbose("Checking if program is running (%s) ..", pathname)
        if execute(is_running, silent=True, check=False):
            logger.info("Command already running: %s", command)
            return LaunchStatus.already_running
        else:
            logger.info("Starting command: %s", command)
            execute('sh', '-c', '(%s >/dev/null 2>&1) &' % command)
            return LaunchStatus.started
    except MissingProgramError:
        logger.warning("Program not installed! (%s)", command)
        return LaunchStatus.not_installed
    except Exception as e:
        logger.warning("Failed to start program! (%s)", e)
        return LaunchStatus.unspecified_error

def __str__(self):
    """
    Render a human friendly representation of a :class:`ControllableProcess` object.

    :returns: A string describing the process. Includes the process ID
              and the command line (when available).
    """
    text = []
    # Include the process ID? (only when it's available)
    if self.pid is not None:
        text.append(str(self.pid))
    # Include the command line? (again, only when it's available)
    if self.command_line:
        # We import here to avoid circular imports.
        from executor import quote
        text.append("(%s)" % quote(self.command_line))
    if not text:
        # If all else fails we fall back to the super class.
        text.append(object.__str__(self))
    return " ".join(text)

def write_file(self, filename, contents, **options):
    """
    Change the contents of a file.

    :param filename: The pathname of the file to write (a string).
    :param contents: The contents to write to the file (a byte string).
    :param options: Optional keyword arguments to :func:`execute()`.

    This method uses a combination of cat_ and `output redirection`_ to
    change the contents of files so that options like
    :attr:`~.ExternalCommand.sudo` are respected (regardless of whether we're
    dealing with a :class:`LocalContext` or :class:`RemoteContext`). Due to
    the use of cat_ this method will create files that don't exist yet,
    assuming the directory containing the file already exists and the context
    provides permission to write to the directory.

    .. _output redirection: https://en.wikipedia.org/wiki/Redirection_(computing)
    """
    options.update(input=contents, shell=True)
    return self.execute('cat > %s' % quote(filename), **options)

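# A hedged usage sketch of the write_file() method above; the pathname,
# contents and use of sudo are made up for illustration.
from executor.contexts import LocalContext

context = LocalContext()
context.write_file('/etc/motd', b'Welcome to this machine!\n', sudo=True)
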
def set_random_background(command, directory):
    """
    Set a random desktop wallpaper / background.

    :param command: The command to set the wallpaper (a string containing
                    an ``{image}`` marker).
    :param directory: The pathname of a directory containing wallpapers (a string).
    """
    assert '{image}' in command, "The 1st argument should contain an {image} marker!"
    backgrounds = []
    logger.verbose("Searching for desktop backgrounds in %s ..", directory)
    for root, dirs, files in os.walk(directory):
        for filename in files:
            if filename.lower().endswith(('.jpg', '.jpeg', '.png')):
                backgrounds.append(os.path.join(root, filename))
    logger.verbose("Found %i desktop backgrounds.", len(backgrounds))
    selected_background = random.choice(backgrounds)
    logger.info("Selected random background: %s", selected_background)
    execute(command.format(image=quote(selected_background)))

def set_random_background(command, directory):
    """
    Set a random desktop wallpaper / background.

    :param command: The command to set the wallpaper (a string containing
                    an ``{image}`` marker).
    :param directory: The pathname of a directory containing wallpapers (a string).
    :raises: :exc:`~exceptions.ValueError` when the `command` string doesn't
             contain an ``{image}`` placeholder.
    """
    if '{image}' not in command:
        raise ValueError("The 1st argument should contain an {image} marker!")
    backgrounds = []
    logger.verbose("Searching for desktop backgrounds in %s ..", directory)
    for root, dirs, files in os.walk(directory):
        for filename in files:
            if filename.lower().endswith(('.jpg', '.jpeg', '.png')):
                backgrounds.append(os.path.join(root, filename))
    logger.verbose("Found %s.", pluralize(len(backgrounds), "desktop background"))
    selected_background = random.choice(backgrounds)
    logger.info("Selected random background: %s", format_path(selected_background))
    execute(command.format(image=quote(selected_background)))

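# A hedged usage example; the feh invocation and the wallpaper directory are
# assumptions made for illustration.
set_random_background(
    command='feh --bg-scale {image}',
    directory=os.path.expanduser('~/Pictures/Wallpapers'),
)
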
def run_command(arguments, timeout=None):
    """
    Run the specified command (with an optional timeout).

    :param arguments: The command line for the external command (a list of strings).
    :param timeout: The optional command timeout (a number or :data:`None`).
    :raises: :exc:`CommandTimedOut` if the command times out.
    """
    timer = Timer()
    logger.info("Running command: %s", quote(arguments))
    with execute(*arguments, asynchronous=True) as command:
        # Wait for the command to finish or exceed the given timeout.
        while command.is_running:
            if timeout and timer.elapsed_time > timeout:
                raise CommandTimedOut(command, timeout)
            # Sleep between 0.1 and 1 second, waiting for
            # the external command to finish its execution.
            if timeout:
                time_to_sleep = min(1, max(0.1, timeout - timer.elapsed_time))
            else:
                time_to_sleep = 1
            if time_to_sleep > 0:
                time.sleep(time_to_sleep)
        if command.succeeded:
            logger.info("Command completed successfully in %s.", timer)

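# A short usage example of run_command(); the command and timeout are
# arbitrary values chosen for illustration.
run_command(['sleep', '2'], timeout=10)
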
def gpg_command(self):
    """
    The GPG command line that can be used to sign using the key, export the key, etc (a string).

    The value of :attr:`gpg_command` is based on :attr:`scoped_command`
    combined with the ``--no-default-keyring``, ``--keyring`` and
    ``--secret-keyring`` options (the latter only when an older GnuPG
    version is being used).

    The documentation of :func:`GPGKey.__init__()` contains two examples.
    """
    command = self.scoped_command
    if not have_updated_gnupg():
        command.extend((
            '--no-default-keyring',
            '--keyring', self.public_key_file,
            '--secret-keyring', self.secret_key_file,
        ))
    if self.key_id:
        command.extend(('--recipient', self.key_id))
    if self.use_agent:
        command.append('--use-agent')
    return quote(command)

def foreach(hosts, *command, **options):
    """
    Execute a command simultaneously on a group of remote hosts using SSH.

    :param hosts: An iterable of strings with SSH host aliases.
    :param command: Any positional arguments are converted to a list and used
                    to set the :attr:`~.ExternalCommand.command` property of
                    the :class:`RemoteCommand` objects constructed by
                    :func:`foreach()`.
    :param concurrency: The value of :attr:`.concurrency` to use
                        (defaults to :data:`DEFAULT_CONCURRENCY`).
    :param delay_checks: The value of :attr:`.delay_checks` to use
                         (defaults to :data:`True`).
    :param logs_directory: The value of :attr:`.logs_directory` to use
                           (defaults to :data:`None`).
    :param options: Additional keyword arguments can be used to conveniently
                    override the default values of the writable properties of
                    the :class:`RemoteCommand` objects constructed by
                    :func:`foreach()` (see :func:`RemoteCommand.__init__()`
                    for details).
    :returns: The list of :class:`RemoteCommand` objects constructed by
              :func:`foreach()`.
    :raises: Any of the following exceptions can be raised:

             - :exc:`.CommandPoolFailed` if :attr:`.delay_checks` is enabled
               (the default) and a command in the pool that has :attr:`.check`
               enabled (the default) fails.
             - :exc:`RemoteCommandFailed` if :attr:`.delay_checks` is disabled
               (not the default) and an SSH connection was successful but the
               remote command failed (the exit code of the ``ssh`` command was
               neither zero nor 255). Use the keyword argument ``check=False``
               to disable raising of this exception.
             - :exc:`RemoteConnectFailed` if :attr:`.delay_checks` is disabled
               (not the default) and an SSH connection failed (the exit code
               of the ``ssh`` command is 255). Use the keyword argument
               ``check=False`` to disable raising of this exception.

    .. note:: The :func:`foreach()` function enables the :attr:`.check` and
              :attr:`.delay_checks` options by default in an attempt to make
              it easy to do "the right thing". My assumption here is that if
              you are running *the same command* on multiple remote hosts:

              - You definitely want to know when a remote command has failed,
                ideally without manually checking the :attr:`.succeeded`
                property of each command.

              - Regardless of whether some remote commands fail you want to
                know that the command was at least executed on all hosts,
                otherwise your cluster of hosts will end up in a very
                inconsistent state.

              - If remote commands fail and an exception is raised the
                exception message should explain *which* remote commands
                failed.

              If these assumptions are incorrect then you can use the keyword
              arguments ``check=False`` and/or ``delay_checks=False`` to opt
              out of "doing the right thing" ;-)
    """
    hosts = list(hosts)
    # Separate command pool options from command options.
    concurrency = options.pop('concurrency', DEFAULT_CONCURRENCY)
    delay_checks = options.pop('delay_checks', True)
    logs_directory = options.pop('logs_directory', None)
    # Capture the output of remote commands by default
    # (unless the caller requested capture=False).
    if options.get('capture') is not False:
        options['capture'] = True
    # Enable error checking of remote commands by default
    # (unless the caller requested check=False).
    if options.get('check') is not False:
        options['check'] = True
    # Create a command pool.
    timer = Timer()
    pool = RemoteCommandPool(concurrency=concurrency,
                             delay_checks=delay_checks,
                             logs_directory=logs_directory)
    hosts_pluralized = pluralize(len(hosts), "host")
    logger.debug("Preparing to run remote command on %s (%s) with a concurrency of %i: %s",
                 hosts_pluralized, concatenate(hosts), concurrency, quote(command))
    # Populate the pool with remote commands to execute.
    for ssh_alias in hosts:
        pool.add(identifier=ssh_alias,
                 command=RemoteCommand(ssh_alias, *command, **options))
    # Run all commands in the pool.
    pool.run()
    # Report the results to the caller.
    logger.debug("Finished running remote command on %s in %s.", hosts_pluralized, timer)
    return dict(pool.commands).values()

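# A hedged usage sketch for foreach(); the host aliases and the command are
# made up for illustration.
for cmd in foreach(['web1', 'web2', 'web3'], 'sudo', 'apt-get', 'update'):
    print(cmd.ssh_alias, cmd.returncode)
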
def get_export_command(self, directory, revision):
    """Get the command to export the complete tree from the local repository."""
    shell_command = 'git archive %s | tar --extract --directory=%s'
    return [shell_command % (quote(revision), quote(directory))]

def install_with_npm_fast_install(self, directory, silent=False):
    """
    Use npm-fast-install_ to install dependencies.

    :param directory: The pathname of a directory with a ``package.json`` file (a string).
    :param silent: Used to set :attr:`~executor.ExternalCommand.silent`.
    :raises: Any exceptions raised by the :mod:`executor.contexts` module.

    If the ``npm-fast-install`` command isn't already installed (globally)
    it will be installed (locally).

    .. warning:: When I tried out npm-fast-install_ for the first time I
                 found out that ``npm-fast-install --all`` fails to actually
                 install the devDependencies_. For more details please refer
                 to `npm-fast-install pull request 3`_.

                 Because this bug prevented me from evaluating how fast
                 npm-fast-install_ was I implemented a workaround that
                 temporarily rewrites the ``package.json`` file by merging
                 devDependencies_ into dependencies_. This approach has the
                 potential to corrupt the contents of ``package.json`` if the
                 process of restoring the original contents is interrupted
                 (e.g. when you abort npm-accel by pressing Control-C and
                 keeping it pressed for a while).

    .. _npm-fast-install: https://www.npmjs.com/package/npm-fast-install
    .. _npm-fast-install pull request 3: https://github.com/appcelerator/npm-fast-install/pull/3
    """
    timer = Timer()
    program_name = 'npm-fast-install'
    if not self.context.test('which', 'npm-fast-install'):
        program_name = os.path.join(directory, 'node_modules', '.bin', 'npm-fast-install')
        if not self.context.exists(program_name):
            logger.verbose("Installing npm-fast-install locally (because it's not globally installed) ..")
            self.context.execute('npm', 'install', 'npm-fast-install', directory=directory, silent=silent)
    package_file = os.path.join(directory, 'package.json')
    original_contents = self.context.read_file(package_file)
    metadata = dict(dependencies={}, devDependencies={})
    metadata.update(json.loads(auto_decode(original_contents)))
    need_patch = metadata['devDependencies'] and not self.production
    try:
        # Temporarily change the contents of the package.json file?
        if need_patch:
            logger.debug("Temporarily patching %s ..", package_file)
            patched_data = copy.deepcopy(metadata)
            patched_data['dependencies'].update(patched_data['devDependencies'])
            patched_data.pop('devDependencies')
            self.context.write_file(package_file, json.dumps(patched_data).encode('UTF-8'))
        # Run the npm-fast-install command.
        logger.info("Running command: %s", quote(program_name))
        self.context.execute(program_name, directory=directory, silent=silent)
    finally:
        # Restore the original contents of the package.json file?
        if need_patch:
            logger.debug("Restoring original contents of %s ..", package_file)
            self.context.write_file(package_file, original_contents)
    logger.verbose("Took %s to install with npm-fast-install.", timer)

def write_to_named_pipe(self):
    """Write :attr:`password` to the named pipe configured by :attr:`named_pipe`."""
    logger.info("Unlocking root filesystem using named pipe ..")
    unlock_cmd = 'echo -n %s > %s' % (quote(self.password), quote(self.named_pipe))
    self.pre_context.execute(input=unlock_cmd)
