def multilocal(spec, arg):
    """run the command in parallel locally for each host"""
    # NOTE(review): an identical ``multilocal`` is defined again later in
    # this file; at import time the later definition shadows this one.
    def run_command():
        # Execute on the local machine, streaming output (capture=0) and
        # honouring the spec's working directory and format settings.
        return local(arg, capture=0, dir=spec.dir, format=spec.format)

    runner = env()
    runner.multirun(
        run_command, spec.shell, spec.pty, spec.combine_stderr,
        spec.dir, spec.format, quiet_exit=1,
    )
def multirun(spec, arg):
    """run the command in parallel on the various hosts"""
    # NOTE(review): ``multirun`` is redefined at the end of this file with
    # the same body; the later definition is the one that takes effect.
    context = env()
    context.multirun(
        arg, spec.shell, spec.pty, spec.combine_stderr,
        spec.dir, spec.format, quiet_exit=1,
    )
def multisudo(spec, arg):
    """run the sudoed command in parallel on the various hosts"""
    # NOTE(review): an identical ``multisudo`` appears again later in this
    # file; the later definition shadows this one at import time.
    def run_command():
        # The fifth positional argument is passed as None -- presumably the
        # sudo user (i.e. run as root); confirm against sudo()'s signature.
        return sudo(
            arg, spec.shell, spec.pty, spec.combine_stderr,
            None, spec.dir, spec.format,
        )

    runner = env()
    runner.multirun(
        run_command, spec.shell, spec.pty, spec.combine_stderr,
        spec.dir, spec.format, quiet_exit=1,
    )
def multilocal(spec, arg):
    """run the command in parallel locally for each host"""
    # NOTE(review): duplicates the ``multilocal`` defined earlier in this
    # file; this later definition is the effective one at import time.
    def local_task():
        return local(arg, capture=0, dir=spec.dir, format=spec.format)

    env().multirun(local_task, spec.shell, spec.pty, spec.combine_stderr,
                   spec.dir, spec.format, quiet_exit=1)
def multisudo(spec, arg):
    """run the sudoed command in parallel on the various hosts"""
    # NOTE(review): duplicates the earlier ``multisudo``; this later
    # definition is the one that wins at import time.
    def sudo_task():
        # Fifth positional argument is None -- presumably the sudo user
        # (run as root); confirm against sudo()'s signature.
        return sudo(arg, spec.shell, spec.pty, spec.combine_stderr,
                    None, spec.dir, spec.format)

    context = env()
    context.multirun(sudo_task, spec.shell, spec.pty, spec.combine_stderr,
                     spec.dir, spec.format, quiet_exit=1)
def info(spec, arg):
    """list the hosts and the current context"""
    # Print the active context stack, one indented entry per line.
    print
    print "Context:"
    print
    print "\n".join("  %s" % ctx for ctx in env.ctx)
    print
    # Print the host string from each per-host settings dict.
    # NOTE(review): env().settings is assumed to be an iterable of dicts
    # keyed by 'host_string' -- confirm against the env runner.
    print "Hosts:"
    print
    for setting in env().settings:
        print "  ", setting['host_string']
    print
def main():
    """
    Main command-line execution loop.

    Parses options and arguments, loads the fabfile and user settings,
    resolves the tasks to run, executes them in order, and terminates the
    process with an appropriate exit status.  This function never returns
    normally: every path ends in ``sys.exit`` (or an uncaught re-raise of
    ``SystemExit``).
    """
    try:
        # Parse command line options
        parser, options, arguments = parse_options()

        # Handle regular args vs -- args
        arguments = parser.largs
        remainder_arguments = parser.rargs

        # Update env with any overridden option values
        # NOTE: This needs to remain the first thing that occurs
        # post-parsing, since so many things hinge on the values in env.
        for option in env_options:
            env[option.dest] = getattr(options, option.dest)

        # Handle --hosts, --roles (comma separated string => list)
        for key in ['hosts', 'roles']:
            if key in env and isinstance(env[key], str):
                env[key] = env[key].split(',')

        # Handle output control level show/hide
        update_output_levels(show=options.show, hide=options.hide)

        # Handle version number option
        if options.show_version:
            print("Fabric %s" % env.version)
            sys.exit(0)

        # Load settings from user settings file, into shared env dict.
        env.update(load_settings(env.rcfile))

        # Find local fabfile path or abort
        fabfile = find_fabfile()
        if not fabfile and not remainder_arguments:
            abort("Couldn't find any fabfiles!")

        # Store absolute path to fabfile in case anyone needs it
        env.real_fabfile = fabfile

        # Load fabfile (which calls its module-level code, including
        # tweaks to env values) and put its commands in the shared
        # commands dict.  Default docstring so the non-execution flow
        # below cannot hit an unbound name when no fabfile was loaded.
        docstring = None
        if fabfile:
            docstring, callables = load_fabfile(fabfile)
            commands.update(callables)

        # Autocompletion support: offer task names (dashes instead of
        # underscores) plus anything registered under env.autocomplete.
        autocomplete_items = [cmd.replace('_', '-') for cmd in commands]
        if 'autocomplete' in env:
            autocomplete_items += env.autocomplete
        autocomplete(parser, ListCompleter(autocomplete_items))

        # Handle hooks related options: whitespace-separated hook names
        # to force-disable or force-enable.
        _disable_hooks = options.disable_hooks
        _enable_hooks = options.enable_hooks
        if _disable_hooks:
            for _hook in _disable_hooks.strip().split():
                DISABLED_HOOKS.append(_hook.strip())
        if _enable_hooks:
            for _hook in _enable_hooks.strip().split():
                ENABLED_HOOKS.append(_hook.strip())

        # Handle the non-execution flow: no tasks were given, so show
        # informational output instead (shortlist/display_command/
        # list_commands are presumed to exit the process -- confirm).
        if not arguments and not remainder_arguments:
            # Non-verbose command list
            if options.shortlist:
                shortlist()
            # Handle show (command-specific help) option
            if options.display:
                display_command(options.display)
            # Else, show the list of commands and exit
            list_commands(docstring)

        # Now that we're settled on a fabfile, inform user.
        if output.debug:
            if fabfile:
                print("Using fabfile '%s'" % fabfile)
            else:
                print("No fabfile loaded -- remainder command only")

        # Parse arguments into commands to run (plus args/kwargs/hosts)
        commands_to_run, env_update = parse_arguments(arguments)
        env.update(env_update)

        # Parse remainders into a faux "command" to execute
        remainder_command = parse_remainder(remainder_arguments)

        # Figure out if any specified task names are invalid
        unknown_commands = [tup[0] for tup in commands_to_run
                            if tup[0] not in commands]

        # Abort if any unknown commands were specified
        if unknown_commands:
            abort("Command(s) not found:\n%s" % indent(unknown_commands))

        # Generate remainder command and insert into commands,
        # commands_to_run
        if remainder_command:
            r = '<remainder>'
            commands[r] = lambda: api.run(remainder_command)
            commands_to_run.append((r, [], {}, [], []))

        if output.debug:
            names = ", ".join(x[0] for x in commands_to_run)
            print("Commands to run: %s" % names)

        call_hooks('commands.before', commands, commands_to_run)

        # Initialise context runner
        env()

        # Initialise the default stage if none is given as the first
        # command.
        # NOTE(review): this spec tuple has six elements while the specs
        # appended above have five -- confirm execute_command handles both.
        if 'stages' in env:
            if commands_to_run[0][0] not in env.stages:
                execute_command(
                    (env.stages[0], (), {}, None, None, None), commands
                )
            else:
                execute_command(commands_to_run.pop(0), commands)

        # Load the optional YAML config file, resolved relative to the
        # fabfile's directory.  The handle is closed in a finally clause
        # so it is not leaked when load_yaml raises or abort() fires
        # (the original closed it only on the success path).
        if env.config_file:
            config_path = realpath(expanduser(env.config_file))
            config_path = join(dirname(fabfile), config_path)
            config_file = open(config_path, 'rb')
            try:
                config = load_yaml(config_file.read())
            finally:
                config_file.close()
            if not config:
                env.config = AttributeDict()
            elif not isinstance(config, dict):
                abort("Invalid config file found at %s" % config_path)
            else:
                env.config = AttributeDict(config)

        call_hooks('config.loaded')

        # At this point all commands must exist, so execute them in order.
        for spec in commands_to_run:
            execute_command(spec, commands)

        # If we got here, no errors occurred, so print a final note.
        if output.status:
            msg = "\nDone."
            if env.colors:
                msg = env.color_settings['finish'](msg)
            print(msg)
    except SystemExit:
        # a number of internal functions might raise this one.
        raise
    except KeyboardInterrupt:
        if output.status:
            msg = "\nStopped."
            if env.colors:
                msg = env.color_settings['finish'](msg)
            print >> sys.stderr, msg
        sys.exit(1)
    except:
        sys.excepthook(*sys.exc_info())
        # we might leave stale threads if we don't explicitly exit()
        sys.exit(1)
    finally:
        # BUG FIX: sys.exit(0) previously lived inside this finally
        # clause, which replaced any in-flight SystemExit (including the
        # sys.exit(1) calls in the handlers above) and forced the exit
        # status to 0.  The success exit now happens after the try block.
        call_hooks('commands.after')
        disconnect_all()
    sys.exit(0)
def multirun(spec, arg):
    """run the command in parallel on the various hosts"""
    # NOTE(review): duplicates the ``multirun`` defined earlier in this
    # file; this later definition shadows it at import time.
    env().multirun(arg, spec.shell, spec.pty, spec.combine_stderr,
                   spec.dir, spec.format, quiet_exit=1)