def help(name=None):
    """Display help for a given task

    Options:
        name - The task to display help on.

    To display a list of available tasks type:

        $ fab -l

    To display help on a specific task type:

        $ fab help:<name>
    """
    if name is None:
        name = "help"
    task = crawl(name, state.commands)
    if isinstance(task, Task):
        doc = getattr(task, "__doc__", None)
        if doc is not None:
            print("Help on {0:s}:".format(name))
            print(doc)
        else:
            print("No help available for {0:s}".format(name))
    else:
        print("No such task {0:s}".format(name))
        print("For a list of tasks type: fab -l")
def loadable(self, src, dst):
    """
    Determines whether there is enough space to load the target database.
    """
    from fabric import state
    from fabric.task_utils import crawl

    src_task = crawl(src, state.commands)
    assert src_task, 'Unknown source role: %s' % src
    dst_task = crawl(dst, state.commands)
    # Report the destination name here (the original interpolated `src`).
    assert dst_task, 'Unknown destination role: %s' % dst

    # Get source database size.
    src_task()
    env.host_string = env.hosts[0]
    src_size_bytes = self.get_size()

    # Get target database size, if any.
    dst_task()
    env.host_string = env.hosts[0]
    try:
        dst_size_bytes = self.get_size()
    except (ValueError, TypeError):
        dst_size_bytes = 0

    # Get target host disk size.
    free_space_bytes = self.get_free_space()

    # Deduct the existing database size, because we'll be deleting it.
    balance_bytes = free_space_bytes + dst_size_bytes - src_size_bytes
    balance_bytes_scaled, units = pretty_bytes(balance_bytes)

    viable = balance_bytes >= 0
    if self.verbose:
        print('src_db_size:', pretty_bytes(src_size_bytes))
        print('dst_db_size:', pretty_bytes(dst_size_bytes))
        print('dst_free_space:', pretty_bytes(free_space_bytes))
        print('')
    if viable:
        print('Viable! There will be %.02f %s of disk space left.'
              % (balance_bytes_scaled, units))
    else:
        print('Not viable! We would be %.02f %s short.'
              % (balance_bytes_scaled, units))
    return viable
def help(name):
    """Show extended help for a task (e.g. 'fab help:search.reindex')"""
    task = crawl(name, state.commands)
    if task is None:
        # Interpolate the name, not `task` (which is always None here).
        abort("%r is not a valid task name" % name)
    puts(textwrap.dedent(task.__doc__).strip())
def test_task_decorator_plays_well_with_others(self):
    """
    @task, when inside @hosts/@roles, should not hide the decorated task.
    """
    module = fabfile('decorator_order')
    with path_prefix(module):
        docs, funcs = load_fabfile(module)
        # When broken, crawl() finds None for 'foo' instead.
        eq_(crawl('foo', funcs), funcs['foo'])
def help(name=""): """Show extended help for a task (e.g. 'fab help:search.reindex')""" from fabric.main import show_commands if not name: puts("\nFor more information on a task run `fab help:<task>`.\n") show_commands(None, 'short') task = crawl(name, state.commands) if task is None: abort("%r is not a valid task name" % task) puts(textwrap.dedent(task.__doc__).strip())
def help(name=""): """ Show extended help for a task (e.g. 'fab help:changepassword') These fabric tasks are designed to take a vanilla Ubuntu 12.04 machine with default passwords/setup and then apply various security tasks to make it suitable to place this machine on the internet awaiting further setup. """ task = crawl(name, state.commands) if name is "": help("help") elif task is None: abort("%r is not a valid task name" % task) else: puts(textwrap.dedent(task.__doc__).strip())
def run(self):
    for candidate in self.tasks:
        is_callable = callable(candidate)
        if not (is_callable or _is_task(candidate)):
            # Assume string; resolve it against the registered commands
            task = crawl(candidate, state.commands)
            if task is None:
                abort("Invalid dependency, %r is not callable or a valid "
                      "task name" % (candidate,))
        else:
            # Candidate is already a callable/Task; use it as-is. (This
            # branch is inferred: without it `task` would be unbound here.)
            task = candidate
        # Normalize to Task instance if we ended up with a regular callable
        if not _is_task(task):
            task = WrappedCallableTask(task)
        for func in self.filters:
            task.wrapped = func(task.wrapped)
        execute(task)
def display_command(name):
    """
    Print command function's docstring, then exit. Invoked with -d/--display.
    """
    # Sanity check
    command = crawl(name, state.commands)
    if command is None:
        msg = "Task '%s' does not appear to exist. Valid task names:\n%s"
        abort(msg % (name, "\n".join(_normal_list(False))))
    # Print out nicely presented docstring if found
    if command.__doc__:
        print("Displaying detailed information for task '%s':" % name)
        print('')
        print(indent(command.__doc__, strip=True))
        print('')
    # Or print notice if not
    else:
        print("No detailed information available for task '%s':" % name)
    sys.exit(0)
def display_command(name, code=0):
    """
    Print command function's docstring, then exit. Invoked with -d/--display.
    """
    # Sanity check
    command = crawl(name, state.commands)
    name = name.replace(".", " ")
    if command is None:
        msg = "Task '%s' does not appear to exist. Valid task names:\n%s"
        abort(msg % (name, "\n".join(_normal_list(False))))
    # Get the presented docstring if found
    task_details = get_task_docstring(command)
    if task_details:
        print("Displaying detailed information for task '%s':" % name)
        print('')
        print(indent(task_details, strip=True))
        print('')
    # Or print notice if not
    else:
        print("No detailed information available for task '%s':" % name)
    sys.exit(code)
def display_command(name):
    """
    Print command function's docstring, then exit. Invoked with -d/--display.
    """
    # Sanity check
    command = crawl(name, state.commands)
    if command is None:
        msg = "Task '%s' does not appear to exist. Valid task names:\n%s"
        abort(msg % (name, "\n".join(_normal_list(False))))
    # Log nicely presented docstring if found
    if hasattr(command, '__details__'):
        task_details = command.__details__()
    else:
        task_details = get_task_details(command)
    if task_details:
        logging.debug("Displaying detailed information for task '%s':" % name)
        logging.debug('')
        logging.debug(indent(task_details, strip=True))
        logging.debug('')
    # Or log notice if not
    else:
        logging.debug("No detailed information available for task '%s':" % name)
    sys.exit(0)
def has_task(task):
    """
    Checks whether a fabric task with the given name exists.
    """
    return crawl(task, state.commands) is not None
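# Usage sketch for has_task() above. Assumes a fabfile has already been loaded
# into fabric.state.commands (e.g. via fabric.main.load_fabfile); the task
# name 'deploy.migrate' is an illustrative assumption.
from fabric.api import execute

if has_task('deploy.migrate'):
    execute('deploy.migrate')
else:
    print("deploy.migrate is not a registered task")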
def executes(task, all_args, *args, **kwargs):
    """
    Run ``task`` once per argument dict in ``all_args``, pulling each run's
    host from the dict's 'host' key (mirrors fabric's execute()).
    """
    hosts = [all_args[index].get('host', '')
             for index in xrange(len(all_args))]
    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    is_callable = callable(task)
    if not (is_callable or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            msg = "%r is not callable or a valid task name" % (
                my_env['command'],)
            if state.env.get('skip_unknown_tasks', False):
                warn(msg)
                return
            else:
                abort(msg)
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        task = WrappedCallableTask(task)
    my_env['all_hosts'] = hosts
    my_env['all_args'] = all_args
    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above traceback.)
    Please make sure the module is installed or that the above ImportError is
    fixed.""")
    else:
        multiprocessing = None
    # Get pool size for this task
    pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size)
    # Set up job queue in case parallel is needed
    queue = multiprocessing.Queue() if parallel else None
    jobs = JobQueue(pool_size, queue)
    if state.output.debug:
        jobs._debug = True
    has_hosts = len(my_env['all_hosts']) > 0
    for index in xrange(len(my_env['all_args'])):
        host = my_env['all_args'][index].get('host', '')
        if has_hosts:
            host = my_env['all_hosts'][index]
        input_kwargs = my_env['all_args'][index]
        try:
            results[host] = _execute(
                task, host, my_env, args, input_kwargs, jobs, queue,
                multiprocessing
            )
        except NetworkError, e:
            results[host] = e
            # Backwards compat test re: whether to use an exception or
            # abort
            if not state.env.use_exceptions_for['network']:
                func = warn if state.env.skip_bad_hosts else abort
                error(e.message, func=func, exception=e.wrapped)
            else:
                raise
        # If requested, clear out connections here and not just at the end.
        if state.env.eagerly_disconnect:
            disconnect_all()
def execute(task, *args, **kwargs):
    """
    Execute ``task`` (callable or name), honoring host/role decorators, etc.

    ``task`` may be an actual callable object, or it may be a registered task
    name, which is used to look up a callable just as if the name had been
    given on the command line (including :ref:`namespaced tasks
    <namespaces>`, e.g. ``"deploy.migrate"``).

    The task will then be executed once per host in its host list, which is
    (again) assembled in the same manner as CLI-specified tasks: drawing from
    :option:`-H`, :ref:`env.hosts <hosts>`, the `~fabric.decorators.hosts` or
    `~fabric.decorators.roles` decorators, and so forth.

    ``host``, ``hosts``, ``role``, ``roles`` and ``exclude_hosts`` kwargs will
    be stripped out of the final call, and used to set the task's host list,
    as if they had been specified on the command line like e.g. ``fab
    taskname:host=hostname``.

    Any other arguments or keyword arguments will be passed verbatim into
    ``task`` (the function itself -- not the ``@task`` decorator wrapping your
    function!) when it is called, so ``execute(mytask, 'arg1',
    kwarg1='value')`` will (once per host) invoke ``mytask('arg1',
    kwarg1='value')``.

    :returns:
        a dictionary mapping host strings to the given task's return value
        for that host's execution run. For example, ``execute(foo,
        hosts=['a', 'b'])`` might return ``{'a': None, 'b': 'bar'}`` if
        ``foo`` returned nothing on host `a` but returned ``'bar'`` on host
        `b`.

        In situations where a task execution fails for a given host but
        overall progress does not abort (such as when :ref:`env.skip_bad_hosts
        <skip-bad-hosts>` is True) the return value for that host will be the
        error object or message.

    .. seealso::
        :ref:`The execute usage docs <execute>`, for an expanded explanation
        and some examples.

    .. versionadded:: 1.3
    .. versionchanged:: 1.4
        Added the return value mapping; previously this function had no
        defined return value.
    """
    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    is_callable = callable(task)
    if not (is_callable or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            msg = "%r is not callable or a valid task name" % (
                my_env['command'],)
            if state.env.get('skip_unknown_tasks', False):
                warn(msg)
                return
            else:
                abort(msg)
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        task = WrappedCallableTask(task)
    # Filter out hosts/roles kwargs
    new_kwargs, hosts, roles, exclude_hosts = parse_kwargs(kwargs)
    # Set up host list
    my_env['all_hosts'], my_env['effective_roles'] = \
        task.get_hosts_and_effective_roles(hosts, roles, exclude_hosts,
                                           state.env)
    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above traceback.)
    Please make sure the module is installed or that the above ImportError is
    fixed.""")
    else:
        multiprocessing = None
    # Get pool size for this task
    pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size)
    # Set up job queue in case parallel is needed
    queue = multiprocessing.Queue() if parallel else None
    jobs = JobQueue(pool_size, queue)
    if state.output.debug:
        jobs._debug = True
    # Call on host list
    if my_env['all_hosts']:
        # Attempt to cycle on hosts, skipping if needed
        for host in my_env['all_hosts']:
            try:
                results[host] = _execute(
                    task, host, my_env, args, new_kwargs, jobs, queue,
                    multiprocessing
                )
            except NetworkError as e:
                results[host] = e
                # Backwards compat test re: whether to use an exception or
                # abort
                if not state.env.use_exceptions_for['network']:
                    func = warn if state.env.skip_bad_hosts else abort
                    error(e.message, func=func, exception=e.wrapped)
                else:
                    raise
            # If requested, clear out connections here and not just at the
            # end.
            if state.env.eagerly_disconnect:
                disconnect_all()
        # If running in parallel, block until job queue is emptied
        if jobs:
            err = "One or more hosts failed while executing task '%s'" % (
                my_env['command']
            )
            jobs.close()
            # Abort if any children did not exit cleanly (fail-fast).
            # This prevents Fabric from continuing on to any other tasks.
            # Otherwise, pull in results from the child run.
            ran_jobs = jobs.run()
            for name, d in ran_jobs.items():
                if d['exit_code'] != 0:
                    if isinstance(d['results'], NetworkError) and \
                            _is_network_error_ignored():
                        error(d['results'].message, func=warn,
                              exception=d['results'].wrapped)
                    elif isinstance(d['results'], BaseException):
                        error(err, exception=d['results'])
                    else:
                        error(err)
                results[name] = d['results']
    # Or just run once for local-only
    else:
        with settings(**my_env):
            results['<local-only>'] = task.run(*args, **new_kwargs)
    # Return what we can from the inner task executions
    return results
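# Usage sketch for execute() above: running a hypothetical @task over an
# explicit host list. Host names and the task body are assumptions, not taken
# from the snippet.
from fabric.api import task, run, execute

@task
def uptime():
    return run("uptime")

def check_fleet():
    # Per the docstring above, execute() returns {host: return_value}.
    return execute(uptime, hosts=['web1.example.com', 'web2.example.com'])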
def _parallel_tasks(commands_to_run):
    return any([
        requires_parallel(crawl(x[0], state.commands))
        for x in commands_to_run
    ])
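# Sketch of what makes _parallel_tasks() above return True: a task decorated
# with fabric's @parallel carries the flag that requires_parallel() checks.
# The shape of commands_to_run (task name first in each tuple) follows the
# snippet; the task name and body are assumptions.
from fabric.api import task, parallel, run

@task
@parallel
def restart():
    run("sudo service app restart")

# Once 'restart' is registered in state.commands:
#   _parallel_tasks([('restart', [], {}, [], [], [])])  # -> True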
def _print_docstring(docstrings, name):
    if not docstrings:
        return False
    docstring = crawl(name, state.commands).__doc__
    if isinstance(docstring, str):
        return docstring
def _print_docstring(docstrings, name):
    if not docstrings:
        return False
    docstring = crawl(name, state.commands).__doc__
    if type(docstring) in types.StringTypes:
        return docstring
def execute(task, *args, **kwargs):
    """
    Patched version of fabric's execute task with alternative error handling
    """
    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    is_callable = callable(task)
    if not (is_callable or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            msg = "%r is not callable or a valid task name" % (
                my_env['command'],
            )
            if state.env.get('skip_unknown_tasks', False):
                warn(msg)
                return
            else:
                abort(msg)
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        task = WrappedCallableTask(task)
    # Filter out hosts/roles kwargs
    new_kwargs, hosts, roles, exclude_hosts = parse_kwargs(kwargs)
    # Set up host list
    my_env['all_hosts'], my_env['effective_roles'] = \
        task.get_hosts_and_effective_roles(hosts, roles, exclude_hosts,
                                           state.env)
    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above traceback.)
    Please make sure the module is installed or that the above ImportError is
    fixed.""")
    else:
        multiprocessing = None
    # Get pool size for this task
    pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size)
    # Set up job queue in case parallel is needed
    queue = multiprocessing.Queue() if parallel else None
    jobs = JobQueue(pool_size, queue)
    if state.output.debug:
        jobs._debug = True
    # Call on host list
    if my_env['all_hosts']:
        # Attempt to cycle on hosts, skipping if needed
        for host in my_env['all_hosts']:
            try:
                results[host] = _execute(task, host, my_env, args, new_kwargs,
                                         jobs, queue, multiprocessing)
            except NetworkError, e:
                results[host] = e
                # Alternative error handling: warn instead of aborting when
                # skip_bad_hosts or warn_only is set
                func = warn if state.env.skip_bad_hosts \
                    or state.env.warn_only else abort
                error(e.message, func=func, exception=e.wrapped)
            except SystemExit, e:
                results[host] = e
            # If requested, clear out connections here and not just at the
            # end.
            if state.env.eagerly_disconnect:
                disconnect_all()
def execute(task, *args, **kwargs):
    """
    Execute ``task`` (callable or name), honoring host/role decorators, etc.

    ``task`` may be an actual callable object, or it may be a registered task
    name, which is used to look up a callable just as if the name had been
    given on the command line (including :ref:`namespaced tasks
    <namespaces>`, e.g. ``"deploy.migrate"``).

    The task will then be executed once per host in its host list, which is
    (again) assembled in the same manner as CLI-specified tasks: drawing from
    :option:`-H`, :ref:`env.hosts <hosts>`, the `~fabric.decorators.hosts` or
    `~fabric.decorators.roles` decorators, and so forth.

    ``host``, ``hosts``, ``role``, ``roles`` and ``exclude_hosts`` kwargs will
    be stripped out of the final call, and used to set the task's host list,
    as if they had been specified on the command line like e.g. ``fab
    taskname:host=hostname``.

    Any other arguments or keyword arguments will be passed verbatim into
    ``task`` when it is called, so ``execute(mytask, 'arg1',
    kwarg1='value')`` will (once per host) invoke ``mytask('arg1',
    kwarg1='value')``.

    This function returns a dictionary mapping host strings to the given
    task's return value for that host's execution run. For example,
    ``execute(foo, hosts=['a', 'b'])`` might return ``{'a': None, 'b':
    'bar'}`` if ``foo`` returned nothing on host `a` but returned ``'bar'``
    on host `b`.

    In situations where a task execution fails for a given host but overall
    progress does not abort (such as when :ref:`env.skip_bad_hosts
    <skip-bad-hosts>` is True) the return value for that host will be the
    error object or message.

    .. seealso::
        :ref:`The execute usage docs <execute>`, for an expanded explanation
        and some examples.

    .. versionadded:: 1.3
    .. versionchanged:: 1.4
        Added the return value mapping; previously this function had no
        defined return value.
    """
    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    if not (callable(task) or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            # Report the looked-up name; `task` itself is always None here.
            abort("%r is not callable or a valid task name"
                  % (my_env['command'],))
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        from fabric.decorators import task as task_decorator
        task = task_decorator(task)
    # Filter out hosts/roles kwargs
    new_kwargs, hosts, roles, exclude_hosts = parse_kwargs(kwargs)
    # Set up host list
    my_env['all_hosts'] = task.get_hosts(hosts, roles, exclude_hosts,
                                         state.env)
    # No hosts, just run once locally
    if not my_env['all_hosts']:
        with settings(**my_env):
            results['<local-only>'] = task.run(*args, **new_kwargs)
        return results
    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above traceback.)
    Please make sure the module is installed or that the above ImportError is
    fixed.""")
        # Get max pool size for this task
        pool_size = task.get_pool_size(my_env['all_hosts'],
                                       state.env.pool_size)
        # Set up job comms queue
        queue = multiprocessing.Queue()
        role_limits = state.env.get('role_limits', None)
        jobs = JobQueue(pool_size, queue, role_limits=role_limits,
                        debug=state.output.debug)
    else:
        queue = None
        jobs = None
    # Attempt to cycle on hosts, skipping if needed
    for host in my_env['all_hosts']:
        task.role = task.get_role(host, hosts, state.env)
        try:
            results[host] = _execute(task, host, my_env, args, new_kwargs,
                                     jobs, queue)
        except NetworkError, e:
            results[host] = e
            # Backwards compat test re: whether to use an exception or
            # abort
            if not state.env.use_exceptions_for['network']:
                func = warn if state.env.skip_bad_hosts else abort
                error(e.message, func=func, exception=e.wrapped)
            else:
                raise
    # Return the per-host results promised by the docstring.
    return results
def test_default_task_loading():
    """
    crawl() should return default tasks where found, instead of module objs
    """
    docs, tasks = load_fabfile(fabfile('default_tasks'))
    ok_(isinstance(crawl('mymodule', tasks), Task))
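# Sketch of the fixture behavior the test above relies on: a module containing
# a task marked default=True makes crawl() return that Task rather than the
# module object. The module and task names below are illustrative assumptions
# (contents of e.g. default_tasks/mymodule.py):
from fabric.api import task

@task(default=True)
def full_deploy():
    """Runs when 'mymodule' is invoked with no subtask name."""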
def get_task_instance(name):
    from fabric import state
    from fabric.task_utils import crawl
    return crawl(name, state.commands)
def _parallel_tasks(commands_to_run):
    return any([requires_parallel(crawl(x[0], state.commands))
                for x in commands_to_run])
def execute(task, *args, **kwargs):
    """
    Execute ``task`` (callable or name), honoring host/role decorators, etc.

    ``task`` may be an actual callable object, or it may be a registered task
    name, which is used to look up a callable just as if the name had been
    given on the command line (including :ref:`namespaced tasks
    <namespaces>`, e.g. ``"deploy.migrate"``).

    The task will then be executed once per host in its host list, which is
    (again) assembled in the same manner as CLI-specified tasks: drawing from
    :option:`-H`, :ref:`env.hosts <hosts>`, the `~fabric.decorators.hosts` or
    `~fabric.decorators.roles` decorators, and so forth.

    ``host``, ``hosts``, ``role``, ``roles`` and ``exclude_hosts`` kwargs will
    be stripped out of the final call, and used to set the task's host list,
    as if they had been specified on the command line like e.g. ``fab
    taskname:host=hostname``.

    Any other arguments or keyword arguments will be passed verbatim into
    ``task`` (the function itself -- not the ``@task`` decorator wrapping your
    function!) when it is called, so ``execute(mytask, 'arg1',
    kwarg1='value')`` will (once per host) invoke ``mytask('arg1',
    kwarg1='value')``.

    :returns:
        a dictionary mapping host strings to the given task's return value
        for that host's execution run. For example, ``execute(foo,
        hosts=['a', 'b'])`` might return ``{'a': None, 'b': 'bar'}`` if
        ``foo`` returned nothing on host `a` but returned ``'bar'`` on host
        `b`.

        In situations where a task execution fails for a given host but
        overall progress does not abort (such as when :ref:`env.skip_bad_hosts
        <skip-bad-hosts>` is True) the return value for that host will be the
        error object or message.

    .. seealso::
        :ref:`The execute usage docs <execute>`, for an expanded explanation
        and some examples.

    .. versionadded:: 1.3
    .. versionchanged:: 1.4
        Added the return value mapping; previously this function had no
        defined return value.
    """
    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    is_callable = callable(task)
    if not (is_callable or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            msg = "%r is not callable or a valid task name" % (
                my_env['command'],
            )
            if state.env.get('skip_unknown_tasks', False):
                warn(msg)
                return
            else:
                abort(msg)
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        task = WrappedCallableTask(task)
    # Filter out hosts/roles kwargs
    new_kwargs, hosts, roles, exclude_hosts = parse_kwargs(kwargs)
    # Set up host list
    my_env['all_hosts'], my_env['effective_roles'] = \
        task.get_hosts_and_effective_roles(hosts, roles, exclude_hosts,
                                           state.env)
    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above traceback.)
    Please make sure the module is installed or that the above ImportError is
    fixed.""")
    else:
        multiprocessing = None
    # Get pool size for this task
    pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size)
    # Set up job queue in case parallel is needed
    queue = multiprocessing.Queue() if parallel else None
    jobs = JobQueue(pool_size, queue)
    if state.output.debug:
        jobs._debug = True
    # Call on host list
    if my_env['all_hosts']:
        # Attempt to cycle on hosts, skipping if needed
        for host in my_env['all_hosts']:
            try:
                results[host] = _execute(task, host, my_env, args, new_kwargs,
                                         jobs, queue, multiprocessing)
            except NetworkError as e:
                results[host] = e
                # Backwards compat test re: whether to use an exception or
                # abort
                if not state.env.use_exceptions_for['network']:
                    func = warn if state.env.skip_bad_hosts else abort
                    error(e.message, func=func, exception=e.wrapped)
                else:
                    raise
            # If requested, clear out connections here and not just at the
            # end.
            if state.env.eagerly_disconnect:
                disconnect_all()
        # If running in parallel, block until job queue is emptied
        if jobs:
            err = "One or more hosts failed while executing task '%s'" % (
                my_env['command'])
            jobs.close()
            # Abort if any children did not exit cleanly (fail-fast).
            # This prevents Fabric from continuing on to any other tasks.
            # Otherwise, pull in results from the child run.
            ran_jobs = jobs.run()
            for name, d in six.iteritems(ran_jobs):
                if d['exit_code'] != 0:
                    if isinstance(d['results'], NetworkError) and \
                            _is_network_error_ignored():
                        error(d['results'].message, func=warn,
                              exception=d['results'].wrapped)
                    elif isinstance(d['results'], BaseException):
                        error(err, exception=d['results'])
                    else:
                        error(err)
                results[name] = d['results']
    # Or just run once for local-only
    else:
        with settings(**my_env):
            results['<local-only>'] = task.run(*args, **new_kwargs)
    # Return what we can from the inner task executions
    return results
def _parallel_tasks(commands_to_run):
    return any(list(map(
        lambda x: requires_parallel(crawl(x[0], state.commands)),
        commands_to_run
    )))
def main(fabfile_locations=None):
    """
    Main command-line execution loop.
    """
    try:
        # Parse command line options
        parser, options, arguments = parse_options()

        # Handle regular args vs -- args
        arguments = parser.largs
        remainder_arguments = parser.rargs

        # Allow setting of arbitrary env keys.
        # This comes *before* the "specific" env_options so that those may
        # override these ones. Specific should override generic, if somebody
        # was silly enough to specify the same key in both places.
        # E.g. "fab --set shell=foo --shell=bar" should have env.shell set to
        # 'bar', not 'foo'.
        for pair in _escape_split(',', options.env_settings):
            pair = _escape_split('=', pair)
            # "--set x" => set env.x to True
            # "--set x=" => set env.x to ""
            key = pair[0]
            value = True
            if len(pair) == 2:
                value = pair[1]
            state.env[key] = value

        # Update env with any overridden option values
        # NOTE: This needs to remain the first thing that occurs
        # post-parsing, since so many things hinge on the values in env.
        for option in env_options:
            state.env[option.dest] = getattr(options, option.dest)

        # Handle --hosts, --roles, --exclude-hosts (comma separated string =>
        # list)
        for key in ['hosts', 'roles', 'exclude_hosts']:
            if key in state.env and isinstance(state.env[key], string_types):
                state.env[key] = state.env[key].split(',')

        # Record the tasks requested for execution in env.tasks.
        state.env['tasks'] = arguments

        # Handle output control level show/hide
        update_output_levels(show=options.show, hide=options.hide)

        # Handle version number option
        if options.show_version:
            print("Fabric %s" % state.env.version)
            print("Paramiko %s" % ssh.__version__)
            sys.exit(0)

        # Load settings from user settings file, into shared env dict.
        state.env.update(load_settings(state.env.rcfile))

        # Find local fabfile path or abort
        fabfile = find_fabfile(fabfile_locations)
        if not fabfile and not remainder_arguments:
            abort("""Couldn't find any fabfiles!

Remember that -f can be used to specify fabfile path, and use -h for help.""")

        # Store absolute path to fabfile in case anyone needs it
        state.env.real_fabfile = fabfile

        # Load fabfile (which calls its module-level code, including
        # tweaks to env values) and put its commands in the shared commands
        # dict
        default = None
        if fabfile:
            docstring, callables, default = load_fabfile(fabfile)
            state.commands.update(callables)

        # Handle case where we were called bare, i.e. just "fab", and print
        # a help message.
        actions = (options.list_commands, options.shortlist, options.display,
                   arguments, remainder_arguments, default)
        if not any(actions):
            parser.print_help()
            sys.exit(1)

        # Abort if no commands found
        if not state.commands and not remainder_arguments:
            abort("Fabfile didn't contain any commands!")

        # Now that we're settled on a fabfile, inform user.
        if state.output.debug:
            if fabfile:
                print("Using fabfile '%s'" % fabfile)
            else:
                print("No fabfile loaded -- remainder command only")

        # Shortlist is now just an alias for the "short" list format;
        # it overrides use of --list-format if somebody were to specify both
        if options.shortlist:
            options.list_format = 'short'
            options.list_commands = True

        # List available commands
        if options.list_commands:
            show_commands(docstring, options.list_format)

        # Handle show (command-specific help) option
        if options.display:
            display_command(options.display)

        # If user didn't specify any commands to run, show help
        if not (arguments or remainder_arguments or default):
            parser.print_help()
            sys.exit(0)  # Or should it exit with error (1)?

        # Parse arguments into commands to run (plus args/kwargs/hosts)
        commands_to_run = parse_arguments(arguments)

        # Parse remainders into a faux "command" to execute
        remainder_command = parse_remainder(remainder_arguments)

        # Figure out if any specified task names are invalid
        unknown_commands = []
        for tup in commands_to_run:
            if crawl(tup[0], state.commands) is None:
                unknown_commands.append(tup[0])

        # Abort if any unknown commands were specified
        if unknown_commands:
            warn("Command(s) not found:\n%s" % indent(unknown_commands))
            show_commands(None, options.list_format, 1)

        # Generate remainder command and insert into commands, commands_to_run
        if remainder_command:
            r = '<remainder>'
            state.commands[r] = lambda: api.run(remainder_command)
            commands_to_run.append((r, [], {}, [], [], []))

        # Ditto for a default, if found
        if not commands_to_run and default:
            commands_to_run.append((default.name, [], {}, [], [], []))

        # Initial password prompt, if requested
        if options.initial_password_prompt:
            prompt = "Initial value for env.password: "
            state.env.password = getpass.getpass(prompt)

        if state.output.debug:
            names = ", ".join(x[0] for x in commands_to_run)
            print("Commands to run: %s" % names)

        # At this point all commands must exist, so execute them in order.
        for name, args, kwargs, arg_hosts, arg_roles, arg_exclude_hosts \
                in commands_to_run:
            execute(
                name,
                hosts=arg_hosts,
                roles=arg_roles,
                exclude_hosts=arg_exclude_hosts,
                *args, **kwargs
            )

        # If we got here, no errors occurred, so print a final note.
        if state.output.status:
            print("\nDone.")
    except SystemExit:
        # a number of internal functions might raise this one.
        raise
    except KeyboardInterrupt:
        if state.output.status:
            sys.stderr.write("\nStopped.\n")
        sys.exit(1)
    except:
        sys.excepthook(*sys.exc_info())
        # we might leave stale threads if we don't explicitly exit()
        sys.exit(1)
    finally:
        disconnect_all()
    sys.exit(0)
def _parallel_tasks(commands_to_run):
    return any(
        map(lambda x: requires_parallel(crawl(x[0], state.commands)),
            commands_to_run))
def get_task_instance(name):
    """
    Resolve a task name to its registered Task instance (None if not found).
    """
    from fabric import state
    from fabric.task_utils import crawl
    return crawl(name, state.commands)
def _print_docstring(docstrings, name):
    if not docstrings:
        return False
    docstring = crawl(name, state.commands).__doc__
    if isinstance(docstring, string_types):
        return docstring
def execute(task, *args, **kwargs):
    """
    Patched version of fabric's execute task with alternative error handling
    """
    my_env = {'clean_revert': True}
    results = {}
    # Obtain task
    is_callable = callable(task)
    if not (is_callable or _is_task(task)):
        # Assume string, set env.command to it
        my_env['command'] = task
        task = crawl(task, state.commands)
        if task is None:
            msg = "%r is not callable or a valid task name" % (
                my_env['command'],)
            if state.env.get('skip_unknown_tasks', False):
                warn(msg)
                return
            else:
                abort(msg)
    # Set env.command if we were given a real function or callable task obj
    else:
        dunder_name = getattr(task, '__name__', None)
        my_env['command'] = getattr(task, 'name', dunder_name)
    # Normalize to Task instance if we ended up with a regular callable
    if not _is_task(task):
        task = WrappedCallableTask(task)
    # Filter out hosts/roles kwargs
    new_kwargs, hosts, roles, exclude_hosts = parse_kwargs(kwargs)
    # Set up host list
    my_env['all_hosts'], my_env['effective_roles'] = \
        task.get_hosts_and_effective_roles(hosts, roles, exclude_hosts,
                                           state.env)
    parallel = requires_parallel(task)
    if parallel:
        # Import multiprocessing if needed, erroring out usefully
        # if it can't.
        try:
            import multiprocessing
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + """
    At least one task needs to be run in parallel, but the
    multiprocessing module cannot be imported (see above traceback.)
    Please make sure the module is installed or that the above ImportError is
    fixed.""")
    else:
        multiprocessing = None
    # Get pool size for this task
    pool_size = task.get_pool_size(my_env['all_hosts'], state.env.pool_size)
    # Set up job queue in case parallel is needed
    queue = multiprocessing.Queue() if parallel else None
    jobs = JobQueue(pool_size, queue)
    if state.output.debug:
        jobs._debug = True
    # Call on host list
    if my_env['all_hosts']:
        # Attempt to cycle on hosts, skipping if needed
        for host in my_env['all_hosts']:
            try:
                results[host] = _execute(
                    task, host, my_env, args, new_kwargs, jobs, queue,
                    multiprocessing
                )
            except NetworkError, e:
                results[host] = e
                # Alternative error handling: warn instead of aborting when
                # skip_bad_hosts or warn_only is set
                func = warn if state.env.skip_bad_hosts \
                    or state.env.warn_only else abort
                error(e.message, func=func, exception=e.wrapped)
            except SystemExit:
                # Swallow SystemExit so the remaining hosts still run
                pass
            # If requested, clear out connections here and not just at the
            # end.
            if state.env.eagerly_disconnect:
                disconnect_all()
def gentleman():
    args_dict = docopt(__doc__, version=__version__)
    arguments = args_dict['COMMAND']
    if not os.path.exists(os.path.join(os.getcwd(), '.gentle.yaml')):
        if arguments != ['init']:
            print(red('This is not a gentle directory. Make sure this is '
                      'the correct directory, and use `gt init` to '
                      'initialize it.'))
            sys.exit(1)
        else:
            init()
            arguments = args_dict['COMMAND']
    try:
        # state.env.update(load_settings(state.env.rcfile))
        fabfile = os.path.join(here, 'gt')
        state.env.real_fabfile = fabfile
        default = None
        if fabfile:
            docstring, callables, default = load_fabfile(fabfile)
            state.commands.update(callables)
        commands_to_run = parse_arguments(arguments)
        unknown_commands = []
        for tup in commands_to_run:
            if crawl(tup[0], state.commands) is None:
                unknown_commands.append(tup[0])
        if args_dict['--list']:
            show_commands(docstring, 'normal', 1)
        for type in ['show', 'hide']:
            t = args_dict['--' + type]
            if t is not None:
                hide = (type == 'hide')
                set_output(t, hide=hide, only=args_dict['--only'])
        # Abort if any unknown commands were specified
        if unknown_commands:
            warn("Command(s) not found:\n%s" % indent(unknown_commands))
            show_commands(None, 'normal', 1)
        for name, args, kwargs, arg_hosts, arg_roles, arg_exclude_hosts in \
                commands_to_run:
            execute(
                name,
                hosts=arg_hosts,
                roles=arg_roles,
                exclude_hosts=arg_exclude_hosts,
                *args, **kwargs)
        if state.output.status:
            print("\nDone.")
    except SystemExit:
        raise
    except KeyboardInterrupt:
        if state.output.status:
            sys.stderr.write("\nStopped.\n")
        sys.exit(1)
    except:
        sys.excepthook(*sys.exc_info())
        sys.exit(1)
    finally:
        disconnect_all()
    sys.exit(0)
def main():
    """
    Main command-line execution loop.
    """
    try:
        # Parse command line options
        parser, options, arguments = parse_options()

        # Handle regular args vs -- args
        arguments = parser.largs
        remainder_arguments = parser.rargs

        # Allow setting of arbitrary env keys.
        # This comes *before* the "specific" env_options so that those may
        # override these ones. Specific should override generic, if somebody
        # was silly enough to specify the same key in both places.
        # E.g. "fab --set shell=foo --shell=bar" should have env.shell set to
        # 'bar', not 'foo'.
        for pair in _escape_split(',', options.env_settings):
            pair = _escape_split('=', pair)
            # "--set x" => set env.x to True
            # "--set x=" => set env.x to ""
            key = pair[0]
            value = True
            if len(pair) == 2:
                value = pair[1]
            state.env[key] = value

        # Update env with any overridden option values
        # NOTE: This needs to remain the first thing that occurs
        # post-parsing, since so many things hinge on the values in env.
        for option in env_options:
            state.env[option.dest] = getattr(options, option.dest)

        # Handle --hosts, --roles, --exclude-hosts (comma separated string =>
        # list)
        for key in ['hosts', 'roles', 'exclude_hosts']:
            if key in state.env and isinstance(state.env[key], basestring):
                state.env[key] = state.env[key].split(',')

        # Handle output control level show/hide
        update_output_levels(show=options.show, hide=options.hide)

        # Handle version number option
        if options.show_version:
            print("Fabric %s" % state.env.version)
            print("ssh (library) %s" % ssh.__version__)
            sys.exit(0)

        # Load settings from user settings file, into shared env dict.
        state.env.update(load_settings(state.env.rcfile))

        # Find local fabfile path or abort
        fabfile = find_fabfile()
        if not fabfile and not remainder_arguments:
            abort("""Couldn't find any fabfiles!

Remember that -f can be used to specify fabfile path, and use -h for help.""")

        # Store absolute path to fabfile in case anyone needs it
        state.env.real_fabfile = fabfile

        # Load fabfile (which calls its module-level code, including
        # tweaks to env values) and put its commands in the shared commands
        # dict
        default = None
        if fabfile:
            docstring, callables, default = load_fabfile(fabfile)
            state.commands.update(callables)

        # Handle case where we were called bare, i.e. just "fab", and print
        # a help message.
        actions = (options.list_commands, options.shortlist, options.display,
                   arguments, remainder_arguments, default)
        if not any(actions):
            parser.print_help()
            sys.exit(1)

        # Abort if no commands found
        if not state.commands and not remainder_arguments:
            abort("Fabfile didn't contain any commands!")

        # Now that we're settled on a fabfile, inform user.
        if state.output.debug:
            if fabfile:
                print("Using fabfile '%s'" % fabfile)
            else:
                print("No fabfile loaded -- remainder command only")

        # Shortlist is now just an alias for the "short" list format;
        # it overrides use of --list-format if somebody were to specify both
        if options.shortlist:
            options.list_format = 'short'
            options.list_commands = True

        # List available commands
        if options.list_commands:
            show_commands(docstring, options.list_format)

        # Handle show (command-specific help) option
        if options.display:
            display_command(options.display)

        # If user didn't specify any commands to run, show help
        if not (arguments or remainder_arguments or default):
            parser.print_help()
            sys.exit(0)  # Or should it exit with error (1)?

        # Parse arguments into commands to run (plus args/kwargs/hosts)
        commands_to_run = parse_arguments(arguments)

        # Parse remainders into a faux "command" to execute
        remainder_command = parse_remainder(remainder_arguments)

        # Figure out if any specified task names are invalid
        unknown_commands = []
        for tup in commands_to_run:
            if crawl(tup[0], state.commands) is None:
                unknown_commands.append(tup[0])

        # Abort if any unknown commands were specified
        if unknown_commands:
            warn("Command(s) not found:\n%s" % indent(unknown_commands))
            show_commands(None, options.list_format, 1)

        # Generate remainder command and insert into commands, commands_to_run
        if remainder_command:
            r = '<remainder>'
            state.commands[r] = lambda: api.run(remainder_command)
            commands_to_run.append((r, [], {}, [], [], []))

        # Ditto for a default, if found
        if not commands_to_run and default:
            commands_to_run.append((default.name, [], {}, [], [], []))

        if state.output.debug:
            names = ", ".join(x[0] for x in commands_to_run)
            print("Commands to run: %s" % names)

        # At this point all commands must exist, so execute them in order.
        for name, args, kwargs, arg_hosts, arg_roles, arg_exclude_hosts \
                in commands_to_run:
            execute(
                name,
                hosts=arg_hosts,
                roles=arg_roles,
                exclude_hosts=arg_exclude_hosts,
                *args, **kwargs
            )

        # If we got here, no errors occurred, so print a final note.
        if state.output.status:
            print("\nDone.")
    except SystemExit:
        # a number of internal functions might raise this one.
        raise
    except KeyboardInterrupt:
        if state.output.status:
            print >> sys.stderr, "\nStopped."
        sys.exit(1)
    except:
        sys.excepthook(*sys.exc_info())
        # we might leave stale threads if we don't explicitly exit()
        sys.exit(1)
    finally:
        disconnect_all()
    sys.exit(0)