def get_tasks():
    path_list_to_search = [
        # Current directory
        os.getcwd(),
        # Parent directory
        os.path.abspath(os.path.join(os.getcwd(), os.pardir))
    ]
    deploy_config_file_path = find_file_in_path(DEPLOYMENT_CONFIG_FILE, path_list_to_search)
    if deploy_config_file_path:
        with open(deploy_config_file_path, "r") as deploy_config_file:
            data = deploy_config_file.read()
    else:
        raise MissingConfiguration(
            "Configuration file `{0}` was not found in `{1}`".format(DEPLOYMENT_CONFIG_FILE, path_list_to_search)
        )

    try:
        deployment_data = json.loads(data)
    except ValueError as e:
        raise InvalidConfiguration(
            "Cannot load your deployment configuration. JSON file is probably broken. "
            "Additional message: %s" % e.message)

    for target, options in deployment_data.items():
        yield target, task(name=target)(function_builder(target, options))
        __all__.append(target)
        globals()[target] = task(name=target)(function_builder(target, options))

    for fabric_task in [venv_run, deploy, backup, update_python_tools, stop, start, restart,
                        graceful_restart, kill, kill_celery, status, check, clean, check_urls,
                        npm, get_media, rebuild_staticfiles, rebuild_virtualenv, get_dumps,
                        dump_db, drop_schema, shell_plus, migrate, manage, pull, pip_install,
                        register_deployment, gulp]:
        yield fabric_task.__name__, fabric_task
def main(argv, args):
    task = None
    local_task = True
    if args.args:
        if '-a' in argv:
            argv.remove('-a')
        if '--args' in argv:
            argv.remove('--args')
        for a in args.args:
            argv.remove(a)
    # print(argv, args)
    # return
    if len(argv) == 2:
        task_name = argv[1]
        if task_name in globals():
            task = globals()[task_name]
    elif len(argv) == 3:
        task_name = argv[2]
        if task_name in globals():
            task = globals()[task_name]
        server_alias = argv[1]
        local_task = False
    else:
        print('Not enough (or mistyped) arguments')

    if task is not None:
        if not local_task:
            server = get_server(server_alias)
            env.host_string = '%(ssh_user)s@%(ip)s' % server
            env.passwords[env.host_string] = server['ssh_password']
            globals()['server'] = server
        config = db.en_clients.find({'title': 'Nervarin'})[0]
        globals()['config'] = config
        if args.args is not None:
            task(*args.args)
        else:
            task()
    else:
        print('Unknown task')
def test_passes_all_regular_args_to_run(self):
    def foo(*args):
        return args
    random_args = tuple(
        [random.randint(1000, 2000) for i in range(random.randint(1, 5))]
    )
    task = tasks.WrappedCallableTask(foo)
    self.assertEqual(random_args, task(*random_args))
def __createTasks(self):
    if not hasattr(self._getModule(), self.name):
        module = imp.new_module('{}.{}'.format(self._getModuleName(), self.name))
        setattr(self._getModule(), self.name, module)
    else:
        module = getattr(self._getModule(), self.name)

    for fname, f in self.getCommands().items():
        f = getattr(self, f)
        message = '{}{} {}'.format(self._messagePrefix(), self.name, fname)
        if hasattr(module, fname):
            pf = getattr(module, fname)

            def chain(a, b):
                def _f():
                    fab_api.execute(a)
                    fab_api.execute(b)
                _f.__doc__ = message
                return _f

            f = chain(pf, f)
        else:
            try:
                f.__func__.__doc__ = message
            except AttributeError:
                f.__doc__ = message
        setattr(module, fname, task(name=fname)(f))

    ManagedTask.__tasks.append(self)
def inner(*args, **kwargs):
    hosts = kwargs.pop('hosts', None)
    user = kwargs.pop('user', None)
    quiet = kwargs.pop('quiet', True)
    if user is not None:
        confirm_passwd()
    with settings(parallel=True):
        return task(*args, user=user, quiet=quiet, hosts=hosts, **kwargs)
def get_tasks():
    path_list_to_search = [
        # Current directory
        os.getcwd(),
        # Parent directory
        os.path.abspath(os.path.join(os.getcwd(), os.pardir))
    ]
    deploy_config_file_path = find_file_in_path(DEPLOYMENT_CONFIG_FILE, path_list_to_search)
    if deploy_config_file_path:
        with open(deploy_config_file_path, "r") as deploy_config_file:
            data = deploy_config_file.read()
    else:
        raise MissingConfiguration(
            "Configuration file `{0}` was not found in `{1}`".format(DEPLOYMENT_CONFIG_FILE, path_list_to_search)
        )

    try:
        deployment_data = json.loads(data)
    except ValueError as e:
        raise InvalidConfiguration(e.message)

    for target, options in deployment_data.items():
        yield target, task(name=target)(function_builder(target, options))
        __all__.append(target)
        globals()[target] = task(name=target)(function_builder(target, options))

    for fabric_task in [venv_run, deploy, backup, update_python_tools, restart, graceful_restart,
                        status, check, clean, check_urls, npm, get_media, rebuild_staticfiles,
                        get_dumps, get_database_engine, dump_db, gulp]:
        yield fabric_task.__name__, fabric_task
def load_config_task(_func):
    def func(*args, **kw):
        from fabric.state import env
        env['_initialized'] = True
        return _func(*args, **kw)
    func.__name__ = _func.__name__
    return task(func)
def __init__(self, **kw):
    super(Service, self).__init__(**kw)
    Service.__services.append(self)
    all_services = ','.join([x.name for x in self.__services])
    for key, value in self.__allMethods().items():
        value.__func__.__doc__ = (
            '{} all services ({})'.format(key, all_services))
        setattr(self._getModule(), key, task(name=key)(value))
def test_passes_all_keyword_args_to_run(self):
    def foo(**kwargs):
        return kwargs
    random_kwargs = {}
    for i in range(random.randint(1, 5)):
        random_key = ("foo", "bar", "baz", "foobar", "barfoo")[i]
        random_kwargs[random_key] = random.randint(1000, 2000)
    task = tasks.WrappedCallableTask(foo)
    self.assertEqual(random_kwargs, task(**random_kwargs))
def bootstrap():
    """
    Add state- and role-tasks, i.e. app@live
    Import blueprint libraries
    """
    fabric.state.env.update({
        'user': '******',
        'sudo_user': '******',
        'colorize_errors': True,
        'skip_unknown_tasks': True,
        'merge_states': True,
        'prompt_hosts': True,
        'forward_agent': True,
        'sudo_prefix': "sudo -S -E -p '%(sudo_prompt)s' SSH_AUTH_SOCK=$SSH_AUTH_SOCK",
    })

    for env_name, env in fabric.state.env.states.items():
        if env_name == 'default':
            continue

        task_name = '@{env}'.format(env=env_name)
        state_task = partial(dispatch, env_name)
        docstring = 'switch to configured Fab env "{env}"'.format(env=env_name)
        state_task.__doc__ = docstring
        fabric.state.commands[task_name] = task(state_task)

        roledefs = env.get('roledefs')
        if roledefs:
            for role_name in roledefs.keys():
                task_name = '{role}@{env}'.format(role=role_name, env=env_name)
                state_task = partial(dispatch, env_name, roles=[role_name])
                docstring = 'switch to configured Fab env "{env}", ' \
                            'and use role "{role}"'.format(env=env_name, role=role_name)
                state_task.__doc__ = docstring
                fabric.state.commands[task_name] = task(state_task)

    load_blueprints()
def test_calling_the_object_is_the_same_as_run(self):
    random_return = random.randint(1000, 2000)
    def foo():
        return random_return
    task = tasks.WrappedCallableTask(foo)
    self.assertEqual(task(), task.run())
def test_dispatches_to_wrapped_callable_on_run(self):
    random_value = "some random value %d" % random.randint(1000, 2000)
    def foo():
        return random_value
    task = tasks.WrappedCallableTask(foo)
    self.assertEqual(random_value, task())
def test_decorator_closure_hiding():
    """
    @task should not accidentally destroy decorated attributes from @hosts/etc
    """
    from fabric.decorators import task, hosts
    def foo():
        print(env.host_string)
    foo = task(hosts("me@localhost")(foo))
    eq_(["me@localhost"], foo.hosts)
def task_group(name, task_list, package=None):
    def run_all():
        for i in task_list:
            if isinstance(i, (str, unicode)):
                pass
            elif not callable(i):
                i = getattr(i, 'run')
            fab_api.execute(i)

    if package is None:
        package = __import__(
            os.path.splitext(os.path.split(env.real_fabfile)[1])[0])
    if isinstance(package, dict):
        package[name] = task(name=name)(run_all)
        return package[name]
    else:
        setattr(package, name, task(name=name)(run_all))
        return getattr(package, name)
def _getModule(self):
    if ManagedTask.__module is None:
        ManagedTask.__module = imp.new_module(self._getModuleName())
        top_package = __import__(
            os.path.splitext(os.path.split(env.real_fabfile)[1])[0])
        setattr(top_package, self._getModuleName(), ManagedTask.__module)
        setattr(ManagedTask.__module, 'run',
                task(name="run")(ManagedTask.__runAll))
    return ManagedTask.__module
def test_decorator_incompatibility_on_task():
    from fabric.decorators import task, hosts, runs_once, roles
    def foo():
        return "foo"
    foo = task(foo)

    # since we aren't setting foo to be the newly decorated thing, its cool
    hosts('me@localhost')(foo)
    runs_once(foo)
    roles('www')(foo)
def test_decorator_closure_hiding():
    from fabric.decorators import task, hosts
    def foo():
        print env.host_string
    foo = hosts("me@localhost")(foo)
    foo = task(foo)
    # this broke in the old way, due to closure stuff hiding in the
    # function, but task making an object
    eq_(["me@localhost"], foo.hosts)
def test_decorator_closure_hiding():
    """
    @task should not accidentally destroy decorated attributes from @hosts/etc
    """
    from fabric.decorators import task, hosts
    def foo():
        print env.host_string
    foo = task(hosts("me@localhost")(foo))
    eq_(["me@localhost"], foo.hosts)
def test_task_will_invoke_provided_class():
    def foo():
        pass
    fake = Fake()
    fake.expects("__init__").with_args(foo)
    fudge.clear_calls()
    fudge.clear_expectations()

    foo = decorators.task(foo, task_class=fake)

    fudge.verify()
def config_required_task(_func):
    def func(*args, **kw):
        from fabric.state import env
        if not env.get('_initialized'):
            raise Exception("You must load config to "
                            "execute task '{}'".format(_func.__name__))
        return _func(*args, **kw)
    func.__name__ = _func.__name__
    return task(func)
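A minimal usage sketch (not from the original sources), assuming a fabfile that has both the load_config_task factory shown earlier and config_required_task above; the task bodies are hypothetical placeholders.

def load_config():
    """Read settings into fabric.state.env (hypothetical placeholder)."""

def deploy():
    """Deploy the project (hypothetical placeholder)."""

# load_config_task marks env['_initialized']; config_required_task refuses
# to run its wrapped task until that flag is set.
load_config = load_config_task(load_config)
deploy = config_required_task(deploy)
# `fab load_config deploy` succeeds; `fab deploy` alone raises the
# "You must load config to execute task 'deploy'" exception.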
def test_task_passes_args_to_the_task_class():
    random_vars = ("some text", random.randint(100, 200))
    def foo():
        pass
    fake = Fake()
    fake.expects("__init__").with_args(foo, *random_vars)
    fudge.clear_calls()
    fudge.clear_expectations()

    foo = decorators.task(foo, task_class=fake, *random_vars)
    fudge.verify()
def _create_stage(name, stage_config):
    def stage_wrap(*args, **kwargs):
        for key, value in stage_config.iteritems():
            setattr(env, key, value)
        env.exists = exists
        env.run = run
        env.cd = cd
    stage_wrap.__name__ = name
    return task(stage_wrap)
def service_task(name, action, check_status=False):
    partial_service = partial(service, name, action, check_status)
    pretty_action = action.replace('-', ' ').capitalize()
    partial_service.__doc__ = '{} {}'.format(pretty_action, name)
    return task(partial_service)
    ssl_path = 'ssl/logstash-forwarder.crt'
    if not os.path.exists(blueprint.get_user_template_path(ssl_path)):
        download_server_ssl_cert(ssl_path)
    blueprint.upload('ssl/logstash-forwarder.crt', '/etc/pki/tls/certs/')

    if uploads:
        restart('forwarder')


def service(target=None, action=None):
    """
    Debian service dispatcher for logstash server and forwarder
    """
    if not target:
        abort(
            'Missing logstash service target argument, start:<server|forwarder|both>'
        )
    if target in ('server', 'both'):
        debian.service('logstash', action, check_status=False)
    if target in ('forwarder', 'both'):
        debian.service('logstash-forwarder', action, check_status=False)


start = task(partial(service, action='start'))
stop = task(partial(service, action='stop'))
restart = task(partial(service, action='restart'))
start.__doc__ = 'Start logstash'
stop.__doc__ = 'Stop logstash'
restart.__doc__ = 'Restart logstash'
def test_task_returns_an_instance_of_wrappedfunctask_object():
    def foo():
        pass
    task = decorators.task(foo)
    ok_(isinstance(task, tasks.WrappedCallableTask))
""" Show program(s) status, shortcut to supervisorctl status :param program: Optional program to query status """ ctl('status', program=program) def service(command, program=None): if not program: debian.service('supervisor', command) else: ctl(command, program) start = task(partial(service, 'start')) stop = task(partial(service, 'stop')) restart = task(partial(service, 'restart')) start.__doc__ = 'Start supervisor or start program(s)' stop.__doc__ = 'Stop supervisor or stop program(s)' restart.__doc__ = 'Restart supervisor or restart program(s)' @task def reload(program=None): """ Reload supervisor or reload program(s), via SIGHUP :param program: The program to reload (all|exact|pattern). If not given, supervisor service will reload """
def get_hosts(command, *args):
    if isinstance(command, Task):
        return command.get_hosts(*args)
    return task(command).get_hosts(*args)
from functools import partial

from fabric.decorators import task

from refabric.api import info
from refabric.context_managers import sudo
from refabric.contrib import blueprints

from . import debian

__all__ = ['setup', 'configure', 'start', 'stop', 'restart']

blueprint = blueprints.get(__name__)

start = task(partial(debian.service, 'apm-server', 'start', check_status=False))
stop = task(partial(debian.service, 'apm-server', 'stop', check_status=False))
restart = task(partial(debian.service, 'apm-server', 'restart', check_status=False))

start.__doc__ = 'Start beats'
stop.__doc__ = 'Stop beats'
restart.__doc__ = 'Restart beats'


@task
def setup():
    """
    Setup apm-server
    """
    from .elasticsearch import add_elastic_repo
blueprint = blueprints.get(__name__)


def service(target=None, action=None):
    if blueprint.get('infrastructure', False):
        debian.service('newrelic-infra', action, check_status=False)
    if blueprint.get('sysmon', False):
        debian.service('newrelic-sysmond', action, check_status=False)
    if blueprint.get('plugins', None):
        debian.service('newrelic-plugin-agent', action, check_status=False)


start = task(partial(service, action='start'))
stop = task(partial(service, action='stop'))
restart = task(partial(service, action='restart'))
start.__doc__ = 'Start newrelic agent'
stop.__doc__ = 'Stop newrelic agent'
restart.__doc__ = 'Restart newrelic agent'


@task
def setup():
    """
    Install and configure newrelic server
    """
    install()
    configure()
def project_task(key):
    def _task():
        activate_project(key)
    _task.__doc__ = "Selects this project for the next commands."
    return task(name=key)(_task)
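A hedged sketch of how a factory like project_task above is typically driven: one selection task per configured project, registered into the fabfile's globals at import time. The PROJECTS tuple is an assumption standing in for the original module's configuration.

PROJECTS = ('website', 'api')  # assumed project keys, for illustration only
for _key in PROJECTS:
    # project_task returns a WrappedCallableTask named after the key,
    # so `fab website` / `fab api` selects the matching project.
    globals()[_key] = project_task(_key)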
    # clean up build environment
    shutil.rmtree(env['build_dir'])
    print 'finished running %s for %s' % (_task_name, target)


############# MAIN EXECUTION ################

#############################
# LOAD BUILD CONFIGURATIONS
#############################
available_tasks = [f.replace('.json', '')
                   for f in os.listdir(os.path.join(AUTOMATION_DIR, 'common'))]
available_targets = [d for d in os.listdir(AUTOMATION_DIR) \
                     if os.path.isdir(os.path.join(AUTOMATION_DIR, d)) and not d == 'common']

# dynamically construct a fabric task for each build task found
for task_name in available_tasks:
    # load common configuration for dynamic task instantiation
    json_data = _load_config(os.path.join(AUTOMATION_DIR, 'common', '%s.json' % task_name), inject=False)
    task_desc = json_data['meta']['desc']

    # construct new task and inject into global namespace so fab can find it
    f = lambda t=DEFAULT_TARGET: _build(t)  # create a lambda from `_build` template
    f.__name__ = task_name                  # rename lambda
    f.__doc__ = task_desc                   # re-doc lambda
    globals()[f.__name__] = task(f)         # decorate the lambda and inject into global namespace for fab to find

# load universal common configuration
_load_config(os.path.join(AUTOMATION_DIR, 'common.json'))

# fab task will be executed after this point
""" Show program(s) status, shortcut to supervisorctl status :param program: Optional program to query status """ ctl('status', program=program) def service(command, program=None): if not program: debian.service('supervisor', command) else: ctl(command, program) start = task(partial(service, 'start')) stop = task(partial(service, 'stop')) restart = task(partial(service, 'restart')) start.__doc__ = 'Start supervisor or start program(s)' stop.__doc__ = 'Stop supervisor or stop program(s)' restart.__doc__ = 'Restart supervisor or restart program(s)' @task def reload(program=None): """ Reload supervisor or reload program(s), via SIGHUP :param program: The program to reload (all|exact|pattern). If not given, the supervisor service will reload """
def aws_task(fn):
    def wrapper(*args, **kwargs):
        boto = ec2_connection()
        return fn(boto, *args, **kwargs)
    return task(name=fn.func_name)(wrapper)
def service_task(name, action, check_status=False, show_output=False):
    partial_service = partial(service, name, action, check_status, show_output)
    pretty_action = action.replace('-', ' ').capitalize()
    partial_service.__doc__ = '{} {}'.format(pretty_action, name)
    return task(partial_service)
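A hedged usage sketch for the service_task factory above; 'nginx' and the action strings are illustrative, and `service` is whatever Debian service helper the surrounding module already provides.

start_nginx = service_task('nginx', 'start')
reload_nginx = service_task('nginx', 'force-reload', show_output=True)
# The generated docstrings follow the factory's formatting:
#   start_nginx.__doc__  == 'Start nginx'
#   reload_nginx.__doc__ == 'Force reload nginx'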
# coding=utf-8
import os
import sys

from fabric.decorators import task
from fabric.state import env

from django_fabric import App

from fab_local import settings

sys.path.append(os.path.dirname(__file__))

env.user = settings['USER']
env.hosts = [settings['HOST']]


class FriggApp(App):
    project_package = 'frigg'
    test_settings = 'frigg.settings.test'


site = FriggApp(
    project_paths=settings['PROJECT_PATHS'],
    urls=settings['URL'],
    restart_command=settings['RESTART_COMMAND'],
)

deploy = task(site.deploy)
test = task(site.test)
clone_data = task(site.clone_data, "prod")
    uploads = blueprint.upload("forwarder/logstash-forwarder.conf", "/etc/logstash-forwarder",
                               context=context)

    ssl_path = "ssl/logstash-forwarder.crt"
    if not os.path.exists(blueprint.get_user_template_path(ssl_path)):
        download_server_ssl_cert(ssl_path)
    blueprint.upload("ssl/logstash-forwarder.crt", "/etc/pki/tls/certs/")

    if uploads:
        restart("forwarder")


def service(target=None, action=None):
    """
    Debian service dispatcher for logstash server and forwarder
    """
    if not target:
        abort("Missing logstash service target argument, start:<server|forwarder|both>")
    if target in ("server", "both"):
        debian.service("logstash", action, check_status=False)
    if target in ("forwarder", "both"):
        debian.service("logstash-forwarder", action, check_status=False)


start = task(partial(service, action="start"))
stop = task(partial(service, action="stop"))
restart = task(partial(service, action="restart"))
start.__doc__ = "Start logstash"
stop.__doc__ = "Stop logstash"
restart.__doc__ = "Restart logstash"
import os
import sys

from fabric.decorators import task
from fabric.state import env

from django_fabric import App

from fab_local import settings

sys.path.append(os.path.dirname(__file__))

env.user = settings['USER']
env.hosts = [settings['HOST']]


class FriggApp(App):
    project_package = 'frigg'
    test_settings = 'frigg.settings.test'


site = FriggApp(
    project_paths=settings['PROJECT_PATHS'],
    urls=settings['URL'],
    restart_command=settings['RESTART_COMMAND'],
)

deploy = task(site.deploy)
test = task(site.test)
clone_data = task(site.clone_data, "prod")
""" Show program(s) status, shortcut to supervisorctl status :param program: Optional program to query status """ ctl("status", program=program) def service(command, program=None): if not program: debian.service("supervisor", command) else: ctl(command, program) start = task(partial(service, "start")) stop = task(partial(service, "stop")) restart = task(partial(service, "restart")) start.__doc__ = "Start supervisor or start program(s)" stop.__doc__ = "Stop supervisor or stop program(s)" restart.__doc__ = "Restart supervisor or restart program(s)" @task def reload(program=None): """ Reload supervisor or reload program(s), via SIGHUP :param program: The program to reload (all|exact|pattern). If not given, the supervisor service will reload """