def copyDirectory(user, directoryName, destinationPath, publicIPs, keyFile):
    """Recursively copy the local directory tree to every remote host.

    :param user: SSH login user name.
    :param directoryName: local directory to copy; the same relative layout
        is recreated on the remote side.
    :param destinationPath: unused here; kept for interface compatibility.
    :param publicIPs: iterable of remote host IP addresses.
    :param keyFile: path to the SSH private key file.
    """
    for IPAddress in publicIPs:
        config = Config({'identity': 'us-1east.pem'})
        connection = Connection(host=IPAddress, user=user, config=config,
                                # was ['' + keyFile + ''] — the empty-string
                                # concatenations were no-ops
                                connect_kwargs={'key_filename': [keyFile]})
        filePaths = []
        connection.run('mkdir ' + directoryName)
        for root, directories, filenames in os.walk(directoryName):
            # Recreate the directory layout on the remote before uploading files.
            for directory in directories:
                directoryPath = os.path.join(root, directory)
                index = directoryPath.find('/')
                # BUG FIX: Python 2 print statement is a SyntaxError under
                # Python 3, which the fabric-2 API used here requires.
                print(directoryPath[index + 1:])
                connection.run('mkdir ' + directoryPath)
            for filename in filenames:
                filePaths.append(os.path.join(root, filename))
        # Upload every collected file, preserving permission bits.
        for filePath in filePaths:
            connection.put(filePath, remote=filePath, preserve_mode=True)
def __init__(self, hostname='', user="******", port=22, connect_timeout=60, password="",  # pylint: disable=too-many-arguments
             key_file=None, extra_ssh_options=""):
    """Initialize the remote SSH command runner and start the keep-alive probe.

    :param hostname: remote host to connect to (forwarded to the base class).
    :param user: SSH login user.
    :param port: SSH port.
    :param connect_timeout: connection timeout in seconds.
    :param password: SSH password (forwarded to the base class).
    :param key_file: path to the private key file; ``~`` is expanded below.
    :param extra_ssh_options: extra options appended to ssh invocations.
    """
    # Connection parameters.
    self.key_file = key_file
    self.port = port
    self.extra_ssh_options = extra_ssh_options
    self.connect_timeout = connect_timeout
    self._use_rsync = None
    # Throwaway known_hosts file so host keys never pollute the user's own.
    self.known_hosts_file = tempfile.mkstemp()[1]
    # State for the background thread that waits for SSH to come up.
    self._ssh_up_thread = None
    self._ssh_is_up = threading.Event()
    self._ssh_up_thread_termination = threading.Event()
    # Disable system ssh-config loading and host-key checking; keep the
    # connection alive with periodic server-alive probes.
    self.ssh_config = Config(overrides={
        'load_ssh_config': False,
        'UserKnownHostsFile': self.known_hosts_file,
        'ServerAliveInterval': 300,
        'StrictHostKeyChecking': 'no'})
    self.connect_config = {'pkey': RSAKey(filename=os.path.expanduser(self.key_file))}
    self.auth_sleep_time = 30  # sleep time between failed authentication attempts
    super(RemoteCmdRunner, self).__init__(hostname, user, password)
    # Must run last: relies on all of the state initialized above.
    self.start_ssh_up_thread()
def connect(ssh_user_name, host_fqdn, ssh_port, ssh_user_password):
    """Open a password-authenticated SSH connection and sanity-check it.

    :param ssh_user_name: login user.
    :param host_fqdn: remote host name.
    :param ssh_port: SSH port.
    :param ssh_user_password: password used for both login and sudo.
    :return: a verified fabric Connection.
    """
    overrides = {
        'sudo': {
            'password': ssh_user_password,
            'prompt': '[sudo] password: \n',
        },
        'run': {'echo': True},
    }
    conn = Connection(
        host=host_fqdn,
        user=ssh_user_name,
        port=ssh_port,
        connect_kwargs={"password": ssh_user_password},
        config=Config(overrides=overrides),
    )
    # Verify the connection actually works before handing it back.
    conn.run('echo "Login user is $(whoami)"')
    return conn
def backup_db(ctx):
    """Dump the meal_project MySQL database on the remote host.

    Must be invoked as ``fab backup-db``. The dump lands in
    ``~/mysql_backups/meal_project_<timestamp>.sql`` on the remote machine.

    :param ctx: invoke context (unused; required by the fab task interface).
    """
    timestr = time.strftime("%Y%m%d-%H%M%S")
    sudo_pass = getpass.getpass("What's your sudo password?")
    config = Config(overrides={"sudo": {"password": sudo_pass}})
    c = Connection("*****@*****.**", config=config)
    # NOTE(review): the password is interpolated into the shell command and
    # will be visible in the remote process list; consider
    # --defaults-extra-file instead.
    c.sudo("docker exec mysql mysqldump -u root -p'" + sudo_pass
           + "' meal_project > ~/mysql_backups/meal_project_" + timestr + ".sql")
def deploy(host, user, password, version, resetdb, resetconf, component):
    '''Deploy the listed components to a remote machine via fabric.

    example:
        python3 -m tao.tools deploy_bigdata -h 10.1.2.13 -p dx@666 -c usercenter:master -c console:master
        python -m tao.tools deploy_bigdata -h 10.1.2.138 -u dingxiang -p dx@666 -c usercenter:feature/feature_20181211_ci
    '''
    sudo_cfg = Config(overrides={'sudo': {'password': password}})
    # Nothing to do without an explicit component list.
    if not component:
        _echo("no components to deploy")
        raise RuntimeError
    with Connection(host, user, config=sudo_cfg,
                    connect_kwargs={'password': password}) as conn:
        # Stash the deployment flags on the connection for the helpers.
        conn.version = version
        conn.reset_db = resetdb
        conn.reset_conf = resetconf
        for app_item in component:
            _echo('start to deploy %s:%s' % (app_item.name, app_item.version))
            _do_deploy(conn, app_item)
        _echo('done')
def new_tester_ssh_connection(setup_test_container):
    """Return a verified SSH connection to the local test container."""
    cfg = Config()
    cfg.run.hide = True  # suppress command echo/output during probing
    ssh_kwargs = {
        "key_filename": setup_test_container.key_filename,
        "password": "",
        "timeout": 60,
        "banner_timeout": 60,
        "auth_timeout": 60,
    }
    conn = Connection(
        host="localhost",
        user=setup_test_container.user,
        port=setup_test_container.port,
        config=cfg,
        connect_kwargs=ssh_kwargs,
    )
    with conn:
        assert _probe_ssh_connection(conn), "SSH connection can not be established. Aborting"
        return conn
def deploy(c):
    """Pull the latest code, recompile assets, and reload the app on SERVER."""
    sudo_pass = getpass.getpass("Enter your sudo password on %s: " % SERVER)
    c = Connection(SERVER,
                   config=Config(overrides={'sudo': {'password': sudo_pass}}))

    # Pull from GitHub
    c.run('bash -c "cd %s && git pull [email protected]:openstate/%s.git"'
          % (DIR, GIT_REPO))

    # Compile assets — the node container must already be running.
    status = c.sudo('docker inspect --format="{{.State.Status}}" %s'
                    % (NODE_CONTAINER))
    if status.stdout.strip() != 'running':
        raise Exit(
            '\n*** ERROR: The %s container, used to compile the assets, is '
            'not running. Please build/run/start the container.' % (NODE_CONTAINER))
    c.sudo('docker exec %s gulp' % (NODE_CONTAINER))

    # Reload app
    c.run('bash -c "cd %s && touch uwsgi-touch-reload"' % (DIR))
def deploy(c, server='test'):
    """Sync front-end templates and static files into the back-end tree."""
    if server not in HOST_CONF:
        return
    password = getpass.getpass("What's your ssh password?")
    conf = Config(overrides={'connect_kwargs': {'password': password},
                             'run': {'echo': True}})
    host_conf = HOST_CONF[server]
    remote_path = '/home/{user}/'.format(user=host_conf[1])
    my_c = Connection(host=host_conf[0], user=host_conf[1],
                      port=host_conf[2], config=conf)
    # Deploy the front-end code.
    front_code_dir = remote_path + 'workspace/craftsman_front/'
    back_code_dir = remote_path + 'workspace/craftsman_back/'
    with my_c.cd(front_code_dir):
        my_c.run("git reset --hard")
        # The test server tracks the "test" branch explicitly.
        my_c.run("git pull origin test" if server == 'test' else "git pull")
        my_c.run("rsync -r {} {}".format(front_code_dir + 'templates/',
                                         back_code_dir + 'templates/'))
        my_c.run("rsync -r {} {}".format(front_code_dir + 'static/',
                                         back_code_dir + 'static/'))
def backup():
    '''Back up the remote MySQL database and fetch the archive locally.'''
    # Password for sudo commands on the remote host.
    config = Config(overrides={'sudo': {'password': remote_su_pass}})
    # Plain-text SSH login password; agent lookup disabled.
    conn = Connection(ip, user=remote_user, config=config,
                      connect_kwargs={
                          "allow_agent": False,
                          "password": remote_pass
                      })
    f = 'backup-%s.sql' % datetime.now().strftime('%y-%m-%d_%H.%M.%S')
    dump_cmd = ('mysqldump --user={} --password=\'{}\' --single-transaction '
                '--routines --triggers --events --skip-extended-insert {}>{}')
    with conn.cd('/tmp'):
        conn.run(dump_cmd.format(remote_sql_user, remote_sql_pass,
                                 remote_sql_db, f))
        conn.run('tar -czvf %s.tar.gz %s' % (f, f))
        # Pull the archive down, then clean up the remote temp files.
        conn.get('/tmp/%s.tar.gz' % f, 'backup/%s.tar.gz' % f)
        conn.run('rm -f %s' % f)
        conn.run('rm -f %s.tar.gz' % f)
def main(c):
    """Run the configured ``run``/``sudo`` commands on all hosts in parallel.

    :param c: invoke/fabric config-like object with ``hosts``, optional
        ``log``, ``cfg_override``, ``connect_kwargs`` and
        ``commands.run`` / ``commands.sudo``.
    """
    def has_key(key, cfg):
        # Direct membership on keys(); was a lambda building a throwaway
        # list (PEP 8 E731 + needless list()).
        return key in cfg.keys()

    # Nothing to do without hosts.
    if not has_key('hosts', c) and not c.hosts:
        return
    log = c.log if has_key('log', c) else None
    grp = ThreadingGroupSudo(*c.hosts,
                             config=Config(overrides=c.cfg_override),
                             connect_kwargs=c.connect_kwargs)
    if has_key('commands', c):
        if has_key('run', c.commands):
            run_res = grp.run(c.commands.run)
            if log:
                _save_log(log, 'RUN', run_res)
        if has_key('sudo', c.commands):
            sudo_res = grp.sudo(c.commands.sudo)
            if log:
                _save_log(log, 'SUDO', sudo_res)
def publish(ctx):
    """Remotely publish Atlas of Information Management.

    Deploy Extract Management 2.0 by running:

    .. code-block:: console

        cd publish && fab publish && cd ..

    To monitor online status of the site while publishing:

    .. code-block:: console

        watch -n.1 curl -Is host_ip | grep HTTP

    Each publish creates a new instance of the website + gunicorn, starts
    it, reloads the nginx config, then ends and removes the old gunicorn
    processes and finally the old code.
    """
    sudo_config = Config(overrides={"sudo": {"password": config["pass"]}})
    # pylint: disable=R1704
    with Connection(
        host=config["host"],
        user=config["user"],
        config=sudo_config,
        connect_kwargs={"password": config["pass"]},
    ) as ctx:
        ctx.sudo('bash -c "$(curl -kfsSL -H "PRIVATE-TOKEN: %s" "%s")"'
                 % (config["token"], config["sh"]))
def __init__(self, hostname, user="******", port=22, connect_timeout=60, password="",
             key_file=None, wait_key_installed=0, extra_ssh_options=""):
    """Initialize the remote SSH command runner and open the connection.

    :param hostname: remote host to connect to.
    :param user: SSH login user.
    :param port: SSH port.
    :param connect_timeout: connection timeout in seconds.
    :param password: SSH password (forwarded to the base class).
    :param key_file: path to the private key file; ``~`` is expanded below.
    :param wait_key_installed: unused here — presumably consumed by a
        subclass or kept for interface compatibility; TODO confirm.
    :param extra_ssh_options: extra options appended to ssh invocations.
    """
    super(RemoteCmdRunner, self).__init__(hostname, user, password)
    self.key_file = key_file
    self.port = port
    self.extra_ssh_options = extra_ssh_options
    self.connect_timeout = connect_timeout
    self._use_rsync = False
    # Throwaway known_hosts file so host keys never pollute the user's own.
    self.known_hosts_file = tempfile.mkstemp()[1]
    # Disable system ssh-config loading and host-key checking; keep the
    # connection alive with periodic server-alive probes.
    config = Config(overrides={
        'load_ssh_config': False,
        'UserKnownHostsFile': self.known_hosts_file,
        'ServerAliveInterval': 300,
        'StrictHostKeyChecking': 'no'})
    self._create_connection(hostname,
                            user=self.user,
                            port=self.port,
                            config=config,
                            connect_timeout=self.connect_timeout,
                            connect_kwargs={
                                'key_filename': os.path.expanduser(self.key_file),
                            })
def main(command: str, mode: str, with_runbook: bool, params: List[str], run_config: str):
    """Submit a job to all worker hosts from ./ssh_config, then the master.

    :param command: the command to run remotely.
    :param mode: name of a ``CodeDistributionMode`` member (case-insensitive).
    :param with_runbook: upload the runbook to the master before running.
    :param params: extra parameters forwarded to ``run``.
    :param run_config: run-config identifier (used for the results dir).
    """
    mode = CodeDistributionMode[mode.upper()]
    config = Config(runtime_ssh_path='./ssh_config')
    hosts = config.base_ssh_config.get_hostnames()
    # worker0 doubles as the master and runs last, synchronously.
    workers = [x for x in hosts if x.startswith('worker') and x != 'worker0']
    master = 'worker0'
    print(
        f"Submitting the job via {mode} mode\n"
        # BUG FIX: message previously read "the the runbook".
        f"Running {'with' if with_runbook else 'without'} the runbook\n"
        f"Run config: {run_config}"
    )
    if with_runbook:
        runbook_dst = RunConfig(run_config).results_dir
        upload_runbook(Connection(master, config=config), dst=runbook_dst)
    # Kick off workers asynchronously, run the master synchronously,
    # then wait for all async results.
    results = [run(Connection(host, config=config), mode, command, params,
                   run_config, asynchronous=True)
               for host in workers]
    run(Connection(master, config=config), mode, command, params, run_config)
    for result in results:
        result.join()
def create_conn(obj):
    # Given a server/project object, create and cache its connection; a
    # project uses invoke directly to execute local shell commands.
    if obj['objtype'] == K_OBJ_TYPES[0]:
        # Server: build a fabric Connection once and cache it under '_conn_'.
        if '_conn_' not in obj:
            if 'ssh' not in obj:
                six.print_('Server {} has no [ssh] config!'.format(
                    obj['title']))
                return
            # Collect SSH auth options; values may contain template vars.
            ssh_arg = {}
            if 'keyfile' in obj['ssh']:
                ssh_arg['key_filename'] = fill_vars(obj['ssh']['keyfile'])
            if 'keypass' in obj['ssh']:
                ssh_arg['password'] = fill_vars(obj['ssh']['keypass'])
            from fabric import Connection, Config
            if 'sudopass' in obj['ssh']:
                # Separate sudo password configured via fabric overrides.
                obj['_conn_'] = Connection(
                    obj['ssh']['host'],
                    user=obj['ssh']['user'],
                    config=Config(overrides={
                        'sudo': {
                            'password': fill_vars(obj['ssh']['sudopass'])
                        }
                    }),
                    connect_kwargs=ssh_arg)
            else:
                obj['_conn_'] = Connection(obj['ssh']['host'],
                                           user=obj['ssh']['user'],
                                           connect_kwargs=ssh_arg)
            # Probe the connection by printing the remote working directory.
            six.print_('Connecting to {} ...... '.format(obj['title']), end='')
            sys.stdout.flush()
            six.print_('Work path: ', end='')
            obj['_conn_'].run('pwd')
    elif obj['objtype'] == K_OBJ_TYPES[1]:
        # Project: local shell execution via the invoke module itself.
        if '_conn_' not in obj:
            import invoke
            obj['_conn_'] = invoke
def when_config_obj_given_default_paths_are_not_sought(self, method):
    # Supplying an SSHConfig object must skip loading the default paths.
    Config(ssh_config=SSHConfig())
    assert not method.called
def amends_Invoke_runners_map(self):
    # Fabric layers its remote runner classes on top of Invoke's mapping.
    expected = {
        "remote": Remote,
        "remote_shell": RemoteShell,
        "local": Local,
    }
    assert Config().runners == expected
def uses_Fabric_prefix(self):
    # NOTE: see also the integration-esque tests in tests/main.py; this
    # just tests the underlying data/attribute driving the behavior.
    config = Config()
    assert config.prefix == "fabric"
def may_use_lazy_plus_explicit_methods_to_control_flow(self, method):
    # lazy=True must defer loading until the explicit calls below.
    config = Config(lazy=True)
    assert not method.called
    config.set_runtime_ssh_path(self._runtime_path)
    config.load_ssh_config()
    method.assert_called_once_with(self._runtime_path)
def overrides_some_Invoke_defaults(self):
    # Fabric looks for "fabfile" instead of Invoke's default collection name.
    assert Config().tasks.collection_name == "fabfile"
def user_path_subject_to_user_expansion(self, method):
    # TODO: other expansion types? no real need for abspath...
    fake_path = "~/probably/not/real/tho"
    Config(user_ssh_path=fake_path)
    method.assert_any_call(expanduser(fake_path))
def skips_default_paths(self, method):
    # Disabling ssh-config loading must avoid touching the default files.
    Config(overrides={"load_ssh_configs": False})
    assert not method.called
def user_path_loads_ok(self):
    # The user-level ssh config contributes its hosts plus the shared ones.
    kwargs = dict(self._empty_kwargs, user_ssh_path=self._user_path)
    hostnames = Config(**kwargs).base_ssh_config.get_hostnames()
    assert hostnames == {"user", "shared", "*"}
def runtime_path_subject_to_user_expansion(self, method):
    # TODO: other expansion types? no real need for abspath...
    fake_path = "~/probably/not/real/tho"
    Config(runtime_ssh_path=fake_path)
    method.assert_called_once_with(expanduser(fake_path))
def default_file_paths_match_openssh(self, method):
    # With no explicit paths, the same files OpenSSH reads are consulted.
    Config()
    expected_calls = [
        call(expanduser("~/.ssh/config")),
        call("/etc/ssh/ssh_config"),
    ]
    method.assert_has_calls(expected_calls)
def system_path_loads_ok(self):
    # The system-level ssh config contributes its hosts plus the shared ones.
    kwargs = dict(self._empty_kwargs, system_ssh_path=self._system_path)
    hostnames = Config(**kwargs).base_ssh_config.get_hostnames()
    assert hostnames == {"system", "shared", "*"}
def runtime_path_can_be_given_via_config_itself(self, method):
    # ssh_config_path inside the config data behaves like the kwarg.
    Config(overrides={"ssh_config_path": self._runtime_path})
    method.assert_called_once_with(self._runtime_path)
def when_runtime_path_given_other_paths_are_not_sought(self, method):
    # A runtime path short-circuits the user/system file lookup.
    Config(runtime_ssh_path=self._runtime_path)
    method.assert_called_once_with(self._runtime_path)
def config_obj_prevents_loading_runtime_path_too(self, method):
    # An explicit SSHConfig object wins even over a runtime path.
    Config(ssh_config=SSHConfig(), runtime_ssh_path=self._system_path)
    assert not method.called
def load_ssh_config(ssh_config_file):
    """Build a fabric Config reading the given ssh-config file from BASE_DIR.

    Only the basename of *ssh_config_file* is honoured, so the file is
    always resolved relative to BASE_DIR.
    """
    basename = os.path.basename(ssh_config_file)
    return Config(runtime_ssh_path=os.path.join(BASE_DIR, basename))
def object_can_be_given_explicitly_via_ssh_config_kwarg(self):
    # The exact object passed in must end up as base_ssh_config.
    ssh_conf = SSHConfig()
    config = Config(ssh_config=ssh_conf)
    assert config.base_ssh_config is ssh_conf