def get_connection(host, user, private_key_path):
    """Open a Fabric connection to ``host`` using key-based auth.

    A throwaway ``sudo whoami`` is run first so connectivity and sudo
    rights are verified before the connection is handed back.
    """
    key_kwargs = {"key_filename": private_key_path}
    connection = Connection(host=host, user=user, connect_kwargs=key_kwargs)
    # Smoke-test the link (and sudo access) before returning it.
    connection.sudo("whoami", hide=True)
    return connection
def calls_agent_handler_close_if_enabled(self, Handler, client):
    """Closing a forward_agent connection also closes its agent handler."""
    cxn = Connection('host', forward_agent=True)
    cxn.create_session()
    cxn.close()
    # NOTE: this will need to change if, for w/e reason, we ever want
    # to run multiple handlers at once
    Handler.return_value.close.assert_called_once_with()
class SshClient(object):
    """Async-friendly SSH client wrapping a Fabric ``Connection``.

    Intended to be used as a context manager: the underlying connection
    is created lazily on ``__enter__`` and closed on ``__exit__``.
    Blocking Fabric calls are pushed onto a loop executor so coroutines
    stay responsive.
    """

    def __init__(self, ssh_key, ip):
        # Path to the private key file used for authentication.
        self._ssh_key = ssh_key
        # Target host address.
        self._ip = ip
        # Lazily-created Fabric Connection (see _create_client()).
        self.client = None

    def __enter__(self):
        self._create_client()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def _create_client(self):
        # Build the Connection only once; subsequent calls reuse it.
        if self.client is None:
            self.client = Connection(
                host=self._ip,
                user="******",
                connect_kwargs={"key_filename": self._ssh_key})
        return self.client

    async def execute(self, cmd, ignore_errors=False):
        """Run ``cmd`` on the remote host without blocking the event loop.

        :param cmd: shell command string to execute remotely.
        :param ignore_errors: when True, a non-zero exit returns the
            failed result instead of raising.
        :raises UnexpectedExit: on non-zero exit if ignore_errors is False.
        """
        logging.info("About to run %s on %s", cmd, self._ip)
        try:
            return await run_in_loop_executor(self.client.run, cmd, hide=True)
        except UnexpectedExit as e:
            # Non-zero exit: either surface the failed result or re-raise.
            if ignore_errors:
                return e.result
            raise
        # TODO: handle other exceptions?

    async def put(self, local_file_path, remote_file_path):
        """Uploads local file(s) to remote server, recursively if passed
        a directory.
        """
        if os.path.isdir(local_file_path):
            # NOTE(review): plain `mkdir` fails if the directory already
            # exists and the path is not shell-quoted — confirm inputs
            # never contain spaces / pre-existing dirs.
            await self.execute('mkdir {}'.format(remote_file_path))
            for f in os.listdir(local_file_path):
                # Recurse per entry, mirroring the directory layout.
                await self.put(os.path.join(local_file_path, f),
                               os.path.join(remote_file_path, f))
        else:
            await run_in_loop_executor(self.client.put, local_file_path,
                                       remote=remote_file_path)

    async def get(self, remote_file_path, local_file_path):
        """Downloads remote files to local file system

        TODO: Support recursive mode if passed a directory
        """
        await run_in_loop_executor(self.client.get, remote_file_path,
                                   local=local_file_path)

    def close(self):
        # Only close if we actually opened a live connection.
        if self.client and self.client.is_connected:
            self.client.close()
def short_circuits_if_already_connected(self, client):
    """A second open() on a live connection must not reconnect."""
    conn = Connection('host')
    # Initial open() stores the fixture's mock transport on the object.
    conn.open()
    # The repeat call sees .is_connected (active==True) and bails early.
    conn.open()
    assert client.connect.call_count == 1
def multi_hop_works_ok(self):
    """Chained ProxyJump directives become nested gateway Connections."""
    cxn = self._runtime_cxn(basename="proxyjump_multi")
    # Deepest nesting corresponds to the innermost (last) hop.
    assert cxn.gateway.gateway.gateway == Connection("jumpuser3@jumphost3:411")
    assert cxn.gateway.gateway == Connection("jumpuser2@jumphost2:872")
    assert cxn.gateway == Connection("jumpuser@jumphost:373")
def _create_client(self):
    """Lazily build (and memoize) the underlying Fabric Connection."""
    if self.client is not None:
        return self.client
    self.client = Connection(
        host=self._ip,
        user="******",
        connect_kwargs={"key_filename": self._ssh_key},
    )
    return self.client
def is_connected_still_False_when_connect_fails(self, client):
    """.is_connected stays False if the underlying connect() blows up."""
    client.connect.side_effect = socket.error
    conn = Connection("host")
    try:
        conn.open()
    except socket.error:
        # Expected: fixture's connect() raises; swallow so we can
        # inspect the flag afterwards.
        pass
    assert conn.is_connected is False
def is_connected_still_False_when_connect_fails(self, client):
    """A failed connect() attempt must leave .is_connected False."""
    client.connect.side_effect = socket.error
    c = Connection('host')
    # open() will raise via the mocked connect(); ignore the error and
    # verify the connected flag did not flip.
    try:
        c.open()
    except socket.error:
        pass
    assert c.is_connected is False
def accepts_configuration_value(self):
    """A gateway supplied via config shows up on the Connection."""
    gateway = Connection("jumpbox")
    overrides = {"gateway": gateway, "load_ssh_configs": False}
    # TODO: the fact that they will be eq, but _not_ necessarily be
    # the same object, could be problematic in some cases...
    cxn = Connection("host", config=Config(overrides=overrides))
    assert cxn.gateway == gateway
def ipv6_addresses_work_ok_but_avoid_port_shorthand(self):
    """IPv6 host strings parse intact when port is given as a kwarg."""
    addresses = ("2001:DB8:0:0:0:0:0:1", "2001:DB8::1", "::1")
    for addr in addresses:
        # Bare address: user falls back to the local username.
        anon = Connection(addr, port=123)
        assert anon.user == get_local_user()
        assert anon.host == addr
        assert anon.port == 123
        # user@address form must still keep the v6 host whole.
        named = Connection("somebody@{}".format(addr), port=123)
        assert named.user == "somebody"
        assert named.host == addr
        assert named.port == 123
def _compile_source(conn: Connection, config: str, repo_dir: str, web_dir: str, virtualenv_python: str):
    """Install requirements, byte-compile sources and collect static files.

    :param conn: open connection to the deployment host.
    :param config: deployment config name, interpolated into manage_<config>.py.
    :param repo_dir: repository checkout directory (holds requirements.txt).
    :param web_dir: web application directory to compile/collect within.
    :param virtualenv_python: path to the virtualenv's python interpreter.
    """
    with conn.cd(repo_dir):
        conn.run('venv/bin/pip install --quiet --requirement=requirements.txt')
    with conn.cd(web_dir):
        # Drop stale bytecode before recompiling everything.
        conn.sudo('find . -iname "*.pyc" -delete')
        conn.sudo('{0} -m compileall .'.format(virtualenv_python))
        # collectstatic via the config-specific manage script.
        conn.sudo('{0} manage_{1}.py collectstatic --noinput'.format(
            virtualenv_python, config))
def accepts_configuration_value(self):
    """The 'gateway' config key seeds Connection.gateway."""
    gw = Connection('jumpbox')
    # TODO: the fact that they will be eq, but _not_ necessarily be
    # the same object, could be problematic in some cases...
    config = Config(overrides={'gateway': gw, 'load_ssh_configs': False})
    result = Connection('host', config=config)
    assert result.gateway == gw
def uses_proxycommand_as_sock_for_Client_connect(self, moxy, client):
    "uses ProxyCommand from gateway as 'sock' arg to SSHClient.connect"
    # String gateway -> ProxyCommand under the hood.
    main = Connection('host', gateway="net catty %h %p")
    main.open()
    # %h/%p must have been interpolated before ProxyCommand was built.
    moxy.assert_called_once_with("net catty host 22")
    # And that ProxyCommand instance is what connect() got as its sock.
    assert client.connect.call_args[1]['sock'] is moxy.return_value
def uses_proxycommand_as_sock_for_Client_connect(self, moxy, client):
    "uses ProxyCommand from gateway as 'sock' arg to SSHClient.connect"
    # Open a connection configured with a proxy-command string gateway.
    cxn = Connection("host", gateway="net catty %h %p")
    cxn.open()
    # The template placeholders were filled in with host/port.
    moxy.assert_called_once_with("net catty host 22")
    # The resulting ProxyCommand became the sock kwarg of connect().
    sock = client.connect.call_args[1]["sock"]
    assert sock is moxy.return_value
def lazily_caches_result(self, client):
    """sftp() opens a client once, then hands back the cached object."""
    sentinel1, sentinel2 = object(), object()
    client.open_sftp.side_effect = [sentinel1, sentinel2]
    cxn = Connection("host")
    # TODO: why aren't we just asserting about calls of open_sftp???
    err = "{0!r} wasn't the sentinel object()!"
    first = cxn.sftp()
    assert first is sentinel1, err.format(first)
    # A repeat call must return the cached first value, never sentinel2.
    second = cxn.sftp()
    assert second is sentinel1, err.format(second)
def lazily_caches_result(self, client):
    """Repeated sftp() calls reuse the first open_sftp() result."""
    marker_a, marker_b = object(), object()
    client.open_sftp.side_effect = [marker_a, marker_b]
    conn = Connection('host')
    # TODO: why aren't we just asserting about calls of open_sftp???
    err = "{0!r} wasn't the sentinel object()!"
    got_first = conn.sftp()
    assert got_first is marker_a, err.format(got_first)
    got_second = conn.sftp()
    assert got_second is marker_a, err.format(got_second)
def upload_or_download_files(method, connect_object: 'Connection', local_file_path, remote_file_path):
    """Upload or download a single file over an existing connection.

    Example::

        upload_or_download_files(
            method='put',
            connect_object=xxx,
            local_file_path='/Users/afa/myFiles/tmp/my_spider_logs.zip',
            remote_file_path='/root/myFiles/my_spider_logs.zip')

    :param method: transfer direction, 'put' (upload) or 'get' (download)
    :param connect_object: connection object exposing put()/get()
    :param local_file_path: local file path (must be absolute)
    :param remote_file_path: server file path (must be absolute)
    :return: bool -- True on success, False on failure
    :raises ValueError: if method is neither 'put' nor 'get'
    """
    from os.path import basename

    # Only the file names are needed, for the progress messages below.
    local_file_name = basename(local_file_path)
    remote_file_name = basename(remote_file_path)

    success = False
    if method == 'put':
        try:
            connect_object.put(local=local_file_path, remote=remote_file_path)
            print('[+] 上传 {0} 到server成功!'.format(local_file_name))
            success = True
        except Exception as e:
            # Best-effort: report the failure and return False.
            print(e)
            print('[-] 上传 {0} 到server失败!'.format(local_file_name))
    elif method == 'get':
        try:
            connect_object.get(remote=remote_file_path, local=local_file_path)
            print('[+] 下载 {0} 到本地成功!'.format(remote_file_name))
            success = True
        except Exception as e:
            print(e)
            print('[-] 下载 {0} 到本地失败!'.format(remote_file_name))
    else:
        raise ValueError('method只支持put or get 方法!')

    return success
def basic_invocation(self, Remote, client):
    """sudo() wraps the command in the configured sudo-prompt invocation."""
    # Technically duplicates Invoke-level tests, but ensures things
    # still work correctly at our level.
    cxn = Connection('host')
    cxn.sudo('foo')
    cmd = "sudo -S -p '{}' foo".format(cxn.config.sudo.prompt)
    # NOTE: this is another spot where Mock.call_args is inexplicably
    # None despite call_args_list being populated. WTF. (Also,
    # Remote.return_value is two different Mocks now, despite Remote's
    # own Mock having the same ID here and in code under test. WTF!!)
    assert Remote.mock_calls == [call(cxn), call().run(cmd, watchers=ANY)]
def basic_invocation(self, Remote, client):
    """Plain sudo('foo') produces the prompt-wrapped command string."""
    # Technically duplicates Invoke-level tests, but ensures things
    # still work correctly at our level.
    conn = Connection("host")
    conn.sudo("foo")
    wrapped = "sudo -S -p '{}' foo".format(conn.config.sudo.prompt)
    # NOTE: this is another spot where Mock.call_args is inexplicably
    # None despite call_args_list being populated. WTF. (Also,
    # Remote.return_value is two different Mocks now, despite Remote's
    # own Mock having the same ID here and in code under test. WTF!!)
    expected_calls = [call(conn), call().run(wrapped, watchers=ANY)]
    assert Remote.mock_calls == expected_calls
def update_python_packages(connect_object: Connection):
    '''Update Python dependencies on the remote host (pip itself + fzutils).

    :param connect_object: connection used to run the remote commands
    :return: True on success, False if any command failed
    '''
    print('正在更新相关依赖包...')
    try:
        # connect_object.run('sudo apt-get update --fix-missing && sudo apt-get autoremove && sudo apt-get clean && apt-get -f install && apt-get install unzip --fix-missing')
        # connect_object.run('sudo apt-get install libcurl4-openssl-dev')    # for pycurl
        connect_object.run('pip3 install --upgrade pip')
        # Install/upgrade fzutils from the Douban mirror.
        connect_object.run(
            'pip3 install -i http://pypi.douban.com/simple/ fzutils --trusted-host pypi.douban.com -U'
        )
    except Exception as e:
        # Best-effort: report and signal failure rather than raising.
        print(e)
        return False
    return True
def _forward_remote(self, kwargs, Client, select, mocket):
    """Shared driver asserting forward_remote() behavior for the given kwargs."""
    # TODO: unhappy with how much this duplicates of the code under
    # test, re: sig/default vals
    # Set up parameter values/defaults
    remote_port = kwargs["remote_port"]
    remote_host = kwargs.get("remote_host", "127.0.0.1")
    local_port = kwargs.get("local_port", remote_port)
    local_host = kwargs.get("local_host", "localhost")
    # Mock/etc setup, anything that can be prepped before the forward
    # occurs (which is most things)
    tun_socket = mocket.return_value
    cxn = Connection("host")
    # Channel that will yield data when read from
    chan = Mock()
    chan.recv.return_value = "data"
    # And make select() yield it as being ready once, when called
    select.select.side_effect = _select_result(chan)
    with cxn.forward_remote(**kwargs):
        # At this point Connection.open() has run and generated a
        # Transport mock for us (because SSHClient is mocked). Let's
        # first make sure we asked it for the port forward...
        # NOTE: this feels like it's too limited/tautological a test,
        # until you realize that it's functionally impossible to mock
        # out everything required for Paramiko's inner guts to run
        # _parse_channel_open() and suchlike :(
        call = cxn.transport.request_port_forward.call_args_list[0]
        assert call[1]["address"] == remote_host
        assert call[1]["port"] == remote_port
        # Pretend the Transport called our callback with mock Channel
        call[1]["handler"](chan, tuple(), tuple())
        # Then have to sleep a bit to make sure we give the tunnel
        # created by that callback to spin up; otherwise ~5% of the
        # time we exit the contextmanager so fast, the tunnel's "you're
        # done!" flag is set before it even gets a chance to select()
        # once.
        time.sleep(0.01)
        # And make sure we hooked up to the local socket OK
        tup = (local_host, local_port)
        tun_socket.connect.assert_called_once_with(tup)
    # Expect that our socket got written to by the tunnel (due to the
    # above-setup select() and channel mocking). Need to do this after
    # tunnel shutdown or we risk thread ordering issues.
    tun_socket.sendall.assert_called_once_with("data")
    # Ensure we closed down the mock socket
    mocket.return_value.close.assert_called_once_with()
    # And that the transport canceled the port forward on the remote
    # end.
    assert cxn.transport.cancel_port_forward.call_count == 1
def _forward_remote(self, kwargs, Client, select, mocket):
    """Drive forward_remote() with mocked transport/select and assert tunnel wiring."""
    # TODO: unhappy with how much this duplicates of the code under
    # test, re: sig/default vals
    # Set up parameter values/defaults
    remote_port = kwargs['remote_port']
    remote_host = kwargs.get('remote_host', '127.0.0.1')
    local_port = kwargs.get('local_port', remote_port)
    local_host = kwargs.get('local_host', 'localhost')
    # Mock/etc setup, anything that can be prepped before the forward
    # occurs (which is most things)
    tun_socket = mocket.return_value
    cxn = Connection('host')
    # Channel that will yield data when read from
    chan = Mock()
    chan.recv.return_value = "data"
    # And make select() yield it as being ready once, when called
    select.select.side_effect = _select_result(chan)
    with cxn.forward_remote(**kwargs):
        # At this point Connection.open() has run and generated a
        # Transport mock for us (because SSHClient is mocked). Let's
        # first make sure we asked it for the port forward...
        # NOTE: this feels like it's too limited/tautological a test,
        # until you realize that it's functionally impossible to mock
        # out everything required for Paramiko's inner guts to run
        # _parse_channel_open() and suchlike :(
        call = cxn.transport.request_port_forward.call_args_list[0]
        assert call[1]['address'] == remote_host
        assert call[1]['port'] == remote_port
        # Pretend the Transport called our callback with mock Channel
        call[1]['handler'](chan, tuple(), tuple())
        # Then have to sleep a bit to make sure we give the tunnel
        # created by that callback to spin up; otherwise ~5% of the
        # time we exit the contextmanager so fast, the tunnel's "you're
        # done!" flag is set before it even gets a chance to select()
        # once.
        time.sleep(0.01)
        # And make sure we hooked up to the local socket OK
        tup = (local_host, local_port)
        tun_socket.connect.assert_called_once_with(tup)
    # Expect that our socket got written to by the tunnel (due to the
    # above-setup select() and channel mocking). Need to do this after
    # tunnel shutdown or we risk thread ordering issues.
    tun_socket.sendall.assert_called_once_with("data")
    # Ensure we closed down the mock socket
    mocket.return_value.close.assert_called_once_with()
    # And that the transport canceled the port forward on the remote
    # end.
    assert cxn.transport.cancel_port_forward.call_count == 1
def Iniciar(ctx):
    # Start the web service: run gunicorn on port 80 inside the project dir.
    with Connection('noticiarioiv1819.westus.cloudapp.azure.com', user='******') as c:
        with c.cd('Proyecto-Vengadores'):
            c.run('sudo gunicorn api_web:app -b 0.0.0.0:80')
def sets_missing_host_key_policy(self, Policy, client):
    """Instantiating a Connection installs the missing-host-key policy."""
    # TODO: should make the policy configurable early on
    sentinel = Mock()
    Policy.return_value = sentinel
    Connection("host")
    client.set_missing_host_key_policy.assert_called_once_with(sentinel)
def loses_to_explicit(self):
    """An explicit forward_agent kwarg beats the runtime-config value."""
    # Would be True, as above
    cxn = Connection("runtime", config=self._runtime_config(), forward_agent=False)
    assert cxn.forward_agent is False
def param_comparison_uses_config(self):
    """repr() omits the user when it matches the configured default."""
    conf = Config(overrides={"user": "******"})
    conn = Connection(user="******", host="myhost", port=123, config=conf)
    expected = "<Connection host=myhost port=123>"
    assert repr(conn) == expected
def kwarg_wins_over_config(self):
    """connect_kwargs given at init beat those coming from config."""
    # TODO: should this be more of a merge-down?
    conf = Config(overrides={"connect_kwargs": {"origin": "config"}})
    cxn = Connection("host", connect_kwargs={"origin": "kwarg"}, config=conf)
    assert cxn.connect_kwargs == {"origin": "kwarg"}
def migrate_database(c):
    """Run Django makemigrations + migrate on the deployment host."""
    # NOTE(review): the `c` parameter is immediately shadowed by the new
    # Connection below — presumably a fabric task context arg; confirm.
    with Connection(host=HOST) as c:
        with c.cd(PROJECT_PATH):
            # Generate migrations for the three project apps, then apply.
            c.run(
                f'{VENV_PATH}/bin/python manage.py makemigrations course lesson user'
            )
            c.run(f'{VENV_PATH}/bin/python manage.py migrate')
def merges_sources(self, client, ssh, invoke, kwarg, expected):
    """key_filename values merge from SSH config, invoke config and kwargs.

    Parameterized: ssh/invoke/kwarg flags toggle each source on;
    `expected` is the merged key list connect() should receive.
    """
    config_kwargs = {}
    if ssh:
        # SSH config with 2x IdentityFile directives.
        config_kwargs["runtime_ssh_path"] = join(
            support, "ssh_config", "runtime_identity.conf"
        )
    if invoke:
        # Use overrides config level to mimic --identity use NOTE: (the
        # fact that --identity is an override, and thus overrides eg
        # invoke config file values is part of invoke's config test
        # suite)
        config_kwargs["overrides"] = {
            "connect_kwargs": {"key_filename": ["configured.key"]}
        }
    conf = Config(**config_kwargs)
    connect_kwargs = {}
    if kwarg:
        # Stitch in connect_kwargs value
        connect_kwargs = {"key_filename": ["kwarg.key"]}
    # Tie in all sources that were configured & open()
    Connection(
        "runtime", config=conf, connect_kwargs=connect_kwargs
    ).open()
    # Ensure we got the expected list of keys
    kwargs = client.connect.call_args[1]
    if expected:
        assert kwargs["key_filename"] == expected
    else:
        # No key filenames -> it's not even passed in as connect_kwargs
        # is gonna be a blank dict
        assert "key_filename" not in kwargs
def Prueba(ctx):
    # Run the test suite remotely inside the project directory.
    with Connection('noticiarioiv1819.westus.cloudapp.azure.com', user='******') as c:
        with c.cd('Proyecto-Vengadores'):
            c.run('pytest')
def run_remote(params_path, gpu=False, instance_type='m5.large', ami='ami-00b8b0b2dff90dcab', spot_price=0.5):
    """Provision an EC2 spot instance and launch the experiment on it.

    :param params_path: experiment parameter file, interpolated into the
        COMMAND/GPU_COMMAND template.
    :param gpu: when True, override AMI/instance type/command for GPU runs.
    :param instance_type: EC2 instance type (ignored when gpu=True).
    :param ami: AMI id to boot (ignored when gpu=True).
    :param spot_price: max spot bid in USD.
    """
    command = COMMAND % params_path
    if gpu:
        # GPU runs use a dedicated AMI, instance type and command template.
        ami = 'ami-03fd6608775f924b8'
        instance_type = 'g3.4xlarge'
        spot_price = 0.5
        command = GPU_COMMAND % params_path
    instance = request_instance(instance_type, ami, spot_price, params_path)
    with create_parasol_zip() as parasol_zip, Connection(
            instance, user="******",
            connect_kwargs={"key_filename": PEM_FILE}) as conn:
        print("Running remote experiment...")
        # Ship the code bundle and unpack it on the instance.
        conn.put(parasol_zip)
        conn.run(
            "mkdir parasol; unzip -o parasol.zip -d parasol; rm parasol.zip",
            hide='stdout')
        conn.run("PIPENV_YES=1 pipenv run python setup.py develop",
                 hide='stdout')
        conn.run("PIPENV_YES=1 pipenv run pip install deepx --upgrade",
                 hide='stdout')
        # Materialize the run script from the chosen command template.
        conn.run("echo \"%s\" > run.py" % command, hide='stdout')
        # Detached tmux session: run the experiment, then power the box off
        # so the spot instance doesn't keep billing after completion.
        conn.run(
            "tmux new-session -d -s 'experiment' \"xvfb-run -s '-screen 0 1400x900x24' pipenv run python run.py; sudo poweroff\""
        )
def _read_loggers(self):
    """
    Read the log files for GROMACS software to get step values

    Populates self.timings keyed by remote log path; a parsed value of
    '0.0' is recorded as 'Completed'.
    """
    from fabric.connection import Connection
    with Connection(self.hostname, self.user) as portal:
        # Find the file paths
        file_paths_stream = portal.run('find ' + self.target_directory +
                                       ' -type f -name "' + self.target_log +
                                       '"', hide=True)
        file_paths = file_paths_stream.stdout.strip().split('\n')
        # Grep the logs
        for file_path in file_paths:
            stdout_stream = portal.run('tail -n 13 ' + file_path + '',
                                       hide=True)
            # NOTE(review): fragile positional parse of the tail output —
            # assumes the first line holds "... <value> vol ..."; confirm
            # against the actual GROMACS log format.
            stdout = stdout_stream.stdout.strip().split('\n')[0].split(
                'vol')[0].strip().split(' ')[-1]
            if str(stdout) == '0.0':
                stdout = 'Completed'
            self.timings[file_path] = stdout
def uses_configured_user_host_and_port(self, client):
    """user/host/port kwargs are forwarded to SSHClient.connect()."""
    cxn = Connection(user='******', host='myhost', port=9001)
    cxn.open()
    client.connect.assert_called_once_with(
        username='******', hostname='myhost', port=9001,
    )
def run_ghap_job(job):
    """Connect to the GHAP host for `job` and upload/run it there."""
    username = job.ghap_username
    server = job.ghap_ip
    # Password was stashed in the cache under the job id at submit time.
    password = cache.get("ghap_password_%s" % job.id)
    host = "%s@%s" % (username, server)
    with Connection(host=host, connect_kwargs={"password": password}) as c:
        # NOTE(review): `output` is captured but neither returned nor used
        # here — confirm whether callers expect a return value.
        output = upload_to_ghap(c, job, username, password)
def _run_fabric(data):
    """
    Runs fabric commands on a 'remote' host.

    Mutates `data` in place: appends per-command dicts to
    data['responses'] and sets data['status']/'warnings' on failure.
    """
    host = data.get('host')
    if not host:
        # No target host -> record the problem and bail out early.
        app.app.logger.warning("No 'host' specified in request")
        data['status'] = 'failed'
        data['warnings'] = "No 'host' specified in request"
        return False
    data['responses'] = []
    if data.get('cmds'):
        with Connection(host) as c:
            app.app.logger.debug(data.get('cmds'))
            for cmd in data.get('cmds'):
                app.app.logger.debug(cmd)
                rsp = {'cmd': cmd, 'output': None}
                try:
                    cmd_response = c.run(cmd)
                    rsp['output'] = str(cmd_response)
                except UnexpectedExit:
                    # Command exited non-zero: record it, mark the batch
                    # failed, and stop running the remaining commands.
                    app.app.logger.warning('%s CMD non exit 0' % cmd)
                    rsp['output'] = str(UnexpectedExit.__doc__)
                    data['status'] = 'failed'
                    data['responses'].append(rsp)
                    break
                # Success path: append after the try so failures don't
                # get double-recorded.
                data['responses'].append(rsp)
    # NOTE(review): no explicit return on success (implicitly None, not
    # True) — confirm callers only rely on the mutated `data`.
def loses_to_explicit(self):
    """An explicit gateway kwarg overrides the runtime config's."""
    # Would be "my gateway", as above
    cxn = Connection("runtime", config=self._runtime_config(), gateway="other gateway")
    assert cxn.gateway == "other gateway"
def configure_nginx(c):
    """Remove the stock nginx default site and install our site config."""
    with Connection(host=HOST) as c:
        # NOTE(review): `test -d` checks for a *directory*, but
        # sites-enabled/default is normally a file/symlink, so .failed is
        # almost always True here — confirm the intended condition
        # (likely `test -f` and/or the non-failed branch).
        if c.run(f'test -d /etc/nginx/sites-enabled/default', warn=True).failed:
            c.sudo('rm /etc/nginx/sites-enabled/default')
        # Upload our site config into sites-enabled.
        c.put('fab_templates/nginx.conf',
              '/etc/nginx/sites-enabled/course_site_api.conf')
def calls_Remote_run_with_command_and_kwargs_and_returns_its_result(
    self, Remote, client
):
    """run() delegates to Remote.run, forwarding kwargs & returning its value."""
    remote = Remote.return_value
    sentinel = object()
    remote.run.return_value = sentinel
    cxn = Connection('host')
    result_plain = cxn.run("command")
    result_kwargs = cxn.run("command", warn=True, hide='stderr')
    # NOTE: somehow, .call_args & the methods built on it (like
    # .assert_called_with()) stopped working, apparently triggered by
    # our code...somehow...after commit (roughly) 80906c7.
    # And yet, .call_args_list and its brethren work fine. Wha?
    Remote.assert_any_call(cxn)
    remote.run.assert_has_calls(
        [call("command"), call("command", warn=True, hide='stderr')]
    )
    assert result_plain is sentinel
    assert result_kwargs is sentinel
def uses_gateway_channel_as_sock_for_SSHClient_connect(self, Client):
    "uses Connection gateway as 'sock' arg to SSHClient.connect"
    # Setup: two SSHClient mocks — one for the gateway, one for the
    # main connection.
    mock_gw = Mock()
    mock_main = Mock()
    Client.side_effect = [mock_gw, mock_main]
    gw = Connection('otherhost')
    gw.open = Mock(wraps=gw.open)
    main = Connection('host', gateway=gw)
    main.open()
    # Expect gateway is also open()'d
    gw.open.assert_called_once_with()
    # Expect direct-tcpip channel open on 1st client
    open_channel = mock_gw.get_transport.return_value.open_channel
    kwargs = open_channel.call_args[1]
    assert kwargs['kind'] == 'direct-tcpip'
    # FIX: the original `assert kwargs['dest_addr'], ('host' == 22)` only
    # asserted truthiness (with a bogus, always-False assert message);
    # actually compare against the expected (host, port) tuple.
    assert kwargs['dest_addr'] == ('host', 22)
    # Expect result of that channel open as sock arg to connect()
    sock_arg = mock_main.connect.call_args[1]['sock']
    assert sock_arg is open_channel.return_value
def is_connected_True_when_successful(self, client):
    """.is_connected flips to True after a successful open()."""
    conn = Connection('host')
    conn.open()
    assert conn.is_connected is True
def activates_paramiko_agent_forwarding_if_configured(
    self, Handler, client
):
    """forward_agent=True wires the new channel into an agent handler."""
    cxn = Connection('host', forward_agent=True)
    channel = cxn.create_session()
    Handler.assert_called_once_with(channel)
def calls_open_for_you(self, Remote, client):
    """sudo() implicitly opens the connection first."""
    cxn = Connection('host')
    cxn.open = Mock()
    cxn.sudo("command")
    assert cxn.open.called
def has_no_required_args_and_returns_None(self, client):
    """close() takes no arguments and returns nothing."""
    cxn = Connection('host')
    cxn.open()
    assert cxn.close() is None
def short_circuits_if_not_connected(self, client):
    """close() is a no-op when the connection was never opened."""
    cxn = Connection('host')
    # Won't trigger close() on client because it'll already think it's
    # closed (due to no .transport & the behavior of .is_connected)
    cxn.close()
    assert not client.close.called
def calls_SSHClient_close(self, client):
    "calls paramiko.SSHClient.close()"
    cxn = Connection('host')
    cxn.open()
    # Closing the Connection must cascade into the paramiko client.
    cxn.close()
    client.close.assert_called_with()
def calls_open_for_you(self, client):
    """create_session() implicitly opens the connection first."""
    cxn = Connection('host')
    cxn.open = Mock()
    cxn.transport = Mock()  # so create_session no asplode
    cxn.create_session()
    assert cxn.open.called
def client_defaults_to_a_new_SSHClient(self):
    """A fresh Connection owns an unconnected paramiko SSHClient."""
    inner = Connection('host').client
    assert isinstance(inner, SSHClient)
    # Not opened yet, so no transport exists.
    assert inner.get_transport() is None
def calls_Transfer_put(self, Transfer):
    "calls Transfer.put()"
    cxn = Connection('host')
    cxn.put('meh')
    # Transfer is constructed around the connection, then put() delegated.
    Transfer.assert_called_with(cxn)
    Transfer.return_value.put.assert_called_with('meh')