def start_node(node): print "Connecting to node%s with hostname %s." % (node["id"], node["host"]) try: remote = SshMachine(node["host"], port = 22022, user = username, keyfile = path_to_keyfile, ssh_opts=["-o", "StrictHostKeyChecking=no"]) except Exception as e: print "Could not connect to %s: %s" % (node["host"], e) return print "[%s]Connected" % node["id"] try: remote["rm"]("node") except commands.processes.ProcessExecutionError: pass print "[%s]Downloading application..." % node["id"] remote["wget"]("-O", "node", "https://www.dropbox.com/s/mjw7dic2ywk5jrp/node") remote["chmod"]("u+x", "node") print "[%s]Starting python node..." % node["id"] try: remote["./node"]("--id", "%s" % (node["id"]), "--neighbours", json.dumps(neighbourhood[node["id"]]), "%s:%s" % (monitor["host"], monitor["tcp_port"])) except commands.processes.ProcessExecutionError as e: print "[%s]Got an exception: %s" % (node["id"], e) remote.close()
def main(machine, instances, queues=['high', 'default', 'low']):
    r = StrictRedis.from_url(REDIS_URL_RQ)
    machine_workers = [worker for worker in Worker.all(connection=r)
                       if is_local(machine, worker.name) and
                       any(works_on(worker, queue) for queue in queues)]
    print "%d workers running on %s" % (len(machine_workers), machine)
    if len(machine_workers):
        print '\n'.join(
            map(
                lambda m: "%s\t%s\t%s" % (m.name, m.get_state(),
                                          "stopped" if m.stopped else "running"),
                machine_workers))
    machine_info = workers(machine)
    rem = SshMachine(machine_info['hostname'], ssh_opts=SSH_OPTS,
                     **machine_info.get('kwargs', {}))
    dir = rem.path(machine_info['rqdir'])
    with rem.cwd(dir):
        for i in xrange(0, instances - len(machine_workers)):
            rem["./worker.sh"](' '.join(queues))
            print "Worker spawned"
def deploy_content(cls):
    remote = SshMachine(TARGET.DOMAIN)
    with remote.cwd(TARGET.CHECKOUT_PATH):
        print(remote['git']('reset', '--hard'))
        print(remote['git']('clean', '-f', '-d'))
        print(remote['git']('pull'))
    with remote.cwd(TARGET.TOOL_PATH):
        print(remote['adfd']('fix-staging-paths'))
def open_db(urlstr=None):
    urlstr = urlstr or _get_db_url(urlstr)
    url = urlparse(urlstr)
    if url.scheme:
        machine = SshMachine(url.hostname, user=url.username, port=url.port)
    else:
        machine = local
    yield urlstr, machine, machine.path(url.path)
    if url.scheme:
        machine.close()
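# A minimal usage sketch for open_db above. It assumes the generator is wrapped
# with contextlib.contextmanager at its definition site and that _get_db_url and
# the imports (urlparse, SshMachine, local) are available; the URL below is
# illustrative only, not taken from the original code.
from contextlib import contextmanager

open_db_ctx = contextmanager(open_db)

with open_db_ctx("ssh://deploy@db.example.com:22/var/lib/appdb") as (db_url, machine, db_path):
    # machine is an SshMachine for ssh:// URLs, the local machine otherwise
    print(db_url, machine, db_path)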
def run_more_instances(machine, count, queues=['high', 'default', 'low']):
    rem = SshMachine(machine, ssh_opts=SSH_OPTS, keyfile=KEYFILE, user='******')
    dir = rem.path('/home/ec2-user/rq')
    with rem.cwd(dir):
        for i in xrange(0, count):
            rem["./worker.sh"](' '.join(queues))
            print "Worker spawned"
def initializeDBTier(self):
    # Run postgresql server on remote machine 'dbNode'
    remote = SshMachine(self.dbNode, user="******")
    logger.info("Remote connection established to dbNode")
    r_postgres = remote[REMOTE_POSTGRES]
    r_postgres("restart")
    r_postgres("reload")
    logger.info("Remote postgresql server restarted")
    print remote.cwd
    r_ls = remote["ls"]
    print r_ls()
    remote.close()
def setUp(self):
    if sys.platform == "win32":
        self.server = None
        os.environ["HOME"] = os.path.expanduser("~")
        self.remote_machine = SshMachine("localhost")
    else:
        # assume "ssh localhost" is configured to run without asking for password
        self.server = ThreadedServer(SlaveService, hostname="localhost",
                                     ipv6=False, port=18888, auto_register=False)
        self.server._start_in_thread()
        self.remote_machine = SshMachine("localhost")
class SshHost(BaseHost):
    def __init__(self, **kwargs):
        BaseHost.__init__(self, **kwargs)
        self._mach = None
        self.deployment = None

    def connect(self):
        if self.deployment is None:
            self._mach = SshMachine(**self.kwargs)
            self.deployment = DeployedServer(self._mach)
        return self.deployment.classic_connect()

    def close(self):
        if self.deployment is not None:
            self.deployment.close()
            self._mach.close()
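# A minimal usage sketch for SshHost above, assuming BaseHost stores its keyword
# arguments on self.kwargs and that the host accepts key-based ssh; the hostname
# and user are illustrative only, not taken from the original code.
host = SshHost(host="server.example.com", user="deploy")
conn = host.connect()                  # rpyc classic connection over the deployed server
print(conn.modules.platform.node())    # runs platform.node() on the remote side
host.close()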
def test_copy_move_delete(self):
    from plumbum.cmd import touch
    with local.tempdir() as dir:
        (dir / "orog").mkdir()
        (dir / "orog" / "rec").mkdir()
        for i in range(20):
            touch(dir / "orog" / ("f%d.txt" % (i, )))
        for i in range(20, 40):
            touch(dir / "orog" / "rec" / ("f%d.txt" % (i, )))
        move(dir / "orog", dir / "orig")
        s1 = sorted(f.basename for f in (dir / "orig").walk())
        copy(dir / "orig", dir / "dup")
        s2 = sorted(f.basename for f in (dir / "dup").walk())
        self.assertEqual(s1, s2)
        with SshMachine("localhost") as rem:
            with rem.tempdir() as dir2:
                copy(dir / "orig", dir2)
                s3 = sorted(f.basename for f in (dir2 / "orig").walk())
                self.assertEqual(s1, s3)
                copy(dir2 / "orig", dir2 / "dup")
                s4 = sorted(f.basename for f in (dir2 / "dup").walk())
                self.assertEqual(s1, s4)
                copy(dir2 / "dup", dir / "dup2")
                s5 = sorted(f.basename for f in (dir / "dup2").walk())
                self.assertEqual(s1, s5)
                with SshMachine("localhost") as rem2:
                    with rem2.tempdir() as dir3:
                        copy(dir2 / "dup", dir3)
                        s6 = sorted(f.basename for f in (dir3 / "dup").walk())
                        self.assertEqual(s1, s6)
                        move(dir3 / "dup", dir / "superdup")
                        self.assertFalse((dir3 / "dup").exists())
                        s7 = sorted(f.basename for f in (dir / "superdup").walk())
                        self.assertEqual(s1, s7)
        # test rm
        delete(dir)
def kill_node(node):
    print "Killing node%s" % node["id"]
    try:
        remote = SshMachine(node["host"], port=22022, user=username,
                            keyfile=path_to_keyfile,
                            ssh_opts=["-o StrictHostKeyChecking=no"])
    except Exception as e:
        print "Could not connect to %s: %s" % (node["host"], e)
        return
    try:
        print remote["killall"]("node")
    except:
        print "Could not kill node%s" % node["id"]
    else:
        print "Node%s killed!" % node["id"]
    remote.close()
def test_copy_move_delete(self):
    from plumbum.cmd import touch
    with local.tempdir() as dir:
        (dir / "orog").mkdir()
        (dir / "orog" / "rec").mkdir()
        for i in range(20):
            touch(dir / "orog" / ("f%d.txt" % (i, )))
        for i in range(20, 40):
            touch(dir / "orog" / "rec" / ("f%d.txt" % (i, )))
        move(dir / "orog", dir / "orig")
        s1 = sorted(f.name for f in (dir / "orig").walk())
        copy(dir / "orig", dir / "dup")
        s2 = sorted(f.name for f in (dir / "dup").walk())
        assert s1 == s2
        with SshMachine("localhost") as rem:
            with rem.tempdir() as dir2:
                copy(dir / "orig", dir2)
                s3 = sorted(f.name for f in (dir2 / "orig").walk())
                assert s1 == s3
                copy(dir2 / "orig", dir2 / "dup")
                s4 = sorted(f.name for f in (dir2 / "dup").walk())
                assert s1 == s4
                copy(dir2 / "dup", dir / "dup2")
                s5 = sorted(f.name for f in (dir / "dup2").walk())
                assert s1 == s5
                with SshMachine("localhost") as rem2:
                    with rem2.tempdir() as dir3:
                        copy(dir2 / "dup", dir3)
                        s6 = sorted(f.name for f in (dir3 / "dup").walk())
                        assert s1 == s6
                        move(dir3 / "dup", dir / "superdup")
                        assert not (dir3 / "dup").exists()
                        s7 = sorted(f.name for f in (dir / "superdup").walk())
                        assert s1 == s7
        # test rm
        delete(dir)
def connect():
    SshMachine(TEST_HOST, password='******', ssh_opts=[
        '-o', 'UserKnownHostsFile=/dev/null',
        '-o', 'UpdateHostKeys=no'
    ])
def _get_ssh(self, node, user):
    """Setup a SshMachine connection for non-rpyc connections"""
    ssh_opts = ()
    ssh_opts += ('-T', '-oPasswordAuthentication=no',
                 '-oStrictHostKeyChecking=no', '-oPort=22',
                 '-oConnectTimeout=10')
    keyfile = None
    if 'ssh_keyfile' in self.global_config:
        keyfile = self.global_config['ssh_keyfile']
        ssh_opts += ('-o', 'IdentityFile=%s' % keyfile)
    if self.use_controlpersist:
        ssh_opts += ('-oControlMaster=auto', '-oControlPersist=30m',
                     '-oControlPath=~/.ssh/distaf-ssh-%r@%h:%p')
    conn_name = "%s@%s" % (user, node)
    # if no existing connection, create one
    if conn_name not in self.sshconns:
        # we already have plumbum imported for rpyc, so let's use it
        ssh = SshMachine(node, user, ssh_opts=ssh_opts)
        self.sshconns[conn_name] = ssh
    else:
        ssh = self.sshconns[conn_name]
    if ssh:
        self.logger.debug("Have ssh for %s. Returning ssh." % conn_name)
        return ssh
    self.logger.error("oops. did not get ssh for %s", conn_name)
    return None
def start_node(node): print "Connecting to node%s with hostname %s" % (node["id"], node["host"]) try: remote = SshMachine(node["host"], port = 22022, user = username, keyfile = path_to_keyfile) except: print "Could not connect to %s" % node return print "[%s]Connected" % node["id"] print "[%s]Killing python..." % node["id"] try: remote["killall"]("node") except Exception as e: print "[%s]Exception: %s" % (node["id"], e) print "[%s]Python could not get killed" % node["id"] remote.close()
def __init__(self, ip_info, modules, username=None, password=None,
             strict_host_key_checking=True):
    """
    Initializes the context
    """
    self.ips = []
    if isinstance(ip_info, basestring):
        self.ips = [ip_info]
    elif isinstance(ip_info, list):
        self.ips = ip_info
    else:
        raise ValueError('IP info needs to be a single IP or a list of IPs')
    if not isinstance(modules, list) and not isinstance(modules, set) \
            and not isinstance(modules, tuple):
        raise ValueError('Modules should be a list, set or tuple')
    self.username = username if username is not None else check_output('whoami').strip()
    ssh_opts = []
    if strict_host_key_checking is False:
        ssh_opts.append('-o StrictHostKeyChecking=no')
    self.machines = [
        SshMachine(ip, user=self.username, password=password,
                   ssh_opts=tuple(ssh_opts))
        for ip in self.ips
    ]
    self.servers = [DeployedServer(machine) for machine in self.machines]
    self.modules = modules
def get_generic_ssh(location, **kwargs):
    from plumbum import SshMachine
    password = location.facts.get('password')
    keyfile = location.facts.get('keyfile')
    settings = {
        'user': location.facts.get('user', 'root'),
        'port': location.facts.get('port', 22),
        'ssh_opts': SSH_OPTS,
        'scp_opts': SSH_OPTS
    }
    if password:
        settings['password'] = location.facts.get('password')
    if keyfile:
        keyfile = os.path.expanduser(keyfile)
        assert os.path.isfile(keyfile), 'No keyfile {} exists?'.format(keyfile)
        log.debug("Attempting to auth ssh with keyfile {}".format(keyfile))
        settings['keyfile'] = keyfile
    elif password:
        settings['password'] = location.facts.get('password')
    ssh = SshMachine(location.hostname, **settings)
    return ssh
def prefetch(keys):
    if socket.gethostname() == FS:
        return
    with SshMachine(FS) as rem:
        for key in keys:
            get_file_remote(':'.join(map(str, key)), session=rem)
def mapleDeployment(host, localMaple, localMapleHandler, localMapleDataMerger, localGetDestination):
    # create the deployment
    mach = SshMachine(host, user="******", keyfile="/home/zitongc2/.ssh/id_rsa")
    server = DeployedServer(mach)
    # and now you can connect to it the usual way
    conn = server.classic_connect()
    print("[INFO] Connected to ", host)
    conn._config['sync_request_timeout'] = None

    def getResult(string):
        print(string)

    remoteMaple = conn.teleport(localMaple)
    remoteMapleHandler = teleport_function(conn, localMapleHandler)
    remoteMapleHandler(remoteMaple, "/home/zitongc2/test/phase0/input", getResult)
    # print("[INFO] Done processing, now sending result to designated location")
    remoteMapleDataMerger = teleport_function(conn, localMapleDataMerger)
    remoteMapleDataMerger(localGetDestination)
    # when you're done - close the server and everything will disappear
    print("[INFO] done")
    server.close()
def connect():
    SshMachine(TEST_HOST, password='******', ssh_opts=[
        '-o', 'PubkeyAuthentication=no',
        '-o', 'PreferredAuthentications=password'
    ])
def __init__(self, name, user=None):
    self.name = name
    self.user = user
    try:
        self.talk = self._collect_talk()
    except FileNotFoundError:
        raise JumpException(
            "ssh seems to not be working on your machine. Unable to connect.")
    try:
        self.machine = SshMachine(self.name, user=self.user)
    except ValueError as e:
        if "write to closed file" in str(e):
            raise JumpException(
                textwrap.dedent("""
                    The initial authentication was successful, but your ssh
                    configuration seems to not support multiplexing. Jump needs
                    ssh multiplexing to work smoothly, i.e. the connection must
                    be kept open. To enable it, add the following lines to your
                    ~/.ssh/config and retry:

                    Host *
                        ControlMaster auto
                        ControlPath /tmp/ssh_mux_%h_%p_%r
                        ControlPersist 30m
                        ServerAliveInterval 90
                """))
def test_deploy(self):
    rem = SshMachine("localhost")
    SshMachine.python = rem[sys.executable]
    with DeployedServer(rem) as dep:
        conn = dep.classic_connect()
        print(conn.modules.sys)
        func = conn.modules.os.getcwd
        print(func())
    try:
        func()
    except EOFError:
        pass
    else:
        self.fail("expected an EOFError")
    rem.close()
def test_tunnel(self):
    tunnel_prog = r"""import sys, socket
s = socket.socket()
if sys.version_info[0] < 3:
    b = lambda x: x
else:
    b = lambda x: bytes(x, "utf8")
s.bind(("", 0))
s.listen(1)
sys.stdout.write(b("%s\n" % (s.getsockname()[1],)))
sys.stdout.flush()
s2, _ = s.accept()
data = s2.recv(100)
s2.send(b("hello ") + data)
s2.close()
s.close()
"""
    with SshMachine("localhost") as rem:
        p = (rem.python["-u"] << tunnel_prog).popen()
        try:
            port = int(p.stdout.readline().strip())
        except ValueError:
            print(p.communicate())
            raise
        with rem.tunnel(12222, port) as tun:
            s = socket.socket()
            s.connect(("localhost", 12222))
            s.send(six.b("world"))
            data = s.recv(100)
            s.close()
        self.assertEqual(data, six.b("hello world"))
        p.communicate()
def start_button_clicked(self):
    print("QDialog Start Button clicked")
    wanted_client = self.sender().text().encode("ascii")
    for client in self.clients:
        if client.hostname == wanted_client:
            print("Wanted client: ", client)
            from plumbum import SshMachine
            print(client.local_path)
            remote = SshMachine(client.hostname.decode("utf-8"))
            if client.local_path != "":
                remote.cwd.chdir(client.local_path)
            # start_client = remote["/bin/bash"]
            # start_client.run(["./bin/start_client", "otho", "5555"])  # TODO
            # start_client = remote["./bin/start_client"]
            # start_client.run(["otho", "5555"])  # TODO
            remote.popen(args=["./bin/start_client", "otho", "5555"])  # , ssh_opts=("-f",))  # TODO server_name port
    print("Starting done")
def foo(host):
    rem_std_out = no_host_msg
    try:
        with SshMachine(host) as rem:
            rem_std_out = rem[cmd]['remote']()
    except plumbum.machines.session.SSHCommsError as e:
        print(e)
    return host, rem_std_out
def __enter__(self): """Initialize the ssh connection (blocks until success)""" err = None for _ in range(SSH_MAX_RETRY): time.sleep(1) try: self.ssh = SshMachine('127.0.0.1', user='******', port=HOST_PORT, keyfile=self.key) break except (EOFError, plumbum.machines.session.SSHCommsError) as e: err = e continue else: # Reached maximum retries raise VMException('SSH connection failed after too many retries', err) return self
def __enter__(self):
    if self._root:
        username = "******"
    else:
        username = self._server.username
    self.cmd = SshMachine(self._server.host, user=username)
    if self._chdir:
        if self._chdir.startswith("/"):
            self.cmd.cwd.chdir(self._chdir)
        else:
            self.cmd.cwd.chdir(self.cmd.cwd / self._chdir)
    if not self._root:
        self.docker_compose = self.cmd["docker-compose"]
        self.git = self.cmd["git"]
        self.pip = self.cmd["pip"]
    return self
def test_download_upload(self):
    with SshMachine("localhost") as rem:
        rem.upload("test_remote.py", "/tmp")
        r_ls = rem["ls"]
        r_rm = rem["rm"]
        self.assertTrue("test_remote.py" in r_ls("/tmp").splitlines())
        rem.download("/tmp/test_remote.py", "/tmp/test_download.txt")
        r_rm("/tmp/test_remote.py")
        r_rm("/tmp/test_download.txt")
def test_read_write(self):
    with SshMachine("localhost") as rem:
        with rem.tempdir() as dir:
            self.assertTrue(dir.isdir())
            data = "hello world"
            (dir / "foo.txt").write(data)
            self.assertEqual((dir / "foo.txt").read(), data)
        self.assertFalse(dir.exists())
def test_hostpubkey_unknown(sshpass):
    with pytest.raises(HostPublicKeyUnknown):
        SshMachine(
            TEST_HOST,
            password="******",
            ssh_opts=["-o", "UserKnownHostsFile=/dev/null",
                      "-o", "UpdateHostKeys=no"],
        )
def test_env(self):
    with SshMachine("localhost") as rem:
        self.assertRaises(ProcessExecutionError, rem.python, "-c",
                          "import os;os.environ['FOOBAR72']")
        with rem.env(FOOBAR72="lala"):
            with rem.env(FOOBAR72="baba"):
                out = rem.python("-c", "import os;print(os.environ['FOOBAR72'])")
                self.assertEqual(out.strip(), "baba")
            out = rem.python("-c", "import os;print(os.environ['FOOBAR72'])")
            self.assertEqual(out.strip(), "lala")
def test_chown(self):
    with SshMachine("localhost") as rem:
        with rem.tempdir() as dir:
            p = dir / "foo.txt"
            p.write("hello")
            # because we're connected to localhost, we expect UID and GID to be the same
            self.assertEqual(p.uid, os.getuid())
            self.assertEqual(p.gid, os.getgid())
            p.chown(p.uid.name)
            self.assertEqual(p.uid, os.getuid())
def RemoteMachine(*pargs, **kwargs):
    '''
    Remote machine constructor function. Forwards all arguments on to the
    appropriate constructor for this platform. On Windows this is
    ``plumbum.PuttyMachine`` and on other platforms ``plumbum.SshMachine``.
    '''
    if _WIN32:
        return PuttyMachine(*pargs, **kwargs)
    else:
        return SshMachine(*pargs, **kwargs)
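# A minimal usage sketch for RemoteMachine above, assuming PuttyMachine and
# SshMachine are imported and the target accepts key-based ssh; the hostname and
# user below are illustrative only.
rem = RemoteMachine("server.example.com", user="deploy")
print(rem["uname"]("-a"))   # dispatches to PuttyMachine on Windows, SshMachine elsewhere
rem.close()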
def pbm(self):
    """ Plumbum lazy property """
    if not self.disable_rpyc:
        from plumbum import SshMachine
        return SshMachine(host=self.private_hostname,
                          user=self.username,
                          keyfile=self.key_filename,
                          ssh_opts=["-o", "UserKnownHostsFile=/dev/null",
                                    "-o", "StrictHostKeyChecking=no"])
    else:
        return None
def _get_ssh_connection(cls, host, user=None):
    """Setup an SshMachine connection.

    Args:
        host (str): Hostname of the system.
        user (optional[str]): User to use for connection.

    Returns:
        An ssh connection object on success. None on failure.
    """
    if not user:
        user = cls.user
    ssh_opts = ()
    ssh_opts += ('-oPasswordAuthentication=no',
                 '-oStrictHostKeyChecking=no',
                 '-oPort=22',
                 '-oConnectTimeout=10')
    keyfile = None
    if 'ssh_keyfile' in cls.config:
        keyfile = cls.config['ssh_keyfile']
        ssh_opts += ('-o', 'IdentityFile=%s' % keyfile)
    if cls.use_controlpersist:
        ssh_opts += ('-oControlMaster=auto', '-oControlPersist=4h',
                     '-oControlPath=~/.ssh/glusto-ssh-%r@%h:%p')
    scp_opts = ssh_opts
    ssh_opts += ('-T', )
    conn_name = "%s@%s" % (user, host)
    # if no existing connection, create one
    if conn_name not in cls._ssh_connections:
        cls.log.debug("Creating connection: %s" % conn_name)
        try:
            ssh = SshMachine(host, user, ssh_opts=ssh_opts, scp_opts=scp_opts)
        except:
            cls.log.error("Exception trying to establish SshMachine")
            return None
        cls._ssh_connections[conn_name] = ssh
    else:
        cls.log.debug("Retrieved connection from cache: %s" % conn_name)
        ssh = cls._ssh_connections[conn_name]
    if ssh:
        return ssh
    print("oops. did not get ssh for %s", conn_name)
    return None
def _start_ssh_slave(name, cfg, status):
    """ Start a slave controller that is an SSH client """
    # Improve logging setup!!!
    logging.getLogger('plumbum').setLevel(logging.DEBUG)

    # Find a host that supports ssh
    host = config.resource.allocate_host(name, {'launch_protocol': 'ssh'}, {})

    # Get the root of the SIP installation on that host
    sip_root = config.resource.sip_root(host)

    # Allocate ports for heartbeat and the RPC interface
    heartbeat_port = config.resource.allocate_resource(name, "tcp_port")
    rpc_port = config.resource.allocate_resource(name, "tcp_port")

    # Get the task control module to use for this task
    task_control_module = cfg['task_control_module']

    # Get the address of the logger (as seen from the remote host)
    logger_address = _find_route_to_logger(host)

    ssh_host = SshMachine(host)
    import pdb
    # pdb.set_trace()
    try:
        py3 = ssh_host['python3']
    except:
        logger.fatal('python3 not available on machine {}'.format(ssh_host))
    logger.info('python3 is available at {}'.format(py3.executable))

    # Construct the command line to start the slave
    cmd = py3[os.path.join(sip_root, 'slave/bin/slave')] \
        [name][heartbeat_port][rpc_port][logger_address][task_control_module]
    ssh_host.daemonic_popen(cmd, stdout='{}_sip.output'.format(name))

    # Fill in the status dictionary
    status['address'] = host
    status['rpc_port'] = rpc_port
    status['heartbeat_port'] = heartbeat_port
    status['sip_root'] = sip_root
    logger.info(name + ' started on ' + host)
def process_draw():
    draw = Draw()
    # SSHLibrary.library.SSHClient.write(text="lala")
    rem = SshMachine('xxmkt1055.gtk.gtech.com', user='******')  # "10.17.20.187"
    conn = rpyc.classic.ssh_connect(rem, remote_port=22)
    conn.execute(
        print("$USEC/bin/rpc -t {} -p {} {} {} {} {} {} {} {} {} {} {} {}".format(
            draw.user, draw.product, draw.function, draw.action, draw.draw,
            draw.set_number, draw.win_numbers, draw.win_plus, draw.win_power,
            draw.win_super, draw.win_promo, draw.greenball, draw.multiplier)))
class SshDriver(object):
    def __init__(self, host):
        self.host = host
        self.machine = SshMachine(self.host)
        self.rpyc_serv = DeployedServer(self.machine)
        self.rpyc_conn = self.rpyc_serv.classic_connect()
        self.rpyc_conn.ping()

    def upload(self, local_path, remote_path):
        self.machine.upload(local_path, remote_path)

    def download(self, remote_path, local_path):
        self.machine.download(remote_path, local_path)

    def calculate_checksum(self, filename):
        return calculate_checksum(filename, self.rpyc_conn)

    def isfile(self, filename):
        return self.rpyc_conn.modules.os.path.isfile(filename)

    def ensure_directory(self, path):
        ensure_directory(path, self.rpyc_conn)

    def ensure_parent_directory(self, filename):
        parent_directory = os.path.dirname(filename)
        self.ensure_directory(parent_directory)

    def list_all_files(self, path):
        return list_all_files(path, self.rpyc_conn)

    def list_directories(self, path):
        return list_directories(path, self.rpyc_conn)

    def remove_file(self, filename):
        self.rpyc_conn.modules.os.remove(filename)

    def file_size(self, filename):
        return self.rpyc_conn.modules.os.path.getsize(filename)

    def sort_file_entries_by_size(self, file_entries):
        return sorted(file_entries,
                      key=lambda file_entry: self.file_size(file_entry[0]))
def deploy_code(cls):
    remote = SshMachine(TARGET.DOMAIN)
    with remote.cwd(TARGET.TOOL_PATH):
        print(remote['git']('pull'))
        print(remote[VIRTENV.PIP_BIN]('install', '-U', '-e', '.'))
from plumbum import SshMachine

remote = SshMachine('intrepid', user='******',
                    keyfile='/home/rmeadows/.ssh/id_rsa')
r_ls = remote['ls']

print 'intrepid: /opt/repos/github:'
with remote.cwd('/opt/repos/github'):
    print r_ls()
def connect(self):
    if self.deployment is None:
        self._mach = SshMachine(**self.kwargs)
        self.deployment = DeployedServer(self._mach)
    return self.deployment.classic_connect()
def __init__(self, host):
    self.host = host
    self.machine = SshMachine(self.host)
    self.rpyc_serv = DeployedServer(self.machine)
    self.rpyc_conn = self.rpyc_serv.classic_connect()
    self.rpyc_conn.ping()