def start_monitor(self): local_name = platform.node() print "Starting Cassandra Monitor for %s" % self.server while True: try: ssh = SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(AutoAddPolicy()) ssh.connect(self.server, timeout=5, allow_agent=False) stdin, stdout, stderr = ssh.exec_command(CASS_CMD) stdin.close() for line in stdout: # Any line that shows up will be a downed server # server datacenter rack status state load owns token downed_node = line.split() raise DownedNodeException(downed_node[0]) stdout.close() err = stderr.read() if err: raise Exception("Unknown error: %s" % str) stderr.close() ssh.close() except DownedNodeException as e: self.mark_error(e.node, e) except SSHException as e: self.mark_error(self.server, "%s could not connect to %s: %s" % (local_name, self.server, e)) except Exception as e: self.mark_error(local_name, "Unknown error: %s" % e) else: self.clear_error() time.sleep(INTERVAL)
def get_rawdata(server, month, tmpfile):
    # Fetch volume.<month>.csv from a remote server over SCP into tmpfile.
    # NOTE(review): `server.objects.get(name=server)` uses the *argument*
    # `server` both as the model and as the lookup value — the parameter
    # shadows what is presumably a Django-style model class of the same
    # name. Verify against callers; this likely should be Server.objects.get.
    serv = server.objects.get(name=server)
    ssh = SSHClient()
    ssh.load_system_host_keys()
    # Credentials and the remote directory come from the server record.
    ssh.connect(serv.ipaddress, username=serv.username, password=serv.password)
    client = SCPClient(ssh.get_transport())
    client.get(serv.path + '/volume.' + month + '.csv', tmpfile)
def update_server(): text = "======================ESB /opt Console Space Report=============\n" print text server_list = ['p1esbapp', 'p2esbapp', 'p3esbapp', 'p4esbapp', 'p5esbapp', 'p6esbapp', 'p7esbapp', 'p8esbapp', 'p9esbapp', 'p10esbapp', 'p11esbapp', 'p12esbapp'] for server in server_list: text = text + "-------------------" + str(server) + "---------------------\n" client = SSHClient() client.load_system_host_keys() client.connect(str(server), username="******") command = 'df -h | grep opt; du -csh /opt/esb/jboss-eap-4.3/jboss-as/console.log' stdin, stdout, stderr = client.exec_command(command) for output in stdout.readlines(): # print output text = text + output + '\n' print text # message = """From: Boomer <*****@*****.**> # To: Boomer <*****@*****.**> # Suject: ESB /opt console.log Report.""" + text msg = MIMEText(text) msg['Subject'] = 'ESB /opt console.log report' msg['From'] = email.utils.formataddr(('Boomer', "william.rehfield\@wellcare.com")) msg['To'] = email.utils.formataddr(('Boomer', "william.rehfield\@wellcare.com")) # s = smtplib.SMTP('localhost') # s.sendmail( server = smtplib.SMTP('mail') server.sendmail("*****@*****.**", "*****@*****.**", msg.as_string())
def connectRemoteAddress(self, remote_user, remote_address, key_file):
    """Open an SSH connection to remote_address as remote_user using an RSA key.

    :param remote_user: login name on the remote host
    :param remote_address: hostname or IP to connect to
    :param key_file: path to the RSA private key; falls back to
                     ~/.ssh/id_rsa when empty/None
    :returns: a connected paramiko SSHClient
    """
    ssh = SSHClient()
    ssh.load_system_host_keys()
    # BUG FIX: key_file was accepted but ignored — ~/.ssh/id_rsa was always
    # used. Honor the argument, keeping the old path as a fallback so
    # existing callers passing a falsy value behave as before.
    private_key = path.expanduser(key_file) if key_file else path.expanduser("~/.ssh/id_rsa")
    key = RSAKey.from_private_key_file(private_key)
    ssh.connect(username=remote_user, hostname=remote_address, pkey=key)
    return ssh
def push_cfg_files():
    """Upload the buildout .cfg files to the configured server over SCP.

    On the git '4.x' branch each file is renamed foo.cfg -> foo4.cfg on
    the remote side.
    """
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(AutoAddPolicy())
    user, server, path = check_connection()
    ssh.connect(server, username=user)
    on_4x_branch = Repo(os.path.curdir).active_branch.name == '4.x'
    uploaded = []
    with SCPClient(ssh.get_transport()) as scp:
        for filename in ('versions.cfg', 'distribution-qa.cfg', 'release.cfg',
                         'sources.cfg', 'qa.cfg', 'solr.cfg'):
            remote_filename = filename
            if on_4x_branch:
                # foo.cfg -> foo4.cfg
                base, dot, ext = filename.partition('.')
                remote_filename = base + '4' + dot + ext
            uploaded.append(remote_filename)
            scp.put(
                filename,
                remote_path='{0}/{1}'.format(path, remote_filename)
            )
    logger.debug('Files uploaded: ')
    logger.debug('\n'.join(uploaded))
def upload_ssh(args):
    # Upload selected problem artifacts (checker, validator, testlib, tests,
    # statement, valuer config) to the contest server over SCP, reading
    # credentials from ~/.netrc when available.
    # NOTE(review): this function was redacted at ingestion (the "******"
    # runs below) and the credential-prompt / scp-setup section is NOT valid
    # Python as recovered; it is preserved verbatim for a human to restore
    # from version control. `scp` is presumably an SCPClient created there.
    if os.path.exists(os.path.expanduser('~/.netrc')):
        auth_data = netrc.netrc(os.path.expanduser('~/.netrc')).authenticators(cfg.get_contest_host())
    else:
        auth_data = None
    if not auth_data:
        login = input('login: '******'password: '******'problems/' + ( # cfg.get_problem_param('system name') or cfg.get_problem_param('system_name')))
    path_prefix = cfg.get_server_contest_path() + 'problems/' + (
        cfg.get_problem_param('system name') or cfg.get_problem_param('system_name'))
    if args['checker']:
        print('Uploading checker')
        # Default checker file name when the problem config omits it.
        checker_path = cfg.get_problem_param('checker', True) or 'check.cpp'
        checker_path = os.path.normpath(checker_path)
        checker_out_path = os.path.join(path_prefix, os.path.basename(checker_path))
        scp.put(checker_path, checker_out_path)
    if args['validator']:
        print('Uploading validator')
        validator_path = cfg.get_problem_param('validator', True) or 'validator.cpp'
        validator_path = os.path.normpath(validator_path)
        validator_out_path = os.path.join(path_prefix, os.path.basename(validator_path))
        scp.put(validator_path, validator_out_path)
    if args['testlib']:
        print('Uploading testlib')
        scp.put('../../lib/testlib.h', os.path.join(path_prefix, 'testlib.h'))
    if args['tests']:
        print('Uploading tests')
        # Each test contributes an input (.inf) and an answer (.ans) file.
        for t in Test.test_gen('tests'):
            print('uploading {} and {}'.format(t.inf_name(), t.ans_name()))
            scp.put(t.inf_path(), os.path.join(path_prefix, 'tests', t.inf_name()))
            scp.put(t.ans_path(), os.path.join(path_prefix, 'tests', t.ans_name()))
    if args['statement']:
        print('Uploading statement.xml')
        scp.put('statement/statement.xml', os.path.join(path_prefix, 'statement.xml'))
    if args['gvaluer']:
        print('Uploading valuer.cfg')
        scp.put('valuer.cfg', os.path.join(path_prefix, 'valuer.cfg'))
def _exec(self):
    """Run the configured command on the remote host, streaming output
    through Printer.

    :returns: the remote command's exit status, or 1 on connection or
        unexpected errors.
    """
    exitcode = 0
    client = SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(WarningPolicy())
    try:
        client.connect(self.config.get('hostname'),
                       int(self.config.get('port', 22)),
                       key_filename=self.identity_file,
                       username=self.config.get('user'))
        stdout, stderr, channel = self.exec_command(client)
        Printer(self.host, stdout, stderr).loop()
        if channel:
            exitcode = channel.recv_exit_status()
    except IOError as e:
        print(colored('{0}: {1}'.format(self.host, str(e)), 'red'))
        exitcode = 1
    except (BadHostKeyException, AuthException, SSHException) as e:
        # BUG FIX: 'red' was outside the colored() call, so print received a
        # stray positional argument and the message was never colorized.
        # str(e) replaces e.message, which does not exist on Python 3.
        print(colored('{0}: {1}'.format(self.host, str(e)), 'red'))
        exitcode = 1
    except Exception as e:
        print(colored('{0}: {1}'.format(self.host, str(e)), 'red'))
        exitcode = 1
    finally:
        client.close()
    return exitcode
def _create_MB_ssh_client(self,server, port, user, password): ssh = SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) print "AAAAAAAAAAAAAAAAAAAAAAAA",server,port,user,password ssh.connect(server, port, user, password) return ssh
class Sender(object):
    # One-way file mirror: pushes local files under from_path to dest_path
    # on a remote host over SCP, using Recorder to skip files that were
    # already synced at their current modification time.

    def __init__(self, server, username, password, dest_path, from_path):
        # dest_path: remote root directory; from_path: local root directory.
        self.dest_path = dest_path
        self.from_path = from_path
        self.recorder = Recorder.Recorder()
        self.ssh = SSHClient()
        self.ssh.load_system_host_keys()
        self.ssh.connect(server, username=username, password=password)
        # SFTP is used only for remote exists/mkdir checks; SCP does the copy.
        self.sftp = self.ssh.open_sftp()
        self.scp = SCPClient(self.ssh.get_transport())

    def send(self, file_path):
        # Upload file_path if it exists and its mtime differs from the last
        # recorded sync, creating missing remote directories first.
        if op.exists(file_path):
            file_modify_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(os.stat(file_path).st_mtime))
            if not self.recorder.isSynced(file_path, file_modify_time):
                # Map the local path into the remote tree by stripping the
                # from_path prefix.
                new_file_path = os.path.join(self.dest_path + '/', file_path.split(self.from_path + os.sep)[1])
                # Remote side is POSIX: normalize Windows separators.
                new_file_path = new_file_path.replace('\\', '/')
                new_file_dir, new_file = op.split(new_file_path)
                if not rexists(self.sftp, new_file_dir):
                    rmkdir(self.sftp, new_file_dir)
                print 'uploading %s .....' % (file_path)
                self.scp.put(file_path, new_file_path)
                # Record the sync so unchanged files are skipped next time.
                self.recorder.record(file_path, file_modify_time)
            else:
                pass
def connect(self):
    """Initiate an ssh connection with properties passed to constructor.

    Prefers private-key auth (required_props1) over username/password
    (required_props2).

    :returns: Instance of the paramiko SSHClient class.
    :raises ShakeMapException: when neither credential set is configured,
        or when the connection attempt fails.
    """
    usePrivateKey = True
    for prop in self.required_props1:
        if prop not in list(self.properties.keys()):
            usePrivateKey = False
            break
    usePassword = True
    for prop in self.required_props2:
        if prop not in list(self.properties.keys()):
            usePassword = False
            break
    if not usePrivateKey and not usePassword:
        raise ShakeMapException('Either username/password must be specified, or the name of an SSH private key file.')
    ssh = SSHClient()
    # load hosts found in ~/.ssh/known_hosts
    # should we not assume that the user has these configured already?
    ssh.load_system_host_keys()
    if usePrivateKey:
        try:
            ssh.connect(self.properties['remotehost'],
                        key_filename=self.properties['privatekey'],
                        compress=True)
        except Exception:
            raise ShakeMapException('Could not connect with private key file %s'
                                    % self.properties['privatekey'])
    else:
        try:
            ssh.connect(self.properties['remotehost'],
                        username=self.properties['username'],
                        password=self.properties['password'],
                        compress=True)
        except Exception:
            # BUG FIX: this branch previously reported a *private key*
            # failure and read self.properties['privatekey'], which may not
            # exist here — the resulting KeyError masked the real error.
            raise ShakeMapException('Could not connect to %s with username/password'
                                    % self.properties['remotehost'])
    return ssh
class DeployTool(object): """docstring for ClassName""" def __init__(self, remote_user, remote_password, remote_server): self.remote_user = remote_user self.remote_password = remote_password self.remote_server = remote_server self.client = SSHClient() self.client.load_system_host_keys() self.client.connect( hostname=self.remote_server, username=self.remote_user, password=self.remote_password ) def _ssh(self, command): stdin, stdout, stderr = self.client.exec_command(command) ssh_output = stdout.read() ssh_error = stderr.read() return (ssh_output, ssh_error) def _sftp(self, infile, outfile): sftp = self.client.open_sftp() sftp.put( infile, outfile ) def deploy(self): print '### Downloading Configurator ###' self._ssh('wget -P /opt/ http://10.100.10.146/config/configurator.sh') self._ssh('chmod 777 /opt/configurator.sh') print '### Running Configurator ###' self._ssh('/opt/configurator.sh')
def send_file(local_file, remote_path, ip, username, password, logger=None):
    """Send local_file to remote_path on the remote machine over SCP.

    :param logger: optional logger for progress/error messages
    :returns: True on success, False on connect or transfer failure
    """
    # Normalize the destination to a directory path.
    # (endswith also tolerates an empty remote_path, unlike indexing.)
    if not remote_path.endswith('/'):
        remote_path += '/'
    ssh = SSHClient()
    ssh.set_missing_host_key_policy(AutoAddPolicy())
    ssh.load_system_host_keys()
    if logger is not None:
        logger.info("SCP SEND: connecting to %s" % (ip))
    try:
        ssh.connect(ip, username=username, password=password)
    # BUG FIX: the bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    except Exception:
        if logger is not None:
            logger.info("SCP SEND: failed to connect to %s" % (ip))
        return False
    else:
        if logger is not None:
            logger.info("SCP SEND: connected to %s" % (ip))
    try:
        if logger is not None:
            logger.info("SCP SEND: sending file %s" % (local_file))
        scp = SCPClient(ssh.get_transport())
        scp.put(local_file, remote_path)
    except Exception:
        if logger is not None:
            logger.error("SCP SEND: error: failed to send file %s" % (local_file))
        ssh.close()
        return False
    else:
        if logger is not None:
            logger.info("SCP SEND: file sent to %s@%s:%s " % (username, ip, remote_path))
    ssh.close()
    return True
def copySCP(self, datum_original, datum_destination, server, username, option_type):
    """Copy data via SCP.

    :param datum_original: dict with a 'location' key (source path)
    :param datum_destination: dict with a 'location' key (destination path)
    :param option_type: 'upload' to put original -> destination; any other
        value downloads original -> destination
    """
    util.log_to_file('ssh.log')
    ssh = SSHClient()
    ssh.load_system_host_keys()
    logging.info("connection to %s" % server)
    ssh.connect(server, username=username, compress=True, timeout=60)
    # SCPCLient takes a paramiko transport as its only argument
    client = scp.SCPClient(ssh.get_transport())
    try:
        logging.info(option_type + ": %s to %s" % (datum_original['location'],
                                                   datum_destination['location']))
        if option_type == 'upload':
            client.put(datum_original['location'], datum_destination['location'], recursive=True)
        else:
            client.get(datum_original['location'], datum_destination['location'], recursive=True)
    finally:
        # BUG FIX: previously only the SCP channel was closed (and not on
        # error); the SSH connection itself was leaked.
        client.close()
        ssh.close()
class SSH(object):
    """
    package for ssh_client, in this class stores the sftp handlers and
    the ssh_client
    """

    def __init__(self, host, user, password, port=22):
        # Store connection parameters only; no network activity happens
        # until ssh_connect() is called.
        self.host = host
        self.user = user
        self.password = password
        self.port = port

    def client_init(self):
        # (Re)create the underlying paramiko client; unknown host keys are
        # auto-accepted.
        self.clt = SSHClient()
        self.clt.load_system_host_keys()
        self.clt.set_missing_host_key_policy(AutoAddPolicy())

    def ssh_connect(self, task_list, ret_map):
        # Connect and open an SFTP session. On failure, record a
        # (-1, message) result in ret_map for every task id in task_list
        # and return False; return True on success.
        # NOTE(review): calls self.ssh_close(), defined outside this excerpt.
        try:
            self.client_init()
            self.clt.connect(hostname=self.host, username=self.user,
                             password=self.password, port=self.port, timeout=3)
            self.sftp = self.clt.open_sftp()
        except Exception, e:  # Python 2 except syntax
            self.ssh_close()
            for task in task_list:
                # Tasks shorter than 2 elements carry no result slot.
                if len(task) > 1:
                    ret_map[task[0]] = (-1, "ssh connect error %s" % str(e))
            return False
        return True
def get(self, remote_path, local_path=None, out_stream=sys.stdout, verbose=False):
    """
    Copy a file from the remote system to the local system.

    :param remote_path: remote file or glob, expanded remotely via `ls -1`
    :param local_path: local destination; defaults to remote_path
    :param out_stream: stream used for progress/diagnostic display
    :param verbose: pass-through to display()
    :return: repr of the list of remote names transferred
    :rtype: str
    """
    if local_path is None:
        local_path = remote_path
    self.display("scp '{src}' '{dest}'".format(src=remote_path, dest=local_path),
                 out_stream=out_stream, verbose=verbose)
    # Expand remote_path on the remote side; one name per line.
    # NOTE(review): a trailing newline from `ls` yields an empty final
    # name, and every name is fetched into the *same* local_path — with
    # multiple matches each transfer overwrites the previous one. Confirm
    # whether single-file use is the only intended case.
    names = self.run(['ls', '-1', remote_path]).split('\n')
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(Project.address, Project.port, Project.user, Project.password)
    ftp = ssh.open_sftp()
    for name in names:
        print(name)
        ftp.get(name, local_path)
    output = repr(names)
    self.display(output, out_stream=out_stream, verbose=verbose)
    return output
def get_file(remote_file, local_path, ip, username, password, logger=None):
    """Retrieve remote_file from the remote machine over SCP into local_path.

    :param logger: optional logger for progress/error messages
    :returns: True on success, False on connect or transfer failure
    """
    # Normalize the destination to a directory path.
    # (endswith also tolerates an empty local_path, unlike indexing.)
    if not local_path.endswith('/'):
        local_path += '/'
    ssh = SSHClient()
    ssh.set_missing_host_key_policy(AutoAddPolicy())
    ssh.load_system_host_keys()
    if logger is not None:
        logger.info("SCP GET: connecting to %s" % (ip))
    try:
        ssh.connect(ip, username=username, password=password)
    # BUG FIX: the bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    except Exception:
        if logger is not None:
            logger.info("SCP GET: failed to connect to %s" % (ip))
        return False
    else:
        if logger is not None:
            logger.info("SCP GET: connected to %s" % (ip))
    try:
        if logger is not None:
            logger.info("SCP GET: retrieving file %s" % (remote_file))
        scp = SCPClient(ssh.get_transport())
        scp.get(remote_file, local_path)
    except Exception:
        if logger is not None:
            logger.error("SCP GET: error: failed to retrieve file %s" % (remote_file))
        ssh.close()
        return False
    else:
        if logger is not None:
            logger.info("SCP GET: file saved to %s folder" % (local_path))
    ssh.close()
    return True
def rollback_using_import_policy(self, version): filenames = self.get_filenames(version) # Create the tar file of the selected JSON files self._generate_tar_gz(filenames, version) # Put the tar file on the APIC ssh = SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(AutoAddPolicy()) ssh.connect(self.session.ipaddr, username=self.session.uid, password=self.session.pwd) sftp = ssh.open_sftp() sftp.chdir('/home/%s' % self.session.uid) sftp.put('./ce_snapback.tar.gz', 'ce_snapback.tar.gz') # Remove the local tar file os.remove('./ce_snapback.tar.gz') # Send the import policy to the APIC url = '/api/node/mo/uni/fabric.json' payload = {"configImportP": {"attributes": { "name": "snapback", "fileName": "ce_snapback.tar.gz", "adminSt": "triggered", "importType": "replace", "importMode": "atomic"}, "children": [{"configRsImportSource": {"attributes": {"tnFileRemotePathName": "snapback"}, "children": [] }}]}} resp = self.session.push_to_apic(url, payload) return resp
class SSH(object):
    """Thin wrapper around paramiko's SSHClient using key-based auth."""

    def __init__(self):
        self.client = SSHClient()
        self.client.set_missing_host_key_policy(AutoAddPolicy())
        self.client.load_system_host_keys()

    def connect(self, server, user, key):
        """Connect to server as user using the given private-key file."""
        logging.info("Connecting to {u}@{s} [with key:{k}]".format(
            u=user,
            s=server,
            k=key
        ))
        self.client.connect(server, username=user, key_filename=key, timeout=2.0)

    def close(self):
        # BUG FIX: was `def close():` — missing `self`, so any call raised
        # TypeError and `self.client` was an unresolved name inside it.
        self.client.close()

    @property
    def transport(self):
        return self.client.get_transport()

    @staticmethod
    def SSHFactory(config):
        """Build an SSH object connected to the configured staging host."""
        logging.debug("Creating SSH Object.")
        sClient = SSH()
        sClient.connect(config.staging,
                        config.stagingUser,
                        config.key)
        return sClient
def main(file, key, encrypt, host, username, password):
    """
    The main function. Takes multiple parameters which are prompted if not
    given on the command line.

    :param file: The path of the file to encrypt or decrypt and send.
    :param key: The key to encrypt or decrypt.
    :param encrypt: Tells if the operation is an encryption or a decryption.
    :param host: The host where to send the file.
    :param username: Username on the host.
    :param password: Password if needed. '-' marks that no password is needed.
    """
    ssh = SSHClient()
    ssh.load_system_host_keys()
    credentials = {'username': username}
    if password != "-":
        credentials['password'] = password
    ssh.connect(host, **credentials)
    scp = SCPClient(ssh.get_transport())
    if not encrypt:
        print(decrypt_file(file, key))
        return
    print("Encrypting... ", end="")
    to_send = encrypt_file(file, key)
    print("Done.")
    print("Sending to {}...".format(host), end="")
    scp.put(to_send)
    print("Done.")
def executeOnMaster(self, cmd):
    """ Execute command on the current master leader (SSH on port 2200).

    :returns: combined stdout as a string, or an error message when the
        management endpoint does not resolve.
    """
    self.log.debug("Executing on master: " + cmd)
    # Guard clause: bail out before any SSH state is created.
    if not self._hostnameResolves(self.getManagementEndpoint()):
        self.log.error("Endpoint " + self.getManagementEndpoint() +
                       " does not exist, cannot SSH into it.")
        return "Exception: No cluster is available at " + self.getManagementEndpoint()
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(
            self.getManagementEndpoint(),
            username=self.config.get('ACS', "username"),
            port=2200,
            key_filename=os.path.expanduser(self.config.get('SSH', "privatekey")))
        # Forward the local SSH agent so remote commands can hop further.
        session = ssh.get_transport().open_session()
        AgentRequestHandler(session)
        stdin, stdout, stderr = ssh.exec_command(cmd)
        stdin.close()
        result = ""
        for line in stdout.read().splitlines():
            self.log.debug(line)
            result = result + line.decode("utf-8") + "\n"
        for line in stderr.read().splitlines():
            self.log.error(line)
    finally:
        # BUG FIX: ssh.close() previously also ran on the unresolved-endpoint
        # path, where `ssh` was never created — raising NameError and masking
        # the intended error return. close() now only runs when ssh exists.
        ssh.close()
    return result
class SFTP(Operations):
    """A simple SFTP filesystem. Requires paramiko:
    http://www.lag.net/paramiko/

    You need to be able to login to remote host without entering a
    password.
    """

    def __init__(self, host, path='.'):
        # Opens the SSH/SFTP session immediately; all filesystem operations
        # are routed through self.sftp with paths rooted at `path`.
        self.client = SSHClient()
        self.client.load_system_host_keys()
        self.client.connect(host)
        self.sftp = self.client.open_sftp()
        self.root = path

    def __del__(self):
        self.sftp.close()
        self.client.close()

    def __call__(self, op, path, *args):
        # FUSE dispatch: forward operation `op` to the same-named method,
        # prefixing the mount root. The result (or error text) is captured
        # in `ret` for tracing; IOError is translated to OSError for FUSE.
        print '->', op, path, args[0] if args else ''
        ret = '[Unhandled Exception]'
        try:
            ret = getattr(self, op)(self.root + path, *args)
            return ret
        except OSError, e:  # Python 2 except syntax
            ret = str(e)
            raise
        except IOError, e:
            ret = str(e)
            raise OSError(*e.args)
def get_file(fname):
    """Download fname from the module-level file_server into the cwd via SCP."""
    client = SSHClient()
    client.load_system_host_keys()
    client.connect(file_server)
    transfer = SCPClient(client.get_transport())
    transfer.get(fname)
    transfer.close()
def verify_ssh_login(userid):
    # Smoke-test passwordless SSH key login to each FutureGrid host for
    # `userid` (defaults to the current local user), printing success or
    # the failure reason per host.
    client = SSHClient()
    client.load_system_host_keys()
    # client.set_missing_host_key_policy(WarningPolicy)
    client.set_missing_host_key_policy(AutoAddPolicy())  # TEST ONLY
    hosts = ["india.futuregrid.org"]
    key = os.path.expanduser(os.path.join("~", ".ssh", "id_rsa"))
    print "[key: %s]" % key
    if not userid:
        userid = getpass.getuser()
    for host in hosts:
        try:
            client.connect(host, username=userid, key_filename=key)
            client.close()
            print "[%s] succeeded with %s." % (host, userid)
        except (BadHostKeyException, AuthenticationException, SSHException) as e:
            print ("[%s] %s with %s. Please check your ssh setup (e.g. key " +
                   "files, id, known_hosts)") % (host, e, userid)
def connect(self):
    # Open an SSH connection to self.host, auto-accepting unknown host keys.
    # Includes a workaround for a PyCrypto/paramiko CTR-mode incompatibility:
    # on ValueError, Crypto.Cipher.AES.new is monkey-patched to drop the bad
    # IV argument and the connection is retried once.
    logger.debug("Opening SSH connection to {host}:{port}".format(host=self.host, port=self.port))
    client = SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(AutoAddPolicy())
    try:
        client.connect(self.host, port=self.port, username=self.username, timeout=self.timeout)
    except ValueError, e:  # Python 2 except syntax
        logger.error(e)
        logger.warning(
            """
            Patching Crypto.Cipher.AES.new and making another attempt.
            See here for the details:
            http://uucode.com/blog/2015/02/20/workaround-for-ctr-mode-needs-counter-parameter-not-iv/
            """
        )
        client.close()
        import Crypto.Cipher.AES
        orig_new = Crypto.Cipher.AES.new

        def fixed_AES_new(key, *ls):
            # When CTR mode is requested, blank the IV argument (the counter
            # kwarg is supplied separately by paramiko).
            if Crypto.Cipher.AES.MODE_CTR == ls[0]:
                ls = list(ls)
                ls[1] = ""
            return orig_new(key, *ls)

        Crypto.Cipher.AES.new = fixed_AES_new
        client.connect(self.host, port=self.port, username=self.username, timeout=self.timeout)
def _ssh_connect(args, direction):
    """Creates an ssh connection using the arguments provided"""
    # `direction` selects the argument-name prefix: <direction>_address,
    # _port, _user, _passw and _filekey are read off `args`. Password auth
    # takes precedence over key-file auth when both are supplied.
    try:
        ssh = SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.load_system_host_keys()
        address = args.__getattribute__(direction + '_address')
        port = args.__getattribute__(direction + '_port')
        if isinstance(port, str):
            port = int(port)
        user = args.__getattribute__(direction + '_user')
        passw = args.__getattribute__(direction + '_passw')
        fkey = args.__getattribute__(direction + '_filekey')
        lib.log.info('Connecting to %s', address)
        if passw:
            ssh.connect(address, port=port, username=user, password=passw)
        elif fkey:
            key = paramiko.RSAKey.from_private_key_file(fkey)
            # In case you need to set an user
            if user:
                ssh.connect(address, port=port, username=user, pkey=key)
            else:
                ssh.connect(address, port=port, pkey=key)
        lib.log.info('Connection successful')
        return ssh
    except Exception as exc:
        # 'dict_keys' object is not subscriptable
        # It means that the connection was not successful
        # NOTE(review): any failure (including bad args) terminates the
        # whole process here via exit(-1).
        lib.log.error('Unable to connect: ' + str(exc))
        exit(-1)
    return None
def __startInstance(device, datatype, response, pcount):
    """Launch a processor Docker container on the next node (round-robin).

    :param pcount: counter used to round-robin across PROCCESSOR_NODES
    :returns: dict with 'identifier' (container id from docker's stdout, or
        None when docker produced no output) and 'node' (chosen hostname)
    """
    processorNode = settings.PROCCESSOR_NODES[(pcount + 1) % len(settings.PROCCESSOR_NODES)]
    logger.debug("Creating Node On " + processorNode)
    client = SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    rsa_private_key = paramiko.RSAKey.from_private_key_file(settings.SSHID)
    client.connect(processorNode, username="******", pkey=rsa_private_key)
    # Build the command once (it was previously duplicated for log + exec).
    run_cmd = ('docker run -d ' + datatype.processor + ' python /root/Processor.py ' +
               settings.MASTER_HOSTNAME + ' ' + device.identifier + ' ' +
               datatype.identifier + ' ' + response.identifier)
    logger.debug(run_cmd)
    stdin, stdout, stderr = client.exec_command(run_cmd)
    for i in stderr.readlines():
        logger.error(str(i))
    # BUG FIX: `identifier` was bound only inside the stdout loop, so an
    # empty stdout (e.g. docker failed to start) raised NameError below.
    identifier = None
    for i in stdout.readlines():
        logger.debug(str(i))
        identifier = i  # last stdout line is the container id
    return {'identifier': identifier, 'node': processorNode}
def _exec(self):
    """Run the configured command on the remote host, echoing stdout and
    colorizing stderr.

    :returns: 0 on a clean run, 1 when stderr produced output or on errors.
    """
    exitcode = 0
    client = SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(WarningPolicy())
    try:
        client.connect(self.config.get('hostname'),
                       int(self.config.get('port', 22)),
                       username=self.config.get('user'))
        stdin, stdout, stderr = self.exec_command(client)
        for i, line in enumerate(stdout):
            line = line.rstrip()
            print("{0}: {1}".format(self.host, line))
        for i, line in enumerate(stderr):
            line = line.rstrip()
            print(colored("{0}: {1}".format(self.host, line), 'red'))
            # Any stderr output marks the run as failed.
            exitcode = 1
    except IOError as e:
        print(colored('{0}: {1}'.format(self.host, str(e)), 'red'))
        exitcode = 1
    except (BadHostKeyException, AuthException, SSHException) as e:
        # BUG FIX: 'red' was outside the colored() call, so print received a
        # stray positional argument and the message was never colorized.
        # str(e) replaces e.message, which does not exist on Python 3.
        print(colored('{0}: {1}'.format(self.host, str(e)), 'red'))
        exitcode = 1
    finally:
        client.close()
    return exitcode
def main():
    # CLI dispatcher: maps the first argv word to a handler function; every
    # handler except `help` receives an open SSH connection to the package
    # repository host.
    commands = {"deps": deps, "remote-version": remote_version, "push": push,
                "clean": clean, "versions": versions, "list": list_available,
                "stage": stage, "info": child_information,
                "history": child_history, "help": print_help}
    sys.argv.pop(0)  # remove filename
    command = None
    if len(sys.argv) > 0:
        command = sys.argv.pop(0)
    # Unknown or missing commands fall back to help.
    if command is None or command not in commands:
        command = "help"
    try:
        if command == "help":
            client = None  # help needs no repo connection
        else:
            print "Setting up remote repo connection..."
            client = SSHClient()
            client.load_system_host_keys()
            client.connect(REPOSITORY_HOST, username=REPOSITORY_USER, key_filename=REPOSITORY_KEY)
            print "Connected to repo"
        try:
            # Remaining argv entries are passed through to the handler.
            commands[command](client, sys.argv)
            if command != "help":
                print ""
                print command + " [SUCCESSFUL]"
        except Exception, e:  # Python 2 syntax: report and fall through
            print e
            print command + " [FAILED]"
    finally:
        if client is not None:
            client.close()
def put(self, files, remote_path=None, out_stream=sys.stdout, verbose=False):
    """
    Copy a file from the local system to the remote system.

    :param files: local file or directory (copied recursively)
    :param remote_path: remote destination; defaults to `files`
    :param out_stream: stream used for progress/diagnostic display
    :param verbose: pass-through to display()
    :return: scp output, or the exception text when both attempts fail
    :rtype: str
    """
    if remote_path is None:
        remote_path = files
    self.display("scp '{src}' '{dest}'".format(src=files, dest=remote_path),
                 out_stream=out_stream, verbose=verbose)
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(Project.address, Project.port, Project.user, Project.password)
    scp = SCPClient(ssh.get_transport())
    # noinspection PyBroadException
    try:
        info("\nfiles: %s" % repr(files))
        info("remote_path: %s" % remote_path)
        # First attempt quotes the destination (protects spaces against the
        # remote shell); on any failure, retry with the raw path.
        output = scp.put(files, '"{dest}"'.format(dest=remote_path), recursive=True) or ''
    except Exception:
        try:
            output = scp.put(files, remote_path, recursive=True) or ''
        except Exception as ex:
            # Both attempts failed: surface the error text as the output.
            output = str(ex)
    self.display("\n" + output, out_stream=out_stream, verbose=verbose)
    return output
class Server(object): """ Represents a target remote server """ SCRIPT_DEP_INSTALLED = 'dependencies_installed' SCRIPT_DETECT_PM = 'detect_pm' def __init__(self, address, user): self.address = address self.user = user self.password = None self.ssh_client = SSHClient() self.ssh_client.load_system_host_keys() self.ssh_client.set_missing_host_key_policy(AutoAddPolicy()) def has_valid_connection(self): """ Validates the SSH connection to a remote server. Returns: bool: If the connection is valid. """ try: self.ssh_client.connect(self.address, username=self.user) except IOError, e: return False, e finally:
import six
from paramiko import SSHClient
from paramiko.util import log_to_file

# Smoke test: connect to a Docker daemon host over SSH and print
# `docker version`, with paramiko wire logging enabled for debugging.
log_to_file("paramiko-ssh_client_test.log", level="DEBUG")

base_url = 'ssh://[email protected]:2375'
ssh_client = None
try:
    ssh_client = SSHClient()
    ssh_client.load_system_host_keys()
    # urlparse splits user / host / port out of the docker-style ssh URL.
    parsed = six.moves.urllib_parse.urlparse(base_url)
    ssh_client.connect(parsed.hostname, parsed.port, parsed.username)
    stdin, stdout, stderr = ssh_client.exec_command('docker version')
    print(stdout.read())
finally:
    if ssh_client:
        ssh_client.close()
import random import sys import os import select import paramiko from paramiko import SSHClient import subprocess import time client_master = SSHClient() client_master.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client_master.load_system_host_keys() client_master.connect("10.10.1.89", username="******", password="******") client_slave1 = SSHClient() client_slave1.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client_slave1.load_system_host_keys() client_slave1.connect("10.10.1.90", username="******", password="******") client_slave2 = SSHClient() client_slave2.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client_slave2.load_system_host_keys() client_slave2.connect("10.10.1.51", username="******", password="******") def write_stop(file,diff,misconfig_code,client,write_dir): file_lines = [] ftp = client.open_sftp() remote_file = ftp.open("/home/ubuntu/Neha_Shreya/"+file) for line in remote_file: file_lines.append(line) file_lines.append("TIME:"+str(diff)+",CODE:"+str(misconfig_code)+"\n") file_lines.append("STOP\n") with open(write_dir+file, "a") as dump:
from paramiko import SSHClient
# "Paramiko is a Python implementation of SSHv2 protocol
# providing both client and server functionality"
from scp import SCPClient

ssh = SSHClient()
ssh.load_system_host_keys()
# BUG FIX: SSHClient.connect() takes a hostname (plus keyword args), not an
# 'user@server:path' URL — the old call could never resolve a host.
ssh.connect('server', username='user')

with SCPClient(ssh.get_transport()) as scp:
    scp.put('my_file.txt', 'my_file.txt')  # Copy my_file.txt to the server
def take_snapshot_using_export_policy(self, callback=None):
    """
    Perform an immediate snapshot of the APIC configuration.

    :param callback: Optional callback function that can be used to notify
    applications when a snapshot is taken.  Used by the GUI to update the
    snapshots view when recurring snapshots are taken.
    """
    # Timestamp used both as the git commit message and the git tag.
    tag_name = time.strftime("%Y-%m-%d_%H.%M.%S", time.localtime())
    url = '/api/node/mo/uni/fabric.json'
    # Step 1: register the APIC itself as the SCP "remote path" target.
    # NOTE(review): `"******" % self.session.uid` was redacted at ingestion;
    # as written it raises TypeError (no %s placeholder) — the original
    # presumably was a "%s"-style format of the uid. Restore from VCS.
    remote_path_payload = {
        "fileRemotePath": {
            "attributes": {
                "remotePort": "22",
                "name": "snapback",
                "host": "%s" % self.session.ipaddr,
                "remotePath": "/home/%s" % self.session.uid,
                "protocol": "scp",
                "userName": "******" % self.session.uid,
                "userPasswd": "%s" % self.session.pwd
            },
            "children": []
        }
    }
    resp = self.session.push_to_apic(url, remote_path_payload)
    # Step 2: trigger the config export policy pointing at that remote path.
    export_policy_payload = {
        "configExportP": {
            "attributes": {
                "name": "snapback",
                "adminSt": "triggered"
            },
            "children": [
                {
                    "configRsImportSource" if False else "configRsRemotePath": {
                        "attributes": {
                            "tnFileRemotePathName": "snapback"
                        },
                        "children": []
                    }
                }
            ]
        }
    }
    resp = self.session.push_to_apic(url, export_policy_payload)
    if not resp.ok:
        print resp, resp.text
    # Give the APIC time to write the export before fetching it.
    time.sleep(10)
    # Step 3: pull the exported tarball(s) back over SFTP, delete them
    # remotely, and unpack into the local snapshot repo.
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(AutoAddPolicy())
    ssh.connect(self.session.ipaddr, username=self.session.uid, password=self.session.pwd)
    sftp = ssh.open_sftp()
    sftp.chdir('/home/%s' % self.session.uid)
    file_names = sftp.listdir()
    print file_names
    for file_name in file_names:
        if str(file_name).startswith('ce2_snapback-'):
            sftp.get('/home/' + self.session.uid + '/' + file_name, './' + file_name)
            sftp.remove('/home/' + self.session.uid + '/' + file_name)
            with tarfile.open(file_name, 'r:gz') as tfile:
                tfile.extractall(self.repo_dir)
            os.remove(file_name)
    # Step 4: normalize each exported JSON file's name, pretty-print its
    # contents, and stage it in git.
    for json_filename in os.listdir(self.repo_dir):
        print 'checking', json_filename
        if json_filename.startswith('ce2_snapback') and json_filename.endswith('.json'):
            new_filename = 'snapshot_' + self.session.ipaddr + '_' + json_filename.rpartition('_')[2]
            new_filename = os.path.join(self.repo_dir, new_filename)
            print 'renaming', json_filename, 'to', new_filename
            json_filename = os.path.join(self.repo_dir, json_filename)
            with open(json_filename, 'r') as old_file:
                config = json.loads(old_file.read())
            with open(new_filename, 'w') as new_file:
                new_file.write(json.dumps(config, indent=4, separators=(',', ':')))
            os.remove(json_filename)
            # Add the file to Git
            self.repo.index.add([new_filename])
    # Commit the files and tag with the timestamp
    self.repo.index.commit(tag_name)
    self.repo.git.tag(tag_name)
    if callback:
        callback()
def main(args=None):
    # Solve the two-phase jumper OCP: a cheap limited-memory-Hessian pass,
    # then an exact-Hessian solve warm-started from it; results are saved
    # locally and round-tripped to pariterre.net over SCP.
    init = None
    if args:
        init = args[:-1]   # initial-guess parameters
        pwd = args[-1]     # SSH password for the upload below
    # NOTE(review): when args is None, init stays None and init[0] below
    # raises TypeError — confirm main() is only ever called with args.
    save_path = "2p_init_"+str(init[0])+"_"+str(init[1])+"_"+str(init[2])+"_"+str(init[3])+"_sol.bo"
    if os.path.exists(save_path):
        return  # this initial guess was already solved
    model_path = (
        "../models/jumper2contacts.bioMod",
        "../models/jumper1contacts.bioMod",
    )
    time_min = [0.2, 0.05]
    time_max = [0.5, 1]
    phase_time = [0.6, 0.2]
    number_shooting_points = [30, 15]
    tic = time()
    ocp = prepare_ocp(
        model_path=model_path,
        phase_time=phase_time,
        ns=number_shooting_points,
        time_min=time_min,
        time_max=time_max,
        init=init,
    )
    # First pass: limited-memory Hessian to get near the optimum quickly.
    sol = ocp.solve(
        show_online_optim=False,
        solver_options={"hessian_approximation": "limited-memory", "max_iter": 200}
    )
    # Warm-start the exact-Hessian solve from the first solution.
    utils.warm_start_nmpc(sol, ocp)
    ocp.solver.set_lagrange_multiplier(sol)
    sol = ocp.solve(
        show_online_optim=True,
        solver_options={"hessian_approximation": "exact",
                        "max_iter": 1000,
                        "warm_start_init_point": "yes", }
    )
    toc = time() - tic
    print(f"Time to solve : {toc}sec")
    if init:
        ocp.save(sol, save_path)
        ocp.save_get_data(sol, save_path + 'b')
        ssh = SSHClient()
        ssh.load_system_host_keys()
        ssh.connect('pariterre.net', username='******', password=pwd)
        with SCPClient(ssh.get_transport()) as scp:
            scp.put(save_path, save_path)
            scp.get(save_path)
            scp.put(save_path + 'b', save_path + 'b')
            scp.get(save_path + 'b')
    result = ShowResult(ocp, sol)
    result.animate(nb_frames=241)
def run_test(self, context, test_name, target_resource_name, thread_count):
    """Run a JMeter test against a target resource or service and attach
    the zipped results to the reservation.

    :type context ResourceCommandContext
    :param context: CloudShell command context for the reservation.
    :param test_name: Name of the .jmx test plan (without extension),
        fetched from the Jmeter resource's tests location.
    :param target_resource_name: Resource or service alias to target;
        when falsy, the connected entity is used instead.
    :param thread_count: Number of JMeter threads to run with.
    :return: Human-readable status message.
    """
    api = self.__initApiSession__(context)
    reservation_details = api.GetReservationDetails(
        context.reservation.reservation_id)
    jmeter_resource = Jmeter.create_from_context(context)
    current_timestamp = datetime.datetime.strftime(datetime.datetime.now(),
                                                   "%m-%d_%H-%M")
    artifacts_folder_name = "artifacts-" + current_timestamp
    # Default to the entity physically connected to this resource.
    if not target_resource_name:
        target_resource_name = self._get_connected_entity_name(context)
    # The target may be a resource (FullAddress + WWW_Port attribute) or
    # a service (External_URL attribute, assumed HTTP on port 80).
    if target_resource_name in [
            res.Name
            for res in reservation_details.ReservationDescription.Resources
    ]:
        target_resource_details = api.GetResourceDetails(
            target_resource_name)
        target_ip = target_resource_details.FullAddress
        target_port = next(
            attribute.Value
            for attribute in target_resource_details.ResourceAttributes
            if attribute.Name == "WWW_Port")
    else:
        target_service = next(
            service
            for service in reservation_details.ReservationDescription.Services
            if target_resource_name == service.Alias)
        target_service_attributes = {
            att.Name: att.Value
            for att in target_service.Attributes
        }
        target_ip = target_service_attributes["External_URL"]
        if target_ip.startswith("http://"):
            target_ip = target_ip[len("http://"):]
        target_port = 80
    test_url = jmeter_resource.tests_location + test_name + ".jmx"
    api.WriteMessageToReservationOutput(
        context.reservation.reservation_id,
        "Retrieving test: {0}".format(test_url))
    # Start from a clean artifacts folder for this run.
    if os.path.isdir(artifacts_folder_name):
        shutil.rmtree(path=artifacts_folder_name, ignore_errors=True)
    os.mkdir(artifacts_folder_name)
    testfile = urllib.URLopener()
    testfile.retrieve(test_url, test_name + ".jmx")
    api.WriteMessageToReservationOutput(
        context.reservation.reservation_id,
        "Running Test '{0}' with {1} threads".format(
            test_name, thread_count))
    test_results_filename = None
    return_message = ""
    try:
        # Call the python executable running this driver to run the test.
        # Parameter CSV consumed by the JMeter test plan.
        params_string = "target_ip,target_port\n{0},{1}".format(
            target_ip, target_port)
        with open(os.path.join(artifacts_folder_name, "config.csv"),
                  mode="w") as config_file:
            config_file.write(params_string)
        ssh = SSHClient()
        ssh.load_system_host_keys()
        ssh.set_missing_host_key_policy(AutoAddPolicy())
        ssh.connect(hostname=context.resource.address,
                    username=jmeter_resource.user,
                    password=api.DecryptPassword(
                        jmeter_resource.password).Value,
                    timeout=300)
        # Push the config and the test plan to the (Windows) JMeter host.
        sftp = ssh.open_sftp()
        sftp.put(os.path.join(artifacts_folder_name, "config.csv"),
                 r"C:\JMeter\config.csv")
        sftp.put(test_name + ".jmx", r"C:\JMeter\{0}.jmx".format(test_name))
        stdin, stdout, stderr = ssh.exec_command(
            r"jmeter -Jparam_file=C:\JMeter\config.csv -Jthreads={0} -Jreport_file=C:\JMeter\report.csv -n -t c:\JMeter\{1}.jmx"
            .format(thread_count, test_name))
        # Blocks until the remote JMeter run finishes.
        test_output = stdout.read()
        # Collect the report and clean up the remote files.
        sftp.get(r"C:\JMeter\report.csv",
                 os.path.join(artifacts_folder_name, "report.csv"))
        sftp.remove(r"C:\JMeter\report.csv")
        sftp.remove(r"C:\JMeter\config.csv")
        sftp.remove(r"C:\JMeter\{0}.jmx".format(test_name))
        sftp.close()
        ssh.close()
        with open(os.path.join(artifacts_folder_name, "test_output.txt"),
                  mode="w") as output_file:
            output_file.write(test_output)
        return_message = "Test {test_name} Completed successfully, See report in Sandbox attachments".format(
            test_name=test_name)
    except Exception as ex:
        # On any failure, store the traceback as the test output instead.
        exc_type, exc_value, exc_traceback = sys.exc_info()
        with open(os.path.join(artifacts_folder_name, "test_output.txt"),
                  mode="w") as output_file:
            output_file.write("\n".join(
                traceback.format_exception(exc_type, exc_value,
                                           exc_traceback)))
        return_message = "Test {test_name} Exited with error, See report in Sandbox attachments".format(
            test_name=test_name)
    finally:
        # Always zip whatever artifacts exist and attach them to the
        # reservation, success or failure.
        test_results_filename = test_name + "-result-" + current_timestamp
        shutil.make_archive(base_name=test_results_filename,
                            format="zip",
                            root_dir=artifacts_folder_name)
        attach_file_result_code = self._attach_file_to_reservation(
            context, test_results_filename + ".zip",
            test_results_filename + ".zip")
    shutil.rmtree(path=artifacts_folder_name, ignore_errors=True)
    os.remove(test_name + ".jmx")
    if test_results_filename:
        os.remove(test_results_filename + ".zip")
    if not 200 <= attach_file_result_code < 300:
        return "Error Attaching File to reservation"
    else:
        return return_message
class RemoteClient:
    """Client to interact with a remote host via SSH & SCP."""

    def __init__(self, host, user, ssh_key_filepath, remote_path):
        """
        :param host: Remote hostname or IP address.
        :param user: Username for SSH authentication.
        :param ssh_key_filepath: Path to the local SSH private key.
        :param remote_path: Remote directory used as the upload target.
        """
        self.host = host
        self.user = user
        self.ssh_key_filepath = ssh_key_filepath
        self.remote_path = remote_path
        self.client = None   # lazily-created paramiko SSHClient
        self.scp = None      # lazily-created SCPClient
        self.conn = None
        self.ssh_key = None  # populated by _get_ssh_key()
        self._upload_ssh_key()

    def _get_ssh_key(self):
        """Fetch the locally stored SSH private key.

        :return: The loaded RSAKey, or None when the key cannot be read.
        """
        try:
            self.ssh_key = RSAKey.from_private_key_file(self.ssh_key_filepath)
            logger.info(f'Found SSH key at self {self.ssh_key_filepath}')
        except SSHException as error:
            # BUGFIX: self.ssh_key used to be unset here, so the return
            # below raised AttributeError and masked the real failure.
            logger.error(error)
        return self.ssh_key

    def _upload_ssh_key(self):
        """Install the key pair on the remote host via ssh-copy-id."""
        try:
            system(f'ssh-copy-id -i {self.ssh_key_filepath} {self.user}@{self.host}>/dev/null 2>&1')
            system(f'ssh-copy-id -i {self.ssh_key_filepath}.pub {self.user}@{self.host}>/dev/null 2>&1')
            logger.info(f'{self.ssh_key_filepath} uploaded to {self.host}')
        except FileNotFoundError as error:
            logger.error(error)

    def _connect(self):
        """Open (or reuse) a connection to the remote host.

        :return: The connected SSHClient.
        :raises AuthenticationException: when key authentication fails.
        """
        if self.conn is None:
            try:
                self.client = SSHClient()
                self.client.load_system_host_keys()
                self.client.set_missing_host_key_policy(AutoAddPolicy())
                self.client.connect(self.host,
                                    username=self.user,
                                    key_filename=self.ssh_key_filepath,
                                    look_for_keys=True,
                                    timeout=5000)
                self.scp = SCPClient(self.client.get_transport())
            except AuthenticationException as error:
                logger.info('Authentication failed: did you remember to create an SSH key?')
                logger.error(error)
                raise error
        return self.client

    def disconnect(self):
        """Close the SSH and SCP clients, tolerating never-opened ones."""
        # BUGFIX: guard against close() on None when disconnect() is
        # called before any connection was established.
        if self.client is not None:
            self.client.close()
        if self.scp is not None:
            self.scp.close()

    def bulk_upload(self, files):
        """
        Upload multiple files to a remote directory.

        :param files: List of strings representing file paths to local files.
        """
        self.conn = self._connect()
        uploads = [self._upload_single_file(file) for file in files]
        logger.info(f'Finished uploading {len(uploads)} files to {self.remote_path} on {self.host}')

    def _upload_single_file(self, file):
        """Upload a single file to the remote directory."""
        try:
            self.scp.put(file, recursive=True, remote_path=self.remote_path)
        except SCPException as error:
            logger.error(error)
            raise error
        else:
            # BUGFIX: success is logged only when the upload worked; the
            # original logged "Uploaded" from a finally block even after
            # an SCPException had been raised.
            logger.info(f'Uploaded {file} to {self.remote_path}')

    def download_file(self, file):
        """Download a file from the remote host to the current directory."""
        self.conn = self._connect()
        self.scp.get(file)

    def execute_commands(self, commands):
        """
        Execute multiple commands in succession.

        :param commands: List of unix commands as strings.
        """
        self.conn = self._connect()
        for cmd in commands:
            stdin, stdout, stderr = self.client.exec_command(cmd)
            stdout.channel.recv_exit_status()
            response = stdout.readlines()
            for line in response:
                logger.info(f'INPUT: {cmd} | OUTPUT: {line}')
def push(target_dir, files_to_push, files_to_unzip=None): """Push (and optionally unzip) files on dataportal. Parameters: target_dir (string): The directory on dataportal where files should be uploaded. files_to_push (list of strings): A list of string paths to files that should all be uploaded to ``target_dir``. files_to_unzip (list of strings): A list of string paths that are in ``files_to_push`` that should be unzipped after upload. Returns: None.""" if files_to_unzip is None: files_to_unzip = [] LOGGER.info('Writing paramiko logging to paramiko-log.txt') paramiko.util.log_to_file('paramiko-log.txt') ssh = SSHClient() ssh.load_system_host_keys() # Automatically add host key if needed ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) private_key = paramiko.RSAKey.from_private_key_file( JENKINS_PRIVATE_KEY_PATH) ssh.connect(DATAPORTAL_HOST, 22, username=DATAPORTAL_USER, password=None, pkey=private_key) # Make folders on remote if needed. ssh.exec_command( 'if [ ! -d "{dir}" ]; then mkdir -p -v "{dir}"; fi'.format( dir=_fix_path(target_dir))) print 'Opening SCP connection' sftp = paramiko.SFTPClient.from_transport(ssh.get_transport()) for transfer_file in files_to_push: target_filename = os.path.join(target_dir, os.path.basename(transfer_file)) # Convert windows to linux paths target_filename = _fix_path(target_filename) print 'Transferring %s -> %s ' % (transfer_file, target_filename) for repeat in [True, True, False]: try: sftp.put(transfer_file, target_filename, callback=_sftp_callback) except IOError as filesize_inconsistency: # IOError raised when the file on the other end reports a # different filesize than what we sent. 
if not repeat: raise filesize_inconsistency for filename in files_to_unzip: remote_zipfile_path = _fix_path(os.path.join( target_dir, os.path.basename(filename))) print 'Unzipping %s on remote' % remote_zipfile_path _, stdout, stderr = ssh.exec_command( ('cd {releasedir}; ' 'unzip -o `ls -tr {zipfile} | tail -n 1`').format( releasedir=_fix_path(target_dir), zipfile=os.path.basename(remote_zipfile_path))) print "STDOUT:" for line in stdout: print line print "STDERR:" for line in stderr: print line print 'Closing down SCP' sftp.close() print 'Closing down SSH' ssh.close()
def connect(hostname, username, password):
    """Open a password-authenticated SSH connection to *hostname*.

    :param hostname: Host to connect to (port 22).
    :param username: SSH username.
    :param password: SSH password.
    :return: The connected paramiko ``SSHClient``.
    """
    remote = SSHClient()
    remote.load_system_host_keys()
    remote.connect(hostname=hostname, port=22, username=username,
                   password=password)
    # BUGFIX: return the client; the original dropped it, leaving the
    # freshly opened connection unreachable and garbage-collected.
    return remote
def main():
    """Copy the local Satori folder to a remote host over SSH/SCP, run
    satori-imager.py there, download the produced image locally, and
    (unless --not-purge) delete the remote copy.
    """
    parser = argparse.ArgumentParser(
        description='Creates a Satori image from a remote OS using SSH')
    parser.add_argument(
        'user_host',
        metavar='USER@HOSTNAME|HOSTNAME',
        help='The user and host to connect and run the satori-imager.py')
    parser.add_argument(
        '--arguments', '-args',
        help='''The whole argument string to pass to remote 'satori-imager.py'. For help on those arguments type "satori-imager.py -h"''',
        default='--threads 2')
    parser.add_argument('--key', '-i', help='SSH Key to connect',
                        default=None)
    parser.add_argument(
        '--password', '-p',
        help='''SSH password. Avoid this option, as the password will be shown in bash history and 'ps' command''')
    parser.add_argument(
        '--not-purge',
        help='Do not purges the Satori files from target machine when finished',
        action='store_true',
        default=False)
    parser.add_argument(
        '--r-dir',
        help='Directory to copy the Satori files in the remote host',
        default='/tmp/')
    verb = parser.add_mutually_exclusive_group()
    verb.add_argument('--verbose', '-v', help='verbose mode',
                      action='count', default=0)
    verb.add_argument('--debug', '-d', help='debugging mode',
                      action='store_true', default=False)
    verb.add_argument('--quiet', '-q', help='quiet mode',
                      action='store_true', default=False)
    __log.warning(header)
    args = parser.parse_args()
    ''' ================================================ VERBOSITY CHECKS ================================================ '''
    # Map the mutually-exclusive verbosity flags onto a log level.
    if args.debug:
        __log.setLevel(log.DEBUG)
    elif args.quiet:
        __log.setLevel(log.ERROR)
    elif args.verbose == 0:
        __log.setLevel(log.WARNING)
    elif args.verbose == 1:
        __log.setLevel(log.INFO)
    # Prompt interactively unless a password was given on the command line.
    if not args.password:
        ssh_pass = getpass.getpass()
    else:
        __log.info("Password provided!")
        ssh_pass = args.password
    remote_dir = args.r_dir
    arguments = args.arguments
    __log.info("Remote directory is '%s'" % remote_dir)
    # Satori is copied from the directory this script resides in.
    satoriFolder = sys.path[0]
    __log.info("Satori will be copied from '%s'" % satoriFolder)
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    user = None
    if '@' in args.user_host:
        user = args.user_host.split('@')[0]
        host = args.user_host.split('@')[1]
    else:
        host = args.user_host
    __log.info("Attempting SSH connection to '{0}' as user '{1}'".format(
        host, user))
    ssh.connect(host, username=user, password=ssh_pass,
                key_filename=args.key)
    __log.warning("Connection Established!")
    sftp = ssh.open_sftp()
    __log.info("SFTP channel opened!")
    # sftp = RecursiveSFTPClient( ssh.channel )
    with closing(WriteDir(ssh.get_transport(), remote_dir)) as scp:
        scp.send_dir(satoriFolder, preserve_times=True)
    __log.info("Satori folder copied at remote location '%s'" % remote_dir)
    # sftp.put_dir( satoriFolder, remote_dir )
    remote_satori = remote_dir + '/satori'
    # Snapshot the remote directory so the new output file can be found
    # later by diffing directory listings.
    old_contents = sftp.listdir(remote_satori)
    __log.debug(old_contents)
    rem_command = 'chmod 775 {0}; cd {0}; ./satori-imager.py {1} '.format(
        remote_satori, arguments)
    # rem_command = 'chmod 775 {0}; cd {0}; ./satori-imager.py {1} 2>&1 |tee /tmp/satori.log'. format( remote_satori, arguments )
    __log.info("The command to run in remote host is:")
    __log.info("'%s'" % rem_command)
    stdin, stdout, stderr = ssh.exec_command(rem_command)
    __log.warning("Executing... ")
    __log.info("Output from Remote Execution:")
    __log.info("===========================================")
    __log.info(defs.bash_l_gray)
    # Stream remote stderr live, stripping the trailing newline.
    for line in iter(lambda: stderr.readline(2048), ""):
        __log.info(line[:-1])  # last char is the next_line
    __log.info(defs.bash_n_color)
    __log.info("===========================================")
    exit_status = stdout.channel.recv_exit_status()
    new_contents = sftp.listdir(remote_satori)
    __log.debug(new_contents)
    # NOTE(review): if no new file was produced, the [0] below raises
    # IndexError before the len() check runs; the check also measures the
    # filename's length, not the list's — confirm intended behaviour.
    output = list(set(new_contents) - set(old_contents))[0]
    if len(output) == 0:
        __log.critical('No output file. Something went wrong. Exiting...')
        sftp.close()
        ssh.close()
        sys.exit(1)
    localfile = os.getcwd() + '/' + output
    remotefile = remote_satori + '/' + output
    # Avoid clobbering an existing local file by appending _1, _2, ...
    i = 1
    new_localfile = localfile
    while os.path.exists(new_localfile):
        new_localfile = localfile + '_' + str(i)
        i += 1
    localfile = new_localfile
    __log.info("Getting remote file '%s'" % remotefile)
    sftp.get(remotefile, localfile)
    sftp.close()
    __log.warning("Done!")
    __log.warning("Satori image is located at '%s'" % localfile)
    if not args.not_purge:
        stdin, stdout, stderr = ssh.exec_command('rm -rf %s' % remote_satori)
        exit_status = stdout.channel.recv_exit_status()
        __log.info("Remote copied files purged!")
    ssh.close()
    __log.info("SSH session closed!")
def ssh_connect(serv, **kwargs):
    """Open an SSH connection to *serv* using the credentials stored for it.

    Uses key-based authentication when SSH is enabled for the server,
    otherwise password authentication.

    :param serv: Server hostname/IP used both for credential lookup and
        as the connection target.
    :return: The connected ``SSHClient`` on success, or an error string
        on failure (callers distinguish by type).
    """
    import paramiko
    from paramiko import SSHClient
    import sql
    ssh_enable, ssh_user_name, ssh_user_password, ssh_key_name = return_ssh_keys_path(
        serv)
    servers = sql.select_servers(server=serv)
    ssh_port = 22
    for server in servers:
        # Column 10 of the servers row holds the configured SSH port.
        ssh_port = server[10]
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        if ssh_enable == 1:
            cloud = sql.is_cloud()
            # BUGFIX: paramiko has no pkey.load_private_key_file();
            # use RSAKey.from_private_key_file as elsewhere in this file.
            if cloud != '':
                k = paramiko.RSAKey.from_private_key_file(ssh_key_name,
                                                          password=cloud)
            else:
                k = paramiko.RSAKey.from_private_key_file(ssh_key_name)
            ssh.connect(hostname=serv, port=ssh_port,
                        username=ssh_user_name, pkey=k, timeout=11,
                        banner_timeout=200)
        else:
            ssh.connect(hostname=serv, port=ssh_port,
                        username=ssh_user_name, password=ssh_user_password,
                        timeout=11, banner_timeout=200)
        return ssh
    except paramiko.AuthenticationException:
        return 'Authentication failed, please verify your credentials'
    except paramiko.SSHException as sshException:
        return 'error: Unable to establish SSH connection: %s ' % sshException
    except paramiko.PasswordRequiredException as e:
        # NOTE: kept in the original order even though this is a subclass
        # of AuthenticationException and is therefore shadowed above.
        return 'error: %s ' % e
    except paramiko.BadHostKeyException as badHostKeyException:
        return 'error: Unable to verify server\'s host key: %s ' % badHostKeyException
    except Exception as e:
        # BUGFIX: the original compared the exception object itself to a
        # string, which is never true; compare the message text instead.
        if "No such file or directory" in str(e):
            return 'error: %s. Check ssh key' % e
        elif "Invalid argument" in str(e):
            error = 'error: Check the IP of the server'
        else:
            error = e
        return str(error)
def get_ssh_client(ip, pk):
    """Build an SSHClient connected to *ip* using the private key at *pk*.

    :param ip: Target host address.
    :param pk: Path to the private key file.
    :return: A connected paramiko SSHClient.
    """
    client = SSHClient()
    client.load_system_host_keys()
    # Accept previously unseen hosts rather than failing on a missing
    # known_hosts entry.
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(ip, username='******', key_filename=pk)
    return client
class RemoteCommandExecutor:
    """Execute remote commands over a persistent SSH connection."""

    def __init__(self, hostname, user, ssh_key_file=None):
        # Connect immediately; failures are logged and re-raised so a
        # half-constructed executor is never used.
        try:
            # Default to the target user's own id_rsa when no key given.
            if not ssh_key_file:
                ssh_key_file = os.path.expanduser("~" + user) + "/.ssh/id_rsa"
            self.__ssh_client = SSHClient()
            self.__ssh_client.load_system_host_keys()
            self.__ssh_client.set_missing_host_key_policy(AutoAddPolicy())
            self.__ssh_client.connect(hostname=hostname,
                                      username=user,
                                      key_filename=ssh_key_file)
            self.__user_at_hostname = "{0}@{1}".format(user, hostname)
        except Exception as e:
            logging.error("Failed when connecting to host %s with error: %s",
                          hostname, e)
            raise

    def __del__(self):
        # Best-effort cleanup; never raise from a destructor.
        try:
            self.__ssh_client.close()
        except Exception as e:
            # Catch all exceptions if we fail to close the clients
            logging.warning(
                "Exception raised when closing remote clients: {0}".format(e))

    def run_remote_command(self, command, timeout=seconds(5),
                           log_error=True, fail_on_error=True):
        """
        Execute remote command on the configured host.

        :param command: command to execute (string, or list of tokens
            which is joined with spaces).
        :param timeout: seconds to wait for the command to finish.
        :param log_error: log errors.
        :param fail_on_error: raise RemoteCommandExecutionError on a
            non-zero exit code.
        :return: result of the execution.
        """
        if isinstance(command, list):
            command = " ".join(command)
        logging.info("Executing remote command command on {0}: {1}".format(
            self.__user_at_hostname, command))
        result = None
        try:
            stdin, stdout, stderr = self.__ssh_client.exec_command(
                command, get_pty=True)
            self._wait_for_command_execution(timeout, stdout)
            result = RemoteCommandResult(
                return_code=stdout.channel.recv_exit_status(),
                stdout="\n".join(stdout.read().decode().splitlines()),
                stderr="\n".join(stderr.read().decode().splitlines()),
            )
            if result.return_code != 0 and fail_on_error:
                raise RemoteCommandExecutionError(result)
            return result
        except Exception:
            # result is still None when the failure happened before the
            # command produced any output.
            if log_error and result:
                logging.error(
                    "Command {0} failed with error:\n{1}\nand output:\n{2}".
                    format(command, result.stderr, result.stdout))
            raise

    @staticmethod
    def _wait_for_command_execution(timeout, stdout):
        # Using the non-blocking exit_status_ready to avoid being stuck forever on recv_exit_status
        # especially when a compute node is terminated during this operation
        while timeout > 0 and not stdout.channel.exit_status_ready():
            timeout = timeout - 1
            time.sleep(1)
        if not stdout.channel.exit_status_ready():
            raise RemoteCommandExecutionError(
                "Timeout occurred when executing remote command")

    @staticmethod
    def run_remote_command_on_multiple_hosts(command,
                                             hostnames,
                                             user,
                                             ssh_key_file=None,
                                             parallelism=10,
                                             timeout=10,
                                             fail_on_error=True):
        """Fan *command* out to many hosts using a process pool.

        :return: dict mapping each hostname to its command result.
        """
        if not hostnames:
            return {}
        pool = Pool(parallelism)
        try:
            r = pool.map_async(
                _pickable_run_remote_command,
                [(hostname, command, user, ssh_key_file, timeout,
                  fail_on_error) for hostname in hostnames],
            )
            # The pool timeout is computed by adding 2 times the command timeout for each batch of hosts that is
            # processed in sequence. Where the size of a batch is given by the degree of parallelism.
            results = r.get(timeout=int(
                ceil(len(hostnames) / float(parallelism)) * (2 * timeout)))
        finally:
            pool.terminate()
        return dict(results)
def _connect(self):
    """Try to establish an SSH connection, attempting credential sources
    in order: (0) cached credentials that previously succeeded for this
    host/port/user, (1) configured passwords, (2) reachable SSH agents,
    (3) discovered private-key files.

    On success stores the connected client in self._client, records the
    winning credentials in self._success_args, and returns True.
    """
    client = SSHClient()
    client.set_missing_host_key_policy(AutoAddPolicy())
    client.load_system_host_keys()
    # 0. If host in SUCCESS_CACHE try this first
    success_cache_key = frozenset((self.host, self.port, self.user))
    if success_cache_key in SUCCESS_CACHE:
        auth_info = SUCCESS_CACHE.get(success_cache_key)
        if auth_info.get('password'):
            try:
                client.connect(password=auth_info['password'],
                               hostname=self.host,
                               port=self.port,
                               username=auth_info['user'],
                               allow_agent=False,
                               look_for_keys=False,
                               gss_auth=False,
                               compress=not self._interactive,
                               timeout=self.timeout)
                self._client = client
                self._success_args = auth_info
                self._success_args['cached'] = True
                return True
            except SSHException:
                client.close()
        elif auth_info.get('agent_socket'):
            # Temporarily point SSH_AUTH_SOCK at the cached agent socket,
            # restoring the previous value in the finally block.
            SSH_AUTH_SOCK_bak = environ.get('SSH_AUTH_SOCK', None)
            environ['SSH_AUTH_SOCK'] = auth_info['agent_socket']
            try:
                client.connect(hostname=self.host,
                               port=self.port,
                               username=auth_info['user'],
                               allow_agent=True,
                               look_for_keys=False,
                               password=None,
                               compress=not self._interactive,
                               timeout=self.timeout)
                self._client = client
                self._success_args = auth_info
                self._success_args['cached'] = True
                return True
            except SSHException:
                client.close()
            finally:
                if SSH_AUTH_SOCK_bak is None:
                    del environ['SSH_AUTH_SOCK']
                else:
                    environ['SSH_AUTH_SOCK'] = SSH_AUTH_SOCK_bak
        elif auth_info.get('pkey'):
            try:
                client.connect(hostname=self.host,
                               port=self.port,
                               username=auth_info['user'],
                               pkey=auth_info['pkey'],
                               allow_agent=False,
                               look_for_keys=False,
                               gss_auth=False,
                               compress=not self._interactive,
                               timeout=self.timeout)
                self._client = client
                self._success_args = auth_info
                self._success_args['cached'] = True
                return True
            except SSHException:
                client.close()
    current_user = getuser()
    # 1. If password try password
    if self.passwords:
        for password in self.passwords:
            username = self.user or current_user
            try:
                client.connect(password=password,
                               hostname=self.host,
                               port=self.port,
                               username=username,
                               allow_agent=False,
                               look_for_keys=False,
                               gss_auth=False,
                               compress=not self._interactive,
                               timeout=self.timeout)
                self._client = client
                self._success_args = {
                    'host': self.host,
                    'port': self.port,
                    'user': username,
                    'password': password,
                    'auto': True,
                    'cached': False,
                }
                return True
            except SSHException:
                client.close()
    # 2. Try agent, default methods etc
    for username, SSH_AUTH_SOCK in self._find_agent_sockets():
        SSH_AUTH_SOCK_bak = environ.get('SSH_AUTH_SOCK', None)
        environ['SSH_AUTH_SOCK'] = SSH_AUTH_SOCK
        # An explicitly configured user takes precedence over the user
        # discovered alongside the agent socket.
        username = self.user or username
        try:
            client.connect(hostname=self.host,
                           port=self.port,
                           username=username,
                           allow_agent=True,
                           look_for_keys=False,
                           password=None,
                           compress=not self._interactive,
                           timeout=self.timeout)
            self._client = client
            self._success_args = {
                'host': self.host,
                'port': self.port,
                'user': username,
                'agent_socket': SSH_AUTH_SOCK,
                'auto': False,
                'cached': False
            }
            return True
        except SSHException:
            client.close()
        finally:
            if SSH_AUTH_SOCK_bak is None:
                del environ['SSH_AUTH_SOCK']
            else:
                environ['SSH_AUTH_SOCK'] = SSH_AUTH_SOCK_bak
    # 3. Try all found pkeys
    for key_file, key_data in self._iter_private_keys:
        username = self.user or current_user
        # Try each configured key password, plus no password at all.
        key_passwords = list(self.key_passwords)
        key_passwords.insert(0, None)
        found_key_password = None
        key_data = self._convert(key_data, key_passwords)
        pkey_obj = BytesIO(str(key_data))
        pkey = None
        # Try every supported key class with every candidate password
        # until one successfully parses the key material.
        for klass in KEY_CLASSES:
            for key_password in key_passwords:
                try:
                    pkey_obj.seek(0)
                    pkey = klass.from_private_key(pkey_obj,
                                                  password=key_password)
                    found_key_password = key_password
                    break
                except SSHException:
                    continue
        if pkey is None:
            continue
        try:
            client.connect(hostname=self.host,
                           port=self.port,
                           username=username,
                           pkey=pkey,
                           allow_agent=False,
                           look_for_keys=False,
                           gss_auth=False,
                           compress=not self._interactive,
                           timeout=self.timeout)
            self._client = client
            self._success_args = {
                'host': self.host,
                'port': self.port,
                'user': username,
                'key': key_data,
                'key_file': key_file,
                'key_password': found_key_password,
                'pkey': pkey,
                'auto': False,
                'cached': False
            }
            return True
        except SSHException:
            client.close()
    # All credential sources exhausted: make sure the throwaway client is
    # closed (falls through returning None, i.e. falsy).
    if not self._client and client:
        client.close()
def deploy(address, model, identity_file=None, password=None):
    """Deploy a tflite model to the Coral Board.

    Connects to the coral board via ssh to deploy the model. You must
    pass a tflite model. For maximum performance, you should pass a
    quantized tflite model, which can be generated using the
    convert_to_edgetpu module. After deploying the model, the function
    will start a webserver publishing the results.

    Args:
        address: Address of the coral board
        model: Path to the tflite model to deploy
        identity_file: [Optional] Path to the identity file. Identity
            file must be provided if password is not provided.
        password: [Optional] Password to use for ssh authentication.
            Password must be provided if identity_file is not provided.

    Returns:
        None
    """
    # At least one authentication method is required; the key takes
    # precedence when both are supplied.
    if identity_file is None and password is None:
        raise Exception("Must pass identity file OR password")
    use_key = identity_file is not None

    ssh = SSHClient()
    ssh.load_system_host_keys()

    # Connect to coral
    logger.info("Connecting to Anything Sensor...")
    credentials = ({"key_filename": identity_file} if use_key
                   else {"password": password})
    ssh.connect(hostname=address, username=DEFAULT_USERNAME, **credentials)
    logger.info("Successfully connected to Anything Sensor v1!")

    # Transfer model to coral
    logger.info("Transferring model to Anything Sensor...")
    # SCPCLient takes a paramiko transport as an argument
    with SCPClient(ssh.get_transport()) as scp:
        scp.put(
            model,
            "/home/mendel/learn_ml/coral_inference/classification/" +
            os.path.basename(model))
    logger.info("Transfer Successful!")

    # Kill any previous run, then launch the app inside a detached screen.
    ssh.exec_command("pkill screen")
    ssh.exec_command(
        "cd /home/mendel/learn_ml/coral_inference/classification && screen -d -m python3 "
        + "app.py --mnist -m " + os.path.basename(model))
    logger.info("Started execution!")
    logger.info("Stream accessible at {}:5000".format(address))
    ssh.close()
class Rampager(object):
    """
    Define methods here that will cause rampage.

    Each "break_" method should have a corresponding "restore_" method
    that will heal the breakage.  If your break functionality self heals
    (say timing based), use the keyword "time_" to start your method.
    @todo Add "other" support here.
    @todo Add providers instead of directly using vcenter provider.
    """

    def __init__(self):
        # One shared SSH client, lazily connected by _connect_to_host.
        self.vc = VCHelper()
        self._ssh = SSHClient()
        self._ssh.load_system_host_keys()
        self._ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    def __del__(self):
        # Close the SSH session only if a transport was ever opened.
        if self._ssh._transport:
            self._ssh.close()

    def _connect_to_host(self, host_ip):
        """Connect the shared client to *host_ip* unless already connected.

        Credentials default to ubuntu/ubuntu on port 22 and can be
        overridden via HOST_PORT/HOST_USERNAME/HOST_PASSWORD env vars.
        """
        # Check if already connected
        if self._ssh._transport:
            return
        self._ssh.connect(host_ip, os.getenv('HOST_PORT', 22),
                          os.getenv('HOST_USERNAME', 'ubuntu'),
                          os.getenv('HOST_PASSWORD', 'ubuntu'))

    def break_server_power(self, server_uuid):
        """Power off the VM identified by *server_uuid* (no-op when the
        VM cannot be found)."""
        LOG.debug("Executing break power task for server uuid: %s" %
                  server_uuid)
        vm_obj = self.vc.get_vm_from_uuid(server_uuid)
        if vm_obj:
            self.vc.power_off_vm(vm_obj)

    def restore_server_power(self, server_uuid):
        """Power the VM identified by *server_uuid* back on."""
        LOG.debug("Executing restore power task for server uuid: %s" %
                  server_uuid)
        vm_obj = self.vc.get_vm_from_uuid(server_uuid)
        if vm_obj:
            self.vc.power_on_vm(vm_obj)

    def break_service(self, server_ip):
        """
        Kill services such as ostackhost, hostagent, neutron, cider etc.
        :return:
        """
        pass

    def restore_service(self, server_ip):
        pass

    def break_nfs_mount(self, server_ip):
        pass

    def restore_nfs_mount(self, server_ip):
        pass

    def break_data_network(self, server_ip):
        pass

    def restore_data_network(self, server_ip):
        pass

    def time_cpu_usage(self, server_ip, t_secs=20, threads=None):
        """Drive CPU load on *server_ip* for *t_secs* seconds by copying
        and running the kill_cpu.sh helper remotely; self-heals when the
        script finishes."""
        # BUGFIX: log the duration (t_secs); the original interpolated
        # the `time` module object into the message.
        LOG.debug("Executing increased cpu load on %s for %s seconds" %
                  (server_ip, t_secs))
        self._connect_to_host(server_ip)
        with SCPClient(self._ssh.get_transport()) as scp:
            scp.put('rampage/utils/kill_cpu.sh', remote_path="/tmp/")
        cmd = '/tmp/kill_cpu.sh %s' % t_secs
        if threads:
            cmd += " %s" % threads
        self._ssh.exec_command(cmd)
        # Block until the load script has run its course.
        time.sleep(t_secs)

    def time_memory_usage(self, server_ip):
        pass
def ssh_connect(serv, **kwargs):
    """Open an SSH connection to *serv* using globally configured
    credentials.

    :param serv: Hostname/IP to connect to.
    :param kwargs: Pass ``check=True`` to get a True/False result (with
        HTML error markup printed on failure) instead of the client or an
        error string.
    :return: True/False in check mode; otherwise the connected
        ``SSHClient`` or an error-description string.
    """
    import sql
    ssh_enable = sql.ssh_enable()
    ssh_user_name = sql.select_ssh_username()
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        if ssh_enable == 1:
            k = paramiko.RSAKey.from_private_key_file(
                get_config_var('ssh', 'ssh_keys'))
            ssh.connect(hostname=serv, username=ssh_user_name, pkey=k)
        else:
            ssh.connect(hostname=serv, username=ssh_user_name,
                        password=sql.select_ssh_password())
        if kwargs.get('check'):
            return True
        else:
            return ssh
    except paramiko.AuthenticationException:
        if kwargs.get('check'):
            print(
                '<div class="alert alert-danger">Authentication failed, please verify your credentials</div>'
            )
            return False
        else:
            return 'Authentication failed, please verify your credentials'
    except paramiko.SSHException as sshException:
        if kwargs.get('check'):
            print(
                '<div class="alert alert-danger">Unable to establish SSH connection: %s </div>'
                % sshException)
            return False
        else:
            return 'Unable to establish SSH connection: %s ' % sshException
    except paramiko.BadHostKeyException as badHostKeyException:
        if kwargs.get('check'):
            print(
                '<div class="alert alert-danger">Unable to verify server\'s host key: %s </div>'
                % badHostKeyException)
            return False
        else:
            return 'Unable to verify server\'s host key: %s ' % badHostKeyException
    except Exception as e:
        # BUGFIX: e.args may have fewer than two elements (e.g. a plain
        # Exception("msg")), so blindly indexing e.args[1] raised
        # IndexError inside the handler; fall back to str(e).
        msg = e.args[1] if len(e.args) > 1 else str(e)
        if msg == "No such file or directory":
            if kwargs.get('check'):
                print(
                    '<div class="alert alert-danger">{}. Check ssh key</div>'.
                    format(msg))
            else:
                return '{}. Check ssh key'.format(msg)
        elif msg == "Invalid argument":
            if kwargs.get('check'):
                print(
                    '<div class="alert alert-danger">Check the IP of the new server</div>'
                )
            else:
                error = 'Check the IP of the new server'
        else:
            if kwargs.get('check'):
                print('<div class="alert alert-danger">{}</div>'.format(msg))
            else:
                error = msg
        if kwargs.get('check'):
            return False
        else:
            return error
class SFTP(LoggingMixIn, Operations):
    '''
    A simple SFTP filesystem. Requires paramiko:
    http://www.lag.net/paramiko/

    You need to be able to login to remote host without entering a
    password.
    '''

    def __init__(self, host, path='.'):
        # One SSH connection and one SFTP channel live for the whole
        # mount; they are torn down in destroy().
        self.client = SSHClient()
        self.client.load_system_host_keys()
        self.client.connect(host)
        self.sftp = self.client.open_sftp()
        self.root = path

    # Each FUSE operation below maps 1:1 onto the corresponding paramiko
    # SFTPClient call.

    def chmod(self, path, mode):
        return self.sftp.chmod(path, mode)

    def chown(self, path, uid, gid):
        return self.sftp.chown(path, uid, gid)

    def create(self, path, mode):
        # Create an empty file, then apply the requested mode bits.
        f = self.sftp.open(path, 'w')
        f.chmod(mode)
        f.close()
        return 0

    def destroy(self, path):
        # Called on unmount: close the SFTP channel then the connection.
        self.sftp.close()
        self.client.close()

    def getattr(self, path, fh=None):
        # lstat so symlinks are reported as links, not their targets.
        st = self.sftp.lstat(path)
        return dict((key, getattr(st, key)) for key in (
            'st_atime', 'st_gid', 'st_mode', 'st_mtime', 'st_size',
            'st_uid'))

    def mkdir(self, path, mode):
        return self.sftp.mkdir(path, mode)

    def read(self, path, size, offset, fh):
        f = self.sftp.open(path)
        f.seek(offset, 0)
        buf = f.read(size)
        f.close()
        return buf

    def readdir(self, path, fh):
        # FUSE expects '.' and '..' plus (here) utf-8 encoded names.
        return ['.', '..'] + [name.encode('utf-8')
                              for name in self.sftp.listdir(path)]

    def readlink(self, path):
        return self.sftp.readlink(path)

    def rename(self, oldpath, newpath):
        return self.sftp.rename(oldpath, newpath)

    def rmdir(self, path):
        return self.sftp.rmdir(path)

    def symlink(self, oldpath, newpath):
        return self.sftp.symlink(oldpath, newpath)

    def truncate(self, path, length, fh=None):
        return self.sftp.truncate(path, length)

    def unlink(self, path):
        return self.sftp.unlink(path)

    def utimens(self, path, times=None):
        return self.sftp.utime(path, times)

    def write(self, path, data, offset, fh):
        # Open read-write so the write does not truncate existing data.
        f = self.sftp.open(path, 'r+')
        f.seek(offset, 0)
        f.write(data)
        f.close()
        return len(data)
#This function will take a windows path string (make sure it uses double backslash) #as an argument and return a unix path as a string def unix_path(windows_path): out = windows_path.replace('\\', '/') out = out.replace(':', '') out = out.split('/') out[0] = out[0].lower() out = '/'.join(out) out = '/mnt/' + out return out ### Opening secure connection print("Establishing secure connection...") ssh = SSHClient() #open SSH connection ssh.load_system_host_keys() #get list of known hosts ssh.connect( 'spiedie.binghamton.edu', 22, username='******' ) #connect to specified host and account (username). Shouldn't need a password argument if key authentification is set up properly # SCPCLient takes a paramiko transport as an argument scp = SCPClient(ssh.get_transport()) #open secure copy client sftp = ssh.open_sftp() #open secure file transfer client ### ### Getting completed files #This loop checks the results directory, and if any h5 files are found it will copy the plots #folder over to the local directory, and then remove it. It will also move the local h5 file to the #results path and the local pl2 file to the completed path print("Pulling completed data from the server...")
from paramiko import SSHClient, AutoAddPolicy
''' EXERCISE 2, point no. 1 '''
# to telnet to a server
# telnet <ip address> <port no>

ssh = SSHClient()
ssh.load_system_host_keys()  # to look for trusted hosts
# to add the host if not already added
ssh.set_missing_host_key_policy(AutoAddPolicy())
# NOTE(review): hard-coded host and masked credentials; move these to
# configuration before real use.
ssh.connect('49.36.141.4', username='******', password='******')

# to check disk usage
print("//////////////Disk Usage//////////////")
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('df -H')
for i in ssh_stdout:
    print(i.strip('\n'))

# to check inode usage
print("//////////////iNode Usage//////////////")
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('df -i')
for i in ssh_stdout:
    print(i.strip('\n'))

# to list files & directory
ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command('cd code/; ls -l')
for i in ssh_stdout:
    print(i.strip('\n'))

# to use ftp & sftp
# ftp <ip address>
# <username>
class RemoteClient(object):
    """Client to interact with remote host via ssh and scp.

    The connection is opened lazily: ``execute_commands`` connects on first
    use.  Authentication combines *password* with the private key at
    *ssh_key_filepath*.
    """

    def __init__(self, host, user, password, ssh_key_filepath):
        super(RemoteClient, self).__init__()
        self.host = host
        self.user = user
        self.password = password
        self.ssh_key_filepath = ssh_key_filepath
        self.ssh_key = None
        # self.remote_path = remote_path
        self.client = None
        self.scp = None
        # self.__get_ssh_key()
        # self.__upload_ssh_key()

    def __get_ssh_key(self):
        """Fetch the local ssh private key; returns None when loading fails."""
        try:
            self.ssh_key = RSAKey.from_private_key_file(
                self.ssh_key_filepath, self.password)
            logger.info(f'Found SSH key at {self.ssh_key_filepath}')
        except SSHException as error:
            logger.error(error)
        return self.ssh_key

    # def __upload_ssh_key(self):
    #     try:
    #         system(f'ssh')
    #     except Exception as e:
    #         raise e

    def __connect(self):
        """Open and return an authenticated SSHClient.

        Raises AuthenticationException (or the underlying error) on failure.
        BUGFIX: the previous version ended with ``finally: return
        self.client``, which silently suppressed the ``raise`` statements in
        the except handlers, so callers could never observe a failed
        connection.
        """
        try:
            logger.info('Attempting Authentication')
            self.client = SSHClient()
            self.client.load_system_host_keys()
            self.client.set_missing_host_key_policy(MissingHostKeyPolicy())
            self.client.connect(self.host,
                                username=self.user,
                                password=self.password,
                                key_filename=self.ssh_key_filepath,
                                look_for_keys=True,
                                timeout=5000)
            logger.info('Successfully Authenticated')
            # self.scp = SCPClient(self.client.get_transport())
        except AuthenticationException as e:
            logger.error('Authentication failed.')
            logger.error(e)
            raise
        except Exception as err:
            logger.error('connection failed')
            logger.error(err)
            raise
        return self.client

    def disconnect(self):
        """Close the ssh connection"""
        self.client.close()
        # self.scp.close()

    def execute_commands(self, commands):
        """Execute multiple commands in succession on the remote host."""
        if self.client is None:
            try:
                self.client = self.__connect()
            except Exception:
                # BUGFIX: the old handler called self.client.close(), but
                # self.client is still None when __connect() fails, which
                # raised AttributeError on top of the connection error.
                logger.error("server connection failed")
                return
        for cmd in commands:
            logger.info(f'Executing: {cmd}')
            stdin, stdout, stderr = self.client.exec_command(cmd)
            # Block until the remote command exits before reading its output.
            stdout.channel.recv_exit_status()
            response = stdout.readlines()
            for line in response:
                logger.info(f'INPUT: {cmd} | OUTPUT: {line}')
class RemoteProcess(object):
    """A process launched over SSH on a remote host, with optional background
    monitoring via a dedicated thread."""

    STOPPABLE = False

    def __init__(self, client, name, cmd, error_event, log_entry,
                 sudo_passwd=None):
        # NOTE: the ssh connection is created later in connect(); the
        # ``client`` constructor argument is intentionally ignored, matching
        # the original behavior relied on by existing callers.
        self.name = name
        self.client = None
        self.cmd = cmd
        self.exited = False
        self.joined = False
        self.error_event = error_event
        self.monitor_thread = None
        self.first_line = None
        self.log_entry = log_entry
        self.sudo_passwd = sudo_passwd

    def connect(self, ssh_cfg, addr):
        """Open an SSH connection to *addr* using the ssh_cfg dict
        (keys: 'user', 'port', 'key')."""
        self.client = SSHClient()
        self.client.set_missing_host_key_policy(AutoAddPolicy())
        self.client.load_system_host_keys()
        try:
            self.client.connect(addr,
                                username=ssh_cfg['user'].format(),
                                port=ssh_cfg['port'].format(),
                                key_filename=os.path.expanduser(
                                    ssh_cfg['key'].format()))
        except (paramiko.ssh_exception.AuthenticationException,
                socket.gaierror):
            log_error(
                "Could not connect to {user}@{addr}:{port} with key {key}".
                format(addr=addr, **ssh_cfg))
            raise
        except Exception as e:
            log_error(
                "Unknown error connecting to {user}@{addr}:{port} with key {key}"
                .format(addr=addr, **ssh_cfg))
            raise

    def wait(self, seconds):
        """Wait up to *seconds*; returns True if another thread reported an
        error through error_event."""
        if monitor_wait(self.error_event, seconds):
            log_warn("Error encountered in other thread while executing %s" %
                     self.name)
            return True
        return False

    def start(self, sudo_passwd=None):
        """Execute self.cmd remotely; feed *sudo_passwd* to stdin if given."""
        if self.client is None:
            log_error(
                "Cannot start remote process when client is not connected")
            # BUGFIX: previously fell through and crashed with
            # AttributeError on self.client.exec_command; bail out instead.
            return
        self.stdin, self.stdout, self.stderr = self.client.exec_command(
            self.cmd)
        if sudo_passwd is not None:
            self.stdin.write(sudo_passwd + '\n')
            self.stdin.flush()

    def run_process_monitor(self, **kwargs):
        """Run the external monitor, then poll until the process exits.
        On any monitor error, signal error_event and re-raise."""
        try:
            monitor_process(self, **kwargs)
            while not self.exited and self.poll() is None:
                time.sleep(.1)
        except:
            if self.error_event:
                self.error_event.set()
            while not self.exited and self.poll() is None:
                time.sleep(.1)
            raise

    def monitor(self, daemon=False, **kwargs):
        """Start run_process_monitor() on its own thread and return it."""
        self.monitor_thread = Thread(target=self.run_process_monitor,
                                     kwargs=kwargs)
        self.monitor_thread.daemon = daemon
        self.monitor_thread.start()
        return self.monitor_thread

    def connect_and_run_process_monitor(self, ssh_cfg, addr, **kwargs):
        """Connect, start the command, then monitor it (blocking)."""
        try:
            self.connect(ssh_cfg, addr)
            self.start(self.sudo_passwd)
        except:
            if self.error_event:
                self.error_event.set()
            raise
        self.run_process_monitor(**kwargs)

    def connect_and_monitor(self, ssh_cfg, addr, daemon=False, **kwargs):
        """Threaded wrapper for connect_and_run_process_monitor()."""
        self.monitor_thread = Thread(
            target=self.connect_and_run_process_monitor,
            args=(ssh_cfg, addr),
            kwargs=kwargs)
        self.monitor_thread.daemon = daemon
        self.monitor_thread.start()
        return self.monitor_thread

    def join(self):
        """Join the monitor thread and close the ssh connection."""
        # BUGFIX: was ``joined = True`` (a throwaway local), so self.joined
        # never reflected that the process had been joined.
        self.joined = True
        self.monitor_thread.join()
        self.exited = True
        self.client.close()

    def log_output(self):
        """Drain and log any pending stdout/stderr from the remote process."""
        if self.stdout.channel.recv_ready():
            data = self.stdout.channel.recv(1024).decode('utf-8')
            if self.first_line is None:
                self.first_line = data.split('\n')[0]
                # Do not print first line, which should be PID
                data = '\n'.join(data.split('\n')[1:])
            if len(data):
                log("{} - {}:{}".format(self.name, "stdout", data))
        if self.stderr.channel.recv_stderr_ready():
            # BUGFIX: stderr must be drained with recv_stderr(); the old
            # recv() call consumed *stdout* bytes from the channel instead.
            log("{} - {}:{}".format(
                self.name, "stderr",
                self.stderr.channel.recv_stderr(1024).decode('utf-8')))

    def poll(self):
        """Return the remote exit status, or None while still running.
        Also flushes output and stamps log_entry['stop_time_'] on exit."""
        exited = self.stdout.channel.exit_status_ready()
        self.log_output()
        if not exited:
            return None
        self.exited = True
        if self.log_entry is not None:
            self.log_entry['stop_time_'] = float(time.time())
        rtn = self.stdout.channel.recv_exit_status()
        return rtn
from paramiko import SSHClient, AutoAddPolicy

# Connect to the lab VM and reset the password for user ``chandra``.
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect('192.168.56.101', username='******', password='******')

stdin, stdout, stderr = client.exec_command('passwd chandra')
# ``passwd`` prompts for the new password twice, so feed it twice.
for _ in range(2):
    stdin.write("python123\n")
    stdin.flush()
data = stdout.readlines()
client.close()
print(data)
def _createSSHClient(server, port=22, user=None, password=None):
    """Build and return a connected paramiko SSHClient.

    Unknown hosts are auto-added to the known-hosts list, so this helper is
    only appropriate on trusted networks.
    """
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.set_missing_host_key_policy(AutoAddPolicy())
    ssh.connect(server, port, user, password)
    return ssh
class ParamikoConnection(ConnectionBase):
    """SSH connection backend built on paramiko.

    Wraps an SSHClient/Transport pair and exposes command execution
    (foreground and backgrounded), log tailing, and basic SFTP file tests.
    NOTE(review): ``initial_only`` and helpers such as ``to_bytes``,
    ``to_text`` and ``safe_doing`` are defined elsewhere in this project —
    presumably the decorator ensures the client is initialized first; confirm.
    """

    def __init__(self, host, user, password, port=22):
        super(ParamikoConnection, self).__init__(host, user, password, port)
        self._sclint = None      # lazily created SSHClient
        self._transport = None   # its paramiko Transport, with keepalive

    def _init_client(self):
        """Create and connect the SSHClient once; no-op when already set."""
        if not self._sclint:
            self._sclint = SSHClient()
            self._sclint.load_system_host_keys()
            self._sclint.set_missing_host_key_policy(WarningPolicy)
            self._sclint.connect(self._host, self._port, self._user,
                                 self._password)
            self._transport = self._sclint.get_transport()
            # Keepalive every 5s so idle sessions are not dropped.
            self._transport.set_keepalive(5)

    def inner_execute_cmd(self, cmd):
        """Run *cmd* on a fresh channel; returns (channel, stdin, stdout)."""
        channel, stdin, stdout = self.newChannel()
        channel.exec_command(cmd)
        return channel, stdin, stdout

    def connect(self):
        """Public entry point: ensure the client is connected."""
        self._init_client()

    @initial_only
    def newChannel(self, combine=True):
        """Open a new pty-backed session channel.

        Returns (channel, stdin, stdout) when *combine* is True (stderr
        merged into stdout), otherwise (channel, stdin, stdout, stderr).
        """
        channel = self._sclint.get_transport().open_session()
        channel.get_pty('vt100', 200, 50)
        stdout = channel.makefile('rb', self.DEFAULT_BUFFER_SIZE)
        stdin = channel.makefile_stdin('wb', self.DEFAULT_BUFFER_SIZE)
        if combine:
            channel.set_combine_stderr(True)
            return channel, stdin, stdout
        else:
            stderror = channel.makefile_stderr('rb', self.DEFAULT_BUFFER_SIZE)
            return channel, stdin, stdout, stderror

    def wait_until_end(self, channel: Channel):
        """Block until the remote command exits; return its exit status."""
        status = channel.recv_exit_status()
        return status

    def close_channel(self, channel: Channel):
        channel.close()

    def close(self):
        """Close the underlying SSH client, if any."""
        if self._sclint:
            self._sclint.close()
            # del self._sclint

    @initial_only
    def open_sftp(self):
        # NOTE(review): opens a new SFTP session per call — callers that loop
        # (e.g. stat/fileExists) pay that cost each time.
        return self._sclint.open_sftp()

    def execute_backupground(self, cmd, consumeoutput=True, logfile=None,
                             logmode='r', mode='w', wait=False,
                             wait_time=None, wait_join=False):
        """Run *cmd* in the background on the remote host (appends '&').

        Reads the first numeric token from output as the remote PID, then
        depending on *wait*/*wait_time*/*wait_join* either tails *logfile*
        until the process ends, or closes the channel after a timer.
        NOTE(review): ``logmode`` and ``mode`` are accepted but never used.
        """
        cmd = cmd.strip()
        if cmd[-1] != '&':
            cmd += ' &'
        log.debug(cmd)
        channel, stdin, stdout = self.newChannel()
        channel.invoke_shell()
        stdin.write(to_bytes(cmd + '\r\n'))
        stdin.flush()

        def timerstop(spread=2):
            # Close the channel after *spread* seconds.
            threading.Event().wait(spread)
            channel.close()

        def wait_process_end(processid):
            # Poll `ps` every 2s until the remote PID disappears.
            while (True):
                threading.Event().wait(2)
                cmd = 'ps --no-header -p %s' % processid
                st, pinfo = self.execute_cmd(cmd)
                if st == 0 and pinfo:
                    tpid = to_text(pinfo).strip().split()[0]
                    if tpid == processid:
                        continue
                break
            safe_doing(channel.close)

        def outputlog():
            # Tail *logfile* over the shell channel until it closes.
            while (not self.fileExists(logfile) and not channel.closed):
                threading.Event().wait(1)
            if self.fileExists(logfile) and not channel.closed:
                cmd = 'tail -f %s \r\n' % logfile
                stdin.write(to_bytes(cmd))
                stdin.flush()
                data = stdout.readline()
                while (data or not channel.closed):
                    log.info(to_text(data))
                    data = stdout.readline()

        # Scan shell output for the first line containing a numeric PID.
        data = stdout.readline()
        while (data):
            pid = to_text(data)
            if len(pid.split()) > 1:
                pid = pid.split()[1]
            if re.match('[0-9]+', pid):
                break
            data = stdout.readline()
        if wait:
            if wait_time:
                threading.Thread(target=timerstop, args=(wait_time, )).start()
            if re.match('[0-9]+', pid):
                if wait_join or not wait_time:
                    threading.Thread(target=wait_process_end,
                                     args=(pid, )).start()
                outputlog()
            else:
                safe_doing(channel.close)
        else:
            threading.Thread(target=timerstop, args=(2, )).start()
            while (data or not channel.closed):
                if consumeoutput:
                    log.info(to_text(data))
                if logfile:
                    pass
                data = stdout.readline()

    def outputlog(self, logfile, wait_time):
        """Tail *logfile* on the remote host for roughly *wait_time* seconds.

        NOTE(review): the inner ``finish = True`` assigns a closure-local
        name, not the outer ``finish`` (no ``nonlocal``), so only the
        channel-closing timer actually bounds the tail.
        """
        finish = False

        def timerstop(
                func,
                spread=30,
        ):
            threading.Event().wait(spread)
            finish = True

        threading.Thread(target=timerstop, args=(wait_time, )).start()
        while (not self.fileExists(logfile) and not finish):
            threading.Event().wait(1)
        if self.fileExists(logfile):
            cmd = 'tail -f %s \r\n' % logfile
            log.debug(cmd)
            channel, stdin, stdout = self.newChannel()
            channel.invoke_shell()
            stdin.write(to_bytes(cmd))
            stdin.flush()

            def timerstopchannel(
                    func,
                    spread=30,
            ):
                threading.Event().wait(spread)
                channel.close()

            threading.Thread(target=timerstopchannel,
                             args=(wait_time, )).start()
            data = stdout.readline()
            while (data or not channel.closed):
                log.info(to_text(data))
                data = stdout.readline()

    def execute_cmd(self, cmd, consumeoutput=True, logfile=None, mode='w',
                    wait_time=None):
        """Run *cmd* and collect its output.

        Returns (exit_status, raw_bytes); raw output is only accumulated when
        *consumeoutput* is False, otherwise it is logged line by line.
        Returns (None, None) on any error.
        """
        try:
            log.debug(cmd)
            channel, _, _ = self.inner_execute_cmd(to_bytes(cmd))
            result = bytes()
            data = channel.recv(self.DEFAULT_BUFFER_SIZE)
            l = None
            if logfile:
                l = open(logfile, mode)

            def timerstop(spread=2):
                # Safety valve: force-close the channel after *spread* secs.
                threading.Event().wait(spread)
                channel.close()

            if wait_time:
                threading.Thread(target=timerstop, args=(wait_time, )).start()
            while (data or not channel.closed):
                if consumeoutput:
                    log.info(to_text(data))
                else:
                    result += data
                if l:
                    l.write(to_text(data))
                data = channel.recv(self.DEFAULT_BUFFER_SIZE)
            stat = channel.recv_exit_status()
            if channel:
                del channel
            if l:
                safe_doing(l.close)
            return stat, result
        except BaseException as e:
            log.error(traceback.format_exc())
            return None, None

    def stat(self, remotepath):
        """SFTP stat() of *remotepath*; None when it does not exist."""
        try:
            return self.open_sftp().stat(remotepath)
        except IOError as e:
            return None

    def fileExists(self, path):
        """True when *path* exists on the remote host."""
        msg = self.stat(path)
        if msg:
            return True
        return False

    def isDir(self, path):
        """True/False for directories; None when *path* does not exist."""
        msg = self.stat(path)
        if msg:
            return stat.S_ISDIR(msg.st_mode)
        else:
            return None

    def isFile(self, path):
        """True/False for regular files; None when *path* does not exist."""
        msg = self.stat(path)
        if msg:
            return stat.S_ISREG(msg.st_mode)
        else:
            return None

    def isLink(self, path):
        """True/False for symlinks; None when *path* does not exist.
        (The bare ``None`` below is a missing ``return``, but the method
        falls through to an implicit None anyway, so behavior is the same.)"""
        msg = self.stat(path)
        if msg:
            return stat.S_ISLNK(msg.st_mode)
        else:
            None

    def listdir(self, dir):
        """List the entries of remote directory *dir* via SFTP."""
        return self.open_sftp().listdir(dir)
class RemoteClient:
    """Client to interact with a remote host via SSH & SCP.

    Supports either key-based auth (``ssh_key_filepath`` set — the public
    key is pushed with ``ssh-copy-id`` at construction time) or plain
    password auth.  ``bulk_download`` scrapes a directory listing for
    ``.tif`` files and fetches each one over SCP.
    """

    def __init__(self, host, user, password, ssh_key_filepath=""):
        self.host = host
        self.user = user
        self.password = password
        self.ssh_key_filepath = ssh_key_filepath
        self.client = None   # paramiko SSHClient, created lazily in _connect
        self.scp = None      # SCPClient bound to the client's transport
        self.conn = None     # cached result of _connect()
        if self.ssh_key_filepath:
            # Push the key pair to the host up front so _connect can use it.
            self._upload_ssh_key()

    @logger.catch
    def _get_ssh_key(self):
        """ Fetch locally stored SSH key. """
        try:
            self.ssh_key = RSAKey.from_private_key_file(self.ssh_key_filepath)
            logger.info(f'Found SSH key at self {self.ssh_key_filepath}')
        except SSHException as error:
            logger.error(error)
        return self.ssh_key

    @logger.catch
    def _upload_ssh_key(self):
        """Install the local key pair on the remote host via ssh-copy-id."""
        try:
            system(
                f'ssh-copy-id -i {self.ssh_key_filepath} {self.user}@{self.host}>/dev/null 2>&1'
            )
            system(
                f'ssh-copy-id -i {self.ssh_key_filepath}.pub {self.user}@{self.host}>/dev/null 2>&1'
            )
            logger.info(f'{self.ssh_key_filepath} uploaded to {self.host}')
        except FileNotFoundError as error:
            logger.error(error)

    @logger.catch
    def _connect(self):
        """Open connection to remote host.

        Returns the SSHClient; also initializes self.scp.
        NOTE(review): only checks ``self.conn`` but never sets it here —
        callers assign the return value to ``self.conn`` themselves.
        """
        if self.conn is None:
            logger.info(f'host: {self.host}, user: {self.user}')
            try:
                self.client = SSHClient()
                self.client.load_system_host_keys()
                self.client.set_missing_host_key_policy(AutoAddPolicy())
                # If using ssh key
                if self.ssh_key_filepath:
                    self.client.connect(self.host,
                                        username=self.user,
                                        key_filename=self.ssh_key_filepath,
                                        look_for_keys=True,
                                        timeout=5000)
                else:
                    self.client.connect(self.host,
                                        username=self.user,
                                        password=self.password,
                                        timeout=5000)
                self.scp = SCPClient(self.client.get_transport())
            except AuthenticationException as error:
                logger.error(f'Authentication failed: \
                    did you remember to create an SSH key? {error}')
                raise error
        return self.client

    def disconnect(self):
        """Close ssh connection."""
        if self.client:
            self.client.close()
        if self.scp:
            self.scp.close()

    def download_single_file(self, remote_file, local_file, out):
        """Download a single file from a remote directory.

        Progress is written as HTML fragments to *out* (a writable stream).
        Returns the local path on success; re-raises SCP or other errors.
        """
        self.conn = self._connect()
        download = None
        try:
            out.write(
                f'Started download from: {remote_file} to: {local_file}<br>')
            out.flush()
            self.scp.get(remote_file, local_file)
            download = local_file
            logger.info(f'Downloaded {remote_file} to {download}')
            out.write(f'Finished download of {remote_file}<br>')
            out.flush()
        except SCPException as error:
            logger.error(f"client: SCPException! %s" % traceback.format_exc())
            raise error
        except Exception as e:
            logger.error(f"client: Exception! %s" % traceback.format_exc())
            raise e
        else:
            return download

    @logger.catch
    def bulk_download(self, remote_directory, local_directory, out):
        """
        Download multiple files from a remote directory.

        :param files: List of paths to remote files.
        :type files: List[str]
        """
        # Ensure valid input
        if remote_directory is None:
            print(
                "Error: Arg 'remote_directory' is null in RemoteClient.bulk_download "
            )
            return
        logger.info(
            f'Downloading from {remote_directory} to {local_directory}')
        self.conn = self._connect()
        # Ensure Windows formatting of directory path
        # - assuming paths that start with '/' are not Windows
        if not remote_directory.startswith("/"):
            windows_dir = remote_directory.replace("/", "\\")
            stdin, stdout, stderr = self.conn.exec_command(
                f"dir {windows_dir}")
        else:
            stdin, stdout, stderr = self.conn.exec_command(
                f"ls {remote_directory}")
        # Find the listing of images on the remote machine
        stdout = stdout.readlines()
        stderr = stderr.readlines()
        # Collect the image filenames (expecting '.tif')
        # NOTE(review): the '.' in the pattern is an unescaped regex wildcard,
        # so names like "foo_tif" would also match — confirm intent.
        images = []
        for line in stdout:
            res = re.search(r"\w+.tif", line)
            if res:
                images.append(res.group(0))
        # Was there an error?
        # ..if so, stop
        outerr = ""
        for line in stderr:
            outerr = outerr + line
        if outerr != "":
            print(f"error: {outerr}")
            out.write(f"error: {outerr}")
            return
        # Fetch each image; download_single_file re-raises on failure.
        downloads = [
            self.download_single_file(remote_directory + file,
                                      local_directory + file.lower(), out)
            for file in images
        ]
        logger.info(
            f'Finished downloading {len(downloads)} files ({images}) from {remote_directory} to {local_directory} from {self.host}'
        )
        out.write(
            f'<p>Finished downloading {len(downloads)} files ({images}) from {remote_directory} to {local_directory} from {self.host}</p>'
        )
class Cluster:
    """A NetApp cDOT cluster reached over SSH.

    All state (identity, nodes, SVMs, snapmirrors, aggregates, peers) is
    scraped from CLI ``show -instance`` output, parsed as ``Key: Value``
    lines.  Commands and their output are appended to a per-cluster,
    per-day log file under ./logs.
    """

    def __init__(self, address, cname, username=None, pw=None, keyfile=None,
                 keyfile_pw=None):
        self.address = address
        self.cname = cname
        logname = time.strftime("%d%b%Y")
        # One log file per cluster per day, opened in append mode.
        self.logfile = open(
            os.path.join('./logs', '%s-%s.log' % (cname, logname)), 'a')
        self.name = None
        self.uuid = None
        self.serialnumber = None
        self.location = None
        self.contact = None
        self.nodes = {}         # node name -> attribute dict
        self.svms = {}          # vserver name -> SVM object
        self.snapmirrors = []   # list of snapmirror attribute dicts
        self.aggregates = {}    # aggregate name -> AGGR object
        self.peers = {}         # local vserver name -> peer dict
        self.peersrev = {}      # peer vserver name -> peer dict
        self.ssh = SSHClient()
        self.ssh.load_system_host_keys()
        # NOTE(review): the policy *class* is passed rather than an instance
        # (AutoAddPolicy vs AutoAddPolicy()) — paramiko accepts either.
        self.ssh.set_missing_host_key_policy(AutoAddPolicy)
        self.username = username
        self.pw = pw
        self.pkey_filename = keyfile
        self.pkey_pw = keyfile_pw
        self.pkey = None
        if self.pkey_filename:
            self.pkey = RSAKey.from_private_key_file(self.pkey_filename,
                                                     password=self.pkey_pw)
        # Eagerly populate the identity, node and SVM caches.
        self.fetchclusterinfo()
        self.fetchnodes()
        self.fetchsvms()

    def fetchclusterinfo(self):
        """Populate name/uuid/serial/location/contact from 'cluster identity show'."""
        output = self.runcmd('cluster identity show')
        for line in output:
            if not line:
                continue
            if 'Cluster UUID:' in line:
                self.uuid = line.split(':', 1)[1].strip()
            if 'Cluster Name:' in line:
                self.name = line.split(':', 1)[1].strip()
            if 'Cluster Serial Number:' in line:
                self.serialnumber = line.split(':')[1].strip()
            if 'Cluster Location:' in line:
                # Location/contact may be empty; only read when a value exists.
                tlist = line.split(':', 1)
                if len(tlist) > 1:
                    self.location = line.split(':', 1)[1].strip()
            if 'Cluster Contact:' in line:
                tlist = line.split(':', 1)
                if len(tlist) > 1:
                    self.contact = line.split(':', 1)[1].strip()

    def fetchnodes(self):
        """ fetch the nodes of the system """
        output = self.runcmd('system node show -instance')
        node = {}
        currentnode = ''
        for line in output:
            if not line:
                continue
            if 'Node:' in line:
                # A new "Node:" header starts the next record; flush previous.
                if node:
                    self.nodes[node['Node']] = node
                currentnode = line.split(':')[-1].strip()
                node = {}
                node['Node'] = currentnode
                continue
            if ':' in line:
                line = line.strip()
                tlist = line.split(':', 1)
                slist = [x.strip() for x in tlist]
                lastkey = slist[0]
                value = slist[1]
                node[lastkey] = value
        # Flush the final record.
        if node:
            self.nodes[node['Node']] = node

    def fetchclusterstats(self):
        """Return (cpu, ops, bps, latency) ints from 'statistics system show'.
        Returns None implicitly when no 'cluster' line is present."""
        output = self.runcmd('statistics system show')
        for line in output:
            if line.startswith('cluster'):
                (cpu, ops, bps, latency) = line.split()[1:]
                return int(cpu), int(ops), int(bps), int(latency)

    def fetchpeers(self):
        """Populate self.peers / self.peersrev from 'vserver peer show-all'."""
        output = self.runcmd('vserver peer show-all -instance')
        peer = {}
        for line in output:
            if not line:
                continue
            if 'Local Vserver Name' in line:
                # New record; flush the previous one under both indexes.
                if peer:
                    self.peers[peer['Local Vserver Name']] = peer
                    self.peersrev[peer['Peer Vserver Name']] = peer
                peer = {}
                local = line.split(':', 1)[1].strip()
                peer['Local Vserver Name'] = local
            elif 'Peer Vserver Name' in line:
                peername = line.split(':', 1)[1].strip()
                peer['Peer Vserver Name'] = peername
            else:
                line = line.strip()
                tlist = line.split(':', 1)
                slist = [x.strip() for x in tlist]
                try:
                    peer[slist[0]] = slist[1]
                except IndexError:
                    print('The following line was malformed')
                    print(line)
        if peer:
            self.peers[peer['Local Vserver Name']] = peer
            self.peersrev[peer['Peer Vserver Name']] = peer

    def getsecdstats(self):
        """ return true if node is over threshhold """
        # NOTE(review): despite the docstring, this populates
        # node['secdstats'] per node and returns nothing.
        for node in self.nodes.values():
            cmdnode = "set d;diag secd echo -echo-text showLimits -node %s" % node[
                'Node']
            output = self.runinteractivecmd(cmdnode)
            node['secdstats'] = {}
            for line in output:
                if ':' in line:
                    line = line.strip()
                    tlist = line.split(':', 1)
                    slist = [x.strip() for x in tlist]
                    lastkey = slist[0]
                    value = slist[1]
                    # Store counters as ints where possible, else raw text.
                    try:
                        node['secdstats'][lastkey] = int(value)
                    except:
                        node['secdstats'][lastkey] = value
            #print(node['secdstats'])

    def fetchsnapmirrors(self):
        """Populate self.snapmirrors from 'snapmirror show -instance'.

        NOTE(review): unlike the other fetchers, there is no flush after the
        loop — the final snapmirror record is never appended.
        """
        output = self.runcmd('snapmirror show -instance')
        count = 0  # NOTE(review): never used
        cursnap = {}
        for line in output:
            if not line:
                continue
            if 'Source Path:' in line:
                # New record starts; flush the previous one.
                if cursnap:
                    self.snapmirrors.append(cursnap)
                cursnap = {}
                tlist = line.split(':')
                cursnap['Source Path'] = {
                    'svm': tlist[1].strip(),
                    'vol': tlist[2].strip()
                }
            elif 'Destination Path' in line:
                tlist = line.split(':')
                cursnap['Destination Path'] = {
                    'svm': tlist[1].strip(),
                    'vol': tlist[2].strip()
                }
            else:
                line = line.strip()
                tlist = line.split(':', 1)
                slist = [x.strip() for x in tlist]
                cursnap[slist[0]] = slist[1]

    def fetchaggrs(self):
        """Populate self.aggregates (AGGR objects) from 'aggr show -instance'.

        Continuation lines (no ':') are folded into the last seen key as a
        list; sizes are normalized via convertnetappsize.
        """
        output = self.runcmd('aggr show -instance')
        currentaggr = {}
        lastkey = None
        for line in output:
            if not line:
                lastkey = None
                continue
            if ' Aggregate:' in line and currentaggr:
                # New record header; flush previous aggregate.
                try:
                    self.aggregates[currentaggr['Aggregate']] = AGGR(
                        currentaggr['Aggregate'], self, data=currentaggr)
                except KeyError:
                    print(currentaggr)
                currentaggr = {}
            if ':' in line:
                line = line.strip()
                tlist = line.split(':', 1)
                slist = [x.strip() for x in tlist]
                lastkey = slist[0]
                value = slist[1]
                if 'size' in lastkey or 'Size' in lastkey:
                    nvalue = convertnetappsize(value)
                    if nvalue != value:
                        value = nvalue
                currentaggr[lastkey] = value
            else:
                # Wrapped value: append to the previous key's entry.
                if lastkey:
                    if type(currentaggr[lastkey]) == list:
                        currentaggr[lastkey].append(line.strip())
                    else:
                        nvalue = [currentaggr[lastkey], line.strip()]
                        currentaggr[lastkey] = nvalue
        if currentaggr:
            self.aggregates[currentaggr['Aggregate']] = AGGR(
                currentaggr['Aggregate'], self, data=currentaggr)

    def fetchsvms(self):
        """Populate self.svms (SVM objects) from 'vserver show -instance'."""
        output = self.runcmd('vserver show -instance')
        currentsvm = ''
        lastkey = None
        for line in output:
            if not line:
                lastkey = None
                continue
            if 'Vserver:' in line:
                currentsvm = line.split()[1]
                self.svms[currentsvm] = SVM(currentsvm, self)
            else:
                line = line.strip()
                tlist = line.split(':', 1)
                slist = [x.strip() for x in tlist]
                try:
                    self.svms[currentsvm].sset(slist[0], slist[1])
                    lastkey = slist[0]
                except IndexError:
                    # Continuation line: fold into the previous key's value.
                    if lastkey and len(slist) == 1:
                        if type(self.svms[currentsvm].attr[lastkey]) == list:
                            self.svms[currentsvm].attr[lastkey].append(
                                slist[0])
                        else:
                            nvalue = [
                                self.svms[currentsvm].attr[lastkey], slist[0]
                            ]
                            self.svms[currentsvm].sset(lastkey, nvalue)

    def findvolume(self, volume, svm=None, exact=True):
        """Find *volume* in one SVM (when given) or across all SVMs."""
        if svm in self.svms:
            return self.svms[svm].findvolume(volume, exact)
        else:
            foundvolumes = []
            for svmo in self.svms.values():
                nvolumes = svmo.findvolume(volume, exact)
                foundvolumes.extend(nvolumes)
            return foundvolumes

    def log(self, line):
        """ write to the log file """
        self.logfile.write(line)

    def connect(self):
        """Ensure the SSH transport is connected and keepalive is set."""
        # if self.ssh.get_transport():
        #     print('Connect: session active %s' % self.ssh.get_transport().is_active())
        if not self.ssh.get_transport() or not self.ssh.get_transport(
        ).is_active():
            # print('Connect: creating connection')
            if self.pkey:
                self.ssh.connect(self.address,
                                 username=self.username,
                                 pkey=self.pkey)
            else:
                self.ssh.connect(self.address,
                                 username=self.username,
                                 password=self.pw)
            transport = self.ssh.get_transport()
            transport.set_keepalive(5)

    def runcmd(self, cmd, excludes=None):
        """ run a command and returns the output """
        # Output lines matching any exclude string are dropped; pagination
        # prompts are answered with a space to keep output flowing.
        # NOTE(review): a caller-supplied *excludes* list is mutated in place
        # by the appends below.
        self.connect()
        if not excludes:
            excludes = []
        excludes.append('entries were displayed')
        excludes.append('There are no entries matching your query.')
        excludes.append('\a')
        excludes.append('Last login time:')
        self.log('%s - %s - %s\n' %
                 (time.strftime("%a %d %b %Y %H:%M:%S %Z"), self.name, cmd))
        output = []
        stdin, stdout, stderr = self.ssh.exec_command(cmd)
        for line in stdout:
            line = line.rstrip()
            self.log(' %s\n' % line)
            if "Press <space> to page down" in line:
                stdin.write(' ')
                stdin.flush()
            else:
                save = True
                if excludes:
                    for exc in excludes:
                        if exc in line:
                            save = False
                            break
                if save:
                    output.append(line)
        return output

    def runinteractivecmd(self, cmd, respondto=' {y|n}:', response='y\n'):
        """ run a command that requires a response, also used to
        see output of a command """
        output = []
        self.connect()
        #print("command: %s" % cmd)
        self.log('%s - %s - %s\n' %
                 (time.strftime("%a %d %b %Y %H:%M:%S %Z"), self.name, cmd))
        stdin, stdout, stderr = self.ssh.exec_command(cmd)
        for line in stdout:
            #print(line)
            line = line.rstrip()
            self.log(' %s\n' % line)
            if respondto in line:
                #print('sending %s to server' % response)
                stdin.write(response)
                stdin.flush()
            else:
                output.append(line)
        return output
class ExacloudConnection:
    """Resource manager for exacloud commands.

    Helpful exacloud commands:

    - View jobs in queue by user:
        $ squeue -u <user>
    - View all jobs by user (default is jobs since midnight, add in date to see further back):
        $ sacct -X -u <user> [--startime YYYY-MM-DD]
    - Above but with better formatting (assuming full width terminal window):
        $ sacct -X -u <user> --format "JobID,Elapsed,Timelimit,ReqMem,MaxVMSize,State,NodeList,Partition,JobName%25,Comment%125"
    - Cancel a job
        $ scancel <jobid>
    - Cancel all jobs by user:
        $ scancel -u <user>
    - Cancel all pending jobs by user:
        $ scancel -u <user> --state=pending
    """

    def __init__(self, user, ssh_key=None, password=None):
        """Prepare the SSH connection parameters.

        :param user: Exacloud username.
        :param ssh_key: Path to key. Required if password is None.
        :param password: Exacloud password. Required if ssh_key is None.
        :raises ValueError: when neither credential is supplied.
        """
        self.host = 'exahead1.ohsu.edu'
        self.user = user
        # Key-based auth wins when both credentials are supplied.
        if ssh_key is not None:
            assert Path(ssh_key).exists(), (
                f'Could not find ssh key "{ssh_key}". Make sure it exists and '
                f'is located adjacent to the public key.')
            self.creds = {'key_filename': ssh_key}
        elif password is not None:
            self.creds = {'password': password}
        else:
            raise ValueError('One of password or ssh_key should be passed.')

    def __enter__(self):
        """Open the SSH connection and return self."""
        self.client = SSHClient()
        self.client.load_system_host_keys()
        self.client.set_missing_host_key_policy(AutoAddPolicy())
        self.client.connect(self.host, username=self.user, **self.creds)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Close the SSH connection."""
        self.client.close()

    def send_command(self, command):
        """Send *command*; return the (stdin, stdout, stderr) streams."""
        stdin, stdout, stderr = self.client.exec_command(command)
        return stdin, stdout, stderr

    def cancel_job(self, job_id):
        """Cancel the specified job."""
        self.send_command(f'scancel {job_id}')

    def cancel_all_user_jobs(self):
        """Cancel every job owned by the user."""
        self.send_command(f'scancel -u {self.user}')

    def cancel_pending_user_jobs(self):
        """Cancel every pending job owned by the user."""
        self.send_command(f'scancel -u {self.user} --state=pending')
def parser(name):
    """Worker loop: drain dates from the shared queue and build per-day CSVs.

    For each date, every collector's RIB snapshot is fetched (over SCP for
    collectors listed in ``no_links``, HTTP otherwise), parsed with
    ``bgpreader``, and prefix→origin-ASN sets are appended to
    ./data/<date>.csv, which is finally bzip2-compressed.

    :param name: per-worker scratch filename for the downloaded RIB.
    NOTE(review): relies on module globals ``dates`` (a Queue),
    ``collectors``, and ``no_links`` defined elsewhere — confirm shapes.
    """
    while True:
        # Stop when the shared work queue is exhausted.
        if dates.empty() == True:
            return
        date = dates.get()
        filename = './data/{0}.csv'.format(date.strftime('%Y-%m-%d'))
        thread_start = time.time()
        # Write the CSV header for this date's output file.
        with open(filename, 'a+') as f:
            f.write(
                'collector_name,rir,ip_type,prefix,origin_asns,attack_type\n')
        for collector in collectors:
            start = time.time()
            collector_name, rir = collector
            # 'route-views2' maps to an empty path segment in the archive URL.
            collector_name = '' if collector_name == 'route-views2' else collector_name
            # get file
            if collector_name in no_links:
                # No public HTTP link: pull straight off the archive host.
                url_fmt = '/mnt/storage/{0}/bgpdata/{1}/RIBS/rib.{2}.bz2'
                url = url_fmt.format(collector_name, date.strftime('%Y.%m'),
                                     date.strftime('%Y%m%d.%H%M'))
                collector_name = 'route-views2' if collector_name == '' else collector_name
                print('{0} - {1} downloading file'.format(
                    collector_name, date))
                ssh = SSHClient()
                ssh.load_system_host_keys()
                ssh.connect('archive.routeviews.org')
                scp = SCPClient(ssh.get_transport())
                try:
                    scp.get(url, local_path=name)
                except:
                    # Missing snapshot for this collector/date: skip it.
                    scp.close()
                    ssh.close()
                    print('SCP: {0} - {1} file not found'.format(
                        collector_name, date))
                    continue
                scp.close()
                ssh.close()
            else:
                url_fmt = 'http://archive.routeviews.org/{0}/bgpdata/{1}/RIBS/rib.{2}.bz2'
                url = url_fmt.format(collector_name, date.strftime('%Y.%m'),
                                     date.strftime('%Y%m%d.%H%M'))
                collector_name = 'route-views2' if collector_name == '' else collector_name
                print('{0} - {1} downloading file'.format(
                    collector_name, date))
                raw = requests.get(url)
                if raw.status_code != 200:
                    print('HTTP: {0} - {1} file not found'.format(
                        collector_name, date))
                    continue
                with open(name, 'wb+') as f:
                    f.write(raw.content)
            print('{0} - {1} starting parse'.format(collector_name, date))
            # parse
            cmd = 'bgpreader -d singlefile -o rib-file={0}'.format(name).split(
                ' ')
            # prefixes: field 9 (prefix) -> set of field 12 (origin ASN).
            prefixes = dict()
            with Popen(cmd, stdout=PIPE, stderr=DEVNULL, text=True) as parsed:
                for line in iter(parsed.stdout.readline, ''):
                    line = line.split('|')
                    try:
                        if line[9] in prefixes.keys():
                            prefixes[line[9]].add(line[12])
                        else:
                            prefixes[line[9]] = set()
                            prefixes[line[9]].add(line[12])
                    except:
                        # Malformed/short record: ignore it.
                        continue
            print('{0} - {1} writing results'.format(collector_name, date))
            with open(filename, 'a+') as f:
                for prefix, asns in prefixes.items():
                    if prefix.endswith('/0'):
                        continue
                    # N = single origin, I = two origins, D = three or more.
                    attack = 'N' if len(asns) == 1 else (
                        'I' if len(asns) == 2 else 'D')
                    af = '4' if '.' in prefix else '6'
                    f.write('{0},{1},{2},{3},"{4}",{5}\n'.format(
                        collector_name, rir, af, prefix, ','.join(asns),
                        attack))
            del prefixes
            os.system('rm {0}'.format(name))
            end = time.time()
            print('{0} - {1} {2:.3f} sec'.format(collector_name, date,
                                                 end - start))
        print('{0} compressing'.format(date))
        os.system('bzip2 {0}'.format(filename))
        thread_end = time.time()
        print('{0} - {1} {2:.3f} sec'.format(name, date,
                                             thread_end - thread_start))
def start_tunnel(database, use_ssh_config=False):
    """Start an SSH port-forwarding tunnel for the Django DB alias *database*.

    Reads TUNNEL_*/REMOTE_HOST settings from settings.DATABASES[database],
    optionally merges ~/.ssh/config, connects a paramiko client, then runs
    a local forwarding server on another thread.  The client and server are
    stashed back into the settings dict so the tunnel is started only once.

    :raises ValueError: when REMOTE_HOST or TUNNEL_HOST is not configured.
    """
    if not database:
        return
    from paramiko import AutoAddPolicy, SSHClient, SSHConfig
    db = settings.DATABASES[database]
    if SSH_CLIENT_KEY in db:
        # Tunnel is already running
        return
    if not 'REMOTE_HOST' in db:
        raise ValueError('REMOTE_HOST not specified for ' + database)
    if not 'TUNNEL_HOST' in db:
        raise ValueError('TUNNEL_HOST not specified for ' + database)
    kwargs = {}
    hostname = db['TUNNEL_HOST']
    # Setup the kwargs
    if 'TUNNEL_USER' in db:
        kwargs['username'] = db['TUNNEL_USER']
    if 'TUNNEL_PASSWORD' in db:
        kwargs['password'] = db['TUNNEL_PASSWORD']
    if 'TUNNEL_IDENTITY' in db:
        kwargs['key_filename'] = db['TUNNEL_IDENTITY']
    if 'TUNNEL_PORT' in db:
        kwargs['port'] = int(db['TUNNEL_PORT'])
    if use_ssh_config:
        # Best-effort merge of ~/.ssh/config; any failure falls back to the
        # settings-derived values.
        try:
            with open(os.path.expanduser('~/.ssh/config')) as f:
                sshConfig = SSHConfig()
                sshConfig.parse(f)
                config = sshConfig.lookup(db['TUNNEL_HOST'])
                hostname = config['hostname']
                # Use username and port if missing
                if not 'username' in kwargs and 'user' in config:
                    kwargs['username'] = config['user']
                if not 'port' in kwargs and 'port' in config:
                    kwargs['port'] = int(config['port'])
                # Add identityfile (a list)
                if 'identityfile' in config:
                    if 'key_filename' in kwargs:
                        if type(kwargs['key_filename']) is list:
                            kwargs['key_filename'] += config['identityfile']
                        else:
                            kwargs['key_filename'] = [
                                kwargs['key_filename']
                            ] + config['identityfile']
                    else:
                        kwargs['key_filename'] = config['identityfile']
        except:
            pass
    # Fix the identity files
    # Expand any '~'-prefixed key paths to absolute paths.
    if 'key_filename' in kwargs:
        if type(kwargs['key_filename']) is list:
            for i in range(len(kwargs['key_filename'])):
                if kwargs['key_filename'][i].startswith('~'):
                    kwargs['key_filename'][i] = os.path.expanduser(
                        kwargs['key_filename'][i])
        elif kwargs['key_filename'].startswith('~'):
            kwargs['key_filename'] = os.path.expanduser(kwargs['key_filename'])
    # Setup the client
    client = SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(AutoAddPolicy())
    client.connect(hostname, **kwargs)
    # Setup the port forwarding server
    # NOTE(review): __PortForwardingServer/__PortForwardingServerHandler and
    # __start_tunnel are module-level names defined elsewhere in this file.
    class __SubPortForwardingServerHandler(__PortForwardingServerHandler):
        chain_host = db['REMOTE_HOST']
        chain_port = int(db['PORT'])
        ssh_transport = client.get_transport()

    server = __PortForwardingServer(('', int(db['PORT'])),
                                    __SubPortForwardingServerHandler)
    # Save a reference to the client and port forwarding server
    db[SSH_TUNNEL_KEY] = server
    db[SSH_CLIENT_KEY] = client
    # Start port forwarding server on another thread
    _thread.start_new_thread(__start_tunnel, (server, ))