def save(self, **kwargs):
    super().save(**kwargs)
    rsakey = RSAKey.from_private_key_file(settings.MEDIA_ROOT + "/" + self.file.name)
    rsakey.write_private_key_file(
        settings.MEDIA_ROOT + "/" + self.file.name, settings.SECRET_KEY)
    os.chmod(settings.MEDIA_ROOT + "/" + self.file.name, 0o640)
def init_key(self):
    if self.ssh_custom_key:
        return
    for filename in os.listdir(self.config.get('local', 'data_dir')):
        if not filename.startswith('yakey') or not os.path.isfile(
                os.path.join(self.config.get('local', 'data_dir'), filename)):
            continue
        key_path = os.path.join(self.config.get('local', 'data_dir'), filename)
        self.key_name = key_path.split(os.sep)[-1]
        pmk_key = RSAKey.from_private_key_file(key_path)
        logging.info('LOADED KEY %s' % key_path)
        break
    else:
        self.key_name = self.get_rnd_name('yakey')
        key = rsa.generate_private_key(backend=crypto_default_backend(),
                                       public_exponent=65537,
                                       key_size=2048)
        pmk_key = RSAKey(key=key)
        key_path = os.path.join(self.config.get('local', 'data_dir'), self.key_name)
        pmk_key.write_private_key_file(key_path)
        logging.info('WRITTEN KEY %s' % key_path)
    self.public_key = "%s %s" % (pmk_key.get_name(), pmk_key.get_base64())
    self.ssh_custom_key = {'pkey': pmk_key}
    if self.yascheduler:
        self.yascheduler.ssh_custom_key = self.ssh_custom_key
def _pushOtherKey(self, client, keyfile):
    sftp = client.openSftp()
    if not os.path.exists(keyfile):
        print("Can't copy {0}: no such file or directory.".format(keyfile))
        return 1
    source_key = ''
    with open(keyfile, 'r') as fp:
        source_key = fp.readline()
    # Paramiko RSA private key -- get the public part by converting
    if source_key.startswith('-----BEGIN RSA PRIVATE KEY-----'):
        pkey = RSAKey.from_private_key_file(keyfile)
        source_key = keys.GenerateAuthorizedKeysLine(pkey)
    try:
        sftp.chdir('/home/mendel/.ssh')
    except FileNotFoundError as e:
        sftp.mkdir('/home/mendel/.ssh', mode=0o700)
    with sftp.open('/home/mendel/.ssh/authorized_keys', 'a+b') as fp:
        fp.write(source_key)
    print("Key {0} pushed.".format(keyfile))
    return 0
def create_worker(host):
    config = SSHConfig()
    proxy = None
    if os.path.exists(os.path.expanduser('~/.ssh/config')):
        config.parse(open(os.path.expanduser('~/.ssh/config')))
        if host.hostname is not None and \
                'proxycommand' in config.lookup(host.hostname):
            proxy = ProxyCommand(config.lookup(host.hostname)['proxycommand'])
    # proxy = paramiko.ProxyCommand("ssh -o StrictHostKeyChecking=no [email protected] nc 118.138.239.241 22")
    worker = SSHClient()
    worker.load_system_host_keys()
    worker.set_missing_host_key_policy(AutoAddPolicy())
    # store all this for later reference (e.g., logging, reconnection)
    worker.hostname = host.hostname
    worker.username = host.username
    worker.password = host.password
    worker.proxy = proxy
    if host.key_filename is not None:
        worker.pkey = RSAKey.from_private_key_file(host.key_filename, host.key_password)
    else:
        worker.pkey = None
    # time.sleep(4)
    # worker.connect(hostname=host.hostname, username=host.username, password=host.password, key_filename=host.key_filename, sock=proxy, timeout=3600)
    worker.connect(hostname=host.hostname, username=host.username,
                   password=host.password, pkey=worker.pkey, sock=proxy)
    return worker
def sign_token(key_path, fingerprint, data):
    # from agent
    pkey = get_key_from_agent(fingerprint)
    if not pkey:
        # or from file (without passphrase)
        # assuming '.pub' file extension
        if not os.path.exists(key_path[:-4]):
            raise SignatureException('WrongKeyPath')
        try:
            pkey = RSAKey.from_private_key_file(key_path[:-4])
        except PasswordRequiredException:
            raise SignatureException('EncryptedKey')
    if not pkey:
        raise SignatureException('KeyNotFound')
    try:
        # paramiko is inconsistent here in that the agent's key
        # returns Message objects for 'sign_ssh_data' whereas RSAKey
        # objects return byte strings.
        # Workaround: cast both return values to string and build a
        # new Message object
        s = str(pkey.sign_ssh_data(data))
        m = Message(s)
        m.rewind()
        if not m.get_string() == 'ssh-rsa':
            raise SignatureException('RSAKeyRequired')
        return base64.b64encode(m.get_string())
    except Exception:
        raise SignatureException('SignatureCreateFailure')
def init_ssh_key(path: PosixPath):
    if path.exists():
        key = RSAKey.from_private_key_file(str(path))
        log.info('[⚙] Using existing SSH key in {}'.format(path))
    else:
        key = generate_cert(path)
        log.info('[⚙] Generated SSH key in {}'.format(path))
    return key
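# A minimal usage sketch for init_ssh_key above. The key path is hypothetical,
# and the module-level `log` and `generate_cert` from the snippet are assumed
# to be available in scope; get_name()/get_base64() are standard paramiko
# PKey accessors.
from pathlib import PosixPath

key = init_ssh_key(PosixPath('~/.ssh/id_rsa').expanduser())
print(key.get_name(), key.get_base64())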
def app_from_config_file(filename):
    """Loads the app from the YAML-encoded config file, and updates
    the config file if needed.

    :param filename: the filename of the config to load
    :type filename: :class:`basestring`
    :returns: the loaded app
    :rtype: :class:`~asuka.app.App`

    """
    dirname = os.path.dirname(filename)
    with open(filename) as fp:
        loaded_config = load(fp)
    config = dict(loaded_config)
    config['ec2_connection'] = connect_to_region(**config['ec2_connection'])
    try:
        private_key = config['private_key']
    except KeyError:
        pass
    else:
        private_key = RSAKey.from_private_key_file(
            os.path.join(dirname, private_key))
        config['private_key'] = private_key
    gh_auth = None
    try:
        gh_token = config['repository']['token']
        gh_repository = config['repository']['repository']
    except KeyError:
        try:
            gh_login = config['repository']['login']
            gh_password = config['repository']['password']
            gh_repository = config['repository']['repository']
        except KeyError:
            gh_token = None
        else:
            gh_auth = authorize(gh_login, gh_password, ['repo'],
                                'Asuka Deployment System')
            gh_token = str(gh_auth.token)
    if gh_token:
        gh = login(token=gh_token)
        config['repository'] = gh.repository(*gh_repository.split('/', 1))
    app, delta = app_from_config(config)
    if gh_auth:
        delta['repository'] = {'token': gh_token, 'repository': gh_repository}
    if delta:
        try:
            private_key = delta['private_key']
        except KeyError:
            pass
        else:
            key_filename = app.name + '_id_rsa'
            private_key.write_private_key_file(
                os.path.join(dirname, key_filename))
            delta['private_key'] = key_filename
        loaded_config.update(delta)
        with open(filename, 'w') as fp:
            dump(loaded_config, fp, default_flow_style=False)
    return app
def __loadKey(self):
    if self.__keyFilePath == 'agent':
        return self.__loadFromAgent()
    try:
        return RSAKey.from_private_key_file(self.__keyFilePath)
    except FileNotFoundError:
        print("Failed to load " + self.__keyFilePath + ", trying the ssh-agent.")
        return self.__loadFromAgent()
def get_sftp_client():
    global _sftp_client, _identity_file, _repository_server, _repository_port, _repository_user
    try:
        if not _sftp_client:
            key = RSAKey.from_private_key_file(_identity_file)
            t = Transport((_repository_server, _repository_port))
            t.connect(username=_repository_user, pkey=key)
            _sftp_client = DistSFTPClient.from_transport(t)
    except Exception, e:
        print e
def createClient(host, username=None, pkeyPath=None):
    """ Creates an SSH client object that can be used to perform SSH-related operations """
    client = SSHClient()
    client.set_missing_host_key_policy(AutoAddPolicy())
    pkey = RSAKey.from_private_key_file(os.path.expanduser(pkeyPath)) if pkeyPath else None
    client.connect(host, username=username, pkey=pkey)
    return client
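# A minimal usage sketch for createClient above. Host, username and key path
# are hypothetical placeholder values; exec_command is the standard paramiko
# SSHClient API.
client = createClient('203.0.113.10', username='deploy', pkeyPath='~/.ssh/id_rsa')
stdin, stdout, stderr = client.exec_command('uname -a')
print(stdout.read().decode())
client.close()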
def load_pkey(clasobj, keytype, location):
    kt = keytype.upper()
    if kt == "RSA":
        return RSAKey.from_private_key_file(location)
    elif kt == "DSA":
        return DSSKey.from_private_key_file(location)
    else:
        raise Exception(
            "Unknown key type '{0}' in sftp_account::load_pkey".format(keytype))
def __init__(self, cb, config=None, address='', port=58337, backlog=100):
    self.cb = cb
    # Parse config <3
    if config is not None:
        with open(config, 'r') as f:
            cfg = yaml.load(f)
    else:
        cfg = {}
    logfile = cfg.get('logfile', None)
    if logfile is not None:
        paramiko.util.log_to_file(logfile)
    host_key_path = cfg.get('host_key', 'server.key')
    host_key_password = cfg.get('host_key_password', None)
    try:
        self.host_key = RSAKey.from_private_key_file(host_key_path,
                                                     host_key_password)
    except paramiko.ssh_exception.PasswordRequiredException:
        print 'Invalid host_key_password'
        sys.exit(1)
    except IOError:
        print '*****************************************'
        print '** host_key does not exist!            **'
        print '** In the name of security by default, **'
        print '** Sheet will generate one for you.    **'
        print '*****************************************'
        RSAKey.generate(2048).write_private_key_file(host_key_path,
                                                     host_key_password)
    self.handler = Broker.get(cfg.get('auth_handler', 'BaseAuth'))
    self.handler_conf = cfg.get('auth_handler_config', {})
    try:
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind((address, port))
    except Exception as e:
        print 'Bind failed: ', str(e)
        traceback.print_exc()
        sys.exit(1)
    try:
        self.socket.listen(backlog)
    except Exception as e:
        print 'Listen/accept failed:', str(e)
        traceback.print_exc()
        sys.exit(1)
def __init__(self):
    if not os.path.exists(config.CONFIG_BASEDIR):
        os.makedirs(CONFIG_BASEDIR, mode=0o700)
    if not os.path.exists(KEYSDIR):
        os.makedirs(KEYSDIR, mode=0o700)
    if not os.path.exists(KEYFILE_PATH):
        self.pkey = None
    else:
        try:
            self.pkey = RSAKey.from_private_key_file(KEYFILE_PATH)
        except IOError as e:
            print("Unable to read private key from file: {0}".format(e))
            sys.exit(1)
        except PasswordRequiredException as e:
            print("Unable to load in private key: {0}".format(e))
            sys.exit(1)
def public_key():
    if not path.exists(PATH_TO_KEYFILE):
        abort('Required public key file does not exist. Create it with ssh-keygen.')
    comment = '{}@{}'.format(getuser(), gethostname().split('.')[0])
    pkey = None
    try:
        pkey = RSAKey.from_private_key_file(PATH_TO_KEYFILE)
    except PasswordRequiredException:
        abort('Keys with passphrases are not supported.')
    grep_cmd = 'grep {} "$HOME"/{}'.format(pkey.get_base64(),
                                           path.join('.ssh', 'authorized_keys'))
    grep = None
    with hide('everything'):
        grep = run(grep_cmd, warn_only=True)
    if grep.failed:
        with cd('"$HOME"'), hide('everything'):
            run('[ -d .ssh ] || (mkdir -p .ssh; chmod 700 .ssh)')
            key_entry = ' '.join((pkey.get_name(), pkey.get_base64(), comment))
            run('touch .ssh/authorized_keys')
            run('chmod 600 .ssh/authorized_keys')
            run('echo {} >> .ssh/authorized_keys'.format(key_entry))
    puts('Your public key is set up on {}.'.format(env.host_string))
def importKey(self, keyfile):
    try:
        self.pkey = RSAKey.from_private_key_file(keyfile)
    except IOError as e:
        print("Unable to read private key from file: {0}".format(e))
        return False
    except PasswordRequiredException as e:
        print("Unable to load in private key: {0}".format(e))
        return False
    except SSHException as e:
        print("Unable to import private key: {0}".format(e))
        print("Note: Only OpenSSH keys generated using ssh-keygen in PEM format are supported.")
        return False
    try:
        self.pkey.write_private_key_file(KEYFILE_PATH)
    except IOError as e:
        print("Unable to write private key to disk: {0}".format(e))
        return False
    else:
        return True
def __init__(self, host, privkey, remote_dir='/tmp', local_dir='/tmp',
             password=None, port=22, user='******'):
    self.ssh = SSHClient()
    self.host = host
    self.remote_dir = remote_dir
    self.local_dir = local_dir
    self.key = RSAKey.from_private_key_file(privkey, password=password)
    home = os.environ.get('HOME', '/root')
    path = os.path.join(home, '.ssh', 'known_hosts_paramiko')
    self.known_hosts = path
    self.ssh.load_host_keys(path)
    self.ssh.set_missing_host_key_policy(AutoAddPolicy())
    self.ssh.connect(host, pkey=self.key, port=port, username=user,
                     look_for_keys=False, allow_agent=False)
    self.sftp = self.ssh.open_sftp()
    self.sftp.chdir(self.remote_dir)
def run_browser_tests(self):
    common_options = [
        'phantomjs',
        '--debug=true',
        '--ignore-ssl-errors=true',
        '--local-url-access=false',
        '--local-storage-path=/dev/null',
        '--offline-storage-path=/dev/null',
        '/dev/stdin',
    ]
    browser_command = ' '.join(common_options + [shlex.quote(self.idna_url)])

    # Read the private key
    private_key = RSAKey.from_private_key_file(settings.SSH_PRIVATE_KEY)

    # Do the v4-only, v6-only and the NAT64 request in parallel
    v4only_client = SSHClient()
    v4only_client.load_host_keys(settings.SSH_KNOWN_HOSTS)
    v4only_client.connect(settings.V4_HOST, username=settings.SSH_USERNAME,
                          pkey=private_key, allow_agent=False, look_for_keys=False)
    logger.debug("Running '{}' on {}".format(browser_command, settings.V4_HOST))
    v4only_stdin, v4only_stdout, v4only_stderr = v4only_client.exec_command(
        browser_command, timeout=120)

    if self.ipv6_dns_results:
        v6only_client = SSHClient()
        v6only_client.load_host_keys(settings.SSH_KNOWN_HOSTS)
        v6only_client.connect(settings.V6_HOST, username=settings.SSH_USERNAME,
                              pkey=private_key, allow_agent=False, look_for_keys=False)
        logger.debug("Running '{}' on {}".format(browser_command, settings.V6_HOST))
        v6only_stdin, v6only_stdout, v6only_stderr = v6only_client.exec_command(
            browser_command, timeout=120)
    else:
        v6only_client = v6only_stdin = v6only_stdout = v6only_stderr = None

    nat64_client = SSHClient()
    nat64_client.load_host_keys(settings.SSH_KNOWN_HOSTS)
    nat64_client.connect(settings.NAT64_HOST, username=settings.SSH_USERNAME,
                         pkey=private_key, allow_agent=False, look_for_keys=False)
    logger.debug("Running '{}' on {}".format(browser_command, settings.NAT64_HOST))
    nat64_stdin, nat64_stdout, nat64_stderr = nat64_client.exec_command(
        browser_command, timeout=120)

    # Placeholders
    v4only_img = None
    v4only_img_bytes = None
    v6only_img = None
    v6only_img_bytes = None
    nat64_img = None
    nat64_img_bytes = None
    self.v4only_data = {}
    self.v6only_data = {}
    self.nat64_data = {}

    # Push the test script to the workers
    script_filename = os.path.realpath(
        os.path.join(os.path.dirname(__file__), 'render_page.js'))
    script = open(script_filename, 'rb').read()

    v4only_stdin.write(script)
    v4only_stdin.close()
    v4only_stdin.channel.shutdown_write()

    if v6only_client:
        v6only_stdin.write(script)
        v6only_stdin.close()
        v6only_stdin.channel.shutdown_write()

    nat64_stdin.write(script)
    nat64_stdin.close()
    nat64_stdin.channel.shutdown_write()

    # Wait for tests to finish
    try:
        logger.debug("Receiving data from IPv4-only test")
        v4only_json = v4only_stdout.read()
        v4only_debug = v4only_stderr.read()
        v4only_exit = v4only_stdout.channel.recv_exit_status()

        self.v4only_data = json.loads(
            v4only_json.decode('utf-8'),
            object_pairs_hook=OrderedDict) if v4only_json else {}
        self.v4only_debug = v4only_debug.decode('utf-8')
        self.v4only_data['exit_code'] = v4only_exit

        if 'image' in self.v4only_data:
            if self.v4only_data['image']:
                v4only_img_bytes = base64.decodebytes(
                    self.v4only_data['image'].encode('ascii'))
                # noinspection PyTypeChecker
                v4only_img = skimage.io.imread(io.BytesIO(v4only_img_bytes))
            del self.v4only_data['image']
    except socket.timeout:
        logger.error("{}: IPv4-only load timed out".format(self.url))

    if v6only_client:
        try:
            logger.debug("Receiving data from IPv6-only test")
            v6only_json = v6only_stdout.read()
            v6only_debug = v6only_stderr.read()
            v6only_exit = v6only_stdout.channel.recv_exit_status()

            self.v6only_data = json.loads(
                v6only_json.decode('utf-8'),
                object_pairs_hook=OrderedDict) if v6only_json else {}
            self.v6only_debug = v6only_debug.decode('utf-8')
            self.v6only_data['exit_code'] = v6only_exit

            if 'image' in self.v6only_data:
                if self.v6only_data['image']:
                    v6only_img_bytes = base64.decodebytes(
                        self.v6only_data['image'].encode('ascii'))
                    # noinspection PyTypeChecker
                    v6only_img = skimage.io.imread(io.BytesIO(v6only_img_bytes))
                del self.v6only_data['image']
        except socket.timeout:
            logger.error("{}: IPv6-only load timed out".format(self.url))
    else:
        logger.info("{}: Not running IPv6-only test".format(self.url))

    try:
        logger.debug("Receiving data from NAT64 test")
        nat64_json = nat64_stdout.read()
        nat64_debug = nat64_stderr.read()
        nat64_exit = nat64_stdout.channel.recv_exit_status()

        self.nat64_data = json.loads(
            nat64_json.decode('utf-8'),
            object_pairs_hook=OrderedDict) if nat64_json else {}
        self.nat64_debug = nat64_debug.decode('utf-8')
        self.nat64_data['exit_code'] = nat64_exit

        if 'image' in self.nat64_data:
            if self.nat64_data['image']:
                nat64_img_bytes = base64.decodebytes(
                    self.nat64_data['image'].encode('ascii'))
                # noinspection PyTypeChecker
                nat64_img = skimage.io.imread(io.BytesIO(nat64_img_bytes))
            del self.nat64_data['image']
    except socket.timeout:
        logger.error("{}: NAT64 load timed out".format(self.url))

    # Done talking to workers, close connections
    if v4only_client:
        v4only_client.close()
    if v6only_client:
        v6only_client.close()
    if nat64_client:
        nat64_client.close()

    # Calculate score based on resources
    v4only_resources_ok = self.v4only_resources[0]
    if v4only_resources_ok > 0:
        self.v6only_resource_score = min(
            self.v6only_resources[0] / v4only_resources_ok, 1)
        logger.info("{}: IPv6-only Resource Score = {:0.2f}".format(
            self.url, self.v6only_resource_score))

        self.nat64_resource_score = min(
            self.nat64_resources[0] / v4only_resources_ok, 1)
        logger.info("{}: NAT64 Resource Score = {:0.2f}".format(
            self.url, self.nat64_resource_score))
    else:
        logger.error("{}: did not load over IPv4-only, "
                     "unable to perform resource test".format(self.url))

    return_value = 0

    if v4only_img_bytes:
        # Store the image
        self.v4only_image.save('v4.png', ContentFile(v4only_img_bytes), save=False)
    else:
        return_value |= 1

    if v6only_img_bytes:
        # Store the image
        self.v6only_image.save('v6.png', ContentFile(v6only_img_bytes), save=False)
    else:
        return_value |= 2

    if nat64_img_bytes:
        # Store the image
        self.nat64_image.save('nat64.png', ContentFile(nat64_img_bytes), save=False)
    else:
        return_value |= 4

    if v4only_img is not None:
        logger.debug("{}: Loading IPv4-only screenshot".format(self.url))

        if v6only_img is not None:
            logger.debug("{}: Loading IPv6-only screenshot".format(self.url))
            # Suppress stupid warnings
            with warnings.catch_warnings(record=True):
                self.v6only_image_score = compare_ssim(v4only_img, v6only_img,
                                                       multichannel=True)
            logger.info("{}: IPv6-only Image Score = {:0.2f}".format(
                self.url, self.v6only_image_score))
        else:
            logger.warning("{}: did not load over IPv6-only, 0 score".format(self.url))
            self.v6only_image_score = 0.0

        if nat64_img is not None:
            logger.debug("{}: Loading NAT64 screenshot".format(self.url))
            # Suppress stupid warnings
            with warnings.catch_warnings(record=True):
                self.nat64_image_score = compare_ssim(v4only_img, nat64_img,
                                                      multichannel=True)
            logger.info("{}: NAT64 Image Score = {:0.2f}".format(
                self.url, self.nat64_image_score))
        else:
            logger.warning("{}: did not load over NAT64, 0 score".format(self.url))
            self.nat64_image_score = 0.0
    else:
        logger.error("{}: did not load over IPv4-only, "
                     "unable to perform image test".format(self.url))

    self.save()
    return return_value
def check_status():
    parser = argparse.ArgumentParser(description="Submit task to yascheduler daemon")
    parser.add_argument('-j', '--jobs', required=False, default=None, nargs='*')
    parser.add_argument('-v', '--view', required=False, default=None, nargs='?',
                        type=bool, const=True)
    parser.add_argument('-o', '--convergence', required=False, default=None, nargs='?',
                        type=bool, const=True, help='needs -v option')
    parser.add_argument('-i', '--info', required=False, default=None, nargs='?',
                        type=bool, const=True)
    parser.add_argument('-k', '--kill', required=False, default=None, nargs='?',
                        type=bool, const=True)
    args = parser.parse_args()

    config = ConfigParser()
    config.read(CONFIG_FILE)
    yac = Yascheduler(config)
    statuses = {
        yac.STATUS_TO_DO: "QUEUED",
        yac.STATUS_RUNNING: "RUNNING",
        yac.STATUS_DONE: "FINISHED"
    }
    local_parsing_ready, local_calc_snippet = False, False

    if args.jobs:
        tasks = yac.queue_get_tasks(jobs=args.jobs)
    else:
        tasks = yac.queue_get_tasks(status=(yac.STATUS_RUNNING, yac.STATUS_TO_DO))

    if args.view or args.kill:
        if not tasks:
            print('NO MATCHING TASKS FOUND')
            return
        ssh_custom_key = {}
        for filename in os.listdir(config.get('local', 'data_dir')):
            if not filename.startswith('yakey') or not os.path.isfile(
                    os.path.join(config.get('local', 'data_dir'), filename)):
                continue
            key_path = os.path.join(config.get('local', 'data_dir'), filename)
            pmk_key = RSAKey.from_private_key_file(key_path)
            print('LOADED KEY %s' % key_path)
            ssh_custom_key = {'pkey': pmk_key}
            break

    if args.convergence:
        try:
            from pycrystal import CRYSTOUT
            from numpy import nan
            local_parsing_ready = True
        except:
            pass

    if args.view:
        yac.cursor.execute(
            'SELECT task_id, label, metadata, ip FROM yascheduler_tasks WHERE status=%s AND task_id IN (%s);'
            % (yac.STATUS_RUNNING,
               ', '.join([str(task['task_id']) for task in tasks])))
        for row in yac.cursor.fetchall():
            print("." * 50 + "ID%s %s at %s@%s:%s" % (
                row[0], row[1], config.get('remote', 'user'), row[3],
                row[2]['remote_folder']))
            ssh_conn = SSH_Connection(host=row[3],
                                      user=config.get('remote', 'user'),
                                      connect_kwargs=ssh_custom_key)
            try:
                result = ssh_conn.run('tail -n15 %s/OUTPUT' % row[2]['remote_folder'],
                                      hide=True)
            except UnexpectedExit:
                print('OUTDATED TASK, SKIPPING')
            else:
                print(result.stdout)

            if local_parsing_ready:
                local_calc_snippet = os.path.join(config.get('local', 'data_dir'),
                                                  'local_calc_snippet.tmp')
                try:
                    ssh_conn.get(row[2]['remote_folder'] + '/OUTPUT', local_calc_snippet)
                except IOError as err:
                    continue
                calc = CRYSTOUT(local_calc_snippet)
                output_lines = ''
                if calc.info['convergence']:
                    output_lines += str(calc.info['convergence']) + "\n"
                if calc.info['optgeom']:
                    for n in range(len(calc.info['optgeom'])):
                        try:
                            ncycles = calc.info['ncycles'][n]
                        except IndexError:
                            ncycles = "^"
                        output_lines += "{:8f}".format(calc.info['optgeom'][n][0] or nan) + " " + \
                                        "{:8f}".format(calc.info['optgeom'][n][1] or nan) + " " + \
                                        "{:8f}".format(calc.info['optgeom'][n][2] or nan) + " " + \
                                        "{:8f}".format(calc.info['optgeom'][n][3] or nan) + " " + \
                                        "E={:12f}".format(calc.info['optgeom'][n][4] or nan) + " eV" + " " + \
                                        "(%s)" % ncycles + "\n"
                print(output_lines)

    elif args.kill:
        if not args.jobs:
            print('NO JOBS GIVEN')
            return
        yac.cursor.execute(
            'SELECT ip FROM yascheduler_tasks WHERE status=%s AND task_id IN (%s);'
            % (yac.STATUS_RUNNING,
               ', '.join([str(task['task_id']) for task in tasks])))
        for row in yac.cursor.fetchall():
            ssh_conn = SSH_Connection(host=row[0],
                                      user=config.get('remote', 'user'),
                                      connect_kwargs=ssh_custom_key)
            try:
                result = ssh_conn.run('pkill %s' % yac.RUNNING_MARKER, hide=True)
            except:
                pass

    elif args.info:
        for task in tasks:
            print('task_id={}\tstatus={}\tlabel={}\tip={}'.format(
                task['task_id'], statuses[task['status']], task['label'],
                task['ip'] or '-'))

    else:
        for task in tasks:
            print('{} {}'.format(task['task_id'], statuses[task['status']]))

    yac.connection.close()

    if local_calc_snippet and os.path.exists(local_calc_snippet):
        os.unlink(local_calc_snippet)
def test_generate_cert_generated_cert_is_loadable(saved_key, key_path):
    RSAKey.from_private_key_file(str(key_path))
def host_key(self):
    return RSAKey.from_private_key_file(SERVER_KEY_PRIVATE)
def connect(self, host, port=22, user=None, passw=None, cert=None, path='/', timeout=10):
    """Method connects to server

    Args:
        host (str): server host
        port (int): server port, default protocol port
        user (str): username
        passw (str): password
        cert (str): path to certificate file
        path (str): server path
        timeout (int): timeout

    Returns:
        bool: result

    Raises:
        event: ftp_before_connect
        event: ftp_after_connect

    """

    try:
        message = '{0}/{1}@{2}:{3}{4} cert:{5}, timeout:{6}'.format(
            user, passw, host, port, path, cert, timeout)
        self._mh.demsg('htk_on_debug_info', self._mh._trn.msg(
            'htk_ftp_connecting', message), self._mh.fromhere())

        ev = event.Event(
            'ftp_before_connect', host, port, user, passw, cert, path, timeout)
        if (self._mh.fire_event(ev) > 0):
            host = ev.argv(0)
            port = ev.argv(1)
            user = ev.argv(2)
            passw = ev.argv(3)
            cert = ev.argv(4)
            path = ev.argv(5)
            timeout = ev.argv(6)

        self._host = host
        self._port = port
        self._user = user
        self._passw = passw
        self._cert = cert

        if (ev.will_run_default()):
            setdefaulttimeout(timeout)
            t = Transport((host, self._port))
            if (user != None or cert != None):
                pkey = RSAKey.from_private_key_file(
                    self._cert) if (cert != None) else None
                t.connect(username=user, password=passw, pkey=pkey)
            self._client = SFTPClient.from_transport(t)
            self._is_connected = True

        if (path != None):
            self.change_dir(path)

        ev = event.Event('ftp_after_connect')
        self._mh.fire_event(ev)

        return True
    except (SSHException, NoValidConnectionsError, error) as ex:
        self._mh.demsg(
            'htk_on_error', 'error: {0}'.format(ex), self._mh.fromhere())
        return False
def __init__(self, hosts, **kwargs):
    etce.fieldclient.FieldClient.__init__(self, hosts)

    self._connection_dict = {}
    self._execute_threads = []

    # ssh authentication is revised (5/7/2019):
    #
    # As tested against paramiko 1.16
    #
    # The user must specify the ssh key file to use for authentication. They
    # can specify the key file explicitly with the sshkey parameter -
    # if the filename is not absolute, it is assumed to be a file located
    # in ~/.ssh. If sshkey is None, try to determine the key file from
    # ~/.ssh/config. If that also fails, check for the default ssh rsa
    # key ~/.ssh/id_rsa and attempt to use that.
    #
    # paramiko also provides a paramiko.agent.Agent class for
    # querying a running ssh-agent for its loaded keys. The agent
    # can be used:
    #
    #  1. by calling connect with allow_agent=True (the default)
    #  2. by calling Agent().get_keys() and passing the result to connect as pkey
    #
    # In the first case, the connect call selects the first key found
    # in the running agent and prompts for a passphrase - without indicating
    # the key it is prompting for. In the second case, the only identifying
    # information that can be obtained from an agent-returned key object is
    # its md5 fingerprint - which is correct but not convenient for
    # helping the user select and identify the agent key to use. For these
    # reasons, ignore the agent for authentication and make the user identify
    # the key file(s) to use - preferably via their .ssh/config file.
    user = kwargs.get('user', None)
    port = kwargs.get('port', None)
    policystr = kwargs.get('policy', 'reject')
    sshkey = kwargs.get('sshkey', None)

    user_specified_key_file = None

    if sshkey:
        if sshkey[0] == '/':
            user_specified_key_file = sshkey
        else:
            user_specified_key_file = os.path.expanduser(
                os.path.join('~/.ssh', sshkey))

        if not os.path.exists(user_specified_key_file):
            raise FieldConnectionError(
                'sshkey "%s" doesn\'t exist. Quitting.' % user_specified_key_file)

    self._envfile = kwargs.get('envfile', None)
    self._config = ConfigDictionary()

    ssh_config_file = os.path.expanduser('~/.ssh/config')
    ssh_config = None
    if os.path.exists(ssh_config_file):
        ssh_config = paramiko.SSHConfig()
        ssh_config.parse(open(ssh_config_file))

    authenticated_keys = {}

    policy = RejectPolicy
    if policystr == 'warning':
        policy = WarningPolicy
    elif policystr == 'autoadd':
        policy = AutoAddPolicy

    policy = self._set_unknown_hosts_policy(hosts, port, ssh_config, policy)

    for host in hosts:
        host_config = None
        if ssh_config:
            host_config = ssh_config.lookup(host)

        host_user = os.path.basename(os.path.expanduser('~'))
        if user:
            host_user = user
        elif host_config:
            host_user = host_config.get('user', host_user)

        host_port = 22
        if port:
            host_port = port
        elif host_config:
            host_port = host_config.get('port', host_port)

        host_key_filenames = []
        if user_specified_key_file:
            host_key_filenames = [user_specified_key_file]
        elif host_config:
            host_key_filenames = host_config.get('identityfile', host_key_filenames)

        if not host_key_filenames:
            default_rsa_keyfile = os.path.join(os.path.expanduser('~'),
                                               '.ssh', 'id_rsa')
            if os.path.exists(default_rsa_keyfile) and os.path.isfile(default_rsa_keyfile):
                host_key_filenames = [default_rsa_keyfile]
            else:
                message = 'Unable to find an RSA SSH key associated with host "%s". ' \
                          'Either:\n\n' \
                          ' 1) specify a key using the "sshkey" option\n' \
                          ' 2) add a "Host" rule to your ~/.ssh/config file identifying the key\n' \
                          ' 3) create a default RSA key ~/.ssh/id_rsa".\n\n' \
                          'Quitting.' % host
                raise FieldConnectionError(message)

        try:
            pkey = None

            for host_key_file in host_key_filenames:
                if host_key_file in authenticated_keys:
                    pkey = authenticated_keys[host_key_file]
                else:
                    pkey = None
                    try:
                        # Assume key is not passphrase protected first
                        pkey = RSAKey.from_private_key_file(host_key_file, None)
                    except PasswordRequiredException as pre:
                        # if that fails, prompt for passphrase
                        pkey = RSAKey.from_private_key_file(
                            host_key_file,
                            getpass.getpass('Enter passphrase for %s: ' % host_key_file))

                    authenticated_keys[host_key_file] = pkey

                break

            if not pkey:
                message = 'Unable to connect to host "%s", cannot authenticate. ' \
                          'Quitting.' % host
                raise FieldConnectionError(message)

            client = paramiko.SSHClient()
            client.load_system_host_keys()
            client.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
            client.set_missing_host_key_policy(policy())

            client.connect(hostname=host,
                           username=host_user,
                           port=int(host_port),
                           pkey=pkey,
                           allow_agent=False)

            self._connection_dict[host] = client
        except socket.gaierror as ge:
            message = '%s "%s". Quitting.' % (ge.strerror, host)
            raise FieldConnectionError(message)
        except paramiko.ssh_exception.NoValidConnectionsError as e:
            raise FieldConnectionError('Unable to connect to host "%s", '
                                       'NoValidConnectionsError. Quitting.' % host)
        except Exception as e:
            raise FieldConnectionError(e)