class HaproxyStats:
    socket_path = "/var/lib/haproxy/stats"
    ssh = None

    def __init__(self, host):
        self.ssh = Ssh(host)

    def getStats(self):
        stats = {}
        com = "echo 'show stat' | socat stdio unix-connect:" + self.socket_path
        (ret, content) = self.ssh.commandAsRoot(com)
        if ret == 0 and content:
            lines = content.splitlines()
            params = lines[0].strip("# ").split(",")
            for i in range(1, len(lines)):
                cols = lines[i].split(",")
                values = {}
                for j in range(0, len(cols)):
                    if params[j]:
                        values[params[j]] = cols[j]
                name = cols[0] + "/" + cols[1]
                stats[name] = values
        return stats

    def socketCommand(self, com):
        com = "echo '" + com + "' | socat stdio unix-connect:" + self.socket_path
        (ret, content) = self.ssh.commandAsRoot(com)
        if ret == 0:
            if content:
                return content
            return "[command sent]"
        return "[socket command error]: " + content
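# Usage sketch (hypothetical host name; assumes the Ssh(host) helper with
# commandAsRoot() used above): getStats() keys entries by "<proxy>/<server>"
# and maps the CSV column names from 'show stat' to their values.
stats = HaproxyStats("lb01.example.com")
for name, values in stats.getStats().items():
    print(name, values.get("status"), values.get("scur"))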
def check_boxes(box_ids, boxes_tobe=[1] * 8):
    boxes_are_online = []
    for box_id in box_ids:
        ip = '192.168.' + box_ips[box_id][0]
        ssh = Ssh()
        try:
            ssh.login(ip)
            time.sleep(1)
            print("{}: current box: {} is online.".format(
                nowtimefmt(), box_ips[box_id][1]))
            boxes_are_online.append(1)
        except Exception as e:
            if "Unable to connect" in str(e) or "连接尝试失败" in str(e):
                print("{}: current box: {} is offline.".format(
                    nowtimefmt(), box_ips[box_id][1]))
            else:
                print("{}: unknown error {}".format(nowtimefmt(), e))
            boxes_are_online.append(0)
        finally:
            try:
                ssh.ssh.close()
            except Exception:
                pass
    if len(boxes_are_online):
        return boxes_are_online == boxes_tobe
def __init__(self, remote_address: str, remote_username: str, remote_port: int,
             remote_path: str, file_name: str):
    super().__init__(name=self.__class__.__name__)
    self.__remote_path = remote_path
    self.__file_name = file_name
    self.__ssh = Ssh(host=remote_address, port=remote_port, user=remote_username)
def __init__(self, args):
    self.command = args.command
    self.server_name = args.server_name
    self.git = Git()
    self.error = Error()
    if self.server_name != '':
        self.conf = self._load_conf()
        self.ssh = Ssh(self.conf.ssh_infos)
def test_scan_when_ok(self):
    # Prepare data and mocks
    test_subject = Ssh(None)
    test_subject.check_protocol = MagicMock(return_value=True)
    # Run test scenario
    result = test_subject.scan()
    # Assertions
    test_subject.check_protocol.assert_called_once_with('/etc/ssh/sshd_config')
    self.assertEqual(result[0], ScanStatus.success)
def test_check_protocol_when_ok_no_protocol(self):
    # Prepare data and mocks
    with patch('builtins.open',
               mock_open(read_data='# $OpenBSD: s\n\n# Host *\nSomeConfig')) as mock_file:
        test_subject = Ssh(None)
        # Run test scenario
        result = test_subject.check_protocol('/etc/ssh/sshd_config')
        # Assertions
        mock_file.assert_called_once_with('/etc/ssh/sshd_config')
        self.assertTrue(result)
def test_scan_when_ok(self):
    # Prepare data and mocks
    test_subject = Ssh(None)
    test_subject.check_protocol = MagicMock(return_value=True)
    # Run test scenario
    result = test_subject.scan()
    # Assertions
    test_subject.check_protocol.assert_called_once_with(
        '/etc/ssh/sshd_config')
    self.assertEqual(result[0], ScanStatus.success)
def test_check_protocol_when_ok_no_protocol(self):
    # Prepare data and mocks
    with patch(
        'builtins.open',
        mock_open(read_data='# $OpenBSD: s\n\n# Host *\nSomeConfig')
    ) as mock_file:
        test_subject = Ssh(None)
        # Run test scenario
        result = test_subject.check_protocol('/etc/ssh/sshd_config')
        # Assertions
        mock_file.assert_called_once_with('/etc/ssh/sshd_config')
        self.assertTrue(result)
def __init__(self, remote_address: str, remote_username: str, remote_port: int,
             remote_path_to_scan: str, local_path_to_scan_script: str,
             remote_path_to_scan_script: str):
    self.logger = logging.getLogger("RemoteScanner")
    self.__remote_path_to_scan = remote_path_to_scan
    self.__local_path_to_scan_script = local_path_to_scan_script
    self.__remote_path_to_scan_script = remote_path_to_scan_script
    self.__ssh = Ssh(host=remote_address, port=remote_port, user=remote_username)
    self.__scp = Scp(host=remote_address, port=remote_port, user=remote_username)
    self.__first_run = True
def __init__(self):
    self.config = self.read_config()
    self.args = self.parse_args()
    cmd = self.make_cmd()
    print(cmd)
    for host in self.config.get('ssh', 'hosts').split(','):
        ssh = Ssh(host,
                  int(self.config.get('ssh', 'port')),
                  self.config.get('ssh', 'login'),
                  self.config.get('ssh', 'password'))
        try:
            grep_out = ssh.remote_cmd(cmd)
            for row in grep_out:
                print(row)
        except ValueError as e:
            print(e)
class Parser:
    def __init__(self):
        self.config = Configuration()
        self.launch = Init()
        date_from = self.launch.dates[0]
        date_to = self.launch.dates[1]
        self.db = Db()
        for host in self.config.hosts:
            self.ssh = Ssh(host, 22, self.config.login, self.config.password)
            logs = self.find_logs(date_from, date_to)
            for log in logs:
                strings = self.find_strings(log)
                batch_counter = 0
                batch = []
                for string in strings:
                    parsed = self.parse_row(string)
                    if re.match(r'(\d{4})-(\d{2})-(\d{2})', parsed[0]):
                        batch.append(parsed)
                        batch_counter += 1
                    if batch_counter >= 100:
                        self.db.insert(batch)
                        batch = []
                        batch_counter = 0

    def find_logs(self, date_from, date_to):
        try:
            cmd = "find {0}/ -maxdepth 1 -name 'server.log.*' -newermt '{1}' " \
                  "! -newermt '{2}' | sort".format(self.config.log_path, date_from, date_to)
            logs = self.ssh.remote_cmd(cmd)
            return logs
        except ValueError as e:
            print(e)

    def find_strings(self, log):
        try:
            cmd = "zgrep -a 'INFO' {0} | zgrep -a '\[statistics\]' ".format(log)
            cmd += "| awk '{ print $1\" \"$2\";\"$7}'"
            strings = self.ssh.remote_cmd(cmd)
            return strings
        except ValueError as e:
            print(e)

    def parse_row(self, row):
        date = row.split(';')[0].split(',')[0]
        method = row.split(';')[1]
        result = [date, method]
        return result
def getStorage(url):
    parseUrl = urlparse(url)
    protocolName = parseUrl.scheme
    # Drop the leading '/' that parseUrl.path always starts with
    backupPath = parseUrl.path[1:]
    login = parseUrl.username
    password = parseUrl.password
    server = parseUrl.hostname
    if protocolName == 'file':
        from file import File
        storage = File(backupPath)
    elif protocolName == 'ssh':
        from ssh import Ssh
        print('ssh backup to: ' + backupPath)
        (login, password) = getIds(login, password, server)
        storage = Ssh(backupPath, login, password, server)
    elif protocolName == 'imap' or protocolName == 'imaps':
        from imaps import Imaps
        (login, password) = getIds(login, password, server)
        if backupPath == '':
            storage = Imaps(login, password, server)
        else:
            storage = Imaps(login, password, server, box=backupPath)
    elif protocolName == 'http' or protocolName == 'https':
        from webdav import Webdav
        (login, password) = getIds(login, password, server)
        storage = Webdav('', urljoin(protocolName + '://' + server, backupPath),
                         login, password)
    return storage
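# Usage sketch (hypothetical URL): the backend is selected from the URL scheme,
# and getIds() may complete missing credentials.
storage = getStorage('ssh://backup:secret@backup.example.com/var/backups/daily')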
def main():
    """ Runs the program """
    config = get_config()
    scanners = []
    if config['enabled']['openports']:
        scanners.append(OpenPorts(config))
    if config['enabled']['root']:
        scanners.append(Root(config))
    if config['enabled']['ssh']:
        scanners.append(Ssh(config))
    if config['enabled']['umask']:
        scanners.append(Umask(config))
    if config['enabled']['update']:
        scanners.append(Update(config))
    if config['enabled']['worldwritable']:
        scanners.append(WorldWritable(config))
    for scanner in scanners:
        print('-' * 79)
        print('Running:', scanner.__class__.__name__)
        result = scanner.scan()
        print('Status:', result[0])
        print('Message:', result[1])
        print()
def __init__(self, config):
    self.config = config
    self.mountpoint = self.config.get('folders', 'check_mountpoint')
    self.ssh = Ssh.fromconfig(self.config.items("ssh"))
    self.last = "last"
    self.target = self.config.get('folders', 'target')
    self.rsyncconf = self.config.get('rsync', 'conf')
    self.link = "{target}/{last}".format(target=self.target, last=self.last)
class DeleteRemoteProcess(AppOneShotProcess):
    def __init__(self, remote_address: str, remote_username: str, remote_port: int,
                 remote_path: str, file_name: str):
        super().__init__(name=self.__class__.__name__)
        self.__remote_path = remote_path
        self.__file_name = file_name
        self.__ssh = Ssh(host=remote_address, port=remote_port, user=remote_username)

    def run_once(self):
        self.__ssh.set_base_logger(self.logger)
        file_path = os.path.join(self.__remote_path, self.__file_name)
        self.logger.debug("Deleting remote file {}".format(self.__file_name))
        try:
            out = self.__ssh.run_command("rm -rf '{}'".format(file_path))
            self.logger.debug("Remote delete output: {}".format(out.decode()))
        except SshError:
            self.logger.exception("Exception while deleting remote file")
def create_protocol():
    protocol = os.environ.get("protocol")
    logger.info("Using protocol: %s" % protocol)
    if protocol.lower() == "ftp":
        return Ftp()
    elif protocol.lower() == "ssh":
        return Ssh()
    elif protocol.lower() == "json":
        return Json()
    else:
        raise Exception("Unknown protocol: '%s'" % protocol)
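# Usage sketch: the protocol is read from the "protocol" environment variable
# (value below is hypothetical); an unknown value raises an exception.
os.environ["protocol"] = "ssh"
protocol = create_protocol()   # returns an Ssh() instance here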
def check_box_single(box_id, box_tobe):
    box_is_online = None
    ip = '192.168.' + box_ips[box_id][0]
    ssh = Ssh()
    try:
        ssh.login(ip)
        time.sleep(1)
        print("{}: current box: {} is online.".format(nowtimefmt(), box_id))
        box_is_online = 1
    except Exception as e:
        if "Unable to connect" in str(e) or "连接尝试失败" in str(e):
            print("{}: current box: {} is offline.".format(
                nowtimefmt(), box_id))
        else:
            print("{}: {} hit an unknown error {}".format(nowtimefmt(), box_id, e))
        box_is_online = 0
    finally:
        try:
            ssh.ssh.close()
        except Exception:
            pass
    return box_is_online == box_tobe
def __init__(self):
    self.config = Configuration()
    self.launch = Init()
    date_from = self.launch.dates[0]
    date_to = self.launch.dates[1]
    self.db = Db()
    for host in self.config.hosts:
        self.ssh = Ssh(host, 22, self.config.login, self.config.password)
        logs = self.find_logs(date_from, date_to)
        for log in logs:
            strings = self.find_strings(log)
            batch_counter = 0
            batch = []
            for string in strings:
                parsed = self.parse_row(string)
                if re.match(r'(\d{4})-(\d{2})-(\d{2})', parsed[0]):
                    batch.append(parsed)
                    batch_counter += 1
                if batch_counter >= 100:
                    self.db.insert(batch)
                    batch = []
                    batch_counter = 0
class Deploy:
    clrTerm = {
        'A': '\033[92m',   # add
        'C': '\033[92m',   # copied
        'D': '\033[31m',   # deleted
        'M': '\033[92m',   # modified
        'R': '\033[94m',   # renamed
        'END': '\033[0m',  # close color
    }

    def __init__(self, args):
        self.command = args.command
        self.server_name = args.server_name
        self.git = Git()
        self.error = Error()
        if self.server_name != '':
            self.conf = self._load_conf()
            self.ssh = Ssh(self.conf.ssh_infos)

    def run_manager(self):
        command = 'action_' + self.command
        is_hasattr = hasattr(self, command)
        is_method = is_hasattr and inspect.ismethod(getattr(self, command))
        if is_hasattr and is_method:
            getattr(self, command)()

    def _load_conf(self):
        return ConfParser(self.server_name)

    def action_on(self):
        # call the 'on' action to push to the server
        pass

    def action_check(self):
        remote_branch, remote_sha = self.ssh.get_rev()
        if self.git.branch != remote_branch:
            self.error.stop_with_msg(self.error._ERR_DIFF_BRANCH_)
        diff = self.git.diff_file_name(remote_sha)
        for line in diff:
            stat, file_name = line[0], line[1]
            clrTerm, clrClose = self.clrTerm[stat], self.clrTerm['END']
            print("{} {} {} {}".format(clrTerm, stat, file_name, clrClose))
class RemoteScanner(IScanner):
    """
    Scanner implementation to scan the remote filesystem
    """
    RETRY_COUNT = 5

    def __init__(self, remote_address: str, remote_username: str, remote_port: int,
                 remote_path_to_scan: str, local_path_to_scan_script: str,
                 remote_path_to_scan_script: str):
        self.logger = logging.getLogger("RemoteScanner")
        self.__remote_path_to_scan = remote_path_to_scan
        self.__local_path_to_scan_script = local_path_to_scan_script
        self.__remote_path_to_scan_script = remote_path_to_scan_script
        self.__ssh = Ssh(host=remote_address, port=remote_port, user=remote_username)
        self.__scp = Scp(host=remote_address, port=remote_port, user=remote_username)
        self.__first_run = True

    @overrides(IScanner)
    def set_base_logger(self, base_logger: logging.Logger):
        self.logger = base_logger.getChild("RemoteScanner")
        self.__ssh.set_base_logger(self.logger)
        self.__scp.set_base_logger(self.logger)

    @overrides(IScanner)
    def scan(self) -> List[SystemFile]:
        if self.__first_run:
            self._install_scanfs()
            self.__first_run = False
        retries = 0
        out = None
        while out is None:
            try:
                out = self.__ssh.run_command("{} {}".format(
                    self.__remote_path_to_scan_script, self.__remote_path_to_scan))
            except SshError as e:
                # Suppress specific errors and retry a fixed number of times
                # Otherwise raise a fatal AppError
                if RemoteScanner.__suppress_error(e) and retries < RemoteScanner.RETRY_COUNT:
                    self.logger.warning(
                        "Retrying remote scan after error: {}".format(str(e)))
                    out = None
                    retries += 1
                else:
                    self.logger.exception("Caught an SshError")
                    raise AppError(Localization.Error.REMOTE_SERVER_SCAN)
        remote_files = pickle.loads(out)
        return remote_files

    def _install_scanfs(self):
        self.logger.info("Installing local:{} to remote:{}".format(
            self.__local_path_to_scan_script, self.__remote_path_to_scan_script))
        if not os.path.isfile(self.__local_path_to_scan_script):
            raise RemoteScannerError(
                "Failed to find scanfs executable at {}".format(
                    self.__local_path_to_scan_script))
        try:
            self.__scp.copy(local_path=self.__local_path_to_scan_script,
                            remote_path=self.__remote_path_to_scan_script)
        except ScpError:
            self.logger.exception("Caught scp exception")
            raise AppError(Localization.Error.REMOTE_SERVER_INSTALL)

    @staticmethod
    def __suppress_error(error: SshError) -> bool:
        return "text file busy" in str(error).lower()
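# Construction sketch (hypothetical host and paths): scan() installs the scanfs
# script over scp on the first call, runs it remotely, and unpickles the list of
# SystemFile objects it prints; "text file busy" errors are retried up to
# RETRY_COUNT times.
scanner = RemoteScanner(remote_address="seedbox.example.com",
                        remote_username="user",
                        remote_port=22,
                        remote_path_to_scan="/home/user/files",
                        local_path_to_scan_script="./scanfs",
                        remote_path_to_scan_script="/tmp/scanfs")
remote_files = scanner.scan()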
def setUp(self):
    self.ssh = Ssh(ip='127.0.0.1', user='******', password='', debug=True)
class SshTestCase(unittest.TestCase):

    # Always run before any test
    def setUp(self):
        self.ssh = Ssh(ip='127.0.0.1', user='******', password='', debug=True)

    # Always run after any test
    def tearDown(self):
        pass

    # Reminder : test name must start with test ...
    def test_connect_and_close(self):
        self.ssh.connect()
        self.assertTrue(self.ssh.connected)
        self.ssh.close()
        self.assertFalse(self.ssh.connected)

    def test_connect_key_authentication_failure(self):
        self.ssh.private_key_file = "whateverfile"
        self.ssh.mock(exception=paramiko.AuthenticationException)
        self.ssh.connect()
        self.ssh.close()
        self.assertRaises(paramiko.AuthenticationException)

    def test_ssh_connection_failure(self):
        self.ssh.mock(exception=paramiko.SSHException)
        self.ssh.connect()
        self.ssh.close()
        self.assertRaises(paramiko.SSHException)

    def test_socket_timeout(self):
        self.ssh.mock(exception=socket.timeout)
        self.ssh.connect()
        self.ssh.close()
        self.assertRaises(socket.timeout)

    def test_genuine_exception(self):
        self.ssh.mock(exception=Exception)
        self.ssh.connect()
        self.ssh.close()
        self.assertRaises(Exception)

    def test_sshcmd_single_command(self):
        self.ssh.mock(context='default')
        self.ssh.connect()
        self.ssh.commands(["uptime"])
        # Check we can see the load average in the output :
        # 12:09:34 up 2:49, 1 user, load average: 0.01, 0.05, 0.0
        self.ssh.close()
        self.assertNotEqual(self.ssh.output.find("load average"), -1)

    def test_sshcmd_default_command(self):
        self.ssh.connect()
        self.ssh.commands(["whatever"])
        self.ssh.close()
        self.assertTrue(True)

    # @unittest.skip("by-passed for now")
    def test_sshcmd_double_command(self):
        self.ssh.mock(context='default')
        self.ssh.connect()
        self.ssh.commands(["uptime", "ps -ef"])
        self.ssh.close()
        self.assertNotEqual(self.ssh.output.find("load average"), -1)

    def test_sshcmd_commands_timeout(self):
        self.ssh.mock(exception=socket.timeout)
        self.ssh.connect()
        self.ssh.commands(["whatever"])
        self.ssh.close()
        self.assertRaises(socket.timeout)

    def test_sshcmd_commands_failure(self):
        self.ssh.mock(exception=paramiko.SSHException)
        self.ssh.connect()
        self.ssh.commands(["whatever"])
        self.ssh.close()
        self.assertRaises(paramiko.SSHException)

    def test_sshcmd_execute(self):
        self.ssh.mock(context='default')
        self.ssh.connect()
        self.ssh.execute(["uptime", "ps -ef"])
        self.ssh.close()
        self.assertNotEqual(self.ssh.output.find("load average"), -1)

    def test_shell_send_single(self):
        self.ssh.mock(context='default')
        self.ssh.connect()
        self.ssh.commands(["uptime"])
        self.ssh.close()
        self.assertNotEqual(self.ssh.output.find("load average"), -1)

    def test_ssh_outputfile(self):
        self.ssh.trace_open(filename="myTraceFile.log")
        self.ssh.trace_write("\n*** This is test mark line 1 ***\n")
        self.ssh.trace_write("\n*** This is test mark line 2 ***\n")
        self.ssh.trace_mark("MARK1 - command sample")
        self.ssh.mock(context='default')
        self.ssh.connect()
        self.ssh.commands(["uptime"])
        self.ssh.trace_mark("MARK2 - execute sample")
        self.ssh.mock(context='ps')
        self.ssh.execute(["ps -ef"])
        self.ssh.shell_read()
        self.ssh.close()
def __init__(self, host):
    self.ssh = Ssh(host)
    self.data = self.load()
class HaproxyConfig():
    data = haproxy_cfg()
    cfg_tmp_path = "/etc/haproxy/haproxy_tmp.cfg"
    loadflag_path = "/etc/haproxy/haproxy_ctl_not_config_loaded"
    ssh = None

    def __init__(self, host):
        self.ssh = Ssh(host)
        self.data = self.load()

    def setLoaded(self, loaded):
        if loaded and not self.isLoaded():
            com = "rm -f " + self.loadflag_path
            self.ssh.commandAsRoot(com)
        if not loaded and self.isLoaded():
            com = "touch " + self.loadflag_path
            self.ssh.commandAsRoot(com)

    def isLoaded(self):
        if os.path.exists(self.loadflag_path):
            return False
        return True

    def isSectionHeader(self, key):
        if key == "global" or key == "defaults" or key == "frontend" or key == "backend":
            return True
        return False

    def getSections(self, conf):
        lines = conf.splitlines()
        item_list = []
        sections = []
        sec = section()
        for line in lines:
            columns = line.strip().split()
            if len(columns) > 0 and columns[0][0] == '#':
                continue
            if len(columns) == 0:
                continue
            if self.isSectionHeader(columns[0]):
                if sec.name:
                    sections.append(sec)
                sec = section()
                sec.name = columns[0]
                for i in range(1, len(columns)):
                    sec.attributes.append(columns[i])
                continue
            if sec.name:
                values = []
                for i in range(0, len(columns)):
                    values.append(columns[i])
                sec.params.append(values)
        if sec.name:
            sections.append(sec)
        return sections

    def load(self):
        cfg = haproxy_cfg()
        com = "cat " + cfg.cfg_path
        (ret, content) = self.ssh.commandAsRoot(com)
        if ret == 0:
            cfg.sections = self.getSections(content)
        return cfg

    def writeConf(self, line):
        com = "echo '" + line + "' >>" + self.cfg_tmp_path
        self.ssh.commandAsRoot(com)

    def save(self):
        if not self.data.edit:
            return "edit flag not set"
        com = "echo '# config written by haproxy_ctl' > " + self.cfg_tmp_path
        self.ssh.commandAsRoot(com)
        for s in self.data.sections:
            line = s.name
            for a in s.attributes:
                line += " " + a
            self.writeConf(line)
            for p in s.params:
                line = "   "
                for v in p:
                    line += " " + v
                self.writeConf(line)
        com = "cat " + self.cfg_tmp_path + " > " + self.data.cfg_path
        (ret, content) = self.ssh.commandAsRoot(com)
        if ret != 0:
            return content
        self.setLoaded(False)
        return ""
from ssh import Ssh
import time

ssh = Ssh(ip='127.0.0.1', user='******', password='',
          private_key_file='/home/cgustave/.ssh/id_rsa', port=22, debug=True)
ssh.trace_open(filename="myTraceFileChannel.log")
ssh.connect()
ssh.invoke_channel()
ssh.channel_send('nc -l 7890\n')
ssh.shell_read()
time.sleep(5)
ssh.shell_read()
time.sleep(10)
ssh.channel_send('toto\n')
ssh.shell_read()
time.sleep(10)
ssh.close()
from ssh import Ssh

# Sample code to test paramiko on a real ssh device using the 'shell' channel
myssh = Ssh(ip='10.205.10.120', user='******', password='******', port=10106, debug=False)
myssh.connect()
myssh.shell_send(['show configuration commands | grep network-emulator'])
myssh.shell_read()
print(myssh.output)
myssh.close()
class HaproxyHttpLog():
    log_path = "/var/log/haproxy.log"
    ssh = None

    def __init__(self, host):
        self.ssh = Ssh(host)

    def getLogLineCount(self):
        com = "wc -l " + self.log_path
        (ret, content) = self.ssh.command(com)
        count = 0
        if ret == 0 and content:
            count = int(content.splitlines()[0].strip().split()[0])
        return count

    def getMilisec(self, timestr):
        cols = timestr.split(':')
        sec = 0
        if len(cols) > 3:
            sec += int(cols[1]) * 60 * 60 * 1000
            sec += int(cols[2]) * 60 * 1000
            sec += int(cols[3].split('.')[0]) * 1000
            sec += int(cols[3].split('.')[1])
        return sec

    def getDuration(self, first, last):
        if first == last:
            return 0
        msec = self.getMilisec(last) - self.getMilisec(first)
        return msec

    def getHowLong(self, to, fm):
        msec = self.getDuration(to, fm)
        sec = msec / 1000
        if sec < 60:
            return str(sec) + " sec. ago"
        min = sec / 60
        if min < 60:
            return str(min) + " min. ago"
        hour = min / 60
        return str(hour) + " hr ago"

    def getServerLastTimestamp(self, server):
        com = "grep " + server + " " + self.log_path + " | grep HTTP | tail -1"
        (ret, content) = self.ssh.commandAsRoot(com)
        timestamp = "N/A"
        if ret == 0 and content:
            columns = content.splitlines()[0].strip().split()
            if len(columns) > 6:
                timestamp = columns[6].strip("[]")
        return timestamp

    def getLog(self, startLogLine, servers):
        totalLines = self.getLogLineCount()
        if totalLines - int(startLogLine) > 10000:
            startLogLine = str(totalLines - 10000)
        if int(startLogLine) > 0:
            startLogLine = str(int(startLogLine) - 1)
        data = http_data()
        for s in servers:
            data.server_data[s] = http_server_data()
        com = "tail -n +" + startLogLine + " " + self.log_path
        (ret, content) = self.ssh.command(com)
        endLogLine = startLogLine
        if ret == 0 and content:
            lines = content.splitlines()
            last_conn_str = None
            # counting
            for line in lines:
                cols = line.strip().split()
                if len(cols) > 6 and data.timestamp_first == "N/A":
                    data.timestamp_first = cols[6].strip("[]")
                    continue
                data.acc_cnt += 1
                if len(cols) > 6:
                    data.timestamp_last = cols[6].strip("[]")
                if len(cols) > 8 and data.server_data.has_key(cols[8]):
                    data.server_data[cols[8]].acc_cnt += 1
                    data.server_data[cols[8]].timestamp_last = cols[6].strip("[]")
                    if len(cols) > 15:
                        data.server_data[cols[8]].last_conn_str = cols[15]
                if len(cols) > 15:
                    last_conn_str = cols[15]
            if data.timestamp_last == "N/A":
                now = datetime.datetime.now()
                data.timestamp_last = now.strftime("%d/%b/%Y:%H:%M:%S") + ".%03d" % (now.microsecond / 1000)
            # durations and throughput
            data.duration_millisec = self.getMilisec(data.timestamp_last) - self.getMilisec(data.timestamp_first)
            if data.duration_millisec > 0:
                data.acc_per_sec = float(data.acc_cnt) / float(data.duration_millisec) * 1000
            # last conn state
            if last_conn_str:
                conn = last_conn_str.split("/")
                if len(conn) > 1:
                    data.fe_conn_cnt = conn[1]
                if len(conn) > 2:
                    data.be_conn_cnt = conn[2]
            # per server data
            for s in servers:
                if data.duration_millisec > 0:
                    data.server_data[s].acc_per_sec = float(data.server_data[s].acc_cnt) / float(data.duration_millisec) * 1000
                if data.server_data[s].last_conn_str:
                    conn = data.server_data[s].last_conn_str.split("/")
                    if len(conn) > 3:
                        data.server_data[s].conn_cnt = conn[3]
                    if len(conn) > 4:
                        data.server_data[s].retry_cnt = conn[4]
                if data.server_data[s].timestamp_last == "N/A":
                    data.server_data[s].timestamp_last = self.getServerLastTimestamp(s)
            # update current log line
            endLogLine = int(startLogLine) + len(lines)
        return (str(endLogLine), data)

    def getLastLog(self):
        com = "tail -1 " + self.log_path
        (ret, content) = self.ssh.command(com)
        data = http_data()
        if ret == 0 and content:
            columns = content.splitlines()[0].strip().split()
            if len(columns) > 6:
                data.timestamp = columns[6].strip("[]")
            if len(columns) > 15:
                conn = columns[15].split("/")
                if len(conn) > 1:
                    data.fe_conn_cnt = conn[1]
                if len(conn) > 2:
                    data.be_conn_cnt = conn[2]
        return data
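# For reference (hypothetical instance `hlog`): getMilisec() converts the time
# part of an haproxy access-log timestamp "dd/Mon/yyyy:HH:MM:SS.mmm" into
# milliseconds since midnight, e.g.
#   hlog.getMilisec("06/Feb/2024:12:00:01.250")  -> 43201250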
def __init__(self, name, host):
    self.name = name
    self.ssh = Ssh(host)
    self.os_version = self.getOSVersion()
    self.state = self.getStatus()
if loc.message:
    message = loc.message

### service ####
service = Service("iptables", req['host'])
if req["service_action"] == "stop":
    message += service.stop()
if req["service_action"] == "start":
    message += service.start()
if req["service_action"] == "reload":
    message += service.reload()

if service.state == "running":
    ### modify rules ###
    if req['command']:
        ssh = Ssh(req['host'])
        if req['command'] == "replace":
            com = "iptables -R FORWARD " + req['rule_id'] + " -j " + req['target']
        if req['command'] == "insert":
            com = "iptables -I FORWARD " + req['rule_id'] + " -j " + req['target']
        if req['command'] == "add":
            com = "iptables -A FORWARD " + " -j " + req['target']
        if req['command'] == "delete":
            com = "iptables -D FORWARD " + req['rule_id']
        else:
            # protocol
            if req['prot'] != "" and req['prot'] != "all":
                com += " -p " + req['prot']
            # input interface
            if req['ifin'] != "" and req['ifin'] != "*":
## request params #########
form = cgi.FieldStorage()
req = {
    'refresh': "0",
    'service_action': "none",
    'y_scroll': "0",
    'lang': "en",
    'tab_id': "-99",
    'host': "localhost",
    'cmd': "ls",
}
for key in req.keys():
    if form.has_key(key):
        req[key] = form[key].value

loc = Localize(req['lang'])
if loc.message:
    message = loc.message

if req['cmd']:
    ssh = Ssh(req['host'])
    com = req['cmd']
    (ret, content) = ssh.commandAsRoot(com)
    if ret == 0 and content:
        message = ""
        lines = content.splitlines()
        for i in range(1, len(lines)):
            message += lines[i] + "<br>"
    else:
        if ret != 0:
            message = "[error] Command failed."
        else:
            message = "[no output]"

### render html #####
def __init__(self, host, stat_str):
    self.ssh = Ssh(host)
    self.tables = self.getTables(stat_str)
    self.getInterfaces()
    self.forward_chain = self.getChain("FORWARD")
from ssh import Ssh
import time

ssh = Ssh(ip='127.0.0.1', user='******', password='',
          private_key_file='/home/cgustave/.ssh/id_rsa', port=22, debug=True)
ssh.trace_open(filename="myTraceFileChannel.log")
#ssh.connect()
#ssh.invoke_channel()
ssh.channel_send('ls -la\n')
data = ssh.channel_read()
print("received {}".format(data))
ssh.close()
from ssh import Ssh

# Sample code to test paramiko on a real ssh device using the 'shell' channel
myssh = Ssh(ip='10.5.0.31', user='******', password='******', port=22, debug=True)
myssh.connect()
myssh.shell_send(["ps -xww | grep qemu-system-x86\n"])
print(myssh.output)
for line in myssh.output.splitlines():
    print("\nline={}".format(line))
myssh.close()
class IptablesStats():
    tables = {}
    ssh = None
    forward_chain = None
    interfaces = []
    message = ""

    def __init__(self, host, stat_str):
        self.ssh = Ssh(host)
        self.tables = self.getTables(stat_str)
        self.getInterfaces()
        self.forward_chain = self.getChain("FORWARD")

    def isTableHeader(self, key):
        if key == "table:" or key == "Table:" or key == "テーブル:":
            return True
        return False

    def getMessage(self):
        return self.message

    def getTables(self, stat_str):
        lines = stat_str.splitlines()
        tables = {}
        tablename = "none"
        chainname = ""
        for line in lines:
            columns = line.strip().split()
            if len(columns) == 0:
                continue
            # label
            if columns[0] == "num":
                continue
            if self.isTableHeader(columns[0]):
                tablename = columns[1]
                tables[tablename] = iptables_table()
                tables[tablename].chains = {}
                continue
            if columns[0] == "Chain":
                chainname = columns[1]
                tables[tablename].chains[chainname] = iptables_chain()
                tables[tablename].chains[chainname].policy = columns[3].strip("()")
                tables[tablename].chains[chainname].rules = []
                continue
            # the line must be a rule:
            if len(columns) >= 6:
                rule = iptables_rule()
                rule.num = columns[0]
                rule.target = columns[1]
                rule.prot = columns[2]
                rule.opt = columns[3]
                rule.source = columns[4]
                rule.destination = columns[5]
                if len(columns) >= 7:
                    for i in range(6, len(columns)):
                        if rule.misc:
                            rule.misc += " "
                        rule.misc += columns[i]
                tables[tablename].chains[chainname].rules.append(rule)
        return tables

    def octet2int(self, ip):
        intIp = []
        for oct in ip.split('.'):
            intIp.append(int(oct))
        return intIp

    def mask2Bits(self, mask):
        intMask = self.octet2int(mask)
        bits = 0
        for i in range(0, len(intMask)):
            binary = bin(intMask[i])
            bits += len(binary.strip('0b'))
        return bits

    def bits2Mask(self, bits):
        b = bits
        mask = ""
        for i in range(0, 4):
            if b >= 8:
                val = 255
            else:
                x = 128
                val = 0
                for j in range(0, b):
                    val += x
                    x /= 2
            mask += str(val)
            if i < 3:
                mask += "."
            b -= 8
        return mask

    def getInterfaces(self):
        com = "ip route"
        self.interfaces = []
        (ret, content) = self.ssh.commandAsRoot(com)
        if ret == 0:
            self.message = content
            lines = content.splitlines()
            for line in lines:
                columns = line.split()
                if columns[0] == 'default':
                    continue
                iface = interface()
                sn = columns[0].split('/')
                iface.subnet = sn[0]
                iface.mask = sn[1]
                iface.name = columns[2]
                self.interfaces.append(iface)

    def getChain(self, chain):
        com = "iptables -vnx --line-numbers -L " + chain
        chain = iptables_chain()
        (ret, content) = self.ssh.commandAsRoot(com)
        if ret == 0:
            lines = content.splitlines()
            for line in lines:
                columns = line.strip().split()
                if len(columns) == 0:
                    continue
                # label
                if columns[0] == "num":
                    continue
                if columns[0] == "Chain":
                    chain.policy = columns[3].strip("()")
                    chain.rules = []
                    continue
                # the line must be a rule:
                if len(columns) >= 6:
                    rule = iptables_rule()
                    rule.num = columns[0]
                    rule.pkts = columns[1]
                    rule.bytes = columns[2]
                    rule.target = columns[3]
                    rule.prot = columns[4]
                    rule.opt = columns[5]
                    rule.ifin = columns[6]
                    rule.ifout = columns[7]
                    rule.source = columns[8]
                    rule.destination = columns[9]
                    if len(columns) >= 11:
                        for i in range(10, len(columns)):
                            if rule.misc:
                                rule.misc += " "
                            rule.misc += columns[i]
                            miscItem = columns[i].split(":")
                            if len(miscItem) == 2:
                                if miscItem[0] == "dpt":
                                    rule.dport = miscItem[1]
                                if miscItem[0] == "spt":
                                    rule.sport = miscItem[1]
                            if i > 10 and columns[i-1] == "state":
                                rule.state = columns[i]
                    chain.rules.append(rule)
        return chain
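# For reference (hypothetical instance `stats`): the netmask helpers convert
# between dotted-quad masks and prefix lengths, e.g.
#   stats.mask2Bits("255.255.255.0")  -> 24
#   stats.bits2Mask(24)               -> "255.255.255.0"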
import env
from ssh import Ssh

ssh = Ssh()
ssh_cylc = Ssh()
class Service():
    state = None
    ssh = None
    name = None
    content = None
    os_version = "el6"

    def __init__(self, name, host):
        self.name = name
        self.ssh = Ssh(host)
        self.os_version = self.getOSVersion()
        self.state = self.getStatus()

    def getOSVersion(self):
        com = "uname -r"
        (ret, val) = self.ssh.commandAsRoot(com)
        if val.find("el6") > -1:
            return "el6"
        elif val.find("el7") > -1:
            return "el7"
        else:
            return "unknown"

    def getStatus(self):
        if self.os_version == "el7":
            if self.name == "iptables":
                com = "iptables -V"
                (ret, self.content) = self.ssh.commandAsRoot(com)
                if ret == 0:
                    (ret, content) = self.ssh.commandAsRoot("iptables -L")
                    self.content = "table: filter\r" + content
                    (ret, content) = self.ssh.commandAsRoot("iptables -L -t nat")
                    self.content += "\rtable: nat\r" + content
                    (ret, content) = self.ssh.commandAsRoot("iptables -L -t mangle")
                    self.content += "\rtable: mangle\r" + content
                    return "running"
                if ret == 3:
                    return "stopped"
                return "unknown :" + str(ret)
            return "OS(el7) not supported."
        com = "service " + self.name + " status"
        (ret, self.content) = self.ssh.commandAsRoot(com)
        ret = ret >> 8
        if ret == 0:
            return "running"
        if ret == 3:
            return "stopped"
        return "unknown :" + str(ret)

    def start(self):
        com = "service " + self.name + " start"
        (ret, self.content) = self.ssh.commandAsRoot(com)
        if ret:
            self.state = self.getStatus()
            return "cannot start " + self.name + ":" + self.content
        self.state = "running"
        return ""

    def stop(self):
        com = "service " + self.name + " stop"
        (ret, self.content) = self.ssh.commandAsRoot(com)
        if ret:
            self.state = self.getStatus()
            return "cannot stop " + self.name + ":" + self.content
        self.state = "stopped"
        return ""

    def reload(self):
        com = "service " + self.name + " reload"
        (ret, self.content) = self.ssh.commandAsRoot(com)
        if ret:
            self.state = self.getStatus()
            return "cannot reload " + self.name + ":" + self.content
        self.state = "running"
        return ""

    def getMessage(self):
        return self.content
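# Usage sketch (hypothetical host): Service wraps "service <name> status/start/
# stop/reload" over ssh and keeps the last command output in self.content.
svc = Service("iptables", "fw01.example.com")
print(svc.state)   # "running", "stopped" or "unknown :<code>"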
def deploy_to_stage(self):
    if self.is_config_valid(self.config_json, 'stage'):
        print('===== Preparing deploy to STAGE Server =====')
        ssh = Ssh(self.get_ssh_opts('stage'))
        ssh.deploy()
def __init__(self, host):
    self.ssh = Ssh(host)
def __init__(self):
    self.__connect = Ssh(SSH_IP, SSH_USERNAME, SSH_PASSWORD)
    raw_response = self.__connect.sendCommand("config paging disable\n")
def deploy_to_dev(self):
    if self.is_config_valid(self.config_json, 'dev'):
        print('===== Preparing deploy to DEV Server =====')
        ssh = Ssh(self.get_ssh_opts('dev'))
        ssh.deploy()
class Finder:
    __connect = None

    def __init__(self):
        self.__connect = Ssh(SSH_IP, SSH_USERNAME, SSH_PASSWORD)
        raw_response = self.__connect.sendCommand("config paging disable\n")

    def findSingleDevice(self, macAddress):
        if self.__connect != None:
            raw_response = self.__connect.sendCommand("show client summary\n")
            if raw_response != None:
                resp_array = raw_response.split('\n')
                for item in resp_array:
                    if re.search(r'\b([0-9a-fA-F]{2}:??){5}([0-9a-fA-F]{2})\b', item):
                        if macAddress in item:
                            device_info = item.split(' ')
                            data = {"MacAddress": device_info[0].upper(),
                                    "AccessPoint": device_info[1].upper()}
                            json_data = json.dumps(data, encoding='UTF-8')
                            return json_data
            else:
                return None

    def findMultipleDevices(self, devices):
        if self.__connect != None:
            raw_response = self.__connect.sendCommand("show client summary\n")
            if raw_response != None:
                resp_array = raw_response.split('\n')
                client_array = []
                for device in devices:
                    mac_add = device['MacAddress'].upper()
                    if mac_add != "00:00:00:00:00:00":
                        for item in resp_array:
                            if re.search(r'\b([0-9a-fA-F]{2}:??){5}([0-9a-fA-F]{2})\b', item):
                                device_info = item.split(' ')
                                # macaddress to uppercase
                                device_info[0] = device_info[0].upper()
                                device_info[1] = device_info[1].upper()
                                if mac_add in device_info[0]:
                                    item_data = {"MacAddress": device_info[0],
                                                 "AccessPoint": device_info[1]}
                                    client_array.append(item_data)
                if client_array != None:
                    data = {"Clients": client_array}
                    json_data = json.dumps(data, encoding='UTF-8')
                    return json_data
                else:
                    return None
            else:
                return None

    def findAllDevices(self):
        if self.__connect != None:
            raw_response = self.__connect.sendCommand("show client summary")
            if raw_response != None:
                resp_array = raw_response.split('\n')
                client_array = []
                for item in resp_array:
                    if re.search(r'\b([0-9a-fA-F]{2}:??){5}([0-9a-fA-F]{2})\b', item):
                        device_info = item.split(' ')
                        item_data = {"MacAddress": device_info[0].upper(),
                                     "AccessPoint": device_info[1].upper()}
                        client_array.append(item_data)
                if client_array != None:
                    data = {"Clients": client_array}
                    json_data = json.dumps(data, encoding='UTF-8')
                    return json_data
                else:
                    return None
            else:
                return None
        else:
            return None
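# Usage sketch (controller credentials come from SSH_IP/SSH_USERNAME/SSH_PASSWORD;
# the MAC address below is hypothetical): findSingleDevice() returns a JSON string
# with the access point a client is associated to, or None if it is not found.
finder = Finder()
print(finder.findSingleDevice("AA:BB:CC:DD:EE:FF"))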
def deploy_to_prod(self):
    if self.is_config_valid(self.config_json, 'prod'):
        print('===== Preparing deploy to PROD Server =====')
        ssh = Ssh(self.get_ssh_opts('prod'))
        ssh.deploy()
from ssh import Ssh

myssh = Ssh(ip='127.0.0.1', user='******', password='', debug=True)
myssh.trace_open(filename="test4File.log")
myssh.mock(context='default')
myssh.connect()
myssh.execute(["ps -ef"])
myssh.shell_read()
myssh.close()