class FsmParser(object):
    """A log parser driven by a TextFSM template."""

    def __init__(self, fsm_file=None):
        self.fsm = None
        if fsm_file:
            self.load_template(fsm_file)

    def load_template(self, fsm_file):
        """Load an FSM template file and build a TextFSM parser from it."""
        with open(fsm_file) as fp:
            self.fsm = TextFSM(fp)
        return self.fsm

    def parse(self, logfile=None, logbuf=None):
        """Return a list of dicts, one per parsed row."""
        if logfile:
            with open(logfile) as fp:
                logtext = fp.read()
        elif logbuf:
            logtext = "".join(logbuf)
        else:
            return None
        self.fsm.Reset()
        rows = self.fsm.ParseText(logtext)
        return [dict(zip(self.fsm.header, row)) for row in rows]
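# Usage sketch (not part of the original snippet): it assumes the textfsm
# package is installed, TextFSM is imported at module level, and the
# hypothetical files "show_version.textfsm" and "show_version.log" exist.
parser = FsmParser("show_version.textfsm")
records = parser.parse(logfile="show_version.log")
for record in records:
    # Each record is a dict keyed by the template's Value names.
    print(record)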
def parse(path):
    plugin_loader.content.py_load_plugins(globals())
    from os.path import dirname

    raw_data = file.load(path)
    base_path = dirname(__file__)
    fields = None
    for line in raw_data.split("\n"):
        if line.startswith("#Fields"):
            fields = line.replace("#Fields: ", "")
            break
    if not fields:
        raise ValueError(f"Cannot find field description in file {path}")
    fsm_mappings = []
    for field, mapping in yaml.loads(
            file.load(f"{base_path}/fsm/aclog.mapping.yaml"))["fields"].items():
        if isinstance(mapping["name"], str):
            name = str(mapping["name"])
            fields = fields.replace("%" + field, "${" + name + "}")
            if f"Value Required {name} {mapping['regex']}" not in fsm_mappings:
                fsm_mappings.append(f"Value Required {name} {mapping['regex']}")
        elif isinstance(mapping["name"], list):
            replacement = ""
            for index, name in enumerate(mapping["name"]):
                if name:
                    replacement += "${" + str(name) + "}"
                    if (f"Value Required {name} {mapping['regex'][index]}"
                            ) not in fsm_mappings:
                        fsm_mappings.append(
                            f"Value Required {name} {mapping['regex'][index]}")
                else:
                    replacement += str(mapping["regex"][index])
            fields = fields.replace("%" + field, replacement)
    if "%" in fields:
        raise NotImplementedError(f"Missing mapping for field. {fields}")
    dynamic_fsm_template = task.run(
        dirname(__file__) + "/fsm/aclog.textfsm.j2",
        {"MAPPINGS": "\n".join(fsm_mappings), "LINE_SPEC": fields},
        output_plugin="null",
    )[0]["result"]
    log.debug(dynamic_fsm_template)
    re_table = TextFSM(StringIO(dynamic_fsm_template))
    retval = []
    for row in re_table.ParseText(raw_data):
        tmp = {}
        for i, v in enumerate(re_table.header):
            tmp[v] = row[i]
        retval.append(tmp)
    return retval
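# The parse() function above renders "fsm/aclog.textfsm.j2" with MAPPINGS (the
# generated Value declarations) and LINE_SPEC (the #Fields line rewritten with
# ${...} references); that Jinja2 file is not shown here. The sketch below
# reproduces the same dynamic-template technique with plain jinja2, using an
# assumed, simplified template body and made-up field names.
from io import StringIO
from jinja2 import Template
from textfsm import TextFSM

assumed_j2 = """\
{{ MAPPINGS }}

Start
  ^{{ LINE_SPEC }} -> Record
"""

rendered = Template(assumed_j2).render(
    MAPPINGS="Value Required DATE (\\S+)\nValue Required CLIENT_IP (\\S+)",
    LINE_SPEC="${DATE} ${CLIENT_IP}",
)
fsm = TextFSM(StringIO(rendered))
print(fsm.header)  # ['DATE', 'CLIENT_IP']
print(fsm.ParseText("2024-01-01 192.0.2.10"))  # [['2024-01-01', '192.0.2.10']]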
def execute(self, line, timeout=-1, wait=0.0, regex="", auto_endline=True,
            template="", timeout_fail=False):
    retval = list()  # Always return a list, even if no template is given
    fh = None
    if timeout < 0:
        timeout = self.command_timeout
    assert (self.child is not None), "Cannot execute a command on a closed session"
    assert isinstance(line, str) or isinstance(line, unicode)
    assert isinstance(timeout, int)
    assert isinstance(wait, float) or isinstance(wait, int)
    assert isinstance(regex, str)
    assert isinstance(auto_endline, bool)
    assert isinstance(template, str)
    assert timeout > 0
    assert float(wait) >= 0.0
    expect_prompts = ['[\n\r]\S+?>', '[\n\r]\S+?#']
    if regex:
        expect_prompts.append(regex)
    try:
        if auto_endline:
            self.child.sendline(line)
        else:
            self.child.send(line)
        self.child.expect(expect_prompts, timeout)
        time.sleep(wait)
    except pexpect.TIMEOUT:
        if timeout_fail:
            raise ExecuteTimeout(
                "Timeout after executing '{0}'".format(line))
        else:
            self._go_interact_timeout()
    except pexpect.EOF:
        raise UnexpectedConnectionClose(
            "Connection died while executing '{0}'".format(line))
    if template:
        if os.path.isfile(template):
            fh = open(template)
        else:
            fh = StringIO(template)
        fsm = TextFSM(fh)
        retval = fsm.ParseText(self.response)
        fh.close()
    return retval
def parse_command_output(template, command_output):
    with open(template) as f:
        tabl = TextFSM(f)
    headers = tabl.header
    res = tabl.ParseText(command_output)
    result, intdict = [], {}
    for interf in res:
        for i in range(0, len(headers)):
            intdict[headers[i]] = interf[i]
        result.append(intdict.copy())
    return result
def scrub_data_with_fsm(raw_data, fsm_file):
    try:
        fsm_file_with_full_path = os.path.join(FSMS_DIR, fsm_file)
        with open(fsm_file_with_full_path, "r") as fsm_data:
            message.info(f"Scrubbing output with {fsm_file}")
            fsm = TextFSM(fsm_data)
            parsed_output = fsm.ParseText(raw_data)
            return [dict(zip(fsm.header, output)) for output in parsed_output]
    except FileNotFoundError:
        message.fail("FSM file not found in Alfred's directory")
        return raw_data
def func(txt_file, tmpl_file):
    try:
        tm = TextFSM(open(tmpl_file))
        raw = open(txt_file).read()
        header = tm.header
        fsm = tm.ParseText(raw)
        print(raw)
        print(header)
        print(fsm)
    except Exception as e:
        raise e
def job(
    self,
    payload: dict,
    device: Optional[Device] = None,
    parent: Optional[Job] = None,
) -> dict:
    result, success = {}, True
    for i in range(1, 4):
        variable = getattr(self, f"variable{i}")
        if not variable:
            continue
        query = self.sub(getattr(self, f"query{i}"), locals())
        try:
            engine = factory.YaqlFactory().create()
            value = engine(query).evaluate(data=payload)
        except Exception as exc:
            success = False
            result[variable] = f"Wrong YaQL query for {variable} ({exc})"
            continue
        match_type = getattr(self, f"match_type{i}")
        match = getattr(self, f"match{i}")
        result[variable] = {
            "query": query,
            "match_type": match_type,
            "match": match,
            "value": (
                value
                if match_type == "none"
                else findall(match, value)
                if match_type == "regex"
                else TextFSM(StringIO(match)).ParseText(value)
            ),
        }
    return {"result": result, "success": success}
def job(self, run, payload, device=None):
    result, success = {}, True
    for i in range(1, 4):
        variable = getattr(run, f"variable{i}")
        if not variable:
            continue
        query = getattr(run, f"query{i}")
        try:
            variables = locals()
            variables.pop("query")
            value = run.eval(query, **variables)
        except Exception as exc:
            success = False
            result[variable] = f"Wrong Python query for {variable} ({exc})"
            continue
        match_type = getattr(run, f"match_type{i}")
        match = getattr(run, f"match{i}")
        operation = getattr(run, f"operation{i}")
        value = (
            value
            if match_type == "none"
            else findall(match, value)
            if match_type == "regex"
            else TextFSM(StringIO(match)).ParseText(value)
        )
        run.payload_helper(
            payload, variable, value, device=device.name, operation=operation
        )
        result[variable] = {
            "query": query,
            "match_type": match_type,
            "match": match,
            "value": value,
        }
    return {"result": result, "success": success}
def get_command_fsm(self, command_output, template_name):
    """Parse a command-output file with a TextFSM template and report accuracy."""
    _res = {}
    try:
        self._template = TextFSM(open(template_name))
        _raw = open(command_output).read()
        self._fsm = self._template.ParseText(_raw)
        _res['PARSE_OUTPUT'] = bool(len(self._fsm))
        _res['fsm'] = self._fsm
        _res['fsm_dict'] = [dict(zip(self._template.header, fsm))
                            for fsm in self._fsm]
        _res['header'] = self._template.header
    except Exception as _e:
        _res['output'] = str(_e)
        _res['PARSE_OUTPUT'] = False
        _res['fsm_dict'] = []
        _res['header'] = []
    print(_res)
    print("-".center(100, "-"))
    data = []
    for j in [dict(zip(self._template.header, i)) for i in self._fsm]:
        print(j)
        data.append(j)
    print("-".center(100, "-"))
    no_of_records = len(_res['fsm_dict'])
    no_of_header_attribute = len(_res['header'])
    percentage_of_attribute = 0
    try:
        percentage_of_record = 100 / no_of_records
        percentage_of_attribute = percentage_of_record / no_of_header_attribute
    except ZeroDivisionError:
        print("Divide By Zero")
    template_percentage = 0
    for i in _res['fsm_dict']:
        for j in _res['header']:
            if i[j] != '':
                template_percentage += percentage_of_attribute
    print("Total_Records:", no_of_records)
    print("No_Header_Attribute:", no_of_header_attribute)
    print("-".center(100, "-"))
    print("Accuracy =", template_percentage, "%")
    print("-".center(100, "-"))
def runTFSMParser(outputtext, templatefile):
    print("running parser... using file: " + templatefile)
    # open the textfsm template file - TextFSM requires an already open file handle
    template_file_handle = open(templatefile)
    tfsmObject = TextFSM(template_file_handle)
    fsm_results = tfsmObject.ParseText(outputtext)
    resultlist = []
    for eachline in fsm_results:
        resultitem = {}
        current_column = 0
        for eachcolname in tfsmObject.header:
            resultitem[eachcolname] = eachline[current_column]
            current_column = current_column + 1
        resultlist.append(resultitem)
    if debug:
        pp(resultlist)
    return resultlist
def get_optics(self, interface=None):
    template_path = os.path.join(self._template_location(),
                                 'show_ports_transceiver_information_detail.tpl')
    template = open(template_path)
    re_table = TextFSM(template)
    command = 'show ports transceiver information detail'
    show_ports = self.device.send_command(command)
    structured = re_table.ParseText(show_ports)

    optics = {}
    for item in structured:
        if not item[8] or item[8] == "1":
            # First / only channel
            optics[item[0]] = {}
            optics[item[0]]["physical_channels"] = {}
            optics[item[0]]["physical_channels"]["channel"] = []
        channel = {
            "index": int(item[8]) - 1 if item[8] else 0,
            "state": {
                "input_power": {
                    "instant": float(item[9] or '0.0'),
                    "avg": 0.0,
                    "min": 0.0,
                    "max": 0.0,
                },
                "output_power": {
                    "instant": float(item[10] or '0.0'),
                    "avg": 0.0,
                    "min": 0.0,
                    "max": 0.0,
                },
                "laser_bias_current": {
                    "instant": float(item[11] or '0.0'),
                    "avg": 0.0,
                    "min": 0.0,
                    "max": 0.0,
                },
            },
        }
        optics[item[0]]["physical_channels"]["channel"].append(channel)
    return optics
def f1(input_file, template_file):
    try:
        _tm = TextFSM(open(template_file))
        _raw = open(input_file).read()
        _header = _tm.header
        _fsm = _tm.ParseText(_raw)
        print("-".center(100, "-"))
        print(_raw)
        print("-".center(100, "-"))
        print(_header)
        print("-".center(100, "-"))
        print(_fsm)
        print("-".center(100, "-"))
        for j in [dict(zip(_header, i)) for i in _fsm]:
            print(j)
        print("-".center(100, "-"))
        print(len(_fsm))
        print("-".center(100, "-"))
    except Exception as e:
        raise e
def get_interfaces_list(device):
    output_interfaces = device.send_command('show interfaces')
    current_dir = os.getcwd()
    template_file = open(current_dir + "/controller/show_interface.template", "r")
    template = TextFSM(template_file)
    parsed_interfaces = template.ParseText(output_interfaces)
    interface_list = []
    for interface_data in parsed_interfaces:
        resultDict = {}
        resultDict["interface"] = interface_data[0]
        resultDict["mac address"] = interface_data[1]
        resultDict["ip address"] = interface_data[2]
        resultDict["MTU"] = interface_data[3]
        resultDict["bandwidth"] = interface_data[4]
        interface_list.append(resultDict)
    return interface_list
def tfsm(self, template=None, input_str=None):
    """Run the textfsm template against input_str"""
    assert isinstance(template, str)
    assert isinstance(input_str, str)
    if os.path.isfile(os.path.expanduser(str(template))):
        # open the textfsm template from disk...
        fh = open(template, "r")
    else:
        # build a fake filehandle around textfsm template string
        fh = StringIO(template)
    fsm = TextFSM(fh)
    header = fsm.header
    values = fsm.ParseText(input_str)
    assert values != [], "Could not match any values with the template."

    ## Pack the extracted values into a list of dicts, using keys from
    ## the header file
    retval = list()

    # Values is a nested list of captured information
    for ii in (values, values[0]):
        if not isinstance(ii, list):
            continue
        for row in ii:
            try:
                # Require the row to be a list
                assert isinstance(row, list)
                # Require row to be exactly as long as the header list
                assert len(row) == len(header)
                row_dict = {}
                for idx, value in enumerate(row):
                    row_dict[header[idx]] = value
                retval.append(row_dict)
            except AssertionError:
                break
    if len(retval) > 0:
        return retval
    else:
        raise ValueError("Cannot parse the textfsm template")
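# Usage sketch (not from the original source): tfsm() above accepts either a
# path to a template file or the template text itself, so an inline template
# can be passed directly. "session" stands for a hypothetical object exposing
# this method; the template and sample output are made up for illustration.
inline_template = """\
Value INTF (\\S+)
Value STATUS (up|down)

Start
  ^${INTF} is ${STATUS} -> Record
"""

sample_output = "GigabitEthernet1 is up\nGigabitEthernet2 is down\n"
rows = session.tfsm(template=inline_template, input_str=sample_output)
# rows == [{'INTF': 'GigabitEthernet1', 'STATUS': 'up'},
#          {'INTF': 'GigabitEthernet2', 'STATUS': 'down'}]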
def f1(input_file, template_file, output_file):
    try:
        _tm = TextFSM(open(template_file))
        _raw = open(input_file).read()
        _header = _tm.header
        _fsm = _tm.ParseText(_raw)
        print("-".center(100, "-"))
        print(_raw)
        print("-".center(100, "-"))
        print(_header)
        print("-".center(100, "-"))
        print(_fsm)
        print("-".center(100, "-"))
        data = []
        for j in [dict(zip(_header, i)) for i in _fsm]:
            print(j)
            data.append(j)
        print("-".center(100, "-"))
        print(len(_fsm))
        print("-".center(100, "-"))
        # output_file is taken as a parameter so the JSON dump has a destination
        open(output_file, 'w').write(json.dumps(data))
    except Exception as e:
        raise e
class TextFSMHandler:

    def __init__(self):
        self._fsm = []
        self._template = ""

    def get_command_fsm(self, command_output, template_name):
        """Parse a command-output file with a TextFSM template and report accuracy."""
        _res = {}
        try:
            self._template = TextFSM(open(template_name))
            _raw = open(command_output).read()
            self._fsm = self._template.ParseText(_raw)
            _res['PARSE_OUTPUT'] = bool(len(self._fsm))
            _res['fsm'] = self._fsm
            _res['fsm_dict'] = [
                dict(zip(self._template.header, fsm)) for fsm in self._fsm
            ]
            _res['header'] = self._template.header
        except Exception as _e:
            _res['output'] = str(_e)
            _res['PARSE_OUTPUT'] = False
            _res['fsm_dict'] = []
            _res['header'] = []
        print(_res)
        print("-".center(100, "-"))
        data = []
        for j in [dict(zip(self._template.header, i)) for i in self._fsm]:
            print(j)
            data.append(j)
        print("-".center(100, "-"))
        print(len(self._fsm))
        print("-".center(100, "-"))
        no_of_records = len(_res['fsm_dict'])
        no_of_header_attribute = len(_res['header'])
        template_percentage = 0
        # Guard against an empty parse so the division cannot raise ZeroDivisionError.
        if no_of_records and no_of_header_attribute:
            percentage_of_record = 100 / no_of_records
            percentage_of_attribute = percentage_of_record / no_of_header_attribute
            for i in _res['fsm_dict']:
                for j in _res['header']:
                    if i[j] != '':
                        template_percentage += percentage_of_attribute
        print(template_percentage)
       E1 - OSPF external type 1, E2 - OSPF external type 2
       i - IS-IS, su - IS-IS summary, L1 - IS-IS level-1, L2 - IS-IS level-2
       ia - IS-IS inter area, * - candidate default, U - per-user static route
       o - ODR, P - periodic downloaded static route

Gateway of last resort is 194.0.0.2 to network 0.0.0.0

     1.0.0.0/32 is subnetted, 1 subnets
S       1.1.1.1 [1/0] via 212.0.0.1
                [1/0] via 192.168.0.1
     2.0.0.0/24 is subnetted, 1 subnets
S       2.2.2.0 is directly connected, FastEthernet0/0.100
     4.0.0.0/16 is subnetted, 1 subnets
O E2    4.4.0.0 [110/20] via 194.0.0.2, 1d18h, FastEthernet0/0.100
     5.0.0.0/24 is subnetted, 1 subnets
D EX    5.5.5.0 [170/2297856] via 10.0.1.2, 00:12:01, Serial0/0"""
]

for r in res:
    print("=" * 80)
    print(r)
    print("*" * 80)
    parser = TextFSM(StringIO(template))
    hdrs, vals = parser.header, parser.ParseText(r)
    df = DataFrame(vals, columns=hdrs)
    print(tabulate(df, headers=hdrs))
    print("=" * 80)
    import pdb
    pdb.set_trace()
class TextFSMHandler:
    """TextFSM handler class."""

    def insert_to_csv(self, _record):
        self.csv_file.write(_record + "\n")

    def __init__(self):
        """Initialise values to defaults."""
        self._fsm = []
        self._template = None
        self._json_output = []
        self._status = False
        self._header = []
        self.csv_file = open('output_template.csv', 'w')

    def get_command_fsm(self, command_output, template_name):
        """Parse command output with a TextFSM template."""
        try:
            self._template = TextFSM(open(template_name))
            self._fsm = self._template.ParseText(command_output)
            self._json_output = [
                dict(zip(self._template.header, fsm)) for fsm in self._fsm
            ]
            self._header = self._template.header
            self._status = True
            self.name = template_name
        except Exception:
            raise

    def get_template_accuracy(self):
        no_of_records = len(self._json_output)
        no_of_header_attribute = len(self._header)
        percentage_of_record = 100 / no_of_records
        percentage_of_attribute = percentage_of_record / no_of_header_attribute
        template_percentage = 0
        temp_name = self.name
        for i in self._json_output:
            for j in self._header:
                if i[j] != '':
                    template_percentage += percentage_of_attribute
        print("-".center(100, "-"))
        print("Template_Location:", temp_name)
        print("Accuracy :", template_percentage, "%")
        print("No_of_Header:", no_of_header_attribute)
        print("No_of_Records:", no_of_records)
        print("Percentage_of_Record", percentage_of_record)
        print("Percentage_of_attribute", percentage_of_attribute)
        print("-".center(100, "-"))
        record = {
            "Template_name": temp_name,
            "No_of_Header": no_of_header_attribute,
            "Accuracy": template_percentage,
            "Percentage_of_Record": percentage_of_record,
            "No_of_Records": no_of_records,
            "Percentage_of_attribute": percentage_of_attribute,
        }
        result = mycol.insert_one(record)
        csv_output_record = (
            temp_name + "," + str(template_percentage) + "," +
            str(no_of_header_attribute) + "," + str(no_of_records) + "," +
            str(percentage_of_record) + "," + str(percentage_of_attribute)
        )
        self.insert_to_csv(csv_output_record)
def execute(self, line, timeout=-1, wait=0.0, regex="", auto_endline=True,
            template="", timeout_fail=False):
    retval = list()  # Always return a list, even if no template is given
    fh = None
    if timeout < 0:
        timeout = self.command_timeout
    assert (self.child is not None), "Cannot execute a command on a closed session"
    assert self.hostname is not None
    assert isinstance(line, str) or isinstance(line, unicode)
    assert isinstance(timeout, int)
    assert isinstance(wait, float) or isinstance(wait, int)
    assert isinstance(regex, str)
    assert isinstance(auto_endline, bool)
    assert isinstance(template, str)
    assert timeout > 0
    assert float(wait) >= 0.0
    expect_prompts = ['[\n\r]{0}\S*?>'.format(self.hostname),
                      '[\n\r]{0}\S*?#'.format(self.hostname)]
    if regex:
        expect_prompts.append(regex)
    try:
        if self.debug:
            _log.debug("sending: '{0}'".format(
                Fore.YELLOW + line + Fore.GREEN))
        if auto_endline:
            self.child.sendline(line)
        else:
            self.child.send(line)
        if self.debug:
            dbgmsg = Fore.GREEN + "Waiting for prompts using list:\n"
            for idx, ii in enumerate(expect_prompts):
                ## Using repr() below to make newlines printable...
                dbgmsg += Fore.MAGENTA + " index {0}: ".format(idx) + \
                    Fore.YELLOW + repr(ii) + Fore.GREEN + os.linesep
            _log.debug(dbgmsg)
        result = self.child.expect(expect_prompts, timeout)
        if self.debug:
            _log.debug("Matched prompt index: '{0}'".format(
                Fore.MAGENTA + str(result) + Fore.GREEN))
        time.sleep(wait)
    except pexpect.TIMEOUT:
        if timeout_fail:
            raise ExecuteTimeout("Timeout after executing '{0}'".format(line))
        else:
            self._go_interact_timeout()
    except pexpect.EOF:
        raise UnexpectedConnectionClose(
            Fore.RED + "Connection to {0} ({1}) died while executing '{2}'".format(
                self.host, self.hostname, line) + Fore.GREEN)
    if template:
        if os.path.isfile(template):
            fh = open(template)
        else:
            fh = StringIO(template)
        fsm = TextFSM(fh)
        if self.debug:
            _log.debug("Sending to textfsm: '''{0}'''".format(
                Fore.YELLOW + self.response + Fore.GREEN))
        retval = fsm.ParseText(self.response)
        fh.close()
    return retval
def parse_command_output(template, command_output):
    with open(template) as f:
        tabl = TextFSM(f)
    headers = tabl.header
    res = tabl.ParseText(command_output)
    return [headers, res]
from net_12_ssh.ssh_sftp.ssh_client_netmiko import netmiko_show_cred
from textfsm import TextFSM
# Chinese translation of the TextFSM docs: https://www.jianshu.com/p/e75daa3af0a4
from pprint import pprint

raw_result = netmiko_show_cred('10.1.1.253', 'admin', 'Cisc0123',
                               'show ip interface brief')
print(raw_result)
# raw_result = """
# Interface              IP-Address      OK? Method Status                Protocol
# GigabitEthernet1       10.1.1.253      YES NVRAM  up                    up
# GigabitEthernet2       20.1.1.1        YES manual up                    up
# GigabitEthernet3       30.1.1.253      YES manual administratively down down
# """

f = open('testfsm_template_(show ip interface brief).template')
# Notes on the non-capturing group (?:) used in the template regexes:
# it matches 'x' but does not remember the match. These "non-capturing
# parentheses" let you define a sub-expression that regex operators apply to
# as a whole. For example, in /(?:foo){1,2}/ the {1,2} applies to the whole
# word 'foo'; in /foo{1,2}/ it would apply only to the final 'o'.
# https://developer.mozilla.org/zh-CN/docs/Web/JavaScript/Guide/Regular_Expressions
# Value INTER (\D+\d+((/\d+)+(\.\d+)?)?)
#   \D+ requires leading non-digits, \d+ then digits, and ((/\d+)+(\.\d+)?)?
#   allows an optional "/" or "." sub-interface suffix.
# Value IPADD (\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b)
#   \b matches a word boundary; each octet may be 25x, 2[0-4]x, 1xx, and so on.
template = TextFSM(f)
show_vlan_dict = template.ParseText(raw_result)
pprint(show_vlan_dict)
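# The template file itself is not shown above. The INTER and IPADD Value lines
# below are taken from the comments; the STATUS/PROTOCOL values and the Start
# rule are an assumed sketch of what
# "testfsm_template_(show ip interface brief).template" might contain,
# inlined via StringIO instead of a file handle.
from io import StringIO

assumed_template = r"""Value INTER (\D+\d+((/\d+)+(\.\d+)?)?)
Value IPADD (\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b)
Value STATUS (up|down|administratively down)
Value PROTOCOL (up|down)

Start
  ^${INTER}\s+${IPADD}\s+\w+\s+\w+\s+${STATUS}\s+${PROTOCOL}\s*$$ -> Record
"""

inline_parser = TextFSM(StringIO(assumed_template))
pprint(inline_parser.ParseText(raw_result))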
for ip in list_ip:
    print("connecting to router at " + ip)
    cisco_vios = {
        'device_type': 'cisco_ios',
        'ip': ip,
        'username': credential['username'],
        'password': credential['password'],
    }
    net_connect = ConnectHandler(**cisco_vios)
    neighbors = net_connect.send_command(
        "show cdp neighbors detail | include Device").replace("Device ID: ", "")
    neighbors_raw = neighbors.split('\n')
    running_configuration = net_connect.send_command("show run")
    template_file = open("show_run.template")
    template = TextFSM(template_file)
    result = template.ParseText(running_configuration)
    for item in result:
        hostname = ('.'.join(item))
    for item in neighbors_raw:
        graph.add_edge(hostname, item)

draw(graph, with_labels=True)
show()