def process_files(conf, targets, rflag, fflag):
    """Parse the given log files and register their messages in the log DB.

    Args:
        conf: project configuration object (sections "general", ...).
        targets: explicit list of source paths; if empty, the paths come
            from the "general"/"src_path" configuration option.
        rflag: if true, walk the source paths recursively.
        fflag: forwarded to LogData (reset/format flag for the DB layer).
    """
    # Resolve the list of input files, honouring the recursion flag.
    if targets:
        src = fslib.recur_dir(targets) if rflag else fslib.rep_dir(targets)
    else:
        default_paths = conf.getlist("general", "src_path")
        # NOTE: getboolean is evaluated first, matching the configured default.
        if conf.getboolean("general", "src_recur") or rflag:
            src = fslib.recur_dir(default_paths)
        else:
            src = fslib.rep_dir(default_paths)

    lp = logparser.LogParser(conf)
    ld = LogData(conf, fflag)
    ld.set_ltm()

    start_dt = datetime.datetime.now()
    _logger.info("log_db task start")
    for fp in src:
        with open(fp, 'r') as f:
            _logger.info("log_db processing {0}".format(fp))
            for line in f:
                dt, host, l_w, l_s = lp.process_line(line)
                if l_w is None:
                    # Line was filtered out by the parser; nothing to store.
                    continue
                ltline = ld.ltm.process_line(l_w, l_s)
                if ltline is None:
                    _logger.warning(
                        "Log template not found for message [{0}]".format(line))
                else:
                    ld.add_line(ltline.ltid, dt, host, l_w)
    # Commit once after all files are processed.
    ld.commit_db()
    end_dt = datetime.datetime.now()
    _logger.info("log_db task done ({0})".format(end_dt - start_dt))
def mk_dict(self, src_path):
    """Accumulate per-word counts over every log line found under src_path.

    Lines that the parser rejects (word list is None) are skipped;
    counting itself is delegated to self._count_line.
    """
    for path in fslib.rep_dir(src_path):
        with open(path, 'r') as f:
            for line in f:
                dt, host, words, symbols = logparser.process_line(line)
                if words is None:
                    continue
                self._count_line(words)
def result_areas(conf):
    """Return the distinct areas appearing in the saved PC results.

    Scans every result file under the configured output directory and
    collects each result's area, deduplicated.
    """
    out_dir = conf.get("dag", "output_dir")
    areas = {PCOutput(conf).load(fp).area for fp in fslib.rep_dir(out_dir)}
    return list(areas)
def whole_netsize(conf):
    """Print a histogram of connected-subgraph sizes over all saved results.

    Output: one "<size> <count>" line per distinct subgraph size.
    """
    out_dir = conf.get("dag", "output_dir")
    size_counts = {}
    for fp in fslib.rep_dir(out_dir):
        result = PCOutput(conf).load(fp)
        for net in graph_network(result.graph):
            size = len(net)
            size_counts[size] = size_counts.get(size, 0) + 1
    for size, cnt in size_counts.items():
        print("{0} {1}".format(size, cnt))
def list_results(conf):
    """Print a one-line summary (time, area, graph size, path) per result.

    Results are loaded from the configured output directory and sorted
    by area before printing.
    """
    out_dir = conf.get("dag", "output_dir")
    results = [PCOutput(conf).load(fp) for fp in fslib.rep_dir(out_dir)]
    results.sort(key=lambda r: r.area)
    print("datetime\t\tarea\tnodes\tedges\tfilepath")
    for r in results:
        fields = (str(r.top_dt), r.area,
                  str(len(r.graph.nodes())),
                  str(len(r.graph.edges())),
                  r.result_fn())
        print("\t".join(fields))
def test_parse(conf):
    """Parse every configured source file and return the parsed tuples.

    Each element is whatever LogParser.process_line yields for one
    newline-stripped input line.
    """
    lp = LogParser(conf)
    src_path = conf.getlist("general", "src_path")
    if conf.getboolean("general", "src_recur"):
        file_list = fslib.recur_dir(src_path)
    else:
        file_list = fslib.rep_dir(src_path)
    parsed = []
    for fp in file_list:
        with open(fp, 'r') as f:
            parsed.extend(lp.process_line(line.rstrip("\n")) for line in f)
    return parsed
def list_results(conf):
    """Print a tab-separated summary line for every saved PC result.

    Loads all results under the configured output directory, orders
    them by area, and prints a header followed by one row per result.
    """
    out_dir = conf.get("dag", "output_dir")
    results = sorted((PCOutput(conf).load(fp)
                      for fp in fslib.rep_dir(out_dir)),
                     key=lambda r: r.area)
    print("datetime\t\tarea\tnodes\tedges\tfilepath")
    for r in results:
        print("\t".join((str(r.top_dt), r.area,
                         str(len(r.graph.nodes())),
                         str(len(r.graph.edges())),
                         r.result_fn())))
def list_netsize(conf):
    """Print, per result file, its subgraph sizes as 'size' or 'sizexcount'.

    Sizes are listed largest first; a size occurring once is printed
    bare, otherwise as "<size>x<count>".
    """
    out_dir = conf.get("dag", "output_dir")
    for fp in fslib.rep_dir(out_dir):
        result = PCOutput(conf).load(fp)
        counts = {}
        for net in graph_network(result.graph):
            counts[len(net)] = counts.get(len(net), 0) + 1
        parts = []
        for size, cnt in sorted(counts.items(), reverse=True):
            parts.append(str(size) if cnt == 1
                         else "{0}x{1}".format(size, cnt))
        print("{0} : {1}".format(result.result_fn(), ", ".join(parts)))
def list_netsize(conf):
    """For each saved result, print its connected-subgraph size profile.

    Format per line: "<result filename> : s1, s2x3, ..." where sizes are
    sorted descending and repeated sizes are shown as "<size>x<count>".
    """
    out_dir = conf.get("dag", "output_dir")
    for fp in fslib.rep_dir(out_dir):
        r = PCOutput(conf).load(fp)
        size_hist = {}
        for net in graph_network(r.graph):
            n = len(net)
            size_hist[n] = size_hist.get(n, 0) + 1
        labels = ["{0}x{1}".format(size, cnt) if cnt > 1 else str(size)
                  for size, cnt in sorted(size_hist.items(), reverse=True)]
        print("{0} : {1}".format(r.result_fn(), ", ".join(labels)))
def list_detailed_results(conf):
    """Print a CSV table of graph statistics for every saved result.

    Columns: datetime, area, node count, edge count, edges across hosts,
    directed edges, directed edges across hosts, result filename.
    """
    out_dir = conf.get("dag", "output_dir")
    header = ["dt", "area", "node", "edge", "edge_oh",
              "d_edge", "d_edge_oh", "fn"]
    print(",".join(header))
    for fp in fslib.rep_dir(out_dir):
        r = PCOutput(conf).load(fp)
        row = [str(r.top_dt), str(r.area),
               str(len(r.graph.nodes())),
               str(len(r.graph.edges())),
               str(len(r._edge_across_host()))]
        # Directed-edge statistics (kept after the undirected ones,
        # matching the original evaluation order of the helpers).
        dedges, udedges = r._separate_edges()
        row.append(str(len(dedges)))
        row.append(str(len(r._edge_across_host(dedges))))
        row.append(r.result_fn())
        print(",".join(row))
def list_detailed_results(conf):
    """Emit one CSV row of graph metrics per saved PC result.

    The header row names the columns: dt, area, node, edge, edge_oh
    (edges across hosts), d_edge (directed edges), d_edge_oh, fn.
    """
    out_dir = conf.get("dag", "output_dir")
    print(",".join(["dt", "area", "node", "edge", "edge_oh",
                    "d_edge", "d_edge_oh", "fn"]))
    for fp in fslib.rep_dir(out_dir):
        result = PCOutput(conf).load(fp)
        cells = []
        cells.append(str(result.top_dt))
        cells.append(str(result.area))
        cells.append(str(len(result.graph.nodes())))
        cells.append(str(len(result.graph.edges())))
        cells.append(str(len(result._edge_across_host())))
        dedges, udedges = result._separate_edges()
        cells.append(str(len(dedges)))
        cells.append(str(len(result._edge_across_host(dedges))))
        cells.append(result.result_fn())
        print(",".join(cells))
return None, None, None, None l_word, l_symbol = self.split_message(message) return dt, host, l_word, l_symbol def test_parse(conf): LP = LogParser(conf) ret = [] if conf.getboolean("general", "src_recur"): l_fp = fslib.recur_dir(conf.getlist("general", "src_path")) else: l_fp = fslib.rep_dir(conf.getlist("general", "src_path")) for fp in l_fp: with open(fp, 'r') as f: for line in f: ret.append(LP.process_line(line.rstrip("\n"))) return ret if __name__ == "__main__": if len(sys.argv) < 3: sys.exit("usage: {0} config targets".format(sys.argv[0])) conf = config.open_config(sys.argv[1]) LP = LogParser(conf) for fp in fslib.rep_dir(sys.argv[2:]): with open(fp) as f: for line in f: print LP.process_line(line.rstrip("\n"))
return None, None, None, None elif self._is_removed(message): return None, None, None, None l_word, l_symbol = self.split_message(message) return dt, host, l_word, l_symbol def test_parse(conf): LP = LogParser(conf) ret = [] if conf.getboolean("general", "src_recur"): l_fp = fslib.recur_dir(conf.getlist("general", "src_path")) else: l_fp = fslib.rep_dir(conf.getlist("general", "src_path")) for fp in l_fp: with open(fp, 'r') as f: for line in f: ret.append(LP.process_line(line.rstrip("\n"))) return ret if __name__ == "__main__": if len(sys.argv) < 3: sys.exit("usage: {0} config targets".format(sys.argv[0])) conf = config.open_config(sys.argv[1]) LP = LogParser(conf) for fp in fslib.rep_dir(sys.argv[2:]): with open(fp) as f: for line in f: print LP.process_line(line.rstrip("\n"))