Example #1
def get_parser_from_args():

    parser = ArgumentParser()
    parser.add_argument('-p',
                        '--path',
                        type=str,
                        default='ip',
                        help='Path of the file or directory to parse. '
                             'Default value is ip')
    parser.add_argument(
        '-e',
        '--extension',
        type=str,
        default='csv',
        choices=['csv', 'txt'],
        help='The file extension, csv or txt. Default value is csv')

    args = parser.parse_args()
    args.path = args.path.replace('\\', '/')

    if os.path.isdir(args.path):
        logParser = logparser.LogParser(args.path, "dir", args.extension)
    elif os.path.isfile(args.path):
        logParser = logparser.LogParser(args.path, "file", args.extension)
    else:
        raise Exception('Invalid path')

    return logParser
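
A hypothetical entry point for this helper might look like the sketch below. The script name and paths in the comment are placeholders, and only the construction step is shown because what the returned LogParser exposes depends on the surrounding module.

if __name__ == '__main__':
    # Sketch only; e.g. invoked as
    #   python parse_logs.py --path /var/log/app --extension txt
    # (script name and path are hypothetical, not from the example above)
    log_parser = get_parser_from_args()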
Example #2
def process_files(conf, targets, reset_db, isnew_check=False):
    """Add log messages to the DB from files.

    Args:
        conf (config.ExtendedConfigParser): A common configuration object.
        targets (List[str]): A sequence of filepaths to process.
        reset_db (bool): True if the DB needs to be reset before adding,
            False otherwise.
        isnew_check (Optional[bool]): If True, add a message to the DB
            only if its timestamp is newer than every existing message
            in the DB.

    Raises:
        IOError: If a file in targets is not found.
    """
    ld = LogData(conf, edit=True, reset_db=reset_db)
    ld.init_ltmanager()
    lp = logparser.LogParser(conf)
    ha = host_alias.HostAlias(conf)
    latest = ld.dt_term()[1] if isnew_check else None
    drop_undefhost = conf.getboolean("database", "undefined_host")

    for line in _iter_line_from_files(targets):
        process_line(line, ld, lp, ha, isnew_check, latest, drop_undefhost)

    ld.commit_db()
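
For context, a call to process_files might look like the following sketch. The config.load() helper is a stand-in for however the project builds its ExtendedConfigParser, and the glob pattern is a placeholder path; neither comes from the original code.

import glob

import config   # the project's config module; load() below is assumed

conf = config.load("loganal.conf")
targets = sorted(glob.glob("/var/log/app/*.log"))
process_files(conf, targets, reset_db=False, isnew_check=True)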
Example #3
def process_files(conf, targets, rflag, fflag):
    if len(targets) == 0:
        if conf.getboolean("general", "src_recur") or rflag:
            l_fp = fslib.recur_dir(conf.getlist("general", "src_path"))
        else:
            l_fp = fslib.rep_dir(conf.getlist("general", "src_path"))
    else:
        if rflag:
            l_fp = fslib.recur_dir(targets)
        else:
            l_fp = fslib.rep_dir(targets)

    lp = logparser.LogParser(conf)
    ld = LogData(conf, fflag)
    ld.set_ltm()

    start_dt = datetime.datetime.now()
    _logger.info("log_db task start")

    for fp in l_fp:
        with open(fp, 'r') as f:
            _logger.info("log_db processing {0}".format(fp))
            for line in f:
                dt, host, l_w, l_s = lp.process_line(line)
                if l_w is None: continue
                ltline = ld.ltm.process_line(l_w, l_s)
                if ltline is None:
                    _logger.warning("Log template not found " + \
                            "for message [{0}]".format(line))
                else:
                    ld.add_line(ltline.ltid, dt, host, l_w)
    ld.commit_db()
    
    end_dt = datetime.datetime.now()
    _logger.info("log_db task done ({0})".format(end_dt - start_dt))
Example #4
def generate_lt_from_file(conf, fn):
    _logger.info("job for ({0}) start".format(fn))

    import logparser
    lp = logparser.LogParser(conf)
    table = lt_common.TemplateTable()
    sym = conf.get("log_template", "variable_symbol")
    d_symlist = {}
    ltgen = LTGenCRF(table, sym, conf)

    with open(fn, "r") as f:
        for line in f:
            dt, org_host, l_w, l_s = lp.process_line(line)
            if l_w is None: continue
            l_w = [strutil.add_esc(w) for w in l_w]
            tid, dummy = ltgen.process_line(l_w, l_s)
            d_symlist[tid] = l_s

    ret = []
    for tid in table.tids():
        tpl = table.get_template(tid)
        l_s = d_symlist[tid]
        ret.append((tpl, l_s))

    _logger.info("job for ({0}) done".format(fn))
    return ret
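
The per-file "job" logging above suggests generate_lt_from_file is meant to be run once per input file. A possible driver, sketched below, fans it out over several files with multiprocessing and flattens the per-file (template, symbol-list) results; the driver itself is hypothetical and assumes conf can be pickled.

import functools
import multiprocessing

def generate_lt_parallel(conf, filenames, n_proc=4):
    # Hypothetical driver, not part of the original module: run
    # generate_lt_from_file once per file, then flatten the results.
    with multiprocessing.Pool(n_proc) as pool:
        per_file = pool.map(functools.partial(generate_lt_from_file, conf),
                            filenames)
    return [pair for result in per_file for pair in result]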
Example #5
def init_ltmanager(conf, db, table, reset_db):
    """Initializing ltmanager by loading argument parameters."""
    lt_alg = conf.get("log_template", "lt_alg")
    ltg_alg = conf.get("log_template", "ltgroup_alg")
    post_alg = conf.gettuple("log_template", "post_alg")
    sym = conf.get("log_template", "variable_symbol")
    ltm = LTManager(conf, db, table, reset_db, lt_alg, ltg_alg, post_alg)

    if lt_alg == "shiso":
        import lt_shiso
        ltgen = lt_shiso.LTGenSHISO(
            ltm._table,
            sym,
            threshold=conf.getfloat("log_template_shiso", "ltgen_threshold"),
            max_child=conf.getint("log_template_shiso", "ltgen_max_child"))
    elif lt_alg == "import":
        fn = conf.get("log_template_import", "def_path")
        mode = conf.get("log_template_import", "mode")
        import logparser
        lp = logparser.LogParser(conf, sep_variable=True)
        import lt_import
        ltgen = lt_import.LTGenImport(ltm._table, sym, fn, mode, lp)
    elif lt_alg == "crf":
        import lt_crf
        ltgen = lt_crf.LTGenCRF(ltm._table, sym, conf)
    #elif lt_alg == "va":
    #    import lt_va
    #    ltm = lt_va.LTManager(conf, self.db, self.table,
    #            self._reset_db, ltg_alg)
    else:
        raise ValueError("lt_alg({0}) invalid".format(lt_alg))
    ltm._set_ltgen(ltgen)

    if ltg_alg == "shiso":
        import lt_shiso
        ltgroup = lt_shiso.LTGroupSHISO(
            table,
            ngram_length=conf.getint("log_template_shiso",
                                     "ltgroup_ngram_length"),
            th_lookup=conf.getfloat("log_template_shiso", "ltgroup_th_lookup"),
            th_distance=conf.getfloat("log_template_shiso",
                                      "ltgroup_th_distance"),
            mem_ngram=conf.getboolean("log_template_shiso",
                                      "ltgroup_mem_ngram"))
    elif ltg_alg == "ssdeep":
        import lt_misc
        ltgroup = lt_misc.LTGroupFuzzyHash(table)
    elif ltg_alg == "none":
        ltgroup = LTGroup()
    else:
        raise ValueError("ltgroup_alg({0}) invalid".format(ltg_alg))
    ltm._set_ltgroup(ltgroup)

    ltspl = LTPostProcess(conf, ltm._table, ltm._lttable, post_alg)
    ltm._set_ltspl(ltspl)

    if os.path.exists(ltm.filename) and not reset_db:
        ltm.load()

    return ltm
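
As a reading aid, the options that init_ltmanager pulls from conf correspond to a configuration shaped roughly like the sketch below. Section and option names are taken from the conf.get* calls above; every value is a placeholder, and the real project uses its own ExtendedConfigParser (with gettuple) rather than the stdlib configparser shown here.

import configparser

# Sketch only: minimal sections/options read by init_ltmanager, with
# placeholder values.
minimal_conf = configparser.ConfigParser()
minimal_conf.read_string("""
[log_template]
lt_alg = shiso
ltgroup_alg = none
post_alg = dummy
variable_symbol = **

[log_template_shiso]
ltgen_threshold = 0.9
ltgen_max_child = 4
""")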
Example #6
def add_db(conf, l_event, verbose, reset_db):
    ld = log_db.LogData(conf, edit=True, reset_db=reset_db)
    ld.init_ltmanager()
    lp = logparser.LogParser(conf)
    ha = host_alias.HostAlias(conf)
    for eid, t in l_event:
        msg = message(eid, t)
        log_db.process_line(msg, ld, lp, ha)
        if verbose:
            print(msg)
Example #7
    def __init__(self, parent=None):
        """The object constructor."""

        # Create the interface
        self.builder = gtk.Builder()
        self.builder.set_translation_domain(AppContext.DEFAULT_APP)
        glade_file = os.path.join(WHERE_AM_I, 'viewer.glade')
        self.builder.add_from_file(glade_file)

        # Get the main objects
        self.dialog_log = self.builder.get_object('dialog_log')

        self.view_log = self.builder.get_object('view_log')
        self.view_latex = self.builder.get_object('view_latex')

        self.summary = self.builder.get_object('summary')
        self.summary_liststore = self.summary.get_model()

        self.error_msg = self.builder.get_object('error_msg')
        self.warning_msg = self.builder.get_object('warning_msg')
        self.info_msg = self.builder.get_object('info_msg')
        self.log_parser = logparser.LogParser()

        # Get the pixbuf icons for the summary
        self.icon_warning = self.summary.render_icon(
            stock_id=gtk.STOCK_DIALOG_INFO,
            size=gtk.ICON_SIZE_MENU,
            detail=None)
        self.icon_error = self.summary.render_icon(
            stock_id=gtk.STOCK_DIALOG_ERROR,
            size=gtk.ICON_SIZE_MENU,
            detail=None)

        # Configure interface
        if parent is not None:
            self.dialog_log.set_transient_for(parent)

        def _configure_text_view(textview):
            # Configure View
            textview.modify_font(pango.FontDescription('DejaVu Sans Mono 10'))
            linenumbers = LineNumbers(textview)
            # Configure Model
            textbuffer = textview.get_buffer()
            textbuffer.create_tag(self.LINE_SEARCH,
                                  weight=pango.WEIGHT_BOLD,
                                  foreground='red',
                                  background='yellow')
            textbuffer.create_mark(self.LINE_SEARCH,
                                   textbuffer.get_start_iter(), True)

        _configure_text_view(self.view_log)
        _configure_text_view(self.view_latex)

        # Connect signals
        self.builder.connect_signals(self)
Example #8
    def _open_def(self):
        lp = logparser.LogParser(self.conf)
        with open(self.def_path, 'r') as f:
            for line in f:
                if self.mode == "plain":
                    line = line.rstrip("\n")
                    ltw, lts = lp.split_message(line)
                    ltline = self.add_lt(ltw, lts, 0)
                    self.searchtree.add(ltline.ltid, ltw)
                else:
                    raise ValueError("import_mode string is invalid")
Example #9
def remake_ltgroup(conf):
    lp = logparser.LogParser(conf)
    ld = LogData(conf)
    ld.set_ltm()
    
    start_dt = datetime.datetime.now()
    _logger.info("log_db remake_ltg task start")
    
    ld.ltm.remake_ltg()
    ld.commit_db()
    
    end_dt = datetime.datetime.now()
    _logger.info("log_db remake_ltg task done ({0})".format(end_dt - start_dt))
Example #10
def search_exception(conf, targets):
    
    import logparser
    import strutil
    lp = logparser.LogParser(conf)
    def_path = conf.get("log_template_import", "def_path")
    sym = conf.get("log_template", "variable_symbol")
    mode = conf.get("log_template_import", "mode")
    table = lt_common.TemplateTable()
    temp_lp = logparser.LogParser(conf, sep_variable=True)
    ltgen = LTGenImport(table, sym, def_path, mode, temp_lp)

    for fn in targets:
        _logger.info("lt_import job for ({0}) start".format(fn))
        with open(fn, "r") as f:
            for line in f:
                dt, org_host, l_w, l_s = lp.process_line(line)
                if l_w is None: continue
                l_w = [strutil.add_esc(w) for w in l_w]
                tid, dummy = ltgen.process_line(l_w, l_s)
                if tid is None:
                    print(line.rstrip("\n"))
        _logger.info("lt_import job for ({0}) done".format(fn))
Example #11
def process_init_data(conf, targets, isnew_check=False):
    """Add log messages to the DB from files. This function does NOT
    process messages incrementally. Use it to avoid the bad-start problem
    of log template generation with clustering or training methods.

    Note:
        This function needs a large amount of memory.

    Args:
        conf (config.ExtendedConfigParser): A common configuration object.
        targets (List[str]): A sequence of filepaths to process.
        isnew_check (Optional[bool]): If True, add a message to the DB
            only if its timestamp is newer than every existing message
            in the DB.

    Raises:
        IOError: If a file in targets is not found.
    """
    ld = LogData(conf, edit=True, reset_db=True)
    ld.init_ltmanager()
    lp = logparser.LogParser(conf)
    ha = host_alias.HostAlias(conf)
    latest = ld.dt_term()[1] if isnew_check else None
    drop_undefhost = conf.getboolean("database", "undefined_host")

    l_line = []
    l_data = []
    for line in _iter_line_from_files(targets):
        dt, org_host, l_w, l_s = lp.process_line(line)
        if latest is not None and dt < latest: continue
        if l_w is None: continue
        l_w = [strutil.add_esc(w) for w in l_w]
        host = ha.resolve_host(org_host)
        if host is None:
            if drop_undefhost:
                # Record the message with an unresolvable host and skip it.
                ld.ltm.failure_output(line)
                continue
            else:
                host = org_host

        l_line.append((l_w, l_s))
        l_data.append((dt, host))

    for ltline, line, data in zip(ld.ltm.process_init_data(l_line),
                                  l_line, l_data):
        l_w, l_s = line
        dt, host = data
        ld.add_line(ltline.ltid, dt, host, l_w)

    ld.commit_db()
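
To illustrate the intended division of labor, a hypothetical workflow could pair this function with process_files from earlier on this page: a one-time bulk load to bootstrap the templates, followed by incremental additions. The config loader name and the log paths below are placeholders.

import glob

import config   # the project's config module; load() below is assumed

conf = config.load("loganal.conf")

# One-time bulk load that resets the DB and bootstraps the log templates.
process_init_data(conf, sorted(glob.glob("/var/log/archive/*.log")))

# Newer files can then be added incrementally.
process_files(conf, ["/var/log/app/today.log"], reset_db=False,
              isnew_check=True)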
Example #12
def remake_area(conf):
    lp = logparser.LogParser(conf)
    ld = LogData(conf)
    ld.update_area()
    ld.commit_db()
Example #13
def migrate(conf):
    lp = logparser.LogParser(conf)
    ld = LogData(conf)
    ld.db._init_index()
    ld.update_area()
    ld.commit_db()
Example #14
def get_host(ip):
    try:
        return socket.gethostbyaddr(ip)[0][:-13]
    except socket.gaierror:
        return ip
    except socket.herror:
        return ip
    except socket.error:
        return 'local'


def ip_to_hostname(res):
    """
    Takes the results dictionary and 'prettifies' the IP addresses.
    """

    tmp = res.copy()

    for k in 'source destination'.split():
        tmp[k] = get_host(tmp[k])

    return tmp


if __name__ == '__main__':
    two_MB = 2097152
    lp = logparser.LogParser(LOG_DIR)
    rl = rotatinglog.RotatingLogfile(OUT_FILE, two_MB)

    for result in lp.get_lines('CONFIG|LOGIN'):
        fmt = ('%(time)s - %(user)s - Source: %(source)s - '
               'Destination: %(destination)s - %(msg)s')
        output = fmt % ip_to_hostname(result)
        rl.writeline(make_color(output))