Code example #1
File: LogDive.py  Project: comamitc/LogDive
def __init__(self, abs_path, log_dirs, archive=False):
    """Initializes LogDive configuration, parsers, history recording,
    and the summary writer.
    """
    _dirs = log_dirs.split(',')
    # build a real list; map() returns a lazy iterator on Python 3
    self.dirs = ['/'.join([x, self.LOG_DIR]) for x in _dirs]
    self._arch = archive
    self.store = LogStore('/'.join([abs_path, self.DAT_FILE]))
    self.history = ObjectStore('/'.join([abs_path, self.HIST_FILE]))
    self.texter = TextParser()
    self.xster = XMLParser()
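For reference, this is how the constructor derives its scan directories. The input paths below are hypothetical; only LOG_DIR ("build/log") comes from the full class shown in example #2:

LOG_DIR = "build/log"  # class constant from example #2

log_dirs = "/srv/app1,/srv/app2"  # hypothetical comma-separated input
dirs = ["/".join([x, LOG_DIR]) for x in log_dirs.split(",")]
print(dirs)  # ['/srv/app1/build/log', '/srv/app2/build/log']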
Code example #2
File: LogDive.py  Project: comamitc/LogDive
import os
import time
import zipfile

# LogStore, ObjectStore, TextParser, and XMLParser are project-local
# classes; their import lines are not shown in the original snippet.


class LogDive(object):

    LOG_DIR = "build/log"  # logs in PPSS directory
    DAT_FILE = "logs/LogAnalysis%s.log" % int(time.time())  # database file
    HIST_FILE = "data/history.dat"  # data file for scan tracking
    ARCH_LIM = 5 * 24 * 60 * 60  # 5-day threshold for archiving

    def __init__(self, abs_path, log_dirs, archive=False):
        """Initializes LogDive configuration, parsers, history recording,
        and the summary writer.
        """
        _dirs = log_dirs.split(",")
        # build a real list; map() returns a lazy iterator on Python 3
        self.dirs = ["/".join([x, self.LOG_DIR]) for x in _dirs]
        self._arch = archive
        self.store = LogStore("/".join([abs_path, self.DAT_FILE]))
        self.history = ObjectStore("/".join([abs_path, self.HIST_FILE]))
        self.texter = TextParser()
        self.xster = XMLParser()

    def archive(self, log_dir, fullpath, fname):
        """Archive an old log file when archive mode is turned on."""
        # os.path.join keeps the path platform-neutral; the original
        # joined with "\\" here while using "/" everywhere else
        zf = zipfile.ZipFile(os.path.join(log_dir, "archive.zip"), "a")
        zf.write(fullpath, fname)
        zf.close()  # flush and close the archive before deleting the source
        os.remove(fullpath)

    def _get_lines(self, ff, ts):
        """Generic abstraction that triggers the right type of parser
        based on a file's extension."""
        _funcs = {
            ".xml": self.xster.parse_xml,
            ".txt": self.texter.parse_text,
            ".log": self.texter.parse_text,
        }
        f_ext = os.path.splitext(ff)[1]  # sturdier than slicing ff[-4:]
        if f_ext in _funcs:  # dict.has_key() no longer exists in Python 3
            func = _funcs[f_ext]
            _data = func(ff, ts)
            self.store.put_many(ff, _data)
            self.history[ff] = {"last": time.time()}

    # TODO:
    # - add encoding for umlauts in logs
    def parse_logs(self):
        """Pages through the directories given in the config file, then
        through the files in each directory, firing a parse based on each
        file's modified-at time and its last recorded scan time."""
        for log_dir in self.dirs:
            for f in os.listdir(log_dir):
                ff = "/".join([log_dir, f])
                ffmod = int(os.stat(ff).st_mtime)  # clearer than os.stat(ff)[8]
                start = time.time()
                if ff in self.history:  # old file that has been scanned
                    fflast = self.history[ff]["last"]
                    if ffmod > fflast:  # has the file been modified?
                        self._get_lines(ff, fflast)
                    elif ffmod < (time.time() - self.ARCH_LIM) and self._arch:  # ready for archiving?
                        self.archive(log_dir, ff, f)
                else:  # new file that has never been scanned
                    self._get_lines(ff, float(0))
                end = time.time()
                # report elapsed time in milliseconds (the original printed
                # seconds but labeled them "ms")
                elapsed_ms = (end - start) * 1000
                print("%s :: %s ms" % (ff.split("/")[-1].ljust(56),
                                       ("%.3f" % elapsed_ms).rjust(15)))
                # sync the history after each file
                self.history.sync()
        # close the summary store
        self.store.close()
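Finally, a minimal usage sketch, assuming the constructor signature and parse_logs() method shown above; the base path and project directories are hypothetical:

if __name__ == "__main__":
    # Hypothetical paths: LogDive appends LOG_DIR ("build/log") to each
    # comma-separated directory and writes its stores under abs_path.
    dive = LogDive("/opt/logdive", "/srv/app1,/srv/app2", archive=True)
    dive.parse_logs()  # parse new/modified logs; archive stale ones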