Example #1
  def parseLog(self):
    # Collect per-workflow, per-step message counts from runTheMatrix step logs.
    # Assumes module-level imports: re, glob, os, json, plus the project helper es_parse_log.
    logData = {}
    logRE = re.compile(r'^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$')
    max_steps = 0
    for logFile in glob.glob(self.basedir + '/[1-9]*/step[0-9]*.log'):
      m = logRE.match(logFile)
      if not m: continue
      wf = m.group(1)
      step = int(m.group(3))
      if step > max_steps: max_steps = step
      if wf not in logData:
        logData[wf] = {'steps': {}, 'events': [], 'failed': [], 'warning': []}
      if step not in logData[wf]['steps']:
        logData[wf]['steps'][step] = logFile
    cache_read = 0
    log_processed = 0
    for wf in logData:
      # Pre-fill every counter list with -1 so steps without a log stay marked.
      for k in logData[wf]:
        if k == 'steps': continue
        for s in range(0, max_steps):
          logData[wf][k].append(-1)
      index = 0
      for step in sorted(logData[wf]['steps']):
        data = [0, 0, 0]  # [events, warnings, errors]
        logFile = logData[wf]['steps'][step]
        json_cache = os.path.dirname(logFile) + "/logcache_" + str(step) + ".json"
        # Re-parse the log only if the cache is missing or older than the log.
        if (not os.path.exists(json_cache)) or (os.path.getmtime(logFile) > os.path.getmtime(json_cache)):
          try:
            es_parse_log(logFile)
          except Exception as e:
            print("Sending log information to elasticsearch failed", str(e))
          inFile = open(logFile)
          for line in inFile:
            if '%MSG-w' in line: data[1] += 1
            if '%MSG-e' in line: data[2] += 1
            if 'Begin processing the ' in line: data[0] += 1
          inFile.close()
          jfile = open(json_cache, "w")
          json.dump(data, jfile)
          jfile.close()
          log_processed += 1
        else:
          jfile = open(json_cache, "r")
          data = json.load(jfile)
          jfile.close()
          cache_read += 1
        logData[wf]['events'][index] = data[0]
        logData[wf]['failed'][index] = data[2]
        logData[wf]['warning'][index] = data[1]
        index += 1
      del logData[wf]['steps']

    print("Log processed: ", log_processed)
    print("Caches read:", cache_read)
    from pickle import Pickler
    outFile = open(os.path.join(self.outdir, 'runTheMatrixMsgs.pkl'), 'wb')  # pickle needs binary mode
    pklFile = Pickler(outFile)
    pklFile.dump(logData)
    outFile.close()
    return
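
All three examples key their workflow bookkeeping on the same regular expression. As a quick illustration of what it extracts, here is a minimal sketch; the sample path is hypothetical, shaped like basedir + '/<workflow>_<name>/step<N>_*.log':

import re

logRE = re.compile(r'^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$')
# Hypothetical step-log path; real paths depend on self.basedir's layout.
m = logRE.match('/build/1306.0_SingleMuPt10/step3_RECO.log')
print(m.group(1))  # '1306.0' -> workflow id: digits plus an optional decimal part
print(m.group(3))  # '3'      -> step number, a single digit 1-9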
Example #2
  def parseLog(self):
    # Variant of Example #1 that also extracts exception snippets into a per-log
    # "-read_config" file; add_exception_to_config and transform_and_write_config_file
    # are project helpers assumed to be imported at module level.
    logData = {}
    logRE = re.compile(r'^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$')
    max_steps = 0
    for logFile in glob.glob(self.basedir + '/[1-9]*/step[0-9]*.log'):
      m = logRE.match(logFile)
      if not m: continue
      wf = m.group(1)
      step = int(m.group(3))
      if step > max_steps: max_steps = step
      if wf not in logData:
        logData[wf] = {'steps': {}, 'events': [], 'failed': [], 'warning': []}
      if step not in logData[wf]['steps']:
        logData[wf]['steps'][step] = logFile
    cache_read = 0
    log_processed = 0
    for wf in logData:
      # Pre-fill every counter list with -1 so steps without a log stay marked.
      for k in logData[wf]:
        if k == 'steps': continue
        for s in range(0, max_steps):
          logData[wf][k].append(-1)
      index = 0
      for step in sorted(logData[wf]['steps']):
        data = [0, 0, 0]  # [events, warnings, errors]
        logFile = logData[wf]['steps'][step]
        json_cache = os.path.dirname(logFile) + "/logcache_" + str(step) + ".json"
        log_reader_config_path = logFile + "-read_config"
        config_list = []
        cache_ok = False
        # Use the cache only if it exists and is at least as new as the log.
        if os.path.exists(json_cache) and (os.path.getmtime(logFile) <= os.path.getmtime(json_cache)):
          try:
            jfile = open(json_cache, "r")
            data = json.load(jfile)
            jfile.close()
            cache_read += 1
            cache_ok = True
          except Exception:
            # Corrupt cache: drop it and fall through to a full re-parse.
            os.remove(json_cache)
        if not cache_ok:
          try:
            es_parse_log(logFile)
          except Exception as e:
            print("Sending log information to elasticsearch failed", str(e))
          inFile = open(logFile)
          for line_nr, line in enumerate(inFile):
            config_list = add_exception_to_config(line, line_nr, config_list)
            if '%MSG-w' in line: data[1] += 1
            if '%MSG-e' in line: data[2] += 1
            if 'Begin processing the ' in line: data[0] += 1
          inFile.close()
          jfile = open(json_cache, "w")
          json.dump(data, jfile)
          jfile.close()
          transform_and_write_config_file(log_reader_config_path, config_list)
          log_processed += 1
        logData[wf]['events'][index] = data[0]
        logData[wf]['failed'][index] = data[2]
        logData[wf]['warning'][index] = data[1]
        index += 1
      del logData[wf]['steps']

    print("Log processed: ", log_processed)
    print("Caches read:", cache_read)
    from pickle import Pickler
    outFile = open(os.path.join(self.outdir, 'runTheMatrixMsgs.pkl'), 'wb')  # pickle needs binary mode
    pklFile = Pickler(outFile)
    pklFile.dump(logData)
    outFile.close()
    return
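
The per-step cache written above, logcache_<step>.json, holds just the three counters in a fixed order, which is what lets a fresh cache hit skip the line-by-line scan. A minimal sketch of reading one back, assuming such a file exists at this hypothetical path:

import json

# Hypothetical cache file produced by the parser above for step 3.
with open('/build/1306.0_SingleMuPt10/logcache_3.json') as jfile:
    events, warnings, errors = json.load(jfile)
# Index 0 counts 'Begin processing the ' lines, index 1 '%MSG-w', index 2 '%MSG-e'.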
Example #3
    def parseLog(self):
        # Same parser, formatted with one statement per line; uses the cache_ok
        # fallback from Example #2 but without the read-config extraction.
        logData = {}
        logRE = re.compile(r"^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$")
        max_steps = 0
        for logFile in glob.glob(self.basedir + "/[1-9]*/step[0-9]*.log"):
            m = logRE.match(logFile)
            if not m:
                continue
            wf = m.group(1)
            step = int(m.group(3))
            if step > max_steps:
                max_steps = step
            if wf not in logData:
                logData[wf] = {"steps": {}, "events": [], "failed": [], "warning": []}
            if step not in logData[wf]["steps"]:
                logData[wf]["steps"][step] = logFile
        cache_read = 0
        log_processed = 0
        for wf in logData:
            # Pre-fill every counter list with -1 so steps without a log stay marked.
            for k in logData[wf]:
                if k == "steps":
                    continue
                for s in range(0, max_steps):
                    logData[wf][k].append(-1)
            index = 0
            for step in sorted(logData[wf]["steps"]):
                data = [0, 0, 0]  # [events, warnings, errors]
                logFile = logData[wf]["steps"][step]
                json_cache = os.path.dirname(logFile) + "/logcache_" + str(step) + ".json"
                cache_ok = False
                # Use the cache only if it exists and is at least as new as the log.
                if os.path.exists(json_cache) and (os.path.getmtime(logFile) <= os.path.getmtime(json_cache)):
                    try:
                        jfile = open(json_cache, "r")
                        data = json.load(jfile)
                        jfile.close()
                        cache_read += 1
                        cache_ok = True
                    except Exception:
                        # Corrupt cache: drop it and fall through to a full re-parse.
                        os.remove(json_cache)
                if not cache_ok:
                    try:
                        es_parse_log(logFile)
                    except Exception as e:
                        print("Sending log information to elasticsearch failed", str(e))
                    inFile = open(logFile)
                    for line in inFile:
                        if "%MSG-w" in line:
                            data[1] += 1
                        if "%MSG-e" in line:
                            data[2] += 1
                        if "Begin processing the " in line:
                            data[0] += 1
                    inFile.close()
                    jfile = open(json_cache, "w")
                    json.dump(data, jfile)
                    jfile.close()
                    log_processed += 1
                logData[wf]["events"][index] = data[0]
                logData[wf]["failed"][index] = data[2]
                logData[wf]["warning"][index] = data[1]
                index += 1
            del logData[wf]["steps"]

        print("Log processed: ", log_processed)
        print("Caches read:", cache_read)
        from pickle import Pickler

        outFile = open(os.path.join(self.outdir, "runTheMatrixMsgs.pkl"), "wb")  # pickle needs binary mode
        pklFile = Pickler(outFile)
        pklFile.dump(logData)
        outFile.close()
        return
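
Every variant ends the same way: the per-workflow summary is pickled to runTheMatrixMsgs.pkl in self.outdir. A minimal sketch of a downstream consumer, assuming the file sits in the current directory; note it must be opened in binary mode to match the Pickler:

from pickle import Unpickler

with open('runTheMatrixMsgs.pkl', 'rb') as inFile:
    logData = Unpickler(inFile).load()
# Maps workflow id -> {'events': [...], 'failed': [...], 'warning': [...]},
# one slot per step; -1 marks steps whose log was never found.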