Example 1
from hashlib import sha1
from os.path import getmtime

# helpers such as cmsswIB2Week, add_exception_to_config and
# send_unittest_dataset come from the surrounding module
def process_addon_log(logFile):
  t = getmtime(logFile)
  timestp = int(t*1000)
  pathInfo = logFile.split('/')
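  # architecture and release sit at fixed depths of the IB log path
  # (layout assumed here, e.g. .../<arch>/.../<release>/...)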
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec = cmsswIB2Week(release)
  datasets = []
  payload = {"type": "addon"}
  payload["release"] = release
  payload["architecture"] = architecture
  payload["@timestamp"] = timestp
  payload["name"] = pathInfo[-1].split("-")[1].split("_cmsRun_")[0].split("_cmsDriver.py_")[0]
  # hash release/arch/test name into a stable document id (sha1 needs bytes;
  # xid also avoids shadowing the builtin id)
  xid = sha1((release + architecture + "addon" + payload["name"]).encode()).hexdigest()
  config_list = []
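  # scan the log for exception snippets and for input files opened by the job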
  for index, l in enumerate(open(logFile).read().split("\n")):
    config_list = add_exception_to_config(l, index, config_list)
    if " Initiating request to open file " in l:
      try:
        rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
        if "file:" not in rootfile and rootfile not in datasets: datasets.append(rootfile)
      except Exception: pass
  send_unittest_dataset(datasets, payload, xid, "ib-dataset-"+week, "addon-dataset")
  write_config_file(logFile + "-read_config", config_list)
  return
Example 2
from hashlib import sha1
from os.path import getmtime

def process_addon_log(logFile):
  t = getmtime(logFile)
  timestp = int(t*1000)
  pathInfo = logFile.split('/')
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec = cmsswIB2Week(release)
  datasets = []
  payload = {"type": "addon"}
  payload["release"] = release
  payload["architecture"] = architecture
  payload["@timestamp"] = timestp
  payload["name"] = pathInfo[-1].split("-")[1].split("_cmsRun_")[0].split("_cmsDriver.py_")[0]
  xid = sha1((release + architecture + "addon" + payload["name"]).encode()).hexdigest()
  config_list = []
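  # stream the log line by line instead of loading the whole file at once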
  with open(logFile) as f:
    for index, l in enumerate(f):
      l = l.strip()
      config_list = add_exception_to_config(l, index, config_list)
      if " Initiating request to open file " in l:
        try:
          rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
          if "file:" not in rootfile and rootfile not in datasets: datasets.append(rootfile)
        except Exception: pass
  send_unittest_dataset(datasets, payload, xid, "ib-dataset-"+week, "addon-dataset")
  transform_and_write_config_file(logFile + "-read_config", config_list)
  return
Example 3
 def test_unittestlogs(self):
     config_list = []
     custom_rule_set = [
         {"str_to_match": r"test (.*) had ERRORS", "name": "{0}{1}{2} failed", "control_type": ResultTypeEnum.ISSUE},
         {"str_to_match": r'===== Test "([^\s]+)" ====', "name": "{0}", "control_type": ResultTypeEnum.TEST}
     ]
     for index, l in enumerate(unittestlog.split("\n")):
         config_list = add_exception_to_config(l, index, config_list, custom_rule_set)
     transform_and_write_config_file("/tmp/unittestlogs.log-read_config", config_list)
     print("Example config file in %s" % ("/tmp/unittestlogs.log-read_config"))
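For orientation, here is a minimal sketch of how an entry in custom_rule_set is presumably consumed. The real matching lives inside add_exception_to_config, whose body is not shown in these examples, so apply_rules below is a hypothetical stand-in: it treats str_to_match as a regular expression and fills the name template from the capture groups.

import re

# Hypothetical helper -- NOT the real add_exception_to_config.
# Assumes str_to_match is a regex and name is a str.format template
# filled from the regex capture groups; the real helper may pass more
# format arguments (e.g. for the "{0}{1}{2}" template above).
def apply_rules(line, rules):
    for rule in rules:
        m = re.search(rule["str_to_match"], line)
        if m:
            return rule["name"].format(*m.groups()), rule.get("control_type")
    return None, None

# Under these assumptions,
#   apply_rules('===== Test "testXyz" ====', custom_rule_set)
# returns ("testXyz", ResultTypeEnum.TEST).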
Example 4
import sys
import traceback
from hashlib import sha1
from os.path import getmtime

def process_unittest_log(logFile):
    t = getmtime(logFile)
    timestp = int(t * 1000)
    pathInfo = logFile.split('/')
    architecture = pathInfo[4]
    release = pathInfo[8]
    week, rel_sec = cmsswIB2Week(release)
    package = pathInfo[-3] + "/" + pathInfo[-2]
    payload = {"type": "unittest"}
    payload["release"] = release
    payload["architecture"] = architecture
    payload["@timestamp"] = timestp
    config_list = []
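    # rule set: "had ERRORS" lines flag failing tests; '===== Test ... ====' banners mark test boundaries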
    custom_rule_set = [{
        "str_to_match": r"test (.*) had ERRORS",
        "name": "{0} failed",
        "control_type": ResultTypeEnum.ISSUE
    }, {
        "str_to_match": r'===== Test "([^\s]+)" ====',
        "name": "{0}",
        "control_type": ResultTypeEnum.TEST
    }]
    with open(logFile) as f:
        utname = None
        datasets = []
        xid = None
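        # parser state: utname/xid identify the unit test currently being read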
        for index, l in enumerate(f):
            l = l.strip()
            config_list = add_exception_to_config(l, index, config_list,
                                                  custom_rule_set)
            if l.startswith('===== Test "') and l.endswith('" ===='):
                if utname:
                    send_unittest_dataset(datasets, payload, xid,
                                          "ib-dataset-" + week,
                                          "unittest-dataset")
                datasets = []
                utname = l.split('"')[1]
                payload["name"] = "%s/%s" % (package, utname)
                xid = sha1((release + architecture + package +
                            str(utname)).encode()).hexdigest()
            elif " Initiating request to open file " in l:
                try:
                    rootfile = l.split(
                        " Initiating request to open file ")[1].split(" ")[0]
                    if "file:" not in rootfile and rootfile not in datasets:
                        datasets.append(rootfile)
                except Exception as e:
                    print("ERROR: ", logFile, e)
                    traceback.print_exc(file=sys.stdout)
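        # flush the datasets collected for the last test in the file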
        if datasets and xid:
            send_unittest_dataset(datasets, payload, xid, "ib-dataset-" + week,
                                  "unittest-dataset")
    transform_and_write_config_file(logFile + "-read_config", config_list)
    return
Example 5
 def test_unittestlogs(self):
     config_list = []
     custom_rule_set = [{
         "str_to_match": r"test (.*) had ERRORS",
         "name": "{0}{1}{2} failed"
     }, {
         "str_to_match": r'===== Test "([^\s]+)" ====',
         "name": "{0}"
     }]
     for index, l in enumerate(unittestlog.split("\n")):
         config_list = add_exception_to_config(l, index, config_list,
                                               custom_rule_set)
     write_config_file("tmp/unittestlogs.log" + "-read_config", config_list)
Example 6
from hashlib import sha1
from os.path import getmtime

def process_unittest_log(logFile):
    t = getmtime(logFile)
    timestp = int(t * 1000)
    pathInfo = logFile.split('/')
    architecture = pathInfo[4]
    release = pathInfo[8]
    week, rel_sec = cmsswIB2Week(release)
    package = pathInfo[-3] + "/" + pathInfo[-2]
    utname = None
    datasets = []
    payload = {"type": "unittest"}
    payload["release"] = release
    payload["architecture"] = architecture
    payload["@timestamp"] = timestp
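    # xid is the sha1 id of the test currently being parsed; None until the first banner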
    xid = None
    config_list = []
    custom_rule_set = [{
        "str_to_match": r"test (.*) had ERRORS",
        "name": "{0} failed"
    }, {
        "str_to_match": r'===== Test "([^\s]+)" ====',
        "name": "{0}"
    }]
    for index, l in enumerate(open(logFile).read().split("\n")):
        config_list = add_exception_to_config(l, index, config_list,
                                              custom_rule_set)
        if l.startswith('===== Test "') and l.endswith('" ===='):
            if utname:
                send_unittest_dataset(datasets, payload, xid,
                                      "ib-dataset-" + week, "unittest-dataset")
            datasets = []
            utname = l.split('"')[1]
            payload["name"] = "%s/%s" % (package, utname)
            xid = sha1((release + architecture + package +
                        str(utname)).encode()).hexdigest()
        elif " Initiating request to open file " in l:
            try:
                rootfile = l.split(
                    " Initiating request to open file ")[1].split(" ")[0]
                if "file:" not in rootfile and rootfile not in datasets:
                    datasets.append(rootfile)
            except Exception:
                pass
    if datasets and xid:
        send_unittest_dataset(datasets, payload, xid, "ib-dataset-" + week,
                              "unittest-dataset")
    write_config_file(logFile + "-read_config", config_list)
    return
Example 7
 def test_unittestlogs(self):
     config_list = []
     custom_rule_set = [{
         "str_to_match": "test (.*) had ERRORS",
         "name": "{0}{1}{2} failed",
         "control_type": ResultTypeEnum.ISSUE
     }, {
         "str_to_match": r'===== Test "([^\s]+)" ====',
         "name": "{0}",
         "control_type": ResultTypeEnum.TEST
     }]
     for index, l in enumerate(unittestlog.split("\n")):
         config_list = add_exception_to_config(l, index, config_list,
                                               custom_rule_set)
     transform_and_write_config_file(
         "/tmp/unittestlogs.log" + "-read_config", config_list)
Example 8
from hashlib import sha1
from os.path import getmtime

def process_unittest_log(logFile):
  t = getmtime(logFile)
  timestp = int(t*1000)
  pathInfo = logFile.split('/')
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec = cmsswIB2Week(release)
  package = pathInfo[-3] + "/" + pathInfo[-2]
  payload = {"type": "unittest"}
  payload["release"] = release
  payload["architecture"] = architecture
  payload["@timestamp"] = timestp
  config_list = []
  custom_rule_set = [
    {"str_to_match": r"test (.*) had ERRORS", "name": "{0} failed", "control_type": ResultTypeEnum.ISSUE},
    {"str_to_match": r'===== Test "([^\s]+)" ====', "name": "{0}", "control_type": ResultTypeEnum.TEST}
  ]
  with open(logFile) as f:
    utname = None
    datasets = []
    xid = None
    for index, l in enumerate(f):
      l = l.strip()
      config_list = add_exception_to_config(l, index, config_list, custom_rule_set)
      if l.startswith('===== Test "') and l.endswith('" ===='):
        if utname: send_unittest_dataset(datasets, payload, xid, "ib-dataset-"+week, "unittest-dataset")
        datasets = []
        utname = l.split('"')[1]
        payload["name"] = "%s/%s" % (package, utname)
        xid = sha1((release + architecture + package + str(utname)).encode()).hexdigest()
      elif " Initiating request to open file " in l:
        try:
          rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
          if "file:" not in rootfile and rootfile not in datasets: datasets.append(rootfile)
        except Exception as e:
          print("ERROR: ", e)
    if datasets and xid:
      send_unittest_dataset(datasets, payload, xid, "ib-dataset-"+week,"unittest-dataset")
  transform_and_write_config_file(logFile + "-read_config", config_list)
  return
Example 9
  # requires module-level imports: glob, json, os, re;
  # es_parse_log and the config helpers come from the surrounding module
  def parseLog(self):
    logData = {}
    logRE = re.compile(r'^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$')
    max_steps = 0
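    # map each workflow to its per-step logs; wf id and step number come from the path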
    for logFile in glob.glob(self.basedir+'/[1-9]*/step[0-9]*.log'):
      m = logRE.match(logFile)
      if not m: continue
      wf = m.group(1)
      step = int(m.group(3))
      if step>max_steps: max_steps=step
      if wf not in logData:
        logData[wf] = {'steps': {}, 'events' : [], 'failed' : [], 'warning' : []}
      if step not in logData[wf]['steps']:
        logData[wf]['steps'][step]=logFile
    cache_read=0
    log_processed=0
    for wf in logData:
      for k in logData[wf]:
        if k == 'steps': continue
        for s in range(0, max_steps):
          logData[wf][k].append(-1)
      index =0
      for step in sorted(logData[wf]['steps']):
        data = [0, 0, 0]
        logFile = logData[wf]['steps'][step]
        json_cache = os.path.dirname(logFile)+"/logcache_"+str(step)+".json"
        log_reader_config_path = logFile + "-read_config"
        config_list = []
        cache_ok = False
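        # reuse the per-step JSON cache only when it is newer than the log itself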
        if os.path.exists(json_cache) and os.path.getmtime(logFile) <= os.path.getmtime(json_cache):
          try:
            jfile = open(json_cache,"r")
            data = json.load(jfile)
            jfile.close()
            cache_read+=1
            cache_ok = True
          except Exception:
            os.remove(json_cache)
        if not cache_ok:
          try:
            es_parse_log(logFile)
          except Exception as e:
            print("Sending log information to elasticsearch failed", str(e))
          inFile = open(logFile)
          for line_nr, line in enumerate(inFile):
            config_list = add_exception_to_config(line, line_nr, config_list)
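            # tally processed events plus %MSG-w warnings and %MSG-e errors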
            if '%MSG-w' in line: data[1]=data[1]+1
            if '%MSG-e' in line: data[2]=data[2]+1
            if 'Begin processing the ' in line: data[0]=data[0]+1
          inFile.close()
          jfile = open(json_cache,"w")
          json.dump(data,jfile)
          jfile.close()
          transform_and_write_config_file(log_reader_config_path, config_list)
          log_processed+=1
        logData[wf]['events'][index] = data[0]
        logData[wf]['failed'][index] = data[2]
        logData[wf]['warning'][index] = data[1]
        index+=1
      del logData[wf]['steps']

    print("Log processed: ", log_processed)
    print("Caches read:", cache_read)
    from pickle import Pickler
    outFile = open(os.path.join(self.outdir,'runTheMatrixMsgs.pkl'), 'wb')  # pickle needs a binary-mode file
    pklFile = Pickler(outFile)
    pklFile.dump(logData)
    outFile.close()
    return
Example 10
  # requires module-level imports: glob, json, os, re
  def parseLog(self):
    logData = {}
    logRE = re.compile(r'^.*/([1-9][0-9]*(\.[0-9]+|))_[^/]+/step([1-9])_.*\.log$')
    max_steps = 0
    for logFile in glob.glob(self.basedir+'/[1-9]*/step[0-9]*.log'):
      m = logRE.match(logFile)
      if not m: continue
      wf = m.group(1)
      step = int(m.group(3))
      if step>max_steps: max_steps=step
      if wf not in logData:
        logData[wf] = {'steps': {}, 'events' : [], 'failed' : [], 'warning' : []}
      if step not in logData[wf]['steps']:
        logData[wf]['steps'][step]=logFile
    cache_read=0
    log_processed=0
    for wf in logData:
      for k in logData[wf]:
        if k == 'steps': continue
        for s in range(0, max_steps):
          logData[wf][k].append(-1)
      index =0
      for step in sorted(logData[wf]['steps']):
        data = [0, 0, 0]
        logFile = logData[wf]['steps'][step]
        json_cache = os.path.dirname(logFile)+"/logcache_"+str(step)+".json"
        log_reader_config_path = logFile + "-read_config"
        config_list = []
        cache_ok = False
        if os.path.exists(json_cache) and os.path.getmtime(logFile) <= os.path.getmtime(json_cache):
          try:
            jfile = open(json_cache,"r")
            data = json.load(jfile)
            jfile.close()
            cache_read+=1
            cache_ok = True
          except Exception:
            os.remove(json_cache)
        if not cache_ok:
          try:
            es_parse_log(logFile)
          except Exception as e:
            print("Sending log information to elasticsearch failed", str(e))
          inFile = open(logFile)
          for line_nr, line in enumerate(inFile):
            config_list = add_exception_to_config(line, line_nr, config_list)
            if '%MSG-w' in line: data[1]=data[1]+1
            if '%MSG-e' in line: data[2]=data[2]+1
            if 'Begin processing the ' in line: data[0]=data[0]+1
          inFile.close()
          jfile = open(json_cache,"w")
          json.dump(data,jfile)
          jfile.close()
          write_config_file(log_reader_config_path, config_list)
          log_processed+=1
        logData[wf]['events'][index] = data[0]
        logData[wf]['failed'][index] = data[2]
        logData[wf]['warning'][index] = data[1]
        index+=1
      del logData[wf]['steps']

    print("Log processed: ", log_processed)
    print("Caches read:", cache_read)
    from pickle import Pickler
    outFile = open(os.path.join(self.outdir,'runTheMatrixMsgs.pkl'), 'wb')
    pklFile = Pickler(outFile)
    pklFile.dump(logData)
    outFile.close()
    return