Example #1
0
def es_send_resource_stats(release,
                           arch,
                           name,
                           version,
                           sfile,
                           hostname,
                           exit_code,
                           params=None,
                           cpu_normalize=1,
                           index="relvals_stats_summary",
                           doc="runtime-stats-summary"):
    """Summarize a relval step's resource-usage JSON and index it in Elasticsearch.

    Reads averaged runtime stats from ``sfile`` via
    get_summary_stats_from_json_file(), merges them with release/arch/
    workflow metadata (and optional extra ``params``) and ships the
    document with send_payload(). Failures are reported, not raised.
    """
    week, rel_sec = cmsswIB2Week(release)
    rel_msec = rel_sec * 1000
    # Derive the release queue, e.g. CMSSW_12_0_X from CMSSW_12_0_0_pre1.
    if "_X_" in release:
        release_queue = release.split("_X_", 1)[0] + "_X"
    else:
        release_queue = "_".join(release.split("_")[:3]) + "_X"
    sdata = {
        "release": release,
        "release_queue": release_queue,
        "architecture": arch,
        "step": version,
        "@timestamp": rel_msec,
        "workflow": name,
        "hostname": hostname,
        "exit_code": exit_code
    }
    average_stats = get_summary_stats_from_json_file(sfile, cpu_normalize)
    sdata.update(average_stats)
    if params:
        sdata.update(params)
    # sha1() requires bytes on Python 3; encode the composite document key.
    idx = sha1((release + arch + name + version + str(rel_sec)).encode()).hexdigest()
    try:
        send_payload(index + "-" + week, doc, idx, json.dumps(sdata))
    except Exception as e:
        # Exception.message was removed in Python 3; print the exception itself.
        print(e)
Example #2
0
def process_addon_log(logFile):
  """Index the ROOT datasets read by an addon test into Elasticsearch.

  Scans the addon log for "Initiating request to open file" lines,
  collects non-local ROOT files and ships them with release/arch
  metadata via send_unittest_dataset(); also extracts per-line
  exception info into a "<log>-read_config" file.
  """
  t = getmtime(logFile)
  timestp = int(t*1000)
  pathInfo = logFile.split('/')
  # NOTE(review): positional indices assume the fixed IB log directory layout.
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec  = cmsswIB2Week (release)
  datasets = []
  payload = {"type" : "addon"}
  payload["release"]=release
  payload["architecture"]=architecture
  payload["@timestamp"]=timestp
  payload["name"] = pathInfo[-1].split("-")[1].split("_cmsRun_")[0].split("_cmsDriver.py_")[0]
  # sha1() requires bytes on Python 3; avoid shadowing the builtin id().
  doc_id = sha1((release + architecture + "addon" + payload["name"]).encode()).hexdigest()
  config_list = []
  # file() is Python 2 only; read via open() so the handle is closed.
  with open(logFile) as f:
    content = f.read()
  for index, l in enumerate(content.split("\n")):
    config_list = add_exception_to_config(l,index, config_list)
    if " Initiating request to open file " in l:
      try:
        rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
        if ("file:" not in rootfile) and (rootfile not in datasets): datasets.append(rootfile)
      except IndexError:  # malformed line; only the indexing above can fail
        pass
  send_unittest_dataset(datasets, payload, doc_id, "ib-dataset-"+week,"addon-dataset")
  write_config_file(logFile + "-read_config", config_list)
  return
Example #3
0
def process_addon_log(logFile):
  """Index the ROOT datasets read by an addon test into Elasticsearch.

  Streams the addon log, collecting non-local ROOT files opened by the
  job and per-line exception info; ships the datasets via
  send_unittest_dataset() and writes the exception config next to the log.
  """
  t = getmtime(logFile)
  timestp = int(t*1000)
  pathInfo = logFile.split('/')
  # NOTE(review): positional indices assume the fixed IB log directory layout.
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec  = cmsswIB2Week (release)
  datasets = []
  payload = {"type" : "addon"}
  payload["release"]=release
  payload["architecture"]=architecture
  payload["@timestamp"]=timestp
  payload["name"] = pathInfo[-1].split("-")[1].split("_cmsRun_")[0].split("_cmsDriver.py_")[0]
  # sha1() requires bytes on Python 3; avoid shadowing the builtin id().
  doc_id = sha1((release + architecture + "addon" + payload["name"]).encode()).hexdigest()
  config_list = []
  with open(logFile) as f:
    for index, l in enumerate(f):
      l = l.strip()
      config_list = add_exception_to_config(l,index, config_list)
      if " Initiating request to open file " in l:
        try:
          rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
          if ("file:" not in rootfile) and (rootfile not in datasets): datasets.append(rootfile)
        except IndexError:  # malformed line; only the indexing above can fail
          pass
  send_unittest_dataset(datasets, payload, doc_id, "ib-dataset-"+week,"addon-dataset")
  transform_and_write_config_file(logFile + "-read_config", config_list)
  return
Example #4
0
def process_unittest_log(logFile):
  """Index the ROOT datasets read by each unit test in a package log.

  Splits the log into per-test sections delimited by '===== Test "..." ===='
  headers and ships each section's opened non-local ROOT files via
  send_unittest_dataset().
  """
  t = getmtime(logFile)
  timestp = int(t*1000)
  pathInfo = logFile.split('/')
  # NOTE(review): positional indices assume the fixed IB log directory layout.
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec  = cmsswIB2Week (release)
  package = pathInfo[-3]+"/"+ pathInfo[-2]
  utname = None
  datasets = []
  payload = {"type" : "unittest"}
  payload["release"]=release
  payload["architecture"]=architecture
  payload["@timestamp"]=timestp
  doc_id = None
  # file() is Python 2 only; read via open() so the handle is closed.
  with open(logFile) as f:
    content = f.read()
  for l in content.split("\n"):
    if l.startswith('===== Test "') and l.endswith('" ===='):
      # Flush the datasets collected for the previous test before starting a new one.
      if utname: send_unittest_dataset(datasets, payload, doc_id, "ib-dataset-"+week, "unittest-dataset")
      datasets = []
      utname = l.split('"')[1]
      payload["name"] = "%s/%s" % (package, utname)
      # sha1() requires bytes on Python 3; avoid shadowing the builtin id().
      doc_id = sha1((release + architecture + package + str(utname)).encode()).hexdigest()
    elif " Initiating request to open file " in l:
      try:
        rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
        if ("file:" not in rootfile) and (rootfile not in datasets): datasets.append(rootfile)
      except IndexError:  # malformed line; only the indexing above can fail
        pass
  if datasets: send_unittest_dataset(datasets, payload, doc_id, "ib-dataset-"+week,"unittest-dataset")
  return
Example #5
0
def process_unittest_log(logFile):
    """Index the ROOT datasets read by each unit test in a package log.

    Splits the log into per-test sections delimited by
    '===== Test "..." ====' headers, ships each section's opened
    non-local ROOT files via send_unittest_dataset(), and extracts
    pass/fail markers into a "<log>-read_config" file using a custom
    rule set.
    """
    t = getmtime(logFile)
    timestp = int(t * 1000)
    pathInfo = logFile.split('/')
    # NOTE(review): positional indices assume the fixed IB log directory layout.
    architecture = pathInfo[4]
    release = pathInfo[8]
    week, rel_sec = cmsswIB2Week(release)
    package = pathInfo[-3] + "/" + pathInfo[-2]
    payload = {"type": "unittest"}
    payload["release"] = release
    payload["architecture"] = architecture
    payload["@timestamp"] = timestp
    config_list = []
    # Raw strings for regex patterns: '\s' in a plain literal raises an
    # invalid-escape-sequence warning on Python 3.6+ (error in newer versions).
    custom_rule_set = [{
        "str_to_match": "test (.*) had ERRORS",
        "name": "{0} failed",
        'control_type': ResultTypeEnum.ISSUE
    }, {
        "str_to_match": r'===== Test "([^\s]+)" ====',
        "name": "{0}",
        'control_type': ResultTypeEnum.TEST
    }]
    with open(logFile) as f:
        utname = None
        datasets = []
        xid = None
        for index, l in enumerate(f):
            l = l.strip()
            config_list = add_exception_to_config(l, index, config_list,
                                                  custom_rule_set)
            if l.startswith('===== Test "') and l.endswith('" ===='):
                # Flush datasets of the previous test before starting a new one.
                if utname:
                    send_unittest_dataset(datasets, payload, xid,
                                          "ib-dataset-" + week,
                                          "unittest-dataset")
                datasets = []
                utname = l.split('"')[1]
                payload["name"] = "%s/%s" % (package, utname)
                # sha1() requires bytes on Python 3.
                xid = sha1((release + architecture + package +
                            str(utname)).encode()).hexdigest()
            elif " Initiating request to open file " in l:
                try:
                    rootfile = l.split(
                        " Initiating request to open file ")[1].split(" ")[0]
                    if ("file:" not in rootfile) and (rootfile
                                                      not in datasets):
                        datasets.append(rootfile)
                except Exception as e:
                    print("ERROR: ", logFile, e)
                    traceback.print_exc(file=sys.stdout)
        if datasets and xid:
            send_unittest_dataset(datasets, payload, xid, "ib-dataset-" + week,
                                  "unittest-dataset")
    transform_and_write_config_file(logFile + "-read_config", config_list)
    return
Example #6
0
def process_unittest_log(logFile):
    """Index the ROOT datasets read by each unit test in a package log.

    Splits the log into per-test sections delimited by
    '===== Test "..." ====' headers, ships each section's opened
    non-local ROOT files via send_unittest_dataset(), and extracts
    pass/fail markers into a "<log>-read_config" file.
    """
    t = getmtime(logFile)
    timestp = int(t * 1000)
    pathInfo = logFile.split('/')
    # NOTE(review): positional indices assume the fixed IB log directory layout.
    architecture = pathInfo[4]
    release = pathInfo[8]
    week, rel_sec = cmsswIB2Week(release)
    package = pathInfo[-3] + "/" + pathInfo[-2]
    utname = None
    datasets = []
    payload = {"type": "unittest"}
    payload["release"] = release
    payload["architecture"] = architecture
    payload["@timestamp"] = timestp
    doc_id = None
    config_list = []
    # Raw string for the second pattern: '\s' in a plain literal raises an
    # invalid-escape-sequence warning on Python 3.6+ (error in newer versions).
    custom_rule_set = [{
        "str_to_match": "test (.*) had ERRORS",
        "name": "{0} failed"
    }, {
        "str_to_match": r'===== Test "([^\s]+)" ====',
        "name": "{0}"
    }]
    # file() is Python 2 only; read via open() so the handle is closed.
    with open(logFile) as f:
        content = f.read()
    for index, l in enumerate(content.split("\n")):
        config_list = add_exception_to_config(l, index, config_list,
                                              custom_rule_set)
        if l.startswith('===== Test "') and l.endswith('" ===='):
            # Flush datasets of the previous test before starting a new one.
            if utname:
                send_unittest_dataset(datasets, payload, doc_id,
                                      "ib-dataset-" + week, "unittest-dataset")
            datasets = []
            utname = l.split('"')[1]
            payload["name"] = "%s/%s" % (package, utname)
            # sha1() requires bytes on Python 3; avoid shadowing builtin id().
            doc_id = sha1((release + architecture + package +
                           str(utname)).encode()).hexdigest()
        elif " Initiating request to open file " in l:
            try:
                rootfile = l.split(
                    " Initiating request to open file ")[1].split(" ")[0]
                if ("file:" not in rootfile) and (rootfile not in datasets):
                    datasets.append(rootfile)
            except IndexError:
                # Malformed line; only the indexing above can fail.
                pass
    if datasets:
        send_unittest_dataset(datasets, payload, doc_id, "ib-dataset-" + week,
                              "unittest-dataset")
    write_config_file(logFile + "-read_config", config_list)
    return
Example #7
0
def process_ib_utests(logFile):
    """Parse a unit-test summary log and index one document per test.

    Skips the header up to the '--------' separator, then reads
    "<package>:" sections; each following line names a unit test whose
    pass/fail status (from "had ERRORS") is sent via send_payload().
    End of file is signalled by StopIteration and caught below.
    """
    t = getmtime(logFile)
    timestp = datetime.datetime.fromtimestamp(
        int(t)).strftime('%Y-%m-%d %H:%M:%S')
    payload = {}
    pathInfo = logFile.split('/')
    # NOTE(review): positional indices assume the fixed IB log directory layout.
    architecture = pathInfo[4]
    release = pathInfo[8]
    week, rel_sec = cmsswIB2Week(release)
    index = "ibs-" + week
    document = "unittests"
    payload["release"] = release
    payload["architecture"] = architecture
    payload["@timestamp"] = timestp

    if exists(logFile):
        with open(logFile) as f:
            try:
                it = iter(f)
                # it.next() is Python 2 only; next(it) works on both.
                line = next(it)
                while '--------' not in line:
                    line = next(it)
                while True:
                    line = next(it).strip()
                    if ":" in line:
                        pkg = line.split(':')[0].strip()
                        payload[
                            "url"] = 'https://cmssdt.cern.ch/SDT/cgi-bin/buildlogs/' + architecture + '/' + release + '/unitTestLogs/' + pkg
                        line = next(it).strip()
                        while ':' not in line:
                            if "had ERRORS" in line:
                                payload["status"] = 1
                            else:
                                payload["status"] = 0
                            utest = line.split(' ')[0]
                            payload["package"] = pkg
                            payload["name"] = utest
                            # sha1() requires bytes on Python 3.
                            doc_id = sha1((release + architecture +
                                           utest).encode()).hexdigest()
                            send_payload(index, document, doc_id,
                                         json.dumps(payload))
                            line = next(it).strip()
            except Exception as e:
                # StopIteration lands here at end of file (normal exit).
                print("File processed:", e)
    else:
        print("Invalid File Path")
Example #8
0
def process_ib_utests(logFile):
  """Parse a unit-test summary log and index one document per test.

  Skips the header up to the '--------' separator, then reads
  "<package>:" sections; each following line names a unit test whose
  pass/fail status (from "had ERRORS") is sent via send_payload().
  End of file is signalled by StopIteration and caught below.
  """
  t = getmtime(logFile)
  timestp = int(t*1000)
  payload = {}
  pathInfo = logFile.split('/')
  # NOTE(review): positional indices assume the fixed IB log directory layout.
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec  = cmsswIB2Week (release)
  index = "ibs-"+week
  document = "unittests"
  payload["release"] = release
  payload["architecture"] = architecture
  payload["@timestamp"] = timestp

  if exists(logFile):
    with open(logFile) as f:
      try:
        it = iter(f)
        # Use next(it) everywhere: the original mixed next(it) with the
        # Python-2-only it.next(), which crashes on Python 3.
        line = next(it)
        while '--------' not in line:
          line = next(it)
        while True:
          line = next(it).strip()
          if ":" in line:
            pkg = line.split(':')[0].strip()
            payload["url"] = 'https://cmssdt.cern.ch/SDT/cgi-bin/buildlogs/'+ architecture +'/'+ release +'/unitTestLogs/' + pkg
            line = next(it).strip()
            while ':' not in line:
              if "had ERRORS" in line:
                payload["status"] = 1
              else:
                payload["status"] = 0
              utest= line.split(' ')[0]
              payload["package"] = pkg
              payload["name"] = utest
              # sha1() requires bytes on Python 3.
              doc_id = sha1((release + architecture + utest).encode()).hexdigest()
              send_payload(index,document,doc_id,json.dumps(payload))
              line = next(it).strip()
      except Exception as e:
        # StopIteration lands here at end of file (normal exit).
        print("File processed:", e)
  else:
    print("Invalid File Path")
Example #9
0
def process_unittest_log(logFile):
  """Index the ROOT datasets read by each unit test in a package log.

  Splits the log into per-test sections delimited by
  '===== Test "..." ====' headers, ships each section's opened
  non-local ROOT files via send_unittest_dataset(), and extracts
  pass/fail markers into a "<log>-read_config" file.
  """
  t = getmtime(logFile)
  timestp = int(t*1000)
  pathInfo = logFile.split('/')
  # NOTE(review): positional indices assume the fixed IB log directory layout.
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec  = cmsswIB2Week (release)
  package = pathInfo[-3]+"/"+ pathInfo[-2]
  payload = {"type" : "unittest"}
  payload["release"]=release
  payload["architecture"]=architecture
  payload["@timestamp"]=timestp
  config_list = []
  # Raw string for the second pattern: '\s' in a plain literal raises an
  # invalid-escape-sequence warning on Python 3.6+ (error in newer versions).
  custom_rule_set = [
    {"str_to_match": "test (.*) had ERRORS", "name": "{0} failed", 'control_type': ResultTypeEnum.ISSUE },
    {"str_to_match": r'===== Test "([^\s]+)" ====', "name": "{0}", 'control_type': ResultTypeEnum.TEST }
  ]
  with open(logFile) as f:
    utname = None
    datasets = []
    xid = None
    for index, l in enumerate(f):
      l = l.strip()
      config_list = add_exception_to_config(l,index,config_list,custom_rule_set)
      if l.startswith('===== Test "') and l.endswith('" ===='):
        # Flush datasets of the previous test before starting a new one.
        if utname: send_unittest_dataset(datasets, payload, xid, "ib-dataset-"+week, "unittest-dataset")
        datasets = []
        utname = l.split('"')[1]
        payload["name"] = "%s/%s" % (package, utname)
        # sha1() requires bytes on Python 3.
        xid = sha1((release + architecture + package + str(utname)).encode()).hexdigest()
      elif " Initiating request to open file " in l:
        try:
          rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
          if ("file:" not in rootfile) and (rootfile not in datasets): datasets.append(rootfile)
        except Exception as e:
          print("ERROR: ",e)
    if datasets and xid:
      send_unittest_dataset(datasets, payload, xid, "ib-dataset-"+week,"unittest-dataset")
  transform_and_write_config_file(logFile + "-read_config", config_list)
  return
Example #10
0
def process_addon_log(logFile):
  """Index the ROOT datasets read by an addon test into Elasticsearch.

  Scans the addon log for "Initiating request to open file" lines and
  ships the non-local ROOT files with release/arch metadata via
  send_unittest_dataset().
  """
  t = getmtime(logFile)
  timestp = int(t*1000)
  pathInfo = logFile.split('/')
  # NOTE(review): positional indices assume the fixed IB log directory layout.
  architecture = pathInfo[4]
  release = pathInfo[8]
  week, rel_sec  = cmsswIB2Week (release)
  datasets = []
  payload = {"type" : "addon"}
  payload["release"]=release
  payload["architecture"]=architecture
  payload["@timestamp"]=timestp
  payload["name"] = pathInfo[-1].split("-")[1].split("_cmsRun_")[0].split("_cmsDriver.py_")[0]
  # sha1() requires bytes on Python 3; avoid shadowing the builtin id().
  doc_id = sha1((release + architecture + "addon" + payload["name"]).encode()).hexdigest()
  # file() is Python 2 only; read via open() so the handle is closed.
  with open(logFile) as f:
    content = f.read()
  for l in content.split("\n"):
    if " Initiating request to open file " in l:
      try:
        rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
        if ("file:" not in rootfile) and (rootfile not in datasets): datasets.append(rootfile)
      except IndexError:  # malformed line; only the indexing above can fail
        pass
  send_unittest_dataset(datasets, payload, doc_id, "ib-dataset-"+week,"addon-dataset")
  return
Example #11
0
def process_build_any_ib(logFile):
  """Parse a build-any-ib Jenkins console log and index its timings.

  Extracts the release, architecture, job start/end timestamps, total
  upload time and whether this was a patch build, then sends the
  summary via send_payload(). Returns True for finished/aborted jobs,
  the current ``finished`` flag otherwise.
  """
  rel = ""
  arch = ""
  uploadTime=0
  stime=0
  upload=False
  jstart = 0
  jend=0
  patch = 0
  finished = False
  with open(logFile) as f:
    for line in f:
      line = line.strip()
      # First matching date line marks the job start.
      if not jstart:
        m=ReDate.match(line)
        if m:
          jstart = datetime.strptime(m.group(1)+m.group(2), "%b %d %H:%M:%S %Y")
        continue
      if not arch:
        m=ReArch.match(line)
        if m: arch=m.group(1)
        continue
      if not rel:
        m=ReRel.match(line)
        if m: rel=m.group(1)
        continue
      if ReFinish.match(line):
        finished = True
        if "ABORTED" in line: return True
        break
      if ReUpload.match(line):
        upload=True
        # print statements are Python 2 only; use the print() function.
        print("Upload: ", stime, line)
        continue
      if ReType.match(line): patch=1
      m=ReDate.match(line)
      if not m: continue
      xtime = datetime.strptime(m.group(1)+m.group(2), "%b %d %H:%M:%S %Y")
      jend = xtime
      if not upload:
        stime = xtime
      else:
        # Date line following an upload marker: accumulate upload duration.
        upload=False
        dtime = xtime - stime
        uploadTime += dtime.seconds
  print("FINISHED: ", finished, rel, arch, uploadTime, jstart, upload, patch)
  if not rel or not arch or not finished: return finished
  url = "https://cmssdt.cern.ch/jenkins/job/build-any-ib/"+logFile.split("/")[-2]+"/console"
  timestp  = getmtime(logFile)
  ttime=0
  if jend and jstart:
    dtime = jend - jstart
    ttime = dtime.seconds
  print(ttime, uploadTime, rel, arch, patch, url)
  payload = {}
  payload["release"] = rel
  payload["architecture"] = arch
  payload["total_time"] = ttime
  payload["upload_time"] = uploadTime
  payload["patch"] = patch
  payload["@timestamp"] = int(timestp*1000)
  payload["url"]=url
  week, rel_sec = cmsswIB2Week(rel)
  print(payload)
  # sha1() requires bytes on Python 3; avoid shadowing the builtin id().
  doc_id = sha1((rel + arch).encode()).hexdigest()
  send_payload("jenkins-ibs-"+week,"timings",doc_id,json.dumps(payload))
  return finished
Example #12
0
def process_build_any_ib(logFile):
    """Parse a build-any-ib Jenkins console log and index its timings.

    Extracts the release, architecture, job start/end timestamps, total
    upload time and whether this was a patch build, then sends the
    summary via send_payload(). Returns True for finished/aborted jobs,
    the current ``finished`` flag otherwise.
    """
    rel = ""
    arch = ""
    uploadTime = 0
    stime = 0
    upload = False
    jstart = 0
    jend = 0
    patch = 0
    finished = False
    with open(logFile) as f:
        for line in f:
            line = line.strip()
            # First matching date line marks the job start.
            if not jstart:
                m = ReDate.match(line)
                if m:
                    jstart = datetime.strptime(
                        m.group(1) + m.group(2), "%b %d %H:%M:%S %Y")
                continue
            if not arch:
                m = ReArch.match(line)
                if m: arch = m.group(1)
                continue
            if not rel:
                m = ReRel.match(line)
                if m: rel = m.group(1)
                continue
            if ReFinish.match(line):
                finished = True
                if "ABORTED" in line: return True
                break
            if ReUpload.match(line):
                upload = True
                # print statements are Python 2 only; use the print() function.
                print("Upload: ", stime, line)
                continue
            if ReType.match(line): patch = 1
            m = ReDate.match(line)
            if not m: continue
            xtime = datetime.strptime(
                m.group(1) + m.group(2), "%b %d %H:%M:%S %Y")
            jend = xtime
            if not upload:
                stime = xtime
            else:
                # Date line following an upload marker: accumulate upload time.
                upload = False
                dtime = xtime - stime
                uploadTime += dtime.seconds
    print("FINISHED: ", finished, rel, arch, uploadTime, jstart, upload, patch)
    if not rel or not arch or not finished: return finished
    url = "https://cmssdt.cern.ch/jenkins/job/build-any-ib/" + logFile.split(
        "/")[-2] + "/console"
    timestp = getmtime(logFile)
    ttime = 0
    if jend and jstart:
        dtime = jend - jstart
        ttime = dtime.seconds
    print(ttime, uploadTime, rel, arch, patch, url)
    payload = {}
    payload["release"] = rel
    payload["architecture"] = arch
    payload["total_time"] = ttime
    payload["upload_time"] = uploadTime
    payload["patch"] = patch
    payload["@timestamp"] = int(timestp * 1000)
    payload["url"] = url
    week, rel_sec = cmsswIB2Week(rel)
    print(payload)
    # sha1() requires bytes on Python 3; avoid shadowing the builtin id().
    doc_id = sha1((rel + arch).encode()).hexdigest()
    send_payload("jenkins-ibs-" + week, "timings", doc_id, json.dumps(payload))
    return finished
Example #13
0
from os.path import dirname, abspath
import sys
sys.path.append(dirname(dirname(abspath(__file__))))
from hashlib import sha1
import json
from es_utils import send_payload
from _py2with3compatibility import run_cmd
from cmsutils import cmsswIB2Week

# Ship AFS/EOS build-comparison JSON blobs to Elasticsearch and delete
# each file once it has been processed (or is unparseable).
err, logs = run_cmd(
    "find /data/sdt/SDT/jenkins-artifacts/cmssw-afs-eos-comparison -mindepth 1 -maxdepth 1 -name '*.json' -type f"
)
for jfile in logs.split('\n'):
    if not jfile: continue
    print("Processing file", jfile)
    payload = {}
    try:
        # Use a context manager so the file handle is closed deterministically
        # (json.load(open(...)) leaks the handle until GC).
        with open(jfile) as jf:
            payload = json.load(jf)
    except ValueError as verr:
        # Malformed JSON: report and drop the file so it is not retried.
        print(verr)
        run_cmd("rm -f %s" % jfile)
        continue
    week, rel_sec = cmsswIB2Week(payload["release"])
    payload["@timestamp"] = rel_sec * 1000
    # sha1() requires bytes on Python 3; avoid shadowing the builtin id().
    doc_id = sha1(("%s-%s-%s" % (payload["release"], payload["architecture"],
                                 payload["fstype"])).encode()).hexdigest()
    print(payload)
    # Only remove the source file after a successful send.
    if send_payload("cmssw-afs-eos-%s" % week, "build", doc_id,
                    json.dumps(payload)):
        run_cmd("rm -f %s" % jfile)
Example #14
0
    except Exception as e:
        print e
    return


# Driver section: walk a pyRelValPartialLogs directory and gather per-workflow
# inputs (hostname, exit codes) for resource-stats processing.
# NOTE(review): this chunk appears truncated mid-loop in this view; code is
# left untouched and only comments are added.
partial_log_dirpath = argv[1]
# Default worker count; overridden by the optional second CLI argument.
jobs = 6
try:
    jobs = int(argv[2])
except:
    jobs = 6
items = partial_log_dirpath.split("/")
# Only operate on a directory literally named "pyRelValPartialLogs".
if items[-1] != "pyRelValPartialLogs": exit(1)
# Release and architecture are taken from fixed positions in the path.
release = items[-2]
arch = items[-6]
week, rel_sec = cmsswIB2Week(release)
rel_msec = rel_sec * 1000
# Resource fields expected in the per-workflow stats JSON.
ex_fields = ["rss", "vms", "pss", "uss", "shared", "data", "cpu"]
e, o = getstatusoutput("ls -d %s/*" % partial_log_dirpath)
threads = []
for wf in o.split("\n"):
    if not isdir(wf): continue
    # Skip workflows already marked as processed.
    if exists(join(wf, "wf_stats.done")): continue
    wfnum = basename(wf).split("_", 1)[0]
    hostname = ""
    if exists(join(wf, "hostname")):
        hostname = open(join(wf, "hostname")).read().split("\n")[0]
    exit_codes = {}
    if exists(join(wf, "workflow.log")):
        # Extract the exit-code summary from the first line of workflow.log.
        e, o = getstatusoutput(
            "head -1 %s/workflow.log  | sed 's|.* exit: *||'" % wf)
Example #15
0
def es_parse_log(logFile):
  """Parse a relval step log and build its Elasticsearch payload.

  Derives workflow/step/release/architecture from the log path, pulls
  the cmsDriver command and exit code from sibling files, and scans the
  log for fatal exceptions, %MSG-e error records and opened ROOT
  datasets before enriching the payload via es_parse_jobreport().
  """
  t = os.path.getmtime(logFile)
  timestp = int(t*1000)
  payload = {}
  pathInfo = logFile.split('/')
  # NOTE(review): positional indices assume the fixed IB log directory layout.
  architecture = pathInfo[4]
  release = pathInfo[8]
  workflow = pathInfo[10].split('_')[0]
  step = pathInfo[11].split('_')[0]
  week, rel_sec  = cmsswIB2Week(release)
  index = "ib-matrix-" + week
  document = "runTheMatrix-data"
  # sha1() requires bytes on Python 3.
  id = sha1((release + architecture + workflow + str(step)).encode()).hexdigest()
  logdir = '/'.join(logFile.split('/')[:-1])
  cmdfile = logdir + '/cmdLog'
  cmd_step = find_step_cmd(cmdfile,step)
  if cmd_step: payload["command"] = cmd_step
  wf_log = logdir + '/workflow.log'
  exitcode = get_exit_code(wf_log,int(step[-1]))
  if exitcode != -1 : payload["exitcode"] = exitcode
  payload["workflow"] = workflow
  payload["release"] = release
  payload["architecture"] = architecture
  payload["step"] = step
  payload["@timestamp"] = timestp
  hostFile = "/".join(logFile.split('/')[:-1]) + "/hostname"
  if os.path.exists (hostFile):
    with open(hostFile,'r') as hname:
      payload["hostname"] = hname.readlines()[0].strip()
  exception = ""
  error = ""
  errors = []
  inException = False
  inError = False
  datasets = []
  error_count = 0
  if exists(logFile):
    # file() is Python 2 only; read via open() so the handle is closed.
    with open(logFile) as lf:
      lines = lf.read().split("\n")
    payload["url"] = 'https://cmssdt.cern.ch/SDT/cgi-bin/buildlogs/'+pathInfo[4]+'/'+pathInfo[8]+'/pyRelValMatrixLogs/run/'+pathInfo[-2]+'/'+pathInfo[-1]
    total_lines = len(lines)
    for i in range(total_lines):
      l = lines[i]
      if " Initiating request to open file " in l:
        try:
          rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
          if ("file:" not in rootfile) and (rootfile not in datasets):
            datasets.append(rootfile)
        except IndexError:  # malformed line; only the indexing above can fail
          pass
        continue
      # State machine: fatal-exception blocks and %MSG-e error records.
      if l.startswith("----- Begin Fatal Exception"):
        inException = True
        continue
      if l.startswith("----- End Fatal Exception"):
        inException = False
        continue
      if l.startswith("%MSG-e"):
        inError = True
        error = l
        error_kind = re.split(" [0-9a-zA-Z-]* [0-9:]{8} CET", error)[0].replace("%MSG-e ", "")
        continue
      if inError == True and l.startswith("%MSG"):
        inError = False
        # Cap stored errors at 10 but keep counting all of them.
        if len(errors)<10:
          errors.append({"error": error, "kind": error_kind})
        error_count += 1
        error = ""
        error_kind = ""
        continue
      if inException:
        exception += l + "\n"
      if inError:
        error += l + "\n"
  if exception:
    payload["exception"] = exception
  if errors:
    payload["errors"] = errors
  payload["error_count"] = error_count
  try:
    payload = es_parse_jobreport(payload,logFile)
  except Exception as e:
    # "except Exception, e" / "print e" are Python 2 only; use py3 syntax.
    print(e)
Example #16
0
def es_parse_log(logFile):
    """Parse a relval step log and index it, plus its datasets, in Elasticsearch.

    Derives workflow/step/release/architecture from the log path, pulls
    the cmsDriver command and exit code from sibling files, scans the log
    for fatal exceptions, %MSG-e error records and opened ROOT datasets,
    enriches the payload via es_parse_jobreport(), sends it, and finally
    indexes one document per dataset read by the step.
    """
    t = os.path.getmtime(logFile)
    timestp = int(t * 1000)
    payload = {}
    pathInfo = logFile.split('/')
    # NOTE(review): positional indices assume the fixed IB log directory layout.
    architecture = pathInfo[4]
    release = pathInfo[8]
    workflow = pathInfo[10].split('_')[0]
    step = pathInfo[11].split('_')[0]
    week, rel_sec = cmsswIB2Week(release)
    index = "ib-matrix-" + week
    document = "runTheMatrix-data"
    # NOTE(review): sha1() requires bytes on Python 3 — confirm target runtime.
    id = sha1(release + architecture + workflow + str(step)).hexdigest()
    logdir = '/'.join(logFile.split('/')[:-1])
    cmdfile = logdir + '/cmdLog'
    cmd_step = find_step_cmd(cmdfile, step)
    if cmd_step: payload["command"] = cmd_step
    wf_log = logdir + '/workflow.log'
    exitcode = get_exit_code(wf_log, int(step[-1]))
    if exitcode != -1: payload["exitcode"] = exitcode
    payload["workflow"] = workflow
    payload["release"] = release
    payload["architecture"] = architecture
    payload["step"] = step
    payload["@timestamp"] = timestp
    hostFile = "/".join(logFile.split('/')[:-1]) + "/hostname"
    if os.path.exists(hostFile):
        with open(hostFile, 'r') as hname:
            payload["hostname"] = hname.readlines()[0].strip()
    exception = ""
    error = ""
    errors = []
    inException = False
    inError = False
    datasets = []
    error_count = 0
    if exists(logFile):
        with open(logFile) as f:
            lines = f.readlines()
        payload[
            "url"] = 'https://cmssdt.cern.ch/SDT/cgi-bin/buildlogs/' + pathInfo[
                4] + '/' + pathInfo[8] + '/pyRelValMatrixLogs/run/' + pathInfo[
                    -2] + '/' + pathInfo[-1]
        total_lines = len(lines)
        for i in range(total_lines):
            l = lines[i].strip()
            # Collect non-local ROOT files the job requested to open.
            if " Initiating request to open file " in l:
                try:
                    rootfile = l.split(
                        " Initiating request to open file ")[1].split(" ")[0]
                    if (not "file:" in rootfile) and (not rootfile
                                                      in datasets):
                        #if (i+2)<total_lines:
                        #  if (rootfile in lines[i+1]) and (rootfile in lines[i+2]) and ("Successfully opened file " in lines[i+1]) and ("Closed file " in lines[i+2]):
                        #    print "File read with no valid events: %s" % rootfile
                        #    continue
                        datasets.append(rootfile)
                except:
                    pass
                continue
            # State machine: fatal-exception blocks and %MSG-e error records.
            if l.startswith("----- Begin Fatal Exception"):
                inException = True
                continue
            if l.startswith("----- End Fatal Exception"):
                inException = False
                continue
            if l.startswith("%MSG-e"):
                inError = True
                error = l
                error_kind = re.split(" [0-9a-zA-Z-]* [0-9:]{8} CET",
                                      error)[0].replace("%MSG-e ", "")
                continue
            if inError == True and l.startswith("%MSG"):
                inError = False
                # Cap stored errors at 10 but keep counting all of them.
                if len(errors) < 10:
                    errors.append({"error": error, "kind": error_kind})
                error_count += 1
                error = ""
                error_kind = ""
                continue
            if inException:
                exception += l + "\n"
            if inError:
                error += l + "\n"
    if exception:
        payload["exception"] = exception
    if errors:
        payload["errors"] = errors
    payload["error_count"] = error_count
    try:
        payload = es_parse_jobreport(payload, logFile)
    except Exception as e:
        print(e)
    print("sending data for ", logFile)
    try:
        send_payload(index, document, id, json.dumps(payload))
    except:
        pass
    # Index one document per dataset read by this workflow step.
    if datasets:
        dataset = {
            "type": "relvals",
            "name": "%s/%s" % (payload["workflow"], payload["step"])
        }
        for fld in ["release", "architecture", "@timestamp"]:
            dataset[fld] = payload[fld]
        for ds in datasets:
            # Split "protocol/path?opts" into LFN, protocol and options.
            ds_items = ds.split("?", 1)
            ds_items.append("")
            ibeos = "/store/user/cmsbuild"
            if ibeos in ds_items[0]:
                ds_items[0] = ds_items[0].replace(ibeos, "")
            else:
                ibeos = ""
            dataset["protocol"] = ds_items[0].split("/store/", 1)[0] + ibeos
            dataset["protocol_opts"] = ds_items[1]
            dataset["lfn"] = "/store/" + ds_items[0].split("/store/",
                                                           1)[1].strip()
            idx = sha1(id + ds).hexdigest()
            print(dataset)
            send_payload("ib-dataset-" + week, "relvals-dataset", idx,
                         json.dumps(dataset))
Example #17
0
            sdata[x+"_"+t]=data[0]
    idx = sha1(release + arch + wfnum + s + str(rel_sec)).hexdigest()
    try:send_payload("relvals_stats_summary-"+week,"runtime-stats-summary",idx,json.dumps(sdata))
    except Exception as e: print(e)
  except Exception as e: print(e)
  return

# Driver section: determine the thread count used by the relval jobs in a
# pyRelValPartialLogs directory (cached in threads.txt).
# NOTE(review): this chunk may continue past the visible source; code is
# left untouched and only comments are added.
partial_log_dirpath=argv[1]
# Default worker count; overridden by the optional second CLI argument.
jobs=6
try: jobs=int(argv[2])
except: jobs=6
items = partial_log_dirpath.split("/")
# Only operate on a directory literally named "pyRelValPartialLogs".
if items[-1]!="pyRelValPartialLogs": exit(1)
# Release and architecture are taken from fixed positions in the path.
release=items[-2]
arch=items[-6]
week, rel_sec  = cmsswIB2Week(release)
rel_msec = rel_sec*1000
# Resource fields expected in the per-workflow stats JSON.
ex_fields=["rss", "vms", "pss", "uss", "shared", "data", "cpu"]
# Cache the --nThreads value from the last cmdLog into threads.txt.
if not exists("%s/threads.txt" % partial_log_dirpath):
   e, o = run_cmd("grep ' --nThreads ' %s/*/cmdLog  | tail -1  | sed 's|.* *--nThreads *||;s| .*||'" % partial_log_dirpath)
   if e:
     print(o)
     exit(1)
   # No --nThreads found anywhere: default to single-threaded.
   if not o: o="1"
   run_cmd("echo %s > %s/threads.txt" % (o, partial_log_dirpath))

e, o = run_cmd("head -1 %s/threads.txt" % partial_log_dirpath)
if e:
  print(o)
  exit(1)
cmsThreads = o.strip('\n')