Example #1
def backport_pull (repo, pr, branch):
  pr_branch = pr.base.label.split(":")[1]
  print("Source Branch:",pr_branch)
  if pr_branch == branch: return "Warning: Can not backport, same branch %s vs %s" % (pr_branch, branch),False
  br = gh_repo.get_branch(branch)
  commits = []
  for c in pr.get_commits().reversed: commits.insert(0,"git cherry-pick %s" % c.sha)
  if not commits: return "There are no commits to backport",False
  print("Cherry-pick commands:")
  print("  "+"\n  ".join(commits))
  if len(commits)>=250:
    return "Error: Too many commits (%s) in PR.\nBot can only handle max 250 commits." % len(commits),False
  new_branch = "backport-%s-%s" % (branch.replace("/","_"), pr.number)
  print("New Branch:",new_branch)
  git_ref = ""
  if repo.name == "cmssw": git_ref = "--reference "+CMSSW_GIT_REF
  print("GIT REF:",git_ref)
  e , o = run_cmd("rm -rf pr_backport; git clone --branch %s %s [email protected]:%s pr_backport && cd pr_backport && git checkout -b %s" % (branch, git_ref, repo.full_name, new_branch))
  if e:
    print(o)
    exit(1)
  e, o = run_cmd('cd pr_backport; %s' % ";".join(commits))
  if e: return "Error: Failed to cherry-pick commits. Please backport this PR yourself.\n```"+o+"\n```",False
  e, o = run_cmd("cd pr_backport; git push origin %s" % new_branch)
  if e:
    print(o)
    exit(1)
  run_cmd("rm -rf pr_backport")
  newBody = "backport of #%s\n\n%s" %(pr.number, pr.body)
  newPR = repo.create_pull(title = pr.title, body = newBody, base = branch, head = new_branch )
  return "Successfully backported PR #%s as #%s for branch %s" % (pr.number, newPR.number, branch),True
Example #2
def cleanup_exit(msg, tmpdirs=None, image_hash="", exit_code=1):
  if not tmpdirs:
    tmpdirs = []
  if msg: print(msg)
  for tdir in tmpdirs: run_cmd("rm -rf %s" % tdir)
  if image_hash: run_cmd("docker rm -f %s" % image_hash)
  exit(exit_code)
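A hedged usage sketch; the paths and container hash below are placeholders.

# Clean up two scratch directories and force-remove a docker container, then exit 2.
cleanup_exit("build failed, cleaning up",
             tmpdirs=["/tmp/build-area", "/tmp/test-area"],
             image_hash="3f2c1ab",
             exit_code=2)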
Example #3
def write_json(outfile, cache):
  outdir = dirname(outfile)
  if not exists(outdir): run_cmd("mkdir -p %s" % outdir)
  with open(outfile, 'w') as ofile:
    ofile.write(json.dumps(cache, sort_keys=True, indent=2, separators=(',', ': ')))
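A short usage sketch, assuming json and the dirname/exists imports the snippet relies on; the output path is a placeholder.

# Parent directories are created on demand via "mkdir -p".
write_json("/tmp/cms-bot-cache/stats.json", {"passed": 10, "failed": 2})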
Example #4
def fix_lognames(workflow_dir):
  workflow_id = os.path.basename(workflow_dir).split("_",1)[1]
  for log in glob.glob(os.path.join(workflow_dir,"step*_*.log")):
    logname = os.path.basename(log)
    step = logname.split("_",1)[0]
    deslog = step+".log"
    if logname.endswith('_dasquery.log'): deslog = '%s_%s.log' % (step, workflow_id)
    run_cmd("cp %s %s/%s" % (log, workflow_dir, deslog))
Example #5
def cleanup_exit(msg, tmpdirs=None, image_hash="", exit_code=1):
    if not tmpdirs:
        tmpdirs = []
    if msg: print(msg)
    for tdir in tmpdirs:
        run_cmd("rm -rf %s" % tdir)
    if image_hash: run_cmd("docker rm -f %s" % image_hash)
    exit(exit_code)
Example #6
def update_worklog(workflow_dir, jobs):
  if not jobs["commands"]: return False
  workflow_logfile=os.path.join(workflow_dir,"workflow.log")
  if not os.path.exists(workflow_logfile): return False
  workflow_time=0
  exit_codes=""
  test_passed=""
  test_failed=""
  steps_res=[]
  failed=False
  step_num = 0
  for job in jobs["commands"]:
    step_num+=1
    try:
      cmd_step = int(job['command'].split(" step",1)[-1].strip().split(" ")[0])
      while cmd_step>step_num:
        das_log = os.path.join(workflow_dir,"step%s_dasquery.log" % step_num)
        step_num+=1
        if os.path.exists(das_log):
          e, o = run_cmd("grep ' tests passed,' %s" % workflow_logfile)
          if o=="": return False
          ecodes = o.split()
          if ecodes[step_num-2]=="0":
            exit_codes+=" 1"
            test_passed+=" 0"
            test_failed+=" 1"
            failed=True
            steps_res.append("FAILED")
            continue
        exit_codes+=" 0"
        test_passed+=" 1"
        test_failed+=" 0"
        steps_res.append("PASSED")
    except Exception as e:
      pass
    if job["exit_code"]==-1: failed=True
    if job["exit_code"]>0:
      exit_codes+=" "+str(job["exit_code"])
      test_passed+=" 0"
      test_failed+=" 1"
      failed=True
      steps_res.append("FAILED")
    else:
      exit_codes+=" 0"
      test_failed+=" 0"
      if failed: test_passed+=" 0"
      else: test_passed+=" 1"
      steps_res.append("NORUN" if failed else "PASSED")
  step_str = ""
  for step, res in enumerate(steps_res): step_str = "%s Step%s-%s" % (step_str, step, res)
  e, o = run_cmd("grep ' exit: ' %s | sed 's|exit:.*$|exit: %s|'" % (workflow_logfile, exit_codes.strip()))
  o = re.sub(r"\s+Step0-.+\s+-\s+time\s+", step_str+"  - time ", o)
  wfile = open(workflow_logfile,"w")
  wfile.write(o+"\n")
  wfile.write("%s tests passed, %s failed\n" % (test_passed.strip(), test_failed.strip()))
  wfile.close()
  return True
Example #7
def send_comparison_ready_message(repo, pr_number, tests_results_url, comparison_errors_file, wfs_with_das_inconsistency_file, missing_map ):
  pull_request = repo.get_pull(pr_number)
  message = COMPARISON_READY_MSG +'\n' + tests_results_url

  wfs_with_errors = ''
  for line in open( comparison_errors_file ):
    line = line.rstrip()
    parts = line.split( ';' )
    wf = parts[ 0 ]
    step = parts[ 1 ]
    wfs_with_errors += ( wf + ' step ' + step + '\n' )

  if wfs_with_errors != '':
    error_info = COMPARISON_INCOMPLETE_MSG.format( workflows=wfs_with_errors )
    message += '\n\n' + error_info

  wfs_das_inconsistency = open( wfs_with_das_inconsistency_file ).readline().rstrip().rstrip(',').split( ',' )

  if '' in wfs_das_inconsistency:
    wfs_das_inconsistency.remove( '' )

  if wfs_das_inconsistency:
    das_inconsistency_info = DAS_INCONSISTENCY_MSG.format( workflows=', '.join( wfs_das_inconsistency ) )
    message += '\n\n' + das_inconsistency_info

  if missing_map and exists (missing_map):
    missing = []
    for line in open(missing_map):
      line = line.strip()
      if line: missing.append("   * "+line)
    if missing:
      from categories import COMPARISON_MISSING_MAP
      map_notify = ", ".join([ "@"+u for u in COMPARISON_MISSING_MAP] )
      message += "\n\n"+map_notify+" comparisons for the following workflows were not done due to missing matrix map:\n"+"\n".join(missing)

  alt_comp_dir = join(dirname(comparison_errors_file), "upload","alternative-comparisons")
  print("Alt comparison directory: ",alt_comp_dir)
  if exists(alt_comp_dir):
    err, out = run_cmd("grep ' Compilation failed' %s/runDQMComp-*.log" % alt_comp_dir)
    print(out)
    if not err:
      err_wfs = {}
      for line in out.split("\n"):
        wf = line.split(".log:",1)[0].split("runDQMComp-")[-1]
        err_wfs[wf] = 1
      if err_wfs: message += "\n\nAlternative comparison failed for workflow(s):\n"+"\n".join(list(err_wfs.keys()))

  JRCompSummaryLog = join(dirname(comparison_errors_file), "upload/validateJR/qaResultsSummary.log")
  print("JR comparison Summary: ",JRCompSummaryLog)
  if exists(JRCompSummaryLog):
    err, out = run_cmd("cat %s" % JRCompSummaryLog)
    if (not err) and out:
      message += "\n\nComparison Summary:\n"
      for l in out.split("\n"):
        if l.strip(): message += " - %s\n" % l.strip()

  send_message_pr( pull_request, message )
Example #8
def createJob(workflow, cmssw_ver, arch):
  workflow_args = FixWFArgs(cmssw_ver, arch, workflow, GetMatrixOptions(cmssw_ver, arch))
  cmd = format("rm -rf %(workflow)s %(workflow)s_*; mkdir %(workflow)s; cd %(workflow)s; PATH=%(das_utils)s:$PATH runTheMatrix.py --maxSteps=0 -l %(workflow)s %(workflow_args)s",workflow=workflow,workflow_args=workflow_args, das_utils=CMS_BOT_DIR+"/das-utils")
  print("Running ",cmd)
  e, o = run_cmd(cmd)
  if e: print("ERROR:%s:%s" % (workflow, o))
  try:
    workflow_dir = glob.glob(format("%(workflow)s/%(workflow)s_*", workflow=workflow))[0]
    run_cmd(format("mv %(workflow)s/runall-report-step123-.log %(workflow_dir)s/workflow.log; touch %(workflow_dir)s/cmdLog; mv %(workflow_dir)s .; rm -rf %(workflow)s", workflow=workflow, workflow_dir=workflow_dir))
    print("Commands for workflow %s generated" % workflow)
  except Exception as e:
    print("ERROR: Creating workflow job:",workflow,str(e))
    run_cmd("rm -rf %s %s_*" % (workflow,workflow))
Example #9
 def run(self):
     IBThreadBase.run(self)
     if platform.system() == 'Darwin':
         print('unitTest> Skipping unit tests for MacOS')
         return
     if (self.xType == 'GPU') or ("_GPU_X" in os.environ["CMSSW_VERSION"]):
         cmd = "cd " + self.startDir + "; scram b -f echo_cuda_USED_BY | tr ' ' '\\n' | grep '^\\(self\\|cmssw\\)/' | cut -d/ -f2-3 > cuda_pkgs.txt; "
         cmd = cmd + " cat  cuda_pkgs.txt; mv src src.full;"
         cmd = cmd + " for p in $(cat cuda_pkgs.txt); do mkdir -p src/${p} ; rsync -a src.full/${p}/ src/${p}/ ; done ; scram build -r echo_CXX"
         ret = runCmd(cmd)
         if ret != 0:
             print("ERROR when getting GPU unit-tests sources: cmd returned " + str(ret))
     skiptests = ""
     if 'lxplus' in getHostName(): skiptests = 'SKIP_UNITTESTS=ExpressionEvaluatorUnitTest'
     TEST_PATH = os.environ['CMSSW_RELEASE_BASE'] + "/test/" + os.environ['SCRAM_ARCH']
     err, cmd = run_cmd(
         "cd " + self.startDir + ";scram tool info cmssw 2>&1 | grep CMSSW_BASE= | sed 's|^CMSSW_BASE=||'")
     if cmd: TEST_PATH = TEST_PATH + ":" + cmd + "/test/" + os.environ['SCRAM_ARCH']
     print(TEST_PATH)
     try:
         cmd = "cd " + self.startDir + "; touch nodelete.root nodelete.txt nodelete.log;  sed -i -e 's|testing.log; *$(CMD_rm)  *-f  *$($(1)_objdir)/testing.log;|testing.log;|;s|test $(1) had ERRORS\") *\&\&|test $(1) had ERRORS\" >> $($(1)_objdir)/testing.log) \&\&|' config/SCRAM/GMake/Makefile.rules; "
         cmd += " if which timeout 2>/dev/null; then TIMEOUT=timeout; fi ; "
         cmd += 'PATH=' + TEST_PATH + ':$PATH ${TIMEOUT+timeout 3h} scram b -f -k -j ' + str(MachineCPUCount) + ' unittests ' + skiptests + ' >unitTests1.log 2>&1 ; '
         cmd += 'touch nodelete.done; ls -l nodelete.*'
         print('unitTest> Going to run ' + cmd)
         ret = runCmd(cmd)
         if ret != 0:
             print("ERROR when running unit-tests: cmd returned " + str(ret))
     except Exception as e:
         print("ERROR during runtests : caught exception: " + str(e))
         pass
     try:
         testLog = self.startDir + '/tmp/' + os.environ['SCRAM_ARCH'] + '/src/'
         logFile = self.startDir + '/unitTests.log'
         runCmd('rm -f %s; touch %s' % (logFile, logFile))
         for packDir in glob.glob(testLog + '*/*'):
             pack = packDir.replace(testLog, '')
             runCmd("echo '>> Entering Package %s' >> %s" % (pack, logFile))
             packDir += '/test'
             if os.path.exists(packDir):
                 err, testFiles = run_cmd('find ' + packDir + ' -maxdepth 2 -mindepth 2 -name testing.log -type f')
                 for lFile in testFiles.strip().split('\n'):
                     if lFile: runCmd("cat %s >> %s" % (lFile, logFile))
             runCmd("echo '>> Leaving Package %s' >> %s" % (pack, logFile))
             runCmd("echo '>> Tests for package %s ran.' >> %s" % (pack, logFile))
     except Exception as e:
         pass
     self.checkTestLogs()
     self.logger.updateUnitTestLogs(self.xType)
     return
Example #10
def upload_logs(workflow, workflow_dir,exit_code):
  files_to_keep = [ ".txt", ".xml", ".log", ".py", ".json","/cmdLog", "/hostname",".done" ]
  basedir = os.path.dirname(workflow_dir)
  for wf_file in glob.glob("%s/*" % workflow_dir):
    found=False
    for ext in files_to_keep:
      if wf_file.endswith(ext):
        found=True
        break
    if not found:
      print("Removing ",wf_file)
      run_cmd("rm -rf %s" % wf_file)
  logger=LogUpdater(dirIn=os.environ["CMSSW_BASE"])
  logger.updateRelValMatrixPartialLogs(basedir, os.path.basename(workflow_dir))
Example #11
 def run(self):
     IBThreadBase.run(self)
     if platform.system() == 'Darwin':
         print('unitTest> Skipping unit tests for MacOS')
         return
     if self.xType == 'GPU':
         cmd = "cd " + self.startDir + "; scram b -f echo_cuda_USED_BY | tr ' ' '\\n' | grep '^self/' | cut -d/ -f2-3 > cuda_pkgs.txt; mv src src.full;"
         cmd = cmd + " for p in $(cat cuda_pkgs.txt); do mkdir -p src/${p} ; rsync -a src.full/${p}/ src/${p}/ ; done ; scram build -r echo_CXX"
         ret = runCmd(cmd)
         if ret != 0:
             print("ERROR when getting GPU unit-tests sources: cmd returned " + str(ret))
     skiptests = ""
     if 'lxplus' in getHostName(): skiptests = 'SKIP_UNITTESTS=ExpressionEvaluatorUnitTest'
     TEST_PATH = os.environ['CMSSW_RELEASE_BASE'] + "/test/" + os.environ['SCRAM_ARCH']
     err, cmd = run_cmd(
         "cd " + self.startDir + ";scram tool info cmssw 2>&1 | grep CMSSW_BASE= | sed 's|^CMSSW_BASE=||'")
     if cmd: TEST_PATH = TEST_PATH + ":" + cmd + "/test/" + os.environ['SCRAM_ARCH']
     print(TEST_PATH)
     try:
         cmd = "cd " + self.startDir + "; touch nodelete.root nodelete.txt nodelete.log;  sed -i -e 's|testing.log; *$(CMD_rm)  *-f  *$($(1)_objdir)/testing.log;|testing.log;|;s|test $(1) had ERRORS\") *\&\&|test $(1) had ERRORS\" >> $($(1)_objdir)/testing.log) \&\&|' config/SCRAM/GMake/Makefile.rules; "
         cmd += " if which timeout 2>/dev/null; then TIMEOUT=timeout; fi ; "
         cmd += 'PATH=' + TEST_PATH + ':$PATH ${TIMEOUT+timeout 3h} scram b -f -k -j ' + str(
             MachineCPUCount) + ' unittests ' + skiptests + ' >unitTests1.log 2>&1 ; '
         cmd += 'touch nodelete.done; ls -l nodelete.*'
         print('unitTest> Going to run ' + cmd)
         ret = runCmd(cmd)
         if ret != 0:
             print("ERROR when running unit-tests: cmd returned " + str(ret))
     except Exception as e:
         print("ERROR during runtests : caught exception: " + str(e))
         pass
     try:
         testLog = self.startDir + '/tmp/' + os.environ['SCRAM_ARCH'] + '/src/'
         logFile = self.startDir + '/unitTests.log'
         runCmd('rm -f %s; touch %s' % (logFile, logFile))
         for packDir in glob.glob(testLog + '*/*'):
             pack = packDir.replace(testLog, '')
             runCmd("echo '>> Entering Package %s' >> %s" % (pack, logFile))
             packDir += '/test'
             if os.path.exists(packDir):
                 err, testFiles = run_cmd('find ' + packDir + ' -maxdepth 2 -mindepth 2 -name testing.log -type f')
                 for lFile in testFiles.strip().split('\n'):
                     if lFile: runCmd("cat %s >> %s" % (lFile, logFile))
             runCmd("echo '>> Leaving Package %s' >> %s" % (pack, logFile))
             runCmd("echo '>> Tests for package %s ran.' >> %s" % (pack, logFile))
     except Exception as e:
         pass
     self.checkTestLogs()
     self.logger.updateUnitTestLogs(self.xType)
     return
Example #12
def doCmd(cmd, dryRun=False, inDir=None, debug=True):
    if not inDir:
        if debug:
            print("--> " + asctime() + " in ", getcwd(), " executing ", cmd)
    else:
        if debug:
            print("--> " + asctime() + " in " + inDir + " executing ", cmd)
        cmd = "cd " + inDir + "; " + cmd
    sys.stdout.flush()
    sys.stderr.flush()
    start = time()
    ret = 0
    outX = ""
    while cmd.endswith(";"):
        cmd = cmd[:-1]
    if dryRun:
        print("DryRun for: " + cmd)
    else:
        ret, outX = run_cmd(cmd)
        if debug:
            print(outX)
    stop = time()
    if debug:
        print("--> " + asctime() + " cmd took", stop - start,
              "sec. (" + strftime("%H:%M:%S", gmtime(stop - start)) + ")")
    sys.stdout.flush()
    sys.stderr.flush()
    return (ret, outX)
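A usage sketch; only the function above and run_cmd are needed, and the path is a placeholder.

# Run a command inside a directory with timing output (debug=True is the default).
ret, out = doCmd("git status --short", inDir="/tmp/some-clone")
if ret:
    print("command failed:\n" + out)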
Example #13
def upload_logs(workflow, workflow_dir,exit_code):
  files_to_keep = [ ".txt", ".xml", ".log", ".py", ".json","/cmdLog", "/hostname",".done" ]
  if (exit_code in [34304, 35584, 22016]) and os.getenv("CMSSW_VERSION","").startswith("CMSSW_10_1_"):
    files_to_keep.append(".root")
  basedir = os.path.dirname(workflow_dir)
  for wf_file in glob.glob("%s/*" % workflow_dir):
    found=False
    for ext in files_to_keep:
      if wf_file.endswith(ext):
        found=True
        break
    if not found:
      print("Removing ",wf_file)
      run_cmd("rm -rf %s" % wf_file)
  logger=LogUpdater(dirIn=os.environ["CMSSW_BASE"])
  logger.updateRelValMatrixPartialLogs(basedir, os.path.basename(workflow_dir))
Example #14
def runCmd(cmd):
    while cmd.endswith(";"):
        cmd = cmd[:-1]
    print("Running cmd> ", cmd)
    ret, out = run_cmd(cmd)
    if out: print(out)
    return ret
Example #15
def get_merge_prs(prev_tag,
                  this_tag,
                  git_dir,
                  cmsprs,
                  cache={},
                  repo_name=None):
    print("Getting merged Pull Requests b/w", prev_tag, this_tag)
    cmd = format(
        "GIT_DIR=%(git_dir)s"
        " git log --graph --merges --pretty='%%s: %%P' %(previous)s..%(release)s | "
        " grep ' Merge pull request #[1-9][0-9]* from ' | "
        " sed 's|^.* Merge pull request #||' | "
        " sed 's|Dr15Jones:clangRecoParticleFlowPFProducer:|Dr15Jones/clangRecoParticleFlowPFProducer:|' | "
        " sed 's|/[^:]*:||;s|from ||'",
        git_dir=git_dir,
        previous=prev_tag,
        release=this_tag)
    error, notes = run_cmd(cmd)
    print("Getting Merged Commits:", cmd)
    print(notes)
    if error:
        print("Error while getting release notes.")
        print(notes)
        exit(1)
    if not repo_name:
        repo_name = basename(git_dir[:-4])
    return fill_notes_description(notes, "cms-sw/" + repo_name, cmsprs, cache)
Example #16
def get_docker_token(repo):
  print("Getting docker.io token ....")
  e, o = run_cmd('curl --silent --request "GET" "https://auth.docker.io/token?service=registry.docker.io&scope=repository:%s:pull"' % repo)
  if e:
    print(o)
    exit(1)
  return loads(o)['token']
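A hedged usage sketch; the anonymous auth.docker.io endpoint only grants pull scope for the named repository, and the repository name is an example.

token = get_docker_token("library/ubuntu")
print(token[:16] + "...")  # tokens are long; show a prefix only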
Example #17
def load_graph(release_queue, maxNodes):
    command = MAGIC_COMMAND_GRAPH.replace('RELEASE_QUEUE', release_queue)

    error, out = run_cmd(command)

    prev_node_lane = {}

    previous_lane = 1
    node_number = 0

    all_nodes = {}

    for line in out.splitlines():
        if maxNodes != -1 and node_number > maxNodes:
            identify_automated_merges(all_nodes)
            return all_nodes
        #check if the line contains a node
        if INFO_SEPARATOR in line:

            node_number += 1
            line_parts = line.split(INFO_SEPARATOR)
            lanes = line_parts[0].replace('"', '').replace(' ', '')
            lane = lanes.index('*') + 1

            node_info = line_parts[1]
            node_info_parts = node_info.split(",")

            #hash, description
            new_node = Node(node_info_parts[0], node_info_parts[1], lane)
            all_nodes[node_info_parts[0]] = new_node

            # for the first node, just add it without any connection
            if node_number == 1:
                set_previous_node_lane(prev_node_lane, lane, new_node)
                continue

            # changed lane?
            if previous_lane < lane:
                # connect this node with the previous one from the previous lane
                previous_node = get_previous_node_lane(prev_node_lane,
                                                       previous_lane)
            else:
                # connect this node with the previous one from the same lane
                previous_node = get_previous_node_lane(prev_node_lane, lane)

            if previous_node is None:
                set_previous_node_lane(prev_node_lane, lane, new_node)
                previous_lane = lane
                continue

            link_nodes(new_node, previous_node)
            set_previous_node_lane(prev_node_lane, lane, new_node)

            all_nodes[node_info_parts[0]] = new_node
            previous_lane = lane

    identify_automated_merges(all_nodes)

    return all_nodes
Example #18
def get_ref_commit(repo, ref):
    for n in ["tags", "heads"]:
        error, out = run_cmd("curl -s -L https://api.github.com/repos/%s/git/refs/%s/%s" % (repo, n, ref))
        if not error:
            info = json.loads(out)
            if "object" in info: return info["object"]["sha"]
    print("Error: Unable to get sha for %s" % ref)
    return None
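Usage sketch: the function tries refs/tags first and then refs/heads, so both tag and branch names resolve; the names below are examples.

sha = get_ref_commit("cms-sw/cmssw", "master")
if sha: print("HEAD of master:", sha)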
Example #19
def doDu(what):
  error, out = run_cmd('du -k -s %s' % what)
  if error:
    print("Error while getting directory size.")
    sys.exit(1)
  results = [l.split() for l in out.split("\n") if l.strip()]
  return dict([(pkg.strip().replace("src/", ''), int(sz.strip()) * 1024)
               for (sz, pkg) in results])
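With the parenthesis fix above, a usage sketch; the package list is hypothetical and sizes come back in bytes.

sizes = doDu("src/FWCore src/DataFormats")
for pkg, size_bytes in sorted(sizes.items()):
    print("%-30s %d KB" % (pkg, size_bytes // 1024))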
Example #20
def get_unused_days(eosfile):
    e, o = run_cmd(
        "%s fileinfo %s | grep 'Modify:' | sed 's|.* Timestamp: ||'" %
        (eos_cmd, eosfile))
    if e or (o == ""):
        print("Error: Getting timestamp for %s\n%s" % (eosfile, o))
        return -1
    return int((time() - float(o)) / 86400)
Example #21
def cmd_to_addon_test(command, addon_dir):
  commandbase = command.replace(' ','_').replace('/','_')
  logfile='%s.log' % commandbase[:150].replace("'",'').replace('"','').replace('../','')
  e, o = run_cmd("ls -d %s/*/%s 2>/dev/null | tail -1" % (addon_dir, logfile))
  if e or (o==""):
    print("ERROR: %s -> %s" % (command, o))
    return ("", "")
  return (o.split("/")[-2], get_wf_error_msg(o, False).strip())
Example #22
 def get_command(self):
     if self.input: return self.input.readline().strip()
     cFile = 'auto-load'
     while not exists(cFile):
         sleep(0.2)
     sleep(0.5)
     o, cmd = run_cmd("head -1 %s; rm -f %s" % (cFile, cFile))
     return cmd.strip()
Example #23
def doDu(what):
    error, out = run_cmd('du -k -s %s' % what)
    if error:
        print("Error while getting directory size.")
        sys.exit(1)
    results = [l.split() for l in out.split("\n") if l.strip()]
    return dict([(pkg.strip().replace("src/", ''), int(sz.strip()) * 1024)
                 for (sz, pkg) in results])
Example #24
def get_git_mt(path, filename):
    status, rez = run_cmd(
        'cd %s; git log -1 --format="%%ad" --date=unix -- %s' %
        (path, filename))
    if status != 0:
        print("ERROR, " + rez)
        sys.exit(1)  # todo throws an exception
    return rez
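Usage sketch: git log with --date=unix yields an epoch string, so the result converts cleanly to int; path and filename are placeholders.

mtime = int(get_git_mt("/tmp/cmssw-clone", "README.md"))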
Example #25
def get_docker_manifest(repo, tag):
  token = get_docker_token(repo)
  print("Getting manifest for %s/%s" % (repo, tag))
  e, o = run_cmd('curl --silent --request "GET" --header "Authorization: Bearer %s" "https://registry-1.docker.io/v2/%s/manifests/%s"' % (token, repo, tag))
  if e:
    print(o)
    exit(1)
  return loads(o)
Example #26
def get_commit_info(repo, commit):
    error, out = run_cmd("curl -s -L https://api.github.com/repos/%s/git/commits/%s" % (repo, commit))
    if error:
        print("Error, unable to get info for commit %s" % commit)
        return {}
    commit_info = json.loads(out)
    if "sha" in commit_info: return commit_info
    return {}
Example #27
def get_docker_token(repo):
    print("Getting docker.io token ....")
    e, o = run_cmd(
        'curl --silent --request "GET" "https://auth.docker.io/token?service=registry.docker.io&scope=repository:%s:pull"'
        % repo)
    if e:
        print(o)
        exit(1)
    return loads(o)['token']
Example #28
def update_timestamp(timestamps, timestamps_file, store):
    e, o = run_cmd("find %s -name '*.timestamp'" % store)
    for ts_file in o.split("\n"):
        if not ts_file.endswith('.timestamp'): continue
        sha = basename(ts_file).replace(".timestamp", "")
        with open(ts_file) as f:
            timestamps[sha] = int(float(f.readlines()[0].strip()))
    write_json(timestamps_file, timestamps)
    cleanup_timestamps(store)
Example #29
def get_docker_manifest(repo, tag):
    token = get_docker_token(repo)
    print("Getting manifest for %s/%s" % (repo, tag))
    e, o = run_cmd(
        'curl --silent --request "GET" --header "Authorization: Bearer %s" "https://registry-1.docker.io/v2/%s/manifests/%s"'
        % (token, repo, tag))
    if e:
        print(o)
        exit(1)
    return loads(o)
Example #30
def backport_pull(repo, pr, branch):
    pr_branch = pr.base.label.split(":")[1]
    print("Source Branch:", pr_branch)
    if pr_branch == branch:
        return "Warning: Can not backport, same branch %s vs %s" % (
            pr_branch, branch), False
    br = gh_repo.get_branch(branch)
    commits = []
    for c in pr.get_commits().reversed:
        commits.insert(0, "git cherry-pick %s" % c.sha)
    if not commits: return "There are no commits to backport", False
    print("Cherry-pick commands:")
    print("  " + "\n  ".join(commits))
    if len(commits) >= 250:
        return "Error: Too many commits (%s) in PR.\nBot can only handle max 250 commits." % len(
            commits), False
    new_branch = "backport-%s-%s" % (branch.replace("/", "_"), pr.number)
    print("New Branch:", new_branch)
    git_ref = ""
    if repo.name == "cmssw": git_ref = "--reference " + CMSSW_GIT_REF
    print("GIT REF:", git_ref)
    e, o = run_cmd(
        "rm -rf pr_backport; git clone --branch %s %s [email protected]:%s pr_backport && cd pr_backport && git checkout -b %s"
        % (branch, git_ref, repo.full_name, new_branch))
    if e:
        print(o)
        exit(1)
    e, o = run_cmd('cd pr_backport; %s' % ";".join(commits))
    if e:
        return "Error: Failed to cherry-pick commits. Please backport this PR yourself.\n```" + o + "\n```", False
    e, o = run_cmd("cd pr_backport; git push origin %s" % new_branch)
    if e:
        print(o)
        exit(1)
    run_cmd("rm -rf pr_backport")
    newBody = "backport of #%s\n\n%s" % (pr.number, pr.body)
    newPR = repo.create_pull(title=pr.title,
                             body=newBody,
                             base=branch,
                             head=new_branch)
    return "Successfully backported PR #%s as #%s for branch %s" % (
        pr.number, newPR.number, branch), True
Example #31
 def update_runall(self):
     self.update_known_errors()
     runall = os.path.join(self.outdir, "runall-report-step123-.log")
     outFile = open(runall + ".tmp", "w")
     status_ok = []
     status_err = []
     len_ok = 0
     len_err = 0
     for logFile in glob.glob(self.basedir + '/*/workflow.log'):
         inFile = open(logFile)
         for line in inFile:
             if re.match("^\s*(\d+\s+)+tests passed,\s+(\d+\s+)+failed\s*$",
                         line):
                 res = line.strip().split(" tests passed, ")
                 res[0] = res[0].split()
                 res[1] = res[1].replace(" failed", "").split()
                 len_res = len(res[0])
                 if len_res > len_ok:
                     for i in range(len_ok, len_res):
                         status_ok.append(0)
                     len_ok = len_res
                 for i in range(0, len_res):
                     status_ok[i] = status_ok[i] + int(res[0][i])
                 len_res = len(res[1])
                 if len_res > len_err:
                     for i in range(len_err, len_res):
                         status_err.append(0)
                     len_err = len_res
                 for i in range(0, len_res):
                     status_err[i] = status_err[i] + int(res[1][i])
             else:
                 outFile.write(line)
         inFile.close()
     outFile.write(" ".join(str(x) for x in status_ok) + " tests passed, " +
                   " ".join(str(x) for x in status_err) + " failed\n")
     outFile.close()
     save = True
     if os.path.exists(runall):
         e, o = run_cmd("diff %s.tmp %s | wc -l" % (runall, runall))
         if o == "0": save = False
     if save: run_cmd("mv %s.tmp %s" % (runall, runall))
     return
Example #32
def _getCPUCount():
    cmd = "nproc"
    if platform == "darwin":
        cmd = "sysctl -n hw.ncpu"
    error, count = run_cmd(cmd)
    if error:
        print("Warning: unable to detect cpu count. Using 4 as default value")
        out = "4"
    if not count.isdigit():
        return 4
    return int(count)
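Usage sketch; on Linux this shells out to nproc and on macOS to sysctl, falling back to 4 on failure. MachineCPUCount matches the name used by other snippets here.

MachineCPUCount = _getCPUCount()
print("Building with -j%d" % MachineCPUCount)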
Example #33
def get_repos(user, cache):
    if user not in cache:
        cache[user] = []
        url = 'https://hub.docker.com/v2/repositories/%s?page_size=100' % user
        while True:
            e, o = run_cmd('curl -s -L %s' % url)
            repo_data = json.loads(o)
            if "results" in repo_data:
                for r in repo_data["results"]:
                    cache[user].append(r["name"])
            if "next" in repo_data and repo_data["next"]:
                url = repo_data["next"]
            else:
                break
    return cache[user]
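A usage sketch against Docker Hub's public v2 API; the user name is an example and repeat calls for the same user are served from cache.

cache = {}
for name in get_repos("cmssw", cache):
    print("cmssw/%s" % name)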
Example #34
def get_repos(user, cache):
  if user not in cache:
    cache[user] = []
    url = 'https://hub.docker.com/v2/repositories/%s?page_size=100' % user
    while True:
      e , o = run_cmd('curl -s -L %s' % url)
      repo_data = json.loads(o)
      if "results" in repo_data:
        for r in repo_data["results"]:
          cache[user].append(r["name"])
      if "next" in repo_data and repo_data["next"]:
        url = repo_data["next"]
      else:
        break
  return cache[user]
Example #35
def _memorySizeGB():
    cmd = ""
    if platform == "darwin":
      cmd = "sysctl -n hw.memsize"
    elif platform.startswith("linux"):
      cmd = "free -t -m | grep '^Mem: *' | awk '{print $2}'"
    error, out = run_cmd(cmd)
    if error:
      print("Warning: unable to detect memory info. Using 8GB as default value")
      return 8
    if not out.isdigit():
      return 8
    from math import ceil
    # note: on macOS "sysctl -n hw.memsize" reports bytes (not MB), so this branch over-counts there
    count = int(ceil(float(out) / 1024))
    if count == 0: count = 1
    return count
Example #36
def auto_node_schedule(auto_jobs):
    count = 0
    for job in auto_jobs:
        jid = auto_jobs[job]
        err, out = run_cmd("cat %s/jenkins/find-jenkins-job.groovy | %s groovy = '%s' 'JENKINS_DYNAMIC_JOB_ID=%s'" % (CMS_BOT_DIR, environ['JENKINS_CLI_CMD'], job, jid))
        if err:
            count += 1
            prop_file = "jenkins-trigger-dynamic-job-%s.txt" % count
            jpram = join(SCRIPT_DIR, 'auto-nodes', job)
            run_cmd("echo 'JENKINS_DYNAMIC_JOB_NAME=%s' > %s" % (job, prop_file))
            run_cmd("echo 'JENKINS_DYNAMIC_JOB_ID=%s' >> %s" % (jid, prop_file))
            if exists(jpram):
                run_cmd("cat %s >> %s" % (jpram, prop_file))
        else:
            print(out)
    return
Example #37
def get_merge_prs(prev_tag, this_tag, git_dir, cmsprs, cache={}, repo_name=None):
    print("Getting merged Pull Requests b/w", prev_tag, this_tag)
    cmd = format("GIT_DIR=%(git_dir)s"
                 " git log --graph --merges --pretty='%%s: %%P' %(previous)s..%(release)s | "
                 " grep ' Merge pull request #[1-9][0-9]* from ' | "
                 " sed 's|^.* Merge pull request #||' | "
                 " sed 's|Dr15Jones:clangRecoParticleFlowPFProducer:|Dr15Jones/clangRecoParticleFlowPFProducer:|' | "
                 " sed 's|/[^:]*:||;s|from ||'",
                 git_dir=git_dir,
                 previous=prev_tag,
                 release=this_tag)
    error, notes = run_cmd(cmd)
    print("Getting Merged Commits:", cmd)
    print(notes)
    if error:
        print("Error while getting release notes.")
        print(notes)
        exit(1)
    if not repo_name:
        repo_name = basename(git_dir[:-4])
    return fill_notes_description(notes, "cms-sw/" + repo_name, cmsprs, cache)
Example #38
def parse_workflows(workflow_file):
  err, out = run_cmd("cat %s" % workflow_file)
  if err:
    print(out)
    exit(1)

  wf = ""
  wfs = {}
  steps = 0
  for line in out.split("\n"):
    line = line.strip()
    m = re.match(r"^.*\[(\d+)\] *: *(.+)$", line)
    if not m: continue
    step = m.group(1)
    cmd = m.group(2).strip()
    prefix, rest = line.split(":", 1)
    items = prefix.split(" ")
    if re.match(r"^\d+(\.\d+|)$", items[0]): wf = items[0]
    if wf not in wfs: wfs[wf] = {}
    wfs[wf][step] = re.sub("  +", " ", cmd)
    steps += 1
  print("%s: %s workflows, %s steps" % (workflow_file, len(wfs), steps))
  return wfs
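Usage sketch; the input is expected to contain runTheMatrix-style lines such as "1234.0 ... [1]: cmsDriver.py ...", and the file name below is a placeholder.

wfs = parse_workflows("runall-report-step123-.log")
for wf in sorted(wfs):
    print(wf, "->", len(wfs[wf]), "steps")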
Example #39
def cust_strip(str_in):
  str_out = str_in.split(' ')
  return str_out[-4].lstrip('/').rstrip('"'), int(str_out[-2])


def map_int_val(pair):
  key , val = pair
  return key , int(val)


#get relevant info
match_hn = re.compile('.*\|\/.*emails')
match_tmp = re.compile('.*\|\/.*Time\(s\)')
temp_fails = []
egrps = []
payload = {}
err , cmd_out = run_cmd('logwatch --range yesterday --detail 10 --service sendmail')
if err:
  sys.exit(1)
for line in cmd_out.split('\n'):
  if re.match(match_tmp,line): temp_fails.append(line)
  elif 'Messages To Recipients:' in line: msgs_num = line
  elif 'Addressed Recipients:' in line : adrpts = line
  elif 'Bytes Transferred:' in line: byttr = line
  elif re.match(match_hn,line): egrps.append(line)

#process info
yesterday = datetime.date.today() - datetime.timedelta(1)
timestp = int(yesterday.strftime("%s")) * 1000
temp_fails = dict(list(map(map_int_val,list((dict([list(map(cust_strip,x.split(' ')[-3:-1])) for x in temp_fails])).items()))))
msgs_num= list(map(str.strip,msgs_num.split(':')))
adrpts = list(map(str.strip, adrpts.split(':')))
Example #40
import sys, glob
from os.path import basename, join
from _py2with3compatibility import run_cmd

WF_PATH=sys.argv[1]
WF_DIR=basename(WF_PATH)
DES_DIR=join(sys.argv[2],WF_DIR)

htmls=[basename(h)[:-5] for h in glob.glob(WF_PATH+"/*.html")]
all_ok=[]
DES_DIR_CREATED=False
for h in sorted(htmls):
  for s in all_ok[::-1]:
    if h.startswith(s):
      h = None
      break
  if not h: continue
  e, o = run_cmd("grep 'Skipped:\|Null:\|Fail:' '%s/%s.html' | wc -l" % (WF_PATH,h))
  if not e:
    if int(o)>0:
      if not DES_DIR_CREATED:
        run_cmd("mkdir -p %s" % DES_DIR)
        DES_DIR_CREATED=True
      run_cmd("mv '%s/%s.html' '%s/%s.html'" % (WF_PATH,h,DES_DIR,h))
    else:
      all_ok.append("%s_" % h)
  else:
    print("ERROR: %s/%s\n%s" %(WF_DIR,h,o))

if DES_DIR_CREATED:
  run_cmd("mv %s/*.png %s/" % (WF_PATH, DES_DIR))
  run_cmd("echo ErrorDocument 404 /SDT/html/pr_comparison_ok.html > %s/.htaccess" % DES_DIR)
print("DONE:", WF_DIR)
Example #41
        errs[isrc][inc]=includes[isrc][inc]

#Free memory
checked = {}
includes = {}
uses = {}
usedby = {}

pkg_errs = {}
for e in errs:
  pkg = '/'.join(e.split('/')[:2])
  if pkg not in pkg_errs: pkg_errs[pkg] = {}
  pkg_errs[pkg][e]=errs[e]

outdir = 'invalid-includes'
run_cmd('rm -f %s; mkdir %s' % (outdir, outdir))
all_count = {}
for p in sorted(pkg_errs):
  all_count[p]=len(pkg_errs[p])
  pdir = join(outdir, p)
  run_cmd('mkdir -p %s' % pdir)
  with open(join(pdir, 'index.html'),'w') as ref:
    ref.write("<html><head></head><body>\n")
    for e in sorted(pkg_errs[p]):
      ref.write("<h3>%s:</h3>\n<ul>\n" % e)
      for inc in sorted(errs[e].keys()):
        url = 'https://github.com/cms-sw/cmssw/blob/%s/%s#L%s' % (environ['CMSSW_VERSION'], e, errs[e][inc])
        ref.write('<li><a href="%s">%s</a></li>\n' % (url, inc))
      ref.write("</ul><br/>\n")
    ref.write("</body></html>\n")
Example #42
def port_pr(repo, pr_num, des_branch, dryRun=False):
    pr = repo.get_pull(pr_num)
    if pr.base.ref == des_branch:
        print("Warning: Requested to make a PR to same branch", pr.base.ref)
        return False
    done_prs_id = get_ported_PRs(repo, pr.base.ref, des_branch)
    if pr_num in done_prs_id:
        print("Already ported as #", done_prs_id[pr.number])
        return True
    branch = repo.get_branch(des_branch)
    print("Preparing checkout area:", pr_num, repo.full_name, pr.head.user.login, pr.head.ref, des_branch)
    prepare_cmd = format(
        "%(cmsbot)s/prepare-repo-clone-for-port.sh %(pr)s %(pr_user)s/%(pr_branch)s %(repo)s %(des_branch)s",
        cmsbot=scriptPath,
        pr=pr_num,
        repo=repo.full_name,
        pr_user=pr.head.user.login,
        pr_branch=pr.head.ref,
        des_branch=des_branch)
    err, out = run_cmd(prepare_cmd)
    print(out)
    if err: return False
    all_commits = set([])
    for c in pr.get_commits():
        all_commits.add(c.sha)
        git_cmd = format("cd %(clone_dir)s; git cherry-pick -x %(commit)s",
                         clone_dir=pr.base.repo.name,
                         commit=c.sha)
        err, out = run_cmd(git_cmd)
        print(out)
        if err: return False
    git_cmd = format("cd %(clone_dir)s; git log %(des_branch)s..",
                     clone_dir=pr.base.repo.name,
                     des_branch=des_branch)
    err, out = run_cmd(git_cmd)
    print(out)
    if err: return False
    last_commit = None
    new_commit = None
    new_commits = {}
    for line in out.split("\n"):
        m = re.match(r'^commit\s+([0-9a-f]+)$', line)
        if m:
            print("New commit:", m.group(1), last_commit)
            if last_commit:
                new_commits[new_commit] = last_commit
            new_commit = m.group(1)
            new_commits[new_commit] = None
            continue
        m = re.match(r'^\s*\(cherry\s+picked\s+from\s+commit\s([0-9a-f]+)\)$', line)
        if m:
            print("found commit", m.group(1))
            last_commit = m.group(1)
    if last_commit: new_commits[new_commit] = last_commit
    if pr.commits != len(new_commits):
        print("Error: PR has ", pr.commits, " commits while we only found ", len(new_commits), ":", new_commits)
    for c in new_commits:
        all_commits.remove(new_commits[c])
    if all_commits:
        print("Something went wrong: Following commists not cherry-picked", all_commits)
        return False
    git_cmd = format("cd %(clone_dir)s; git rev-parse --abbrev-ref HEAD", clone_dir=pr.base.repo.name)
    err, out = run_cmd(git_cmd)
    print(out)
    if err or not out.startswith("port-" + str(pr_num) + "-"): return False
    new_branch = out
    git_cmd = format("cd %(clone_dir)s; git push origin %(new_branch)s",
                     clone_dir=pr.base.repo.name,
                     new_branch=new_branch)
    if not dryRun:
        err, out = run_cmd(git_cmd)
        print(out)
        if err: return False
    else:
        print("DryRun: should have push %s branch" % new_branch)
    from cms_static import GH_CMSSW_ORGANIZATION
    newHead = "%s:%s" % (GH_CMSSW_ORGANIZATION, new_branch)
    newBody = pr.body + "\nAutomatically ported from " + pr.base.ref + " #%s (original by @%s)." % (
    pr_num, str(pr.head.user.login))
    print(newHead)
    print(newBody)
    if not dryRun:
        newPR = repo.create_pull(title=pr.title, body=newBody, base=des_branch, head=newHead)
    else:
        print("DryRun: should have created Pull Request for %s using %s" % (des_branch, newHead))
    print("Every thing looks good")
    git_cmd = format("cd %(clone_dir)s; git branch -d %(new_branch)s",
                     clone_dir=pr.base.repo.name,
                     new_branch=new_branch)
    err, out = run_cmd(git_cmd)
    print("Local branch %s deleted" % new_branch)
    return True
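A hedged driver sketch for port_pr, assuming PyGithub and the helpers the function uses (run_cmd, format, get_ported_PRs, scriptPath) come from the surrounding module; token, repository, PR number and branch are placeholders.

from github import Github

repo = Github(login_or_token="<gh-token>").get_repo("cms-sw/cms-bot")
if port_pr(repo, 100, "some-target-branch", dryRun=True):
    print("dry run OK; re-run with dryRun=False to push and open the PR")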
Example #43
import sys, json
from sys import argv, exit
from os.path import abspath, dirname

sys.path.append(dirname(dirname(abspath(__file__))))  # in order to import top level modules
from _py2with3compatibility import run_cmd

script_path = abspath(dirname(argv[0]))
eos_cmd = "EOS_MGM_URL=root://eoscms.cern.ch /usr/bin/eos"
eos_base = "/eos/cms/store/user/cmsbuild"
unused_days_threshold = 180
try:
  days=int(argv[1])
except:
  days=30
if days<30:
  days=30
if (unused_days_threshold-days)<30: unused_days_threshold=days+30

e , o = run_cmd("PYTHONPATH=%s/.. %s/ib-datasets.py --days %s" % (script_path, script_path, days))
if e:
  print(o)
  exit(1)

jdata = json.loads(o)
used = {}
for o in jdata['hits']['hits']:
  used[o['_source']['lfn'].strip()]=1

e, o = run_cmd("%s find -f %s" % (eos_cmd, eos_base))
if e:
  print(o)
  exit(1)

total = 0
Example #44
  except Exception as e: print(e)
  return

partial_log_dirpath=argv[1]
jobs=6
try: jobs=int(argv[2])
except: jobs=6
items = partial_log_dirpath.split("/")
if items[-1]!="pyRelValPartialLogs": exit(1)
release=items[-2]
arch=items[-6]
week, rel_sec  = cmsswIB2Week(release)
rel_msec = rel_sec*1000
ex_fields=["rss", "vms", "pss", "uss", "shared", "data", "cpu"]
if not exists("%s/threads.txt" % partial_log_dirpath):
   e, o = run_cmd("grep ' --nThreads ' %s/*/cmdLog  | tail -1  | sed 's|.* *--nThreads *||;s| .*||'" % partial_log_dirpath)
   if e:
     print(o)
     exit(1)
   if not o: o="1"
   run_cmd("echo %s > %s/threads.txt" % (o, partial_log_dirpath))

e, o = run_cmd("head -1 %s/threads.txt" % partial_log_dirpath)
if e:
  print(o)
  exit(1)
cmsThreads = o.strip('\n')
e, o = run_cmd("ls -d %s/*" % partial_log_dirpath)
threads = []
for wf in o.split("\n"):
  if not isdir(wf): continue
Example #45
def createJob(workflow, cmssw_ver, arch):
  workflow_args = FixWFArgs(cmssw_ver, arch, workflow, GetMatrixOptions(cmssw_ver, arch))
  cmd = format("rm -rf %(workflow)s %(workflow)s_*; mkdir %(workflow)s; cd %(workflow)s; PATH=%(das_utils)s:$PATH runTheMatrix.py --maxSteps=0 -l %(workflow)s %(workflow_args)s",workflow=workflow,workflow_args=workflow_args, das_utils=CMS_BOT_DIR+"/das-utils")
  print("Running ",cmd)
  e, o = run_cmd(cmd)
  if e: print("ERROR:%s:%s" % (workflow, o))
  try:
    workflow_dir = glob.glob(format("%(workflow)s/%(workflow)s_*", workflow=workflow))[0]
    run_cmd(format("mv %(workflow)s/runall-report-step123-.log %(workflow_dir)s/workflow.log; touch %(workflow_dir)s/cmdLog; mv %(workflow_dir)s .; rm -rf %(workflow)s", workflow=workflow, workflow_dir=workflow_dir))
    print("Commands for workflow %s generated" % workflow)
  except Exception as e:
    print("ERROR: Creating workflow job:",workflow,str(e))
    run_cmd("rm -rf %s %s_*" % (workflow,workflow))

pyRunDir=os.path.join(os.environ["CMSSW_BASE"],"pyRelval")
run_cmd("rm -rf %s; mkdir -p %s" % (pyRunDir, pyRunDir))
os.chdir(pyRunDir)

cmssw_ver = os.environ["CMSSW_VERSION"]
arch = os.environ["SCRAM_ARCH"]
#Run runTheMatrix with maxStep=0
thrds=[]
jobs=MachineCPUCount
wf_query=""
print("Creating jobs (%s) ...." % jobs)
for wf in sys.argv[1].split(","):
  wf_query+=" OR workflow:"+wf
  while len(thrds)>=jobs:
    sleep(1)
    thrds = [ t for t in thrds if t.is_alive() ]
  t = threading.Thread(target=createJob, args=(wf, cmssw_ver, arch))
Example #46
#!/bin/env python
from __future__ import print_function
from os.path import basename
from sys import argv, exit
from _py2with3compatibility import run_cmd
from json import loads, dumps
try:
  authors_info = {}
  repo = argv[1]
  err, output = run_cmd("curl -s https://api.github.com/repos/" + repo + "/stats/contributors")
  if err:
    print(output)
    exit(1)
  data = loads(output)
  for item in data:
    authors_info[item['author']['login']] = item['total']
  if not authors_info:
    print(output)
    exit(1)
  print(basename(repo).upper().replace('-','_') + "_AUTHORS="+dumps(authors_info,sort_keys=True, indent=2))
except IndexError:
  print("Repo Name Required ... Argument missing !!!!")
  exit(1)

Example #47
  for data in repos:
    for u in data:
      existing_repos = get_repos(u, cache)
      for r in data[u]:
        if r not in existing_repos:
          print("%s/%s NEW" % (u, r))

if __name__ == "__main__":
  parser = OptionParser(usage="%prog <pull-request-id>")
  parser.add_option("-n", "--dry-run",    dest="dryRun",     action="store_true", help="Do not modify Github", default=False)
  parser.add_option("-r", "--repo-list",  dest="repo_list",  help="Yaml file with list of repositories to create under docker hun", type=str, default=None)
  opts, args = parser.parse_args()

  repos = {}
  if not opts.repo_list:
    parser.error("Missing repository list file, please use -r|--repo-list option")
  
  if opts.repo_list.startswith('https://'):
    e, o = run_cmd('curl -s -L %s' % opts.repo_list)
    if e:
      print(o)
      exit(1)
    repos = yaml.load_all(o)
  elif exists(opts.repo_list):
    repos = yaml.load(open(opts.repo_list))
  else:
    print("Error: No such file: %s" % opts.repo_list)
    exit(1)
  repo_cache = {}
  process(repos, opts.dryRun, repo_cache)
Example #48
def get_git_mt(path, filename):
    status, rez = run_cmd('cd %s; git log -1 --format="%%ad" --date=unix -- %s' % (path, filename))
    if status != 0:
        print("ERROR, " + rez)
        sys.exit(1)  # todo throws an exception
    return rez
Example #49
                payload["status"] = 1
              else:
                payload["status"] = 0
              utest= line.split(' ')[0]
              payload["package"] = pkg
              payload["name"] = utest
              id = sha1(release + architecture + utest).hexdigest()
              send_payload(index,document,id,json.dumps(payload))
              line = it.next().strip()
      except Exception as e:
        print("File processed:", e)
  else:
    print("Invalid File Path")

#get log files
logs = run_cmd("find /data/sdt/buildlogs -mindepth 6 -maxdepth 6 -name 'unitTests-summary.log'")
logs = logs[1].split('\n')
#process log files
for logFile in logs:
  flagFile = logFile + '.checked'
  if not exists(flagFile):
    print("Working on ",logFile)
    process_ib_utests(logFile)
    os.system('touch "' + flagFile + '"')

logs = run_cmd("find /data/sdt/buildlogs -mindepth 6 -maxdepth 6 -name 'unitTestLogs.zip'")
logs = logs[1].split('\n')
#process zip log files
for logFile in logs:
  flagFile = logFile + '.checked'
  if not exists(flagFile):
Example #50
parser.add_argument("-m", "--message", dest="msg", help="Message to be posted s body of the GH issue",type=str, default='')
parser.add_argument("-R", "--report_file", dest="report_file", help="File name contaning the issue message",type=str, default='')

args = parser.parse_args()
mgs=""
if not args.repo: parser.error("Missing Repo")
if not args.title: parser.error("Missing PR title")
if args.msg: msg = re.sub("@N@","\n",args.msg)
elif args.report_file: msg = open(args.report_file).read()
else: parser.error("Missing issue message: -m|--message <message> OR -R|--report-file <file-path>")

print("Authenticating to Github and connecting to repo")
repo_dir = join(SCRIPT_DIR,'repos',args.repo.replace("-","_"))
if exists(join(repo_dir,"repo_config.py")): sys.path.insert(0,repo_dir)
import repo_config
gh = Github(login_or_token=open(expanduser(repo_config.GH_TOKEN)).read().strip())
print("Authentication succeeeded")
gh_repo = gh.get_repo(args.repo)
e, o = run_cmd("curl -s 'https://api.github.com/search/issues?q=%s+repo:%s+in:title+type:issue' | grep '\"number\"' | sed -e 's|.*: ||;s|,.*||'" % (quote(args.title),args.repo))
issue = None
if not e:
  try:issue = gh_repo.get_issue(int(o))
  except: pass
if issue:
  print("Updaing comment")
  issue.create_comment(msg)
else:
  print("Creating issue request")
  gh_repo.create_issue(args.title, msg)

Example #51
def update_hostname(workflow_dir): run_cmd("hostname > %s/hostname" % workflow_dir)

def update_known_error(workflow, workflow_dir):
Example #52
  for wf_file in glob.glob("%s/*" % workflow_dir):
    found=False
    for ext in files_to_keep:
      if wf_file.endswith(ext):
        found=True
        break
    if not found:
      print("Removing ",wf_file)
      run_cmd("rm -rf %s" % wf_file)
  logger=LogUpdater(dirIn=os.environ["CMSSW_BASE"])
  logger.updateRelValMatrixPartialLogs(basedir, os.path.basename(workflow_dir))

if __name__ == "__main__":
  jobs=json.load(open(sys.argv[1]))
  exit_code = 0
  for cmd in jobs["commands"]:
    if cmd["exit_code"]>0:
      exit_code=cmd["exit_code"]
      break
  workflow = jobs["name"]
  workflow_dir=os.path.abspath(glob.glob("%s_*" % workflow)[0])
  run_cmd("mv %s %s/job.json" % (sys.argv[1], workflow_dir))
  fix_lognames(workflow_dir)
  if update_worklog(workflow_dir, jobs):
    update_cmdlog(workflow_dir, jobs)
  update_timelog(workflow_dir, jobs)
  update_hostname(workflow_dir)
  update_known_error(workflow, workflow_dir)
  if 'CMSSW_DRY_RUN' not in os.environ: upload_logs(workflow, workflow_dir, exit_code)