Example 1
def backport_pull (repo, pr, branch):
  pr_branch = pr.base.label.split(":")[1]
  print "Source Branch:",pr_branch
  if pr_branch == branch: return "Warning: Cannot backport, same branch %s vs %s" % (pr_branch, branch),False
  br = repo.get_branch(branch)  # verify that the target branch exists
  commits = []
  # Build cherry-pick commands in original commit order: iterating the
  # reversed list while inserting at the front restores the original order.
  for c in pr.get_commits().reversed: commits.insert(0,"git cherry-pick %s" % c.sha)
  if not commits: return "There are no commits to backport",False
  print "Cherry-pick commands:"
  print "  "+"\n  ".join(commits)
  if len(commits)>250:
    return "Error: Too many commits (%s) in PR.\nBot can only handle max 250 commits." % len(commits),False
  new_branch = "backport-%s-%s" % (branch.replace("/","_"), pr.number)
  print "New Branch:",new_branch
  git_ref = ""
  if repo.name == "cmssw": git_ref = "--reference "+CMSSW_GIT_REF
  print "GIT REF:",git_ref
  e , o = run_cmd("rm -rf pr_backport; git clone --branch %s %s [email protected]:%s pr_backport && cd pr_backport && git checkout -b %s" % (branch, git_ref, repo.full_name, new_branch))
  if e:
    print o
    exit(1)
  e, o = run_cmd('cd pr_backport; %s' % ";".join(commits))
  if e: return "Error: Failed to cherry-pick commits. Please backport this PR yourself.\n```"+o+"\n```",False
  e, o = run_cmd("cd pr_backport; git push origin %s" % new_branch)
  if e:
    print o
    exit(1)
  run_cmd("rm -rf pr_backport")
  newBody = "backport of #%s\n\n%s" % (pr.number, pr.body)
  newPR = repo.create_pull(title=pr.title, body=newBody, base=branch, head=new_branch)
  return "Successfully backported PR #%s as #%s for branch %s" % (pr.number, newPR.number, branch),True
Example 2
def cleanOutgoingRevs(reponame, remote, username, sshKey):
    outgoingRevs = retrying(out, attempts=RETRY_ATTEMPTS,
                            kwargs=dict(src=reponame, remote=remote,
                                        ssh_username=username, ssh_key=sshKey))
    for r in reversed(outgoingRevs):
        run_cmd(['hg', '--config', 'extensions.mq=', 'strip', '-n',
                 r[REVISION]], cwd=reponame)
Example 3
def cleanup_exit(msg, tmpdirs=None, image_hash="", exit_code=1):
    if not tmpdirs:
        tmpdirs = []
    if msg: print(msg)
    for tdir in tmpdirs:
        run_cmd("rm -rf %s" % tdir)
    if image_hash: run_cmd("docker rm -f %s" % image_hash)
    exit(exit_code)
Example 4
def purge(dest):
    """Purge the repository of all untracked and ignored files."""
    try:
        run_cmd(['hg', '--config', 'extensions.purge=', 'purge',
                 '-a', '--all', dest], cwd=dest)
    except subprocess.CalledProcessError, e:
        log.debug('purge failed: %s' % e)
        raise
Example 5
def send_comparison_ready_message(repo, pr_number, tests_results_url, comparison_errors_file, wfs_with_das_inconsistency_file, missing_map):
  pull_request = repo.get_pull(pr_number)
  message = COMPARISON_READY_MSG +'\n' + tests_results_url

  wfs_with_errors = ''
  for line in open( comparison_errors_file ):
    line = line.rstrip()
    parts = line.split( ';' )
    wf = parts[ 0 ]
    step = parts[ 1 ]
    wfs_with_errors += ( wf + ' step ' + step + '\n' )

  if wfs_with_errors != '':
    error_info = COMPARISON_INCOMPLETE_MSG.format( workflows=wfs_with_errors )
    message += '\n\n' + error_info

  wfs_das_inconsistency = open( wfs_with_das_inconsistency_file ).readline().rstrip().rstrip(',').split( ',' )

  if '' in wfs_das_inconsistency:
    wfs_das_inconsistency.remove( '' )

  if wfs_das_inconsistency:
    das_inconsistency_info = DAS_INCONSISTENCY_MSG.format( workflows=', '.join( wfs_das_inconsistency ) )
    message += '\n\n' + das_inconsistency_info

  if missing_map and exists (missing_map):
    missing = []
    for line in open(missing_map):
      line = line.strip()
      if line: missing.append("   * "+line)
    if missing:
      from categories import COMPARISON_MISSING_MAP
      map_notify = ", ".join([ "@"+u for u in COMPARISON_MISSING_MAP] )
      message += "\n\n"+map_notify+" comparisons for the following workflows were not done due to missing matrix map:\n"+"\n".join(missing)

  alt_comp_dir = join(dirname(comparison_errors_file), "upload","alternative-comparisons")
  print "Alt comparison directory: ",alt_comp_dir
  if exists(alt_comp_dir):
    err, out = run_cmd("grep ' Compilation failed' %s/runDQMComp-*.log" % alt_comp_dir)
    print out
    if not err:
      err_wfs = {}
      for line in out.split("\n"):
        wf = line.split(".log:",1)[0].split("runDQMComp-")[-1]
        err_wfs [wf]=1
      if err_wfs: message += "\n\nAlternative comparison failed for workflow(s):\n"+"\n".join(err_wfs.keys())

  JRCompSummaryLog = join(dirname(comparison_errors_file), "upload/validateJR/qaResultsSummary.log")
  print "JR comparison Summary: ",JRCompSummaryLog
  if exists(JRCompSummaryLog):
    err, out = run_cmd("cat %s" % JRCompSummaryLog)
    if (not err) and out:
      message += "\n\nComparison Summary:\n"
      for l in out.split("\n"):
        if l.strip(): message += " - %s\n" % l.strip()

  send_message_pr( pull_request, message )
Example 6
def push(src, remote, push_new_branches=True, force=False, **kwargs):
    cmd = ['hg', 'push']
    cmd.extend(common_args(**kwargs))
    if force:
        cmd.append('-f')
    if push_new_branches:
        cmd.append('--new-branch')
    cmd.append(remote)
    run_cmd(cmd, cwd=src)
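A hedged call sketch; common_args is assumed to turn the ssh keyword arguments into hg options, and the paths here are placeholders:

push('/builds/my-repo', 'ssh://hg.example.com/my-repo',
     push_new_branches=True, force=False,
     ssh_username='buildbot', ssh_key='~/.ssh/id_rsa')  # illustrative kwargs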
Example 7
def tag(dest, tags, user=None, msg=None, rev=None, force=None):
    cmd = ['hg', 'tag']
    if user:
        cmd.extend(['-u', user])
    if msg:
        cmd.extend(['-m', msg])
    if rev:
        cmd.extend(['-r', rev])
    if force:
        cmd.append('-f')
    cmd.extend(tags)
    run_cmd(cmd, cwd=dest)
    return get_revision(dest)
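A hedged sketch: tag the working copy and read back the resulting revision; the path and tag name are illustrative:

rev = tag('/builds/my-repo', ['RELEASE_1_0'],
          user='buildbot', msg='Tagging RELEASE_1_0', force=True)
print "tagged at revision", rev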
Example 8
def get_command(self):
    if self.input: return self.input.readline().strip()
    # Otherwise wait for an 'auto-load' file to appear and consume its
    # first line as the command.
    cFile = 'auto-load'
    while not exists(cFile):
        sleep(0.2)
    sleep(0.5)
    e, cmd = run_cmd("head -1 %s; rm -f %s" % (cFile, cFile))
    return cmd.strip()
Example 9
def update(dest, branch=None, revision=None):
    """Updates working copy `dest` to `branch` or `revision`.  If neither is
    set then the working copy will be updated to the latest revision on the
    current branch.  Local changes will be discarded."""
    # If we have a revision, switch to that
    if revision is not None:
        cmd = ['hg', 'update', '-C', '-r', revision]
        run_cmd(cmd, cwd=dest)
    else:
        # Check & switch branch
        local_branch = get_hg_output(['branch'], cwd=dest).strip()

        cmd = ['hg', 'update', '-C']

        # If this is different, checkout the other branch
        if branch and branch != local_branch:
            cmd.append(branch)

        run_cmd(cmd, cwd=dest)
    return get_revision(dest)
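Two hedged calls exercising both paths of update(); the path, hash, and branch name are placeholders:

rev = update('/builds/my-repo', revision='abcdef012345')  # pin to a revision
tip = update('/builds/my-repo', branch='default')         # back to branch tip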
Example 10
def get_manifest(image):
    repo = image.split(":", 1)[0]
    tag = image.split(":", 1)[-1]
    # When no ":tag" is given both splits return the full image name,
    # so compare before prefixing "library/".
    if repo == tag: tag = "latest"
    if '/' not in repo:
        repo = 'library/' + repo
    token = get_docker_token(repo)
    print('Getting image_manifest for %s:%s' % (repo, tag))
    e, o = run_cmd(
        'curl --silent --request "GET" --header "Authorization: Bearer %s" "https://registry-1.docker.io/v2/%s/manifests/%s"'
        % (token, repo, tag))
    return loads(o)
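A hedged sketch of consuming the result; without an explicit Accept header the registry returns a schema-1 manifest whose layers sit under "fsLayers", but treat the field names as an assumption:

manifest = get_manifest("ubuntu:20.04")   # placeholder image
for layer in manifest.get("fsLayers", []):
    print(layer.get("blobSum"))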
Example 11
def sms_handler(sms):
    cellular.on_new_sms(sms_handler)  # re-arm the callback for the next SMS
    msg = sms.message

    if usb_connected():
        s.writeRegister("sms effect")

    print("sms_handler")
    # Note: this run_cmd is the application's own SMS-command dispatcher,
    # not the shell helper used in the other examples.
    result = run_cmd(sms.phone_number, [word.lower() for word in msg.split()])

    remove_all_sms()
    if result is not None:
        cellular.SMS(sms.phone_number, result).send(0)
Example 12
def get_repos(user, cache):
    if user not in cache:
        cache[user] = []
        url = 'https://hub.docker.com/v2/repositories/%s?page_size=100' % user
        while True:
            e, o = run_cmd('curl -s -L %s' % url)
            repo_data = json.loads(o)
            if "results" in repo_data:
                for r in repo_data["results"]:
                    cache[user].append(r["name"])
            if "next" in repo_data and repo_data["next"]:
                url = repo_data["next"]
            else:
                break
    return cache[user]
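A hedged usage sketch; the user name is a placeholder, and the shared cache dict is what lets a second lookup skip the paginated Docker Hub calls:

cache = {}
for name in get_repos("cmssw", cache):    # illustrative Docker Hub user
    print("repo: %s" % name)
names_again = get_repos("cmssw", cache)   # served from cache, no curl calls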
Example 13
def apply_and_push(localrepo, remote, changer, max_attempts=10,
                   ssh_username=None, ssh_key=None, force=False):
    """This function calls `changer' to make changes to the repo, and tries
       its hardest to get them to the origin repo. `changer' must be a
       callable object that receives two arguments: the directory of the local
       repository, and the attempt number. This function will push ALL
       changesets missing from remote."""
    assert callable(changer)
    branch = get_branch(localrepo)
    changer(localrepo, 1)
    for n in range(1, max_attempts + 1):
        new_revs = []
        try:
            new_revs = out(src=localrepo, remote=remote,
                           ssh_username=ssh_username,
                           ssh_key=ssh_key)
            if len(new_revs) < 1:
                raise HgUtilError("No revs to push")
            push(src=localrepo, remote=remote, ssh_username=ssh_username,
                 ssh_key=ssh_key, force=force)
            return
        except subprocess.CalledProcessError, e:
            log.debug("Hit error when trying to push: %s" % str(e))
            if n == max_attempts:
                log.debug("Tried %d times, giving up" % max_attempts)
                for r in reversed(new_revs):
                    run_cmd(['hg', '--config', 'extensions.mq=', 'strip', '-n',
                             r[REVISION]], cwd=localrepo)
                raise HgUtilError("Failed to push")
            pull(remote, localrepo, update_dest=False,
                 ssh_username=ssh_username, ssh_key=ssh_key)
            # After we successfully rebase or strip away heads the push is
            # attempted again at the start of the loop
            try:
                run_cmd(['hg', '--config', 'ui.merge=internal:merge',
                         'rebase'], cwd=localrepo)
            except subprocess.CalledProcessError, e:
                log.debug("Failed to rebase: %s" % str(e))
                # abort failed rebase
                run_cmd(['hg', 'rebase', '--abort'], cwd=localrepo)
                update(localrepo, branch=branch)
                for r in reversed(new_revs):
                    run_cmd(['hg', '--config', 'extensions.mq=', 'strip', '-n',
                             r[REVISION]], cwd=localrepo)
                changer(localrepo, n + 1)
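A hedged sketch of a changer callable matching the contract in the docstring: it receives the local repo path and the attempt number, and must recreate its changesets on every attempt because failed attempts are stripped. File names and URLs are illustrative, and the sketch reuses the commit() helper defined later in this collection:

import os

def bump_version(repo_dir, attempt):
    # Recreate the change from scratch: earlier attempts were stripped.
    with open(os.path.join(repo_dir, 'version.txt'), 'w') as f:
        f.write('1.0 build %d\n' % attempt)
    run_cmd(['hg', 'add', 'version.txt'], cwd=repo_dir)
    commit(repo_dir, msg='Bump version (attempt %d)' % attempt)

apply_and_push('/builds/my-repo', 'ssh://hg.example.com/my-repo',
               bump_version, ssh_username='buildbot',
               ssh_key='~/.ssh/id_rsa')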
Example 14
def parse_workflows(workflow_file):
  err, out = run_cmd("cat %s" % workflow_file)
  if err:
    print out
    exit(1)

  wf = ""
  wfs = {}
  steps = 0
  for line in out.split("\n"):
    line = line.strip()
    m = re.match("^.*\[(\d+)\] *: *(.+)$",line)
    if not m: continue
    step = m.group(1)
    cmd = m.group(2).strip()
    prefix, rest = line.split(":",1)
    items = prefix.split(" ")
    if re.match("^\d+(\.\d+|)$",items[0]): wf = items[0]
    if not wf in wfs: wfs[wf]={}
    wfs[wf][step]=re.sub("  +"," ",cmd)
    steps += 1
  print "%s: %s workflows, %s steps" % (workflow_file, len(wfs), steps)
  return wfs
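A hedged usage sketch, assuming a runTheMatrix-style listing where each line looks like "136.88 ... [1]: cmsDriver.py ..."; the file name is a placeholder:

wfs = parse_workflows("workflows.log")
for wf in sorted(wfs):
  for step in sorted(wfs[wf]):
    print "workflow %s step [%s]: %s" % (wf, step, wfs[wf][step])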
Example 15
        errs[isrc][inc]=includes[isrc][inc]

#Free memory
checked = {}
includes = {}
uses = {}
usedby = {}

pkg_errs = {}
for e in errs:
  pkg = '/'.join(e.split('/')[:2])
  if pkg not in pkg_errs: pkg_errs[pkg] = {}
  pkg_errs[pkg][e]=errs[e]

outdir = 'invalid-includes'
run_cmd('rm -f %s; mkdir %s' % (outdir, outdir))
all_count = {}
for p in sorted(pkg_errs):
  all_count[p]=len(pkg_errs[p])
  pdir = join(outdir, p)
  run_cmd('mkdir -p %s' % pdir)
  with open(join(pdir, 'index.html'),'w') as ref:
    ref.write("<html><head></head><body>\n")
    for e in sorted(pkg_errs[p]):
      ref.write("<h3>%s:</h3>\n<ul>\n" % e)
      for inc in sorted(errs[e].keys()):
        url = 'https://github.com/cms-sw/cmssw/blob/%s/%s#L%s' % (environ['CMSSW_VERSION'], e, errs[e][inc])
        ref.write('<li><a href="%s">%s</a></li>\n' % (url, inc))
      ref.write("</ul>\n")
    ref.write("</body></html>\n")
Example 16
#!/usr/bin/env python
import yaml
import json
from os import environ
from sys import argv, exit
from os.path import join
from commands import getstatusoutput as run_cmd

e, o = run_cmd ("find %s -name '*.yaml' -type f" % argv[1])
if e:
  print o
  exit (1)

localtop = environ["CMSSW_BASE"]
files = [ "/src/"+f.split(argv[1],1)[-1][:-5].strip("/") for f in o.split("\n") ]
ignore_files=[]
track_changes = {}
print "Changed files:  ",'\n  '.join(files)
for f in o.split("\n"):
  print "Working on",f
  obj = yaml.load(open(f), Loader=yaml.SafeLoader)
  if not obj: obj={"Diagnostics":[]}
  change = 0
  new_dia = []
  if (not "Diagnostics" in obj) or (not obj["Diagnostics"]):
    run_cmd("rm -f %s" % f)
    continue
  atN = False
  for d in obj["Diagnostics"]:
    new_rep = []
    if (not "Replacements" in d) or (not d["Replacements"]): continue
Example 17

try:
    LLVM_CCDB_NAME = argv[1]
except:
    LLVM_CCDB_NAME = "compile_commands.json"

llvm_ccdb = []
local_files = []
localtop = getenv("LOCALTOP")
arch = getenv("SCRAM_ARCH")
tmpsrc = join(localtop, "tmp", arch, "src")

#Read SCRAM Generated
if exists(tmpsrc):
    err, llvm_ccdb_files = run_cmd("find %s -name '*.%s' -type f" %
                                   (tmpsrc, LLVM_CCDB_NAME))
    if err:
        print_msg(llvm_ccdb_files)
        exit(err)
    if llvm_ccdb_files:
        for llvm_ccdb_file in llvm_ccdb_files.split("\n"):
            obj = json.load(open(llvm_ccdb_file))
            if obj['file'] in local_files: continue
            local_files.append(obj['file'])
            llvm_ccdb.append(obj)

release_top = getenv("RELEASETOP", None)
if not release_top:
    proj_name = getenv("SCRAM_PROJECTNAME")
    proj_ver = getenv("SCRAM_PROJECTVERSION")
    err, full_release = run_cmd(
Example 18
def process(image, outdir):
    container = image.split(":", 1)[0]
    tag = image.split(":", 1)[-1]
    if container == tag: tag = "latest"

    e, image_hash = run_cmd(
        "docker pull %s 2>&1 >/dev/null; docker images %s | grep '^%s \|/%s ' | grep ' %s ' | tail -1 | sed 's|  *|:|g' | cut -d: -f3"
        % (image, container, container, container, tag))
    print("Image hash: %s" % image_hash)
    if e:
        print(image_hash)
        exit(1)

    img_sdir = join(".images", image_hash[0:2], image_hash)
    img_dir = join(outdir, img_sdir)
    if exists(img_dir): return

    print("Starting Container %s with %s hash" % (image, image_hash))
    tmpdir = join(outdir, ".images", "tmp")
    e, o = run_cmd('docker run -u $(id -u):$(id -g) --name %s %s echo OK' %
                   (image_hash, image))
    if e: cleanup_exit(o, [tmpdir], image_hash)

    print("Getting Container Id")
    e, o = run_cmd('docker ps -aq --filter name=%s' % image_hash)
    if e: cleanup_exit(o, [tmpdir], image_hash)
    container_id = o

    print("Exporting Container ", container_id)
    e, o = run_cmd(
        'rm -rf %s; mkdir -p %s; cd %s; docker export -o %s.tar %s' %
        (tmpdir, tmpdir, tmpdir, image_hash, container_id))
    if e: cleanup_exit(o, [tmpdir], image_hash)

    print("Cleaning up container ", image_hash)
    run_cmd('docker rm -f %s' % image_hash)

    print("Unpacking exported container ....")
    e, o = run_cmd('mkdir -p %s; cd %s; tar -xf %s/%s.tar' %
                   (img_dir, img_dir, tmpdir, image_hash))
    if e: cleanup_exit(o, [tmpdir, img_dir])
    run_cmd('rm -rf %s' % tmpdir)

    for xdir in [
            "srv", "cvmfs", "dev", "proc", "sys", "build", "data", "pool"
    ]:
        sdir = join(img_dir, xdir)
        if not exists(sdir): run_cmd('mkdir %s' % sdir)

    print("Fixing file modes ....")
    fix_modes(img_dir)
Example 19
def cmd(cmd2run):
    e, o = run_cmd(cmd2run)
    if e:
        print o
        sys.exit(1)
    return o
Example 20
def mercurial(repo, dest, branch=None, revision=None, update_dest=True,
              shareBase=DefaultShareBase, allowUnsharedLocalClones=False,
              clone_by_rev=False, mirrors=None, bundles=None, autoPurge=False):
    """Makes sure that `dest` is has `revision` or `branch` checked out from
    `repo`.

    Do what it takes to make that happen, including possibly clobbering
    dest.

    If allowUnsharedLocalClones is True and we're trying to use the share
    extension but fail, then we will be able to clone from the shared repo to
    our destination.  If this is False, the default, then if we don't have the
    share extension we will just clone from the remote repository.

    If `clone_by_rev` is True, use 'hg clone -r <rev>' instead of 'hg clone'.
    This is slower, but useful when cloning repos with lots of heads.

    If `mirrors` is set, will try and use the mirrors before `repo`.

    If `bundles` is set, will try and download the bundle first and
    unbundle it instead of doing a full clone. If successful, will pull in
    new revisions from mirrors or the master repo. If unbundling fails, will
    fall back to doing a regular clone from mirrors or the master repo.
    """
    dest = os.path.abspath(dest)
    if shareBase is DefaultShareBase:
        shareBase = os.environ.get("HG_SHARE_BASE_DIR", None)

    log.info("Reporting hg version in use")
    cmd = ['hg', '-q', 'version']
    run_cmd(cmd, cwd='.')

    if shareBase:
        # Check that 'hg share' works
        try:
            log.info("Checking if share extension works")
            output = get_hg_output(['help', 'share'], dont_log=True)
            if 'no commands defined' in output:
                # Share extension is enabled, but not functional
                log.info("Disabling sharing since share extension doesn't seem to work (1)")
                shareBase = None
            elif 'unknown command' in output:
                # Share extension is disabled
                log.info("Disabling sharing since share extension doesn't seem to work (2)")
                shareBase = None
        except subprocess.CalledProcessError:
            # The command failed, so disable sharing
            log.info("Disabling sharing since share extension doesn't seem to work (3)")
            shareBase = None

    # Check that our default path is correct
    if os.path.exists(os.path.join(dest, '.hg')):
        hgpath = path(dest, "default")

        # Make sure that our default path is correct
        if hgpath != _make_absolute(repo):
            log.info("hg path isn't correct (%s should be %s); clobbering",
                     hgpath, _make_absolute(repo))
            remove_path(dest)

    # If the working directory already exists and isn't using share we update
    # the working directory directly from the repo, ignoring the sharing
    # settings
    if os.path.exists(dest):
        if not os.path.exists(os.path.join(dest, ".hg")):
            log.warning("%s doesn't appear to be a valid hg directory; clobbering", dest)
            remove_path(dest)
        elif not os.path.exists(os.path.join(dest, ".hg", "sharedpath")):
            try:
                if autoPurge:
                    purge(dest)
                return pull(repo, dest, update_dest=update_dest, branch=branch,
                            revision=revision,
                            mirrors=mirrors)
            except subprocess.CalledProcessError:
                log.warning("Error pulling changes into %s from %s; clobbering", dest, repo)
                log.debug("Exception:", exc_info=True)
                remove_path(dest)

    # If that fails for any reason, and sharing is requested, we'll try to
    # update the shared repository, and then update the working directory from
    # that.
    if shareBase:
        sharedRepo = os.path.join(shareBase, get_repo_path(repo))
        dest_sharedPath = os.path.join(dest, '.hg', 'sharedpath')

        if os.path.exists(sharedRepo):
            hgpath = path(sharedRepo, "default")

            # Make sure that our default path is correct
            if hgpath != _make_absolute(repo):
                log.info("hg path isn't correct (%s should be %s); clobbering",
                         hgpath, _make_absolute(repo))
                # we need to clobber both the shared checkout and the dest,
                # since hgrc needs to be in both places
                remove_path(sharedRepo)
                remove_path(dest)

        if os.path.exists(dest_sharedPath):
            # Make sure that the sharedpath points to sharedRepo
            dest_sharedPath_data = os.path.normpath(
                open(dest_sharedPath).read())
            norm_sharedRepo = os.path.normpath(os.path.join(sharedRepo, '.hg'))
            if dest_sharedPath_data != norm_sharedRepo:
                # Clobber!
                log.info("We're currently shared from %s, but are being requested to pull from %s (%s); clobbering",
                         dest_sharedPath_data, repo, norm_sharedRepo)
                remove_path(dest)

        try:
            log.info("Updating shared repo")
            mercurial(repo, sharedRepo, branch=branch, revision=revision,
                      update_dest=False, shareBase=None, clone_by_rev=clone_by_rev,
                      mirrors=mirrors, bundles=bundles, autoPurge=False)
            if os.path.exists(dest):

                # Bug 969689: Check to see if the dest repo is still on a valid
                # commit. It is possible that the shared repo was clobbered out
                # from under us, effectively stripping our active commit. This
                # can cause 'hg status', 'hg purge', and the like to do
                # incorrect things. If we detect this situation, then it's best
                # to clobber and re-create dest.
                parent = get_revision(dest)
                if not parent:
                    log.info("Shared repo %s no longer has our parent cset; clobbering",
                             sharedRepo)
                    remove_path(dest)
                else:
                    if autoPurge:
                        purge(dest)
                    return update(dest, branch=branch, revision=revision)

            try:
                log.info("Trying to share %s to %s", sharedRepo, dest)
                return share(sharedRepo, dest, branch=branch, revision=revision)
            except subprocess.CalledProcessError:
                if not allowUnsharedLocalClones:
                    # Re-raise the exception so it gets caught below.
                    # We'll then clobber dest, and clone from original repo
                    raise

                log.warning("Error calling hg share from %s to %s;"
                            "falling back to normal clone from shared repo",
                            sharedRepo, dest)
                # Do a full local clone first, and then update to the
                # revision we want
                # This lets us use hardlinks for the local clone if the OS
                # supports it
                clone(sharedRepo, dest, update_dest=False,
                      mirrors=mirrors, bundles=bundles)
                return update(dest, branch=branch, revision=revision)
        except subprocess.CalledProcessError:
            log.warning(
                "Error updating %s from sharedRepo (%s): ", dest, sharedRepo)
            log.debug("Exception:", exc_info=True)
            remove_path(dest)
    # end if shareBase

    if not os.path.exists(os.path.dirname(dest)):
        os.makedirs(os.path.dirname(dest))

    # Share isn't available or has failed, clone directly from the source
    return clone(repo, dest, branch, revision,
                 update_dest=update_dest, mirrors=mirrors,
                 bundles=bundles, clone_by_rev=clone_by_rev)
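Two hedged call patterns for mercurial(); URLs, paths, and the revision hash are placeholders:

# Plain checkout: clone or update build/my-repo to the tip of default.
rev = mercurial('https://hg.example.com/my-repo', 'build/my-repo')

# Shared-store checkout pinned to a revision; needs the hg share extension.
rev = mercurial('https://hg.example.com/my-repo', 'build/my-repo-try',
                revision='abcdef012345', shareBase='/builds/hg-shared')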
Example 21
#!/usr/bin/env python
import json, sys
from commands import getstatusoutput as run_cmd

e, o = run_cmd(
    'curl --silent -f -L https://index.docker.io/v1/repositories/%s/tags' %
    sys.argv[1])
if e:
    print o
    sys.exit(1)

data = json.loads(o)
for container in data:
    if container['name'] == sys.argv[2]:
        print "FOUND:%s" % sys.argv[2]
        break

sys.exit(0)
Example 22
def commit(dest, msg, user=None):
    cmd = ['hg', 'commit', '-m', msg]
    if user:
        cmd.extend(['-u', user])
    run_cmd(cmd, cwd=dest)
    return get_revision(dest)
Example 23
def merge_via_debugsetparents(dest, old_head, new_head, msg, user=None):
    """Merge 2 heads avoiding non-fastforward commits"""
    cmd = ['hg', 'debugsetparents', new_head, old_head]
    run_cmd(cmd, cwd=dest)
    commit(dest, msg=msg, user=user)
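A hedged sketch; debugsetparents records new_head as the first parent so the committed result reads like a fast-forward of old_head; the path and hash are placeholders:

old_head = get_revision('/builds/my-repo')
merge_via_debugsetparents('/builds/my-repo', old_head, 'abcdef012345',
                          msg='Merge heads without a real merge',
                          user='buildbot')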
Example 24
                      action="store_true",
                      help="Do not modify Github",
                      default=False)
    parser.add_option(
        "-r",
        "--repo-list",
        dest="repo_list",
        help="Yaml file with list of repositories to create under docker hun",
        type=str,
        default=None)
    opts, args = parser.parse_args()

    repos = {}
    if not opts.repo_list:
        parser.error(
            "Missing repository list file, please use -r|--repo-list option")

    if opts.repo_list.startswith('https://'):
        e, o = run_cmd('curl -s -L %s' % opts.repo_list)
        if e:
            print(o)
            exit(1)
        repos = yaml.load_all(o, Loader=yaml.SafeLoader)
    elif exists(opts.repo_list):
        repos = yaml.load(open(opts.repo_list), Loader=yaml.SafeLoader)
    else:
        print("Error: No such file: %s" % opts.repo_list)
        exit(1)
    repo_cache = {}
    process(repos, opts.dryRun, repo_cache)
Example 25
#!/usr/bin/env python
import json
from time import time
from sys import argv, exit
from os.path import dirname, abspath
from commands import getstatusoutput as run_cmd
script_path = abspath(dirname(argv[0]))
eos_cmd = "EOS_MGM_URL=root://eoscms.cern.ch /usr/bin/eos"
eos_base = "/eos/cms/store/user/cmsbuild"
unused_days_threshold = 180
try: days = int(argv[1])
except: days = 30
if days < 30: days = 30
e, o = run_cmd("PYTHONPATH=%s/.. %s/ib-datasets.py --days %s" % (script_path, script_path, days))
if e:
  print o
  exit(1)

jdata = json.loads(o)
used = {}
for o in jdata['hits']['hits']:
  used[o['_source']['lfn']]=1

e, o = run_cmd("%s find -f %s" % (eos_cmd, eos_base))
if e:
  print o
  exit(1)

total = 0
active = 0
unused = []
Example 26
from sys import stdin, exit
from commands import getstatusoutput as run_cmd

all_dasfiles = []
new_order = []
for line in stdin:
    line = line.strip("\n")
    if line.startswith("/store/"): all_dasfiles.append(line)
    else: new_order.append(line)

if not all_dasfiles:
    print "\n".join(new_order)
    exit(0)

eos_cmd = "EOS_MGM_URL=root://eoscms.cern.ch /usr/bin/eos"
EOS_BASE = "/eos/cms/store/user/cmsbuild/store"
eos_base_len = len(EOS_BASE)
err, eos_files = run_cmd("%s find -f %s | sort" % (eos_cmd, EOS_BASE))
if err:
    print "\n".join(new_order)
    exit(0)

new_order = []
for eos_file in eos_files.split("\n"):
    eos_file = "/store" + eos_file[eos_base_len:]
    if eos_file in all_dasfiles: new_order.append(eos_file)
for das_file in all_dasfiles:
    if not das_file in new_order: new_order.append(das_file)

print "\n".join(new_order)
Example 27
def init(dest):
    """Initializes an empty repo in `dest`"""
    run_cmd(['hg', 'init', dest])
Example 28
#!/usr/bin/env python
import yaml
import json
from os import environ
from sys import argv, exit
from os.path import join
from commands import getstatusoutput as run_cmd

e, o = run_cmd ("find %s -name '*.yaml' -type f" % argv[1])
if e:
  print o
  exit (1)

localtop = environ["CMSSW_BASE"]
files = [ "/src/"+f.split(argv[1],1)[-1][:-5].strip("/") for f in o.split("\n") ]
ignore_files=[]
print "Changed files:",files
for f in o.split("\n"):
  print "Working on",f
  obj = yaml.load(open(f), Loader=yaml.SafeLoader)
  if not obj: obj={"Diagnostics":[]}
  change = 0
  new_dia = []
  if (not "Diagnostics" in obj) or (not obj["Diagnostics"]):
    run_cmd("rm -f %s" % f)
    continue
  for d in obj["Diagnostics"]:
    new_rep = []
    if (not "Replacements" in d) or (not d["Replacements"]): continue
    for r in d["Replacements"]:
      rf = "/"+r["FilePath"].split(localtop,1)[-1].strip("/")
Example 29
def share(source, dest, branch=None, revision=None):
    """Creates a new working directory in "dest" that shares history with
       "source" using Mercurial's share extension"""
    run_cmd(['hg', 'share', '-U', source, dest])
    return update(dest, branch=branch, revision=revision)
Example 30
def get_docker_token(repo):
    print('\nGetting docker.io token ....')
    e, o = run_cmd(
        'curl --silent --request "GET" "https://auth.docker.io/token?service=registry.docker.io&scope=repository:%s:pull"'
        % repo)
    return loads(o)['token']
Example 31
#!/usr/bin/env python
import json
from sys import argv, exit
from os.path import dirname, abspath
from commands import getstatusoutput as run_cmd
script_path = abspath(dirname(argv[0]))
eos_cmd = "EOS_MGM_URL=root://eoscms.cern.ch /usr/bin/eos"
eos_base = "/eos/cms/store/user/cmsbuild"
try: days = int(argv[1])
except: days = 10
if days < 10: days = 10
e, o = run_cmd("PYTHONPATH=%s/.. %s/ib-datasets.py --days %s" % (script_path, script_path, days))
if e:
  print o
  exit(1)

jdata = json.loads(o)
used = {}

for o in jdata[0]['hits']['hits']:
  used[o['_source']['lfn']]=1

e, o = run_cmd("curl -s https://raw.githubusercontent.com/cms-sw/cms-sw.github.io/master/das_queries/ibeos.txt")
if e:
  print o
  exit(1)

total = 0
active = 0
unused = []
for l in o.split("\n"):
Example 32
    for my_file in prs_dict[pr_number]['changed_files_names']:
        if len(prs_list[my_file]) > 1:
            print("File ", my_file, " modified in PR(s):", ', '.join(['#'+p  for p in  prs_list[my_file] if p!=pr_number]))


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print("Usage: SearchPROverlap.py <PR number> [ <branch> ]")
        print(
            "       <PR number>: number of PR belonging to <branch>, or \"all\" for loop on all open PRs in <branch>\n"
            "       If \"all\" is given as <PR number>, then a branch must be given as well.")
        exit()

    my_pr = sys.argv[1]
    my_branch = None
    e, o = run_cmd('curl -s -k -L https://raw.githubusercontent.com/cms-sw/cms-prs/master/cms-sw/cmssw/.other/files_changed_by_prs.json')
    prs_dict = json.loads(o)
    if my_pr != "all" and my_pr not in prs_dict:
        print("PR #", my_pr, "does not exist", file=sys.stderr)
        exit(1)
    if len(sys.argv) > 2:
        my_branch = sys.argv[2]
    elif len(sys.argv) == 2 and my_pr == 'all':
        print("ERROR: If \"all\" is given as <PR number>, then a branch must be given as well.")
        exit(1)
    else:
        pr_metadata = prs_dict[my_pr]
        my_branch = pr_metadata['base_branch']
    my_list = build_open_file_list(prs_dict, my_branch)

    if my_pr == "all":
Example 33
                      default=False,
                      action='store_true')
    
    parser.add_option('--ibeos',
                      help='Use IB EOS site configuration',
                      dest='IBEos',
                      default=False,
                      action='store_true')

    opt,args = parser.parse_args()
    if opt.IBEos:
      import os
      from commands import getstatusoutput as run_cmd
      ibeos_cache = os.path.join(os.getenv("LOCALRT"), "ibeos_cache.txt")
      if not os.path.exists(ibeos_cache):
        err, out = run_cmd("curl -L -s -o %s https://raw.githubusercontent.com/cms-sw/cms-sw.github.io/master/das_queries/ibeos.txt" % ibeos_cache)
        if err:
          run_cmd("rm -f %s" % ibeos_cache)
          print("Error: Unable to download ibeos cache information")
          print(out)
          sys.exit(err)

      for cmssw_env in [ "CMSSW_BASE", "CMSSW_RELEASE_BASE" ]:
        cmssw_base = os.getenv(cmssw_env,None)
        if not cmssw_base: continue
        cmssw_base = os.path.join(cmssw_base,"src/Utilities/General/ibeos")
        if os.path.exists(cmssw_base):
          os.environ["PATH"]=cmssw_base+":"+os.getenv("PATH")
          os.environ["CMS_PATH"]="/cvmfs/cms-ib.cern.ch"
          os.environ["CMSSW_USE_IBEOS"]="true"
          print(">> WARNING: You are using SITECONF from /cvmfs/cms-ib.cern.ch")