Example 1
def checkPreferSystem(spec, cmd, homebrew_replacement, dockerImage):
    if cmd == "false":
        debug("Package %s can only be managed via alibuild." % spec["package"])
        return (1, "")
    cmd = homebrew_replacement + cmd
    err, out = dockerStatusOutput(cmd,
                                  dockerImage=dockerImage,
                                  executor=getStatusOutputBash)
    if not err:
        success("Package %s will be picked up from the system." %
                spec["package"])
        for x in out.split("\n"):
            debug(spec["package"] + ": " + x)
        return (err, "")

    warning(
        format(
            "Package %(p)s cannot be picked up from the system and will be built by aliBuild.\n"
            "This is due to the fact the following script fails:\n\n"
            "%(cmd)s\n\n"
            "with the following output:\n\n"
            "%(error)s\n",
            p=spec["package"],
            cmd=cmd,
            error="\n".join(
                ["%s: %s" % (spec["package"], x) for x in out.split("\n")])))
    return (err, "")
Example 2
 def syncToLocal(self, p, spec):
     debug("Updating remote store for package %s with hashes %s", p,
           ", ".join(spec["remote_hashes"]))
     err = execute("""\
 mkdir -p {workDir}/{linksPath}
 rsync -rlvW --delete {remoteStore}/{linksPath}/ {workDir}/{linksPath}/ || :
 for storePath in {storePaths}; do
   # Only get the first matching tarball. If there are multiple with the
   # same hash, we only need one and they should be interchangeable.
   if tars=$(rsync -s --list-only "{remoteStore}/$storePath/{pkg}-{ver}-*.{arch}.tar.gz" 2>/dev/null) &&
      # Strip away the metadata in rsync's file listing, leaving only the first filename.
      tar=$(echo "$tars" | sed -rn '1s#[- a-z0-9,/]* [0-9]{{2}}:[0-9]{{2}}:[0-9]{{2}} ##p') &&
      mkdir -p "{workDir}/$storePath" &&
      # If we already have a file with the same name, assume it's up to date
      # with the remote. In reality, we'll have unpacked, relocated and
      # repacked the tarball from the remote, so the file differs, but
      # there's no point in downloading the one from the remote again.
      rsync -vW --ignore-existing "{remoteStore}/$storePath/$tar" "{workDir}/$storePath/"
   then
     break
   fi
 done
 """.format(pkg=p,
            ver=spec["version"],
            arch=self.architecture,
            remoteStore=self.remoteStore,
            workDir=self.workdir,
            linksPath=resolve_links_path(self.architecture, p),
            storePaths=" ".join(
                resolve_store_path(self.architecture, pkg_hash)
                for pkg_hash in spec["remote_hashes"])))
     dieOnError(err, "Unable to update from specified store.")
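
The resolve_store_path and resolve_links_path helpers used above are not part of this listing. Judging from the path layout that appears in the later examples (TARS/<architecture>/store/<hash prefix>/<hash> and TARS/<architecture>/<package>), a rough sketch, not the project's actual implementation, could be:

def resolve_store_path(architecture, pkg_hash):
    # Store directory keyed by the first two characters of the package hash.
    return "TARS/{arch}/store/{prefix}/{hash}".format(
        arch=architecture, prefix=pkg_hash[:2], hash=pkg_hash)

def resolve_links_path(architecture, package):
    # Per-package directory holding the versioned tarball symlinks.
    return "TARS/{arch}/{pkg}".format(arch=architecture, pkg=package)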
Example 3
 def syncToRemote(self, p, spec):
     if not self.writeStore:
         return
     tarballNameWithRev = (
         "{package}-{version}-{revision}.{architecture}.tar.gz".format(
             architecture=self.architecture, **spec))
     tar_path = os.path.join(
         resolve_store_path(self.architecture, spec["hash"]),
         tarballNameWithRev)
     link_path = os.path.join(resolve_links_path(self.architecture, p),
                              tarballNameWithRev)
     tar_exists = self._s3_key_exists(tar_path)
     link_exists = self._s3_key_exists(link_path)
     if tar_exists and link_exists:
         debug("%s exists on S3 already, not uploading", tarballNameWithRev)
         return
     if tar_exists or link_exists:
         warning("%s exists already but %s does not, overwriting!",
                 tar_path if tar_exists else link_path,
                 link_path if tar_exists else tar_path)
     debug("Uploading tarball and symlink for %s %s-%s (%s) to S3", p,
           spec["version"], spec["revision"], spec["hash"])
     self.s3.upload_file(Bucket=self.writeStore,
                         Key=tar_path,
                         Filename=os.path.join(self.workdir, tar_path))
     self.s3.put_object(
         Bucket=self.writeStore,
         Key=link_path,
         Body=os.readlink(os.path.join(
             self.workdir, link_path)).lstrip("./").encode("utf-8"))
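
The _s3_key_exists helper called above is not shown in this listing. A plausible sketch for the same sync class, using boto3's head_object (an assumption, not necessarily the project's actual implementation; the bucket to probe is also assumed):

import botocore.exceptions

def _s3_key_exists(self, key):
    # head_object fetches only the object's metadata; a missing key raises
    # a ClientError (HTTP 404), which we translate into False.
    try:
        self.s3.head_object(Bucket=self.writeStore, Key=key)
        return True
    except botocore.exceptions.ClientError:
        return False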
Example 4
def deps(recipesDir, topPackage, outFile, buildRequires, transitiveRed, disable):
  dot = {}
  keys = [ "requires" ]
  if buildRequires:
    keys.append("build_requires")
  for p in glob("%s/*.sh" % recipesDir):
    debug(format("Reading file %(filename)s", filename=p))
    try:
      err, recipe, _ = parseRecipe(getRecipeReader(p))
      name = recipe["package"]
      if name in disable:
        debug("Ignoring %s, disabled explicitly" % name)
        continue
    except Exception as e:
      error(format("Error reading recipe %(filename)s: %(type)s: %(msg)s",
                   filename=p, type=type(e).__name__, msg=str(e)))
      sys.exit(1)
    dot[name] = dot.get(name, [])
    for k in keys:
      for d in recipe.get(k, []):
        d = d.split(":")[0]
        d in disable or dot[name].append(d)

  selected = None
  if topPackage != "all":
    if not topPackage in dot:
      error(format("Package %(topPackage)s does not exist", topPackage=topPackage))
      return False
    selected = [ topPackage ]
    olen = 0
    while len(selected) != olen:
      olen = len(selected)
      selected += [ x
                    for s in selected if s in dot
                    for x in dot[s] if not x in selected ]
    selected.sort()

  result = "digraph {\n"
  for p,deps in list(dot.items()):
    if selected and not p in selected: continue
    result += "  \"%s\";\n" % p
    for d in deps:
      result += "  \"%s\" -> \"%s\";\n" % (p,d)
  result += "}\n"

  with NamedTemporaryFile(delete=False) as fp:
    fp.write(result)
  try:
    if transitiveRed:
      execute(format("tred %(dotFile)s > %(dotFile)s.0 && mv %(dotFile)s.0 %(dotFile)s",
              dotFile=fp.name))
    execute(["dot", fp.name, "-Tpdf", "-o", outFile])
  except Exception as e:
    error(format("Error generating dependencies with dot: %(type)s: %(msg)s",
                 type=type(e).__name__, msg=str(e)))
  else:
    info(format("Dependencies graph generated: %(outFile)s", outFile=outFile))
  remove(fp.name)
  return True
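
A hedged usage sketch for deps(). The recipe directory, package names and output file below are placeholders, and Graphviz's dot (plus tred when transitiveRed is set) must be available on PATH:

ok = deps(recipesDir="alidist",       # directory containing the *.sh recipes
          topPackage="O2",            # or "all" to graph every package
          outFile="deps.pdf",
          buildRequires=True,         # also follow build_requires edges
          transitiveRed=True,         # run tred for a transitive reduction
          disable=["AliEn-Runtime"])  # packages to leave out of the graph
if not ok:
    print("dependency graph could not be generated")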
Example 5
def askForAnalytics():
  banner("In order to improve user experience, aliBuild would like to gather "
         "analytics about your builds.\nYou can find all the details at:\n\n"
         "  https://github.com/alisw/alibuild/blob/master/ANALYTICS.md\n")
  a = raw_input("Is that ok for you [YES/no]? ")
  if a.strip() and a.strip().lower().startswith("n"):
    debug("User requsted disabling analytics.")
    return disable_analytics()
  return generate_analytics_id()
Example 6
def generate_analytics_id():
  getstatusoutput("mkdir -p  ~/.config/alibuild")
  err, output = getstatusoutput("uuidgen >  ~/.config/alibuild/analytics-uuid")
  # If an error is found while generating the unique user ID, we disable
  # the analytics on the machine.
  if err:
    debug("Could not generate unique ID for user. Disabling analytics")
    getstatusoutput("touch ~/.config/alibuild/disable-analytics")
    return False
  return True
Example 7
def updateReferenceRepo(referenceSources, p, spec, fetch=True):
    """
  Update source reference area, if possible.
  If the area is already there and cannot be written, assume it is maintained
  by someone else.

  If the area can be created, clone a bare repository with the sources.

  Returns the reference repository's local path if available, otherwise None.
  Throws a fatal error in case the repository cannot be updated even if it
  appears to be writeable.

  @referenceSources : a string containing the path to the sources to be updated
  @p                : the name of the package to be updated
  @spec             : the spec of the package to be updated (an OrderedDict)
  @fetch            : whether to fetch updates: if False, only clone if not found
  """
    assert (type(spec) == OrderedDict)
    if not "source" in spec:
        return

    debug("Updating references.")
    referenceRepo = os.path.join(abspath(referenceSources), p.lower())

    try:
        os.makedirs(abspath(referenceSources))
    except:
        pass

    if not is_writeable(referenceSources):
        if path.exists(referenceRepo):
            debug("Using %s as reference for %s" % (referenceRepo, p))
            return referenceRepo  # reference is read-only
        else:
            debug("Cannot create reference for %s in %s" %
                  (p, referenceSources))
            return None  # no reference can be found and created (not fatal)

    err = False
    if not path.exists(referenceRepo):
        cmd = ["git", "clone", "--bare", spec["source"], referenceRepo]
        debug("Cloning reference repository: %s" % " ".join(cmd))
        err = execute(cmd)
    elif fetch:
        cmd = format(
            "cd %(referenceRepo)s && "
            "git fetch --tags %(source)s 2>&1 && "
            "git fetch %(source)s '+refs/heads/*:refs/heads/*' 2>&1",
            referenceRepo=referenceRepo,
            source=spec["source"])
        debug("Updating reference repository: %s" % cmd)
        err = execute(cmd)
    dieOnError(err,
               "Error while updating reference repos %s." % spec["source"])
    return referenceRepo  # reference is read-write
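
A short usage sketch (the package name, source URL and mirror directory are illustrative; the spec must be an OrderedDict, as asserted above):

from collections import OrderedDict

spec = OrderedDict(source="https://github.com/alisw/zlib")
ref = updateReferenceRepo("/opt/alibuild/MIRROR", "zlib", spec, fetch=True)
if ref:
    # A usable (possibly read-only) bare mirror exists at this path.
    spec["reference"] = ref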
Example 8
def askForAnalytics():
  banner("In order to improve user experience, aliBuild would like to gather "
         "analytics about your builds.\nYou can find all the details at:\n\n"
         "  https://github.com/alisw/alibuild/blob/master/ANALYTICS.md\n")
  # raw_input and input are different between python 2 and 3
  try: _input = raw_input
  except NameError: _input = input
  a = _input("Is that ok for you [YES/no]? ")
  if a.strip() and a.strip().lower().startswith("n"):
    debug("User requsted disabling analytics.")
    return disable_analytics()
  return generate_analytics_id()
Example 9
 def syncToLocal(self, p, spec):
   debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
   cmd = format("mkdir -p %(tarballHashDir)s\n"
                "rsync -av %(ro)s %(remoteStore)s/%(storePath)s/ %(tarballHashDir)s/ || true\n"
                "rsync -av --delete %(ro)s %(remoteStore)s/%(linksPath)s/ %(tarballLinkDir)s/ || true\n",
                ro=self.rsyncOptions,
                remoteStore=self.remoteStore,
                storePath=spec["storePath"],
                linksPath=spec["linksPath"],
                tarballHashDir=spec["tarballHashDir"],
                tarballLinkDir=spec["tarballLinkDir"])
   err = execute(cmd)
   dieOnError(err, "Unable to update from specified store.")
Example 10
def systemInfo():
  _,out = getstatusoutput("env")
  debug("Environment:\n"+out)
  _,out = getstatusoutput("uname -a")
  debug("uname -a: "+out)
  _,out = getstatusoutput("mount")
  debug("Mounts:\n"+out)
  _,out = getstatusoutput("df")
  debug("Disk free:\n"+out)
  for f in ["/etc/lsb-release", "/etc/redhat-release", "/etc/os-release"]:
    err,out = getstatusoutput("cat "+f)
    if not err:
      debug(f+":\n"+out)
Example 11
def systemInfo():
    _, out = getstatusoutput("env")
    debug("Environment:\n" + out)
    _, out = getstatusoutput("uname -a")
    debug("uname -a: " + out)
    _, out = getstatusoutput("mount")
    debug("Mounts:\n" + out)
    _, out = getstatusoutput("df")
    debug("Disk free:\n" + out)
    for f in ["/etc/lsb-release", "/etc/redhat-release", "/etc/os-release"]:
        err, out = getstatusoutput("cat " + f)
        if not err:
            debug(f + ":\n" + out)
Example 12
 def syncToLocal(self, p, spec):
     debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
     cmd = format(
         "mkdir -p %(tarballHashDir)s\n"
         "rsync -av %(ro)s %(remoteStore)s/%(storePath)s/ %(tarballHashDir)s/ || true\n"
         "rsync -av --delete %(ro)s %(remoteStore)s/%(linksPath)s/ %(tarballLinkDir)s/ || true\n",
         ro=self.rsyncOptions,
         remoteStore=self.remoteStore,
         storePath=spec["storePath"],
         linksPath=spec["linksPath"],
         tarballHashDir=spec["tarballHashDir"],
         tarballLinkDir=spec["tarballLinkDir"])
     err = execute(cmd)
     dieOnError(err, "Unable to update from specified store.")
Example 13
def git(args, directory=".", check=True):
  debug("Executing git %s (in directory %s)", " ".join(args), directory)
  # We can't use git --git-dir=%s/.git or git -C %s here as the former requires
  # that the directory we're inspecting be the root of a git directory, not
  # just contained in one (and that breaks CI tests), and the latter isn't
  # supported by the git version we have on slc6.
  # Silence cd as shell configuration can cause the new directory to be echoed.
  err, output = getstatusoutput("""\
  set -e +x
  cd {directory} >/dev/null 2>&1
  exec git {args}
  """.format(directory=quote(directory),
             args=" ".join(map(quote, args))))
  if check and err != 0:
    raise RuntimeError("Error {} from git {}: {}".format(err, " ".join(args), output))
  return output if check else (err, output)
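
Two hedged calling conventions for the git() wrapper above (the checkout path is a placeholder; quote is assumed to come from shlex):

# check=True (default): returns stdout and raises RuntimeError on failure.
head = git(["rev-parse", "HEAD"], directory="/path/to/checkout")

# check=False: returns an (exit_code, output) tuple and never raises.
err, out = git(["ls-remote", "--heads", "."], directory="/path/to/checkout", check=False)
if err:
    print("git failed: " + out)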
Example 14
def checkPreferSystem(spec, cmd, homebrew_replacement, getstatusoutput_docker):
    if cmd == "false":
        debug("Package %s can only be managed via alibuild.", spec["package"])
        return (1, "")
    cmd = homebrew_replacement + cmd
    err, out = getstatusoutput_docker(cmd)
    if not err:
        success("Package %s will be picked up from the system.",
                spec["package"])
        for x in out.split("\n"):
            debug("%s: %s", spec["package"], x)
        return (err, "")

    warning(
        "Package %s cannot be picked up from the system and will be built by aliBuild.\n"
        "This is due to the fact the following script fails:\n\n%s\n\n"
        "with the following output:\n\n%s\n", spec["package"], cmd,
        "\n".join("%s: %s" % (spec["package"], x) for x in out.split("\n")))
    return (err, "")
Example 15
def checkPreferSystem(spec, cmd, homebrew_replacement, dockerImage):
    if cmd == "false":
      debug("Package %s can only be managed via alibuild." % spec["package"])
      return (1, "")
    cmd = homebrew_replacement + cmd
    err, out = dockerStatusOutput(cmd, dockerImage=dockerImage, executor=getStatusOutputBash)
    if not err:
      success("Package %s will be picked up from the system." % spec["package"])
      for x in out.split("\n"):
        debug(spec["package"] + ": " + x)
      return (err, "")

    warning(format("Package %(p)s cannot be picked up from the system and will be built by aliBuild.\n"
                   "This is due to the fact the following script fails:\n\n"
                   "%(cmd)s\n\n"
                   "with the following output:\n\n"
                   "%(error)s\n",
                   p=spec["package"],
                   cmd=cmd,
                   error="\n".join(["%s: %s" % (spec["package"],x) for x in out.split("\n")])))
    return (err, "")
Example 16
def checkRequirements(spec, cmd, homebrew_replacement, dockerImage):
    if cmd == "false":
        debug("Package %s is not a system requirement." % spec["package"])
        return (0, "")
    cmd = homebrew_replacement + cmd
    err, out = dockerStatusOutput(cmd,
                                  dockerImage=dockerImage,
                                  executor=getStatusOutputBash)
    if not err:
        success("Required package %s will be picked up from the system." %
                spec["package"])
        debug(cmd)
        for x in out.split("\n"):
            debug(spec["package"] + ": " + x)
        return (0, "")
    error(
        format(
            "Package %(p)s is a system requirement and cannot be found.\n"
            "This is due to the fact that the following script fails:\n\n"
            "%(cmd)s\n"
            "with the following output:\n\n"
            "%(error)s\n"
            "%(help)s\n",
            p=spec["package"],
            cmd=cmd,
            error="\n".join(
                ["%s: %s" % (spec["package"], x) for x in out.split("\n")]),
            help=spec.get("system_requirement_missing")))
    return (err, "")
Example 17
    def syncToLocal(self, p, spec):
        debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
        hashListUrl = format("%(rs)s/%(sp)s/",
                             rs=self.remoteStore,
                             sp=spec["storePath"])
        pkgListUrl = format("%(rs)s/%(sp)s/",
                            rs=self.remoteStore,
                            sp=spec["linksPath"])
        hashList = []
        pkgList = []
        try:
            if self.insecure:
                context = ssl._create_unverified_context()
                hashList = json.loads(
                    urlopen(hashListUrl, context=context).read())
                pkgList = json.loads(
                    urlopen(pkgListUrl, context=context).read())
            else:
                hashList = json.loads(urlopen(hashListUrl).read())
                pkgList = json.loads(urlopen(pkgListUrl).read())
        except URLError as e:
            debug("Cannot find precompiled package for %s@%s" %
                  (p, spec["hash"]))
            pass
        except Exception as e:
            info(e)
            error("Unknown response from server")

        cmd = format("mkdir -p %(hd)s && "
                     "mkdir -p %(ld)s",
                     hd=spec["tarballHashDir"],
                     ld=spec["tarballLinkDir"])
        execute(cmd)
        hashList = [x["name"] for x in hashList]

        for pkg in hashList:
            cmd = format("curl %(i)s -o %(hd)s/%(n)s -L %(rs)s/%(sp)s/%(n)s\n",
                         i="-k" if self.insecure else "",
                         n=pkg,
                         sp=spec["storePath"],
                         rs=self.remoteStore,
                         hd=spec["tarballHashDir"])
            debug(cmd)
            execute(cmd)
        for pkg in pkgList:
            if pkg["name"] in hashList:
                cmd = format(
                    "ln -sf ../../%(a)s/store/%(sh)s/%(h)s/%(n)s %(ld)s/%(n)s\n",
                    a=self.architecture,
                    h=spec["hash"],
                    sh=spec["hash"][0:2],
                    n=pkg["name"],
                    ld=spec["tarballLinkDir"])
                execute(cmd)
            else:
                cmd = format(
                    "ln -s unknown %(ld)s/%(n)s 2>/dev/null || true\n",
                    ld=spec["tarballLinkDir"],
                    n=pkg["name"])
                execute(cmd)
Example 18
 def syncToLocal(self, p, spec):
     debug("Updating remote store for package %s with hashes %s", p,
           ", ".join(spec["remote_hashes"]))
     err = execute("""\
 for storePath in {storePaths}; do
   # For the first store path that contains tarballs, fetch them, and skip
   # any possible later tarballs (we only need one).
   if [ -n "$(s3cmd ls -s -v --host s3.cern.ch --host-bucket {b}.s3.cern.ch \
                    "s3://{b}/$storePath/")" ]; then
     s3cmd --no-check-md5 sync -s -v --host s3.cern.ch --host-bucket {b}.s3.cern.ch \
           "s3://{b}/$storePath/" "{workDir}/$storePath/" 2>&1 || :
     break
   fi
 done
 mkdir -p "{workDir}/{linksPath}"
 find "{workDir}/{linksPath}" -type l -delete
 curl -sL "https://s3.cern.ch/swift/v1/{b}/{linksPath}.manifest" |
   while IFS='\t' read -r symlink target; do
     ln -sf "../../${{target#../../}}" "{workDir}/{linksPath}/$symlink" || true
   done
 for x in $(curl -sL "https://s3.cern.ch/swift/v1/{b}/?prefix={linksPath}/"); do
   # Skip already existing symlinks -- these were from the manifest.
   # (We delete leftover symlinks from previous runs above.)
   [ -L "{workDir}/{linksPath}/$(basename "$x")" ] && continue
   ln -sf "$(curl -sL "https://s3.cern.ch/swift/v1/{b}/$x" | sed -r 's,^(\\.\\./\\.\\./)?,../../,')" \
      "{workDir}/{linksPath}/$(basename "$x")" || true
 done
 """.format(
         b=self.remoteStore,
         storePaths=" ".join(
             resolve_store_path(self.architecture, pkg_hash)
             for pkg_hash in spec["remote_hashes"]),
         linksPath=resolve_links_path(self.architecture, p),
         workDir=self.workdir,
     ))
     dieOnError(err, "Unable to update from specified store.")
Example 19
  def syncToLocal(self, p, spec):
    debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
    hashListUrl = format("%(rs)s/%(sp)s/",
                        rs=self.remoteStore,
                        sp=spec["storePath"])
    pkgListUrl = format("%(rs)s/%(sp)s/",
                        rs=self.remoteStore,
                        sp=spec["linksPath"])
    hashList = []
    pkgList = []
    try:
      if self.insecure:
        context = ssl._create_unverified_context()
        hashList = json.loads(urlopen(hashListUrl, context=context).read())
        pkgList = json.loads(urlopen(pkgListUrl, context=context).read())
      else:
        hashList = json.loads(urlopen(hashListUrl).read())
        pkgList = json.loads(urlopen(pkgListUrl).read())
    except URLError as e:
      debug("Cannot find precompiled package for %s@%s" % (p, spec["hash"]))
    except Exception as e:
      info(e)
      error("Unknown response from server")

    cmd = format("mkdir -p %(hd)s && "
                 "mkdir -p %(ld)s",
                 hd=spec["tarballHashDir"],
                 ld=spec["tarballLinkDir"])
    execute(cmd)
    hashList = [x["name"] for x in hashList]

    for pkg in hashList:
      cmd = format("curl %(i)s -o %(hd)s/%(n)s -L %(rs)s/%(sp)s/%(n)s\n",
                   i="-k" if self.insecure else "",
                   n=pkg,
                   sp=spec["storePath"],
                   rs=self.remoteStore,
                   hd=spec["tarballHashDir"])
      debug(cmd)
      execute(cmd)
    for pkg in pkgList:
      if pkg["name"] in hashList:
        cmd = format("ln -sf ../../%(a)s/store/%(sh)s/%(h)s/%(n)s %(ld)s/%(n)s\n",
                     a = self.architecture,
                     h = spec["hash"],
                     sh = spec["hash"][0:2],
                     n = pkg["name"],
                     ld = spec["tarballLinkDir"])
        execute(cmd)
      else:
        cmd = format("ln -s unknown %(ld)s/%(n)s 2>/dev/null || true\n",
                     ld = spec["tarballLinkDir"],
                     n = pkg["name"])
        execute(cmd)
Example 20
def updateReferenceRepos(referenceSources, p, spec):
    """
  Update source reference area, if possible.
  If the area is already there and cannot be written, assume it is maintained
  by someone else.

  If the area can be created, clone a bare repository with the sources.

  @referenceSources: a string containing the path to the sources to be updated
  @p: the name of the package (?) to be updated
  @spec: the spec of the package to be updated
  """
    assert (type(spec) == OrderedDict)
    debug("Updating references.")
    referenceRepo = "%s/%s" % (abspath(referenceSources), p.lower())
    if os.access(dirname(referenceSources), os.W_OK):
        getstatusoutput("mkdir -p %s" % referenceSources)
    writeableReference = os.access(referenceSources, os.W_OK)
    if not writeableReference and path.exists(referenceRepo):
        debug("Using %s as reference for %s." % (referenceRepo, p))
        spec["reference"] = referenceRepo
        return
    if not writeableReference:
        debug("Cannot create reference for %s in specified folder.", p)
        return

    err, out = getstatusoutput("mkdir -p %s" % abspath(referenceSources))
    if not "source" in spec:
        return
    if not path.exists(referenceRepo):
        cmd = ["git", "clone", "--bare", spec["source"], referenceRepo]
        debug(" ".join(cmd))
        err = execute(" ".join(cmd))
    else:
        err = execute(
            format(
                "cd %(referenceRepo)s && "
                "git fetch --tags %(source)s 2>&1 && "
                "git fetch %(source)s 2>&1",
                referenceRepo=referenceRepo,
                source=spec["source"]))
    dieOnError(err,
               "Error while updating reference repos %s." % spec["source"])
    spec["reference"] = referenceRepo
Example 21
def decideAnalytics(hasDisableFile, hasUuid, isTty, questionCallback):
    if hasDisableFile:
        debug("Analytics previously disabled.")
        return False
    if hasUuid:
        debug("User has analytics id. Pushing analytics to Google Analytics.")
        return True
    if not isTty:
        debug("This is not an interactive process and "
              "no indication has been given about analytics. Disabling")
        return False
    return questionCallback()
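
A sketch of how the decision function above might be wired up. The file locations follow the ~/.config/alibuild paths used in the analytics examples; treat the exact wiring as an assumption:

import os
import sys

do_analytics = decideAnalytics(
    hasDisableFile=os.path.exists(os.path.expanduser("~/.config/alibuild/disable-analytics")),
    hasUuid=os.path.exists(os.path.expanduser("~/.config/alibuild/analytics-uuid")),
    isTty=sys.stdout.isatty(),
    questionCallback=askForAnalytics)  # interactive prompt, see the earlier example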
Example 22
def decideAnalytics(hasDisableFile, hasUuid, isTty, questionCallback):
  if hasDisableFile:
    debug("Analytics previously disabled.")
    return False
  if hasUuid:
    debug("User has analytics id. Pushing analytics to Google Analytics.")
    return True
  if not isTty:
    debug("This is not an interactive process and "
          "no indication has been given about analytics. Disabling")
    return False
  return questionCallback()
Example 23
def checkRequirements(spec, cmd, homebrew_replacement, getstatusoutput_docker):
    if cmd == "false":
        debug("Package %s is not a system requirement.", spec["package"])
        return (0, "")
    cmd = homebrew_replacement + cmd
    err, out = getstatusoutput_docker(cmd)
    if not err:
        success("Required package %s will be picked up from the system.",
                spec["package"])
        debug("%s", cmd)
        for x in out.split("\n"):
            debug("%s: %s", spec["package"], x)
        return (0, "")
    error(
        "Package %s is a system requirement and cannot be found.\n"
        "This is due to the fact that the following script fails:\n\n%s\n"
        "with the following output:\n\n%s\n%s\n", spec["package"], cmd,
        "\n".join("%s: %s" % (spec["package"], x) for x in out.split("\n")),
        spec.get("system_requirement_missing"))
    return (err, "")
Example 24
def checkRequirements(spec, cmd, homebrew_replacement, dockerImage):
    if cmd == "false":
      debug("Package %s is not a system requirement." % spec["package"])
      return (0, "")
    cmd = homebrew_replacement + cmd
    err, out = dockerStatusOutput(cmd, dockerImage=dockerImage, executor=getStatusOutputBash)
    if not err:
      success("Required package %s will be picked up from the system." % spec["package"])
      debug(cmd)
      for x in out.split("\n"):
        debug(spec["package"] + ": " + x)
      return (0, "")
    error(format("Package %(p)s is a system requirement and cannot be found.\n"
                 "This is due to the fact that the following script fails:\n\n"
                 "%(cmd)s\n"
                 "with the following output:\n\n"
                 "%(error)s\n"
                 "%(help)s\n",
                 p=spec["package"],
                 cmd=cmd,
                 error="\n".join(["%s: %s" % (spec["package"],x) for x in out.split("\n")]),
                 help=spec.get("system_requirement_missing")))
    return (err, "")
Example 25
def doInit(args):
    assert (args.pkgname != None)
    assert (type(args.dist) == dict)
    assert (sorted(args.dist.keys()) == ["repo", "ver"])
    pkgs = parsePackagesDefinition(args.pkgname)
    assert (type(pkgs) == list)
    if args.dryRun:
        info(
            "This will initialise local checkouts for %s\n"
            "--dry-run / -n specified. Doing nothing.",
            ",".join(x["name"] for x in pkgs))
        sys.exit(0)
    try:
        path.exists(args.develPrefix) or os.mkdir(args.develPrefix)
        path.exists(args.referenceSources) or os.makedirs(
            args.referenceSources)
    except OSError as e:
        error("%s", e)
        sys.exit(1)

    # Fetch recipes first if necessary
    if path.exists(args.configDir):
        warning("using existing recipes from %s", args.configDir)
    else:
        cmd = [
            "clone", "--origin", "upstream", args.dist["repo"] if ":"
            in args.dist["repo"] else "https://github.com/" + args.dist["repo"]
        ]
        if args.dist["ver"]:
            cmd.extend(["-b", args.dist["ver"]])
        cmd.append(args.configDir)
        git(cmd)

    # Use standard functions supporting overrides and taps. Ignore all disables
    # and system packages as they are irrelevant in this context
    specs = {}
    defaultsReader = lambda: readDefaults(
        args.configDir, args.defaults, lambda msg: error("%s", msg),
        args.architecture)
    (err, overrides, taps) = parseDefaults([], defaultsReader, debug)
    (_, _, _, validDefaults) = getPackageList(
        packages=[p["name"] for p in pkgs],
        specs=specs,
        configDir=args.configDir,
        preferSystem=False,
        noSystem=True,
        architecture="",
        disable=[],
        defaults=args.defaults,
        performPreferCheck=lambda *x, **y: (1, ""),
        performRequirementCheck=lambda *x, **y: (0, ""),
        performValidateDefaults=lambda spec: validateDefaults(
            spec, args.defaults),
        overrides=overrides,
        taps=taps,
        log=debug)
    dieOnError(
        validDefaults and args.defaults not in validDefaults,
        "Specified default `%s' is not compatible with the packages you want to build.\n"
        % args.defaults + "Valid defaults:\n\n- " +
        "\n- ".join(sorted(validDefaults)))

    for p in pkgs:
        spec = specs.get(p["name"])
        dieOnError(spec is None,
                   "cannot find recipe for package %s" % p["name"])
        dest = join(args.develPrefix, spec["package"])
        writeRepo = spec.get("write_repo", spec.get("source"))
        dieOnError(
            not writeRepo,
            "package %s has no source field and cannot be developed" %
            spec["package"])
        if path.exists(dest):
            warning("not cloning %s since it already exists", spec["package"])
            continue
        p["ver"] = p["ver"] if p["ver"] else spec.get("tag", spec["version"])
        debug("cloning %s%s for development", spec["package"],
              " version " + p["ver"] if p["ver"] else "")

        updateReferenceRepoSpec(args.referenceSources, spec["package"], spec,
                                True, False)

        cmd = [
            "clone", "--origin", "upstream", spec["source"], "--reference",
            join(args.referenceSources, spec["package"].lower())
        ]
        if p["ver"]:
            cmd.extend(["-b", p["ver"]])
        cmd.append(dest)
        git(cmd)
        git(("remote", "set-url", "--push", "upstream", writeRepo),
            directory=dest)

        # Make it point relative to the mirrors for relocation: as per Git specifics, the path has to
        # be relative to the repository's `.git` directory. Don't do it if no common path is found.
        repoObjects = os.path.join(os.path.realpath(dest), ".git", "objects")
        refObjects = os.path.join(os.path.realpath(args.referenceSources),
                                  spec["package"].lower(), "objects")
        repoAltConf = os.path.join(repoObjects, "info", "alternates")
        if len(os.path.commonprefix([repoObjects, refObjects])) > 1:
            with open(repoAltConf, "w") as fil:
                fil.write(os.path.relpath(refObjects, repoObjects) + "\n")

    banner(
        "Development directory %s created%s", args.develPrefix,
        " for " + ", ".join(x["name"].lower() for x in pkgs) if pkgs else "")
Example 26
def deps(recipesDir, topPackage, outFile, buildRequires, transitiveRed,
         disable):
    dot = {}
    keys = ["requires"]
    if buildRequires:
        keys.append("build_requires")
    for p in glob("%s/*.sh" % recipesDir):
        debug(format("Reading file %(filename)s", filename=p))
        try:
            err, recipe, _ = parseRecipe(getRecipeReader(p))
            name = recipe["package"]
            if name in disable:
                debug("Ignoring %s, disabled explicitly" % name)
                continue
        except Exception as e:
            error(
                format("Error reading recipe %(filename)s: %(type)s: %(msg)s",
                       filename=p,
                       type=type(e).__name__,
                       msg=str(e)))
            sys.exit(1)
        dot[name] = dot.get(name, [])
        for k in keys:
            for d in recipe.get(k, []):
                d = d.split(":")[0]
                d in disable or dot[name].append(d)

    selected = None
    if topPackage != "all":
        if not topPackage in dot:
            error(
                format("Package %(topPackage)s does not exist",
                       topPackage=topPackage))
            return False
        selected = [topPackage]
        olen = 0
        while len(selected) != olen:
            olen = len(selected)
            selected += [
                x for s in selected if s in dot for x in dot[s]
                if not x in selected
            ]
        selected.sort()

    result = "digraph {\n"
    for p, deps in list(dot.items()):
        if selected and not p in selected: continue
        result += "  \"%s\";\n" % p
        for d in deps:
            result += "  \"%s\" -> \"%s\";\n" % (p, d)
    result += "}\n"

    with NamedTemporaryFile(delete=False) as fp:
        fp.write(result)
    try:
        if transitiveRed:
            execute(
                format(
                    "tred %(dotFile)s > %(dotFile)s.0 && mv %(dotFile)s.0 %(dotFile)s",
                    dotFile=fp.name))
        execute(["dot", fp.name, "-Tpdf", "-o", outFile])
    except Exception as e:
        error(
            format("Error generating dependencies with dot: %(type)s: %(msg)s",
                   type=type(e).__name__,
                   msg=str(e)))
    else:
        info(
            format("Dependencies graph generated: %(outFile)s",
                   outFile=outFile))
    remove(fp.name)
    return True
Example 27
def doBuild(args, parser):
  if args.remoteStore.startswith("http"):
    syncHelper = HttpRemoteSync(args.remoteStore, args.architecture, args.workDir, args.insecure)
  elif args.remoteStore:
    syncHelper = RsyncRemoteSync(args.remoteStore, args.writeStore, args.architecture, args.workDir, "")
  else:
    syncHelper = NoRemoteSync()

  packages = args.pkgname
  dockerImage = args.dockerImage if "dockerImage" in args else None
  specs = {}
  buildOrder = []
  workDir = abspath(args.workDir)
  prunePaths(workDir)

  if not exists(args.configDir):
    return (error, ("Cannot find %sdist recipes under directory \"%s\".\n" +
                    "Maybe you need to \"cd\" to the right directory or " +
                    "you forgot to run \"aliBuild init\"?") % (star(), args.configDir), 1)

  defaultsReader = lambda : readDefaults(args.configDir, args.defaults, parser.error)
  (err, overrides, taps) = parseDefaults(args.disable,
                                         defaultsReader, debug)
  dieOnError(err, err)

  specDir = "%s/SPECS" % workDir
  if not exists(specDir):
    makedirs(specDir)

  os.environ["ALIBUILD_ALIDIST_HASH"] = getDirectoryHash(args.configDir)

  debug("Building for architecture %s" % args.architecture)
  debug("Number of parallel builds: %d" % args.jobs)
  debug(format("Using %(star)sBuild from "
               "%(star)sbuild@%(toolHash)s recipes "
               "in %(star)sdist@%(distHash)s",
               star=star(),
               toolHash=getDirectoryHash(dirname(__file__)),
               distHash=os.environ["ALIBUILD_ALIDIST_HASH"]))

  (systemPackages, ownPackages, failed, validDefaults) = getPackageList(packages                = packages,
                                                                        specs                   = specs,
                                                                        configDir               = args.configDir,
                                                                        preferSystem            = args.preferSystem,
                                                                        noSystem                = args.noSystem,
                                                                        architecture            = args.architecture,
                                                                        disable                 = args.disable,
                                                                        defaults                = args.defaults,
                                                                        dieOnError              = dieOnError,
                                                                        performPreferCheck      = lambda pkg, cmd : dockerStatusOutput(cmd, dockerImage, executor=getStatusOutputBash),
                                                                        performRequirementCheck = lambda pkg, cmd : dockerStatusOutput(cmd, dockerImage, executor=getStatusOutputBash),
                                                                        performValidateDefaults = lambda spec : validateDefaults(spec, args.defaults),
                                                                        overrides               = overrides,
                                                                        taps                    = taps,
                                                                        log                     = debug)
  if validDefaults and args.defaults not in validDefaults:
    return (error, "Specified default `%s' is not compatible with the packages you want to build.\n" % args.defaults +
                   "Valid defaults:\n\n- " +
                   "\n- ".join(sorted(validDefaults)), 1)

  if failed:
    return (error, "The following packages are system requirements and could not be found:\n\n- " + "\n- ".join(sorted(list(failed))) +
                   "\n\nPlease run:\n\n\taliDoctor %s\n\nto get a full diagnosis." % args.pkgname.pop(), 1)

  for x in specs.values():
    x["requires"] = [r for r in x["requires"] if not r in args.disable]
    x["build_requires"] = [r for r in x["build_requires"] if not r in args.disable]
    x["runtime_requires"] = [r for r in x["runtime_requires"] if not r in args.disable]

  if systemPackages:
    banner("%sBuild can take the following packages from the system and will not build them:\n  %s" %
           (star(), ", ".join(systemPackages)))
  if ownPackages:
    banner("The following packages cannot be taken from the system and will be built:\n  %s" %
           ", ".join(ownPackages))

  # Do topological sort to have the correct build order even in the
  # case of non-tree-like dependencies.
  # The actual algorithm used can be found at:
  #
  # http://www.stoimen.com/blog/2012/10/01/computer-algorithms-topological-sort-of-a-graph/
  #
  edges = [(p["package"], d) for p in specs.values() for d in p["requires"] ]
  L = [l for l in specs.values() if not l["requires"]]
  S = []
  while L:
    spec = L.pop(0)
    S.append(spec)
    nextVertex = [e[0] for e in edges if e[1] == spec["package"]]
    edges = [e for e in edges if e[1] != spec["package"]]
    hasPredecessors = set([m for e in edges for m in nextVertex if e[0] == m])
    withPredecessor = set(nextVertex) - hasPredecessors
    L += [specs[m] for m in withPredecessor]
  buildOrder = [s["package"] for s in S]

  # Date fields to substitute: they are zero-padded
  now = datetime.now()
  nowKwds = { "year": str(now.year),
              "month": str(now.month).zfill(2),
              "day": str(now.day).zfill(2),
              "hour": str(now.hour).zfill(2) }

  # Check if any of the packages can be picked up from a local checkout
  develCandidates = [basename(d) for d in glob("*") if os.path.isdir(d)]
  develCandidatesUpper = [basename(d).upper() for d in glob("*") if os.path.isdir(d)]
  develPkgs = [p for p in buildOrder
               if p in develCandidates and p not in args.noDevel]
  develPkgsUpper = [(p, p.upper()) for p in buildOrder
                    if p.upper() in develCandidatesUpper and p not in args.noDevel]
  if set(develPkgs) != set(x for (x, y) in develPkgsUpper):
    return (error, format("The following development packages have wrong spelling: %(pkgs)s.\n"
                          "Please check your local checkout and adapt to the correct one indicated.",
                          pkgs=", ".join(set(x.strip() for (x,y) in develPkgsUpper) - set(develPkgs))), 1)

  if buildOrder:
    banner("Packages will be built in the following order:\n - %s" %
           "\n - ".join([ x+" (development package)" if x in develPkgs else "%s@%s" % (x, specs[x]["tag"]) for x in buildOrder if x != "defaults-release" ]))

  if develPkgs:
    banner(format("You have packages in development mode.\n"
                  "This means their source code can be freely modified under:\n\n"
                  "  %(pwd)s/<package_name>\n\n"
                  "%(star)sBuild does not automatically update such packages to avoid work loss.\n"
                  "In most cases this is achieved by doing in the package source directory:\n\n"
                  "  git pull --rebase\n",
                  pwd=os.getcwd(), star=star()))

  # Clone/update repos
  for p in [p for p in buildOrder if "source" in specs[p]]:
    updateReferenceRepoSpec(args.referenceSources, p, specs[p], args.fetchRepos)

    # Retrieve git heads
    cmd = "git ls-remote --heads %s" % (specs[p]["reference"] if "reference" in specs[p] else specs[p]["source"])
    if specs[p]["package"] in develPkgs:
       specs[p]["source"] = join(os.getcwd(), specs[p]["package"])
       cmd = "git ls-remote --heads %s" % specs[p]["source"]
    debug("Executing %s" % cmd)
    res, output = getStatusOutputBash(cmd)
    dieOnError(res, "Error on '%s': %s" % (cmd, output))
    specs[p]["git_heads"] = output.split("\n")

  # Resolve the tag to the actual commit ref
  for p in buildOrder:
    spec = specs[p]
    spec["commit_hash"] = "0"
    develPackageBranch = ""
    if "source" in spec:
      # Tag may contain date params like %(year)s, %(month)s, %(day)s, %(hour)s.
      spec["tag"] = format(spec["tag"], **nowKwds)
      # By default we assume tag is a commit hash. We then try to find
      # out if the tag is actually a branch and we use the tip of the branch
      # as commit_hash. Finally if the package is a development one, we use the
      # name of the branch as commit_hash.
      spec["commit_hash"] = spec["tag"]
      for head in spec["git_heads"]:
        if head.endswith("refs/heads/{0}".format(spec["tag"])) or spec["package"] in develPkgs:
          spec["commit_hash"] = head.split("\t", 1)[0]
          # We are in development mode, we need to rebuild if the commit hash
          # is different and if there are extra changes on top.
          if spec["package"] in develPkgs:
            # Devel package: we get the commit hash from the checked source, not from remote.
            cmd = "cd %s && git rev-parse HEAD" % spec["source"]
            err, out = getstatusoutput(cmd)
            dieOnError(err, "Unable to detect current commit hash.")
            spec["commit_hash"] = out.strip()
            cmd = "cd %s && git diff -r HEAD && git status --porcelain" % spec["source"]
            h = Hasher()
            err = execute(cmd, h)
            debug(err, cmd)
            dieOnError(err, "Unable to detect source code changes.")
            spec["devel_hash"] = spec["commit_hash"] + h.hexdigest()
            cmd = "cd %s && git rev-parse --abbrev-ref HEAD" % spec["source"]
            err, out = getstatusoutput(cmd)
            if out == "HEAD":
              err, out = getstatusoutput("cd %s && git rev-parse HEAD" % spec["source"])
              out = out[0:10]
            if err:
              return (error, "Error, unable to lookup changes in development package %s. Is it a git clone?" % spec["source"], 1)
            develPackageBranch = out.replace("/", "-")
            spec["tag"] = args.develPrefix if "develPrefix" in args else develPackageBranch
            spec["commit_hash"] = "0"
          break

    # Version may contain date params like tag, plus %(commit_hash)s,
    # %(short_hash)s and %(tag)s.
    defaults_upper = args.defaults != "release" and "_" + args.defaults.upper().replace("-", "_") or ""
    spec["version"] = format(spec["version"],
                             commit_hash=spec["commit_hash"],
                             short_hash=spec["commit_hash"][0:10],
                             tag=spec["tag"],
                             tag_basename=basename(spec["tag"]),
                             defaults_upper=defaults_upper,
                             **nowKwds)

    if spec["package"] in develPkgs and "develPrefix" in args and args.develPrefix != "ali-master":
      spec["version"] = args.develPrefix

  # Decide what is the main package we are building and at what commit.
  #
  # We emit an event for the main package, when encountered, so that we can use
  # it to index builds of the same hash on different architectures. We also
  # make sure we add the main package and its hash to the debug log, so that we
  # can always extract it from it.
  # If one of the special packages is in the list of packages to be built,
  # we use it as main package, rather than the last one.
  if not buildOrder:
    return (banner, "Nothing to be done.", 0)
  mainPackage = buildOrder[-1]
  mainHash = specs[mainPackage]["commit_hash"]

  debug("Main package is %s@%s" % (mainPackage, mainHash))
  if args.debug:
    logger_handler.setFormatter(
        LogFormatter("%%(levelname)s:%s:%s: %%(message)s" %
                     (mainPackage, args.develPrefix if "develPrefix" in args else mainHash[0:8])))

  # Now that we have the main package set, we can print out useful information
  # which we will be able to associate with this build. Also let's make sure each package
  # we need to build can be built with the current default.
  for p in buildOrder:
    spec = specs[p]
    if "source" in spec:
      debug("Commit hash for %s@%s is %s" % (spec["source"], spec["tag"], spec["commit_hash"]))

  # Calculate the hashes. We do this in build order so that we can guarantee
  # that the hashes of the dependencies are calculated first.  Also notice that
  # if the commit hash is a real hash, and not a tag, we can safely assume
  # that's unique, and therefore we can avoid putting the repository or the
  # name of the branch in the hash.
  debug("Calculating hashes.")
  for p in buildOrder:
    spec = specs[p]
    debug(spec)
    debug(develPkgs)
    h = Hasher()
    dh = Hasher()
    for x in ["recipe", "version", "package", "commit_hash",
              "env", "append_path", "prepend_path"]:
      if sys.version_info[0] < 3 and x in spec and type(spec[x]) == OrderedDict:
        # Python 2: use YAML dict order to prevent changing hashes
        h(str(yaml.safe_load(yamlDump(spec[x]))))
      else:
        h(str(spec.get(x, "none")))
    if spec["commit_hash"] == spec.get("tag", "0"):
      h(spec.get("source", "none"))
      if "source" in spec:
        h(spec["tag"])
    for dep in spec.get("requires", []):
      h(specs[dep]["hash"])
      dh(specs[dep]["hash"] + specs[dep].get("devel_hash", ""))
    if bool(spec.get("force_rebuild", False)):
      h(str(time.time()))
    if spec["package"] in develPkgs and "incremental_recipe" in spec:
      h(spec["incremental_recipe"])
      ih = Hasher()
      ih(spec["incremental_recipe"])
      spec["incremental_hash"] = ih.hexdigest()
    elif p in develPkgs:
      h(spec.get("devel_hash"))
    if args.architecture.startswith("osx") and "relocate_paths" in spec:
        h("relocate:"+" ".join(sorted(spec["relocate_paths"])))
    spec["hash"] = h.hexdigest()
    spec["deps_hash"] = dh.hexdigest()
    debug("Hash for recipe %s is %s" % (p, spec["hash"]))

  # This adds to the spec where it should find, locally or remotely, the
  # various tarballs and links.
  for p in buildOrder:
    spec = specs[p]
    pkgSpec = {
      "workDir": workDir,
      "package": spec["package"],
      "version": spec["version"],
      "hash": spec["hash"],
      "prefix": spec["hash"][0:2],
      "architecture": args.architecture
    }
    varSpecs = [
      ("storePath", "TARS/%(architecture)s/store/%(prefix)s/%(hash)s"),
      ("linksPath", "TARS/%(architecture)s/%(package)s"),
      ("tarballHashDir", "%(workDir)s/TARS/%(architecture)s/store/%(prefix)s/%(hash)s"),
      ("tarballLinkDir", "%(workDir)s/TARS/%(architecture)s/%(package)s"),
      ("buildDir", "%(workDir)s/BUILD/%(hash)s/%(package)s")
    ]
    spec.update(dict([(x, format(y, **pkgSpec)) for (x, y) in varSpecs]))
    spec["old_devel_hash"] = readHashFile(spec["buildDir"]+"/.build_succeeded")

  # We recursively calculate the full set of requires "full_requires"
  # including build_requires and the subset of them which are needed at
  # runtime "full_runtime_requires".
  for p in buildOrder:
    spec = specs[p]
    todo = [p]
    spec["full_requires"] = []
    spec["full_runtime_requires"] = []
    while todo:
      i = todo.pop(0)
      requires = specs[i].get("requires", [])
      runTimeRequires = specs[i].get("runtime_requires", [])
      spec["full_requires"] += requires
      spec["full_runtime_requires"] += runTimeRequires
      todo += requires
    spec["full_requires"] = set(spec["full_requires"])
    spec["full_runtime_requires"] = set(spec["full_runtime_requires"])

  debug("We will build packages in the following order: %s" % " ".join(buildOrder))
  if args.dryRun:
    return (info, "--dry-run / -n specified. Not building.", 0)

  # We now iterate on all the packages, making sure we build correctly every
  # single one of them. This is done this way so that the second time we run we
  # can check if the build was consistent and if it is, we bail out.
  packageIterations = 0
  report_event("install",
               format("%(p)s disabled=%(dis)s devel=%(dev)s system=%(sys)s own=%(own)s deps=%(deps)s",
                      p=args.pkgname,
                      dis=",".join(sorted(args.disable)),
                      dev=",".join(sorted(develPkgs)),
                      sys=",".join(sorted(systemPackages)),
                      own=",".join(sorted(ownPackages)),
                      deps=",".join(buildOrder[:-1])
                     ),
               args.architecture)

  while buildOrder:
    packageIterations += 1
    if packageIterations > 20:
      return (error, "Too many attempts at building %s. Something wrong with the repository?" % spec["package"], 1)
    p = buildOrder[0]
    spec = specs[p]
    if args.debug:
      logger_handler.setFormatter(
          LogFormatter("%%(levelname)s:%s:%s:%s: %%(message)s" %
                       (mainPackage, p, args.develPrefix if "develPrefix" in args else mainHash[0:8])))
    if spec["package"] in develPkgs and getattr(syncHelper, "writeStore", None):
      warning("Disabling remote write store from now since %s is a development package." % spec["package"])
      syncHelper.writeStore = ""

    # Since we can execute this multiple times for a given package, in order to
    # ensure consistency, we need to reset things and make them pristine.
    spec.pop("revision", None)
    riemannStream.setAttributes(package = spec["package"],
                                package_hash = spec["version"],
                                architecture = args.architecture,
                                defaults = args.defaults)
    riemannStream.setState("warning")

    debug("Updating from tarballs")
    # If we arrived here it really means we have a tarball which was created
    # using the same recipe. We will use it as a cache for the build. This means
    # that while we will still perform the build process, rather than
    # executing the build itself we will:
    #
    # - Unpack it in a temporary place.
    # - Invoke the relocation specifying the correct work_dir and the
    #   correct path which should have been used.
    # - Move the version directory to its final destination, including the
    #   correct revision.
    # - Repack it and put it in the store with the
    #
    # this will result in a new package which has the same binary contents as
    # the old one but where the relocation will work for the new directory. Here
    # we simply store the fact that we can reuse the contents of cachedTarball.
    syncHelper.syncToLocal(p, spec)

    # Decide how it should be called, based on the hash and what is already
    # available.
    debug("Checking for packages already built.")
    linksGlob = format("%(w)s/TARS/%(a)s/%(p)s/%(p)s-%(v)s-*.%(a)s.tar.gz",
                       w=workDir,
                       a=args.architecture,
                       p=spec["package"],
                       v=spec["version"])
    debug("Glob pattern used: %s" % linksGlob)
    packages = glob(linksGlob)
    # In case there is no installed software, revision is 1
    # If there is already an installed package:
    # - Remove it if we do not know its hash
    # - Use the latest number in the version, to decide its revision
    debug("Packages already built using this version\n%s" % "\n".join(packages))
    busyRevisions = []

    # Calculate the build_family for the package
    #
    # If the package is a devel package, we need to associate it a devel
    # prefix, either via the -z option or using its checked out branch. This
    # affects its build hash.
    #
    # Moreover we need to define a global "buildFamily" which is used
    # to tag all the packages incurred in the build, this way we can have
    # a latest-<buildFamily> link for all of them and we will not incur in the
    # flip-flopping described in https://github.com/alisw/alibuild/issues/325.
    develPrefix = ""
    possibleDevelPrefix = getattr(args, "develPrefix", develPackageBranch)
    if spec["package"] in develPkgs:
      develPrefix = possibleDevelPrefix

    if possibleDevelPrefix:
      spec["build_family"] = "%s-%s" % (possibleDevelPrefix, args.defaults)
    else:
      spec["build_family"] = args.defaults
    if spec["package"] == mainPackage:
      mainBuildFamily = spec["build_family"]

    for d in packages:
      realPath = readlink(d)
      matcher = format("../../%(a)s/store/[0-9a-f]{2}/([0-9a-f]*)/%(p)s-%(v)s-([0-9]*).%(a)s.tar.gz$",
                       a=args.architecture,
                       p=spec["package"],
                       v=spec["version"])
      m = re.match(matcher, realPath)
      if not m:
        continue
      h, revision = m.groups()
      revision = int(revision)

      # If we have an hash match, we use the old revision for the package
      # and we do not need to build it.
      if h == spec["hash"]:
        spec["revision"] = revision
        if spec["package"] in develPkgs and "incremental_recipe" in spec:
          spec["obsolete_tarball"] = d
        else:
          debug("Package %s with hash %s is already found in %s. Not building." % (p, h, d))
          src = format("%(v)s-%(r)s",
                       w=workDir,
                       v=spec["version"],
                       r=spec["revision"])
          dst1 = format("%(w)s/%(a)s/%(p)s/latest-%(bf)s",
                        w=workDir,
                        a=args.architecture,
                        p=spec["package"],
                        bf=spec["build_family"])
          dst2 = format("%(w)s/%(a)s/%(p)s/latest",
                        w=workDir,
                        a=args.architecture,
                        p=spec["package"])

          getstatusoutput("ln -snf %s %s" % (src, dst1))
          getstatusoutput("ln -snf %s %s" % (src, dst2))
          info("Using cached build for %s" % p)
        break
      else:
        busyRevisions.append(revision)

    if not "revision" in spec and busyRevisions:
      spec["revision"] = min(set(range(1, max(busyRevisions)+2)) - set(busyRevisions))
    elif not "revision" in spec:
      spec["revision"] = "1"

    # Check if this development package needs to be rebuilt.
    if spec["package"] in develPkgs:
      debug("Checking if devel package %s needs rebuild" % spec["package"])
      if spec["devel_hash"]+spec["deps_hash"] == spec["old_devel_hash"]:
        info("Development package %s does not need rebuild" % spec["package"])
        buildOrder.pop(0)
        continue

    # Now that we have all the information about the package we want to build, let's
    # check if it wasn't built / unpacked already.
    hashFile = "%s/%s/%s/%s-%s/.build-hash" % (workDir,
                                               args.architecture,
                                               spec["package"],
                                               spec["version"],
                                               spec["revision"])
    fileHash = readHashFile(hashFile)
    if fileHash != spec["hash"]:
      if fileHash != "0":
        debug("Mismatch between local area (%s) and the one which I should build (%s). Redoing." % (fileHash, spec["hash"]))
      shutil.rmtree(dirname(hashFile), True)
    else:
      # If we get here, we know we are in sync with whatever remote store we use.  We
      # can therefore create a directory which contains all the packages which
      # were used to compile this one.
      riemannStream.setState('ok')
      debug("Package %s was correctly compiled. Moving to next one." % spec["package"])
      # If using incremental builds, next time we execute the script we need to remove
      # the placeholders which avoid rebuilds.
      if spec["package"] in develPkgs and "incremental_recipe" in spec:
        unlink(hashFile)
      if "obsolete_tarball" in spec:
        unlink(realpath(spec["obsolete_tarball"]))
        unlink(spec["obsolete_tarball"])
      # We need to create several sets of dist links: one with the full
      # requires, one with only the direct dependencies (that's what is
      # needed to register packages in Alien), and one with the full
      # runtime requires.
      createDistLinks(spec, specs, args, "dist", "full_requires")
      createDistLinks(spec, specs, args, "dist-direct", "requires")
      createDistLinks(spec, specs, args, "dist-runtime", "full_runtime_requires")
      buildOrder.pop(0)
      packageIterations = 0
      # We can now delete the INSTALLROOT and BUILD directories,
      # assuming the package is not a development one. We also can
      # delete the SOURCES in case we have aggressive-cleanup enabled.
      if not spec["package"] in develPkgs and args.autoCleanup:
        cleanupDirs = [format("%(w)s/BUILD/%(h)s",
                              w=workDir,
                              h=spec["hash"]),
                       format("%(w)s/INSTALLROOT/%(h)s",
                              w=workDir,
                              h=spec["hash"])]
        if args.aggressiveCleanup:
          cleanupDirs.append(format("%(w)s/SOURCES/%(p)s",
                                    w=workDir,
                                    p=spec["package"]))
        debug("Cleaning up:\n" + "\n".join(cleanupDirs))

        for d in cleanupDirs:
          shutil.rmtree(d.encode("utf8"), True)
        try:
          unlink(format("%(w)s/BUILD/%(p)s-latest",
                 w=workDir, p=spec["package"]))
          if "develPrefix" in args:
            unlink(format("%(w)s/BUILD/%(p)s-latest-%(dp)s",
                   w=workDir, p=spec["package"], dp=args.develPrefix))
        except:
          pass
        try:
          rmdir(format("%(w)s/BUILD",
                w=workDir, p=spec["package"]))
          rmdir(format("%(w)s/INSTALLROOT",
                w=workDir, p=spec["package"]))
        except:
          pass
      continue

    debug("Looking for cached tarball in %s" % spec["tarballHashDir"])
    # FIXME: I should get the tarballHashDir updated from the server at this point.
    #        It does not really matter whether the symlinks are ok at this point,
    #        as we only use the tarballs as reusable binary blobs.
    spec["cachedTarball"] = ""
    if not spec["package"] in develPkgs:
      tarballs = [x
                  for x in glob("%s/*" % spec["tarballHashDir"])
                  if x.endswith("gz")]
      spec["cachedTarball"] = tarballs[0] if len(tarballs) else ""
      debug(spec["cachedTarball"] and
            "Found tarball in %s" % spec["cachedTarball"] or
            "No cache tarballs found")

    # Generate the part which sources the environment for all the dependencies.
    # Notice that we guarantee that a dependency is always sourced before the
    # parts depending on it, but we do not guarantee anything about the order in
    # which unrelated components are activated.
    dependencies = ""
    dependenciesInit = ""
    for dep in spec.get("requires", []):
      depSpec = specs[dep]
      depInfo = {
        "architecture": args.architecture,
        "package": dep,
        "version": depSpec["version"],
        "revision": depSpec["revision"],
        "bigpackage": dep.upper().replace("-", "_")
      }
      dependencies += format("[ \"X$%(bigpackage)s_VERSION\" = X  ] && source \"$WORK_DIR/%(architecture)s/%(package)s/%(version)s-%(revision)s/etc/profile.d/init.sh\"\n",
                             **depInfo)
      dependenciesInit += format('echo [ \\\"X\$%(bigpackage)s_VERSION\\\" = X ] \&\& source \${WORK_DIR}/%(architecture)s/%(package)s/%(version)s-%(revision)s/etc/profile.d/init.sh >> \"$INSTALLROOT/etc/profile.d/init.sh\"\n',
                             **depInfo)
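    # Illustrative example (values made up): for a dependency "zlib" at
    # version 1.2.11, revision 1 on slc7_x86-64, the "dependencies" snippet
    # above expands to a line like:
    #   [ "X$ZLIB_VERSION" = X  ] && source "$WORK_DIR/slc7_x86-64/zlib/1.2.11-1/etc/profile.d/init.sh"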
    # Generate the part which creates the environment for the package.
    # This can be either variables set via the "env" keyword in the metadata
    # or paths which get appended via the "append_path" one.
    # By default we prepend to LD_LIBRARY_PATH, DYLD_LIBRARY_PATH and PATH.
    # FIXME: do not append variables for Mac on Linux.
    environment = ""
    dieOnError(not isinstance(spec.get("env", {}), dict),
               "Tag `env' in %s should be a dict." % p)
    for key,value in spec.get("env", {}).items():
      environment += format("echo 'export %(key)s=\"%(value)s\"' >> $INSTALLROOT/etc/profile.d/init.sh\n",
                            key=key,
                            value=value)
    basePath = "%s_ROOT" % p.upper().replace("-", "_")

    pathDict = spec.get("append_path", {})
    dieOnError(not isinstance(pathDict, dict),
               "Tag `append_path' in %s should be a dict." % p)
    for pathName,pathVal in pathDict.items():
      pathVal = isinstance(pathVal, list) and pathVal or [ pathVal ]
      environment += format("\ncat << \EOF >> \"$INSTALLROOT/etc/profile.d/init.sh\"\nexport %(key)s=$%(key)s:%(value)s\nEOF",
                            key=pathName,
                            value=":".join(pathVal))

    # Same thing, but prepending the results so that they win against system ones.
    defaultPrependPaths = { "LD_LIBRARY_PATH": "$%s/lib" % basePath,
                            "DYLD_LIBRARY_PATH": "$%s/lib" % basePath,
                            "PATH": "$%s/bin" % basePath }
    pathDict = spec.get("prepend_path", {})
    dieOnError(not isinstance(pathDict, dict),
               "Tag `prepend_path' in %s should be a dict." % p)
    for pathName,pathVal in pathDict.items():
      pathDict[pathName] = isinstance(pathVal, list) and pathVal or [ pathVal ]
    for pathName,pathVal in defaultPrependPaths.items():
      pathDict[pathName] = [ pathVal ] + pathDict.get(pathName, [])
    for pathName,pathVal in pathDict.items():
      environment += format("\ncat << \EOF >> \"$INSTALLROOT/etc/profile.d/init.sh\"\nexport %(key)s=%(value)s:$%(key)s\nEOF",
                            key=pathName,
                            value=":".join(pathVal))

    # The actual build script.
    referenceStatement = ""
    if "reference" in spec:
      referenceStatement = "export GIT_REFERENCE=${GIT_REFERENCE_OVERRIDE:-%s}/%s" % (dirname(spec["reference"]), basename(spec["reference"]))

    debug(spec)

    cmd_raw = ""
    try:
      fp = open(dirname(realpath(__file__))+'/alibuild_helpers/build_template.sh', 'r')
      cmd_raw = fp.read()
      fp.close()
    except:
      from pkg_resources import resource_string
      cmd_raw = resource_string("alibuild_helpers", 'build_template.sh')

    source = spec.get("source", "")
    # Shorten the commit hash in case it's a real commit hash and not simply
    # the tag.
    commit_hash = spec["commit_hash"]
    if spec["tag"] != spec["commit_hash"]:
      commit_hash = spec["commit_hash"][0:10]

    # Split the source into two parts, sourceDir and sourceName.  This is done
    # so that when we use Docker we can replace sourceDir with the correct
    # container path, if required.  Nothing changes for standard bash builds,
    # though.
    if args.docker:
      cachedTarball = re.sub("^" + workDir, "/sw", spec["cachedTarball"])
    else:
      cachedTarball = spec["cachedTarball"]


    cmd = format(cmd_raw,
                 dependencies=dependencies,
                 dependenciesInit=dependenciesInit,
                 develPrefix=develPrefix,
                 environment=environment,
                 workDir=workDir,
                 configDir=abspath(args.configDir),
                 incremental_recipe=spec.get("incremental_recipe", ":"),
                 sourceDir=source and (dirname(source) + "/") or "",
                 sourceName=source and basename(source) or "",
                 referenceStatement=referenceStatement,
                 requires=" ".join(spec["requires"]),
                 build_requires=" ".join(spec["build_requires"]),
                 runtime_requires=" ".join(spec["runtime_requires"])
                )

    commonPath = "%s/%%s/%s/%s/%s-%s" % (workDir,
                                         args.architecture,
                                         spec["package"],
                                         spec["version"],
                                         spec["revision"])
    scriptDir = commonPath % "SPECS"

    err, out = getstatusoutput("mkdir -p %s" % scriptDir)
    writeAll("%s/build.sh" % scriptDir, cmd)
    writeAll("%s/%s.sh" % (scriptDir, spec["package"]), spec["recipe"])

    banner("Building %s@%s" % (spec["package"],
                               args.develPrefix if "develPrefix" in args and spec["package"]  in develPkgs
                                                else spec["version"]))
    # Define the environment so that it can be passed up to the
    # actual build script
    buildEnvironment = [
      ("ARCHITECTURE", args.architecture),
      ("BUILD_REQUIRES", " ".join(spec["build_requires"])),
      ("CACHED_TARBALL", cachedTarball),
      ("CAN_DELETE", args.aggressiveCleanup and "1" or ""),
      ("COMMIT_HASH", commit_hash),
      ("DEPS_HASH", spec.get("deps_hash", "")),
      ("DEVEL_HASH", spec.get("devel_hash", "")),
      ("DEVEL_PREFIX", develPrefix),
      ("BUILD_FAMILY", spec["build_family"]),
      ("GIT_TAG", spec["tag"]),
      ("MY_GZIP", gzip()),
      ("MY_TAR", tar()),
      ("INCREMENTAL_BUILD_HASH", spec.get("incremental_hash", "0")),
      ("JOBS", args.jobs),
      ("PKGHASH", spec["hash"]),
      ("PKGNAME", spec["package"]),
      ("PKGREVISION", spec["revision"]),
      ("PKGVERSION", spec["version"]),
      ("RELOCATE_PATHS", " ".join(spec.get("relocate_paths", []))),
      ("REQUIRES", " ".join(spec["requires"])),
      ("RUNTIME_REQUIRES", " ".join(spec["runtime_requires"])),
      ("WRITE_REPO", spec.get("write_repo", source)),
    ]
    # Add the extra environment as passed from the command line.
    buildEnvironment += [e.partition('=')[::2] for e in args.environment]
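    # Illustrative: an extra environment assignment passed on the command line
    # (e.g. FOO=bar) arrives here as the string "FOO=bar", and
    # "FOO=bar".partition('=')[::2] yields the ("FOO", "bar") pair appended above.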
    # In case the --docker option is passed, we set up a docker container which
    # will perform the actual build. Otherwise build as usual using bash.
    if args.docker:
      additionalEnv = ""
      additionalVolumes = ""
      develVolumes = ""
      mirrorVolume = "reference" in spec and " -v %s:/mirror" % dirname(spec["reference"]) or ""
      overrideSource = source.startswith("/") and "-e SOURCE0_DIR_OVERRIDE=/" or ""

      for devel in develPkgs:
        develVolumes += " -v $PWD/`readlink %s || echo %s`:/%s:ro " % (devel, devel, devel)
      for env in buildEnvironment:
        additionalEnv += " -e %s='%s' " % env
      for volume in args.volumes:
        additionalVolumes += " -v %s " % volume
      dockerWrapper = format("docker run --rm -it"
              " -v %(workdir)s:/sw"
              " -v %(scriptDir)s/build.sh:/build.sh:ro"
              " %(mirrorVolume)s"
              " %(develVolumes)s"
              " %(additionalEnv)s"
              " %(additionalVolumes)s"
              " -e GIT_REFERENCE_OVERRIDE=/mirror"
              " %(overrideSource)s"
              " -e WORK_DIR_OVERRIDE=/sw"
              " %(image)s"
              " %(bash)s -e -x /build.sh",
              additionalEnv=additionalEnv,
              additionalVolumes=additionalVolumes,
              bash=BASH,
              develVolumes=develVolumes,
              workdir=abspath(args.workDir),
              image=dockerImage,
              mirrorVolume=mirrorVolume,
              overrideSource=overrideSource,
              scriptDir=scriptDir)
      debug(dockerWrapper)
      err = execute(dockerWrapper)
    else:
      progress = ProgressPrint("%s is being built (use --debug for full output)" % spec["package"])
      for k,v in buildEnvironment:
        os.environ[k] = str(v)
      err = execute("%s -e -x %s/build.sh 2>&1" % (BASH, scriptDir),
                    printer=debug if args.debug or not sys.stdout.isatty() else progress)
      progress.end("failed" if err else "ok", err)
    report_event("BuildError" if err else "BuildSuccess",
                 spec["package"],
                 format("%(a)s %(v)s %(c)s %(h)s",
                        a = args.architecture,
                        v = spec["version"],
                        c = spec["commit_hash"],
                        h = os.environ["ALIBUILD_ALIDIST_HASH"][0:10]))

    updatablePkgs = [ x for x in spec["requires"] if x in develPkgs ]
    if spec["package"] in develPkgs:
      updatablePkgs.append(spec["package"])

    buildErrMsg = format("Error while executing %(sd)s/build.sh on `%(h)s'.\n"
                         "Log can be found in %(w)s/BUILD/%(p)s-latest%(devSuffix)s/log.\n"
                         "Please upload it to CERNBox/Dropbox if you intend to request support.\n"
                         "Build directory is %(w)s/BUILD/%(p)s-latest%(devSuffix)s/%(p)s.",
                         h=socket.gethostname(),
                         sd=scriptDir,
                         w=abspath(args.workDir),
                         p=spec["package"],
                         devSuffix="-" + args.develPrefix
                                   if "develPrefix" in args and spec["package"] in develPkgs
                                   else "")
    if updatablePkgs:
      buildErrMsg += format("\n\n"
                            "Note that you have packages in development mode.\n"
                            "Devel sources are not updated automatically, you must do it by hand.\n"
                            "This problem might be due to one or more outdated devel sources.\n"
                            "To update all development packages required for this build "
                            "it is usually sufficient to do:\n%(updatablePkgs)s",
                            updatablePkgs="".join(["\n  ( cd %s && git pull --rebase )" % dp
                                                   for dp in updatablePkgs]))

    dieOnError(err, buildErrMsg)

    syncHelper.syncToRemote(p, spec)
  banner(format("Build of %(mainPackage)s successfully completed on `%(h)s'.\n"
              "Your software installation is at:"
              "\n\n  %(wp)s\n\n"
              "You can use this package by loading the environment:"
              "\n\n  alienv enter %(mainPackage)s/latest-%(buildFamily)s",
              mainPackage=mainPackage,
              buildFamily=mainBuildFamily,
              h=socket.gethostname(),
              defaults=args.defaults,
              wp=abspath(join(args.workDir, args.architecture))))
  for x in develPkgs:
    banner(format("Build directory for devel package %(p)s:\n%(w)s/BUILD/%(p)s-latest%(devSuffix)s/%(p)s",
                  p=x,
                  devSuffix="-"+args.develPrefix if "develPrefix" in args else "",
                  w=abspath(args.workDir)))
  return (debug, "Everything done", 0)
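
For reference, the revision-selection expression used above (and again in Example 33) can be exercised on its own. The following is a minimal illustrative sketch, not part of the original code; it assumes "busy" revisions are the integer revision numbers already taken by tarballs built from a different hash.

def next_free_revision(busy_revisions):
  # Smallest positive revision number not already taken; 1 when nothing
  # is installed yet.
  if not busy_revisions:
    return 1
  return min(set(range(1, max(busy_revisions) + 2)) - set(busy_revisions))

assert next_free_revision([]) == 1
assert next_free_revision([1, 2, 4]) == 3
assert next_free_revision([1, 2, 3]) == 4
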
Example 29
def doInit(args):
  assert(args.pkgname != None)
  assert(type(args.dist) == dict)
  assert(sorted(args.dist.keys()) == ["repo", "ver"])
  pkgs = parsePackagesDefinition(args.pkgname)
  assert(type(pkgs) == list)
  if args.dryRun:
    info("This will initialise local checkouts for %s\n"
         "--dry-run / -n specified. Doing nothing." % ",".join(x["name"] for x in pkgs))
    exit(0)
  try:
    path.exists(args.develPrefix) or os.mkdir(args.develPrefix)
    path.exists(args.referenceSources) or os.makedirs(args.referenceSources)
  except OSError as e:
    error(str(e))
    exit(1)

  # Fetch recipes first if necessary
  if path.exists(args.configDir):
    warning("using existing recipes from %s" % args.configDir)
  else:
    cmd = format("git clone %(repo)s%(branch)s %(cd)s",
                 repo=args.dist["repo"] if ":" in args.dist["repo"] else "https://github.com/%s" % args.dist["repo"],
                 branch=" -b "+args.dist["ver"] if args.dist["ver"] else "",
                 cd=args.configDir)
    debug(cmd)
    err = execute(cmd)
    dieOnError(err!=0, "cannot clone recipes")

  # Use standard functions supporting overrides and taps. Ignore all disables
  # and system packages as they are irrelevant in this context
  specs = {}
  defaultsReader = lambda: readDefaults(args.configDir, args.defaults, error)
  (err, overrides, taps) = parseDefaults([], defaultsReader, debug)
  (_,_,_,validDefaults) = getPackageList(packages=[ p["name"] for p in pkgs ],
                                         specs=specs,
                                         configDir=args.configDir,
                                         preferSystem=False,
                                         noSystem=True,
                                         architecture="",
                                         disable=[],
                                         defaults=args.defaults,
                                         dieOnError=dieOnError,
                                         performPreferCheck=lambda *x, **y: (1, ""),
                                         performRequirementCheck=lambda *x, **y: (0, ""),
                                         performValidateDefaults=lambda spec : validateDefaults(spec, args.defaults),
                                         overrides=overrides,
                                         taps=taps,
                                         log=debug)
  dieOnError(validDefaults and args.defaults not in validDefaults,
             "Specified default `%s' is not compatible with the packages you want to build.\n" % args.defaults +
             "Valid defaults:\n\n- " +
             "\n- ".join(sorted(validDefaults)))

  for p in pkgs:
    spec = specs.get(p["name"])
    dieOnError(spec is None, "cannot find recipe for package %s" % p["name"])
    dest = join(args.develPrefix, spec["package"])
    writeRepo = spec.get("write_repo", spec.get("source"))
    dieOnError(not writeRepo, "package %s has no source field and cannot be developed" % spec["package"])
    if path.exists(dest):
      warning("not cloning %s since it already exists" % spec["package"])
      continue
    p["ver"] = p["ver"] if p["ver"] else spec.get("tag", spec["version"])
    debug("cloning %s%s for development" % (spec["package"], " version "+p["ver"] if p["ver"] else ""))

    updateReferenceRepoSpec(args.referenceSources, spec["package"], spec, True)
    cmd = format("git clone %(readRepo)s%(branch)s --reference %(refSource)s %(cd)s && " +
                 "cd %(cd)s && git remote set-url --push origin %(writeRepo)s",
                 readRepo=spec["source"],
                 writeRepo=writeRepo,
                 branch=" -b "+p["ver"] if p["ver"] else "",
                 refSource=join(args.referenceSources, spec["package"].lower()),
                 cd=dest)
    debug(cmd)
    err = execute(cmd)
    dieOnError(err!=0, "cannot clone %s%s" %
                       (spec["package"], " version "+p["ver"] if p["ver"] else ""))
  banner(format("Development directory %(d)s created%(pkgs)s",
         pkgs=" for "+", ".join([ x["name"].lower() for x in pkgs ]) if pkgs else "",
         d=args.develPrefix))
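
To make the clone step above concrete, here is an illustrative expansion of the command doInit builds, using plain %-style formatting in place of aliBuild's format() helper; every path, URL and version below is made up for the example.

clone_cmd = ("git clone %(readRepo)s%(branch)s --reference %(refSource)s %(cd)s && "
             "cd %(cd)s && git remote set-url --push origin %(writeRepo)s") % {
  "readRepo": "https://github.com/alisw/AliRoot",     # spec["source"] (hypothetical)
  "writeRepo": "ssh://git@github.com/alisw/AliRoot",  # spec.get("write_repo", source) (hypothetical)
  "branch": " -b v5-09-20",                           # requested version, if any (hypothetical)
  "refSource": "/work/mirror/aliroot",                # join(referenceSources, package.lower()) (hypothetical)
  "cd": "/work/dev/AliRoot",                          # destination checkout (hypothetical)
}
print(clone_cmd)
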
Example 30
def doInit(args):
  assert(args.pkgname != None)
  assert(type(args.dist) == dict)
  assert(sorted(args.dist.keys()) == ["repo", "ver"])
  pkgs = parsePackagesDefinition(args.pkgname)
  assert(type(pkgs) == list)
  if args.dryRun:
    info("This will initialise local checkouts for %s\n"
         "--dry-run / -n specified. Doing nothing." % ",".join(x["name"] for x in pkgs))
    exit(0)
  try:
    path.exists(args.develPrefix) or os.mkdir(args.develPrefix)
    path.exists(args.referenceSources) or os.makedirs(args.referenceSources)
  except OSError as e:
    error(str(e))
    exit(1)

  # Fetch recipes first if necessary
  if path.exists(args.configDir):
    warning("using existing recipes from %s" % args.configDir)
  else:
    cmd = format("git clone %(repo)s%(branch)s %(cd)s",
                 repo=args.dist["repo"] if ":" in args.dist["repo"] else "https://github.com/%s" % args.dist["repo"],
                 branch=" -b "+args.dist["ver"] if args.dist["ver"] else "",
                 cd=args.configDir)
    debug(cmd)
    err = execute(cmd)
    dieOnError(err!=0, "cannot clone recipes")

  # Use standard functions supporting overrides and taps. Ignore all disables
  # and system packages as they are irrelevant in this context
  specs = {}
  defaultsReader = lambda: readDefaults(args.configDir, args.defaults, error)
  (err, overrides, taps) = parseDefaults([], defaultsReader, debug)
  (_,_,_,validDefaults) = getPackageList(packages=[ p["name"] for p in pkgs ],
                                         specs=specs,
                                         configDir=args.configDir,
                                         preferSystem=False,
                                         noSystem=True,
                                         architecture="",
                                         disable=[],
                                         defaults=args.defaults,
                                         dieOnError=lambda *x, **y: None,
                                         performPreferCheck=lambda *x, **y: (1, ""),
                                         performRequirementCheck=lambda *x, **y: (0, ""),
                                         performValidateDefaults=lambda spec : validateDefaults(spec, args.defaults),
                                         overrides=overrides,
                                         taps=taps,
                                         log=debug)
  dieOnError(validDefaults and args.defaults not in validDefaults,
             "Specified default `%s' is not compatible with the packages you want to build.\n" % args.defaults +
             "Valid defaults:\n\n- " +
             "\n- ".join(sorted(validDefaults)))

  for p in pkgs:
    spec = specs.get(p["name"])
    dieOnError(spec is None, "cannot find recipe for package %s" % p["name"])
    dest = join(args.develPrefix, spec["package"])
    writeRepo = spec.get("write_repo", spec.get("source"))
    dieOnError(not writeRepo, "package %s has no source field and cannot be developed" % spec["package"])
    if path.exists(dest):
      warning("not cloning %s since it already exists" % spec["package"])
      continue
    p["ver"] = p["ver"] if p["ver"] else spec.get("tag", spec["version"])
    debug("cloning %s%s for development" % (spec["package"], " version "+p["ver"] if p["ver"] else ""))

    updateReferenceRepoSpec(args.referenceSources, spec["package"], spec, True)
    cmd = format("git clone %(readRepo)s%(branch)s --reference %(refSource)s %(cd)s && " +
                 "cd %(cd)s && git remote set-url --push origin %(writeRepo)s",
                 readRepo=spec["source"],
                 writeRepo=writeRepo,
                 branch=" -b "+p["ver"] if p["ver"] else "",
                 refSource=join(args.referenceSources, spec["package"].lower()),
                 cd=dest)
    debug(cmd)
    err = execute(cmd)
    dieOnError(err!=0, "cannot clone %s%s" %
                       (spec["package"], " version "+p["ver"] if p["ver"] else ""))
  banner(format("Development directory %(d)s created%(pkgs)s",
         pkgs=" for "+", ".join([ x["name"].lower() for x in pkgs ]) if pkgs else "",
         d=args.develPrefix))
Example 31
def updateReferenceRepo(referenceSources,
                        p,
                        spec,
                        fetch=True,
                        usePartialClone=True):
    """
  Update source reference area, if possible.
  If the area is already there and cannot be written, assume it maintained
  by someone else.

  If the area can be created, clone a bare repository with the sources.

  Returns the reference repository's local path if available, otherwise None.
  Throws a fatal error in case repository cannot be updated even if it appears
  to be writeable.

  @referenceSources : a string containing the path to the sources to be updated
  @p                : the name of the package to be updated
  @spec             : the spec of the package to be updated (an OrderedDict)
  @fetch            : whether to fetch updates: if False, only clone if not found
  """
    assert isinstance(spec, OrderedDict)
    if "source" not in spec:
        return

    debug("Updating references.")
    referenceRepo = os.path.join(os.path.abspath(referenceSources), p.lower())

    try:
        os.makedirs(os.path.abspath(referenceSources))
    except:
        pass

    if not is_writeable(referenceSources):
        if os.path.exists(referenceRepo):
            debug("Using %s as reference for %s", referenceRepo, p)
            return referenceRepo  # reference is read-only
        else:
            debug("Cannot create reference for %s in %s", p, referenceSources)
            return None  # no reference can be found and created (not fatal)

    if not os.path.exists(referenceRepo):
        cmd = ["clone", "--bare", spec["source"], referenceRepo]
        if usePartialClone and partialCloneFilter:
            cmd.append(partialCloneFilter)
        git(cmd)
    elif fetch:
        with codecs.open(os.path.join(os.path.dirname(referenceRepo),
                                      "fetch-log.txt"),
                         "w",
                         encoding="utf-8",
                         errors="replace") as logf:
            err, output = git(("fetch", "-f", "--tags", spec["source"],
                               "+refs/heads/*:refs/heads/*"),
                              directory=referenceRepo,
                              check=False)
            logf.write(output)
            debug(output)
            dieOnError(
                err,
                "Error while updating reference repo for %s." % spec["source"])
    return referenceRepo  # reference is read-write
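
A hypothetical usage sketch of the function above; the import path and the spec contents are assumptions, not taken from this page.

from collections import OrderedDict
# Assumed import location for the function defined above.
from alibuild_helpers.workarea import updateReferenceRepo

spec = OrderedDict(source="https://github.com/alisw/alidist")  # hypothetical spec
ref = updateReferenceRepo("/work/MIRROR", "alidist", spec, fetch=True)
if ref:
    print("using reference repository at", ref)
else:
    print("no reference repository available; a full clone will be used")
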
Example 32
    def syncToLocal(self, p, spec):
        # Check for any existing tarballs we can use instead of fetching new ones.
        for pkg_hash in spec["remote_hashes"]:
            try:
                have_tarballs = os.listdir(
                    os.path.join(
                        self.workdir,
                        resolve_store_path(self.architecture, pkg_hash)))
            except OSError:  # store path not readable
                continue
            for tarball in have_tarballs:
                if re.match(
                        r"^{package}-{version}-[0-9]+\.{arch}\.tar\.gz$".
                        format(
                            package=re.escape(spec["package"]),
                            version=re.escape(spec["version"]),
                            arch=re.escape(self.architecture),
                        ), os.path.basename(tarball)):
                    debug(
                        "Previously downloaded tarball for %s with hash %s, reusing",
                        p, pkg_hash)
                    return

        with requests.Session() as session:
            debug("Updating remote store for package %s; trying hashes %s", p,
                  ", ".join(spec["remote_hashes"]))
            store_path = use_tarball = None
            # Find the first tarball that matches any possible hash and fetch it.
            for pkg_hash in spec["remote_hashes"]:
                store_path = resolve_store_path(self.architecture, pkg_hash)
                tarballs = self.getRetry("%s/%s/" %
                                         (self.remoteStore, store_path),
                                         session=session)
                if tarballs:
                    use_tarball = tarballs[0]["name"]
                    break

            if store_path is None or use_tarball is None:
                debug("Nothing fetched for %s (%s)", p,
                      ", ".join(spec["remote_hashes"]))
                return

            links_path = resolve_links_path(self.architecture, spec["package"])
            execute("mkdir -p {}/{} {}/{}".format(self.workdir, store_path,
                                                  self.workdir, links_path))

            destPath = os.path.join(self.workdir, store_path, use_tarball)
            if not os.path.isfile(destPath):
                # Do not download twice
                self.getRetry("/".join(
                    (self.remoteStore, store_path, use_tarball)),
                              destPath,
                              session=session)

            # Fetch manifest file with initial symlinks. This file is updated
            # regularly; we use it to avoid many small network requests.
            manifest = self.getRetry("%s/%s.manifest" %
                                     (self.remoteStore, links_path),
                                     returnResult=True,
                                     session=session)
            symlinks = {
                linkname.decode("utf-8"): target.decode("utf-8")
                for linkname, sep, target in (
                    line.partition(b"\t") for line in manifest.splitlines())
                if sep and linkname and target
            }
            # If we've just downloaded a tarball, add a symlink to it.
            # We need to strip the leading TARS/ first, though.
            assert store_path.startswith("TARS/"), store_path
            symlinks[use_tarball] = os.path.join(store_path[len("TARS/"):],
                                                 use_tarball)
            # Now add any remaining symlinks that aren't in the manifest yet. There
            # should always be relatively few of these, as the separate network
            # requests are a bit expensive.
            for link in self.getRetry("%s/%s/" %
                                      (self.remoteStore, links_path),
                                      session=session):
                linkname = link["name"]
                if linkname in symlinks:
                    # This symlink is already present in the manifest.
                    continue
                if os.path.islink(
                        os.path.join(self.workdir, links_path, linkname)):
                    # We have this symlink locally. With local revisions, we won't produce
                    # revisions that will conflict with remote revisions unless we upload
                    # them anyway, so there's no need to redownload.
                    continue
                # This symlink isn't in the manifest yet, and we don't have it locally,
                # so download it individually.
                symlinks[linkname] = \
                    self.getRetry("/".join((self.remoteStore, links_path, linkname)),
                                  returnResult=True, log=False, session=session) \
                        .decode("utf-8").rstrip("\r\n")
        for linkname, target in symlinks.items():
            execute("ln -nsf ../../{target} {workdir}/{linkdir}/{name}".format(
                workdir=self.workdir,
                linkdir=links_path,
                name=linkname,
                target=target))
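
The manifest handling above can be illustrated in isolation: each manifest line is expected to be "<linkname>\t<target>", and empty or malformed lines are skipped. The sketch below is illustrative and uses a made-up manifest.

manifest = (b"ROOT-v6-20-02-1.slc7_x86-64.tar.gz\tstore/ab/abcdef12/ROOT-v6-20-02-1.slc7_x86-64.tar.gz\n"
            b"\n"
            b"broken-line-without-tab\n")
symlinks = {
    linkname.decode("utf-8"): target.decode("utf-8")
    for linkname, sep, target in (line.partition(b"\t") for line in manifest.splitlines())
    if sep and linkname and target
}
print(symlinks)  # only the well-formed first line survives
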
Example 33
def doBuild(args, parser):
    if args.remoteStore.startswith("http"):
        syncHelper = HttpRemoteSync(args.remoteStore, args.architecture,
                                    args.workDir, args.insecure)
    elif args.remoteStore:
        syncHelper = RsyncRemoteSync(args.remoteStore, args.writeStore,
                                     args.architecture, args.workDir, "")
    else:
        syncHelper = NoRemoteSync()

    packages = args.pkgname
    dockerImage = args.dockerImage if "dockerImage" in args else None
    specs = {}
    buildOrder = []
    workDir = abspath(args.workDir)
    prunePaths(workDir)

    if not exists(args.configDir):
        return (error,
                ("Cannot find %sdist recipes under directory \"%s\".\n" +
                 "Maybe you need to \"cd\" to the right directory or " +
                 "you forgot to run \"aliBuild init\"?") %
                (star(), args.configDir), 1)

    defaultsReader = lambda: readDefaults(args.configDir, args.defaults, parser
                                          .error)
    (err, overrides, taps) = parseDefaults(args.disable, defaultsReader, debug)
    dieOnError(err, err)

    specDir = "%s/SPECS" % workDir
    if not exists(specDir):
        makedirs(specDir)

    os.environ["ALIBUILD_ALIDIST_HASH"] = getDirectoryHash(args.configDir)

    debug("Building for architecture %s" % args.architecture)
    debug("Number of parallel builds: %d" % args.jobs)
    debug(
        format(
            "Using %(star)sBuild from "
            "%(star)sbuild@%(toolHash)s recipes "
            "in %(star)sdist@%(distHash)s",
            star=star(),
            toolHash=getDirectoryHash(dirname(__file__)),
            distHash=os.environ["ALIBUILD_ALIDIST_HASH"]))

    (systemPackages, ownPackages, failed, validDefaults) = getPackageList(
        packages=packages,
        specs=specs,
        configDir=args.configDir,
        preferSystem=args.preferSystem,
        noSystem=args.noSystem,
        architecture=args.architecture,
        disable=args.disable,
        defaults=args.defaults,
        dieOnError=dieOnError,
        performPreferCheck=lambda pkg, cmd: dockerStatusOutput(
            cmd, dockerImage, executor=getStatusOutputBash),
        performRequirementCheck=lambda pkg, cmd: dockerStatusOutput(
            cmd, dockerImage, executor=getStatusOutputBash),
        performValidateDefaults=lambda spec: validateDefaults(
            spec, args.defaults),
        overrides=overrides,
        taps=taps,
        log=debug)
    if validDefaults and args.defaults not in validDefaults:
        return (
            error,
            "Specified default `%s' is not compatible with the packages you want to build.\n"
            % args.defaults + "Valid defaults:\n\n- " +
            "\n- ".join(sorted(validDefaults)), 1)

    if failed:
        return (
            error,
            "The following packages are system requirements and could not be found:\n\n- "
            + "\n- ".join(sorted(list(failed))) +
            "\n\nPlease run:\n\n\taliDoctor %s\n\nto get a full diagnosis." %
            args.pkgname.pop(), 1)

    for x in specs.values():
        x["requires"] = [r for r in x["requires"] if not r in args.disable]
        x["build_requires"] = [
            r for r in x["build_requires"] if not r in args.disable
        ]
        x["runtime_requires"] = [
            r for r in x["runtime_requires"] if not r in args.disable
        ]

    if systemPackages:
        banner(
            "%sBuild can take the following packages from the system and will not build them:\n  %s"
            % (star(), ", ".join(systemPackages)))
    if ownPackages:
        banner(
            "The following packages cannot be taken from the system and will be built:\n  %s"
            % ", ".join(ownPackages))

    # Do topological sort to have the correct build order even in the
    # case of non-tree-like dependencies.
    # The actual algorithm used can be found at:
    #
    # http://www.stoimen.com/blog/2012/10/01/computer-algorithms-topological-sort-of-a-graph/
    #
    edges = [(p["package"], d) for p in specs.values() for d in p["requires"]]
    L = [l for l in specs.values() if not l["requires"]]
    S = []
    while L:
        spec = L.pop(0)
        S.append(spec)
        nextVertex = [e[0] for e in edges if e[1] == spec["package"]]
        edges = [e for e in edges if e[1] != spec["package"]]
        hasPredecessors = set(
            [m for e in edges for m in nextVertex if e[0] == m])
        withPredecessor = set(nextVertex) - hasPredecessors
        L += [specs[m] for m in withPredecessor]
    buildOrder = [s["package"] for s in S]
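    # Worked example (illustrative): if A requires B and B requires C, then
    # edges == [("A", "B"), ("B", "C")] and L starts as [C] (no requires).
    # The loop then emits C, B, A, so buildOrder == ["C", "B", "A"], and every
    # dependency precedes the packages that require it.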

    # Date fields to substitute: they are zero-padded
    now = datetime.now()
    nowKwds = {
        "year": str(now.year),
        "month": str(now.month).zfill(2),
        "day": str(now.day).zfill(2),
        "hour": str(now.hour).zfill(2)
    }

    # Check if any of the packages can be picked up from a local checkout
    develCandidates = [basename(d) for d in glob("*") if os.path.isdir(d)]
    develCandidatesUpper = [
        basename(d).upper() for d in glob("*") if os.path.isdir(d)
    ]
    develPkgs = [
        p for p in buildOrder if p in develCandidates and p not in args.noDevel
    ]
    develPkgsUpper = [
        (p, p.upper()) for p in buildOrder
        if p.upper() in develCandidatesUpper and p not in args.noDevel
    ]
    if set(develPkgs) != set(x for (x, y) in develPkgsUpper):
        return (
            error,
            format(
                "The following development packages have wrong spelling: %(pkgs)s.\n"
                "Please check your local checkout and adapt to the correct one indicated.",
                pkgs=", ".join(
                    set(x.strip()
                        for (x, y) in develPkgsUpper) - set(develPkgs))), 1)

    if buildOrder:
        banner(
            "Packages will be built in the following order:\n - %s" %
            "\n - ".join([
                x + " (development package)" if x in develPkgs else "%s@%s" %
                (x, specs[x]["tag"])
                for x in buildOrder if x != "defaults-release"
            ]))

    if develPkgs:
        banner(
            format(
                "You have packages in development mode.\n"
                "This means their source code can be freely modified under:\n\n"
                "  %(pwd)s/<package_name>\n\n"
                "%(star)sBuild does not automatically update such packages to avoid work loss.\n"
                "In most cases this is achieved by doing in the package source directory:\n\n"
                "  git pull --rebase\n",
                pwd=os.getcwd(),
                star=star()))

    # Clone/update repos
    for p in [p for p in buildOrder if "source" in specs[p]]:
        updateReferenceRepoSpec(args.referenceSources, p, specs[p],
                                args.fetchRepos)

        # Retrieve git heads
        cmd = "git ls-remote --heads %s" % (specs[p]["reference"]
                                            if "reference" in specs[p] else
                                            specs[p]["source"])
        if specs[p]["package"] in develPkgs:
            specs[p]["source"] = join(os.getcwd(), specs[p]["package"])
            cmd = "git ls-remote --heads %s" % specs[p]["source"]
        debug("Executing %s" % cmd)
        res, output = getStatusOutputBash(cmd)
        dieOnError(res, "Error on '%s': %s" % (cmd, output))
        specs[p]["git_heads"] = output.split("\n")

    # Resolve the tag to the actual commit ref
    for p in buildOrder:
        spec = specs[p]
        spec["commit_hash"] = "0"
        develPackageBranch = ""
        if "source" in spec:
            # Tag may contain date params like %(year)s, %(month)s, %(day)s, %(hour)s.
            spec["tag"] = format(spec["tag"], **nowKwds)
            # By default we assume tag is a commit hash. We then try to find
            # out if the tag is actually a branch and we use the tip of the branch
            # as commit_hash. Finally if the package is a development one, we use the
            # name of the branch as commit_hash.
            spec["commit_hash"] = spec["tag"]
            for head in spec["git_heads"]:
                if head.endswith("refs/heads/{0}".format(
                        spec["tag"])) or spec["package"] in develPkgs:
                    spec["commit_hash"] = head.split("\t", 1)[0]
                    # We are in development mode: we need to rebuild if the commit hash
                    # is different and if there are extra changes on top.
                    if spec["package"] in develPkgs:
                        # Devel package: we get the commit hash from the checked source, not from remote.
                        cmd = "cd %s && git rev-parse HEAD" % spec["source"]
                        err, out = getstatusoutput(cmd)
                        dieOnError(err,
                                   "Unable to detect current commit hash.")
                        spec["commit_hash"] = out.strip()
                        cmd = "cd %s && git diff -r HEAD && git status --porcelain" % spec[
                            "source"]
                        h = Hasher()
                        err = execute(cmd, h)
                        debug(err, cmd)
                        dieOnError(err,
                                   "Unable to detect source code changes.")
                        spec["devel_hash"] = spec["commit_hash"] + h.hexdigest(
                        )
                        cmd = "cd %s && git rev-parse --abbrev-ref HEAD" % spec[
                            "source"]
                        err, out = getstatusoutput(cmd)
                        if out == "HEAD":
                            err, out = getstatusoutput(
                                "cd %s && git rev-parse HEAD" % spec["source"])
                            out = out[0:10]
                        if err:
                            return (
                                error,
                                "Error, unable to lookup changes in development package %s. Is it a git clone?"
                                % spec["source"], 1)
                        develPackageBranch = out.replace("/", "-")
                        spec[
                            "tag"] = args.develPrefix if "develPrefix" in args else develPackageBranch
                        spec["commit_hash"] = "0"
                    break

        # Version may contain date params like tag, plus %(commit_hash)s,
        # %(short_hash)s and %(tag)s.
        defaults_upper = args.defaults != "release" and "_" + args.defaults.upper(
        ).replace("-", "_") or ""
        spec["version"] = format(spec["version"],
                                 commit_hash=spec["commit_hash"],
                                 short_hash=spec["commit_hash"][0:10],
                                 tag=spec["tag"],
                                 tag_basename=basename(spec["tag"]),
                                 defaults_upper=defaults_upper,
                                 **nowKwds)
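        # Illustrative: a version template such as "%(tag)s-%(short_hash)s"
        # with tag "v1.0" and a full commit hash starting with "0123456789"
        # expands to "v1.0-0123456789".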

        if spec["package"] in develPkgs and "develPrefix" in args and args.develPrefix != "ali-master":
            spec["version"] = args.develPrefix

    # Decide what is the main package we are building and at what commit.
    #
    # We emit an event for the main package, when encountered, so that we can use
    # it to index builds of the same hash on different architectures. We also
    # make sure we add the main package and its hash to the debug log, so that
    # we can always extract them from it.
    # If one of the special packages is in the list of packages to be built,
    # we use it as main package, rather than the last one.
    if not buildOrder:
        return (banner, "Nothing to be done.", 0)
    mainPackage = buildOrder[-1]
    mainHash = specs[mainPackage]["commit_hash"]

    debug("Main package is %s@%s" % (mainPackage, mainHash))
    if args.debug:
        logger_handler.setFormatter(
            LogFormatter("%%(levelname)s:%s:%s: %%(message)s" %
                         (mainPackage, args.develPrefix
                          if "develPrefix" in args else mainHash[0:8])))

    # Now that we have the main package set, we can print out useful information
    # which we will be able to associate with this build. Also let's make sure each
    # package we need to build can be built with the current default.
    for p in buildOrder:
        spec = specs[p]
        if "source" in spec:
            debug("Commit hash for %s@%s is %s" %
                  (spec["source"], spec["tag"], spec["commit_hash"]))

    # Calculate the hashes. We do this in build order so that we can guarantee
    # that the hashes of the dependencies are calculated first.  Also notice that
    # if the commit hash is a real hash, and not a tag, we can safely assume
    # it is unique, and therefore we can avoid putting the repository or the
    # name of the branch in the hash.
    debug("Calculating hashes.")
    for p in buildOrder:
        spec = specs[p]
        debug(spec)
        debug(develPkgs)
        h = Hasher()
        dh = Hasher()
        for x in [
                "recipe", "version", "package", "commit_hash", "env",
                "append_path", "prepend_path"
        ]:
            if sys.version_info[0] < 3 and x in spec and type(
                    spec[x]) == OrderedDict:
                # Python 2: use YAML dict order to prevent changing hashes
                h(str(yaml.safe_load(yamlDump(spec[x]))))
            else:
                h(str(spec.get(x, "none")))
        if spec["commit_hash"] == spec.get("tag", "0"):
            h(spec.get("source", "none"))
            if "source" in spec:
                h(spec["tag"])
        for dep in spec.get("requires", []):
            h(specs[dep]["hash"])
            dh(specs[dep]["hash"] + specs[dep].get("devel_hash", ""))
        if bool(spec.get("force_rebuild", False)):
            h(str(time.time()))
        if spec["package"] in develPkgs and "incremental_recipe" in spec:
            h(spec["incremental_recipe"])
            ih = Hasher()
            ih(spec["incremental_recipe"])
            spec["incremental_hash"] = ih.hexdigest()
        elif p in develPkgs:
            h(spec.get("devel_hash"))
        if args.architecture.startswith("osx") and "relocate_paths" in spec:
            h("relocate:" + " ".join(sorted(spec["relocate_paths"])))
        spec["hash"] = h.hexdigest()
        spec["deps_hash"] = dh.hexdigest()
        debug("Hash for recipe %s is %s" % (p, spec["hash"]))

    # This adds to the spec where it should find, locally or remotely, the
    # various tarballs and links.
    for p in buildOrder:
        spec = specs[p]
        pkgSpec = {
            "workDir": workDir,
            "package": spec["package"],
            "version": spec["version"],
            "hash": spec["hash"],
            "prefix": spec["hash"][0:2],
            "architecture": args.architecture
        }
        varSpecs = [
            ("storePath", "TARS/%(architecture)s/store/%(prefix)s/%(hash)s"),
            ("linksPath", "TARS/%(architecture)s/%(package)s"),
            ("tarballHashDir",
             "%(workDir)s/TARS/%(architecture)s/store/%(prefix)s/%(hash)s"),
            ("tarballLinkDir",
             "%(workDir)s/TARS/%(architecture)s/%(package)s"),
            ("buildDir", "%(workDir)s/BUILD/%(hash)s/%(package)s")
        ]
        spec.update(dict([(x, format(y, **pkgSpec)) for (x, y) in varSpecs]))
        spec["old_devel_hash"] = readHashFile(spec["buildDir"] +
                                              "/.build_succeeded")

    # We recursively calculate the full set of requires "full_requires"
    # including build_requires and the subset of them which are needed at
    # runtime "full_runtime_requires".
    for p in buildOrder:
        spec = specs[p]
        todo = [p]
        spec["full_requires"] = []
        spec["full_runtime_requires"] = []
        while todo:
            i = todo.pop(0)
            requires = specs[i].get("requires", [])
            runTimeRequires = specs[i].get("runtime_requires", [])
            spec["full_requires"] += requires
            spec["full_runtime_requires"] += runTimeRequires
            todo += requires
        spec["full_requires"] = set(spec["full_requires"])
        spec["full_runtime_requires"] = set(spec["full_runtime_requires"])

    debug("We will build packages in the following order: %s" %
          " ".join(buildOrder))
    if args.dryRun:
        return (info, "--dry-run / -n specified. Not building.", 0)

    # We now iterate on all the packages, making sure we build correctly every
    # single one of them. This is done this way so that the second time we run we
    # can check if the build was consistent and if it is, we bail out.
    packageIterations = 0
    report_event(
        "install",
        format(
            "%(p)s disabled=%(dis)s devel=%(dev)s system=%(sys)s own=%(own)s deps=%(deps)s",
            p=args.pkgname,
            dis=",".join(sorted(args.disable)),
            dev=",".join(sorted(develPkgs)),
            sys=",".join(sorted(systemPackages)),
            own=",".join(sorted(ownPackages)),
            deps=",".join(buildOrder[:-1])), args.architecture)

    while buildOrder:
        packageIterations += 1
        if packageIterations > 20:
            return (
                error,
                "Too many attempts at building %s. Something wrong with the repository?"
                % spec["package"], 1)
        p = buildOrder[0]
        spec = specs[p]
        if args.debug:
            logger_handler.setFormatter(
                LogFormatter("%%(levelname)s:%s:%s:%s: %%(message)s" %
                             (mainPackage, p, args.develPrefix
                              if "develPrefix" in args else mainHash[0:8])))
        if spec["package"] in develPkgs and getattr(syncHelper, "writeStore",
                                                    None):
            warning(
                "Disabling remote write store from now since %s is a development package."
                % spec["package"])
            syncHelper.writeStore = ""

        # Since we can execute this multiple times for a given package, in order to
        # ensure consistency, we need to reset things and make them pristine.
        spec.pop("revision", None)
        riemannStream.setAttributes(package=spec["package"],
                                    package_hash=spec["version"],
                                    architecture=args.architecture,
                                    defaults=args.defaults)
        riemannStream.setState("warning")

        debug("Updating from tarballs")
        # If we arrived here it really means we have a tarball which was created
        # using the same recipe. We will use it as a cache for the build. This means
        # that while we will still perform the build process, rather than
        # executing the build itself we will:
        #
        # - Unpack it in a temporary place.
        # - Invoke the relocation specifying the correct work_dir and the
        #   correct path which should have been used.
        # - Move the version directory to its final destination, including the
        #   correct revision.
        # - Repack it and put it back in the store.
        #
        # This will result in a new package which has the same binary contents as
        # the old one, but where the relocation works for the new directory. Here
        # we simply store the fact that we can reuse the contents of cachedTarball.
        syncHelper.syncToLocal(p, spec)

        # Decide how it should be called, based on the hash and what is already
        # available.
        debug("Checking for packages already built.")
        linksGlob = format("%(w)s/TARS/%(a)s/%(p)s/%(p)s-%(v)s-*.%(a)s.tar.gz",
                           w=workDir,
                           a=args.architecture,
                           p=spec["package"],
                           v=spec["version"])
        debug("Glob pattern used: %s" % linksGlob)
        packages = glob(linksGlob)
        # If there is no installed software yet, the revision is 1.
        # If a package with this version is already installed:
        # - reuse its revision when its hash matches the one we want to build
        # - otherwise mark its revision as busy and pick the lowest free one
        debug("Packages already built using this version\n%s" %
              "\n".join(packages))
        busyRevisions = []

        # Calculate the build_family for the package
        #
        # If the package is a devel package, we need to associate a devel
        # prefix with it, either via the -z option or using its checked out
        # branch. This affects its build hash.
        #
        # Moreover we need to define a global "buildFamily" which is used
        # to tag all the packages involved in the build. This way we can have
        # a latest-<buildFamily> link for all of them and we will not run into
        # the flip-flopping described in https://github.com/alisw/alibuild/issues/325.
        develPrefix = ""
        possibleDevelPrefix = getattr(args, "develPrefix", develPackageBranch)
        if spec["package"] in develPkgs:
            develPrefix = possibleDevelPrefix

        if possibleDevelPrefix:
            spec["build_family"] = "%s-%s" % (possibleDevelPrefix,
                                              args.defaults)
        else:
            spec["build_family"] = args.defaults
        if spec["package"] == mainPackage:
            mainBuildFamily = spec["build_family"]

        for d in packages:
            realPath = readlink(d)
            matcher = format(
                "../../%(a)s/store/[0-9a-f]{2}/([0-9a-f]*)/%(p)s-%(v)s-([0-9]*).%(a)s.tar.gz$",
                a=args.architecture,
                p=spec["package"],
                v=spec["version"])
            m = re.match(matcher, realPath)
            if not m:
                continue
            h, revision = m.groups()
            revision = int(revision)

            # If we have a hash match, we use the old revision for the package
            # and we do not need to build it.
            if h == spec["hash"]:
                spec["revision"] = revision
                if spec["package"] in develPkgs and "incremental_recipe" in spec:
                    spec["obsolete_tarball"] = d
                else:
                    debug(
                        "Package %s with hash %s is already found in %s. Not building."
                        % (p, h, d))
                    src = format("%(v)s-%(r)s",
                                 w=workDir,
                                 v=spec["version"],
                                 r=spec["revision"])
                    dst1 = format("%(w)s/%(a)s/%(p)s/latest-%(bf)s",
                                  w=workDir,
                                  a=args.architecture,
                                  p=spec["package"],
                                  bf=spec["build_family"])
                    dst2 = format("%(w)s/%(a)s/%(p)s/latest",
                                  w=workDir,
                                  a=args.architecture,
                                  p=spec["package"])

                    getstatusoutput("ln -snf %s %s" % (src, dst1))
                    getstatusoutput("ln -snf %s %s" % (src, dst2))
                    info("Using cached build for %s" % p)
                break
            else:
                busyRevisions.append(revision)

        if not "revision" in spec and busyRevisions:
            spec["revision"] = min(
                set(range(1,
                          max(busyRevisions) + 2)) - set(busyRevisions))
        elif not "revision" in spec:
            spec["revision"] = "1"

        # Check if this development package needs to be rebuilt.
        if spec["package"] in develPkgs:
            debug("Checking if devel package %s needs rebuild" %
                  spec["package"])
            if spec["devel_hash"] + spec["deps_hash"] == spec[
                    "old_devel_hash"]:
                info("Development package %s does not need rebuild" %
                     spec["package"])
                buildOrder.pop(0)
                continue

        # Now that we have all the information about the package we want to build, let's
        # check if it wasn't built / unpacked already.
        hashFile = "%s/%s/%s/%s-%s/.build-hash" % (
            workDir, args.architecture, spec["package"], spec["version"],
            spec["revision"])
        fileHash = readHashFile(hashFile)
        if fileHash != spec["hash"]:
            if fileHash != "0":
                debug(
                    "Mismatch between local area (%s) and the one which I should build (%s). Redoing."
                    % (fileHash, spec["hash"]))
            shutil.rmtree(dirname(hashFile), True)
        else:
            # If we get here, we know we are in sync with whatever remote store.  We
            # can therefore create a directory which contains all the packages which
            # were used to compile this one.
            riemannStream.setState('ok')
            debug("Package %s was correctly compiled. Moving to next one." %
                  spec["package"])
            # If using incremental builds, next time we execute the script we need to remove
            # the placeholders which avoid rebuilds.
            if spec["package"] in develPkgs and "incremental_recipe" in spec:
                unlink(hashFile)
            if "obsolete_tarball" in spec:
                unlink(realpath(spec["obsolete_tarball"]))
                unlink(spec["obsolete_tarball"])
            # We need to create three sets of links: one with the full requires,
            # one with only the direct dependencies (needed to register packages
            # in Alien), and one with the full runtime requires.
            createDistLinks(spec, specs, args, "dist", "full_requires")
            createDistLinks(spec, specs, args, "dist-direct", "requires")
            createDistLinks(spec, specs, args, "dist-runtime",
                            "full_runtime_requires")
            buildOrder.pop(0)
            packageIterations = 0
            # We can now delete the INSTALLROOT and BUILD directories,
            # assuming the package is not a development one. We also can
            # delete the SOURCES in case we have aggressive-cleanup enabled.
            if not spec["package"] in develPkgs and args.autoCleanup:
                cleanupDirs = [
                    format("%(w)s/BUILD/%(h)s", w=workDir, h=spec["hash"]),
                    format("%(w)s/INSTALLROOT/%(h)s",
                           w=workDir,
                           h=spec["hash"])
                ]
                if args.aggressiveCleanup:
                    cleanupDirs.append(
                        format("%(w)s/SOURCES/%(p)s",
                               w=workDir,
                               p=spec["package"]))
                debug("Cleaning up:\n" + "\n".join(cleanupDirs))

                for d in cleanupDirs:
                    shutil.rmtree(d.encode("utf8"), True)
                try:
                    unlink(
                        format("%(w)s/BUILD/%(p)s-latest",
                               w=workDir,
                               p=spec["package"]))
                    if "develPrefix" in args:
                        unlink(
                            format("%(w)s/BUILD/%(p)s-latest-%(dp)s",
                                   w=workDir,
                                   p=spec["package"],
                                   dp=args.develPrefix))
                except OSError:
                    # The "latest" symlinks may not exist; ignore.
                    pass
                try:
                    rmdir(format("%(w)s/BUILD", w=workDir, p=spec["package"]))
                    rmdir(
                        format("%(w)s/INSTALLROOT",
                               w=workDir,
                               p=spec["package"]))
                except OSError:
                    # Directories may be non-empty or already gone; ignore.
                    pass
            continue

        debug("Looking for cached tarball in %s" % spec["tarballHashDir"])
        # FIXME: I should get the tarballHashDir updated with server at this point.
        #        It does not really matter that the symlinks are ok at this point
        #        as I only used the tarballs as reusable binary blobs.
        spec["cachedTarball"] = ""
        if not spec["package"] in develPkgs:
            tarballs = [
                x for x in glob("%s/*" % spec["tarballHashDir"])
                if x.endswith("gz")
            ]
            spec["cachedTarball"] = tarballs[0] if len(tarballs) else ""
            debug(spec["cachedTarball"]
                  and "Found tarball in %s" % spec["cachedTarball"]
                  or "No cache tarballs found")

        # Generate the part which sources the environment for all the dependencies.
        # Notice that we guarantee that a dependency is always sourced before the
        # parts depending on it, but we do not guarantee anything about the order in
        # which unrelated components are activated.
        dependencies = ""
        dependenciesInit = ""
        for dep in spec.get("requires", []):
            depSpec = specs[dep]
            depInfo = {
                "architecture": args.architecture,
                "package": dep,
                "version": depSpec["version"],
                "revision": depSpec["revision"],
                "bigpackage": dep.upper().replace("-", "_")
            }
            dependencies += format(
                "[ \"X$%(bigpackage)s_VERSION\" = X  ] && source \"$WORK_DIR/%(architecture)s/%(package)s/%(version)s-%(revision)s/etc/profile.d/init.sh\"\n",
                **depInfo)
            dependenciesInit += format(
                'echo [ \\\"X\\$%(bigpackage)s_VERSION\\\" = X ] \\&\\& source \\${WORK_DIR}/%(architecture)s/%(package)s/%(version)s-%(revision)s/etc/profile.d/init.sh >> \"$INSTALLROOT/etc/profile.d/init.sh\"\n',
                **depInfo)
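        # For a hypothetical dependency "zlib" at v1.2.11-1 on architecture
        # slc7_x86-64, the generated sourcing line looks roughly like:
        #   [ "X$ZLIB_VERSION" = X ] && source "$WORK_DIR/slc7_x86-64/zlib/v1.2.11-1/etc/profile.d/init.sh"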
        # Generate the part which creates the environment for the package.
        # This can be either variable set via the "env" keyword in the metadata
        # or paths which get appended via the "append_path" one.
        # By default we append LD_LIBRARY_PATH, PATH and DYLD_LIBRARY_PATH
        # FIXME: do not append variables for Mac on Linux.
        environment = ""
        dieOnError(not isinstance(spec.get("env", {}), dict),
                   "Tag `env' in %s should be a dict." % p)
        for key, value in spec.get("env", {}).items():
            environment += format(
                "echo 'export %(key)s=\"%(value)s\"' >> $INSTALLROOT/etc/profile.d/init.sh\n",
                key=key,
                value=value)
        basePath = "%s_ROOT" % p.upper().replace("-", "_")

        pathDict = spec.get("append_path", {})
        dieOnError(not isinstance(pathDict, dict),
                   "Tag `append_path' in %s should be a dict." % p)
        for pathName, pathVal in pathDict.items():
            pathVal = pathVal if isinstance(pathVal, list) else [pathVal]
            environment += format(
                "\ncat << \\EOF >> \"$INSTALLROOT/etc/profile.d/init.sh\"\nexport %(key)s=$%(key)s:%(value)s\nEOF",
                key=pathName,
                value=":".join(pathVal))

        # Same thing, but prepending the results so that they win against system ones.
        defaultPrependPaths = {
            "LD_LIBRARY_PATH": "$%s/lib" % basePath,
            "DYLD_LIBRARY_PATH": "$%s/lib" % basePath,
            "PATH": "$%s/bin" % basePath
        }
        pathDict = spec.get("prepend_path", {})
        dieOnError(not isinstance(pathDict, dict),
                   "Tag `prepend_path' in %s should be a dict." % p)
        for pathName, pathVal in pathDict.items():
            pathDict[pathName] = pathVal if isinstance(pathVal, list) else [pathVal]
        for pathName, pathVal in defaultPrependPaths.items():
            pathDict[pathName] = [pathVal] + pathDict.get(pathName, [])
        for pathName, pathVal in pathDict.items():
            environment += format(
                "\ncat << \\EOF >> \"$INSTALLROOT/etc/profile.d/init.sh\"\nexport %(key)s=%(value)s:$%(key)s\nEOF",
                key=pathName,
                value=":".join(pathVal))

        # The actual build script.
        referenceStatement = ""
        if "reference" in spec:
            referenceStatement = "export GIT_REFERENCE=${GIT_REFERENCE_OVERRIDE:-%s}/%s" % (
                dirname(spec["reference"]), basename(spec["reference"]))

        debug(spec)

        cmd_raw = ""
        try:
            fp = open(
                dirname(realpath(__file__)) +
                '/alibuild_helpers/build_template.sh', 'r')
            cmd_raw = fp.read()
            fp.close()
        except:
            from pkg_resources import resource_string
            cmd_raw = resource_string("alibuild_helpers", 'build_template.sh')

        source = spec.get("source", "")
        # Shorten the commit hash in case it's a real commit hash and not simply
        # the tag.
        commit_hash = spec["commit_hash"]
        if spec["tag"] != spec["commit_hash"]:
            commit_hash = spec["commit_hash"][0:10]

        # Split the source in two parts, sourceDir and sourceName.  This is done so
        # that when we use Docker we can replace sourceDir with the correct
        # container path, if required. Plain bash builds are unaffected.
        if args.docker:
            cachedTarball = re.sub("^" + workDir, "/sw", spec["cachedTarball"])
        else:
            cachedTarball = spec["cachedTarball"]

        cmd = format(cmd_raw,
                     dependencies=dependencies,
                     dependenciesInit=dependenciesInit,
                     develPrefix=develPrefix,
                     environment=environment,
                     workDir=workDir,
                     configDir=abspath(args.configDir),
                     incremental_recipe=spec.get("incremental_recipe", ":"),
                     sourceDir=source and (dirname(source) + "/") or "",
                     sourceName=source and basename(source) or "",
                     referenceStatement=referenceStatement,
                     requires=" ".join(spec["requires"]),
                     build_requires=" ".join(spec["build_requires"]),
                     runtime_requires=" ".join(spec["runtime_requires"]))

        commonPath = "%s/%%s/%s/%s/%s-%s" % (workDir, args.architecture,
                                             spec["package"], spec["version"],
                                             spec["revision"])
        scriptDir = commonPath % "SPECS"

        err, out = getstatusoutput("mkdir -p %s" % scriptDir)
        writeAll("%s/build.sh" % scriptDir, cmd)
        writeAll("%s/%s.sh" % (scriptDir, spec["package"]), spec["recipe"])

        banner("Building %s@%s" %
               (spec["package"], args.develPrefix if "develPrefix" in args
                and spec["package"] in develPkgs else spec["version"]))
        # Define the environment so that it can be passed up to the
        # actual build script
        buildEnvironment = [
            ("ARCHITECTURE", args.architecture),
            ("BUILD_REQUIRES", " ".join(spec["build_requires"])),
            ("CACHED_TARBALL", cachedTarball),
            ("CAN_DELETE", args.aggressiveCleanup and "1" or ""),
            ("COMMIT_HASH", commit_hash),
            ("DEPS_HASH", spec.get("deps_hash", "")),
            ("DEVEL_HASH", spec.get("devel_hash", "")),
            ("DEVEL_PREFIX", develPrefix),
            ("BUILD_FAMILY", spec["build_family"]),
            ("GIT_TAG", spec["tag"]),
            ("MY_GZIP", gzip()),
            ("MY_TAR", tar()),
            ("INCREMENTAL_BUILD_HASH", spec.get("incremental_hash", "0")),
            ("JOBS", args.jobs),
            ("PKGHASH", spec["hash"]),
            ("PKGNAME", spec["package"]),
            ("PKGREVISION", spec["revision"]),
            ("PKGVERSION", spec["version"]),
            ("RELOCATE_PATHS", " ".join(spec.get("relocate_paths", []))),
            ("REQUIRES", " ".join(spec["requires"])),
            ("RUNTIME_REQUIRES", " ".join(spec["runtime_requires"])),
            ("WRITE_REPO", spec.get("write_repo", source)),
        ]
        # Add the extra environment as passed from the command line.
        buildEnvironment += [e.partition('=')[::2] for e in args.environment]
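        # e.g. an extra environment entry "FOO=bar" (hypothetical) becomes the
        # ("FOO", "bar") pair: partition('=')[::2] keeps the parts around the first '='.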
        # In case the --docker options is passed, we setup a docker container which
        # will perform the actual build. Otherwise build as usual using bash.
        if args.docker:
            additionalEnv = ""
            additionalVolumes = ""
            develVolumes = ""
            mirrorVolume = "reference" in spec and " -v %s:/mirror" % dirname(
                spec["reference"]) or ""
            overrideSource = source.startswith(
                "/") and "-e SOURCE0_DIR_OVERRIDE=/" or ""

            for devel in develPkgs:
                develVolumes += " -v $PWD/`readlink %s || echo %s`:/%s:ro " % (
                    devel, devel, devel)
            for env in buildEnvironment:
                additionalEnv += " -e %s='%s' " % env
            for volume in args.volumes:
                additionalVolumes += " -v %s " % volume
            dockerWrapper = format(
                "docker run --rm -it"
                " -v %(workdir)s:/sw"
                " -v %(scriptDir)s/build.sh:/build.sh:ro"
                " %(mirrorVolume)s"
                " %(develVolumes)s"
                " %(additionalEnv)s"
                " %(additionalVolumes)s"
                " -e GIT_REFERENCE_OVERRIDE=/mirror"
                " %(overrideSource)s"
                " -e WORK_DIR_OVERRIDE=/sw"
                " %(image)s"
                " %(bash)s -e -x /build.sh",
                additionalEnv=additionalEnv,
                additionalVolumes=additionalVolumes,
                bash=BASH,
                develVolumes=develVolumes,
                workdir=abspath(args.workDir),
                image=dockerImage,
                mirrorVolume=mirrorVolume,
                overrideSource=overrideSource,
                scriptDir=scriptDir)
            debug(dockerWrapper)
            err = execute(dockerWrapper)
        else:
            progress = ProgressPrint(
                "%s is being built (use --debug for full output)" %
                spec["package"])
            for k, v in buildEnvironment:
                os.environ[k] = str(v)
            err = execute("%s -e -x %s/build.sh 2>&1" % (BASH, scriptDir),
                          printer=debug if args.debug
                          or not sys.stdout.isatty() else progress)
            progress.end("failed" if err else "ok", err)
        report_event(
            "BuildError" if err else "BuildSuccess", spec["package"],
            format("%(a)s %(v)s %(c)s %(h)s",
                   a=args.architecture,
                   v=spec["version"],
                   c=spec["commit_hash"],
                   h=os.environ["ALIBUILD_ALIDIST_HASH"][0:10]))

        updatablePkgs = [x for x in spec["requires"] if x in develPkgs]
        if spec["package"] in develPkgs:
            updatablePkgs.append(spec["package"])

        buildErrMsg = format(
            "Error while executing %(sd)s/build.sh on `%(h)s'.\n"
            "Log can be found in %(w)s/BUILD/%(p)s-latest%(devSuffix)s/log.\n"
            "Please upload it to CERNBox/Dropbox if you intend to request support.\n"
            "Build directory is %(w)s/BUILD/%(p)s-latest%(devSuffix)s/%(p)s.",
            h=socket.gethostname(),
            sd=scriptDir,
            w=abspath(args.workDir),
            p=spec["package"],
            devSuffix="-" + args.develPrefix
            if "develPrefix" in args and spec["package"] in develPkgs else "")
        if updatablePkgs:
            buildErrMsg += format(
                "\n\n"
                "Note that you have packages in development mode.\n"
                "Devel sources are not updated automatically, you must do it by hand.\n"
                "This problem might be due to one or more outdated devel sources.\n"
                "To update all development packages required for this build "
                "it is usually sufficient to do:\n%(updatablePkgs)s",
                updatablePkgs="".join([
                    "\n  ( cd %s && git pull --rebase )" % dp
                    for dp in updatablePkgs
                ]))

        dieOnError(err, buildErrMsg)

        syncHelper.syncToRemote(p, spec)
    banner(
        format(
            "Build of %(mainPackage)s successfully completed on `%(h)s'.\n"
            "Your software installation is at:"
            "\n\n  %(wp)s\n\n"
            "You can use this package by loading the environment:"
            "\n\n  alienv enter %(mainPackage)s/latest-%(buildFamily)s",
            mainPackage=mainPackage,
            buildFamily=mainBuildFamily,
            h=socket.gethostname(),
            defaults=args.defaults,
            wp=abspath(join(args.workDir, args.architecture))))
    for x in develPkgs:
        banner(
            format(
                "Build directory for devel package %(p)s:\n%(w)s/BUILD/%(p)s-latest%(devSuffix)s/%(p)s",
                p=x,
                devSuffix="-" +
                args.develPrefix if "develPrefix" in args else "",
                w=abspath(args.workDir)))
    return (debug, "Everything done", 0)
Example no. 34
0
    def syncToLocal(self, p, spec):
        from botocore.exceptions import ClientError
        debug("Updating remote store for package %s with hashes %s", p,
              ", ".join(spec["remote_hashes"]))

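        # resolve_store_path points at the per-hash tarball directory, something
        # like .../<architecture>/store/ab/abcdef.../ for a hash starting with
        # "abcdef" (hypothetical hash).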
        # If we already have a tarball with any equivalent hash, don't check S3.
        have_tarball = False
        for pkg_hash in spec["remote_hashes"]:
            store_path = resolve_store_path(self.architecture, pkg_hash)
            if glob.glob(
                    os.path.join(self.workdir, store_path, "%s-*.tar.gz" % p)):
                debug("Reusing existing tarball for %s@%s", p, pkg_hash)
                have_tarball = True
                break

        for pkg_hash in spec["remote_hashes"]:
            if have_tarball:
                break
            store_path = resolve_store_path(self.architecture, pkg_hash)

            # We don't already have a tarball with the hash that we need, so download
            # the first existing one from the remote, if possible. (Downloading more
            # than one is a waste of time as they should be equivalent and we only
            # ever use one anyway.)
            for tarball in self._s3_listdir(store_path):
                debug("Fetching tarball %s", tarball)
                # Create containing directory locally. (exist_ok= is python3-specific.)
                os.makedirs(os.path.join(self.workdir, store_path),
                            exist_ok=True)
                self.s3.download_file(Bucket=self.remoteStore,
                                      Key=tarball,
                                      Filename=os.path.join(
                                          self.workdir, store_path,
                                          os.path.basename(tarball)))
                have_tarball = True  # break out of outer loop
                break

        if not have_tarball:
            debug("Remote has no tarballs for %s with hashes %s", p,
                  ", ".join(spec["remote_hashes"]))

        links_path = resolve_links_path(self.architecture, p)
        os.makedirs(os.path.join(self.workdir, links_path), exist_ok=True)

        # Remove existing symlinks: we'll fetch the ones from the remote next.
        parent = os.path.join(self.workdir, links_path)
        for fname in os.listdir(parent):
            path = os.path.join(parent, fname)
            if os.path.islink(path):
                os.unlink(path)

        # Fetch symlink manifest and create local symlinks to match.
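        # Each manifest line is expected to be "<link name>\t<target>", e.g.
        # (hypothetically) "zlib-v1.2.11-1.slc7_x86-64.tar.gz\t<store path>/zlib-v1.2.11-1.slc7_x86-64.tar.gz".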
        debug("Fetching symlink manifest")
        n_symlinks = 0
        try:
            manifest = self.s3.get_object(Bucket=self.remoteStore,
                                          Key=links_path + ".manifest")
        except ClientError as exc:
            debug("Could not fetch manifest: %s", exc)
        else:
            for line in manifest["Body"].iter_lines():
                link_name, has_sep, target = line.rstrip(b"\n").partition(
                    b"\t")
                if not has_sep:
                    debug("Ignoring malformed line in manifest: %r", line)
                    continue
                if not target.startswith(b"../../"):
                    target = b"../../" + target
                target = os.fsdecode(target)
                link_path = os.path.join(self.workdir, links_path,
                                         os.fsdecode(link_name))
                dieOnError(
                    execute("ln -sf {} {}".format(target, link_path)),
                    "Unable to create symlink {} -> {}".format(
                        link_name, target))
                n_symlinks += 1
            debug("Got %d entries in manifest", n_symlinks)

        # Create remote symlinks that aren't in the manifest yet.
        debug("Looking for symlinks not in manifest")
        for link_key in self._s3_listdir(links_path):
            link_path = os.path.join(self.workdir, link_key)
            if os.path.islink(link_path):
                continue
            debug("Fetching leftover symlink %s", link_key)
            resp = self.s3.get_object(Bucket=self.remoteStore, Key=link_key)
            target = os.fsdecode(resp["Body"].read()).rstrip("\n")
            if not target.startswith("../../"):
                target = "../../" + target
            dieOnError(
                execute("ln -sf {} {}".format(target, link_path)),
                "Unable to create symlink {} -> {}".format(link_key, target))
Example no. 35
0
    def getRetry(self,
                 url,
                 dest=None,
                 returnResult=False,
                 log=True,
                 session=None):
        get = session.get if session is not None else requests.get
        for i in range(self.httpConnRetries):
            if i > 0:
                pauseSec = self.httpBackoff * (2**(i - 1))
                debug("GET %s failed: retrying in %.2f", url, pauseSec)
                time.sleep(pauseSec)
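                # Exponential backoff: with a hypothetical httpBackoff of 0.4s,
                # the retry pauses are 0.4s, 0.8s, 1.6s, ...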
                # If the download has failed, enable debug output, even if it was
                # disabled before. We disable debug output for e.g. symlink downloads
                # to make sure the output log isn't overwhelmed. If the download
                # failed, we want to know about it, though. Note that aliBuild has to
                # be called with --debug for this to take effect.
                log = True
            try:
                if log:
                    debug("GET %s: processing (attempt %d/%d)", url, i + 1,
                          self.httpConnRetries)
                if dest or returnResult:
                    # Destination specified -- file (dest) or buffer (returnResult).
                    # Use requests in stream mode
                    resp = get(url,
                               stream=True,
                               verify=not self.insecure,
                               timeout=self.httpTimeoutSec)
                    size = int(resp.headers.get("content-length", "-1"))
                    downloaded = 0
                    reportTime = time.time()
                    result = []

                    try:
                        destFp = open(dest + ".tmp", "wb") if dest else None
                        for chunk in filter(
                                bool, resp.iter_content(chunk_size=32768)):
                            if destFp:
                                destFp.write(chunk)
                            if returnResult:
                                result.append(chunk)
                            downloaded += len(chunk)
                            if log and size != -1:
                                now = time.time()
                                if downloaded == size:
                                    debug("Download complete")
                                elif now - reportTime > 3:
                                    debug("%.0f%% downloaded...",
                                          100 * downloaded / size)
                                    reportTime = now
                    finally:
                        if destFp:
                            destFp.close()

                    if size not in (downloaded, -1):
                        raise PartialDownloadError(downloaded, size)
                    if dest:
                        os.rename(dest + ".tmp",
                                  dest)  # we should not have errors here
                    return b''.join(result) if returnResult else True
                else:
                    # For CERN S3 we need to construct the JSON ourselves...
                    s3Request = re.match(
                        "https://s3.cern.ch/swift/v1[/]+([^/]*)/(.*)$", url)
                    if s3Request:
                        [bucket, prefix] = s3Request.groups()
                        url = "https://s3.cern.ch/swift/v1/%s/?prefix=%s" % (
                            bucket, prefix.lstrip("/"))
                        resp = get(url,
                                   verify=not self.insecure,
                                   timeout=self.httpTimeoutSec)
                        if resp.status_code == 404:
                            # No need to retry any further
                            return None
                        resp.raise_for_status()
                        return [{
                            "name": os.path.basename(x),
                            "type": "file"
                        } for x in resp.text.split()]
                    else:
                        # No destination specified: JSON request
                        resp = get(url,
                                   verify=not self.insecure,
                                   timeout=self.httpTimeoutSec)
                        if resp.status_code == 404:
                            # No need to retry any further
                            return None
                        resp.raise_for_status()
                        return resp.json()
            except (RequestException, ValueError, PartialDownloadError) as e:
                if i == self.httpConnRetries - 1:
                    error("GET %s failed: %s", url, e)
                if dest:
                    try:
                        os.unlink(dest + ".tmp")
                    except OSError:
                        # The partial download may not exist; ignore.
                        pass
        return None