def syncToLocal(self, p, spec):
  """Fetch precompiled tarballs and their symlink metadata for package *p*.

  Queries the remote store for the tarball list (storePath) and the link
  list (linksPath), downloads every listed tarball with curl into
  spec["tarballHashDir"], and recreates the relative symlinks in
  spec["tarballLinkDir"].  Best effort: a URLError (no precompiled
  package available) simply leaves both lists empty.
  """
  debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
  hashListUrl = format("%(rs)s/%(sp)s/", rs=self.remoteStore, sp=spec["storePath"])
  pkgListUrl = format("%(rs)s/%(sp)s/", rs=self.remoteStore, sp=spec["linksPath"])
  hashList = []
  pkgList = []
  try:
    # Single code path for both TLS modes: urlopen treats context=None the
    # same as omitting it, so we only build an unverified context when the
    # user explicitly asked for insecure mode.
    context = ssl._create_unverified_context() if self.insecure else None
    hashList = json.loads(urlopen(hashListUrl, context=context).read())
    pkgList = json.loads(urlopen(pkgListUrl, context=context).read())
  except URLError:
    debug("Cannot find precompiled package for %s@%s" % (p, spec["hash"]))
  except Exception as e:
    info(e)
    error("Unknown response from server")
  cmd = format("mkdir -p %(hd)s && mkdir -p %(ld)s",
               hd=spec["tarballHashDir"],
               ld=spec["tarballLinkDir"])
  execute(cmd)
  hashList = [x["name"] for x in hashList]
  for pkg in hashList:
    # -k disables certificate checks, mirroring the urlopen behaviour above.
    cmd = format("curl %(i)s -o %(hd)s/%(n)s -L %(rs)s/%(sp)s/%(n)s\n",
                 i="-k" if self.insecure else "",
                 n=pkg, sp=spec["storePath"], rs=self.remoteStore,
                 hd=spec["tarballHashDir"])
    debug(cmd)
    execute(cmd)
  for pkg in pkgList:
    if pkg["name"] in hashList:
      # Tarball was downloaded: point the link at it, relative to the store.
      cmd = format("ln -sf ../../%(a)s/store/%(sh)s/%(h)s/%(n)s %(ld)s/%(n)s\n",
                   a=self.architecture, h=spec["hash"], sh=spec["hash"][0:2],
                   n=pkg["name"], ld=spec["tarballLinkDir"])
      execute(cmd)
    else:
      # No tarball for this link: reserve the name with a dangling
      # placeholder; ignore failures (link may already exist).
      cmd = format("ln -s unknown %(ld)s/%(n)s 2>/dev/null || true\n",
                   ld=spec["tarballLinkDir"], n=pkg["name"])
      execute(cmd)
def checkRequirements(spec, cmd, homebrew_replacement, dockerImage):
  """Verify that the system requirement described by *spec* is available.

  Runs *cmd* (prefixed with *homebrew_replacement*, possibly inside
  *dockerImage*) and returns (0, "") when it succeeds; otherwise logs a
  detailed error and returns (err, "").
  """
  # The literal string "false" marks a recipe that opted out of the check.
  if cmd == "false":
    debug("Package %s is not a system requirement." % spec["package"])
    return (0, "")
  fullCmd = homebrew_replacement + cmd
  err, out = dockerStatusOutput(fullCmd, dockerImage=dockerImage,
                                executor=getStatusOutputBash)
  if err:
    # Prefix every output line with the package name for readability.
    detail = "\n".join("%s: %s" % (spec["package"], line)
                       for line in out.split("\n"))
    error(
        format(
            "Package %(p)s is a system requirement and cannot be found.\n"
            "This is due to the fact that the following script fails:\n\n"
            "%(cmd)s\n"
            "with the following output:\n\n"
            "%(error)s\n"
            "%(help)s\n",
            p=spec["package"], cmd=fullCmd, error=detail,
            help=spec.get("system_requirement_missing")))
    return (err, "")
  success("Required package %s will be picked up from the system." % spec["package"])
  debug(fullCmd)
  for line in out.split("\n"):
    debug(spec["package"] + ": " + line)
  return (0, "")
def syncToLocal(self, p, spec):
  """Mirror remote-store content for package *p* into the local tarball area.

  Retrieves the tarball and link listings from the remote store, fetches
  each tarball with curl, and recreates the store symlinks locally.
  Missing precompiled packages are not an error.
  """
  debug("Updating remote store for package %s@%s" % (p, spec["hash"]))
  hashListUrl = format("%(rs)s/%(sp)s/", rs=self.remoteStore, sp=spec["storePath"])
  pkgListUrl = format("%(rs)s/%(sp)s/", rs=self.remoteStore, sp=spec["linksPath"])
  hashList, pkgList = [], []
  try:
    if self.insecure:
      # User asked to skip TLS certificate verification.
      ctx = ssl._create_unverified_context()
      hashList = json.loads(urlopen(hashListUrl, context=ctx).read())
      pkgList = json.loads(urlopen(pkgListUrl, context=ctx).read())
    else:
      hashList = json.loads(urlopen(hashListUrl).read())
      pkgList = json.loads(urlopen(pkgListUrl).read())
  except URLError:
    debug("Cannot find precompiled package for %s@%s" % (p, spec["hash"]))
  except Exception as e:
    info(e)
    error("Unknown response from server")
  execute(format("mkdir -p %(hd)s && mkdir -p %(ld)s",
                 hd=spec["tarballHashDir"], ld=spec["tarballLinkDir"]))
  hashList = [entry["name"] for entry in hashList]
  for tarball in hashList:
    fetch = format("curl %(i)s -o %(hd)s/%(n)s -L %(rs)s/%(sp)s/%(n)s\n",
                   i="-k" if self.insecure else "",
                   n=tarball, sp=spec["storePath"], rs=self.remoteStore,
                   hd=spec["tarballHashDir"])
    debug(fetch)
    execute(fetch)
  for link in pkgList:
    if link["name"] not in hashList:
      # No tarball downloaded for this link: leave a placeholder, ignoring
      # failures if the link already exists.
      execute(format("ln -s unknown %(ld)s/%(n)s 2>/dev/null || true\n",
                     ld=spec["tarballLinkDir"], n=link["name"]))
      continue
    execute(format("ln -sf ../../%(a)s/store/%(sh)s/%(h)s/%(n)s %(ld)s/%(n)s\n",
                   a=self.architecture, h=spec["hash"], sh=spec["hash"][0:2],
                   n=link["name"], ld=spec["tarballLinkDir"]))
def checkRequirements(spec, cmd, homebrew_replacement, getstatusoutput_docker):
  """Check a system requirement by running its probe script.

  *cmd* is prefixed with *homebrew_replacement* and executed through
  *getstatusoutput_docker*.  Returns (0, "") when the requirement is
  satisfied (or the recipe opted out with a literal "false"); otherwise
  logs a detailed error and returns (err, "").
  """
  if cmd == "false":
    debug("Package %s is not a system requirement.", spec["package"])
    return (0, "")
  fullCmd = homebrew_replacement + cmd
  err, out = getstatusoutput_docker(fullCmd)
  if err:
    error(
        "Package %s is a system requirement and cannot be found.\n"
        "This is due to the fact that the following script fails:\n\n%s\n"
        "with the following output:\n\n%s\n%s\n",
        spec["package"], fullCmd,
        "\n".join("%s: %s" % (spec["package"], line)
                  for line in out.split("\n")),
        spec.get("system_requirement_missing"))
    return (err, "")
  success("Required package %s will be picked up from the system.", spec["package"])
  debug("%s", fullCmd)
  for line in out.split("\n"):
    debug("%s: %s", spec["package"], line)
  return (0, "")
def _s3_init(self):
  """Create the boto3 S3 client pointing at the CERN endpoint.

  Kept as a separate method so unit tests can patch it out.  boto3 is
  imported lazily so it is only required when this remote store is
  actually used.  Exits the process when boto3 or the AWS credential
  environment variables are missing.
  """
  try:
    import boto3
  except ImportError:
    error("boto3 must be installed to use %s", Boto3RemoteSync)
    sys.exit(1)
  try:
    keyId = os.environ["AWS_ACCESS_KEY_ID"]
    secretKey = os.environ["AWS_SECRET_ACCESS_KEY"]
  except KeyError:
    error(
        "you must pass the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env "
        "variables to %sBuild in order to use the S3 remote store", star())
    sys.exit(1)
  self.s3 = boto3.client("s3",
                         endpoint_url="https://s3.cern.ch",
                         aws_access_key_id=keyId,
                         aws_secret_access_key=secretKey)
def deps(recipesDir, topPackage, outFile, buildRequires, transitiveRed, disable):
  """Render a PDF graph of the dependencies among the recipes in *recipesDir*.

  Every *.sh recipe contributes its "requires" entries (plus
  "build_requires" when *buildRequires* is true).  If *topPackage* is not
  "all" the graph is restricted to its transitive dependencies.
  *transitiveRed* applies Graphviz `tred` before rendering to *outFile*.
  Returns False when *topPackage* is unknown, True otherwise; exits the
  process on an unparsable recipe.
  """
  dot = {}
  keys = ["requires"]
  if buildRequires:
    keys.append("build_requires")
  for p in glob("%s/*.sh" % recipesDir):
    debug(format("Reading file %(filename)s", filename=p))
    try:
      err, recipe, _ = parseRecipe(getRecipeReader(p))
      name = recipe["package"]
      if name in disable:
        debug("Ignoring %s, disabled explicitly" % name)
        continue
    except Exception as e:
      error(format("Error reading recipe %(filename)s: %(type)s: %(msg)s",
                   filename=p, type=type(e).__name__, msg=str(e)))
      sys.exit(1)
    dot[name] = dot.get(name, [])
    for k in keys:
      for d in recipe.get(k, []):
        d = d.split(":")[0]  # drop any ":<qualifier>" suffix
        d in disable or dot[name].append(d)
  selected = None
  if topPackage != "all":
    if not topPackage in dot:
      error(format("Package %(topPackage)s does not exist", topPackage=topPackage))
      return False
    # Fixed point: keep adding dependencies of already-selected packages
    # until the selection stops growing.
    selected = [topPackage]
    olen = 0
    while len(selected) != olen:
      olen = len(selected)
      selected += [x for s in selected if s in dot
                   for x in dot[s] if not x in selected]
    selected.sort()
  result = "digraph {\n"
  # Loop variable renamed from `deps` so it no longer shadows this function.
  for p, pdeps in list(dot.items()):
    if selected and not p in selected:
      continue
    result += " \"%s\";\n" % p
    for d in pdeps:
      result += " \"%s\" -> \"%s\";\n" % (p, d)
  result += "}\n"
  # mode="wt" is required: `result` is a str, and NamedTemporaryFile's
  # default "w+b" mode would raise TypeError on Python 3.
  with NamedTemporaryFile(delete=False, mode="wt") as fp:
    fp.write(result)
  try:
    if transitiveRed:
      execute(format("tred %(dotFile)s > %(dotFile)s.0 && mv %(dotFile)s.0 %(dotFile)s",
                     dotFile=fp.name))
    execute(["dot", fp.name, "-Tpdf", "-o", outFile])
  except Exception as e:
    error(format("Error generating dependencies with dot: %(type)s: %(msg)s",
                 type=type(e).__name__, msg=str(e)))
  else:
    info(format("Dependencies graph generated: %(outFile)s", outFile=outFile))
  remove(fp.name)
  return True
def checkRequirements(spec, cmd, homebrew_replacement, dockerImage):
  """Run the system-requirement probe for *spec* and report the outcome.

  Returns (0, "") when the probe succeeds or the recipe opted out
  ("false"); otherwise logs the failing script and its output and
  returns (err, "").
  """
  if cmd == "false":
    debug("Package %s is not a system requirement." % spec["package"])
    return (0, "")
  probe = homebrew_replacement + cmd
  err, out = dockerStatusOutput(probe, dockerImage=dockerImage,
                                executor=getStatusOutputBash)
  if not err:
    success("Required package %s will be picked up from the system." % spec["package"])
    debug(probe)
    for line in out.split("\n"):
      debug(spec["package"] + ": " + line)
    return (0, "")
  annotated = "\n".join("%s: %s" % (spec["package"], line)
                        for line in out.split("\n"))
  error(format("Package %(p)s is a system requirement and cannot be found.\n"
               "This is due to the fact that the following script fails:\n\n"
               "%(cmd)s\n"
               "with the following output:\n\n"
               "%(error)s\n"
               "%(help)s\n",
               p=spec["package"],
               cmd=probe,
               error=annotated,
               help=spec.get("system_requirement_missing")))
  return (err, "")
def performValidateDefaults(spec):
  """Validate *spec* against the chosen defaults, logging any failure."""
  result = validateDefaults(spec, args.defaults)
  ok, msg, valid = result
  if not ok:
    error(msg)
  return result
def doDoctor(args, parser):
  """Diagnose whether the requested packages can be built in this environment.

  Checks that recipes exist, probes system prerequisites (optionally inside
  a Docker image), validates the chosen defaults, prints summary banners,
  and exits the process with a non-zero code when any problem was found.
  """
  if not exists(args.configDir):
    parser.error("Wrong path to alidist specified: %s" % args.configDir)
  prunePaths(abspath(args.workDir))
  # A user ~/.rootlogon.C can alter the environment in non-obvious ways.
  if exists(expanduser("~/.rootlogon.C")):
    warning(
        "You have a ~/.rootlogon.C notice that this might"
        " interfere with your environment in hidden ways.\n"
        "Please review it an make sure you are not force loading any library"
        " which might interphere with the rest of the setup.")
  # Decide if we can use homebrew. If not, we replace it with "true" so
  # that we do not get spurious messages on linux
  homebrew_replacement = ""
  err, output = getstatusoutput("which brew")
  if err:
    homebrew_replacement = "brew() { true; }; "
  dockerImage = args.dockerImage if "dockerImage" in args else ""
  if args.docker and not dockerImage:
    # Default builder image derived from the architecture family.
    dockerImage = "alisw/%s-builder" % args.architecture.split("_")[0]
  logger.setLevel(logging.BANNER)
  if args.debug:
    logger.setLevel(logging.DEBUG)
  specs = {}
  packages = []
  exitcode = 0
  # Keep only the packages which actually have a recipe on disk.
  for p in args.packages:
    path = "%s/%s.sh" % (args.configDir, p.lower())
    if not exists(path):
      error("Cannot find recipe %s for package %s." % (path, p))
      exitcode = 1
      continue
    packages.append(p)
  systemInfo()
  specs = {}

  def unreachable():
    # dieOnError callback placeholder: this path is never expected to run.
    assert (False)

  defaultsReader = lambda: readDefaults(args.configDir, args.defaults, parser.error)
  (err, overrides, taps) = parseDefaults(args.disable, defaultsReader, info)
  if err:
    error(err)
    exit(1)

  def performValidateDefaults(spec):
    # Validate one spec against the chosen defaults, logging failures.
    (ok, msg, valid) = validateDefaults(spec, args.defaults)
    if not ok:
      error(msg)
    return (ok, msg, valid)

  (fromSystem, own, failed, validDefaults) = getPackageList(
      packages=packages,
      specs=specs,
      configDir=args.configDir,
      preferSystem=args.preferSystem,
      noSystem=args.noSystem,
      architecture=args.architecture,
      disable=args.disable,
      defaults=args.defaults,
      dieOnError=lambda x, y: unreachable,
      performPreferCheck=lambda pkg, cmd: checkPreferSystem(
          pkg, cmd, homebrew_replacement, dockerImage),
      performRequirementCheck=lambda pkg, cmd: checkRequirements(
          pkg, cmd, homebrew_replacement, dockerImage),
      performValidateDefaults=performValidateDefaults,
      overrides=overrides,
      taps=taps,
      log=info)
  # Anything not system-provided, own-built or failed is built by policy.
  alwaysBuilt = set([x for x in specs]) - fromSystem - own - failed
  if alwaysBuilt:
    banner(
        "The following packages will be built by aliBuild because\n"
        " usage of a system version of it is not allowed or supported, by policy:\n\n- "
        + " \n- ".join(alwaysBuilt))
  if fromSystem:
    banner(
        "The following packages will be picked up from the system:\n\n- "
        + "\n- ".join(fromSystem)
        + "\n\nIf this is not you want, you have to uninstall / unload them."
    )
  if own:
    banner(
        "The following packages will be built by aliBuild because they couldn't be picked up from the system:\n\n- "
        + "\n- ".join(own)
        + "\n\nThis is not a real issue, but it might take longer the first time you invoke aliBuild."
        + "\nLook at the error messages above to get hints on what packages you need to install separately."
    )
  if failed:
    banner(
        "The following packages are system dependencies and could not be found:\n\n- "
        + "\n- ".join(failed)
        + "\n\nLook at the error messages above to get hints on what packages you need to install separately."
    )
    exitcode = 1
  if validDefaults and args.defaults not in validDefaults:
    banner(
        "The list of packages cannot be built with the defaults you have specified.\n"
        + "List of valid defaults:\n\n- " + "\n- ".join(validDefaults)
        + "\n\nUse the `--defaults' switch to specify one of them.")
    exitcode = 2
  if validDefaults is None:
    banner(
        "No valid defaults combination was found for the given list of packages, check your recipes!"
    )
    exitcode = 3
  if exitcode:
    error(
        "There were errors: build cannot be performed if they are not resolved. Check the messages above."
    )
  exit(exitcode)
def doInit(args):
  """Create local development checkouts for the requested packages.

  Clones the recipes repository if needed, resolves the package specs
  (ignoring disables and system packages, which are irrelevant here) and
  clones each package's source into args.develPrefix, using the reference
  repositories in args.referenceSources and pointing the push URL at the
  recipe's write_repo.
  """
  assert(args.pkgname != None)
  assert(type(args.dist) == dict)
  assert(sorted(args.dist.keys()) == ["repo", "ver"])
  pkgs = parsePackagesDefinition(args.pkgname)
  assert(type(pkgs) == list)
  if args.dryRun:
    info("This will initialise local checkouts for %s\n"
         "--dry-run / -n specified. Doing nothing."
         % ",".join(x["name"] for x in pkgs))
    exit(0)
  try:
    # Create target directories only when missing.
    path.exists(args.develPrefix) or os.mkdir(args.develPrefix)
    path.exists(args.referenceSources) or os.makedirs(args.referenceSources)
  except OSError as e:
    error(str(e))
    exit(1)
  # Fetch recipes first if necessary
  if path.exists(args.configDir):
    warning("using existing recipes from %s" % args.configDir)
  else:
    cmd = format("git clone %(repo)s%(branch)s %(cd)s",
                 repo=args.dist["repo"] if ":" in args.dist["repo"]
                      else "https://github.com/%s" % args.dist["repo"],
                 branch=" -b "+args.dist["ver"] if args.dist["ver"] else "",
                 cd=args.configDir)
    debug(cmd)
    err = execute(cmd)
    dieOnError(err!=0, "cannot clone recipes")
  # Use standard functions supporting overrides and taps. Ignore all disables
  # and system packages as they are irrelevant in this context
  specs = {}
  defaultsReader = lambda: readDefaults(args.configDir, args.defaults, error)
  (err, overrides, taps) = parseDefaults([], defaultsReader, debug)
  (_,_,_,validDefaults) = getPackageList(packages=[ p["name"] for p in pkgs ],
                                         specs=specs,
                                         configDir=args.configDir,
                                         preferSystem=False,
                                         noSystem=True,
                                         architecture="",
                                         disable=[],
                                         defaults=args.defaults,
                                         dieOnError=dieOnError,
                                         performPreferCheck=lambda *x, **y: (1, ""),
                                         performRequirementCheck=lambda *x, **y: (0, ""),
                                         performValidateDefaults=lambda spec : validateDefaults(spec, args.defaults),
                                         overrides=overrides,
                                         taps=taps,
                                         log=debug)
  dieOnError(validDefaults and args.defaults not in validDefaults,
             "Specified default `%s' is not compatible with the packages you want to build.\n" % args.defaults +
             "Valid defaults:\n\n- " + "\n- ".join(sorted(validDefaults)))
  for p in pkgs:
    spec = specs.get(p["name"])
    dieOnError(spec is None, "cannot find recipe for package %s" % p["name"])
    dest = join(args.develPrefix, spec["package"])
    # Push URL: write_repo when given, otherwise the read source.
    writeRepo = spec.get("write_repo", spec.get("source"))
    dieOnError(not writeRepo, "package %s has no source field and cannot be developed" % spec["package"])
    if path.exists(dest):
      warning("not cloning %s since it already exists" % spec["package"])
      continue
    # Fall back to the recipe tag/version when no version was requested.
    p["ver"] = p["ver"] if p["ver"] else spec.get("tag", spec["version"])
    debug("cloning %s%s for development" % (spec["package"], " version "+p["ver"] if p["ver"] else ""))
    updateReferenceRepoSpec(args.referenceSources, spec["package"], spec, True)
    cmd = format("git clone %(readRepo)s%(branch)s --reference %(refSource)s %(cd)s && " +
                 "cd %(cd)s && git remote set-url --push origin %(writeRepo)s",
                 readRepo=spec["source"],
                 writeRepo=writeRepo,
                 branch=" -b "+p["ver"] if p["ver"] else "",
                 refSource=join(args.referenceSources, spec["package"].lower()),
                 cd=dest)
    debug(cmd)
    err = execute(cmd)
    dieOnError(err!=0, "cannot clone %s%s" % (spec["package"], " version "+p["ver"] if p["ver"] else ""))
  banner(format("Development directory %(d)s created%(pkgs)s",
                pkgs=" for "+", ".join([ x["name"].lower() for x in pkgs ]) if pkgs else "",
                d=args.develPrefix))
def doInit(args):
  """Create local development checkouts for the requested packages.

  Variant that neutralises getPackageList's dieOnError callback (no-op
  lambda), so spec resolution problems do not abort the run there.
  Clones the recipes repository if needed and clones each package's
  source into args.develPrefix, using the reference repositories in
  args.referenceSources and pointing the push URL at write_repo.
  """
  assert(args.pkgname != None)
  assert(type(args.dist) == dict)
  assert(sorted(args.dist.keys()) == ["repo", "ver"])
  pkgs = parsePackagesDefinition(args.pkgname)
  assert(type(pkgs) == list)
  if args.dryRun:
    info("This will initialise local checkouts for %s\n"
         "--dry-run / -n specified. Doing nothing."
         % ",".join(x["name"] for x in pkgs))
    exit(0)
  try:
    # Create target directories only when missing.
    path.exists(args.develPrefix) or os.mkdir(args.develPrefix)
    path.exists(args.referenceSources) or os.makedirs(args.referenceSources)
  except OSError as e:
    error(str(e))
    exit(1)
  # Fetch recipes first if necessary
  if path.exists(args.configDir):
    warning("using existing recipes from %s" % args.configDir)
  else:
    cmd = format("git clone %(repo)s%(branch)s %(cd)s",
                 repo=args.dist["repo"] if ":" in args.dist["repo"]
                      else "https://github.com/%s" % args.dist["repo"],
                 branch=" -b "+args.dist["ver"] if args.dist["ver"] else "",
                 cd=args.configDir)
    debug(cmd)
    err = execute(cmd)
    dieOnError(err!=0, "cannot clone recipes")
  # Use standard functions supporting overrides and taps. Ignore all disables
  # and system packages as they are irrelevant in this context
  specs = {}
  defaultsReader = lambda: readDefaults(args.configDir, args.defaults, error)
  (err, overrides, taps) = parseDefaults([], defaultsReader, debug)
  (_,_,_,validDefaults) = getPackageList(packages=[ p["name"] for p in pkgs ],
                                         specs=specs,
                                         configDir=args.configDir,
                                         preferSystem=False,
                                         noSystem=True,
                                         architecture="",
                                         disable=[],
                                         defaults=args.defaults,
                                         dieOnError=lambda *x, **y: None,
                                         performPreferCheck=lambda *x, **y: (1, ""),
                                         performRequirementCheck=lambda *x, **y: (0, ""),
                                         performValidateDefaults=lambda spec : validateDefaults(spec, args.defaults),
                                         overrides=overrides,
                                         taps=taps,
                                         log=debug)
  dieOnError(validDefaults and args.defaults not in validDefaults,
             "Specified default `%s' is not compatible with the packages you want to build.\n" % args.defaults +
             "Valid defaults:\n\n- " + "\n- ".join(sorted(validDefaults)))
  for p in pkgs:
    spec = specs.get(p["name"])
    dieOnError(spec is None, "cannot find recipe for package %s" % p["name"])
    dest = join(args.develPrefix, spec["package"])
    # Push URL: write_repo when given, otherwise the read source.
    writeRepo = spec.get("write_repo", spec.get("source"))
    dieOnError(not writeRepo, "package %s has no source field and cannot be developed" % spec["package"])
    if path.exists(dest):
      warning("not cloning %s since it already exists" % spec["package"])
      continue
    # Fall back to the recipe tag/version when no version was requested.
    p["ver"] = p["ver"] if p["ver"] else spec.get("tag", spec["version"])
    debug("cloning %s%s for development" % (spec["package"], " version "+p["ver"] if p["ver"] else ""))
    updateReferenceRepoSpec(args.referenceSources, spec["package"], spec, True)
    cmd = format("git clone %(readRepo)s%(branch)s --reference %(refSource)s %(cd)s && " +
                 "cd %(cd)s && git remote set-url --push origin %(writeRepo)s",
                 readRepo=spec["source"],
                 writeRepo=writeRepo,
                 branch=" -b "+p["ver"] if p["ver"] else "",
                 refSource=join(args.referenceSources, spec["package"].lower()),
                 cd=dest)
    debug(cmd)
    err = execute(cmd)
    dieOnError(err!=0, "cannot clone %s%s" % (spec["package"], " version "+p["ver"] if p["ver"] else ""))
  banner(format("Development directory %(d)s created%(pkgs)s",
                pkgs=" for "+", ".join([ x["name"].lower() for x in pkgs ]) if pkgs else "",
                d=args.develPrefix))
def getRetry(self, url, dest=None, returnResult=False, log=True, session=None):
  """GET *url* with exponential-backoff retries.

  dest: stream the body into this file (written to a ".tmp" sibling and
    renamed on completion).
  returnResult: accumulate the body and return it as bytes.
  log: emit per-attempt debug output; forced on after the first failure.
  session: optional requests.Session to reuse connections.

  With neither dest nor returnResult, the response is parsed as JSON —
  except for CERN S3 listing URLs, which are converted to a list of
  {"name", "type"} dicts.  Returns None on 404 or when all
  self.httpConnRetries attempts fail.
  """
  get = session.get if session is not None else requests.get
  for i in range(0, self.httpConnRetries):
    if i > 0:
      # Exponential backoff: httpBackoff, 2*httpBackoff, 4*httpBackoff, ...
      pauseSec = self.httpBackoff * (2**(i - 1))
      debug("GET %s failed: retrying in %.2f", url, pauseSec)
      time.sleep(pauseSec)
      # If the download has failed, enable debug output, even if it was
      # disabled before. We disable debug output for e.g. symlink downloads
      # to make sure the output log isn't overwhelmed. If the download
      # failed, we want to know about it, though. Note that aliBuild has to
      # be called with --debug for this to take effect.
      log = True
    try:
      if log:
        debug("GET %s: processing (attempt %d/%d)", url, i + 1,
              self.httpConnRetries)
      if dest or returnResult:
        # Destination specified -- file (dest) or buffer (returnResult).
        # Use requests in stream mode
        resp = get(url, stream=True, verify=not self.insecure,
                   timeout=self.httpTimeoutSec)
        size = int(resp.headers.get("content-length", "-1"))
        downloaded = 0
        reportTime = time.time()
        result = []
        try:
          destFp = open(dest + ".tmp", "wb") if dest else None
          for chunk in filter(bool, resp.iter_content(chunk_size=32768)):
            if destFp:
              destFp.write(chunk)
            if returnResult:
              result.append(chunk)
            downloaded += len(chunk)
            if log and size != -1:
              # Progress report at most every 3 seconds.
              now = time.time()
              if downloaded == size:
                debug("Download complete")
              elif now - reportTime > 3:
                debug("%.0f%% downloaded...", 100 * downloaded / size)
                reportTime = now
        finally:
          if destFp:
            destFp.close()
        if size not in (downloaded, -1):
          raise PartialDownloadError(downloaded, size)
        if dest:
          # Rename into place only after a complete download.
          os.rename(dest + ".tmp", dest)
        # we should not have errors here
        return b''.join(result) if returnResult else True
      else:
        # For CERN S3 we need to construct the JSON ourself...
        s3Request = re.match("https://s3.cern.ch/swift/v1[/]+([^/]*)/(.*)$", url)
        if s3Request:
          [bucket, prefix] = s3Request.groups()
          url = "https://s3.cern.ch/swift/v1/%s/?prefix=%s" % (
              bucket, prefix.lstrip("/"))
          resp = get(url, verify=not self.insecure,
                     timeout=self.httpTimeoutSec)
          if resp.status_code == 404:
            # No need to retry any further
            return None
          resp.raise_for_status()
          return [{
              "name": os.path.basename(x),
              "type": "file"
          } for x in resp.text.split()]
        else:
          # No destination specified: JSON request
          resp = get(url, verify=not self.insecure,
                     timeout=self.httpTimeoutSec)
          if resp.status_code == 404:
            # No need to retry any further
            return None
          resp.raise_for_status()
          return resp.json()
    except (RequestException, ValueError, PartialDownloadError) as e:
      if i == self.httpConnRetries - 1:
        error("GET %s failed: %s", url, e)
      if dest:
        # Best-effort cleanup of the partial temp file between attempts.
        try:
          os.unlink(dest + ".tmp")
        except:
          pass
  # All attempts exhausted.
  return None
def doDoctor(args, parser):
  """Diagnose whether the requested packages can be built in this environment.

  Older variant without the always-built banner.  Checks that recipes
  exist, probes system prerequisites (optionally inside a Docker image),
  validates the chosen defaults, prints summary banners and exits the
  process with a non-zero code when any problem was found.
  """
  if not exists(args.configDir):
    parser.error("Wrong path to alidist specified: %s" % args.configDir)
  prunePaths(abspath(args.workDir))
  # A user ~/.rootlogon.C can alter the environment in non-obvious ways.
  if exists(expanduser("~/.rootlogon.C")):
    warning("You have a ~/.rootlogon.C notice that this might"
            " interfere with your environment in hidden ways.\n"
            "Please review it an make sure you are not force loading any library"
            " which might interphere with the rest of the setup.")
  # Decide if we can use homebrew. If not, we replace it with "true" so
  # that we do not get spurious messages on linux
  homebrew_replacement = ""
  err, output = getstatusoutput("which brew")
  if err:
    homebrew_replacement = "brew() { true; }; "
  dockerImage = args.dockerImage if "dockerImage" in args else ""
  if args.docker and not dockerImage:
    # Default builder image derived from the architecture family.
    dockerImage = "alisw/%s-builder" % args.architecture.split("_")[0]
  logger.setLevel(logging.BANNER)
  if args.debug:
    logger.setLevel(logging.DEBUG)
  specs = {}
  packages = []
  exitcode = 0
  # Keep only the packages which actually have a recipe on disk.
  for p in args.packages:
    path = "%s/%s.sh" % (args.configDir, p.lower())
    if not exists(path):
      error("Cannot find recipe %s for package %s." % (path, p))
      exitcode = 1
      continue
    packages.append(p)
  systemInfo()
  specs = {}

  def unreachable():
    # dieOnError callback placeholder: this path is never expected to run.
    assert(False)

  defaultsReader = lambda : readDefaults(args.configDir, args.defaults, parser.error)
  (err, overrides, taps) = parseDefaults(args.disable, defaultsReader, info)
  if err:
    error(err)
    exit(1)

  def performValidateDefaults(spec):
    # Validate one spec against the chosen defaults, logging failures.
    (ok,msg,valid) = validateDefaults(spec, args.defaults)
    if not ok:
      error(msg)
    return (ok,msg,valid)

  (fromSystem, own, failed, validDefaults) = getPackageList(packages = packages,
                                                            specs = specs,
                                                            configDir = args.configDir,
                                                            preferSystem = args.preferSystem,
                                                            noSystem = args.noSystem,
                                                            architecture = args.architecture,
                                                            disable = args.disable,
                                                            defaults = args.defaults,
                                                            dieOnError = lambda x, y : unreachable,
                                                            performPreferCheck = lambda pkg, cmd : checkPreferSystem(pkg, cmd, homebrew_replacement, dockerImage),
                                                            performRequirementCheck = lambda pkg, cmd : checkRequirements(pkg, cmd, homebrew_replacement, dockerImage),
                                                            performValidateDefaults = performValidateDefaults,
                                                            overrides = overrides,
                                                            taps = taps,
                                                            log = info)
  if fromSystem:
    banner("The following packages will be picked up from the system:\n\n- "
           + "\n- ".join(fromSystem)
           + "\n\nIf this is not you want, you have to uninstall / unload them.")
  if own:
    banner("The following packages will be built by aliBuild because they couldn't be picked up from the system:\n\n- "
           + "\n- ".join(own)
           + "\n\nThis is not a real issue, but it might take longer the first time you invoke aliBuild."
           + "\nLook at the error messages above to get hints on what packages you need to install separately.")
  if failed:
    banner("The following packages are system dependencies and could not be found:\n\n- "
           + "\n- ".join(failed)
           + "\n\nLook at the error messages above to get hints on what packages you need to install separately.")
    exitcode = 1
  if validDefaults and args.defaults not in validDefaults:
    banner("The list of packages cannot be built with the defaults you have specified.\n"
           + "List of valid defaults:\n\n- " + "\n- ".join(validDefaults)
           + "\n\nUse the `--defaults' switch to specify one of them.")
    exitcode = 2
  if validDefaults is None:
    banner("No valid defaults combination was found for the given list of packages, check your recipes!")
    exitcode = 3
  if exitcode:
    error("There were errors: build cannot be performed if they are not resolved. Check the messages above.")
  exit(exitcode)
def doDeps(args, parser):
  """Render the dependency graph of args.package as a PDF via Graphviz.

  Resolves the package specs (probing system packages through a Docker
  container), colours each node by whether it is a build-only, runtime-only
  or mixed dependency, writes the dot source (to args.outdot if given,
  else a temp file), optionally applies `tred` (args.neat), and renders to
  args.outgraph.  Returns True.
  """
  # Check if we have an output file
  if not args.outgraph:
    parser.error("Specify a PDF output file with --outgraph")
  # In case we are using Docker
  dockerImage = args.dockerImage if "dockerImage" in args else ""
  if args.docker and not dockerImage:
    dockerImage = "alisw/%s-builder" % args.architecture.split("_")[0]
  # Resolve all the package parsing boilerplate
  specs = {}
  defaultsReader = lambda: readDefaults(args.configDir, args.defaults, parser.error, args.architecture)
  (err, overrides, taps) = parseDefaults(args.disable, defaultsReader, debug)
  # NOTE(review): assuming the DockerRunner context only wraps the
  # getPackageList call (the lambdas below capture getstatusoutput_docker).
  with DockerRunner(dockerImage, ["--network=host"]) as getstatusoutput_docker:
    systemPackages, ownPackages, failed, validDefaults = \
      getPackageList(packages = [args.package],
                     specs = specs,
                     configDir = args.configDir,
                     preferSystem = args.preferSystem,
                     noSystem = args.noSystem,
                     architecture = args.architecture,
                     disable = args.disable,
                     defaults = args.defaults,
                     performPreferCheck = lambda pkg, cmd: getstatusoutput_docker(cmd),
                     performRequirementCheck = lambda pkg, cmd: getstatusoutput_docker(cmd),
                     performValidateDefaults = lambda spec: validateDefaults(spec, args.defaults),
                     overrides = overrides,
                     taps = taps,
                     log = debug)
  dieOnError(validDefaults and args.defaults not in validDefaults,
             "Specified default `%s' is not compatible with the packages you want to build.\n" % args.defaults +
             "Valid defaults:\n\n- " + "\n- ".join(sorted(validDefaults)))
  for s in specs.values():
    # Remove disabled packages
    s["requires"] = [r for r in s["requires"] if not r in args.disable and r != "defaults-release"]
    s["build_requires"] = [r for r in s["build_requires"] if not r in args.disable and r != "defaults-release"]
    s["runtime_requires"] = [r for r in s["runtime_requires"] if not r in args.disable and r != "defaults-release"]
  # Determine which pacakages are only build/runtime dependencies
  all_build = set()
  all_runtime = set()
  for k,spec in specs.items():
    all_build.update(spec["build_requires"])
    all_runtime.update(spec["runtime_requires"])
  all_both = all_build.intersection(all_runtime)
  dot = "digraph {\n"
  for k,spec in specs.items():
    if k == "defaults-release":
      continue
    # Determine node color based on its dependency status
    color = None
    if k in all_both:
      color = "tomato1"
    elif k in all_runtime:
      color = "greenyellow"
    elif k in all_build:
      color = "plum"
    elif k == args.package:
      color = "gold"
    else:
      assert color, "This should not happen (happened for %s)" % k
    # Node definition
    dot += '"%s" [shape=box, style="rounded,filled", fontname="helvetica", fillcolor=%s]\n' % (k,color)
    # Connections (different whether it's a build dependency or a runtime one)
    for dep in spec["build_requires"]:
      dot += '"%s" -> "%s" [color=grey70]\n' % (k, dep)
    for dep in spec["runtime_requires"]:
      dot += '"%s" -> "%s" [color=dodgerblue3]\n' % (k, dep)
  dot += "}\n"
  # Persist the dot source: to the user-requested path or a temp file.
  if args.outdot:
    fp = open(args.outdot, "wt")
  else:
    fp = NamedTemporaryFile(delete=False, mode="wt")
  fp.write(dot)
  fp.close()
  try:
    if args.neat:
      # Transitive reduction to de-clutter the graph.
      execute(format("tred %(dotFile)s > %(dotFile)s.0 && mv %(dotFile)s.0 %(dotFile)s", dotFile=fp.name))
    execute(["dot", fp.name, "-Tpdf", "-o", args.outgraph])
  except Exception as e:
    error("Error generating dependencies with dot: %s: %s", type(e).__name__, e)
  else:
    info("Dependencies graph generated: %s" % args.outgraph)
  # Keep the dot source only when the user explicitly asked for it.
  if fp.name != args.outdot:
    remove(fp.name)
  else:
    info("Intermediate dot file for Graphviz saved: %s" % args.outdot)
  return True
def performValidateDefaults(spec):
  """Check *spec* against the selected defaults; log the message on failure."""
  ok, msg, valid = validateDefaults(spec, args.defaults)
  if not ok:
    error(msg)
  return (ok, msg, valid)
def doInit(args):
  """Create local development checkouts for the requested packages.

  Newest variant using the git() helper.  Clones the recipes repository
  if needed, resolves package specs (ignoring disables and system
  packages), clones each package from its reference repository into
  args.develPrefix with an "upstream" remote whose push URL is the
  recipe's write_repo, and rewrites the alternates file so the clone
  references the mirror via a relative path.
  """
  assert (args.pkgname != None)
  assert (type(args.dist) == dict)
  assert (sorted(args.dist.keys()) == ["repo", "ver"])
  pkgs = parsePackagesDefinition(args.pkgname)
  assert (type(pkgs) == list)
  if args.dryRun:
    info(
        "This will initialise local checkouts for %s\n"
        "--dry-run / -n specified. Doing nothing.",
        ",".join(x["name"] for x in pkgs))
    sys.exit(0)
  try:
    # Create target directories only when missing.
    path.exists(args.develPrefix) or os.mkdir(args.develPrefix)
    path.exists(args.referenceSources) or os.makedirs(args.referenceSources)
  except OSError as e:
    error("%s", e)
    sys.exit(1)
  # Fetch recipes first if necessary
  if path.exists(args.configDir):
    warning("using existing recipes from %s", args.configDir)
  else:
    cmd = [
        "clone", "--origin", "upstream",
        args.dist["repo"] if ":" in args.dist["repo"]
        else "https://github.com/" + args.dist["repo"]
    ]
    if args.dist["ver"]:
      cmd.extend(["-b", args.dist["ver"]])
    cmd.append(args.configDir)
    git(cmd)
  # Use standard functions supporting overrides and taps. Ignore all disables
  # and system packages as they are irrelevant in this context
  specs = {}
  defaultsReader = lambda: readDefaults(args.configDir, args.defaults,
                                        lambda msg: error("%s", msg),
                                        args.architecture)
  (err, overrides, taps) = parseDefaults([], defaultsReader, debug)
  (_, _, _, validDefaults) = getPackageList(
      packages=[p["name"] for p in pkgs],
      specs=specs,
      configDir=args.configDir,
      preferSystem=False,
      noSystem=True,
      architecture="",
      disable=[],
      defaults=args.defaults,
      performPreferCheck=lambda *x, **y: (1, ""),
      performRequirementCheck=lambda *x, **y: (0, ""),
      performValidateDefaults=lambda spec: validateDefaults(spec, args.defaults),
      overrides=overrides,
      taps=taps,
      log=debug)
  dieOnError(
      validDefaults and args.defaults not in validDefaults,
      "Specified default `%s' is not compatible with the packages you want to build.\n" % args.defaults +
      "Valid defaults:\n\n- " + "\n- ".join(sorted(validDefaults)))
  for p in pkgs:
    spec = specs.get(p["name"])
    dieOnError(spec is None, "cannot find recipe for package %s" % p["name"])
    dest = join(args.develPrefix, spec["package"])
    # Push URL: write_repo when given, otherwise the read source.
    writeRepo = spec.get("write_repo", spec.get("source"))
    dieOnError(
        not writeRepo,
        "package %s has no source field and cannot be developed" % spec["package"])
    if path.exists(dest):
      warning("not cloning %s since it already exists", spec["package"])
      continue
    # Fall back to the recipe tag/version when no version was requested.
    p["ver"] = p["ver"] if p["ver"] else spec.get("tag", spec["version"])
    debug("cloning %s%s for development", spec["package"],
          " version " + p["ver"] if p["ver"] else "")
    updateReferenceRepoSpec(args.referenceSources, spec["package"], spec, True, False)
    cmd = [
        "clone", "--origin", "upstream", spec["source"], "--reference",
        join(args.referenceSources, spec["package"].lower())
    ]
    if p["ver"]:
      cmd.extend(["-b", p["ver"]])
    cmd.append(dest)
    git(cmd)
    git(("remote", "set-url", "--push", "upstream", writeRepo), directory=dest)
    # Make it point relatively to the mirrors for relocation: as per Git specifics, the path has to
    # be relative to the repository's `.git` directory. Don't do it if no common path is found
    repoObjects = os.path.join(os.path.realpath(dest), ".git", "objects")
    refObjects = os.path.join(os.path.realpath(args.referenceSources),
                              spec["package"].lower(), "objects")
    repoAltConf = os.path.join(repoObjects, "info", "alternates")
    if len(os.path.commonprefix([repoObjects, refObjects])) > 1:
      with open(repoAltConf, "w") as fil:
        fil.write(os.path.relpath(refObjects, repoObjects) + "\n")
  banner(
      "Development directory %s created%s", args.develPrefix,
      " for " + ", ".join(x["name"].lower() for x in pkgs) if pkgs else "")
def deps(recipesDir, topPackage, outFile, buildRequires, transitiveRed, disable):
  """Render a PDF graph of the dependencies among the recipes in *recipesDir*.

  Collects "requires" (and "build_requires" when *buildRequires* is true)
  from every *.sh recipe, optionally restricts the graph to the transitive
  dependencies of *topPackage* ("all" keeps everything), optionally applies
  Graphviz `tred` transitive reduction, and renders to *outFile* with dot.
  Returns False when *topPackage* is unknown, True otherwise; exits the
  process on an unparsable recipe.
  """
  dot = {}
  keys = ["requires"]
  if buildRequires:
    keys.append("build_requires")
  for p in glob("%s/*.sh" % recipesDir):
    debug(format("Reading file %(filename)s", filename=p))
    try:
      err, recipe, _ = parseRecipe(getRecipeReader(p))
      name = recipe["package"]
      if name in disable:
        debug("Ignoring %s, disabled explicitly" % name)
        continue
    except Exception as e:
      error(
          format("Error reading recipe %(filename)s: %(type)s: %(msg)s",
                 filename=p, type=type(e).__name__, msg=str(e)))
      sys.exit(1)
    dot[name] = dot.get(name, [])
    for k in keys:
      for d in recipe.get(k, []):
        d = d.split(":")[0]  # drop any ":<qualifier>" suffix
        d in disable or dot[name].append(d)
  selected = None
  if topPackage != "all":
    if not topPackage in dot:
      error(
          format("Package %(topPackage)s does not exist", topPackage=topPackage))
      return False
    # Fixed point: keep adding dependencies of already-selected packages
    # until the selection stops growing.
    selected = [topPackage]
    olen = 0
    while len(selected) != olen:
      olen = len(selected)
      selected += [
          x for s in selected if s in dot for x in dot[s] if not x in selected
      ]
    selected.sort()
  result = "digraph {\n"
  # Loop variable renamed from `deps` so it no longer shadows this function.
  for p, pdeps in list(dot.items()):
    if selected and not p in selected:
      continue
    result += " \"%s\";\n" % p
    for d in pdeps:
      result += " \"%s\" -> \"%s\";\n" % (p, d)
  result += "}\n"
  # mode="wt" is required: `result` is a str, and NamedTemporaryFile's
  # default "w+b" mode would raise TypeError on Python 3.
  with NamedTemporaryFile(delete=False, mode="wt") as fp:
    fp.write(result)
  try:
    if transitiveRed:
      execute(
          format(
              "tred %(dotFile)s > %(dotFile)s.0 && mv %(dotFile)s.0 %(dotFile)s",
              dotFile=fp.name))
    execute(["dot", fp.name, "-Tpdf", "-o", outFile])
  except Exception as e:
    error(
        format("Error generating dependencies with dot: %(type)s: %(msg)s",
               type=type(e).__name__, msg=str(e)))
  else:
    info(
        format("Dependencies graph generated: %(outFile)s", outFile=outFile))
  remove(fp.name)
  return True