def fetchBuildconf(cfg):
    """Clone or update the buildconf repository in <devDir>/autoproj.

    If the checkout already exists and cfg["update"] is set, a ``git pull``
    is run; otherwise the repository at cfg["buildconfAddress"] (optionally
    on cfg["buildconfBranch"]) is cloned.  Failures are recorded in
    cfg["errors"] and reported via the ``c`` print helpers.
    """
    if os.path.isdir(cfg["devDir"] + "/autoproj"):
        if cfg["update"]:
            c.printNormal(" Update buildconf.")
            out, err, r = execute.do(
                ["git", "-C", cfg["devDir"] + "/autoproj", "pull"])
            if r != 0:
                cfg["errors"].append("update: buildconf")
                c.printError("\ncan't update buildconf:\n" + err)
    else:
        address = cfg["buildconfAddress"]
        if len(address) == 0:
            c.printError("no address given")
            return
        branch = cfg["buildconfBranch"]
        c.printNormal(" Fetching \"" + address + branch + "\" into " +
                      cfg["devDir"] + "/autoproj")
        command = [
            "git", "clone", "-o", "autobuild", address,
            cfg["devDir"] + "/autoproj"
        ]
        if len(branch) > 0:
            command.append("-b")
            command.append(branch)
        # fix: record a failed clone instead of silently discarding the
        # result (the update path above already reports its failures)
        out, err, r = execute.do(command)
        if r != 0:
            cfg["errors"].append("clone: buildconf")
            c.printError("\ncan't clone buildconf:\n" + err)
def clonePackageSet(cfg, git, realPath, path, cloned, deps):
    """Clone the package-set repository ``git`` into ``realPath`` and link
    it below ``path + "remotes/"`` under the name from its source.yml.

    New imports found in source.yml are appended to ``deps`` (for the
    caller to process); the set's name is appended to ``cloned``.
    On clone failure an entry is added to cfg["errors"] and the function
    returns early.
    """
    # clone in tmp folder
    c.printNormal(" Fetching: " + git)
    out, err, r = execute.do(
        ["git", "clone", "-o", "autobuild", git, realPath])
    if not os.path.isdir(realPath + "/.git"):
        c.printNormal(out)
        c.printError(err)
        cfg["errors"].append("clone: " + git)
        return
    # get the name of the remote
    with open(realPath + "/source.yml") as f:
        # fix: safe_load — source.yml is plain data; yaml.load without a
        # Loader is deprecated/unsafe in PyYAML >= 5
        info = yaml.safe_load(f)
    os.system("ln -s " + realPath + " " + path + "remotes/" + info["name"])
    if "imports" in info and info["imports"]:
        for i in info["imports"]:
            key, value = list(i.items())[0]
            # canonical checkout location of the imported package set
            depPath = (cfg["devDir"] + "/.autoproj/remotes/" + key + "__" +
                       value.strip().replace("/", "_").replace("-", "_") +
                       "_git")
            if i not in deps and not os.path.isdir(depPath):
                deps.append(i)
    # store the info which package sets we have cloned already
    cloned.append(info["name"])
def show_log_():
    """Print the configure and build logs of every package whose log file
    name matches sys.argv[2] (with "/" replaced by "_")."""
    logDir = os.path.join(cfg["devDir"], "autoproj/bob/logs")
    needle = sys.argv[2].replace("/", "_")
    # base names of all matching "<pkg>_configure.txt" log files
    names = [entry[:-14] for entry in os.listdir(logDir)
             if entry[-13:] == "configure.txt" and needle in entry]
    for name in names:
        for title, suffix in (("configure log:", "_configure.txt"),
                              ("build log:", "_build.txt")):
            c.printWarning(title)
            logFile = cfg["devDir"] + "/autoproj/bob/logs/" + name + suffix
            with open(logFile) as f:
                for line in f:
                    # highlight lines that mention an error
                    if "error" in line:
                        c.printError(line.strip())
                    else:
                        c.printNormal(line.strip())
def do(cmd, cfg=None, errorString=None, path=None, logFile=None):
    """Run ``cmd`` through the shell and return (stdout, stderr, returncode).

    cmd         -- command as a list of strings; joined with spaces and run
                   with shell=True, so arguments are NOT shell-escaped.
    cfg         -- optional bob configuration dict (needed for logging and
                   error bookkeeping).
    errorString -- if given together with cfg, appended to cfg["errors"]
                   whenever the command wrote anything to stderr.
    path        -- working directory for the command.
    logFile     -- if given together with cfg, stdout/stderr are redirected
                   into <devDir>/autoproj/bob/logs/<logFile>; the returned
                   out/err are then empty strings.
    """
    toLog = bool(cfg and logFile)
    outpipe = subprocess.PIPE
    if toLog:
        logPath = cfg["devDir"] + "/autoproj/bob/logs"
        if not os.path.isdir(logPath):
            makeDir(logPath)
        outpipe = open(logPath + "/" + logFile, "w")
    try:
        p = subprocess.Popen(" ".join(cmd), stdout=outpipe, stderr=outpipe,
                             cwd=path, shell=True)
    finally:
        # fix: close our copy of the log handle even if Popen raises; the
        # child process holds its own duplicated descriptor
        if toLog:
            outpipe.close()
    out = ""
    err = ""
    if toLog:
        p.wait()
    else:
        out, err = p.communicate()
        # any stderr output counts as an error condition here, regardless
        # of the exit code
        if len(err) > 0:
            if cfg and errorString is not None:
                c.printError(err)
                cfg["errors"].append(errorString)
    return out, err, p.returncode
def show_log_():
    """Print the configure and build logs of the package named in
    sys.argv[2] ("/" in the package name maps to "_" in the log file)."""
    package = sys.argv[2]
    base = cfg["devDir"] + "/autoproj/bob/logs/" + package.replace("/", "_")
    _print_log("configure log:", base + "_configure.txt")
    _print_log("build log:", base + "_build.txt")


def _print_log(title, logFile):
    """Print ``title`` and then ``logFile`` line by line, highlighting
    lines that contain "error"."""
    c.printWarning(title)
    with open(logFile) as f:
        for l in f:
            if "error" in l:
                c.printError(l.strip())
            else:
                c.printNormal(l.strip())
def clonePackage(cfg, package, server, gitPackage, branch):
    """Clone or update one package checkout below cfg["devDir"].

    package    -- layout name; a trailing ".*" is replaced by the repo's
                  base name taken from ``gitPackage``.
    server     -- URL prefix; ``server + gitPackage`` is the clone source.
    branch     -- optional branch to clone.
    Returns True when the package was handled (cloned, updated, or a
    missing-checkout error recorded); False when it was already processed
    in this run.
    """
    clonePath = package
    if package[-2:] == ".*":
        clonePath = ("/".join(package.split("/")[:-1]) + "/" +
                     gitPackage.split("/")[1].split(".")[0])
    # handle every package only once per run
    if package in cfg["updated"]:
        return False
    cfg["updated"].append(package)
    clonePath = cfg["devDir"] + "/" + clonePath
    if os.path.isdir(clonePath):
        if cfg["update"]:
            # fix: py2 `print "...",` is a syntax error under python3;
            # sys.stdout.write keeps the "no newline" behavior everywhere
            sys.stdout.write("Updating " + clonePath + " ... " + c.END)
            # todo: check branch
            execute.do(["git", "-C", clonePath, "pull"], cfg)
            c.printWarning("done")
        return True
    if not cfg["fetch"]:
        c.printError(package + " is not cloned, call bob-fetch to update or clone the packages.")
        cfg["errors"].append("missing: " + package)
        c.printError("error")
        return True
    sys.stdout.write("Fetching " + clonePath + " ... " + c.END)
    sys.stdout.flush()
    cmd = ["git", "clone", "-o", "autobuild", "-q",
           server + gitPackage, clonePath]
    if branch:
        cmd += ["-b", branch]
    execute.do(cmd, cfg)
    # apply patch if we have one
    patch = cfg["pyScriptDir"] + "/patches/" + package.split("/")[-1] + ".patch"
    sys.stdout.write("check for patches ")
    if os.path.exists(patch):
        cmd = ["patch", "-N", "-p0", "-d", clonePath, "-i", patch]
        print(" ".join(cmd))
        out, err, r = execute.do(cmd)
        print(out)
        print(err)
        print(r)
    c.printWarning("done")
    return True
def setupEnv(cfg, update=False):
    """Write the shell environment setup (env.sh or bobenv.sh) and the
    cmake_debug / cmake_release wrapper scripts in <devDir>/install/bin.

    If a ``cmake_debug`` is already in PATH it must belong to this dev
    folder, otherwise the function aborts with an error (another env.sh
    was probably sourced).  With update=False an existing env.sh is only
    sourced, not rewritten.
    """

    def writeAliases(f):
        # pybob command aliases shared by env.sh and bobenv.sh
        f.write("alias bob='${MARS_SCRIPT_DIR}/pybob.py'\n")
        f.write("alias bob-bootstrap='${MARS_SCRIPT_DIR}/pybob.py bootstrap'\n")
        f.write("alias bob-install='${MARS_SCRIPT_DIR}/pybob.py install'\n")
        f.write("alias bob-rebuild='${MARS_SCRIPT_DIR}/pybob.py rebuild'\n")
        f.write("alias bob-build='${MARS_SCRIPT_DIR}/pybob.py'\n")
        f.write("alias bob-diff='${MARS_SCRIPT_DIR}/pybob.py diff'\n")
        f.write("alias bob-list='${MARS_SCRIPT_DIR}/pybob.py list'\n")
        f.write("alias bob-fetch='${MARS_SCRIPT_DIR}/pybob.py fetch'\n")
        f.write("alias bob-show-log='${MARS_SCRIPT_DIR}/pybob.py show-log'\n")
        f.write(". ${MARS_SCRIPT_DIR}/auto_complete.sh\n")

    global os
    prefix = cfg["devDir"] + "/install"
    if system() == "Windows":
        # convert a drive letter ("C:...") into an msys-style path ("/C...")
        if prefix[1] == ':':
            prefix = prefix.replace(prefix[:2], "/" + prefix[0])
    prefix_bin = prefix + "/bin"
    prefix_lib = prefix + "/lib"
    prefix_pkg = prefix_lib + "/pkgconfig"
    prefix_config = prefix + "/configuration"
    platform = system()
    # create env.sh
    p = subprocess.Popen("which cmake_debug", stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=True)
    out, err = p.communicate()
    cmakeDebugPath = out.strip()
    if len(cmakeDebugPath) > 0:
        # an env.sh is already sourced; check that it is ours
        expectPath = cfg["devDir"] + "/install/bin/cmake_debug"
        if platform == "Windows":
            c.printWarning("cmake_debug path check is not working on Windows currently (please always ensure that you only sourced the env.sh in your current dev folder!")
        else:
            if cmakeDebugPath != expectPath:
                c.printError('"cmake_debug" found in wrong folder.')
                c.printError('Found: ' + cmakeDebugPath)
                c.printError('Expected: ' + expectPath)
                c.printError('Maybe you already sourced an "env.sh" from a different "dev" folder?')
                return
        if not update:
            return
    if not update:
        if os.path.isfile(cfg["devDir"] + "/env.sh"):
            source(cfg["devDir"] + "/env.sh")
    p = subprocess.Popen("which autoproj", stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=True)
    out, err = p.communicate()
    aPath = out.strip()
    if len(aPath) > 0:
        # autoproj manages env.sh itself; only add the bob extras
        with open(cfg["devDir"] + "/bobenv.sh", "w") as f:
            f.write("#! /bin/sh\n")
            f.write(". env.sh\n")
            f.write('export MARS_SCRIPT_DIR="' + cfg["pyScriptDir"] + '"\n')
            writeAliases(f)
    else:
        with open(cfg["devDir"] + "/env.sh", "w") as f:
            f.write("#! /bin/sh\n")
            f.write('export AUTOPROJ_CURRENT_ROOT="' + cfg["devDir"] + '"\n')
            f.write('export MARS_SCRIPT_DIR="' + cfg["pyScriptDir"] + '"\n')
            f.write('export PATH="$PATH:' + prefix_bin + '"\n')
            if platform == "Darwin":
                f.write('export DYLD_LIBRARY_PATH="' + prefix_lib + ':$DYLD_LIBRARY_PATH"\n')
            elif platform == "Linux":
                # fix: was appending $DYLD_LIBRARY_PATH (the Darwin
                # variable) to LD_LIBRARY_PATH on Linux
                f.write('export LD_LIBRARY_PATH="' + prefix_lib + ':$LD_LIBRARY_PATH"\n')
            else:
                f.write('export PATH="' + prefix_lib + ':$PATH"\n')
            f.write('export ROCK_CONFIGURATION_PATH="' + prefix_config + '"\n')
            f.write('export PYTHONPATH="' + prefix_lib + '/python2.7/site-packages:$PYTHONPATH"\n')
            # todo: handle python path
            f.write('if [ x${PKG_CONFIG_PATH} = "x" ]; then\n')
            f.write('  export PKG_CONFIG_PATH="' + prefix_pkg + '"\n')
            f.write('else\n')
            f.write('  export PKG_CONFIG_PATH="' + prefix_pkg + ':$PKG_CONFIG_PATH"\n')
            f.write('fi\n')
            writeAliases(f)
    execute.makeDir(cfg["devDir"] + "/install/bin")
    options = "-DROCK_TEST_ENABLED=OFF"
    if "autoprojEnv" not in cfg or not cfg["autoprojEnv"]:
        options += " -DBINDINGS_RUBY=OFF"
    with open(cfg["devDir"] + "/install/bin/cmake_debug", "w") as f:
        f.write("#!/bin/bash\n")
        # fix: join options with an explicit space — the old
        # `options+"-DCMAKE_..."` fused the flags into "OFF-DCMAKE_..."
        # whenever cfg["autoprojEnv"] was set
        if platform == "Windows":
            f.write("cmake .. " + options + " -DCMAKE_INSTALL_PREFIX=" + cfg["devDir"] + "/install -DCMAKE_BUILD_TYPE=DEBUG -G \"MSYS Makefiles\" $@\n")
        else:
            f.write("cmake .. " + options + " -DCMAKE_INSTALL_PREFIX=" + cfg["devDir"] + "/install -DCMAKE_BUILD_TYPE=DEBUG $@\n")
    with open(cfg["devDir"] + "/install/bin/cmake_release", "w") as f:
        f.write("#!/bin/bash\n")
        if platform == "Windows":
            f.write("cmake .. " + options + " -DCMAKE_INSTALL_PREFIX=" + cfg["devDir"] + "/install -DCMAKE_BUILD_TYPE=RELEASE -G \"MSYS Makefiles\" $@\n")
        else:
            f.write("cmake .. " + options + " -DCMAKE_INSTALL_PREFIX=" + cfg["devDir"] + "/install -DCMAKE_BUILD_TYPE=RELEASE $@\n")
    cmd = ["chmod", "+x", cfg["devDir"] + "/install/bin/cmake_debug"]
    execute.simpleExecute(cmd)
    cmd = ["chmod", "+x", cfg["devDir"] + "/install/bin/cmake_release"]
    execute.simpleExecute(cmd)
    source(cfg["devDir"] + "/env.sh")
def updatePackageSets(cfg):
    """Clone or update every package set referenced by the autoproj
    manifest, process transitively imported package sets, and finally
    write autoproj/bob/packages.txt and packages.yml as a package-name
    cache to speed up later pybob calls.
    """

    def remotePath(key, value):
        # canonical checkout folder of a package-set remote below .autoproj
        return (cfg["devDir"] + "/.autoproj/remotes/" + key + "__" +
                value.strip().replace("/", "_").replace("-", "_") + "_git")

    # the server configuration are handled in the init.rb for autoproj
    setupCfg(cfg)
    path = cfg["devDir"] + "/autoproj/"
    execute.makeDir(path + "remotes")
    execute.makeDir(cfg["devDir"] + "/.autoproj/remotes")
    cloned = []
    deps = []
    with open(path + "manifest") as f:
        # fix: safe_load — yaml.load without a Loader is deprecated/unsafe
        manifest = yaml.safe_load(f)
    for packageSet in manifest["package_sets"]:
        key, value = list(packageSet.items())[0]
        realPath = remotePath(key, value)
        if not os.path.isdir(realPath):
            if key == "url":
                clonePackageSet(cfg, value.strip(), realPath, path,
                                cloned, deps)
            else:
                clonePackageSet(cfg, cfg["server"][key] + value.strip() +
                                ".git", realPath, path, cloned, deps)
    # update remotes that are not actually cloned
    for d in os.listdir(path + "remotes"):
        if os.path.isdir(path + "remotes/" + d):
            if d not in cloned:
                if cfg["update"]:
                    c.printNormal(" Updating: " + d)
                    out, err, r = execute.do(
                        ["git", "-C", path + "remotes/" + d, "pull"])
                    if r != 0:
                        cfg["errors"].append("update: " + d)
                        c.printError("\ncan't update package set \"" + d +
                                     "\":\n" + err)
            if d not in cloned:
                with open(path + "remotes/" + d + "/source.yml") as f:
                    info = yaml.safe_load(f)
                if "imports" in info and info["imports"]:
                    for i in info["imports"]:
                        key, value = list(i.items())[0]
                        realPath = remotePath(key, value)
                        if i not in deps and not os.path.isdir(realPath):
                            deps.append(i)
    # now handle deps
    while len(deps) > 0:
        packageSet = deps.pop(0)
        key, value = list(packageSet.items())[0]
        realPath = remotePath(key, value)
        clonePackageSet(cfg, cfg["server"][key] + value.strip() + ".git",
                        realPath, path, cloned, deps)
    # last step: write all packages into a file to speed up pybob usage
    packages, wildcards = listPackages(cfg)
    pDict = {}
    with open(path + "/bob/packages.txt", "wb") as f:
        for p in packages:
            # p is (name, alias); prefer the alias when present
            if len(p[1]) > 0:
                if sys.version_info.major <= 2:
                    f.write(p[1] + "\n")
                else:
                    f.write(bytes(p[1] + "\n", "utf-8"))
                pDict[p[1]] = p[0]
            else:
                if sys.version_info.major <= 2:
                    f.write(p[0] + "\n")
                else:
                    f.write(bytes(p[0] + "\n", "utf-8"))
                pDict[p[0]] = p[0]
    for p in wildcards:
        if len(p[1]) > 0:
            pDict[p[1]] = p[0]
        else:
            pDict[p[0]] = p[0]
    with open(path + "/bob/packages.yml", "w") as f:
        yaml.dump(pDict, f)
def fetchPackage(cfg, package, layout_packages):
    """Check / fetch a single package, dispatching on its kind.

    Order of dispatch: ignored packages, osdeps, explicit fetch overrides,
    wildcard matches (recursing into each match), remote package-set
    folders (recursing into contained packages), and finally a plain git
    clone via clonePackage().  Successfully handled packages are appended
    to layout_packages; failures are appended to cfg["errors"].
    Returns True on success, False on failure.
    NOTE(review): the missing-server path below does a bare ``return``
    (None) instead of False — callers treating the result as a bool see
    it as falsy, but this looks unintentional; confirm before relying on it.
    """
    print("Check: " + package + " ... " + c.END, end="")
    sys.stdout.flush()
    setupCfg(cfg)
    # explicitly ignored packages count as done
    if package in cfg["ignorePackages"]:  # or "orogen" in package:
        c.printWarning("done")
        return True
    # operating-system dependencies: run the registered installer callback
    if package in cfg["osdeps"]:
        if cfg["fetch"]:
            # entry is (callback, altName?) — pass the alternative name
            # when one is configured
            if len(cfg["osdeps"][package]) > 1:
                cfg["osdeps"][package][0](cfg, cfg["osdeps"][package][1])
            else:
                cfg["osdeps"][package][0](cfg, package)
        c.printWarning("done")
        return True
    # per-package override with its own fetch/check hooks
    if package in cfg["overrides"] and "fetch" in cfg["overrides"][package]:
        le = len(cfg["errors"])
        if cfg["fetch"]:
            cfg["overrides"][package]["fetch"](cfg)
        else:
            cfg["overrides"][package]["check"](cfg)
        if len(cfg["errors"]) == le:
            layout_packages.append(package)
            c.printWarning("done")
            return True
        else:
            cfg["errors"].append("missing: " + package)
            c.printError("error")
            return False
    path = cfg["devDir"] + "/autoproj/remotes/"
    # all known package names this request is a substring of
    matches = []
    for key, value in cfg["packages"].items():
        if package in key and key not in layout_packages:
            matches.append(key)
    if package not in cfg["packages"]:
        # not a known package itself: treat as a pattern and recurse into
        # every match; fail if any match fails
        result = True
        for match in matches:
            if not fetchPackage(cfg, match, layout_packages):
                result = False
        return result
    elif package == cfg["packages"][package]:
        # package maps to itself: it is a package-set folder; fetch every
        # concrete (non-variable, non-wildcard) package inside it
        info = []
        print("\n ", end="")
        if getPackageInfoFromRemoteFolder(cfg, package, path + package, info):
            le = len(cfg["errors"])
            endM = True
            for i in info:
                if "$" not in i["gitPackage"]:
                    if "*" not in i["package"]:
                        fetchPackage(cfg, i["package"], layout_packages)
            if len(cfg["errors"]) > le:
                if endM:
                    c.printError("error")
                return False
            if endM:
                c.printWarning("done")
            return True
    else:
        # ordinary package: resolve server/branch (with overrides) and clone
        info = {}
        if getPackageInfo(cfg, package, info):
            endM = True
            le = len(cfg["errors"])
            branch = None
            if not "server" in info:
                cfg["errors"].append("fetch: " + package)
                return
            server = info["server"]
            server2 = info["gitPackage"]
            if "branch" in info:
                branch = info["branch"]
            if package in cfg["overrides"]:
                # exact-name override wins over regex overrides
                value = cfg["overrides"][package]
                if "branch" in value:
                    branch = value["branch"]
                if "url" in value:
                    server = value["url"]
                    server2 = ""
            else:
                # regex overrides must match the full package name
                for key, value in cfg["overrides"].items():
                    r = re.compile(key)
                    m = r.match(package)
                    if m and m.group() == package:
                        if "branch" in value:
                            branch = value["branch"]
                        if "url" in value:
                            server = value["url"]
                            server2 = ""
            if "basename" in info:
                # clonePackage returns True when it did actual work; in
                # that case it already printed the closing status
                if clonePackage(cfg, package, server, server2, branch):
                    endM = False
            else:
                if "server" in info:
                    if clonePackage(cfg, info["package"], server, server2,
                                    branch):
                        endM = False
            layout_packages.append(package)
            if len(cfg["errors"]) > le:
                if endM:
                    c.printError("error")
                return False
            if endM:
                c.printWarning("done")
            return True
    # fall-through: package info could not be determined
    cfg["errors"].append("fetch: " + package)
    c.printError("error")
    return False
def printErrors():
    """Print every message collected in the global cfg["errors"] list;
    prints nothing when no errors were recorded."""
    errors = cfg["errors"]
    if not errors:
        return
    c.printError("\nErrors:")
    for message in errors:
        c.printError(" - " + message)
def install_():
    """Fetch the requested packages (all, or sys.argv[2]), resolve their
    dependencies, and install them in dependency order.

    Packages whose dependencies are still pending are deferred to the next
    round of the while loop; within a round, ready packages are installed
    either sequentially or on threads (cfg["multiprocessing"]).
    NOTE(review): unlike the newer variant of this function, there is no
    dependency-cycle detection here — a cycle makes the while loop spin
    forever; confirm which variant is actually in use.
    """
    global cfg
    layout_packages = []
    cfg["update"] = False
    if len(sys.argv) < 3:
        buildconf.fetchPackages(cfg, layout_packages)
    else:
        buildconf.fetchPackage(cfg, sys.argv[2], layout_packages)
    deps = []
    checked = []
    if cfg["checkDeps"]:
        for p in layout_packages:
            bob_package.getDeps(cfg, p, deps, checked)
    # deps is ordered leaves-last; reverse so dependencies install first
    toInstall = []
    for d in deps[::-1]:
        if d not in toInstall:
            toInstall.append(d)
    for p in layout_packages:
        if p not in toInstall:
            toInstall.append(p)
    while len(toInstall) > 0:
        threads = []
        jobs = []
        iList = list(toInstall)
        toInstall = []
        for p in iList:
            # defer p while any of its dependencies is still in this round
            wait = False
            if p in cfg["deps"]:
                for d in cfg["deps"][p]:
                    if d in iList:
                        wait = True
                        break
            if not wait:
                jobs.append(p)
                if p in cfg["overrides"] and "install" in cfg["overrides"][p]:
                    # custom install hook from the overrides table
                    if cfg["multiprocessing"]:
                        threads.append(Thread(target=cfg["overrides"][p]["install"], args=(cfg,)))
                    else:
                        c.printNormal("Install: "+p)
                        le = len(cfg["errors"])
                        cfg["overrides"][p]["install"](cfg)
                        if len(cfg["errors"]) <= le:
                            c.printWarning("done")
                        else:
                            c.printError("error")
                elif p in cfg["osdeps"]:
                    # os deps are installed in fetch phase
                    continue
                else:
                    if cfg["multiprocessing"]:
                        threads.append(Thread(target=bob_package.installPackage, args=(cfg, p)))
                    else:
                        c.printNormal("Install: "+p)
                        le = len(cfg["errors"])
                        bob_package.installPackage(cfg, p)
                        if len(cfg["errors"]) <= le:
                            c.printWarning("done")
                        else:
                            c.printError("error")
            else:
                toInstall.append(p)
        if cfg["multiprocessing"]:
            c.printBold("Install: "+str(jobs))
            le = len(cfg["errors"])
            for t in threads:
                t.start()
            for t in threads:
                t.join()
            if len(cfg["errors"]) > le:
                # no-op placeholder: thread errors were already appended to
                # cfg["errors"]; nothing extra is done here
                foo = ""
def setupEnv(cfg, update=False):
    """Write the shell environment setup (env.sh or bobenv.sh) plus the
    amake / cmake_debug / cmake_release wrapper scripts in
    <devDir>/install/bin.

    If a ``cmake_debug`` is already in PATH it must belong to this dev
    folder, otherwise the function aborts with an error (another env.sh
    was probably sourced).  With update=False an existing env.sh is only
    sourced, not rewritten.
    """
    global os
    prefix = cfg["devDir"] + "/install"
    if system() == "Windows":
        # convert a drive letter ("C:...") into an msys-style path ("/C...")
        if prefix[1] == ':':
            prefix = prefix.replace(prefix[:2], "/" + prefix[0])
    prefix_bin = prefix + "/bin"
    prefix_lib = prefix + "/lib"
    prefix_pkg = prefix_lib + "/pkgconfig"
    pythonpath = prefix_lib + "/python%d.%d/site-packages" % (
        sys.version_info.major, sys.version_info.minor)
    platform = system()
    if platform == "Windows":
        # todo: make this more generic
        pythonpath = "/mingw64/lib/python2.7:/mingw64/lib/python2.7/plat-win32:/mingw64/lib/python2.7/lib-tk:/mingw64/lib/python2.7/lib-dynload:/mingw64/lib/python2.7/site-packages:" + pythonpath
    elif platform == "Linux":
        # multiarch install locations used by some packages
        prefix_lib += ":" + prefix + "/lib/x86_64-linux-gnu"
        prefix_pkg += ":" + prefix + "/lib/x86_64-linux-gnu/pkgconfig"
    prefix_config = prefix + "/configuration"
    # create env.sh
    p = subprocess.Popen("which cmake_debug", stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=True)
    out, err = p.communicate()
    cmakeDebugPath = out.strip()
    if len(cmakeDebugPath) > 0:
        # an env.sh is already sourced; check that it is ours
        expectPath = cfg["devDir"] + "/install/bin/cmake_debug"
        if platform == "Windows":
            c.printWarning("cmake_debug path check is not working on Windows currently (please always ensure that you only sourced the env.sh in your current dev folder!")
        else:
            if cmakeDebugPath.decode("utf-8") != expectPath:
                c.printError('"cmake_debug" found in wrong folder.')
                c.printError('Found: ' + cmakeDebugPath.decode("utf-8"))
                c.printError('Expected: ' + expectPath)
                c.printError('Maybe you already sourced an "env.sh" from a different "dev" folder?')
                return
        if not update:
            return
    if not update:
        if os.path.isfile(cfg["devDir"] + "/env.sh"):
            source(cfg["devDir"] + "/env.sh")
    p = subprocess.Popen("which autoproj", stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=True)
    out, err = p.communicate()
    aPath = out.strip()
    if len(aPath) > 0:
        # autoproj manages env.sh itself; only add the bob extras
        with open(cfg["devDir"] + "/bobenv.sh", "w") as f:
            f.write("#! /bin/bash\n")
            f.write(". env.sh\n")
            f.write('export MARS_SCRIPT_DIR="' + cfg["pyScriptDir"] + '"\n')
            _make_pybob_aliases(f)
    else:
        with open(cfg["devDir"] + "/env.sh", "w") as f:
            f.write("#! /bin/bash\n")
            f.write('export AUTOPROJ_CURRENT_ROOT="' + cfg["devDir"] + '"\n')
            f.write('if [ x${CMAKE_PREFIX_PATH} = "x" ]; then\n')
            f.write('  export CMAKE_PREFIX_PATH="' + cfg["devDir"] + '/install"\n')
            f.write('else\n')
            f.write('  export CMAKE_PREFIX_PATH="' + cfg["devDir"] + '/install:$CMAKE_PREFIX_PATH"\n')
            f.write('fi\n')
            f.write('export MARS_SCRIPT_DIR="' + cfg["pyScriptDir"] + '"\n')
            f.write('export PATH="$PATH:' + prefix_bin + '"\n')
            if platform == "Darwin":
                f.write('export DYLD_LIBRARY_PATH="' + prefix_lib + ':$DYLD_LIBRARY_PATH"\n')
                f.write('export MYLD_LIBRARY_PATH="$DYLD_LIBRARY_PATH"\n')
            elif platform == "Linux":
                f.write('export LD_LIBRARY_PATH="' + prefix_lib + ':$LD_LIBRARY_PATH"\n')
                f.write('export CXXFLAGS="-std=c++11"\n')
            else:
                f.write('export PATH="' + prefix_lib + ':$PATH"\n')
            f.write('export ROCK_CONFIGURATION_PATH="' + prefix_config + '"\n')
            f.write('export PYTHONPATH="' + pythonpath + ':$PYTHONPATH"\n')
            f.write('if [ x${PKG_CONFIG_PATH} = "x" ]; then\n')
            f.write('  export PKG_CONFIG_PATH="' + prefix_pkg + '"\n')
            f.write('else\n')
            f.write('  export PKG_CONFIG_PATH="' + prefix_pkg + ':$PKG_CONFIG_PATH"\n')
            f.write('fi\n')
            _make_pybob_aliases(f)
    execute.makeDir(cfg["devDir"] + "/install/bin")
    if len(aPath) == 0:
        # "amake" convenience wrapper (only without a real autoproj)
        with open(cfg["devDir"] + "/install/bin/amake", "w") as f:
            f.write("#!/bin/bash\n")
            f.write("${AUTOPROJ_CURRENT_ROOT}/pybob/pybob.py install $@\n")
        cmd = ["chmod", "+x", cfg["devDir"] + "/install/bin/amake"]
        execute.simpleExecute(cmd)
    options = "-DROCK_TEST_ENABLED=OFF"
    if "autoprojEnv" not in cfg or not cfg["autoprojEnv"]:
        options += " -DBINDINGS_RUBY=OFF"
    with open(cfg["devDir"] + "/install/bin/cmake_debug", "w") as f:
        f.write("#!/bin/bash\n")
        # fix: join options with an explicit space — the old
        # `options+"-DCMAKE_..."` fused the flags into "OFF-DCMAKE_..."
        # whenever cfg["autoprojEnv"] was set
        if platform == "Windows":
            f.write("cmake .. " + options + " -DCMAKE_INSTALL_PREFIX=$AUTOPROJ_CURRENT_ROOT/install -DCMAKE_BUILD_TYPE=DEBUG -G \"MSYS Makefiles\" $@\n")
        else:
            f.write("cmake .. " + options + " -DCMAKE_INSTALL_PREFIX=$AUTOPROJ_CURRENT_ROOT/install -DCMAKE_BUILD_TYPE=DEBUG $@\n")
    with open(cfg["devDir"] + "/install/bin/cmake_release", "w") as f:
        f.write("#!/bin/bash\n")
        if platform == "Windows":
            f.write("cmake .. " + options + " -DCMAKE_INSTALL_PREFIX=$AUTOPROJ_CURRENT_ROOT/install -DCMAKE_BUILD_TYPE=RELEASE -G \"MSYS Makefiles\" $@\n")
        else:
            f.write("cmake .. " + options + " -DCMAKE_INSTALL_PREFIX=$AUTOPROJ_CURRENT_ROOT/install -DCMAKE_BUILD_TYPE=RELEASE $@\n")
    cmd = ["chmod", "+x", cfg["devDir"] + "/install/bin/cmake_debug"]
    execute.simpleExecute(cmd)
    cmd = ["chmod", "+x", cfg["devDir"] + "/install/bin/cmake_release"]
    execute.simpleExecute(cmd)
    source(cfg["devDir"] + "/env.sh")
def install_():
    """Fetch the requested packages and install them in dependency order.

    Target selection: with no package argument (or only filter flags
    "-n"/"-k"), walk upwards from the cwd looking for a manifest.xml and
    install that package, falling back to the whole buildconf layout;
    with a package argument, prefer a matching local checkout, falling
    back to fetching by name.

    Packages whose dependencies are still pending are deferred to the
    next round of the while loop; a round that makes no progress means a
    dependency cycle, which is reported and aborts the program.
    """
    global cfg
    layout_packages = []
    cfg["update"] = False
    filterArgs = ["-n", "-k"]
    if len(sys.argv) < 3 or sys.argv[2] in filterArgs:
        # search path upwards for a manifest.xml
        # if not found build manifest from buildconf
        pathToCheck = os.getcwd()
        found = False
        done = False
        while not done:
            if os.path.isfile(pathToCheck + "/manifest.xml"):
                found = True
                done = True
            elif os.path.exists(pathToCheck + "/autoproj"):
                # found dev root
                done = True
            else:
                # strip one path component and try again
                arrPath = pathToCheck.split("/")
                if len(arrPath) == 1:
                    done = True
                else:
                    pathToCheck = "/".join(arrPath[:-1])
        if found:
            layout_packages.append(os.path.relpath(pathToCheck, cfg["devDir"]))
        else:
            buildconf.fetchPackages(cfg, layout_packages)
    else:
        # explicit package argument: prefer a local checkout at that path
        pathToCheck = os.path.join(os.getcwd(), sys.argv[2])
        if os.path.isfile(pathToCheck + "/manifest.xml"):
            layout_packages.append(os.path.relpath(pathToCheck, cfg["devDir"]))
        else:
            buildconf.fetchPackage(cfg, sys.argv[2], layout_packages)
    deps = []
    checked = []
    if cfg["checkDeps"]:
        for p in layout_packages:
            bob_package.getDeps(cfg, p, deps, checked)
    # deps is ordered leaves-last; reverse so dependencies install first
    toInstall = []
    for d in deps[::-1]:
        if d not in toInstall:
            toInstall.append(d)
    for p in layout_packages:
        if p not in toInstall:
            toInstall.append(p)
    iList = []
    while len(toInstall) > 0:
        threads = []
        jobs = []
        oldList = iList
        iList = list(toInstall)
        if oldList == iList:
            # detect unresolved deps loop: no package could be scheduled
            # in the previous round, so the remaining set is cyclic
            for p in oldList:
                c.printError("detect dependency cycle:\n " + str(p))
                c.printWarning(" deps:")
                if p in cfg["deps"]:
                    for d in cfg["deps"][p]:
                        if d in iList:
                            c.printWarning(" - " + str(d))
            exit(-1)
        toInstall = []
        for p in iList:
            # defer p while any of its dependencies is still in this round
            wait = False
            if p in cfg["deps"]:
                for d in cfg["deps"][p]:
                    if d in iList:
                        wait = True
                        break
            if not wait:
                jobs.append(p)
                if p in cfg["overrides"] and "install" in cfg["overrides"][p]:
                    # custom install hook from the overrides table
                    if cfg["multiprocessing"]:
                        threads.append(
                            Thread(target=cfg["overrides"][p]["install"],
                                   args=(cfg, )))
                    else:
                        c.printNormal("Install: " + p)
                        le = len(cfg["errors"])
                        cfg["overrides"][p]["install"](cfg)
                        if len(cfg["errors"]) <= le:
                            c.printWarning("done")
                        else:
                            c.printError("error")
                elif p in cfg["osdeps"]:
                    # os deps are installed in fetch phase
                    continue
                else:
                    if cfg["multiprocessing"]:
                        threads.append(
                            Thread(target=bob_package.installPackage,
                                   args=(cfg, p)))
                    else:
                        c.printNormal("Install: " + p)
                        le = len(cfg["errors"])
                        bob_package.installPackage(cfg, p)
                        if len(cfg["errors"]) <= le:
                            c.printWarning("done")
                        else:
                            c.printError("error")
            else:
                toInstall.append(p)
        if cfg["multiprocessing"]:
            c.printBold("Install: " + str(jobs))
            le = len(cfg["errors"])
            for t in threads:
                t.start()
            for t in threads:
                t.join()
            if len(cfg["errors"]) > le:
                # no-op placeholder: thread errors were already appended to
                # cfg["errors"]; nothing extra is done here
                foo = ""