def install_protobuf(cfg):
    """Configure, build, and install external/protobuf plus its Python bindings.

    Skips all work when pkg-config already reports an installed protobuf.
    Progress is printed per step; build logs go to the bob log files.
    """
    # todo add curl dependency
    cmd = ["pkg-config", "--exists", "protobuf"]
    out, err, r = execute.do(cmd)
    if r == 0:
        print(c.BOLD + "external/protobuf" + c.WARNING + " installed" + c.END)
        sys.stdout.flush()
        return
    path = cfg["devDir"] + "/external/protobuf"
    # fix: use the canonical GNU "--prefix" spelling (consistent with
    # install_ode elsewhere in this file)
    cmd = ["./autogen.sh; ./configure --prefix=" + cfg["devDir"] + "/install"]
    out, err, r = execute.do(cmd, cfg, None, path,
                             "external_protobuf_configure.txt")
    print(c.BOLD + "external/protobuf" + c.WARNING + " configured" + c.END)
    sys.stdout.flush()
    cmd = ["make", "-C", path, "install", "-j", str(cfg["numCores"])]
    out, err, r = execute.do(cmd, cfg, None, None,
                             "external_protobuf_build.txt")
    # build the python bindings that ship inside the protobuf tree
    cmd = ["python", "setup.py", "build"]
    # print " ".join(cmd)
    out, err, r = execute.do(cmd, cfg, None, path + "/python",
                             "external_protobuf_build_python.txt")
    # NOTE(review): the python2.7 site-packages path is hard-coded — TODO
    # derive it from the target interpreter if python3 is ever supported here
    cmd = [
        "cp",
        "-r",
        "build/lib/google",
        cfg["devDir"] + "/install/lib/python2.7/site-packages",
    ]
    out, err, r = execute.do(cmd, cfg, None, path + "/python",
                             "external_protobuf_build_python_install.txt")
    print(c.BOLD + "external/protobuf" + c.WARNING + " installed" + c.END)
    sys.stdout.flush()
def fetchBuildconf(cfg):
    """Clone the buildconf repository into devDir/autoproj, or pull it.

    Reads cfg["buildconfAddress"] and cfg["buildconfBranch"]; update errors
    are appended to cfg["errors"].
    """
    if os.path.isdir(cfg["devDir"] + "/autoproj"):
        if cfg["update"]:
            c.printNormal(" Update buildconf.")
            out, err, r = execute.do(
                ["git", "-C", cfg["devDir"] + "/autoproj", "pull"])
            if r != 0:
                cfg["errors"].append("update: buildconf")
                c.printError("\ncan't update buildconf:\n" + err)
    else:
        address = cfg["buildconfAddress"]
        if len(address) == 0:
            c.printError("no address given")
            return
        branch = cfg["buildconfBranch"]
        # fix: the old message concatenated address and branch with no
        # separator ("<address><branch>"); report the branch explicitly
        branchInfo = (" (branch: " + branch + ")") if len(branch) > 0 else ""
        c.printNormal(" Fetching \"" + address + "\"" + branchInfo +
                      " into " + cfg["devDir"] + "/autoproj")
        command = [
            "git", "clone", "-o", "autobuild", address,
            cfg["devDir"] + "/autoproj"
        ]
        if len(branch) > 0:
            command.append("-b")
            command.append(branch)
        execute.do(command)
def pipInstall(cfg, pkg):
    """PIP installation command.

    Logs the requested package to autoproj/bob/logs/os_deps.txt, then
    installs it with pip (bootstrapping pip itself first on Windows/MSYS2).
    """
    platform = system()
    log = cfg["devDir"] + "/autoproj/bob/logs/os_deps.txt"
    path = cfg["devDir"] + "/autoproj/bob/logs"
    if not os.path.isdir(path):
        execute.makeDir(path)
    with open(log, "a") as f:
        f.write("@pip " + pkg + "\n")
    if platform == "Windows":
        if not os.popen('which pip').read():
            out, err, r = execute.do([
                "pacman", "--noconfirm", "-S", "mingw-w64-x86_64-python2-pip"
            ])
            # NOTE(review): returning whenever pacman printed output skips
            # the actual pip install below — confirm this is intended
            if len(out) > 0:
                return
        # fix: pip's non-interactive flag is spelled "--no-input";
        # "--noinput" is rejected by pip
        cmd = ["pip", "install", "-U", "--no-input", pkg]
    else:
        # NOTE(review): the "|" inside the argv list only works if
        # execute.do runs the command through a shell — confirm
        cmd = ["yes", "|", "pip", "install", "-U", pkg]
    print(" ".join(cmd))
    out, err, r = execute.do(cmd)
    if len(out) > 0:
        return
def patch_ode(cfg):
    """Apply the bundled ODE 0.12 patches to the unpacked source tree."""
    patch_dir = cfg["pyScriptDir"] + "/patches/"
    ode_dir = cfg["devDir"] + "/simulation/ode-0.12"
    base_cmd = ["patch", "-N", "-p0", "-d", ode_dir, "-i"]
    for patch_name in ("ode-0.12-va_end.patch",
                       "ode-0.12-lambda.patch",
                       "ode-0.12-export_joint_internals.patch",
                       "ode-0.12-abort.patch"):
        out, err, r = execute.do(base_cmd + [patch_dir + patch_name])
def clonePackage(cfg, package, server, gitPackage, branch):
    """Clone (or update) a single package checkout below cfg["devDir"].

    Returns True when the package was processed this call (updated, cloned,
    or an error was recorded) and False when it was already handled earlier
    in this run or no action was required.
    """
    clonePath = package
    # a trailing ".*" means the layout entry names a directory; derive the
    # real checkout path from the git repository name
    if package[-2:] == ".*":
        arrPackage = package.split("/")[:-1]
        p = gitPackage.split("/")[-1].split(".")[0]
        if arrPackage[-1] != p:
            arrPackage.append(p)
        clonePath = "/".join(arrPackage)
    # process every package at most once per run
    if package in cfg["updated"]:
        return False
    else:
        cfg["updated"].append(package)
    clonePath = cfg["devDir"] + "/" + clonePath
    if os.path.isdir(clonePath):
        # checkout already exists: optionally pull
        if cfg["update"]:
            print("Updating " + clonePath + " ... " + c.END, end="")
            # todo: check branch
            out, err, r = execute.do(["git", "-C", clonePath, "pull"], cfg)
            if r != 0:
                cfg["errors"].append("update: " + package)
                c.printError("\ncan't update \"" + clonePath + "\":\n" + err)
            c.printWarning("done")
            return True
    else:
        if not cfg["fetch"]:
            # checkout missing but fetching is disabled: record an error
            c.printError(
                package +
                " is not cloned, call bob-fetch to update or clone the packages."
            )
            cfg["errors"].append("missing: " + package)
            c.printError("error")
            return True
        else:
            print("Fetching " + clonePath + " ... " + c.END, end="")
            sys.stdout.flush()
            cmd = [
                "git", "clone", "-o", "autobuild", "-q", server + gitPackage,
                clonePath
            ]
            if branch:
                cmd += ["-b", branch]
            execute.do(cmd, cfg)
            # apply patch if we have one
            patch = cfg["pyScriptDir"] + "/patches/" + package.split(
                "/")[-1] + ".patch"
            print("check for patches", end="")
            if os.path.exists(patch):
                cmd = ["patch", "-N", "-p0", "-d", clonePath, "-i", patch]
                print(" ".join(cmd))
                out, err, r = execute.do(cmd)
                # NOTE(review): raw debug output of the patch run
                print(out)
                print(err)
                print(r)
            c.printWarning("done")
            return True
    return False
def patch_ode_16(cfg):
    """Apply the bundled ODE 0.16 patches to the unpacked source tree."""
    patch_dir = cfg["pyScriptDir"] + "/patches/"
    ode_dir = cfg["devDir"] + "/simulation/ode-0.16"
    base_cmd = ["patch", "-N", "-p0", "-d", ode_dir, "-i"]
    out, err, r = execute.do(base_cmd + [patch_dir + "ode-0.16-lambda.patch"])
    # this patch is used to inherit of dJoint wich is normaly not done by MARS
    #out, err, r = execute.do(cmd + [srcPath + "ode-0.12-export_joint_internals.patch"])
    out, err, r = execute.do(base_cmd + [patch_dir + "ode-0.16-abort.patch"])
    out, err, r = execute.do(
        base_cmd + [patch_dir + "ode-0.16-heightfield.patch"])
def installPackage(cfg, p, cmake_options=None):
    """Configure, build, and install one CMake package into devDir/install.

    Records configure/compile durations in cfg["profiling"] and appends the
    package to cfg["installed"] on success; failures are collected in
    cfg["errors"]. Packages in cfg["ignorePackages"] and orogen packages
    are skipped.

    cmake_options: optional list of extra cmake arguments for the first
    configure run. Fix: was a mutable default argument (shared list).
    """
    if cmake_options is None:
        cmake_options = []
    if p in cfg["ignorePackages"] or "orogen" in p:
        return
    path = cfg["devDir"] + "/" + p
    if not os.path.isdir(cfg["devDir"] + "/" + p):
        cfg["errors"].append("install: " + p + " path not found")
        return
    if not os.path.isfile(cfg["devDir"] + "/" + p + "/CMakeLists.txt"):
        print(p + c.WARNING + " skip \"no cmake package\"" + c.END)
        stdout.flush()
        return
    if cfg["rebuild"]:
        execute.do(["rm", "-rf", path + "/build"])
    start = datetime.datetime.now()
    if os.path.isdir(path + "/build"):
        # build dir already exists: re-run plain cmake on the source dir
        cmd = ["cmake", path]
    else:
        execute.makeDir(path + "/build")
        #cmd = ["cmake", "..", "-DCMAKE_INSTALL_PREFIX="+cfg["devDir"]+"/install", "-DCMAKE_BUILD_TYPE=DEBUG", "-Wno-dev"]
        # first configure goes through the cmake_<buildType> wrapper script
        cmake = "cmake_" + cfg["defBuildType"]
        cmd = [cmake] + cmake_options
    if system() == "Windows":
        cmd = ["bash"] + cmd
    out, err, r = execute.do(cmd, cfg, None, path + "/build",
                             p.replace("/", "_") + "_configure.txt")
    if r != 0:
        print(p + c.ERROR + " configure error" + c.END)
        stdout.flush()
        cfg["errors"].append("configure: " + p)
        return
    print(p + c.WARNING + " configured" + c.END)
    stdout.flush()
    end = datetime.datetime.now()
    diff1 = end - start
    start = end
    out, err, r = execute.do(
        ["make", "install", "-j",
         str(cfg["numCores"]), "-C", path + "/build"], cfg, None, None,
        p.replace("/", "_") + "_build.txt")
    if r != 0:
        print(p + c.ERROR + " build error" + c.END)
        cfg["errors"].append("build: " + p)
        return
    end = datetime.datetime.now()
    diff2 = end - start
    print(p + c.WARNING + " installed" + c.END)
    cfg["profiling"].append([p, {
        "configure time": str(diff1)
    }, {
        "compile time": str(diff2)
    }])
    cfg["installed"].append(p)
def install(cfg, pkg):
    """Standard system package manager installation command.

    Logs the dependency, checks whether it is already present (pacman query
    on Windows, pkg-config elsewhere), then installs via pacman / MacPorts /
    apt-get depending on the platform.
    """
    platform = system()
    if pkg == "":
        return
    # bootstrap tools already on the PATH need no package manager
    if pkg == "cmake":
        if os.popen('which cmake').read():
            return
    elif pkg == "pkg-config":
        if os.popen('which pkg-config').read():
            return
    log = cfg["devDir"] + "/autoproj/bob/logs/os_deps.txt"
    path = cfg["devDir"] + "/autoproj/bob/logs"
    if not os.path.isdir(path):
        execute.makeDir(path)
    with open(log, "a") as f:
        f.write(pkg + "\n")
    if platform == "Windows":
        out, err, r = execute.do(["pacman", "-Qq", "mingw-w64-x86_64-" + pkg])
        if len(out) > 0:
            return
    elif os.system('pkg-config --exists ' + pkg) == 0:
        return
    if platform == "Windows":
        if pkg == "cython":
            execute.do(
                ["pacman", "--noconfirm", "-S", "mingw-w64-x86_64-cython2"])
        execute.do(["pacman", "--noconfirm", "-S", "mingw-w64-x86_64-" + pkg])
    elif platform == "Darwin":
        pkgstr = '" ' + pkg + ' "'
        # NOTE(review): the "|" only works if execute.do uses a shell
        out, err, r = execute.do(['port', 'installed', '|', 'grep', pkgstr])
        if len(out) > len(pkg):
            return
        print(c.BOLD + "Installing os dependency: " + pkg + c.END, end="")
        execute.do(["sudo", "port", "install", pkg])
    else:
        out, err, r = execute.do(['dpkg', '-l', pkg])
        if len(err) > 5:
            # dpkg does not know the package at all -> install it
            print(c.BOLD + "Installing os dependency: " + pkg + c.END, end="")
            arrPkg = pkg.split()
            for p in arrPkg:
                os.system("sudo apt-get install -y " + p)
        else:
            # dpkg output is bytes here (split on b"\n"); fix: decode before
            # comparing with the str pkg — the old bytes-vs-str comparison
            # never matched, so half-installed packages were never reinstalled
            for line in out.split(b"\n"):
                arrLine = line.split()
                if len(arrLine) > 2 and \
                        arrLine[1].decode("utf-8", "replace") == pkg:
                    if arrLine[0] != b"ii":
                        print(c.BOLD + "Installing os dependency: " + pkg +
                              c.END, end="")
                        arrPkg = pkg.split()
                        for p in arrPkg:
                            os.system("sudo apt-get install -y " + p)
                    break
def clonePackageSet(cfg, git, realPath, path, cloned, deps):
    """Clone one autoproj package set and queue its imports.

    Clones `git` into `realPath`, symlinks it into autoproj/remotes under
    the name from its source.yml, appends unseen imports to `deps`, and
    records the set's name in `cloned`. Clone failures go to cfg["errors"].
    """
    # clone in tmp folder
    c.printNormal(" Fetching: " + git)
    out, err, r = execute.do(
        ["git", "clone", "-o", "autobuild", git, realPath])
    if not os.path.isdir(realPath + "/.git"):
        c.printNormal(out)
        c.printError(err)
        cfg["errors"].append("clone: " + git)
        return
    # get the name of the remote
    with open(realPath + "/source.yml") as f:
        # fix: yaml.load without an explicit Loader is unsafe and deprecated;
        # source.yml contains plain YAML, so safe_load suffices
        info = yaml.safe_load(f)
    #os.system("rm -rf "+path+"remotes/"+info["name"])
    os.system("ln -s " + realPath + " " + path + "remotes/" + info["name"])
    if "imports" in info and info["imports"]:
        for i in info["imports"]:
            key, value = list(i.items())[0]
            # mirror the remote-path naming scheme used by updatePackageSets
            importPath = cfg["devDir"] + "/.autoproj/remotes/" + key + \
                "__" + value.strip().replace("/", "_").replace("-", "_") + \
                "_git"
            if i not in deps and not os.path.isdir(importPath):
                deps.append(i)
    # store the info which package sets we have cloned already
    cloned.append(info["name"])
def diff_():
    """Report local git modifications for the configured packages.

    With no extra CLI argument all layout packages are checked; with
    "buildconf" the package-set remotes are checked instead; any other
    argument limits the check to that single package. Diff output is
    written to per-package log files under autoproj/bob/logs.
    """
    global cfg
    layout_packages = []
    cfg["update"] = False
    if len(sys.argv) < 3:
        buildconf.fetchPackages(cfg, layout_packages)
    else:
        if sys.argv[2] == "buildconf":
            diff_remotes()
        else:
            buildconf.fetchPackage(cfg, sys.argv[2], layout_packages)
    deps = []
    checked = []
    if cfg["checkDeps"]:
        for p in layout_packages:
            bob_package.getDeps(cfg, p, deps, checked)
    # dependencies first (reversed), then the layout packages, deduplicated
    toInstall = []
    diffs = []
    for d in deps[::-1]:
        if d not in toInstall:
            toInstall.append(d)
    for p in layout_packages:
        if p not in toInstall:
            toInstall.append(p)
    for p in toInstall:
        # skip OS dependencies, ignored/orogen packages and fetch overrides
        if p in cfg["osdeps"]:
            continue
        if p in cfg["ignorePackages"] or "orogen" in p:
            continue
        if p in cfg["overrides"] and "fetch" in cfg["overrides"][p]:
            continue
        path = cfg["devDir"] + "/" + p
        p2 = p
        # walk up the directory tree until a .git folder is found (the
        # package may live inside a larger repository)
        while not os.path.isdir(path + "/.git"):
            path = "/".join(path.split("/")[:-1])
            p2 = "/".join(p2.split("/")[:-1])
            if path == cfg["devDir"]:
                break
        if path == cfg["devDir"]:
            cfg["errors"].append("missing: git for " + p)
            continue
        # diff every repository only once even if several packages share it
        if path not in diffs:
            diffs.append(path)
            out, err, r = execute.do(
                ["git", "diff"], cfg, None,
                path)  #, p2.replace("/", "_")+"_diff.txt")
            if out:
                logFile = cfg["devDir"] + "/autoproj/bob/logs/" + p2.replace(
                    "/", "_") + "_diff.txt"
                print(p2 + ": ", end="")
                c.printWarning("has diff")
                print(" check: less " + logFile)
                sys.stdout.flush()
                with open(logFile, "w") as f:
                    f.write(out)
            else:
                print(p2 + ": ", end="")
                c.printBold("no diff")
def fetch_rbdl(cfg):
    """Clone the RBDL library into devDir/external via mercurial.

    Removes an incomplete checkout before re-cloning; a failed clone is
    recorded in cfg["errors"]. Always returns True.
    """
    path = cfg["devDir"] + "/external"
    print(c.BOLD + "Fetching " + "external/rbdl ... " + c.END, end="")
    sys.stdout.flush()  # fix: was "sys.stdout.flush" — attribute access only
    cwd = os.getcwd()
    execute.makeDir(path)
    os.chdir(path)
    if not os.path.isfile(path + "/rbdl/CMakeLists.txt"):
        if os.path.isdir(path + "/rbdl"):
            # incomplete checkout: remove and clone again
            execute.do(["rm", "-rf", "rbdl"])
        # NOTE(review): Bitbucket dropped mercurial hosting in 2020 —
        # confirm this URL still works or migrate to the git mirror
        execute.do(["hg", "clone", "https://bitbucket.org/rbdl/rbdl"])
        if not os.path.isfile("rbdl/CMakeLists.txt"):
            cfg["errors"].append("fetch: external/rbdl")
    os.chdir(cwd)
    return True
def fetch_minizip(cfg): path = cfg["devDir"]+"/external" print c.BOLD+"Fetching "+"external/minizip ... "+c.END, sys.stdout.flush cwd = os.getcwd() execute.makeDir(path) os.chdir(path) if not os.path.isfile(path+"/unzip101e.zip"): if os.path.isdir(path+"/minizip"): uninstall_minizip(cfg) execute.do(["wget", "-q", "http://www.winimage.com/zLibDll/unzip101e.zip"]) execute.do(["unzip", "unzip101e.zip", "-d", "minizip"]) if not os.path.isfile("minizip/minizip.c"): cfg["errors"].append("fetch: external/minizip") os.chdir(cwd) patch_minizip(cfg) return True
def install(cfg, pkg):
    """Install one OS dependency via the platform's package manager.

    Older (python2) variant of the installer: pacman on Windows/MSYS2,
    MacPorts on Darwin, apt-get otherwise. Does nothing when the package
    is already present.
    """
    platform = system()
    if pkg == "":
        return
    # bootstrap tools already on the PATH need no package manager
    if pkg == "cmake":
        if os.popen('which cmake').read():
            return
    elif pkg == "pkg-config":
        if os.popen('which pkg-config').read():
            return
    elif platform == "Windows":
        out, err, r = execute.do(["pacman", "-Qq", "mingw-w64-x86_64-" + pkg])
        if len(out) > 0:
            return
    elif os.system('pkg-config --exists ' + pkg) == 0:
        return
    if platform == "Windows":
        execute.do(["pacman", "--noconfirm", "-S", "mingw-w64-x86_64-" + pkg])
    elif platform == "Darwin":
        pkgstr = '" ' + pkg + ' "'
        # NOTE(review): the "|" inside the argv list only works if
        # execute.do runs the command through a shell — confirm
        out, err, r = execute.do(['port', 'installed', '|', 'grep', pkgstr])
        if len(out) > len(pkg):
            return
        print c.BOLD + "Installing os dependency: " + pkg + c.END,
        execute.do(["sudo", "port", "install", pkg])
    else:
        out, err, r = execute.do(['dpkg', '-l', pkg])
        # dpkg printed an error -> package unknown -> install it
        if len(err) > 5:
            print c.BOLD + "Installing os dependency: " + pkg + c.END,
            os.system("sudo apt-get install -y " + pkg)
def install_ode(cfg): if os.path.isfile(cfg["devDir"]+"/install/lib/pkgconfig/ode.pc"): print c.BOLD + "simulation/ode"+c.WARNING+" installed"+c.END sys.stdout.flush return path = cfg["devDir"]+"/simulation/ode" cmd = ['CPPFLAGS="-DdNODEBUG"', 'CXXFLAGS="-O2 -ffast-math -fPIC"', 'CFLAGS="-O2 -ffast-math -fPIC"', "--enable-double-precision", "--prefix="+cfg["devDir"]+"/install", "--with-drawstuff=none", "--disable-demos"] if system() == "Windows": cmd = ["bash", "configure"] + cmd else: cmd = ["./configure"] + cmd out, err, r = execute.do(cmd, cfg, None, path, "simulation_ode_configure.txt") print c.BOLD + "simulation/ode"+c.WARNING+" configured"+c.END sys.stdout.flush() if system() == "Linux": libtool = os.popen('which libtool').read() if len(libtool) > 0: execute.do(["mv", "libtool", "libtool_old"], None, None, path) execute.do(["ln", "-s", libtool, "libtool"], None, None, path) cmd = ["make", "-C", path, "install", "-j", str(cfg["numCores"])] print " ".join(cmd) out, err, r = execute.do(cmd, cfg, None, None, "simulation_ode_install.txt") print out print err print r print c.BOLD + "simulation/ode"+c.WARNING+" installed"+c.END sys.stdout.flush()
def install_protobuf(cfg): # todo add curl dependency cmd = ["pkg-config", "--exists", "protobuf"] out, err, r = execute.do(cmd) if r == 0: print c.BOLD + "external/protobuf"+c.WARNING+" installed"+c.END sys.stdout.flush return path = cfg["devDir"]+"/external/protobuf" cmd = ['./autogen.sh; ./configure -prefix='+cfg["devDir"]+'/install'] out, err, r = execute.do(cmd, cfg, None, path, "external_protobuf_configure.txt") print c.BOLD + "external/protobuf"+c.WARNING+" configured"+c.END sys.stdout.flush() cmd = ["make", "-C", path, "install", "-j", str(cfg["numCores"])] #print " ".join(cmd) out, err, r = execute.do(cmd, cfg, None, None, "external_protobuf_build.txt") #print out #print err #print r print c.BOLD + "external/protobuf"+c.WARNING+" installed"+c.END sys.stdout.flush()
def diff_remotes(): global cfg path = cfg["devDir"] + "/autoproj/remotes" for d in os.listdir(path): if os.path.isdir(path+"/"+d+"/.git"): out, err, r = execute.do(["git", "diff"], cfg, None, path+"/"+d) if out: logFile = cfg["devDir"] + "/autoproj/bob/logs/"+d.replace("/", "_")+"_diff.txt" print d+": ", c.printWarning("has diff") print " check: less " + logFile with open(logFile, "w") as f: f.write(out) else: print d+": ", c.printBold("no diff")
def fetch_sisl(cfg): path = cfg["devDir"]+"/external" print c.BOLD+"Fetching "+"external/sisl ... "+c.END, sys.stdout.flush cwd = os.getcwd() execute.makeDir(path) os.chdir(path) if not os.path.isfile(path+"/sisl-4.5.0.tar.gz"): if os.path.isdir(path+"/sisl"): execute.do(["rm", "-rf", "sisl"]) execute.do(["wget", "-q", "http://www.sintef.no/upload/IKT/9011/geometri/sisl/sisl-4.5.0.tar.gz"]) execute.do(["tar", "-xzf", "sisl-4.5.0.tar.gz"]) execute.do(["mv", "sisl-4.5.0", "sisl"]) if not os.path.isfile("sisl/CMakeLists.txt"): cfg["errors"].append("fetch: external/sisl") os.chdir(cwd) patch_sisl(cfg) return True
def fetch_ode(cfg): path = cfg["devDir"]+"/simulation" print c.BOLD+"Fetching "+"external/ode ... "+c.END, sys.stdout.flush cwd = os.getcwd() execute.makeDir(path) os.chdir(path) if not os.path.isfile(path+"/ode-0.12.tar.gz"): if os.path.isdir(path+"/ode"): uninstall_ode(cfg) execute.do(["wget", "-q", "http://sourceforge.net/projects/opende/files/ODE/0.12/ode-0.12.tar.gz"]) execute.do(["tar", "-xzf", "ode-0.12.tar.gz"]) patch_ode(cfg) execute.do(["mv", "ode-0.12", "ode"]) if not os.path.isfile("ode/ode.pc.in"): cfg["errors"].append("fetch: simulation/ode") os.chdir(cwd) cfg["installed"].append("simulation/ode") return True
def fetch_ode_16(cfg):
    """Download, unpack, and patch ODE 0.16 into devDir/simulation/ode."""
    simDir = cfg["devDir"] + "/simulation"
    print(c.BOLD + "Fetching " + "external/ode ... " + c.END, end="")
    sys.stdout.flush()
    previousDir = os.getcwd()
    execute.makeDir(simDir)
    os.chdir(simDir)
    # the downloaded tarball doubles as the "already fetched" marker
    if not os.path.isfile(simDir + "/ode-0.16.tar.gz"):
        if os.path.isdir(simDir + "/ode"):
            uninstall_ode(cfg)
        execute.do([
            "wget",
            "-q",
            "https://bitbucket.org/odedevs/ode/downloads/ode-0.16.tar.gz",
        ])
        execute.do(["tar", "-xzf", "ode-0.16.tar.gz"])
        patch_ode_16(cfg)
        execute.do(["mv", "ode-0.16", "ode"])
        if not os.path.isfile("ode/ode.pc.in"):
            cfg["errors"].append("fetch: simulation/ode")
    os.chdir(previousDir)
    cfg["installed"].append("simulation/ode")
    return True
def updatePackageSets(cfg):
    """Clone/update all autoproj package sets and cache the package list.

    Reads autoproj/manifest, clones missing package sets into
    .autoproj/remotes (following transitive imports), pulls existing ones
    when cfg["update"] is set, and finally writes packages.txt /
    packages.yml caches under autoproj/bob.
    """
    # the server configuration are handled in the init.rb for autoproj
    setupCfg(cfg)
    path = cfg["devDir"] + "/autoproj/"
    execute.makeDir(path + "remotes")
    execute.makeDir(cfg["devDir"] + "/.autoproj/remotes")
    cloned = []
    deps = []
    with open(path + "manifest") as f:
        manifest = yaml.load(f)
    for packageSet in manifest["package_sets"]:
        key, value = list(packageSet.items())[0]
        # local checkout name: <serverKey>__<repo path with / and - mapped to _>_git
        realPath = cfg[
            "devDir"] + "/.autoproj/remotes/" + key + "__" + value.strip(
            ).replace("/", "_").replace("-", "_") + "_git"
        if not os.path.isdir(realPath):
            if key == "url":
                # "url" entries carry the full clone address directly
                clonePackageSet(cfg, value.strip(), realPath, path, cloned,
                                deps)
            else:
                clonePackageSet(cfg, cfg["server"][key] + value.strip() +
                                ".git", realPath, path, cloned, deps)
    # update remotes that are not actually cloned
    for d in os.listdir(path + "remotes"):
        if os.path.isdir(path + "remotes/" + d):
            if d not in cloned:
                if cfg["update"]:
                    c.printNormal(" Updating: " + d)
                    out, err, r = execute.do(
                        ["git", "-C", path + "remotes/" + d, "pull"])
                    if r != 0:
                        cfg["errors"].append("update: " + d)
                        c.printError("\ncan't update package set \"" + d +
                                     "\":\n" + err)
                if d not in cloned:
                    # collect imports of pre-existing remotes as well
                    with open(path + "remotes/" + d + "/source.yml") as f:
                        info = yaml.load(f)
                    if "imports" in info and info["imports"]:
                        for i in info["imports"]:
                            key, value = list(i.items())[0]
                            realPath = cfg[
                                "devDir"] + "/.autoproj/remotes/" + key + "__" + value.strip(
                                ).replace("/", "_").replace("-", "_") + "_git"
                            if i not in deps and not os.path.isdir(realPath):
                                deps.append(i)
    # now handle deps
    while len(deps) > 0:
        packageSet = deps.pop(0)
        key, value = list(packageSet.items())[0]
        realPath = cfg[
            "devDir"] + "/.autoproj/remotes/" + key + "__" + value.strip(
            ).replace("/", "_").replace("-", "_") + "_git"
        clonePackageSet(cfg, cfg["server"][key] + value.strip() + ".git",
                        realPath, path, cloned, deps)
    # last step: write all packages int a file to speed up pybob usage
    packages, wildcards = listPackages(cfg)
    pDict = {}
    # packages.txt is written as bytes so the file is identical on py2/py3
    with open(path + "/bob/packages.txt", "wb") as f:
        for p in packages:
            if len(p[1]) > 0:
                if sys.version_info.major <= 2:
                    f.write(p[1] + "\n")
                else:
                    f.write(bytes(p[1] + "\n", "utf-8"))
                pDict[p[1]] = p[0]
            else:
                if sys.version_info.major <= 2:
                    f.write(p[0] + "\n")
                else:
                    f.write(bytes(p[0] + "\n", "utf-8"))
                pDict[p[0]] = p[0]
    for p in wildcards:
        if len(p[1]) > 0:
            pDict[p[1]] = p[0]
        else:
            pDict[p[0]] = p[0]
    with open(path + "/bob/packages.yml", "w") as f:
        yaml.dump(pDict, f)
def patch_sisl(cfg):
    """Apply the bundled limits patch to the unpacked SISL sources."""
    patch_file = cfg["pyScriptDir"] + "/patches/" + "sisl-limits.patch"
    external_dir = cfg["devDir"] + "/external/"
    execute.do(["patch", "-N", "-p0", "-d", external_dir, "-i", patch_file])
def patch_minizip(cfg):
    """Apply the bundled minizip patches to the unpacked sources."""
    patch_dir = cfg["pyScriptDir"] + "/patches/"
    external_dir = cfg["devDir"] + "/external/"
    base_cmd = ["patch", "-N", "-p0", "-d", external_dir, "-i"]
    for patch_name in ("minizip.patch", "minizip_unzip.patch"):
        execute.do(base_cmd + [patch_dir + patch_name])
def uninstall_ode(cfg):
    """Run "make clean" inside the ODE source tree."""
    ode_dir = cfg["devDir"] + "/simulation/ode"
    execute.do(["make", "-C", ode_dir, "clean"])
def uninstall_minizip(cfg):
    """Run "make clean" inside the minizip source tree.

    Fix: the previous working directory is now restored even when the
    clean step raises, so callers never end up in a stale cwd.
    """
    path = cfg["devDir"] + "/external"
    cwd = os.getcwd()
    os.chdir(path + "/minizip")
    try:
        execute.do(["make", "clean"])
    finally:
        os.chdir(cwd)
def uninstall_protobuf(cfg):
    """Run "make clean" inside the protobuf source tree.

    Fix: the previous working directory is now restored even when the
    clean step raises, so callers never end up in a stale cwd.
    """
    path = cfg["devDir"] + "/external"
    cwd = os.getcwd()
    os.chdir(path + "/protobuf")
    try:
        execute.do(["make", "clean"])
    finally:
        os.chdir(cwd)