def install(self, dest_dir, *args, **kwargs):
    """ Install the projects and the packages to the dest_dir

    Recognized keyword arguments (consumed here or forwarded):
      * install_tc_packages: when falsy, toolchain packages are skipped
      * prefix: sub-directory of ``dest_dir`` the packages go into
      * components: forwarded to each ``package.install``
    Remaining ``kwargs`` are forwarded to each ``project.install``.
    :return: the list of installed files
    """
    installed = list()
    projects = self.deps_solver.get_dep_projects(self.projects, self.dep_types)
    packages = self.deps_solver.get_dep_packages(self.projects, self.dep_types)
    # Pop install_tc_packages so it is not forwarded to project.install()
    if "install_tc_packages" in kwargs:
        install_tc_packages = kwargs["install_tc_packages"]
        del kwargs["install_tc_packages"]
        if not install_tc_packages:
            packages = list()
    # Compute the real path where to install the packages:
    # strip the leading "/" so os.path.join does not discard dest_dir
    prefix = kwargs.get("prefix", "/")
    prefix = prefix[1:]
    real_dest = os.path.join(dest_dir, prefix)
    components = kwargs.get("components")
    build_type = "Release"
    if projects:
        ui.info(ui.green, "the following projects")
        for project in projects:
            ui.info(ui.green, " *", ui.blue, project.name)
        if packages:
            ui.info(ui.green, "and the following packages")
            for package in packages:
                ui.info(ui.green, " *", ui.blue, package.name)
        ui.info(ui.green, "will be installed to", ui.blue, real_dest)
        runtime_only = self.dep_types == ["runtime"]
        if runtime_only:
            ui.info(ui.green, "(runtime components only)")
        # assumes all projects share the first project's build type -- TODO confirm
        build_type = projects[0].build_type
    release = build_type == "Release"
    if packages:
        ui.info(ui.green, ":: ", "installing packages")
        for i, package in enumerate(packages):
            ui.info_count(i, len(packages), ui.green, "Installing",
                          ui.blue, package.name, update_title=True)
            files = package.install(real_dest, components=components, release=release)
            installed.extend(files)
    # Remove qitest.json so that we don't append tests twice
    # when running qibuild install --with-tests twice
    qitest_json = os.path.join(dest_dir, "qitest.json")
    qisys.sh.rm(qitest_json)
    if projects:
        ui.info(ui.green, ":: ", "installing projects")
        for i, project in enumerate(projects):
            ui.info_count(i, len(projects), ui.green, "Installing",
                          ui.blue, project.name, update_title=True)
            files = project.install(dest_dir, **kwargs)
            installed.extend(files)
    return installed
def intl_update(self):
    """Regenerate the gettext catalogs with sphinx, then update the
    .po files of every configured lingua with sphinx-intl."""
    ui.info(ui.blue, "::", ui.reset, "Generating message catalogs ...")
    import sphinx
    from sphinx_intl.commands import run as sphinx_intl_run
    # First step: run sphinx-build -b gettext
    # argv[0] is only a program name for sphinx.main; "-c" points sphinx at the
    # directory holding conf.py (the build dir, see the join below)
    cmd = [sys.executable, "-c", self.build_dir, "-b", "gettext"]
    cmd.append(self.source_dir)
    locale_dir = os.path.join(self.source_dir, "locale")
    cmd.append(locale_dir)
    rc = 0
    try:
        # sphinx.main() exits via SystemExit: capture its code instead of dying
        sphinx.main(argv=cmd)
    except SystemExit as e:
        rc = e.code
    if rc != 0:
        raise SphinxBuildError(self)
    ui.info()
    # Second step: run sphinx-intl update -l <lingua> for every lingua
    ui.info(ui.blue, "::", ui.reset, "Updating .po files ...")
    for i, lingua in enumerate(self.linguas):
        ui.info_count(i, len(self.linguas), ui.blue, lingua)
        cmd = [
            "update",
            "-c", os.path.join(self.build_dir, "conf.py"),
            "--pot-dir", locale_dir,
            "--locale-dir", locale_dir,
            "--language", lingua
        ]
        sphinx_intl_run(cmd)
def install(self, dest, *args, **kwargs):
    """ Just copy the Python scripts, modules and packages

    If there are extensions written in CMake, they will be
    installed by the CMakeBuilder

    :param dest: destination directory; a ``python`` shell wrapper is
        also written there so the interpreter finds the installed libs
    """
    if not self.projects:
        return
    n = len(self.projects)
    for i, project in enumerate(self.projects):
        ui.info_count(i, n, ui.green, "Installing", ui.reset, ui.blue, project.name)
        project.install(dest)
    # Also install a python wrapper so that everything goes smoothly
    to_write = """\
#!/bin/bash
SDK_DIR="$(dirname "$(readlink -f $0 2>/dev/null)")"
export LD_LIBRARY_PATH="${SDK_DIR}/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
export PYTHONPATH="${SDK_DIR}/lib/python2.7/site-packages${PYTHONPATH:+:$PYTHONPATH}"
exec python "$@"
"""
    python_wrapper = os.path.join(dest, "python")
    with open(python_wrapper, "w") as fp:
        fp.write(to_write)
    # fixed: 0755 is a SyntaxError on Python 3; 0o755 is valid on 2.6+ and 3.x
    os.chmod(python_wrapper, 0o755)
def push_projects(git_projects, dry_run=False):
    """ Push Projects

    For each project, show the pending changes, ask for confirmation,
    then push the default branch to its remote branch.

    :param dry_run: forward ``--dry-run`` to git push
    """
    if not git_projects:
        return
    ui.info(ui.green, "Pushing ", len(git_projects), "projects")
    for i, git_project in enumerate(git_projects):
        default_branch = git_project.default_branch.name
        remote_branch = git_project.default_branch.remote_branch
        ui.info_count(i, len(git_projects), git_project.src)
        git = qisrc.git.Git(git_project.path)
        # Push to the review remote when the project uses code review
        if git_project.review:
            push_remote = git_project.review_remote
        else:
            push_remote = git_project.default_remote
        remote_ref = "%s/%s" % (push_remote.name, remote_branch)
        display_changes(git, default_branch, remote_ref)
        answer = qisys.interact.ask_yes_no("OK to push?", default=False)
        if not answer:
            # NOTE(review): `return` aborts the remaining projects too;
            # confirm `continue` was not intended here
            return
        to_push = "%s:%s" % (default_branch, remote_branch)
        push_args = [push_remote.name, to_push]
        # NOTE(review): --force is always added, every push is a forced push
        # -- confirm this is intentional
        push_args.append("--force")
        if dry_run:
            push_args.append("--dry-run")
        rc, out = git.push(*push_args, raises=False)
        if rc == 0:
            ui.info(out)
        else:
            ui.error(out)
def foreach(projects, cmd, ignore_errors=True):
    """ Execute the command on every project

    :param projects: the projects to run the command in
    :param cmd: the command, as a list of arguments
    :param ignore_errors: when True, collect failing projects and report
        them all at the end; when False, re-raise at the first failure
    """
    errors = list()
    ui.info(ui.green, "Running `%s` on every project" % " ".join(cmd))
    for i, project in enumerate(projects):
        ui.info_count(i, len(projects), ui.blue, project.src)
        # copy the command list -- presumably defensive, in case the callee
        # mutates its argument
        command = cmd[:]
        try:
            qisys.command.call(command, cwd=project.path)
        except qisys.command.CommandFailedException:
            if ignore_errors:
                errors.append(project)
                continue
            else:
                raise
    if not errors:
        return
    # NOTE(review): bare `print` is a Python 2 statement; under Python 3 this
    # line evaluates the print function and prints nothing -- confirm target
    print
    ui.info(ui.red, "Command failed on the following projects:")
    for project in errors:
        ui.info(ui.green, " * ", ui.reset, ui.blue, project.src)
    sys.exit(1)
def handle_pure_python(venv_path, python_worktree, env=None):
    """ Add the paths of all python projects to the virtualenv

    Projects set up with distutils are pip-installed in editable mode;
    the others get their python_path entries appended to ``qi.pth``
    inside the virtualenv's site-packages.

    :return: False when at least one pip install failed, True otherwise
    """
    lib_path = virtualenv.path_locations(venv_path)[1]
    qi_pth_dest = os.path.join(venv_path, lib_path, "site-packages/qi.pth")
    res = True
    # Truncate qi.pth first, then append one path per non-distutils project
    with open(qi_pth_dest, "w") as fp:
        fp.write("")
        for i, project in enumerate(python_worktree.python_projects):
            ui.info_count(i, len(python_worktree.python_projects), ui.blue, project.name)
            if project.setup_with_distutils:
                cmd = [python_worktree.pip, "install"]
                if not ui.CONFIG["verbose"]:
                    cmd.append("--quiet")
                cmd.extend(["--editable", "."])
                # best effort: record the failure but keep processing projects
                rc = qisys.command.call(cmd, cwd=project.path,
                                        ignore_ret_code=True, env=env)
                if rc != 0:
                    ui.warning("Failed to run pip install on", project.src)
                    res = False
            else:
                ui.debug("Adding python path for project", project.name,
                         ":\n", project.python_path)
                for path in project.python_path:
                    fp.write(path + "\n")
    return res
def do(args): """ Main Entry Point """ doc_builder = qidoc.parsers.get_doc_builder(args) doc_projects = doc_builder.get_dep_projects() to_clean = list() for doc_project in doc_projects: # FIXME: this can create an empty build dir for nothing, so we remove it if we don't need it try: build_dir = doc_project.build_dir except AttributeError: continue if not os.path.exists(build_dir): continue if qisys.sh.is_empty(build_dir): qisys.sh.rm(build_dir) continue to_clean.append(build_dir) if not to_clean: ui.info(ui.green, "Nothing to clean") return if not args.force: ui.info(ui.green, "Build directories that will be removed", ui.white, "(use -f to apply)") for i, build_dir in enumerate(to_clean): if args.force: ui.info_count(i, len(to_clean), ui.green, "Cleaning", ui.reset, build_dir) qisys.sh.rm(build_dir) else: ui.info_count(i, len(to_clean), build_dir)
def configure_projects(self, projects=None):
    """ Configure the given projects so that the actual git config matches
    the one coming from the manifest:

    Configure default remotes, default branches and code review, then
    save config. To be called _after_ sync()

    :param projects: the projects to configure; defaults to every git
        project of the worktree
    """
    if projects is None:
        projects = self.git_worktree.get_git_projects()
    if not projects:
        return
    to_configure = list()
    srcs = {project.src: project for project in projects}
    # only configure repos from the manifest that match a selected project
    for repo in self.new_repos:
        if repo.src in srcs.keys():
            to_configure.append(repo)
    if not to_configure:
        return
    ui.info(ui.green, ":: Setup git projects ...")
    # pad sources so the \r-progress line overwrites cleanly
    max_src = max(len(x.src) for x in to_configure)
    n = len(to_configure)
    for i, repo in enumerate(to_configure):
        ui.info_count(i, n, ui.white, "Setup", ui.reset,
                      ui.blue, repo.src.ljust(max_src), end="\r")
        git_project = srcs[repo.src]
        git_project.read_remote_config(repo)
        git_project.apply_config()
    # blank out the progress line before returning
    ui.info(" " * (max_src + 19), end="\r")
    self.git_worktree.save_git_config()
def configure_virtualenv(config, python_worktree, build_worktree=None,
                         remote_packages=None, site_packages=True):
    """ Create a virtualenv for the worktree and install the Python projects
    into it.

    :param config: build config name, stored on the worktree
    :param python_worktree: worktree holding the Python projects
    :param build_worktree: when given, also register C++ extensions
    :param remote_packages: extra packages to pip-install into the venv
    :param site_packages: whether the venv sees the system site-packages
    """
    if not remote_packages:
        remote_packages = list()
    # create a new virtualenv
    python_worktree.config = config
    venv_path = python_worktree.venv_path
    pip = python_worktree.pip
    try:
        virtualenv.create_environment(python_worktree.venv_path,
                                      site_packages=site_packages)
    except Exception as e:
        # fixed: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; also surface the underlying error
        ui.error("Failed to create virtualenv", e)
        return
    # Install all Python projects using pip install -e .
    python_projects = python_worktree.python_projects
    for i, project in enumerate(python_projects):
        ui.info_count(i, len(python_projects),
                      ui.green, "Configuring", ui.reset, ui.blue, project.src)
        cmd = [pip, "install", "--editable", "."]
        qisys.command.call(cmd, cwd=project.path)
    # Write a qi.pth file containing path to C/C++ extensions
    if build_worktree:
        handle_extensions(venv_path, python_worktree, build_worktree)
    # Install the extension in the virtualenv
    binaries_path = virtualenv.path_locations(venv_path)[-1]
    pip_binary = os.path.join(binaries_path, "pip")
    if remote_packages:
        cmd = [pip_binary, "install"] + remote_packages
        subprocess.check_call(cmd)
def convert_from_conan(package_path, name, version="0.0.1"):
    """ Convert a conan build output directory to a qibuild package.

    :param package_path: directory containing ``conanbuildinfo.json``
    :param name: name of the generated qibuild package
    :param version: version written into the package.xml
    :return: path of the generated archive
    """
    assert conan_json_exists(package_path), "{} not found".format(
        os.path.join(package_path, "conanbuildinfo.json"))
    info = load_conan_json(package_path)
    settings = info.get("settings")
    ui.info(
        ui.white, "Compiled on {} {} with {} version {}".format(
            settings.get("os"), settings.get("arch"),
            settings.get("compiler"), settings.get("compiler.version")))
    ui.info(
        ui.white, "Compiled in {} with {} ".format(settings.get("build_type"),
                                                   settings.get("compiler.libcxx")))
    ui.info("Exposed librairies:")
    # fixed: info.get("dependencies") was re-fetched (and re-measured) on
    # every iteration; fetch it once
    dependencies = info.get("dependencies")
    for i, dep in enumerate(dependencies):
        ui.info_count(i, len(dependencies), ui.blue,
                      "{}@{}".format(dep.get("name"), dep.get("version")))
        _generate_conan_share_cmake(package_path, dep)
    if sys.platform == "darwin":
        # macOS dylibs need their rpaths rewritten to be relocatable
        _fix_rpaths(os.path.join(package_path, "lib"))
    add_package_xml(package_path, name, version, settings.get("os"))
    res = _compress_package(package_path, name, settings, version)
    ui.info(ui.green, "Archive generated in", res)
    return res
def do(args):
    """ List every test and warn about those that do not follow the
    naming (test_*) or typing (pytest/gtest) conventions. """
    test_runners = qitest.parsers.get_test_runners(args)
    # rule to check for tests which doesn't follow naming convention
    expr = re.compile("^test_.*")
    warn_name_count = 0
    warn_type_count = 0
    for test_runner in test_runners:
        ui.info("Tests in ", test_runner.project.sdk_directory)
        # fixed: was recomputed on every loop iteration
        n = len(test_runner.tests)
        for i, test in enumerate(test_runner.tests):
            name = test["name"]
            name_ok = re.match(expr, name)
            type_ok = (test.get("pytest") or test.get("gtest"))
            if name_ok and type_ok:
                ui.info_count(i, n, test["name"])
            else:
                message = ""
                if not name_ok:
                    warn_name_count += 1
                    message += "(invalid name) "
                if not type_ok:
                    warn_type_count += 1
                    message += "(no type)"
                ui.info_count(i, n, name, ui.brown, message)
    # NOTE(review): the counters accumulate over all runners, but the totals
    # below come from the last runner only -- confirm intent with >1 runner
    if warn_name_count:
        msg = "%i on %i tests do not respect naming convention" % (
            warn_name_count, len(test_runner.tests))
        ui.warning(msg)
    if warn_type_count:
        msg = "%i on %i tests do not have any type" % (
            warn_type_count, len(test_runner.tests))
        ui.warning(msg)
def do(args): """Main entry point""" git_worktree = qisrc.parsers.get_git_worktree(args) sync_ok = git_worktree.sync() git_projects = qisrc.parsers.get_git_projects(git_worktree, args, default_all=True, use_build_deps=True) if not git_projects: qisrc.worktree.on_no_matching_projects(git_worktree, groups=args.groups) return git_worktree.configure_projects(git_projects) skipped = list() failed = list() ui.info(ui.green, ":: Syncing projects ...") max_src = max(len(x.src) for x in git_projects) for (i, git_project) in enumerate(git_projects): ui.info_count(i, len(git_projects), ui.blue, git_project.src.ljust(max_src), end="\r") (status, out) = git_project.sync(rebase_devel=args.rebase_devel) if status is None: ui.info("\n", ui.brown, " [skipped]") skipped.append((git_project.src, out)) if status is False: ui.info("\n", ui.red, " [failed]") failed.append((git_project.src, out)) if out: print ui.indent(out + "\n\n", num=2) #clean the screen ui.info_count(i, len(git_projects), ui.blue, " ".ljust(max_src), end="\r") print_overview(len(git_projects), len(skipped), len(failed)) if failed or not sync_ok: sys.exit(1)
def handle_extensions(venv_path, python_worktree, build_worktree):
    """ Check if there is a build project matching the given source, and
    add the correct path to the virtualenv. """
    build_projects = build_worktree.build_projects
    extensions_projects = []
    for py_project in python_worktree.python_projects:
        parent = qisys.parsers.find_parent_project(build_projects,
                                                   py_project.path)
        if parent:
            extensions_projects.append(parent)
    if not extensions_projects:
        return
    ui.info()
    ui.info(ui.blue, "::", ui.reset, "Registering C++ extensions")
    total = len(extensions_projects)
    to_write = ""
    for index, ext_project in enumerate(extensions_projects):
        ui.info_count(index, total, ui.blue, ext_project.name)
        qi_pth_src = os.path.join(ext_project.sdk_directory, "qi.pth")
        if not os.path.exists(qi_pth_src):
            continue
        with open(qi_pth_src, "r") as src:
            to_write += src.read()
        if not to_write.endswith("\n"):
            to_write += "\n"
    lib_path = virtualenv.path_locations(venv_path)[1]
    qi_pth_dest = os.path.join(venv_path, lib_path, "site-packages/qi.pth")
    with open(qi_pth_dest, "a") as dest:
        dest.write(to_write)
def summary(self): """ Display the tests results. Called at the end of self.run() Sets ``self.ok`` """ if not self.tests: self.ok = False return num_tests = len(self.results) failures = [x for x in self.results.values() if x.ok is False] num_failed = len(failures) message = "Ran %i tests in %is" % (num_tests, self.elapsed_time) ui.info(message) self.ok = (not failures) and not self._interrupted if self.ok: ui.info(ui.green, "All pass. Congrats!") return if num_failed != 0: ui.error(num_failed, "failures") if failures: max_len = max(len(x.test["name"]) for x in failures) for i, failure in enumerate(failures): ui.info_count(i, num_failed, ui.blue, failure.test["name"].ljust(max_len + 2), ui.reset, *failure.message)
def intl_update(self):
    """Run sphinx in gettext mode, then refresh every .po catalog
    through sphinx-intl."""
    ui.info(ui.blue, "::", ui.reset, "Generating message catalogs ...")
    import sphinx
    from sphinx_intl.commands import run as sphinx_intl_run
    # Step one: sphinx-build -b gettext <source> <locale>
    locale_dir = os.path.join(self.source_dir, "locale")
    argv = [sys.executable, "-c", self.build_dir, "-b", "gettext",
            self.source_dir, locale_dir]
    exit_code = 0
    try:
        sphinx.main(argv=argv)
    except SystemExit as err:
        exit_code = err.code
    if exit_code != 0:
        raise SphinxBuildError(self)
    ui.info()
    # Step two: sphinx-intl update for each configured language
    ui.info(ui.blue, "::", ui.reset, "Updating .po files ...")
    conf_py = os.path.join(self.build_dir, "conf.py")
    total = len(self.linguas)
    for index, lingua in enumerate(self.linguas):
        ui.info_count(index, total, ui.blue, lingua)
        sphinx_intl_run(["update",
                         "-c", conf_py,
                         "--pot-dir", locale_dir,
                         "--locale-dir", locale_dir,
                         "--language", lingua])
def foreach(projects, cmd, ignore_errors=True):
    """ Run ``cmd`` inside every project directory.

    :param ignore_errors: when True, record failing projects and report
        them all at the end; when False, re-raise at the first failure
    """
    failed = []
    ui.info(ui.green, "Running `%s` on every project" % " ".join(cmd))
    total = len(projects)
    for index, project in enumerate(projects):
        ui.info_count(index, total, ui.blue, project.src)
        try:
            qisys.command.call(cmd[:], cwd=project.path)
        except qisys.command.CommandFailedException:
            if not ignore_errors:
                raise
            failed.append(project)
    if not failed:
        return
    print()
    ui.info(ui.red, "Command failed on the following projects:")
    for project in failed:
        ui.info(ui.green, " * ", ui.reset, ui.blue, project.src)
    sys.exit(1)
def install(self, dest, *args, **kwargs):
    """ Copy the Python scripts, modules and packages into ``dest``.

    Extensions written in CMake are installed by the CMakeBuilder,
    not here.
    """
    if not self.projects:
        return
    total = len(self.projects)
    for index, project in enumerate(self.projects):
        ui.info_count(index, total, ui.green, "Installing",
                      ui.reset, ui.blue, project.name)
        project.install(dest)
    # Ship a shell wrapper so `<dest>/python` runs with the SDK's
    # library and module paths set up
    wrapper_script = """\
#!/bin/bash
SDK_DIR="$(dirname "$(readlink -f $0 2>/dev/null)")"
export LD_LIBRARY_PATH="${SDK_DIR}/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
export PYTHONPATH="${SDK_DIR}/lib/python2.7/site-packages${PYTHONPATH:+:$PYTHONPATH}"
exec python "$@"
"""
    wrapper_path = os.path.join(dest, "python")
    with open(wrapper_path, "w") as out:
        out.write(wrapper_script)
    os.chmod(wrapper_path, 0o755)
def checkout(self, branch, force=False):
    """ Called by ``qisrc checkout``

    For each project, checkout the branch if it is different than
    the default branch of the manifest.

    :param branch: kept for interface compatibility; the branch actually
        checked out comes from each project's default_branch
    :param force: forwarded to ``git.safe_checkout``
    """
    ui.info(ui.green, ":: Checkout projects ...")
    errors = list()
    manifest_xml = os.path.join(self._syncer.manifest_repo, "manifest.xml")
    # NOTE(review): the Manifest object is unused afterwards -- presumably
    # constructed for its parsing/validation side effect; confirm
    manifest = qisrc.manifest.Manifest(manifest_xml)
    if not self.git_projects:
        # fixed: max() on an empty sequence raised ValueError
        return
    max_src = max(len(x.src) for x in self.git_projects)
    n = len(self.git_projects)
    for i, project in enumerate(self.git_projects):
        ui.info_count(i, n, ui.bold, "Checkout", ui.reset, ui.blue,
                      project.src.ljust(max_src), end="\r")
        if project.default_branch is None:
            continue
        branch_name = project.default_branch.name
        remote_name = project.default_remote.name
        git = qisrc.git.Git(project.path)
        ok, err = git.safe_checkout(branch_name, remote_name, force=force)
        if not ok:
            errors.append((project.src, err))
    if not errors:
        return
    ui.error("Failed to checkout some projects")
    for (project, error) in errors:
        ui.info(project, ":", error)
def summary(self): """ Display the tests results. Called at the end of self.run() Sets ``self.ok`` """ if not self.tests: self.ok = False return num_tests = len(self.results) failures = [x for x in self.results.values() if x.ok is False] num_failed = len(failures) message = "Ran %i tests in %is" % (num_tests, self.elapsed_time) ui.info(message) self.ok = (not failures) and not self._interrupted if self.ok: ui.info(ui.green, "All pass. Congrats!") else: if num_failed != 0: ui.error(num_failed, "failures") if failures: max_len = max(len(x.test["name"]) for x in failures) for i, failure in enumerate(failures): ui.info_count(i, num_failed, ui.blue, failure.test["name"].ljust(max_len + 2), ui.reset, *failure.message) self.write_failures(failures)
def do(args):
    """ Remove the build directories of the selected doc projects.

    Without ``-f`` only lists what would be removed.
    """
    doc_worktree = qidoc.parsers.get_doc_worktree(args)
    doc_projects = qidoc.parsers.get_doc_projects(doc_worktree, args)
    to_clean = list()
    for doc_project in doc_projects:
        # FIXME
        # this can create an empty build dir for nothing, so
        # we remove it if we don't need it
        try:
            build_dir = doc_project.build_dir
        except AttributeError:
            continue
        if not os.path.exists(build_dir):
            continue
        if qisys.sh.is_empty(build_dir):
            qisys.sh.rm(build_dir)
            continue
        to_clean.append(build_dir)
    if not to_clean:
        ui.info(ui.green, "Nothing to clean")
        return
    if not args.force:
        # fixed: the message was missing its closing parenthesis
        ui.info(ui.green, "Build directories that will be removed",
                ui.white, "(use -f to apply)")
    for i, build_dir in enumerate(to_clean):
        if args.force:
            ui.info_count(i, len(to_clean), ui.green, "Cleaning",
                          ui.reset, build_dir)
            qisys.sh.rm(build_dir)
        else:
            ui.info_count(i, len(to_clean), build_dir)
def install(self, destdir, clean=False):
    """ Build every dependency project, then install them all to a
    destination directory. """
    projects = self.get_dep_projects()
    total = len(projects)
    ui.info(ui.blue, "::", ui.reset, "Building all projects")
    for index, project in enumerate(projects):
        ui.info_count(index, total, ui.green, "Building",
                      ui.blue, project.name)
        build_options = {
            "version": self.version,
            "hosted": self.hosted,
            "build_type": self.build_type,
            "rel_paths": True,
        }
        if clean:
            project.clean()
        project.configure(**build_options)
        project.build(build_type=self.build_type, language=self.language)
    # start from a fresh destination when cleaning
    if clean:
        qisys.sh.rm(destdir)
    qisys.sh.mkdir(destdir)
    ui.info(ui.blue, "::", ui.reset, "Installing all projects")
    for index, project in enumerate(projects):
        target = os.path.join(destdir, project.dest)
        ui.info_count(index, total, ui.green, "Installing",
                      ui.blue, project.name,
                      ui.reset, "->", ui.white, target)
        project.install(target)
def handle_extensions(venv_path, python_worktree, build_worktree):
    """ Check if there is a build project matching the given source, and
    add the correct path to the virtualenv. """
    candidates = build_worktree.build_projects
    ext_projects = []
    for py_project in python_worktree.python_projects:
        match = qisys.parsers.find_parent_project(candidates, py_project.path)
        if match:
            ext_projects.append(match)
    if not ext_projects:
        return
    ui.info()
    ui.info(ui.blue, "::", ui.reset, "Registering C++ extensions")
    count = len(ext_projects)
    to_write = ""
    for idx, ext_project in enumerate(ext_projects):
        ui.info_count(idx, count, ui.blue, ext_project.name)
        qi_pth_src = os.path.join(ext_project.sdk_directory, "qi.pth")
        if not os.path.exists(qi_pth_src):
            continue
        with open(qi_pth_src, "r") as src:
            to_write += src.read()
        if not to_write.endswith("\n"):
            to_write += "\n"
    lib_path = virtualenv.path_locations(venv_path)[1]
    qi_pth_dest = os.path.join(venv_path, lib_path, "site-packages/qi.pth")
    with open(qi_pth_dest, "a") as dest:
        dest.write(to_write)
def do(args): """Main entry point""" git_worktree = qisrc.parsers.get_git_worktree(args) sync_ok = git_worktree.sync() git_projects = qisrc.parsers.get_git_projects(git_worktree, args, default_all=True, use_build_deps=True) if not git_projects: qisrc.worktree.on_no_matching_projects(git_worktree, groups=args.groups) return git_worktree.configure_projects(git_projects) skipped = list() failed = list() ui.info(ui.green, ":: Syncing projects ...") max_src = max(len(x.src) for x in git_projects) for (i, git_project) in enumerate(git_projects): ui.info_count(i, len(git_projects), ui.blue, git_project.src.ljust(max_src), end="\r") (status, out) = git_project.sync(rebase_devel=args.rebase_devel) if status is None: ui.info("\n", "\n", ui.brown, git_project.src, " [skipped]") skipped.append((git_project.src, out)) if status is False: ui.info("\n", "\n", git_project.src, ui.red, " [failed]") failed.append((git_project.src, out)) if out: print ui.indent(out, num=2) #clean the screen ui.info_count(i, len(git_projects), ui.blue, " ".ljust(max_src), end="\r") print_overview(len(git_projects), len(skipped), len(failed)) if failed or not sync_ok: sys.exit(1)
def do(args):
    """ List every test and warn about those that do not follow the
    naming (test_*) or typing (pytest/gtest) conventions. """
    test_runners = qitest.parsers.get_test_runners(args)
    # rule to check for tests which doesn't follow naming convention
    expr = re.compile("^test_.*")
    warn_name_count = 0
    warn_type_count = 0
    for test_runner in test_runners:
        ui.info("Tests in ", test_runner.project.sdk_directory)
        # fixed: was recomputed on every loop iteration
        n = len(test_runner.tests)
        for i, test in enumerate(test_runner.tests):
            name = test["name"]
            name_ok = re.match(expr, name)
            type_ok = (test.get("pytest") or test.get("gtest"))
            if name_ok and type_ok:
                ui.info_count(i, n, test["name"])
            else:
                message = ""
                if not name_ok:
                    warn_name_count += 1
                    message += "(invalid name) "
                if not type_ok:
                    warn_type_count += 1
                    message += "(no type)"
                ui.info_count(i, n, name, ui.brown, message)
    # NOTE(review): the counters accumulate over all runners, but the totals
    # below come from the last runner only -- confirm intent with >1 runner
    if warn_name_count:
        msg = "%i on %i tests do not respect naming convention" % (
            warn_name_count, len(test_runner.tests))
        ui.warning(msg)
    if warn_type_count:
        msg = "%i on %i tests do not have any type" % (
            warn_type_count, len(test_runner.tests))
        ui.warning(msg)
def install(self, destdir, clean=False):
    """ Build all dependency projects, then install each of them into
    its sub-directory of ``destdir``. """
    projects = self.get_dep_projects()
    count = len(projects)
    ui.info(ui.blue, "::", ui.reset, "Building all projects")
    for idx, project in enumerate(projects):
        ui.info_count(idx, count, ui.green, "Building",
                      ui.blue, project.name)
        if clean:
            project.clean()
        project.configure(version=self.version,
                          hosted=self.hosted,
                          build_type=self.build_type,
                          rel_paths=True)
        project.build(build_type=self.build_type, language=self.language)
    # wipe the destination first when cleaning
    if clean:
        qisys.sh.rm(destdir)
    qisys.sh.mkdir(destdir)
    ui.info(ui.blue, "::", ui.reset, "Installing all projects")
    for idx, project in enumerate(projects):
        dest_path = os.path.join(destdir, project.dest)
        ui.info_count(idx, count, ui.green, "Installing",
                      ui.blue, project.name,
                      ui.reset, "->", ui.white, dest_path)
        project.install(dest_path)
def install(self, dest_dir, *args, **kwargs):
    """ Install the projects and the packages to the dest_dir

    Recognized keyword arguments (consumed here or forwarded):
      * install_tc_packages: when falsy, toolchain packages are skipped
      * prefix: sub-directory of ``dest_dir`` the packages go into
      * components: forwarded to each ``package.install``
    Remaining ``kwargs`` are forwarded to each ``project.install``.
    :return: the list of installed files
    """
    installed = list()
    projects = self.deps_solver.get_dep_projects(self.projects, self.dep_types)
    packages = self.deps_solver.get_dep_packages(self.projects, self.dep_types)
    # Pop install_tc_packages so it is not forwarded to project.install()
    if "install_tc_packages" in kwargs:
        install_tc_packages = kwargs["install_tc_packages"]
        del kwargs["install_tc_packages"]
        if not install_tc_packages:
            packages = list()
    # Compute the real path where to install the packages:
    # strip the leading "/" so os.path.join does not discard dest_dir
    prefix = kwargs.get("prefix", "/")
    prefix = prefix[1:]
    real_dest = os.path.join(dest_dir, prefix)
    components = kwargs.get("components")
    build_type = "Release"
    if projects:
        ui.info(ui.green, "the following projects")
        for project in projects:
            ui.info(ui.green, " *", ui.blue, project.name)
        if packages:
            ui.info(ui.green, "and the following packages")
            for package in packages:
                ui.info(ui.green, " *", ui.blue, package.name)
        ui.info(ui.green, "will be installed to", ui.blue, real_dest)
        runtime_only = self.dep_types == ["runtime"]
        if runtime_only:
            ui.info(ui.green, "(runtime components only)")
        # assumes all projects share the first project's build type -- TODO confirm
        build_type = projects[0].build_type
    release = build_type == "Release"
    if packages:
        ui.info(ui.green, ":: ", "installing packages")
        for i, package in enumerate(packages):
            ui.info_count(i, len(packages), ui.green, "Installing",
                          ui.blue, package.name)
            files = package.install(real_dest, components=components, release=release)
            installed.extend(files)
    # Remove qitest.json so that we don't append tests twice
    # when running qibuild install --with-tests twice
    qitest_json = os.path.join(dest_dir, "qitest.json")
    qisys.sh.rm(qitest_json)
    if projects:
        ui.info(ui.green, ":: ", "installing projects")
        for i, project in enumerate(projects):
            ui.info_count(i, len(projects), ui.green, "Installing",
                          ui.blue, project.name)
            files = project.install(dest_dir, **kwargs)
            installed.extend(files)
    return installed
def intl_update(self): """ Regenerate translation catalogs for the top project """ projects = self.get_dep_projects() for i, project in enumerate(projects): ui.info_count(i, len(projects), ui.green, "Updating", ui.blue, project.name) if project.doc_type == "sphinx": project.intl_update()
def do(args): """ Main entry point. """ build_worktree = qibuild.parsers.get_build_worktree(args) projects = qibuild.parsers.get_build_projects(build_worktree, args, solve_deps=True) clean_selection = _get_clean_selection(args.remove_known_configs, args.remove_unknown_configs) bdirs = {'known_configs': list(), 'unknown_configs': list()} all_configs = clean_selection != "given_config" for project in projects: bdirs_ = project.get_build_dirs(all_configs=all_configs) for cat in bdirs_.keys(): bdirs[cat].extend(bdirs_[cat]) if clean_selection in ["given_config", "all_configs", "known_configs"]: bdir_count = len(bdirs['known_configs']) if bdir_count == 0: ui.info(ui.green, "No build directory to clean") elif not args.force: ui.info(ui.green, "Build directories that will be removed", ui.reset, ui.bold, "(use -f to apply):") for i, bdir in enumerate(bdirs['known_configs']): message = list() if args.force: message.extend([ui.green, "Cleaning", ui.reset, bdir]) # delete the build directory qisys.sh.rm(bdir) else: message.append(bdir) ui.info_count(i, bdir_count, *message) if clean_selection in ["all_configs", "unknown_configs"]: bdir_count = len(bdirs['unknown_configs']) if bdir_count == 0: ui.info(ui.green, "No build directory matching unknown configuration to clean") elif not args.force: ui.info(ui.green, "Build directories matching unknown configuration that may be removed", ui.reset, ui.bold, "(interactive mode, use -f to apply):") # remove uncertain build directories, by configuration name, so sort them sorted_bdirs = {} for bdir in bdirs['unknown_configs']: # all build directory names should be prefixed with "build-", so strip it config_name = os.path.basename(bdir)[6:] if config_name not in sorted_bdirs: sorted_bdirs[config_name] = [] sorted_bdirs[config_name].append(bdir) for c, sbdirs in sorted_bdirs.items(): question = "Remove build directories matching the '%s' configuration?" 
% c answer = qisys.interact.ask_yes_no(question, default=False) if not answer: continue bdir_count = len(sbdirs) for i, bdir in enumerate(sbdirs, start=1): to_print = [ui.green, "*", ui.reset, "(%i/%i)" % (i, bdir_count)] if args.force: to_print.extend([ui.green, "Cleaning", ui.reset, bdir]) # delete the build directory qisys.sh.rm(bdir) else: to_print.extend([ui.reset, bdir]) ui.info(*to_print)
def build(self, *args, **kwargs):
    """ Build every dependency project, respecting the dependency order. """
    projects = self.deps_solver.get_dep_projects(self.projects, self.dep_types)
    total = len(projects)
    for index, project in enumerate(projects):
        ui.info_count(index, total, ui.green, "Building",
                      ui.blue, project.name, update_title=True)
        self.pre_build(project)
        project.build(**kwargs)
def _sync_repos(self, old_repos, new_repos, force=False):
    """ Sync the remote repo configurations with the git worktree

    :param old_repos: repos as previously known
    :param new_repos: repos as described by the new manifest
    :param force: forwarded to ``move_repo``
    :return: False when at least one clone or move failed
    """
    res = True
    # 1/ create, remove or move the git projects:
    # Compute the work that needs to be done:
    (to_add, to_move, to_rm, to_update) = \
        compute_repo_diff(old_repos, new_repos)
    if to_rm or to_add or to_move or to_update:
        ui.info(ui.green, ":: Computing diff ...")
    if to_rm:
        for repo in to_rm:
            ui.info(ui.red, "* ", ui.reset, "removing", ui.blue, repo.src)
    if to_add:
        for repo in to_add:
            ui.info(ui.green, "* ", ui.reset, "adding", ui.blue, repo.src)
    if to_move:
        for (repo, new_src) in to_move:
            ui.info(ui.brown, "* ", ui.reset, "moving", ui.blue, repo.src,
                    ui.reset, " to ", ui.blue, new_src)
    if to_update:
        for (old_repo, new_repo) in to_update:
            ui.info(ui.green, "* ", ui.reset, "updating", ui.blue, old_repo.src)
            if new_repo.review and not old_repo.review:
                ui.info(ui.tabs(2), ui.green, "(now using code review)")
            # re-apply the remote config for every updated repo
            # (placement relative to the review check reconstructed -- confirm)
            project = self.git_worktree.get_git_project(new_repo.src)
            project.read_remote_config(new_repo)
            project.save_config()
    for repo in to_rm:
        self.git_worktree.remove_repo(repo)
    if to_add:
        ui.info(ui.green, ":: Cloning new repositories ...")
        for i, repo in enumerate(to_add):
            ui.info_count(i, len(to_add),
                          ui.blue, repo.project,
                          ui.green, "->",
                          ui.blue, repo.src,
                          ui.white, "(%s)" % repo.default_branch)
            project = self.git_worktree.get_git_project(repo.src)
            if project:
                # Repo is already there, re-apply config
                project.read_remote_config(repo)
                project.save_config()
                continue
            if not self.git_worktree.clone_missing(repo):
                res = False
            else:
                project = self.git_worktree.get_git_project(repo.src)
                project.read_remote_config(repo)
                project.save_config()
    if to_move:
        ui.info(ui.green, ":: Moving repositories ...")
        for (repo, new_src) in to_move:
            if self.git_worktree.move_repo(repo, new_src, force=force):
                project = self.git_worktree.get_git_project(new_src)
                project.read_remote_config(repo)
                project.save_config()
            else:
                res = False
    return res
def do(args): """Main entry points.""" git_worktree = qisrc.parsers.get_git_worktree(args) snapshot = None if args.snapshot: snapshot = qisrc.snapshot.Snapshot() snapshot.load(args.snapshot) if snapshot and snapshot.format_version and snapshot.format_version >= 1: reset_manifest(git_worktree, snapshot, ignore_groups=args.ignore_groups) git_projects = qisrc.parsers.get_git_projects(git_worktree, args, default_all=True, use_build_deps=True) errors = list() for i, git_project in enumerate(git_projects): ui.info_count(i, len(git_projects), "Reset", git_project.src) src = git_project.src git = qisrc.git.Git(git_project.path) ok, message = git.require_clean_worktree() if not ok and not args.force: ui.warning(message) errors.append(src) continue if not git_project.default_branch: ui.warning(git_project.src, "not in any manifest, skipping") continue branch = git_project.default_branch.name remote = git_project.default_remote.name git.safe_checkout(branch, remote, force=True) to_reset = None if args.snapshot: to_reset = snapshot.refs.get(src) if not to_reset: ui.warning(src, "not found in the snapshot") continue elif args.tag: to_reset = args.tag else: to_reset = "%s/%s" % (remote, branch) try: qisrc.reset.clever_reset_ref(git_project, to_reset) except: errors.append(src) if not errors: return ui.error("Failed to reset some projects") for error in errors: ui.info(ui.red, " * ", error) sys.exit(1)
def configure(self, *args, **kwargs):
    """ Run the configure step on each project, in dependency order. """
    self.bootstrap_projects()
    to_configure = self.deps_solver.get_dep_projects(self.projects,
                                                     self.dep_types)
    total = len(to_configure)
    for index, dep_project in enumerate(to_configure):
        ui.info_count(index, total, ui.green, "Configuring",
                      ui.blue, dep_project.name)
        dep_project.configure(**kwargs)
def rebase_projects(git_projects, upstream_projects, branch):
    """ Rebase (or fast-forward) each git project onto its upstream branch.

    A project is skipped when it is not on its default branch, has no
    upstream match, or is ahead of / behind its own remote tracking ref.

    :param git_projects: projects to process
    :param upstream_projects: mapping ``src`` -> upstream project
    :param branch: upstream branch name (used in skip messages only)
    :returns: tuple ``(rebased_projects, errors)``
    """
    rebased_projects = list()
    errors = list()
    # Fix: removed the unused `max_src` computation, which also made this
    # function crash on an empty `git_projects` list (max() of empty seq)
    for i, git_project in enumerate(git_projects):
        ui.info_count(i, len(git_projects), git_project.src)
        git = qisrc.git.Git(git_project.path)
        git.fetch()
        local_branch = git_project.default_branch.name
        remote_branch = git_project.default_branch.remote_branch
        remote_name = git_project.default_remote.name
        remote_ref = "%s/%s" % (remote_name, remote_branch)
        if git.get_current_branch() != local_branch:
            ui.info(ui.brown, git_project.src, "[skipped]")
            ui.info("Not on %s branch" % local_branch)
            continue
        if git_project.src not in upstream_projects:
            ui.info(ui.brown, git_project.src, "[skipped]")
            ui.info("No match for %s on %s branch" % (git_project.src, branch))
            continue
        # Only rebase when the local branch is in sync with its own remote:
        status = qisrc.git.get_status(git, local_branch, remote_ref)
        if status == "ahead":
            ui.info(ui.brown, git_project.src, "[skipped]")
            ui.info("You have local changes not pushed yet")
            continue
        if status == "behind":
            ui.info(ui.brown, git_project.src, "[skipped]")
            ui.info("Local branch is not up-to-date")
            continue
        upstream_project = upstream_projects[git_project.src]
        upstream_branch = upstream_project.default_branch.name
        upstream_ref = "%s/%s" % (upstream_project.default_remote.name,
                                  upstream_branch)
        status = qisrc.git.get_status(git, local_branch, upstream_ref)
        if status == "no-diff":
            ui.info("no changes")
            continue
        if status == "behind":
            # Strictly behind upstream: a plain merge fast-forwards
            git.merge(upstream_ref)
            rebased_projects.append(git_project)
        else:
            # Keep a way back in case the rebase goes wrong
            git.call("tag", "-f", "before-rebase")
            rc, out = git.call("rebase", upstream_ref, raises=False)
            if rc == 0:
                rebased_projects.append(git_project)
            else:
                ui.info(ui.red, git_project.src, " [failed]")
                ui.info(out)
                git.call("rebase", "--abort", raises=False)
                errors.append(git_project)
    return rebased_projects, errors
def do(args):
    """ Main entry point: reset every selected git project to its manifest state.

    Optionally restores a snapshot (``args.snapshot``) or a tag
    (``args.tag``) instead of the default ``remote/branch`` ref.
    Exits with status 1 if any project could not be reset.
    """
    git_worktree = qisrc.parsers.get_git_worktree(args)
    snapshot = None
    if args.snapshot:
        snapshot = qisrc.snapshot.Snapshot()
        snapshot.load(args.snapshot)
    # New-style snapshots (format >= 1) also record the manifest state:
    # restore it before resetting the individual projects.
    if snapshot and snapshot.format_version and snapshot.format_version >= 1:
        reset_manifest(git_worktree, snapshot, groups=args.groups)
    git_projects = qisrc.parsers.get_git_projects(git_worktree, args,
                                                  default_all=True,
                                                  use_build_deps=True)
    errors = list()
    for i, git_project in enumerate(git_projects):
        ui.info_count(i, len(git_projects), "Reset", git_project.src)
        src = git_project.src
        git = qisrc.git.Git(git_project.path)
        ok, message = git.require_clean_worktree()
        # Refuse to reset a dirty worktree unless --force was given
        if not ok and not args.force:
            ui.warning(message)
            errors.append(src)
            continue
        # Discard local modifications in tracked files
        git.checkout(".")
        if not git_project.default_branch:
            ui.warning(git_project.src, "not in any manifest, skipping")
            continue
        branch = git_project.default_branch.name
        remote = git_project.default_remote.name
        git.safe_checkout(branch, remote, force=True)
        to_reset = None
        if args.snapshot:
            to_reset = snapshot.refs.get(src)
            if not to_reset:
                ui.warning(src, "not found in the snapshot")
                continue
        elif args.tag:
            to_reset = args.tag
        else:
            to_reset = "%s/%s" % (remote, branch)
        try:
            qisrc.reset.clever_reset_ref(git_project, to_reset)
        # Fix: was a bare `except:`, which also swallowed SystemExit
        # and KeyboardInterrupt
        except Exception:
            errors.append(src)
    if not errors:
        return
    ui.error("Failed to reset some projects")
    for error in errors:
        ui.info(ui.red, " * ", error)
    sys.exit(1)
def build(self):
    """ Build each doc project in dependency order.

    Assumes the projects have been configured beforehand.
    """
    to_build = self.get_dep_projects()
    total = len(to_build)
    for index, doc_project in enumerate(to_build):
        ui.info_count(index, total, ui.green, "Building",
                      ui.blue, doc_project.name)
        doc_project.build(werror=self.werror, build_type=self.build_type)
        ui.info(ui.green, "Doc generated in", ui.reset, ui.bold,
                doc_project.html_dir)
def do(args):
    """Main entry point.

    Run ``git grep`` with the given pattern in every selected git
    project, optionally rewriting matched paths to be worktree-relative
    or absolute. Exits 0 if at least one project matched, 1 otherwise.
    """
    git_worktree = qisrc.parsers.get_git_worktree(args)
    git_projects = qisrc.parsers.get_git_projects(git_worktree, args,
                                                  default_all=True,
                                                  use_build_deps=args.use_deps)
    git_grep_opts = args.git_grep_opts
    # -h / -H: suppress or force the filename prefix in git grep output
    if args.path == 'none':
        git_grep_opts.append("-h")
    else:
        git_grep_opts.append("-H")
        if args.path == 'absolute' or args.path == 'worktree':
            # --null separates the filename with NUL, so the path rewrite
            # below can split reliably even when the match contains ':'
            git_grep_opts.append("-I")
            git_grep_opts.append("--null")
    if ui.config_color(sys.stdout):
        git_grep_opts.append("--color=always")
    git_grep_opts.append(args.pattern)
    if not git_projects:
        qisrc.worktree.on_no_matching_projects(git_worktree,
                                               groups=args.groups)
        sys.exit(0)
    # Pad project names so the progress line is stable
    max_src = max(len(x.src) for x in git_projects)
    retcode = 1
    for i, project in enumerate(git_projects):
        # end="\r": overwrite the progress line on each iteration
        ui.info_count(i, len(git_projects), ui.green, "Looking in",
                      ui.blue, project.src.ljust(max_src), end="\r")
        git = qisrc.git.Git(project.path)
        (status, out) = git.call("grep", *git_grep_opts, raises=False)
        if out != "":
            if args.path == 'absolute' or args.path == 'worktree':
                # Prepend the project src (or full path) to each match line
                lines = out.splitlines()
                out_lines = list()
                for line in lines:
                    line_split = line.split('\0')
                    prepend = project.src if args.path == 'worktree' \
                        else project.path
                    line_split[0] = os.path.join(prepend, line_split[0])
                    out_lines.append(":".join(line_split))
                out = '\n'.join(out_lines)
            ui.info("\n", ui.reset, out)
        # git grep exits 0 when it found at least one match
        if status == 0:
            retcode = 0
    # If the last project had no output, terminate the progress line
    if not out:
        ui.info(ui.reset)
    sys.exit(retcode)
def deploy(pkg_paths, url):
    """ Copy each package to the remote host with scp, then install it there.

    Installation failures are reported but do not stop the loop.
    """
    total = len(pkg_paths)
    for index, package_path in enumerate(pkg_paths):
        ui.info_count(index, total,
                      ui.green, "Deploying", ui.reset,
                      ui.blue, package_path, ui.reset,
                      ui.green, "to", ui.reset,
                      ui.blue, url.as_string)
        qisys.command.call(
            ["scp", package_path, "%s@%s:" % (url.user, url.host)])
        try:
            _install_package(url, package_path)
        except Exception as error:
            ui.error("Unable to install package on target")
            ui.error("Error was: ", error)
def checkout(self, branch, force=False):  # pylint: disable=too-many-locals,unused-argument
    """ Called by ``qisrc checkout``

    For each project, checkout the branch if it is different than
    the default branch of the manifest.

    :param branch: unused here (see the pylint pragma); each project's
        own default branch is checked out instead
    :param force: forwarded to ``git.safe_checkout``
    :returns: True if every checkout succeeded, False otherwise
    """
    ui.info(ui.green, ":: Checkout projects ...")
    errors = list()
    manifest_xml = os.path.join(self.syncer.manifest_repo, "manifest.xml")
    # NOTE(review): the Manifest object is never read afterwards — the
    # pragma suggests parsing it is deliberate, presumably as a validity
    # check of manifest.xml; confirm before removing.
    manifest = qisrc.manifest.Manifest(manifest_xml)  # pylint: disable=unused-variable
    # First pass: collect the projects that are not already on their
    # default branch (skipping fixed refs and branch-less projects)
    to_checkout = list()
    for project in self.git_projects:
        if project.default_branch is None:
            continue
        if project.fixed_ref:
            continue
        branch_name = project.default_branch.name
        git = qisrc.git.Git(project.path)
        if git.get_current_branch() != branch_name:
            to_checkout.append(project)
    n = len(to_checkout)
    if n == 0:
        ui.info(ui.green, "Nothing to checkout")
        return True
    # Pad project names so the progress line is stable
    max_src = max([len(x.src) for x in to_checkout])
    # Second pass: actually check out the default branch of each project
    for i, project in enumerate(to_checkout):
        # end="\r": overwrite the progress line on each iteration
        ui.info_count(i, n, ui.bold, "Checkout", ui.reset, ui.blue,
                      project.src.ljust(max_src), end="\r")
        if project.default_branch is None:
            continue
        branch_name = project.default_branch.name
        remote_name = project.default_remote.name
        git = qisrc.git.Git(project.path)
        ok, err = git.safe_checkout(branch_name, remote_name, force=force)
        if not ok:
            errors.append((project.src, err))
    if not errors:
        return True
    ui.error("Failed to checkout some projects")
    for (project, error) in errors:
        ui.info(project, ":", error)
    return False
def execute(self, *_args, **kwargs):
    """ Build this job's project, then notify every job waiting on it. """
    ui.info_count(self.index, self.num_projects,
                  ui.green, "Building",
                  ui.blue, self.project.name,
                  ui.green, "in",
                  ui.blue, self.project.build_type,
                  update_title=True)
    self.project.build(**kwargs)
    # Build finished: wake up the dependent jobs under the shared lock
    with self.lock:
        for dependent in self.back_deps:
            ui.debug("Signaling end to job", ui.reset, ui.bold,
                     dependent.project.name)
            dependent.on_dependent_job_finished(self)
def configure(self, *args, **kwargs):
    """ Configure the projects in the correct order.

    When the ``single`` keyword is truthy, only ``self.projects`` are
    configured, skipping dependency resolution. The keyword is consumed
    here and never forwarded to ``project.configure()``.
    """
    self.bootstrap_projects()
    single = kwargs.pop("single", False)
    if single:
        to_configure = self.projects
    else:
        to_configure = self.deps_solver.get_dep_projects(
            self.projects, ["build", "runtime", "test"])
    total = len(to_configure)
    for index, dep_project in enumerate(to_configure):
        ui.info_count(index, total, ui.green, "Configuring",
                      ui.blue, dep_project.name)
        dep_project.configure(**kwargs)
def configure(self, *args, **kwargs):
    """ Configure the projects in the correct order.

    The ``single`` keyword, when truthy, restricts configuration to
    ``self.projects``; it is popped here so it never reaches
    ``project.configure()``.
    """
    self.bootstrap_projects()
    wants_single = kwargs.pop("single", None)
    projects_to_run = self.projects if wants_single else \
        self.deps_solver.get_dep_projects(self.projects,
                                          ["build", "runtime", "test"])
    count = len(projects_to_run)
    for idx, proj in enumerate(projects_to_run):
        ui.info_count(idx, count, ui.green, "Configuring",
                      ui.blue, proj.name)
        proj.configure(**kwargs)
def build(self, pdb=False):
    """ Build each doc project in dependency order.

    Assumes the projects have been configured beforehand. When
    spell-checking, the "Doc generated in" message is suppressed.
    """
    to_build = self.get_dep_projects()
    total = len(to_build)
    for idx, doc_project in enumerate(to_build):
        ui.info_count(idx, total, ui.green, "Building",
                      ui.blue, doc_project.name)
        doc_project.build(werror=self.werror,
                          build_type=self.build_type,
                          spellcheck=self.spellcheck,
                          language=self.language,
                          pdb=pdb)
        if not self.spellcheck:
            ui.info(ui.green, "Doc generated in", ui.reset, ui.bold,
                    doc_project.html_dir)
def build(self, pdb=False):
    """ Build the doc projects in the right order.

    Projects must have been configured first; no output-location
    message is printed while spell-checking.
    """
    dep_projects = self.get_dep_projects()
    n = len(dep_projects)
    for i, current in enumerate(dep_projects):
        ui.info_count(i, n, ui.green, "Building", ui.blue, current.name)
        build_kwargs = {
            "werror": self.werror,
            "build_type": self.build_type,
            "spellcheck": self.spellcheck,
            "language": self.language,
            "pdb": pdb,
        }
        current.build(**build_kwargs)
        if not self.spellcheck:
            ui.info(ui.green, "Doc generated in", ui.reset, ui.bold,
                    current.html_dir)
def checkout(self, branch, force=False):
    """ Called by ``qisrc checkout``

    Record the requested branch on ``self``, then, for each project,
    check out its default branch when the project is not already on it.

    :param branch: stored on ``self.branch``; each project is checked
        out on its own default branch
    :param force: forwarded to ``git.safe_checkout``
    :returns: True if every checkout succeeded, False otherwise
    """
    ui.info(ui.green, ":: Checkout projects ...")
    errors = list()
    to_checkout = list()
    self.branch = branch
    # Collect the projects that are not already on their default branch,
    # ignoring fixed refs and projects without a default branch
    for candidate in self.git_projects:
        if candidate.default_branch is None or candidate.fixed_ref:
            continue
        wanted = candidate.default_branch.name
        if qisrc.git.Git(candidate.path).get_current_branch() != wanted:
            to_checkout.append(candidate)
    count = len(to_checkout)
    if count == 0:
        ui.info(ui.green, "Nothing to checkout")
        return True
    # Pad the project names so the progress line stays aligned
    width = max(len(p.src) for p in to_checkout)
    for idx, candidate in enumerate(to_checkout):
        # end="\r": overwrite the progress line on each iteration
        ui.info_count(idx, count, ui.bold, "Checkout", ui.reset,
                      ui.blue, candidate.src.ljust(width), end="\r")
        if candidate.default_branch is None:
            continue
        git = qisrc.git.Git(candidate.path)
        ok, err = git.safe_checkout(candidate.default_branch.name,
                                    candidate.default_remote.name,
                                    force=force)
        if not ok:
            errors.append((candidate.src, err))
    if not errors:
        return True
    ui.error("Failed to checkout some projects")
    for (src, error) in errors:
        ui.info(src, ":", error)
    return False
def install(self, dest_dir, *args, **kwargs):
    """ Install the projects and the packages to the dest_dir.

    Packages are installed under ``dest_dir`` joined with the optional
    ``prefix`` keyword; projects receive the remaining kwargs.

    :returns: the list of installed files
    """
    installed = list()
    projects = self.deps_solver.get_dep_projects(self.projects, self.dep_types)
    packages = self.deps_solver.get_dep_packages(self.projects, self.dep_types)
    # Compute the real path where to install the packages:
    prefix = kwargs.get("prefix", "/")
    prefix = prefix[1:]  # strip the leading "/" so os.path.join works
    real_dest = os.path.join(dest_dir, prefix)
    if projects:
        ui.info(ui.green, "The following projects")
        for project in projects:
            ui.info(ui.green, " *", ui.blue, project.name)
    if packages:
        ui.info(ui.green, "and the following packages")
        for package in packages:
            ui.info(" *", ui.blue, package.name)
    ui.info(ui.green, "will be installed to", ui.blue, real_dest)
    runtime_only = self.dep_types == ["runtime"]
    if runtime_only:
        ui.info(ui.green, "(runtime components only)")
    if packages:
        # Fix: was a bare Python-2-only `print` statement (syntax error
        # under Python 3); ui.info() prints the same blank line
        ui.info()
        ui.info(ui.green, ":: ", "Installing packages")
        for i, package in enumerate(packages):
            ui.info_count(i, len(packages), ui.green, "Installing",
                          ui.blue, package.name)
            files = package.install(real_dest, runtime=runtime_only)
            installed.extend(files)
    if projects:
        # Fix: same bare `print` replaced by ui.info()
        ui.info()
        ui.info(ui.green, ":: ", "Installing projects")
        for i, project in enumerate(projects):
            ui.info_count(i, len(projects), ui.green, "Installing",
                          ui.blue, project.name)
            files = project.install(dest_dir, **kwargs)
            installed.extend(files)
    return installed
def do_sync(git_project):
    """ Sync (or reset) one git project and record the outcome.

    Closure meant to run on several projects, possibly concurrently:
    it reads ``reset``, ``args``, ``git_projects``, ``max_src`` and the
    shared ``lock``, ``i`` (one-element counter list), ``skipped`` and
    ``failed`` lists from the enclosing scope.
    """
    if reset:
        (status, out) = git_project.reset()
    else:
        (status, out) = git_project.sync(rebase_devel=args.rebase_devel)
    # All reporting and shared-state mutation happens under the lock,
    # so concurrent workers do not interleave output or corrupt counters
    with lock:
        ui.info_count(i[0], len(git_projects), ui.blue,
                      git_project.src.ljust(max_src))
        # status: None means skipped, False means failed,
        # anything else is treated as success
        if status is None:
            ui.info(git_project.src, ui.brown, " [skipped]")
            skipped.append((git_project.src, out))
        if status is False:
            ui.info(git_project.src, ui.red, " [failed]")
            failed.append((git_project.src, out))
        if out:
            ui.info(ui.indent(out + "\n\n", num=2))
        # i is a list so the closure can mutate the shared counter
        i[0] += 1
def install(self, destdir):
    """ Install the doc projects to a dest dir.

    Each project is configured, built, then installed into its own
    subdirectory (``project.dest``) below ``destdir``.
    """
    to_install = self.get_dep_projects()
    total = len(to_install)
    shared_options = {
        "version": self.version,
        "hosted": self.hosted,
        "build_type": self.build_type,
        "rel_paths": True,
    }
    for idx, doc_project in enumerate(to_install):
        target = os.path.join(destdir, doc_project.dest)
        ui.info_count(idx, total, ui.green, "Installing",
                      ui.blue, doc_project.name,
                      ui.reset, "->", ui.white, target)
        doc_project.configure(**shared_options)
        doc_project.build(**shared_options)
        doc_project.install(target)
def convert_from_conan(package_path, name, version="0.0.1"):
    """ Convert a conan build output directory to a qibuild package.

    :param package_path: directory containing ``conanbuildinfo.json``
    :param name: name of the generated qibuild package
    :param version: version of the generated package
    :returns: path of the generated archive
    """
    assert conan_json_exists(package_path), "{} not found".format(
        os.path.join(package_path, "conanbuildinfo.json"))
    info = load_conan_json(package_path)
    settings = info.get("settings")
    ui.info(ui.white, "Compiled on {} {} with {} version {}".format(
        settings.get("os"), settings.get("arch"),
        settings.get("compiler"), settings.get("compiler.version")))
    ui.info(ui.white, "Compiled in {} with {} ".format(
        settings.get("build_type"), settings.get("compiler.libcxx")))
    # Fix: user-facing message previously misspelled "librairies"
    ui.info("Exposed libraries:")
    # Hoisted: read the dependency list once instead of on every iteration
    dependencies = info.get("dependencies")
    for n, deps in enumerate(dependencies):
        ui.info_count(n, len(dependencies), ui.blue,
                      "{}@{}".format(deps.get("name"), deps.get("version")))
        _generate_conan_share_cmake(package_path, deps)
    if sys.platform == "darwin":
        # macOS dylibs need their install names / rpaths fixed up
        _fix_rpaths(os.path.join(package_path, "lib"))
    _add_conan_package_xml(package_path, name, info, version)
    res = _compress_package(package_path, name, settings, version)
    ui.info(ui.green, "Archive generated in", res)
    return res
def handle_modules(venv_path, python_worktree):
    """ Register the qi modules by writing the .mod file in the correct location.

    Scans every python project for modules and packages flagged as qi
    modules, then writes one ``<name>.mod`` file (containing "python")
    per module under ``<venv_path>/share/qi/module``.
    """
    qimodules = list()
    for project in python_worktree.python_projects:
        qimodules.extend(m for m in project.modules if m.qimodule)
        qimodules.extend(p for p in project.packages if p.qimodule)
    if not qimodules:
        return
    ui.info()
    ui.info(ui.blue, "::", ui.reset, "Registering Python qi modules")
    module_dir = os.path.join(venv_path, "share", "qi", "module")
    total = len(qimodules)
    for idx, qimodule in enumerate(qimodules):
        ui.info_count(idx, total, ui.blue, qimodule.name)
        qisys.sh.mkdir(module_dir, recursive=True)
        mod_file = os.path.join(module_dir, "%s.mod" % qimodule.name)
        with open(mod_file, "w") as fp:
            fp.write("python\n")