def configure_qtcreator(qibuild_cfg):
    """ Configure QtCreator

    Look for a qtcreator executable in the build environment,
    let the user confirm (or type) the path to use, then register
    it as an IDE in the given qibuild configuration.
    """
    ide = qibuild.config.IDE()
    ide.name = "QtCreator"
    env = qibuild.config.get_build_env()
    path = qibuild.command.find_program("qtcreator", env=env)
    if path:
        ui.info(ui.green, "::", ui.reset, "Found QtCreator:", path)
        mess = "Do you want to use qtcreator from %s ?\n" % path
        mess += "Answer 'no' if you installed qtcreator from Nokia's installer"
        # Users who installed QtCreator through Nokia's installer
        # likely want a different binary than the one on $PATH:
        if not qibuild.interact.ask_yes_no(mess, default=True):
            path = None
    else:
        ui.warning("QtCreator not found")
    if not path:
        path = qibuild.interact.ask_program(
            "Please enter full qtcreator path")
    if not path:
        # No path at all: give up, but warn that `qibuild open` degrades
        ui.warning("Not adding config for QtCreator",
                   "qibuild open will not work", sep="\n")
        return
    ide.path = path
    qibuild_cfg.add_ide(ide)
def num_jobs_to_args(num_jobs, cmake_generator):
    """ Convert a number of jobs to a list of cmake args

    >>> num_jobs_to_args(3, "Unix Makefiles")
    ['-j', '3']
    >>> num_jobs_to_args(3, "NMake Makefiles")  # doctest: +SKIP
    Error: -j is not supported for NMake, use Jom
    >>> num_jobs_to_args(3, "Visual Studio")    # doctest: +SKIP
    Warning: -j is ignored for Visual Studio

    :param num_jobs: the number of parallel jobs requested
    :param cmake_generator: name of the cmake generator in use
    :return: a list of extra arguments for the build tool
             (empty when -j is unsupported or unnecessary)
    :raise: Exception when the generator cannot build in
            parallel at all (NMake)
    """
    if num_jobs == 1:
        # One job is the build tool's default: no flag needed
        return list()
    if "Unix Makefiles" in cmake_generator:
        return ["-j", str(num_jobs)]
    if cmake_generator == "NMake Makefiles":
        mess = "-j is not supported for %s\n" % cmake_generator
        mess += "On windows, you can use Jom instead to compile "
        mess += "with multiple processors"
        raise Exception(mess)
    if "Visual Studio" in cmake_generator or \
        cmake_generator == "Xcode" or \
        "JOM" in cmake_generator:
        # These tools parallelize on their own: silently honoring -j
        # would be misleading, so warn and drop it
        ui.warning("-j is ignored when used with", cmake_generator)
        return list()
    ui.warning("cannot parse -j into a cmake option for generator: %s" %
               cmake_generator)
    return list()
def get_deps_tree(self):
    """ Get the tree of dependencies

    It is a dict {type:deps_tree} where type is 'sphinx' or 'doxygen',
    and deps_tree is a dict: {name:[dep names]}

    Dependencies that cannot be resolved are dropped from the
    project's ``depends`` list (with a warning).
    """
    doxy_tree = dict()
    sphinx_tree = dict()
    res = dict()
    for doxydoc in self.doxydocs.values():
        doxy_tree[doxydoc.name] = doxydoc.depends
        # Check that every dep exists.
        # BUG FIX: iterate over a copy — calling list.remove() on the
        # list being iterated silently skips the element following
        # each removed dep.
        for dep in doxydoc.depends[:]:
            if self.get_doc("doxygen", dep) is None:
                mess = "Could not find doxygen doc dep: %s\n" % dep
                mess += "(brought by: %s)" % doxydoc.name
                ui.warning(mess)
                doxydoc.depends.remove(dep)
    for sphinxdoc in self.sphinxdocs.values():
        sphinx_tree[sphinxdoc.name] = sphinxdoc.depends
        # Check that every dep exists (same copy trick as above):
        for dep in sphinxdoc.depends[:]:
            if self.get_doc("sphinx", dep) is None:
                mess = "Could not find sphinx doc dep %s\n" % dep
                mess += "(brought by: %s)" % sphinxdoc.name
                ui.warning(mess)
                sphinxdoc.depends.remove(dep)
    res["doxygen"] = doxy_tree
    res["sphinx"] = sphinx_tree
    return res
def ask_gerrit_username(server, gerrit_ssh_port=29418):
    """ Run a wizard to try to configure gerrit access

    If that fails, ask the user for its username
    If that fails, give up and suggest upload the public key
    """
    ui.info(ui.green, "Configuring gerrit ssh access ...")
    # $USERNAME works on UNIX and git bash:
    username = os.environ.get("USERNAME")
    if not username:
        username = qibuild.interact.ask_string("Please enter your username")
        if not username:
            return
    ui.info("Checking gerrit connection with %s@%s:%i" %
            (username, server, gerrit_ssh_port))
    if check_gerrit_connection(username, server, gerrit_ssh_port):
        ui.info("Success")
        return username
    # First attempt failed: offer one retry with a different name
    ui.warning("Could not connect to ssh using username", username)
    retry = qibuild.interact.ask_yes_no("Do you want to try with an other username ?")
    if not retry:
        return
    username = qibuild.interact.ask_string("Please enter your username ")
    if not username:
        return
    if check_gerrit_connection(username, server, gerrit_ssh_port):
        return username
def do(args):
    """Main entry point

    Update one toolchain (``args.name``, using ``args.feed`` or the
    feed recorded in the configuration), or every known toolchain
    when no name is given.
    """
    feed = args.feed
    tc_name = args.name
    dry_run = args.dry_run
    if tc_name:
        toolchain = qitoolchain.get_toolchain(tc_name)
        if not feed:
            # Fall back to the feed stored during toolchain creation:
            feed = qitoolchain.toolchain.get_tc_feed(tc_name)
        if not feed:
            mess = "Could not find feed for toolchain %s\n" % tc_name
            # Fixed typos in user-facing message ("Pleas", "specifiy"):
            mess += "Please check configuration or specify a feed on the command line\n"
            raise Exception(mess)
        ui.info(ui.green, "Updating toolchain", tc_name, "with", feed)
        toolchain.parse_feed(feed, dry_run=dry_run)
    else:
        # No name given: update every toolchain that has a known feed
        tc_names = qitoolchain.get_tc_names()
        for i, tc_name in enumerate(tc_names, start=1):
            tc_feed = qitoolchain.toolchain.get_tc_feed(tc_name)
            ui.info(ui.green, "*", ui.reset,
                    "(%i/%i)" % (i, len(tc_names)),
                    ui.green, "Updating",
                    ui.blue, tc_name)
            if not tc_feed:
                ui.warning("No feed found for %s, skipping" % tc_name)
                continue
            ui.info(ui.green, "Reading", tc_feed)
            toolchain = qitoolchain.Toolchain(tc_name)
            toolchain.parse_feed(tc_feed, dry_run=dry_run)
def clone_project(worktree, url, src=None, branch=None, remote="origin",
                  skip_if_exists=False):
    """ Add a project to a worktree given its url.

    If src is not given, it will be guessed from the url

    If skip_if_exists is False, an error message will be
    raised if the project already exists
    """
    should_add = True
    if not src:
        src = url.split("/")[-1].replace(".git", "")
    if os.path.isabs(src):
        # BUG FIX: os.path.relpath(path, start) — the original called
        # relpath(worktree.root, src), which computes the worktree root
        # relative to src instead of src relative to the worktree root.
        src = os.path.relpath(src, worktree.root)
        src = qibuild.sh.to_posix_path(src)

    project = worktree.get_project(src, raises=False)
    if project:
        if not skip_if_exists:
            mess = "Could not add project from %s in %s\n" % (url, src)
            mess += "This path is already registered for worktree in %s\n" % worktree.root
            raise Exception(mess)
        else:
            if os.path.exists(project.path):
                ui.debug("Found project in %s, skipping" % src)
                return
            # Someone erased the project manually without telling qiworktree:
            # re-clone it, but do not register it twice
            should_add = False

    path = os.path.join(worktree.root, src)
    path = qibuild.sh.to_native_path(path)
    if os.path.exists(path):
        if skip_if_exists:
            if qisrc.git.is_submodule(path):
                # A broken submodule: wipe it and fall through to clone
                ui.warning("erasing submodule: ", path)
                qibuild.sh.rm(path)
            else:
                ui.debug("Adding project in %s", src)
                worktree.add_project(src)
                return
        else:
            mess = "Could not add project from %s in %s\n" % (url, src)
            mess += "This path already exists\n"
            raise Exception(mess)

    ui.info(ui.green, "Git clone: %s -> %s" % (url, path))
    dirname = os.path.dirname(path)
    qibuild.sh.mkdir(dirname, recursive=True)
    git = qisrc.git.Git(path)
    if branch:
        git.clone(url, "-b", branch, "-o", remote)
    else:
        git.clone(url, "-o", remote)
    if should_add:
        worktree.add_project(path)
def warn_gerrit():
    """Warn the user that the gerrit connection could not be
    configured and that ``qisrc push`` will not work until
    ``qisrc sync -a`` is re-run.
    """
    mess = """Failed to configure gerrit connection
`qisrc push` won't work
When you have resolved this problem, just re-run ``qisrc sync -a``"""
    ui.warning(mess)
def main():
    """Demonstrate every output level and color of the ui module."""
    # (emitter, args) pairs, replayed in order:
    demo = [
        (ui.info, (ui.red, "This is a an error message\n",
                   ui.reset, "And here are the details")),
        (ui.error, ("could not build",)),
        (ui.warning, ("-j ignored for this generator",)),
        (ui.info, ("building foo",)),
        (ui.debug, ("debug message",)),
        (ui.info, (ui.brown, "this is brown")),
        (ui.info, (ui.bold, ui.brown, "this is bold brown")),
        (ui.info, (ui.red, "red is dead")),
        (ui.info, (ui.darkred, "darkred is really dead")),
        (ui.info, (ui.yellow, "this is yellow")),
    ]
    for emit, args in demo:
        emit(*args)
def do(args):
    """Main entry point

    (Re-)initialize a qibuild worktree, refusing nested worktrees
    and refusing re-initialization unless --force is given.
    """
    worktree = args.worktree
    if not worktree:
        worktree = os.getcwd()
    if args.config:
        # Just make sure the user chose a valid default toolchain
        qitoolchain.get_toolchain(args.config)
    worktree = qibuild.sh.to_native_path(worktree)
    parent_worktree = qisrc.worktree.guess_worktree(worktree)
    if parent_worktree:
        if parent_worktree != worktree:
            # Refuse to create nested worktrees
            ui.error("""A qi worktree already exists in {parent_worktree}
Refusing to create a nested worktree in {worktree}
Use:
    qibuild init -f -w {parent_worktree}
If you want to re-initialize the worktree in {parent_worktree}
""".format(worktree=worktree, parent_worktree=parent_worktree))
            sys.exit(1)
        else:
            # Refuse to re-initialize the worktree unless "-f" is given
            if not args.force:
                ui.warning("There is already a worktree in", worktree,
                           "\n" "Use --force if you want to re-initialize this worktree")
                return
    qibuild.toc.create(worktree, force=args.force)
    # User may be re-running qibuild init because the worktree has a
    # bad default config; opening the toc may then raise, and that is
    # fine — we only re-write the config below.
    # (Removed a dead `toc = ` binding: the value was never used.)
    try:
        qibuild.toc.toc_open(worktree)
    except qibuild.toc.WrongDefaultException:
        pass
    toc_cfg_path = os.path.join(worktree, ".qi", "qibuild.xml")
    qibuild_cfg = qibuild.config.QiBuildConfig()
    qibuild_cfg.read()
    qibuild_cfg.read_local_config(toc_cfg_path)
    qibuild_cfg.local.defaults.config = args.config
    qibuild_cfg.write_local_config(toc_cfg_path)
    # Safe to be called now that we've created it
    # and that we know we don't have a wrong default config:
    toc = qibuild.toc.toc_open(worktree)
    if not args.interactive:
        return
    qibuild.wizard.run_config_wizard(toc)
def init_worktree(worktree, manifest_location, setup_review=True):
    """ (Re-)initialize a worktree given a manifest location.

    Clone any missing repository, set the correct remote and
    tracking branch on every repository.

    :param setup_review: Also set up the projects for review
    """
    # NOTE(review): `errors` is never appended to or read below —
    # looks like a leftover; confirm before removing.
    errors = list()
    manifest = qisrc.manifest.load(manifest_location)
    if not manifest.projects:
        return
    project_count = len(manifest.projects)
    ui.info(ui.green, "Initializing worktree ...")
    setup_ok = True
    for i, project in enumerate(manifest.projects):
        # Progress line: "(  1/ 12) project-name"
        ui.info(
            ui.green, "*", ui.reset,
            "(%2i/%2i)" % (i+1, project_count),
            ui.blue, project.name)
        # Use the same branch for the project as the branch
        # for the manifest, unless explicitly set:
        p_revision = project.revision
        p_url = project.fetch_url
        p_remote = project.remote
        p_src = project.path
        clone_project(worktree, p_url, src=p_src, branch=p_revision,
                      remote=p_remote, skip_if_exists=True)
        wt_project = worktree.get_project(p_src)
        p_path = wt_project.path
        if project.review and setup_review and setup_ok:
            worktree.set_project_review(p_src, True)
            # If setup failed once, no point in trying for every project
            setup_ok = qisrc.review.setup_project(p_path, project.name,
                                                  project.review_url, p_revision)
        # Make sure remote and tracking branch match the manifest:
        git = qisrc.git.Git(p_path)
        git.set_remote(p_remote, p_url)
        git.set_tracking_branch(p_revision, p_remote)
        cur_branch = git.get_current_branch()
        if cur_branch != p_revision:
            # Warn (but do not switch branches) when the checkout
            # is not on the expected revision:
            if not cur_branch:
                ui.warning("Project", project.name, "is on a detached HEAD",
                           "but should be on", p_revision)
            else:
                ui.warning("Project", project.name, "is on", cur_branch,
                           "but should be on", p_revision)
        worktree.set_git_project_config(p_src, p_remote, p_revision)
    if not setup_ok:
        # At least one review setup failed: tell the user how to recover
        qisrc.review.warn_gerrit()
def get_editor():
    """Return the editor to use.

    Checks $VISUAL first, then $EDITOR, and finally asks the user
    (the answer is stored so that we never ask again).
    """
    for var in ("VISUAL", "EDITOR"):
        editor = os.environ.get(var)
        if editor:
            return editor
    # Ask the user to choose, and store the answer so
    # that we never ask again
    ui.warning("Could not find the editor to use.")
    return qibuild.interact.ask_program("Please enter an editor")
def ask_yes_no(question, default=False):
    """Ask the user to answer by yes or no

    :param question: the question to display
    :param default: returned when the user just presses Enter;
                    also selects the (Y/n) vs (y/N) hint
    :return: True for yes, False for no
    """
    while True:
        if default:
            ui.info(ui.green, "::", ui.reset, question, "(Y/n)?")
        else:
            ui.info(ui.green, "::", ui.reset, question, "(y/N)?")
        answer = read_input()
        if not answer:
            return default
        # Accept any capitalization — the original accepted "Yes" but
        # rejected "Y"/"YES", which was inconsistent:
        if answer.lower() in ("y", "yes"):
            return True
        if answer.lower() in ("n", "no"):
            return False
        # Fixed typo in user-facing message ("anwser"):
        ui.warning("Please answer by 'yes' or 'no'")
def configure(src, dest, templates, intersphinx_mapping, doxylink, opts): """ Configure a sphinx repo The sphix repo MUST have a src/source/ directory (NOT the default of sphinx-quickstart) The conf.py will be put in src/qidoc/conf.py concatening the contents of the templates with the conf.py from src/qidoc/conf.in.py, so that you can put src/source/conf.py under version control """ # Rebuild a doxylink dict with relative paths rel_doxylink = doxylink.copy() for (name, (tag_file, prefix)) in rel_doxylink.iteritems(): full_prefix = os.path.join(dest, prefix) rel_prefix = os.path.relpath(full_prefix, dest) rel_doxylink[name] = (tag_file, rel_prefix) # Deal with conf.py conf_py_tmpl = os.path.join(templates, "sphinx", "conf.in.py") conf_py_in = os.path.join(src, "qidoc", "conf.in.py") if not os.path.exists(conf_py_in): mess = "Could not configure sphinx sources in:%s \n" % src mess += "qidoc/conf.in.py does not exists" ui.warning(mess) return opts["doxylink"] = str(rel_doxylink) opts["intersphinx_mapping"] = str(intersphinx_mapping) opts["themes_path"] = os.path.join(templates, "sphinx", "_themes") opts["themes_path"] = qibuild.sh.to_posix_path(opts["themes_path"]) opts["ext_path"] = os.path.join(templates, "sphinx", "tools") opts["ext_path"] = qibuild.sh.to_posix_path(opts["ext_path"]) conf_py_out = os.path.join(src, "qidoc", "conf.py") qidoc.templates.configure_file(conf_py_tmpl, conf_py_out, append_file=conf_py_in, opts=opts)
def is_submodule(path):
    """ Tell if the given path is a submodule """
    repo_root = get_repo_root(path)
    if not repo_root:
        # Not inside a git repo at all
        return False
    git = Git(repo_root)
    (retcode, out) = git.call("submodule", raises=False)
    if retcode != 0:
        ui.warning("git submodules configuration is broken for",
                   repo_root, "!",
                   "\nError was: ", ui.reset, "\n", " " + out)
        # clone_project will just erase it and create a git repo instead
        return True
    if not out:
        # No submodules registered at all
        return False
    submodules = [line.split()[-1] for line in out.splitlines()]
    rel_path = os.path.relpath(path, repo_root)
    return rel_path in submodules
def check_root_cmake_list(cmake_list_file, project_name):
    """Check that the root CMakeLists.txt is correct.

    Those checks are necessary for cross-compilation to work well,
    among other things.
    """
    # A call to project() is necessary for `cmake --build`
    # to work when used with the Visual Studio generator.
    with open(cmake_list_file, "r") as fp:
        lines = fp.readlines()

    project_re = re.compile(r'^\s*project\s*\(', re.IGNORECASE)
    include_re = re.compile(r'^\s*include\s*\(.*qibuild\.cmake.*\)',
                            re.IGNORECASE)
    project_line_number = None
    include_line_number = None
    # Keep the LAST matching line for each, like the original scan:
    for (i, line) in enumerate(lines):
        if project_re.match(line):
            project_line_number = i
        if include_re.match(line):
            include_line_number = i

    if project_line_number is None:
        mess = """Incorrect CMakeLists file detected !

Missing call to project().
Please fix this by editing {cmake_list_file}
so that it looks like

cmake_minimum_required(VERSION 2.8)
project({project_name})
find_package(qibuild)

""".format(
            cmake_list_file=cmake_list_file,
            project_name=project_name)
        ui.warning(mess)
        return

    if include_line_number is None:
        # Using qibuild command line, but not the qiBuild framework:
        # -> nothing to do ;)
        return

    if project_line_number > include_line_number:
        mess = """Incorrect CMakeLists file detected !

The call to include(qibuild.cmake) should be AFTER the call to project()
Please exchange the following lines:

{cmake_list_file}:{include_line_number} {include_line}
{cmake_list_file}:{project_line_number} {project_line}

""".format(
            cmake_list_file=cmake_list_file,
            include_line_number=include_line_number,
            project_line_number=project_line_number,
            include_line=lines[include_line_number],
            project_line=lines[project_line_number])
        ui.warning(mess)
def run_tests(project, build_env, pattern=None, verbose=False, slow=False,
              dry_run=False, valgrind=False):
    """ Run the project's CTest tests.

    Called by :py:meth:`qibuild.toc.Toc.test_project`

    :param pattern: if given, only run the tests whose name matches
                    this regular expression
    :param slow: also run tests whose COST property is above 50
    :param dry_run: only list the tests, do not run anything
    :param valgrind: run each test under valgrind

    Always write some XML files in build-<config>/test-results
    (even if there were no tests to run at all)

    :return: True when every test passed, False on any failure,
             None when nothing was run (no tests found, or dry run)
    """
    build_dir = project.build_directory
    results_dir = os.path.join(project.build_directory, "test-results")
    all_tests = parse_ctest_test_files(build_dir)
    tests = list()
    slow_tests = list()
    if pattern:
        # Each entry of all_tests is (name, cmd, properties):
        tests = [x for x in all_tests if re.search(pattern, x[0])]
        if not tests:
            mess = "No tests matching %s\n" % pattern
            mess += "Known tests are:\n"
            for x in all_tests:
                mess += " * " + x[0] + "\n"
            raise Exception(mess)
    else:
        for test in all_tests:
            (name, cmd_, properties) = test
            cost = properties.get("COST")
            # Unless --slow was given, skip tests with COST > 50:
            if not slow and cost and float(cost) > 50:
                ui.debug("Skipping test", name, "because cost",
                         "(%s)"% cost, "is greater than 50")
                slow_tests.append(name)
                continue
            tests.append(test)

    if not tests:
        # Create a fake test result to keep CI jobs happy:
        fake_test_res = TestResult("compilation")
        fake_test_res.ok = True
        xml_out = os.path.join(results_dir, "compilation.xml")
        write_xml(xml_out, fake_test_res)
        ui.warning("No tests found for project", project.name)
        return

    if dry_run:
        ui.info(ui.green, "List of tests for", project.name)
        for (test_name, _, _) in tests:
            ui.info(ui.green, " * ", ui.reset, test_name)
        return

    ui.info(ui.green, "Testing", project.name, "...")
    ok = True
    fail_tests = list()
    for (i, test) in enumerate(tests):
        (test_name, cmd, properties) = test
        # Progress line printed without a newline (end="") so the
        # [OK]/[FAIL] marker lands on the same line:
        ui.info(ui.green, " * ", ui.reset, ui.bold,
                "(%2i/%2i)" % (i+1, len(tests)),
                ui.blue, test_name.ljust(25), end="")
        if verbose:
            print
        sys.stdout.flush()
        test_res = run_test(build_dir, test_name, cmd, properties,
                            build_env, valgrind=valgrind, verbose=verbose)
        if test_res.ok:
            ui.info(ui.green, "[OK]")
        else:
            ok = False
            ui.info(ui.red, "[FAIL]")
            if not verbose:
                # Verbose mode already streamed the output live:
                print test_res.out
            fail_tests.append(test_name)
        # One XML result file per test (do not overwrite an existing one):
        xml_out = os.path.join(results_dir, test_name + ".xml")
        if not os.path.exists(xml_out):
            write_xml(xml_out, test_res)

    if ok:
        ui.info("Ran %i tests" % len(tests))
        if slow_tests and not slow:
            ui.info("Note: %i" % len(slow_tests),
                    "slow tests did not run, use --slow to run them")
        ui.info("All pass. Congrats!")
        return True

    ui.error("Ran %i tests, %i failures" % (len(tests), len(fail_tests)))
    for fail_test in fail_tests:
        ui.info(ui.bold, " -", ui.blue, fail_test)
    return False
if tc_name == "system": raise Exception("'system' is a reserved name") toc = None if args.default: try: toc = qibuild.toc.toc_open(args.worktree) except qibuild.toc.TocException, e: mess = "You need to be in a valid toc worktree to use --default\n" mess += "Exception was:\n" mess += str(e) raise Exception(mess) if tc_name in qitoolchain.get_tc_names(): ui.warning(tc_name, "already exists,", "removing previous toolchain and creating a new one") toolchain = qitoolchain.Toolchain(tc_name) toolchain.remove() toolchain = qitoolchain.Toolchain(tc_name) if feed: ui.info(ui.green, "Updating toolchain", tc_name, "with feed:", feed) toolchain.parse_feed(feed, dry_run=dry_run) if args.default: toc.config.set_default_config(tc_name) toc.save_config() ui.info("Now using toolchain", ui.blue, tc_name, ui.reset, "by default") else: ui.info(ui.green, "Now try using", "\n" " qibuild configure -c", ui.blue, tc_name, ui.green, "\n"
def do(args):
    """Main entry point

    Deploy the projects of the worktree (and their package
    dependencies, unless --single) to a remote url, using
    rsync when available and scp otherwise.
    """
    url = args.url
    (username, server, remote_directory) = qibuild.deploy.parse_url(url)
    toc = qibuild.toc_open(args.worktree, args)
    ui.info(ui.green, "Current worktree:", ui.reset, ui.bold,
            toc.worktree.root)
    if toc.active_config:
        ui.info(ui.green, "Active configuration: ",
                ui.blue, "%s (%s)" % (toc.active_config, toc.build_type))
    # Prefer rsync; fall back to scp with a warning:
    rsync = qibuild.command.find_program("rsync", env=toc.build_env)
    use_rsync = False
    if rsync:
        use_rsync = True
    else:
        ui.warning("Please install rsync to get faster synchronisation")
        scp = qibuild.command.find_program("scp", env=toc.build_env)
        if not scp:
            raise Exception("Could not find rsync or scp")

    # Resolve deps:
    (project_names, package_names, _) = toc.resolve_deps(runtime=True)
    projects = [toc.get_project(name) for name in project_names]

    # Announce what is about to be deployed:
    if not args.single:
        ui.info(ui.green, "The following projects")
        for project_name in project_names:
            ui.info(ui.green, " *", ui.blue, project_name)
    if not args.single and package_names:
        ui.info(ui.green, "and the following packages")
        for package_name in package_names:
            ui.info(" *", ui.blue, package_name)
    ui.info(ui.green, "will be deployed to", ui.blue, url)

    # Deploy packages: install all of them in the same temp dir, then
    # deploy this temp dir to the target
    if not args.single and package_names:
        print
        ui.info(ui.green, ":: ", "Deploying packages")
        with qibuild.sh.TempDir() as tmp:
            for (i, package_name) in enumerate(package_names):
                ui.info(ui.green, "*", ui.reset,
                        "(%i/%i)" % (i+1, len(package_names)),
                        ui.green, "Deploying package", ui.blue, package_name,
                        ui.green, "to", ui.blue, url)
                toc.toolchain.install_package(package_name, tmp, runtime=True)
            qibuild.deploy.deploy(tmp, args.url, use_rsync=use_rsync,
                                  port=args.port)
    print
    if not args.single:
        ui.info(ui.green, ":: ", "Deploying projects")
    # Deploy projects: install them inside a 'deploy' dir inside the
    # build dir, then deploy this dir to the target
    for (i, project) in enumerate(projects):
        ui.info(ui.green, "*", ui.reset,
                "(%i/%i)" % (i+1, len(projects)),
                ui.green, "Deploying project", ui.blue, project.name,
                ui.green, "to", ui.blue, url)
        destdir = os.path.join(project.build_directory, "deploy")
        # create folder for project without install rules
        qibuild.sh.mkdir(destdir, recursive=True)
        toc.install_project(project, destdir, prefix="/", runtime=True,
                            num_jobs=args.num_jobs, split_debug=True)
        qibuild.deploy.deploy(destdir, args.url, use_rsync=use_rsync,
                              port=args.port)
        # Also generate gdb helper scripts pointing at the target:
        qibuild.deploy.generate_debug_scripts(toc, project.name, args.url)
error += "Error was: %s" % e errors.append(error) continue finally: if fp: fp.close() if package_url: print "Would add ", package_name, "from", package_url continue else: handle_package(package, package_tree, toolchain) if package.path is None: mess = "could guess package path from this configuration:\n" mess += ElementTree.tostring(package_tree) mess += "Please make sure you have at least an url or a directory\n" ui.warning(mess) continue if not dry_run: toolchain.add_package(package) if dry_run and errors: print "Errors when parsing %s\n" % feed for error in errors: print error sys.exit(2) # Finally, if the feed contains a cmake_generator, # add it to the qibuild config if toolchain.cmake_generator: config = qibuild.config.Config() config.name = toolchain.name