def get_repo_manifests(repo_folder, manifest='package'):
    """Locate manifest files under *repo_folder*.

    :param repo_folder: directory tree to scan
    :param manifest: 'package' (default) or 'stack', selecting whether
        rospkg looks for package manifests or stack manifests
    :returns: dict mapping names to locations, as filled in by
        ``rospkg.list_by_path``
    """
    append_pymodules_if_needed()
    import rospkg
    # Choose which manifest file name rospkg should search for.
    manifest_type = rospkg.STACK_FILE if manifest == 'stack' else rospkg.MANIFEST_FILE
    found = {}
    rospkg.list_by_path(manifest_type, repo_folder, found)
    return found
def document_necessary(workspace, docspace, ros_distro, repo,
                       rosdoc_lite_version, jenkins_scripts_version,
                       force_doc=False):
    """Check whether documentation needs to be regenerated for *repo*.

    Installs the repository and compares the recorded revision hashes of
    the repo (and its depends list), plus the rosdoc_lite/jenkins_scripts
    versions used last time, against the current state.

    :param workspace: Jenkins workspace directory
    :param docspace: directory the repositories are checked out into
    :param ros_distro: name of the ROS distribution
    :param repo: name of the repository being documented
    :param rosdoc_lite_version: current rosdoc_lite revision hash
    :param jenkins_scripts_version: current jenkins_scripts revision hash
    :param force_doc: when True, always report that documentation is needed
    :returns: False when documentation can be skipped, otherwise a dict
        with 'doc_conf', 'depends_conf' and 'tags_db' entries
    """
    append_pymodules_if_needed()
    print("Working on distro %s and repo %s" % (ros_distro, repo))

    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    #Install the repository
    install_repo(docspace, workspace, repo, doc_conf, depends_conf)

    #Load information about existing tags
    tags_db = TagsDb(ros_distro, workspace)

    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = force_doc
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print("REPO HASHES: %s" % repo_hashes)

    #BUGFIX: the skip branch must trigger only when there were *no* changes.
    #The previous condition tested `changes`, which inverted the intent and
    #skipped documentation exactly when new revisions had been detected
    #(the sibling variants of this function all use `not changes`).
    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print("There were no changes to any of the repositories we document. Not running documentation.")
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()
        return False

    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {'doc_conf': doc_conf, 'depends_conf': depends_conf, 'tags_db': tags_db}
def get_repo_packages(repo_folder):
    """Return a dict mapping catkin (wet) package names to absolute paths.

    :param repo_folder: directory tree to scan for catkin packages
    """
    append_pymodules_if_needed()
    from catkin_pkg import packages as catkin_packages
    # Absolute paths of every wet package, de-duplicated via a set.
    unique_paths = {os.path.abspath(os.path.join(repo_folder, rel_path))
                    for rel_path in catkin_packages.find_package_paths(repo_folder)}
    # Parse each package.xml to obtain the canonical package name.
    return {catkin_packages.parse_package(path).name: path
            for path in unique_paths}
def document_necessary(workspace, docspace, ros_distro, repo,
                       rosdoc_lite_version, jenkins_scripts_version,
                       force_doc=False):
    """Check whether documentation needs to be regenerated for *repo*.

    Installs the repository and compares the recorded revision hashes of
    the repo (and its depends list), plus the rosdoc_lite/jenkins_scripts
    versions used last time, against the current state.

    :param workspace: Jenkins workspace directory
    :param docspace: directory the repositories are checked out into
    :param ros_distro: name of the ROS distribution
    :param repo: name of the repository being documented
    :param rosdoc_lite_version: current rosdoc_lite revision hash
    :param jenkins_scripts_version: current jenkins_scripts revision hash
    :param force_doc: when True, always report that documentation is needed
    :returns: False when documentation can be skipped, otherwise a dict
        with 'doc_conf', 'depends_conf' and 'tags_db' entries
    """
    append_pymodules_if_needed()
    print("Working on distro %s and repo %s" % (ros_distro, repo))

    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)

    #Install the repository
    install_repo(docspace, workspace, repo, doc_conf, depends_conf)

    #Load information about existing tags
    tags_db = TagsDb(ros_distro, workspace)

    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = force_doc
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes

    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print("REPO HASHES: %s" % repo_hashes)

    #BUGFIX: the skip branch must trigger only when there were *no* changes.
    #The previous condition tested `changes`, which inverted the intent and
    #skipped documentation exactly when new revisions had been detected
    #(the sibling variants of this function all use `not changes`).
    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print("There were no changes to any of the repositories we document. Not running documentation.")
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()
        return False

    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {'doc_conf': doc_conf, 'depends_conf': depends_conf, 'tags_db': tags_db}
def document_necessary(workspace, docspace, ros_distro, repo, rosdoc_lite_version, jenkins_scripts_version, force_doc=False):
    """Decide whether documentation must be regenerated for *repo*.

    Checks out the repository plus its dependencies, then compares the
    recorded revision hashes (and the rosdoc_lite/jenkins_scripts versions
    used last time) against the current state.

    :returns: False when nothing changed (after uploading empty 'stamp'
        marker files so the hosted docs are marked up-to-date), otherwise
        a dict with keys 'doc_conf', 'depends_conf' and 'tags_db' for the
        caller to run the actual documentation step with.
    """
    append_pymodules_if_needed()
    print("Working on distro %s and repo %s" % (ros_distro, repo))
    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)
    #Install the repository
    try:
        install_repo(docspace, workspace, repo, doc_conf, depends_conf)
    except BuildException:
        # checkout failed, try to get default branches of repos to notify the maintainers
        print('Failed to checkout repositories, trying to checkout default branches to collect maintainer information for notification about failure')
        # NOTE(review): `tuple` shadows the builtin and `repo` shadows the
        # function parameter; after the loop `repo` is the last repo dict,
        # which is what gets passed to install_repo below — confirm this is
        # the intended behaviour (the exception is re-raised regardless).
        for tuple in doc_conf:
            for repo in tuple.values():
                repo['version'] = None
        install_repo(docspace, workspace, repo, doc_conf, [])
        extract_notification_recipients(docspace, doc_conf)
        raise
    extract_notification_recipients(docspace, doc_conf)
    #Load information about existing tags
    jenkins_scripts_path = os.path.join(workspace, 'jenkins_scripts')
    if not os.path.exists(jenkins_scripts_path):
        # if jenkins_scripts has not been checked out in the workspace
        # expect that the user call doc from within a jenkins_scripts checkout
        jenkins_scripts_path = os.getcwd()
    rosdoc_tag_index_path = os.path.join(workspace, 'rosdoc_tag_index')
    tags_db = TagsDb(ros_distro, jenkins_scripts_path, rosdoc_tag_index_path)
    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = False or force_doc
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes
    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print("REPO HASHES: %s" % repo_hashes)
    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print("There were no changes to any of the repositories we document. Not running documentation.")
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()
        # create marker files for all packages an upload them
        doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
        if os.path.exists(doc_path):
            shutil.rmtree(doc_path)
        repo_path = os.path.realpath("%s" % (docspace))
        stacks, manifest_packages, catkin_packages, _ = build_repo_structure(repo_path, doc_conf, depends_conf)
        # NOTE(review): concatenating `.keys()` results requires Python 2
        # (dict views cannot be added with `+` in Python 3) — confirm the
        # target interpreter before porting.
        folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
        if folders:
            dsts = ['%s/api/%s/stamp' % (doc_path, f) for f in folders]
            for dst in dsts:
                os.makedirs(os.path.dirname(dst))
                # touch an empty 'stamp' file for each package/stack folder
                with open(dst, 'w'):
                    pass
            print("Uploading marker files to identify that documentation is up-to-date.")
            command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s/api/ [email protected]:/home/rosbot/docs/%s/api' % (doc_path, ros_distro)]
            call_with_list(command)
        return False
    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {'doc_conf': doc_conf, 'depends_conf': depends_conf, 'tags_db': tags_db}
def document_repo(workspace, docspace, ros_distro, repo, platform, arch, homepage, rosdoc_lite_version, jenkins_scripts_version):
    """Build and upload documentation for *repo* in *ros_distro*.

    Checks out the repository and its depends list, builds messages in
    dependency order, runs rosdoc_lite on every package, rsyncs the result
    to the docs server, updates the tags database, and writes Jenkins test
    results (a failure result when message generation had errors).
    NOTE: Python 2 only (uses `print` statements and `dict.iteritems`).
    """
    append_pymodules_if_needed()
    doc_job = "doc-%s-%s" % (ros_distro, repo)
    print "Working on distro %s and repo %s" % (ros_distro, repo)
    #Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)
    #Get the list of repositories that should have documentation run on them
    #These are all of the repos that are not in the depends rosinsall file
    repos_to_doc = get_repositories_from_rosinstall(doc_conf)
    #Install the repository
    install_repo(docspace, workspace, repo, doc_conf, depends_conf)
    repo_path = os.path.realpath("%s" % (docspace))
    print "Repo path %s" % repo_path
    #Walk through the installed repositories and find old-style packages, new-stye packages, and stacks
    stacks, manifest_packages, catkin_packages, repo_map = build_repo_structure(repo_path, doc_conf, depends_conf)
    print "Running documentation generation on\npackages: %s" % (manifest_packages.keys() + catkin_packages.keys())
    print "Catkin packages: %s" % catkin_packages
    print "Manifest packages: %s" % manifest_packages
    print "Stacks: %s" % stacks
    #Load information about existing tags
    tags_db = TagsDb(ros_distro, workspace)
    #Check to see if we need to document this repo list by checking if any of
    #the repositories revision numbers/hashes have changed
    changes = False
    for conf in [('%s' % repo, doc_conf), ('%s_depends' % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes
    #We also want to make sure that we run documentation generation anytime
    #jenkins_scripts or rosdoc_lite has changed since the last time this job was
    #run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get('rosdoc_lite-sys', None)
    old_jenkins_scripts_hash = repo_hashes.get('jenkins_scripts-sys', None)
    print "REPO HASHES: %s" % repo_hashes
    if not changes and old_rosdoc_lite_hash == rosdoc_lite_version and old_jenkins_scripts_hash == jenkins_scripts_version:
        print "There were no changes to any of the repositories we document. Not running documentation."
        copy_test_results(workspace, docspace)
        return
    #Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes['rosdoc_lite-sys'] = rosdoc_lite_version
    repo_hashes['jenkins_scripts-sys'] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    #Get any non local apt dependencies
    ros_dep = RosDepResolver(ros_distro)
    import rosdistro
    # electric predates the shadow repository, everything newer uses it
    if ros_distro == 'electric':
        apt = rosdistro.AptDistro(platform, arch, shadow=False)
    else:
        apt = rosdistro.AptDistro(platform, arch, shadow=True)
    apt_deps = get_apt_deps(apt, ros_dep, ros_distro, catkin_packages, stacks, manifest_packages)
    print "Apt dependencies: %s" % apt_deps
    #Build a local dependency graph to be used for build order
    local_dep_graph = build_local_dependency_graph(catkin_packages, manifest_packages)
    #Write stack manifest files for all stacks, we can just do this off the
    #stack.xml files
    write_stack_manifests(stacks, docspace, ros_distro, repo_map, tags_db, doc_job, homepage)
    #Need to make sure to re-order packages to be run in dependency order
    build_order = get_dependency_build_order(local_dep_graph)
    print "Build order that honors deps:\n%s" % build_order
    #We'll need the full list of apt_deps to get tag files
    full_apt_deps = get_full_apt_deps(apt_deps, apt)
    print "Installing all dependencies for %s" % repo
    if apt_deps:
        call("apt-get install %s --yes" % (' '.join(apt_deps)))
    print "Done installing dependencies"
    #Set up the list of things that need to be sourced to run rosdoc_lite
    #TODO: Hack for electric
    if ros_distro == 'electric':
        #lucid doesn't have /usr/local on the path by default... weird
        sources = ['export PATH=/usr/local/sbin:/usr/local/bin:$PATH']
        # NOTE(review): electric docs are generated from a fuerte environment
        # with the electric stacks on the package path — confirm intended.
        sources.append('source /opt/ros/fuerte/setup.bash')
        sources.append('export ROS_PACKAGE_PATH=/opt/ros/electric/stacks:$ROS_PACKAGE_PATH')
    else:
        sources = ['source /opt/ros/%s/setup.bash' % ros_distro]
    #We assume that there will be no build errors to start
    build_errors = []
    #Everything that is after fuerte supports catkin workspaces, so everything
    #that has packages with package.xml files
    if catkin_packages \
       and not 'rosdoc_lite' in catkin_packages.keys() and not 'catkin' in catkin_packages.keys():
        source, errs = build_repo_messages(catkin_packages, docspace, ros_distro)
        build_errors.extend(errs)
        if source:
            sources.append(source)
    #For all our manifest packages (dry or fuerte catkin) we want to build
    #messages. Note, for fuerte catkin the messages arent' generated, TODO
    #to come back and fix this if necessary
    source, errs = build_repo_messages_manifest(manifest_packages, build_order, ros_distro)
    build_errors.extend(errs)
    sources.append(source)
    repo_tags = document_packages(manifest_packages, catkin_packages, build_order, repos_to_doc, sources, tags_db, full_apt_deps, ros_dep, repo_map, repo_path, docspace, ros_distro, homepage, doc_job)
    doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
    #Copy the files to the appropriate place
    #call("rsync -e \"ssh -o StrictHostKeyChecking=no\" -qr %s rosbuild@wgs32:/var/www/www.ros.org/html/rosdoclite" % (doc_path))
    command = ['bash', '-c', 'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s rosbuild@wgs32:/var/www/www.ros.org/html/rosdoclite' % doc_path]
    call_with_list(command)
    #Remove the autogenerated doc files since they take up a lot of space if left on the server
    shutil.rmtree(doc_path)
    #Write the new tags to the database if there are any to write
    for name, tags in repo_tags.iteritems():
        #Get the apt name of the current stack/repo
        if ros_dep.has_ros(name):
            deb_name = ros_dep.to_apt(name)[0]
        else:
            deb_name = "ros-%s-%s" % (ros_distro, name.replace('_', '-'))
        #We only want to write tags for packages that have a valid deb name
        #For others, the only way to get cross referencing is to document everything
        #together with a rosinstall file
        if apt.has_package(deb_name):
            tags_db.set_tags(deb_name, tags)
    #Make sure to write changes to tag files and deps
    #We don't want to write hashes on an unsuccessful build
    excludes = ['rosinstall_hashes'] if build_errors else []
    tags_db.commit_db(excludes)
    #Tell jenkins that we've succeeded
    print "Preparing xml test results"
    try:
        os.makedirs(os.path.join(workspace, 'test_results'))
        print "Created test results directory"
    except:
        # NOTE(review): bare except deliberately ignores a pre-existing
        # directory (best-effort), but it also hides any other OS error.
        pass
    if build_errors:
        copy_test_results(workspace, docspace, """Failed to generate messages by calling cmake for %s. Look in the console for cmake failures, search for "CMake Error" Also, are you sure that the rosinstall files are pulling from the right branch for %s? Check the repos below, you can update information the %s.rosinstall and %s-depends.rosinstall files by submitting a pull request at https://github.com/ros/rosdistro/tree/master/doc/%s Documentation rosinstall:\n%s Depends rosinstall:\n%s""" % (build_errors, ros_distro, repo, repo, ros_distro, yaml.safe_dump(doc_conf, default_flow_style=False), yaml.safe_dump(depends_conf, default_flow_style=False)), "message_generation_failure")
    else:
        copy_test_results(workspace, docspace)
def document_necessary(
    workspace, docspace, ros_distro, repo, rosdoc_lite_version, jenkins_scripts_version, force_doc=False
):
    """Decide whether documentation must be regenerated for *repo*.

    Checks out the repository plus its dependencies, then compares the
    recorded revision hashes (and the rosdoc_lite/jenkins_scripts versions
    used last time) against the current state.

    :returns: False when nothing changed (after uploading empty 'stamp'
        marker files so the hosted docs are marked up-to-date), otherwise
        a dict with keys 'doc_conf', 'depends_conf' and 'tags_db'.
    """
    append_pymodules_if_needed()
    print("Working on distro %s and repo %s" % (ros_distro, repo))
    # Load the rosinstall configurations for the repository
    doc_conf, depends_conf = load_configuration(ros_distro, repo)
    # Install the repository
    try:
        install_repo(docspace, workspace, repo, doc_conf, depends_conf)
    except BuildException:
        # checkout failed, try to get default branches of repos to notify the maintainers
        print(
            "Failed to checkout repositories, trying to checkout default branches to collect maintainer information for notification about failure"
        )
        # NOTE(review): `tuple` shadows the builtin and `repo` shadows the
        # function parameter; after the loop `repo` is the last repo dict,
        # which is what gets passed to install_repo below — confirm this is
        # the intended behaviour (the exception is re-raised regardless).
        for tuple in doc_conf:
            for repo in tuple.values():
                repo["version"] = None
        install_repo(docspace, workspace, repo, doc_conf, [])
        extract_notification_recipients(docspace, doc_conf)
        raise
    extract_notification_recipients(docspace, doc_conf)
    # Load information about existing tags
    jenkins_scripts_path = os.path.join(workspace, "jenkins_scripts")
    if not os.path.exists(jenkins_scripts_path):
        # if jenkins_scripts has not been checked out in the workspace
        # expect that the user call doc from within a jenkins_scripts checkout
        jenkins_scripts_path = os.getcwd()
    rosdoc_tag_index_path = os.path.join(workspace, "rosdoc_tag_index")
    tags_db = TagsDb(ros_distro, jenkins_scripts_path, rosdoc_tag_index_path)
    # Check to see if we need to document this repo list by checking if any of
    # the repositories revision numbers/hashes have changed
    changes = False or force_doc
    for conf in [("%s" % repo, doc_conf), ("%s_depends" % repo, depends_conf)]:
        changes = rev_changes(conf[0], conf[1], docspace, tags_db) or changes
    # We also want to make sure that we run documentation generation anytime
    # jenkins_scripts or rosdoc_lite has changed since the last time this job was
    # run
    repo_hashes = tags_db.get_rosinstall_hashes(repo) if tags_db.has_rosinstall_hashes(repo) else {}
    old_rosdoc_lite_hash = repo_hashes.get("rosdoc_lite-sys", None)
    old_jenkins_scripts_hash = repo_hashes.get("jenkins_scripts-sys", None)
    print("REPO HASHES: %s" % repo_hashes)
    if (
        not changes
        and old_rosdoc_lite_hash == rosdoc_lite_version
        and old_jenkins_scripts_hash == jenkins_scripts_version
    ):
        print("There were no changes to any of the repositories we document. Not running documentation.")
        copy_test_results(workspace, docspace)
        tags_db.delete_tag_index_repo()
        # create marker files for all packages an upload them
        doc_path = os.path.realpath("%s/doc/%s" % (docspace, ros_distro))
        if os.path.exists(doc_path):
            shutil.rmtree(doc_path)
        repo_path = os.path.realpath("%s" % (docspace))
        stacks, manifest_packages, catkin_packages, _ = build_repo_structure(repo_path, doc_conf, depends_conf)
        # NOTE(review): concatenating `.keys()` results requires Python 2
        # (dict views cannot be added with `+` in Python 3) — confirm the
        # target interpreter before porting.
        folders = sorted(set(stacks.keys() + manifest_packages.keys() + catkin_packages.keys()))
        if folders:
            dsts = ["%s/api/%s/stamp" % (doc_path, f) for f in folders]
            for dst in dsts:
                os.makedirs(os.path.dirname(dst))
                # touch an empty 'stamp' file for each package/stack folder
                with open(dst, "w"):
                    pass
            print("Uploading marker files to identify that documentation is up-to-date.")
            command = [
                "bash",
                "-c",
                'rsync -e "ssh -o StrictHostKeyChecking=no" -qr %s/api/ [email protected]:/home/rosbot/docs/%s/api'
                % (doc_path, ros_distro),
            ]
            call_with_list(command)
        return False
    # Make sure to update the versions of jenkins_scripts and rosdoc_lite for this repo list
    repo_hashes["rosdoc_lite-sys"] = rosdoc_lite_version
    repo_hashes["jenkins_scripts-sys"] = jenkins_scripts_version
    tags_db.set_rosinstall_hashes(repo, repo_hashes)
    return {"doc_conf": doc_conf, "depends_conf": depends_conf, "tags_db": tags_db}
def _test_repositories(ros_distro, repo_list, version_list, workspace, test_depends_on,
                       repo_sourcespace, dependson_sourcespace, repo_buildspace, dependson_buildspace,
                       sudo=False, no_chroot=False):
    """Check out, build and test *repo_list*, then optionally the packages
    that build-depend on it.

    Builds a rosinstall file from the release/devel distro files, installs
    the repositories and their dependencies, builds a catkin workspace (or
    catkin_make_isolated for non-catkin packages), runs tests, and — when
    *test_depends_on* is set — repeats the build/test cycle for all wet
    packages that recursively depend on the repo list. Test results are
    collected under ``<workspace>/test_results``.

    Side effects: creates/removes directories, changes the process CWD,
    installs apt packages and mutates the ``ros_env`` mapping in place.
    """
    append_pymodules_if_needed()
    from catkin_pkg.package import InvalidPackage, parse_package_string
    from rosdistro import get_cached_release, get_index, get_index_url, get_source_file
    from rosdistro.dependency_walker import DependencyWalker
    from rosdistro.manifest_provider import get_release_tag
    index = get_index(get_index_url())
    print("Parsing rosdistro file for %s" % ros_distro)
    release = get_cached_release(index, ros_distro)
    print("Parsing devel file for %s" % ros_distro)
    source_file = get_source_file(index, ros_distro)
    # Create rosdep object
    print("Create rosdep object")
    # extra strands-project rosdep source installed alongside the defaults
    strands_rosdep = "https://raw.githubusercontent.com/strands-project/rosdistro/strands-devel/rosdep/sources.list.d/50-strands.list"
    strands_rosdep_list_file = "/etc/ros/rosdep/sources.list.d/50-strands.list"
    additional_rosdeps = {strands_rosdep_list_file: strands_rosdep}
    rosdep_resolver = rosdep.RosDepResolver(ros_distro, sudo, no_chroot, additional_rosdeps=additional_rosdeps)
    if repo_list:
        # download the repo_list from source
        print("Creating rosinstall file for repo list")
        rosinstall = ""
        for repo_name, version in zip(repo_list, version_list):
            if version == 'devel':
                if repo_name not in source_file.repositories:
                    raise BuildException("Repository %s does not exist in Devel Distro" % repo_name)
                print("Using devel distro file to download repositories")
                rosinstall += _generate_rosinstall_for_repo(source_file.repositories[repo_name])
            else:
                if repo_name not in release.repositories:
                    raise BuildException("Repository %s does not exist in Ros Distro" % repo_name)
                repo = release.repositories[repo_name]
                if version not in ['latest', 'master']:
                    # NOTE(review): asserts are stripped under `python -O`;
                    # these act as sanity checks on the distro cache only.
                    assert repo.version is not None, 'Repository "%s" does not have a version set' % repo_name
                    assert 'release' in repo.tags, 'Repository "%s" does not have a "release" tag set' % repo_name
                for pkg_name in repo.package_names:
                    release_tag = get_release_tag(repo, pkg_name)
                    if version in ['latest', 'master']:
                        # strip the version suffix to track the branch tip
                        release_tag = '/'.join(release_tag.split('/')[:-1])
                    print('Using tag "%s" of release distro file to download package "%s from repo "%s' % (version, pkg_name, repo_name))
                    rosinstall += _generate_rosinstall_for_pkg_version(release.repositories[repo_name], pkg_name, release_tag)
        print("rosinstall file for all repositories: \n %s" % rosinstall)
        with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
            f.write(rosinstall)
        print("Install repo list from source")
        os.makedirs(repo_sourcespace)
        call("rosinstall %s %s/repo.rosinstall --catkin" % (repo_sourcespace, workspace))
        extract_notification_recipients(repo_sourcespace)
    # get the repositories build dependencies
    print("Get build dependencies of repo list")
    repo_build_dependencies = get_dependencies(repo_sourcespace, build_depends=True, run_depends=True)
    # ensure that catkin gets installed, for non-catkin packages so that catkin_make_isolated is available
    if 'catkin' not in repo_build_dependencies:
        repo_build_dependencies.append('catkin')
    print("Install build dependencies of repo list: %s" % (', '.join(repo_build_dependencies)))
    apt_get_install(repo_build_dependencies, rosdep_resolver, sudo)
    # replace the CMakeLists.txt file for repositories that use catkin
    root_cmakelists = os.path.join(repo_sourcespace, 'CMakeLists.txt')
    if os.path.exists(root_cmakelists):
        print("Removing the CMakeLists.txt file generated by rosinstall")
        os.remove(root_cmakelists)
    print("Create a new CMakeLists.txt file using catkin")
    # get environment
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
    # check if source workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(repo_sourcespace)
    # make build folder and change into it
    os.makedirs(repo_buildspace)
    os.chdir(repo_buildspace)
    # make test results dir
    test_results_dir = os.path.join(workspace, 'test_results')
    if os.path.exists(test_results_dir):
        shutil.rmtree(test_results_dir)
    os.makedirs(test_results_dir)
    if not non_catkin_pkgs:
        print("Build catkin workspace")
        call("catkin_init_workspace %s" % repo_sourcespace, ros_env)
        repos_test_results_dir = os.path.join(test_results_dir, 'repos')
        # set env variable to hot fix https://github.com/strands-project/strands_ci/issues/13
        ros_env['ROS_TEST_RESULTS_DIR'] = repos_test_results_dir
        call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (repo_sourcespace, repos_test_results_dir), ros_env)
        #ros_env_repo = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))
        # build repositories and tests
        print("Build repo list")
        call("make", ros_env)
        call("make install", ros_env)
        call("make tests", ros_env)
        # get the repositories test and run dependencies
        print("Get run dependencies of repo list")
        repo_test_dependencies = get_dependencies(repo_sourcespace, build_depends=False, run_depends=True)
        print("Install run dependencies of repo list: %s" % (', '.join(repo_test_dependencies)))
        apt_get_install(repo_test_dependencies, rosdep_resolver, sudo)
        # get environment after installing test and run dependencies
        ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)
        # set env variable to hot fix https://github.com/strands-project/strands_ci/issues/13
        ros_env['ROS_TEST_RESULTS_DIR'] = repos_test_results_dir
        # run tests
        print("Test repo list")
        # vglrun provides an X display for tests that need OpenGL/GUI
        call("vglrun -d :0 make run_tests", ros_env)
        # anything after this should build on this env
        ros_env = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))
        # set env variable to hot fix https://github.com/strands-project/strands_ci/issues/13
        ros_env['ROS_TEST_RESULTS_DIR'] = repos_test_results_dir
    else:
        print("Build workspace with non-catkin packages in isolation")
        # work around catkin_make_isolated issue (at least with version 0.5.65 of catkin)
        os.makedirs(os.path.join(repo_buildspace, 'devel_isolated'))
        call('catkin_make_isolated -C %s --source %s --install' % (repo_buildspace, repo_sourcespace), ros_env)
        setup_file = os.path.join(repo_buildspace, 'install_isolated', 'setup.sh')
        # anything after this should build on this env
        ros_env = get_ros_env(setup_file)
    # don't do depends-on on things not in release
    not_in_release = set(repo_list) - set(release.repositories.keys())
    if not_in_release:
        print("Removed [%s] repositories which are not in the " % ', '.join(sorted(not_in_release)), "release file for depends-on testing")
        repo_list = list(set(repo_list) - not_in_release)
    # see if we need to do more work or not
    if not test_depends_on:
        print("We're not testing the depends-on repositories")
        ensure_test_results(test_results_dir)
        return
    # get repo_list depends-on list
    print("Get list of wet repositories that build-depend on repo list: %s" % ', '.join(repo_list))
    walker = DependencyWalker(release)
    depends_on = set([])
    try:
        for repo_name in repo_list:
            print('repo_name', repo_name)
            repo = release.repositories[repo_name]
            for pkg_name in repo.package_names:
                print('pkg_name', pkg_name)
                # ignore_pkgs avoids re-walking packages already collected
                depends_on |= walker.get_recursive_depends_on(pkg_name, ['buildtool', 'build', 'test'], ignore_pkgs=depends_on)
                print('depends_on', depends_on)
        # remove all packages which are already in the workspace
        from catkin_pkg.packages import find_packages
        pkgs = find_packages(repo_sourcespace)
        depends_on -= set([pkg.name for pkg in pkgs.values()])
    except RuntimeError:
        # NOTE(review): the "%s" below is never %-formatted — print() just
        # emits both strings separated by a space; looks like a leftover
        # from an exception-message format.
        print("Exception %s: If you are not in the rosdistro and only in the devel", " builds there will be no depends on")
        depends_on = set([])
    print("Build depends_on list of pkg list: %s" % (', '.join(depends_on)))
    if len(depends_on) == 0:
        print("No wet packages depend on our repo list. Test finished here")
        ensure_test_results(test_results_dir)
        return
    # install depends_on packages from source from release repositories
    rosinstall = ''
    non_catkin_pkgs = []
    for pkg_name in depends_on:
        repo = release.repositories[release.packages[pkg_name].repository_name]
        if repo.version is None:
            continue
        pkg_xml = release.get_package_xml(pkg_name)
        if pkg_xml is None:
            raise BuildException('Could not retrieve package.xml for package "%s" from rosdistro cache' % pkg_name)
        try:
            pkg = parse_package_string(pkg_xml)
        except InvalidPackage as e:
            raise BuildException('package.xml for package "%s" from rosdistro cache is invalid: %s' % (pkg_name, e))
        if _is_non_catkin_package(pkg):
            non_catkin_pkgs.append(pkg.name)
        rosinstall += _generate_rosinstall_for_pkg(repo, pkg_name)
    if non_catkin_pkgs:
        print('Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs)))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return
    print("Rosinstall for depends_on:\n %s" % rosinstall)
    with open(workspace + "/depends_on.rosinstall", 'w') as f:
        f.write(rosinstall)
    print("Created rosinstall file for depends on")
    # install all repository and system dependencies of the depends_on list
    print("Install all depends_on from source: %s" % (', '.join(depends_on)))
    os.makedirs(dependson_sourcespace)
    call("rosinstall --catkin %s %s/depends_on.rosinstall" % (dependson_sourcespace, workspace))
    # check if depends_on workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(dependson_sourcespace)
    if non_catkin_pkgs:
        print('Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here' % ', '.join(sorted(non_catkin_pkgs)))
        create_test_result(test_results_dir, failure='Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.' % ', '.join(sorted(non_catkin_pkgs)))
        return
    # get build and run dependencies of depends_on list
    dependson_build_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=True, run_depends=False):
        print(" Checking dependency %s" % d)
        if d in dependson_build_dependencies:
            print(" Already in dependson_build_dependencies")
        if d in depends_on:
            print(" Is a direct dependency of the repo list, and is installed from source")
        if d in repo_list:
            print(" Is one of the repositories tested")
        if not d in dependson_build_dependencies and not d in depends_on and not d in repo_list:
            dependson_build_dependencies.append(d)
    print("Build dependencies of depends_on list are %s" % (', '.join(dependson_build_dependencies)))
    dependson_test_dependencies = []
    for d in get_dependencies(dependson_sourcespace, build_depends=False, run_depends=True):
        if not d in dependson_test_dependencies and not d in depends_on and not d in repo_list:
            dependson_test_dependencies.append(d)
    print("Test dependencies of depends_on list are %s" % (', '.join(dependson_test_dependencies)))
    # install build dependencies
    print("Install all build dependencies of the depends_on list")
    apt_get_install(dependson_build_dependencies, rosdep_resolver, sudo)
    # replace the CMakeLists.txt file again
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(dependson_sourcespace, 'CMakeLists.txt'))
    os.makedirs(dependson_buildspace)
    os.chdir(dependson_buildspace)
    print("Create a new CMakeLists.txt file using catkin")
    call("catkin_init_workspace %s" % dependson_sourcespace, ros_env)
    depends_on_test_results_dir = os.path.join(test_results_dir, 'depends_on')
    # set env variable to hot fix https://github.com/strands-project/strands_ci/issues/13
    ros_env['ROS_TEST_RESULTS_DIR'] = depends_on_test_results_dir
    call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s" % (dependson_sourcespace, depends_on_test_results_dir), ros_env)
    #ros_env_depends_on = get_ros_env(os.path.join(dependson_buildspace, 'devel/setup.bash'))
    # build repositories
    print("Build depends-on packages")
    call("make", ros_env)
    # install test dependencies
    print("Install all test dependencies of the depends_on list")
    apt_get_install(dependson_test_dependencies, rosdep_resolver, sudo)
    # test repositories
    print("Test depends-on packages")
    call("make run_tests", ros_env)
    ensure_test_results(test_results_dir)
def _test_repositories(ros_distro, repo_list, version_list, workspace,
                       test_depends_on, repo_sourcespace, dependson_sourcespace,
                       repo_buildspace, dependson_buildspace, sudo=False,
                       no_chroot=False):
    """Check out, build and test a list of repositories for a ROS distro,
    and optionally do the same for every released package that depends on them.

    Parameters:
        ros_distro -- name of the ROS distribution (used for rosdistro lookup
            and to source /opt/ros/<distro>/setup.bash)
        repo_list -- repository names to test
        version_list -- one version per repo: 'devel' (build from the source/devel
            distro file), 'latest'/'master' (release tag with the version suffix
            stripped), or anything else (exact release tag)
        workspace -- scratch directory; rosinstall files and 'test_results' go here
        test_depends_on -- if true, also build and test downstream (depends-on)
            packages from the release file
        repo_sourcespace/repo_buildspace -- source and build dirs for the repo list
        dependson_sourcespace/dependson_buildspace -- source and build dirs for
            the depends-on packages
        sudo -- passed through to rosdep/apt helpers
        no_chroot -- passed through to the rosdep resolver

    Raises:
        BuildException -- when a requested repo is missing from the distro files
            or a depends-on package.xml cannot be retrieved/parsed.

    NOTE(review): the sibling depends-on code earlier in this file also sets
    ros_env['ROS_TEST_RESULTS_DIR'] as a hot fix; this function does not —
    confirm whether that fix is needed here too.
    """
    append_pymodules_if_needed()
    from catkin_pkg.package import InvalidPackage, parse_package_string
    from rosdistro import get_cached_release, get_index, get_index_url, get_source_file
    from rosdistro.dependency_walker import DependencyWalker
    from rosdistro.manifest_provider import get_release_tag

    index = get_index(get_index_url())
    print("Parsing rosdistro file for %s" % ros_distro)
    release = get_cached_release(index, ros_distro)
    print("Parsing devel file for %s" % ros_distro)
    source_file = get_source_file(index, ros_distro)

    # Create rosdep object
    print("Create rosdep object")
    rosdep_resolver = rosdep.RosDepResolver(ros_distro, sudo, no_chroot)

    if repo_list:
        # download the repo_list from source
        print("Creating rosinstall file for repo list")
        rosinstall = ""
        for repo_name, version in zip(repo_list, version_list):
            if version == 'devel':
                # 'devel' builds come from the source distro file
                if repo_name not in source_file.repositories:
                    raise BuildException(
                        "Repository %s does not exist in Devel Distro" % repo_name)
                print("Using devel distro file to download repositories")
                rosinstall += _generate_rosinstall_for_repo(
                    source_file.repositories[repo_name])
            else:
                # everything else comes from the release distro file
                if repo_name not in release.repositories:
                    raise BuildException(
                        "Repository %s does not exist in Ros Distro" % repo_name)
                repo = release.repositories[repo_name]
                if version not in ['latest', 'master']:
                    # an exact version is only meaningful if the repo has one set
                    assert repo.version is not None, 'Repository "%s" does not have a version set' % repo_name
                assert 'release' in repo.tags, 'Repository "%s" does not have a "release" tag set' % repo_name
                for pkg_name in repo.package_names:
                    release_tag = get_release_tag(repo, pkg_name)
                    if version in ['latest', 'master']:
                        # drop the trailing version component of the tag,
                        # e.g. release/<pkg>/1.2.3 -> release/<pkg>
                        release_tag = '/'.join(release_tag.split('/')[:-1])
                    # fixed: closing quotes were missing around the second and
                    # third %s placeholders
                    print('Using tag "%s" of release distro file to download package "%s" from repo "%s"'
                          % (version, pkg_name, repo_name))
                    rosinstall += _generate_rosinstall_for_pkg_version(
                        release.repositories[repo_name], pkg_name, release_tag)
        print("rosinstall file for all repositories: \n %s" % rosinstall)
        with open(os.path.join(workspace, "repo.rosinstall"), 'w') as f:
            f.write(rosinstall)
        print("Install repo list from source")
        os.makedirs(repo_sourcespace)
        call("rosinstall %s %s/repo.rosinstall --catkin" % (repo_sourcespace, workspace))
        extract_notification_recipients(repo_sourcespace)

    # get the repositories build dependencies
    print("Get build dependencies of repo list")
    repo_build_dependencies = get_dependencies(repo_sourcespace,
                                               build_depends=True,
                                               run_depends=True)
    # ensure that catkin gets installed, for non-catkin packages so that
    # catkin_make_isolated is available
    if 'catkin' not in repo_build_dependencies:
        repo_build_dependencies.append('catkin')
    print("Install build dependencies of repo list: %s"
          % (', '.join(repo_build_dependencies)))
    apt_get_install(repo_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file for repositories that use catkin
    root_cmakelists = os.path.join(repo_sourcespace, 'CMakeLists.txt')
    if os.path.exists(root_cmakelists):
        print("Removing the CMakeLists.txt file generated by rosinstall")
        os.remove(root_cmakelists)
    print("Create a new CMakeLists.txt file using catkin")

    # get environment
    ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)

    # check if source workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(repo_sourcespace)

    # make build folder and change into it
    os.makedirs(repo_buildspace)
    os.chdir(repo_buildspace)

    # make test results dir
    test_results_dir = os.path.join(workspace, 'test_results')
    if os.path.exists(test_results_dir):
        shutil.rmtree(test_results_dir)
    os.makedirs(test_results_dir)

    if not non_catkin_pkgs:
        # pure catkin workspace: plain cmake + make flow
        print("Build catkin workspace")
        call("catkin_init_workspace %s" % repo_sourcespace, ros_env)
        repos_test_results_dir = os.path.join(test_results_dir, 'repos')
        call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s"
             % (repo_sourcespace, repos_test_results_dir), ros_env)
        #ros_env_repo = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))

        # build repositories and tests
        print("Build repo list")
        call("make", ros_env)
        call("make tests", ros_env)

        # get the repositories test and run dependencies
        print("Get run dependencies of repo list")
        repo_test_dependencies = get_dependencies(repo_sourcespace,
                                                  build_depends=False,
                                                  run_depends=True)
        print("Install run dependencies of repo list: %s"
              % (', '.join(repo_test_dependencies)))
        apt_get_install(repo_test_dependencies, rosdep_resolver, sudo)

        # get environment after installing test and run dependencies
        ros_env = get_ros_env('/opt/ros/%s/setup.bash' % ros_distro)

        # run tests
        print("Test repo list")
        call("make run_tests", ros_env)

        # anything after this should build on this env
        ros_env = get_ros_env(os.path.join(repo_buildspace, 'devel/setup.bash'))
    else:
        print("Build workspace with non-catkin packages in isolation")
        # work around catkin_make_isolated issue (at least with version 0.5.65 of catkin)
        os.makedirs(os.path.join(repo_buildspace, 'devel_isolated'))
        call('catkin_make_isolated -C %s --source %s --install'
             % (repo_buildspace, repo_sourcespace), ros_env)
        setup_file = os.path.join(repo_buildspace, 'install_isolated', 'setup.sh')
        # anything after this should build on this env
        ros_env = get_ros_env(setup_file)

    # don't do depends-on on things not in release
    not_in_release = set(repo_list) - set(release.repositories.keys())
    if not_in_release:
        # fixed: this used to be a two-argument print() which, in this
        # Python 2 file, printed a tuple repr instead of the message
        print("Removed [%s] repositories which are not in the release file for depends-on testing"
              % ', '.join(sorted(not_in_release)))
        repo_list = list(set(repo_list) - not_in_release)

    # see if we need to do more work or not
    if not test_depends_on:
        print("We're not testing the depends-on repositories")
        ensure_test_results(test_results_dir)
        return

    # get repo_list depends-on list
    print("Get list of wet repositories that build-depend on repo list: %s"
          % ', '.join(repo_list))
    walker = DependencyWalker(release)
    depends_on = set()
    try:
        for repo_name in repo_list:
            # fixed: debug prints were two-argument print() calls which print
            # tuple reprs under Python 2
            print('repo_name %s' % repo_name)
            repo = release.repositories[repo_name]
            for pkg_name in repo.package_names:
                print('pkg_name %s' % pkg_name)
                depends_on |= walker.get_recursive_depends_on(
                    pkg_name, ['buildtool', 'build', 'test'],
                    ignore_pkgs=depends_on)
                print('depends_on %s' % depends_on)
        # remove all packages which are already in the workspace
        from catkin_pkg.packages import find_packages
        pkgs = find_packages(repo_sourcespace)
        depends_on -= set([pkg.name for pkg in pkgs.values()])
    except RuntimeError as e:
        # fixed: the %s placeholder was never substituted and the exception was
        # not captured; this also used to be a tuple-printing two-argument print()
        print("Exception %s: If you are not in the rosdistro and only in the devel builds there will be no depends on" % e)
        depends_on = set()

    print("Build depends_on list of pkg list: %s" % (', '.join(depends_on)))
    if len(depends_on) == 0:
        print("No wet packages depend on our repo list. Test finished here")
        ensure_test_results(test_results_dir)
        return

    # install depends_on packages from source from release repositories
    rosinstall = ''
    non_catkin_pkgs = []
    for pkg_name in depends_on:
        repo = release.repositories[release.packages[pkg_name].repository_name]
        if repo.version is None:
            # unreleased repo: nothing to check out
            continue
        pkg_xml = release.get_package_xml(pkg_name)
        if pkg_xml is None:
            raise BuildException(
                'Could not retrieve package.xml for package "%s" from rosdistro cache'
                % pkg_name)
        try:
            pkg = parse_package_string(pkg_xml)
        except InvalidPackage as e:
            raise BuildException(
                'package.xml for package "%s" from rosdistro cache is invalid: %s'
                % (pkg_name, e))
        if _is_non_catkin_package(pkg):
            non_catkin_pkgs.append(pkg.name)
        rosinstall += _generate_rosinstall_for_pkg(repo, pkg_name)

    if non_catkin_pkgs:
        # non-catkin downstream packages cannot be built in this workspace;
        # record a failure result and stop
        print('Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here'
              % ', '.join(sorted(non_catkin_pkgs)))
        create_test_result(
            test_results_dir,
            failure=
            'Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.'
            % ', '.join(sorted(non_catkin_pkgs)))
        return

    print("Rosinstall for depends_on:\n %s" % rosinstall)
    with open(workspace + "/depends_on.rosinstall", 'w') as f:
        f.write(rosinstall)
    print("Created rosinstall file for depends on")

    # install all repository and system dependencies of the depends_on list
    print("Install all depends_on from source: %s" % (', '.join(depends_on)))
    os.makedirs(dependson_sourcespace)
    call("rosinstall --catkin %s %s/depends_on.rosinstall"
         % (dependson_sourcespace, workspace))

    # check if depends_on workspace contains only package built with catkin
    non_catkin_pkgs = _get_non_catkin_packages(dependson_sourcespace)
    if non_catkin_pkgs:
        print('Non-catkin packages depend on our repo list (%s). Skipping depends_on packages here'
              % ', '.join(sorted(non_catkin_pkgs)))
        create_test_result(
            test_results_dir,
            failure=
            'Non-catkin packages depend on the repos (%s). Skip building and testing depends_on packages.'
            % ', '.join(sorted(non_catkin_pkgs)))
        return

    # get build and run dependencies of depends_on list
    dependson_build_dependencies = []
    for d in get_dependencies(dependson_sourcespace,
                              build_depends=True,
                              run_depends=False):
        print(" Checking dependency %s" % d)
        if d in dependson_build_dependencies:
            print(" Already in dependson_build_dependencies")
        if d in depends_on:
            print(" Is a direct dependency of the repo list, and is installed from source")
        if d in repo_list:
            print(" Is one of the repositories tested")
        # only apt-install dependencies not already built from source or tested
        if d not in dependson_build_dependencies and d not in depends_on and d not in repo_list:
            dependson_build_dependencies.append(d)
    print("Build dependencies of depends_on list are %s"
          % (', '.join(dependson_build_dependencies)))

    dependson_test_dependencies = []
    for d in get_dependencies(dependson_sourcespace,
                              build_depends=False,
                              run_depends=True):
        if d not in dependson_test_dependencies and d not in depends_on and d not in repo_list:
            dependson_test_dependencies.append(d)
    print("Test dependencies of depends_on list are %s"
          % (', '.join(dependson_test_dependencies)))

    # install build dependencies
    print("Install all build dependencies of the depends_on list")
    apt_get_install(dependson_build_dependencies, rosdep_resolver, sudo)

    # replace the CMakeLists.txt file again
    print("Removing the CMakeLists.txt file generated by rosinstall")
    os.remove(os.path.join(dependson_sourcespace, 'CMakeLists.txt'))
    os.makedirs(dependson_buildspace)
    os.chdir(dependson_buildspace)
    print("Create a new CMakeLists.txt file using catkin")
    call("catkin_init_workspace %s" % dependson_sourcespace, ros_env)
    depends_on_test_results_dir = os.path.join(test_results_dir, 'depends_on')
    call("cmake %s -DCATKIN_TEST_RESULTS_DIR=%s"
         % (dependson_sourcespace, depends_on_test_results_dir), ros_env)
    #ros_env_depends_on = get_ros_env(os.path.join(dependson_buildspace, 'devel/setup.bash'))

    # build repositories
    print("Build depends-on packages")
    call("make", ros_env)

    # install test dependencies
    print("Install all test dependencies of the depends_on list")
    apt_get_install(dependson_test_dependencies, rosdep_resolver, sudo)

    # test repositories
    print("Test depends-on packages")
    call("make run_tests", ros_env)
    ensure_test_results(test_results_dir)