def main(argv=sys.argv[1:]): parser = argparse.ArgumentParser( description="Generate the 'doc' management jobs on Jenkins") add_argument_config_url(parser) add_argument_rosdistro_name(parser) add_argument_build_name(parser, 'doc') args = parser.parse_args(argv) config = get_index(args.config_url) build_files = get_doc_build_files(config, args.rosdistro_name) build_file = build_files[args.doc_build_name] if build_file.documentation_type != DOC_TYPE_ROSDOC: print(("The doc build file '%s' has the wrong documentation type to " + "be used with this script") % args.doc_build_name, file=sys.stderr) return 1 jenkins = connect(config.jenkins_url) configure_management_view(jenkins) group_name = get_doc_view_name( args.rosdistro_name, args.doc_build_name) configure_reconfigure_jobs_job( jenkins, group_name, args, config, build_file) configure_trigger_jobs_job(jenkins, group_name, build_file)
def get_job_config(args, config):
    template_name = 'misc/rosdistro_cache_job.xml.em'

    repository_args, script_generating_key_files = \
        get_repositories_and_script_generating_key_files(config=config)

    reconfigure_job_names = []
    build_files = get_release_build_files(config, args.rosdistro_name)
    for release_build_name in sorted(build_files.keys()):
        group_name = get_release_job_prefix(
            args.rosdistro_name, release_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_job_names.append(job_name)

    reconfigure_doc_job_names = []
    build_files = get_doc_build_files(config, args.rosdistro_name)
    for doc_build_name in sorted(build_files.keys()):
        group_name = get_doc_view_name(
            args.rosdistro_name, doc_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_doc_job_names.append(job_name)

    reconfigure_source_job_names = []
    build_files = get_source_build_files(config, args.rosdistro_name)
    for source_build_name in sorted(build_files.keys()):
        group_name = get_devel_view_name(
            args.rosdistro_name, source_build_name)
        job_name = '%s_%s' % (group_name, 'reconfigure-jobs')
        reconfigure_source_job_names.append(job_name)

    job_data = copy.deepcopy(args.__dict__)
    job_data.update({
        'ros_buildfarm_repository': get_repository(),

        'script_generating_key_files': script_generating_key_files,

        'rosdistro_index_url': config.rosdistro_index_url,

        'repository_args': repository_args,

        'reconfigure_job_names': reconfigure_job_names,
        'reconfigure_doc_job_names': reconfigure_doc_job_names,
        'reconfigure_source_job_names': reconfigure_source_job_names,

        'notification_emails':
            config.distributions[args.rosdistro_name]['notification_emails'],

        'git_ssh_credential_id': config.git_ssh_credential_id,
    })
    job_config = expand_template(template_name, job_data)
    return job_config
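# Illustrative only: how the expanded job configuration returned above is
# typically applied. The URL, job name and argparse namespace are made up;
# the get_index import path is an assumption, while connect() and
# configure_job() are the helpers used elsewhere in this document.
from argparse import Namespace

from ros_buildfarm.config import get_index  # assumed module path
from ros_buildfarm.jenkins import configure_job, connect

config = get_index('https://example.com/buildfarm_config/index.yaml')
args = Namespace(rosdistro_name='melodic')  # stand-in for the parsed CLI args
job_config = get_job_config(args, config)

jenkins = connect(config.jenkins_url)
configure_job(jenkins, '%s_rosdistro-cache' % args.rosdistro_name, job_config)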
def configure_doc_metadata_job(
        config_url, rosdistro_name, doc_build_name,
        config=None, build_file=None):
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    job_name = get_doc_view_name(rosdistro_name, doc_build_name)

    job_config = _get_doc_metadata_job_config(
        config, config_url, rosdistro_name, doc_build_name, build_file)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)
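# The function above can also be driven directly, without the CLI wrapper
# that follows below. Illustrative call only; the URL and names are
# placeholders (the doc build file must use the "manifest" documentation
# type, as the wrapper checks).
configure_doc_metadata_job(
    'https://example.com/buildfarm_config/index.yaml',  # config_url
    'melodic',    # rosdistro_name
    'default')    # doc_build_name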
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'doc_metadata' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_dry_run(parser)
    args = parser.parse_args(argv)

    config = get_index(args.config_url)
    build_files = get_doc_build_files(config, args.rosdistro_name)
    build_file = build_files[args.doc_build_name]

    if build_file.documentation_type != DOC_TYPE_MANIFEST:
        print(("The doc build file '%s' has the wrong documentation type to " +
               "be used with this script") % args.doc_build_name,
              file=sys.stderr)
        return 1

    return configure_doc_metadata_job(
        args.config_url, args.rosdistro_name, args.doc_build_name,
        config=config, build_file=build_file, dry_run=args.dry_run)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'trusty')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating updated manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.load(h)
                data = copy.deepcopy(remote_data)
                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version
                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))
                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends
                if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependencies for metapackage '%s'" % pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends
                    if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(
                pkg.name, ros_dependency_names)
    rosdoc_index.write_modified_data(
        args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourself
            # bad things happen when we do this
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }
            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = 'http://docs.ros.org/%s/api/%s/html' % \
                (args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope(
        'SUBSECTION',
        'overwrite CMakeLists.txt files to only generate messages'
    ):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope(
        'SUBSECTION',
        'determine dependencies and generate Dockerfile'
    ):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print(' -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc',
            _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use not released dependencies
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        build_files = get_doc_build_files(config, args.rosdistro_name)
        build_file = build_files[args.doc_build_name]

        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,

            'canonical_base_url': build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
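# The skip decision at the top of the function above boils down to comparing
# freshly computed hashes against the hashes stored in the rosdoc index by the
# previous run. A condensed sketch of that pattern; "hash_of" is any callable
# mapping a directory to a hash string (e.g. the get_hash() / get_git_hash()
# helpers used above).
def should_skip_doc_generation(
        rosdoc_index, repository_name, repo_dir, tool_dirs, hash_of,
        force=False):
    current_hashes = {'ros_buildfarm': 2}  # bump to retrigger all doc jobs
    for name, path in tool_dirs.items():
        current_hashes[name] = hash_of(path)
    current_hashes[repository_name] = hash_of(repo_dir)

    stored_hashes = rosdoc_index.hashes.get(repository_name, {})
    return current_hashes == stored_hashes and not force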
def configure_doc_jobs(
        config_url, rosdistro_name, doc_build_name, groovy_script=None):
    """
    Configure all Jenkins doc jobs.

    L{configure_doc_job} will be invoked for each doc repository and target
    which matches the build file criteria.
    """
    config = get_config_index(config_url)
    build_files = get_doc_build_files(config, rosdistro_name)
    build_file = build_files[doc_build_name]

    index = get_index(config.rosdistro_index_url)

    dist_cache = None
    if build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)

    # get targets
    targets = []
    for os_name in build_file.targets.keys():
        for os_code_name in build_file.targets[os_name].keys():
            for arch in build_file.targets[os_name][os_code_name]:
                targets.append((os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for os_name, os_code_name, arch in targets:
        print(' -', os_name, os_code_name, arch)

    dist_file = get_distribution_file(index, rosdistro_name, build_file)
    if not dist_file:
        print('No distribution file matches the build file')
        return
    doc_view_name = get_doc_view_name(rosdistro_name, doc_build_name)

    from ros_buildfarm.jenkins import connect
    jenkins = connect(config.jenkins_url)

    views = []
    views.append(configure_doc_view(jenkins, doc_view_name))

    if groovy_script is not None:
        # all further configuration will be handled by the groovy script
        jenkins = False

    repo_names = dist_file.repositories.keys()
    filtered_repo_names = build_file.filter_repositories(repo_names)

    job_names = []
    job_configs = {}
    for repo_name in sorted(repo_names):
        is_disabled = repo_name not in filtered_repo_names
        if is_disabled and build_file.skip_ignored_repositories:
            print("Skipping ignored repository '%s'" % repo_name,
                  file=sys.stderr)
            continue

        repo = dist_file.repositories[repo_name]
        if not repo.doc_repository:
            print("Skipping repository '%s': no doc section" % repo_name)
            continue
        if not repo.doc_repository.version:
            print("Skipping repository '%s': no doc version" % repo_name)
            continue

        for os_name, os_code_name, arch in targets:
            try:
                job_name, job_config = configure_doc_job(
                    config_url, rosdistro_name, doc_build_name,
                    repo_name, os_name, os_code_name, arch,
                    config=config, build_file=build_file,
                    index=index, dist_file=dist_file,
                    dist_cache=dist_cache, jenkins=jenkins, views=views,
                    is_disabled=is_disabled,
                    groovy_script=groovy_script)
                job_names.append(job_name)
                if groovy_script is not None:
                    print("Configuration for job '%s'" % job_name)
                    job_configs[job_name] = job_config
            except JobValidationError as e:
                print(e.message, file=sys.stderr)

    job_prefix = '%s__' % doc_view_name
    if groovy_script is None:
        # delete obsolete jobs in this view
        from ros_buildfarm.jenkins import remove_jobs
        print('Removing obsolete doc jobs')
        remove_jobs(jenkins, job_prefix, job_names)
    else:
        print("Writing groovy script '%s' to reconfigure %d jobs" %
              (groovy_script, len(job_configs)))
        data = {
            'expected_num_jobs': len(job_configs),
            'job_prefixes_and_names': {
                'doc': (job_prefix, job_names),
            }
        }
        content = expand_template('snippet/reconfigure_jobs.groovy.em', data)
        write_groovy_script_and_configs(
            groovy_script, content, job_configs)
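# A minimal sketch of driving configure_doc_jobs() for one build file; the
# URL and names are placeholders. Passing groovy_script makes the function
# write a reconfiguration script plus per-job configs instead of configuring
# each job through the Jenkins API (see the "jenkins = False" branch above).
configure_doc_jobs(
    'https://example.com/buildfarm_config/index.yaml',  # config_url
    'melodic',    # rosdistro_name
    'default',    # doc_build_name
    groovy_script='/tmp/reconfigure_doc_jobs.groovy')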
def configure_doc_job(
        config_url, rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        doc_repository=None):
    """
    Configure a single Jenkins doc job.

    This includes the following steps:
    - clone the doc repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the run_doc_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError(
                "Invalid repository name '%s' " % repo_name +
                'choose one of the following: %s' %
                ', '.join(sorted(repo_names)))

        repo = dist_file.repositories[repo_name]

        if not repo.doc_repository:
            raise JobValidationError(
                "Repository '%s' has no doc section" % repo_name)
        if not repo.doc_repository.version:
            raise JobValidationError(
                "Repository '%s' has no doc version" % repo_name)
        doc_repository = repo.doc_repository

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' % ', '.join(sorted(
                build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_doc_view_name(
            rosdistro_name, doc_build_name)
        configure_doc_view(jenkins, view_name)

    job_name = get_doc_job_name(
        rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_doc_job_config(
        config, config_url, rosdistro_name, doc_build_name, build_file,
        os_name, os_code_name, arch, doc_repository,
        repo_name, dist_cache=dist_cache, is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)

    return job_name, job_config
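# A matching sketch for configuring a single repository and target with the
# function above; all values are placeholders. It returns the job name
# together with the generated job configuration XML.
job_name, job_config = configure_doc_job(
    'https://example.com/buildfarm_config/index.yaml',  # config_url
    'melodic', 'default',           # rosdistro_name, doc_build_name
    'ros_comm',                     # repo_name
    'ubuntu', 'bionic', 'amd64')    # os_name, os_code_name, arch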
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate `manifest.yaml` from released package manifests")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_output_dir(parser, required=True)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    build_files = get_doc_build_files(config, args.rosdistro_name)
    build_file = build_files[args.doc_build_name]

    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    index = get_index(config.rosdistro_index_url)
    distribution = get_cached_distribution(index, args.rosdistro_name)

    # get rosdistro distribution cache
    # iterate over all released repositories which don't have a doc entry
    # extract information from package.xml and generate manifest.yaml

    repo_names = get_repo_names_with_release_but_no_doc(distribution)
    pkg_names = get_package_names(distribution, repo_names)

    filtered_pkg_names = build_file.filter_packages(pkg_names)

    print("Generate 'manifest.yaml' files for the following packages:")
    api_path = os.path.join(args.output_dir, 'api')
    for pkg_name in sorted(filtered_pkg_names):
        print('- %s' % pkg_name)
        try:
            data = get_metadata(distribution, pkg_name)
        except Exception:
            print('Could not extract meta data:', file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            continue

        # add devel job urls
        rel_pkg = distribution.release_packages[pkg_name]
        repo_name = rel_pkg.repository_name
        repo = distribution.repositories[repo_name]
        if repo.source_repository and repo.source_repository.version:
            build_files = {}
            for build_name in source_build_files.keys():
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                repo_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

        # add release job urls
        build_files = {}
        for build_name in release_build_files.keys():
            build_files[build_name] = release_build_files[build_name]
        release_job_urls = get_release_job_urls(
            config.jenkins_url, build_files, args.rosdistro_name, pkg_name)
        if release_job_urls:
            data['release_jobs'] = release_job_urls

        manifest_yaml = os.path.join(api_path, pkg_name, 'manifest.yaml')
        write_manifest_yaml(manifest_yaml, data)

    return 0
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Generate all jobs on Jenkins')
    add_argument_config_url(parser)
    parser.add_argument(
        '--ros-distro-names',
        nargs='*',
        metavar='ROS_DISTRO_NAME',
        default=[],
        help='The list of ROS distribution names if not generating all')
    parser.add_argument(
        '--skip-rosdistro-cache-job',
        action='store_true',
        help='Skip generating the rosdistro-cache jobs')
    parser.add_argument(
        '--commit',
        action='store_true',
        help='Apply the changes to Jenkins instead of only showing them')
    args = parser.parse_args(argv)

    if args.commit:
        print('The following changes will be applied to the Jenkins server.')
    else:
        print('This is a dry run. The Jenkins configuration is not changed.')
    print('')

    config = get_index(args.config_url)
    ros_distro_names = sorted(config.distributions.keys())

    invalid_ros_distro_name = [
        n for n in args.ros_distro_names if n not in ros_distro_names]
    if invalid_ros_distro_name:
        parser.error(
            'The following ROS distribution names are not part of the ' +
            'buildfarm index: ' + ', '.join(sorted(invalid_ros_distro_name)))

    # try to connect to Jenkins master
    jenkins = connect(config.jenkins_url)

    configure_view(
        jenkins, 'Queue', filter_queue=False, dry_run=not args.commit)

    generate_check_slaves_job(args.config_url, dry_run=not args.commit)

    if not args.ros_distro_names:
        generate_dashboard_job(args.config_url, dry_run=not args.commit)
        for doc_build_name in sorted(config.doc_builds.keys()):
            generate_doc_independent_job(
                args.config_url, doc_build_name, dry_run=not args.commit)

    selected_ros_distro_names = [
        n for n in ros_distro_names
        if not args.ros_distro_names or n in args.ros_distro_names]
    for ros_distro_name in selected_ros_distro_names:
        print(ros_distro_name)

        if not args.skip_rosdistro_cache_job:
            generate_rosdistro_cache_job(
                args.config_url, ros_distro_name, dry_run=not args.commit)

        release_build_files = get_release_build_files(config, ros_distro_name)
        for release_build_name in release_build_files.keys():
            generate_release_status_page_job(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)
            generate_release_maintenance_jobs(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)

        source_build_files = get_source_build_files(config, ros_distro_name)
        for source_build_name in source_build_files.keys():
            generate_devel_maintenance_jobs(
                args.config_url, ros_distro_name, source_build_name,
                dry_run=not args.commit)

        doc_build_files = get_doc_build_files(config, ros_distro_name)
        for doc_build_name, doc_build_file in doc_build_files.items():
            if doc_build_file.documentation_type == DOC_TYPE_ROSDOC:
                generate_doc_maintenance_jobs(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            elif doc_build_file.documentation_type == DOC_TYPE_MANIFEST:
                generate_doc_metadata_job(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            else:
                assert False, ("Unknown documentation type '%s' in doc " +
                               "build file '%s'") % \
                    (doc_build_file.documentation_type, doc_build_name)

        generate_repos_status_page_jobs(
            args.config_url, ros_distro_name, dry_run=not args.commit)

        index = ros_distro_names.index(ros_distro_name)
        if index > 0:
            # generate comparison pages for this rosdistro against all older ones
            generate_release_compare_page_job(
                args.config_url, ros_distro_name, ros_distro_names[:index],
                dry_run=not args.commit)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate `manifest.yaml` from released package manifests")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_output_dir(parser, required=True)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)
    build_files = get_doc_build_files(config, args.rosdistro_name)
    build_file = build_files[args.doc_build_name]

    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    index = get_index(config.rosdistro_index_url)
    distribution = get_cached_distribution(index, args.rosdistro_name)

    # get rosdistro distribution cache
    # iterate over all released repositories which don't have a doc entry
    # extract information from package.xml and generate manifest.yaml

    repo_names = get_repo_names_with_release_but_no_doc(distribution)
    pkg_names = get_package_names(distribution, repo_names)

    filtered_pkg_names = build_file.filter_packages(pkg_names)

    print("Generate 'manifest.yaml' files for the following packages:")
    api_path = os.path.join(args.output_dir, 'api')
    for pkg_name in sorted(filtered_pkg_names):
        print('- %s' % pkg_name)
        try:
            data = get_metadata(distribution, pkg_name)
        except Exception:
            print('Could not extract meta data:', file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            continue

        # add devel job urls
        build_files = {}
        for build_name in source_build_files.keys():
            build_files[build_name] = source_build_files[build_name]
        rel_pkg = distribution.release_packages[pkg_name]
        repo_name = rel_pkg.repository_name
        devel_job_urls = get_devel_job_urls(
            config.jenkins_url, build_files, args.rosdistro_name, repo_name)
        if devel_job_urls:
            data['devel_jobs'] = list(devel_job_urls)

        # add release job urls
        build_files = {}
        for build_name in release_build_files.keys():
            build_files[build_name] = release_build_files[build_name]
        release_job_urls = get_release_job_urls(
            config.jenkins_url, build_files, args.rosdistro_name, pkg_name)
        if release_job_urls:
            data['release_jobs'] = list(release_job_urls)

        manifest_yaml = os.path.join(api_path, pkg_name, 'manifest.yaml')
        write_manifest_yaml(manifest_yaml, data)

    return 0
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
             'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument(
        '--workspace-root',
        required=True,
        help='The root path of the workspace to compile')
    parser.add_argument(
        '--rosdoc-lite-dir',
        required=True,
        help='The root path of the rosdoc_lite repository')
    parser.add_argument(
        '--catkin-sphinx-dir',
        required=True,
        help='The root path of the catkin-sphinx repository')
    parser.add_argument(
        '--rosdoc-index-dir',
        required=True,
        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument(
        '--os-name',
        required=True,
        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument(
        '--os-code-name',
        required=True,
        help="The OS code name (e.g. 'xenial')")
    parser.add_argument(
        '--arch',
        required=True,
        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    condition_context = {
        'ROS_DISTRO': args.rosdistro_name,
        'ROS_PYTHON_VERSION': 2,
        'ROS_VERSION': 1,
    }

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        for pkg in pkgs.values():
            pkg.evaluate_conditions(condition_context)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(
            args.workspace_root, 'src', args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating updated manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)
                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version
                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))
                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(
                    args.output_dir, 'api', pkg_name, 'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(set([
                d.name for d in depends
                if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependencies for metapackage '%s'" % pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(set([
                    d.name for d in depends
                    if d.name in valid_package_names]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(pkg.name, ros_dependency_names)
    rosdoc_index.write_modified_data(
        args.output_dir, ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(
                    args.output_dir, 'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(os.path.join(
                        pkg_changelog_doc_path, 'changelog.html'), 'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(
                args.output_dir, 'rosdoc_tags', '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourself
            # bad things happen when we do this
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url': '../../../api/%s/html' % pkg.name,
                'location': 'file://%s' % os.path.join(
                    args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package': pkg.name,
            }
            # fetch generator specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']
            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name,
                 pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(
                config.jenkins_url, args.rosdistro_name, args.doc_build_name,
                args.repository_name, args.os_name, args.os_code_name,
                args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name,
                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(
                config.jenkins_url, build_files, args.rosdistro_name, pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(
                args.output_dir, 'manifests', pkg.name, 'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope('SUBSECTION',
               'overwrite CMakeLists.txt files to only generate messages'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type']
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope('SUBSECTION', 'determine dependencies and generate Dockerfile'):
        # initialize rosdep view
        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg-modules',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_os_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_os_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_os_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print(' -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(
            pkgs.values(), 'build, run and doc',
            _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use not released dependencies
            print('# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build')
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name': args.os_name,
            'os_code_name': args.os_code_name,
            'arch': args.arch,
            'build_tool': doc_build_file.build_tool,

            'distribution_repository_urls': args.distribution_repository_urls,
            'distribution_repository_keys': get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),

            'rosdistro_name': args.rosdistro_name,

            'uid': get_user_id(),

            'dependencies': debian_pkg_names,
            'dependency_versions': debian_pkg_versions,
            'install_lists': [],

            'canonical_base_url': doc_build_file.canonical_base_url,

            'ordered_pkg_tuples': ordered_pkg_tuples,
            'rosdoc_config_files': rosdoc_config_files,
        }
        create_dockerfile(
            'doc/doc_task.Dockerfile.em', data, args.dockerfile_dir)
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Generate all jobs on Jenkins')
    add_argument_config_url(parser)
    parser.add_argument(
        '--ros-distro-names',
        nargs='*',
        metavar='ROS_DISTRO_NAME',
        default=[],
        help='The list of ROS distribution names if not generating all')
    parser.add_argument(
        '--skip-rosdistro-cache-job',
        action='store_true',
        help='Skip generating the rosdistro-cache jobs')
    parser.add_argument(
        '--commit',
        action='store_true',
        help='Apply the changes to Jenkins instead of only showing them')
    args = parser.parse_args(argv)

    if args.commit:
        print('The following changes will be applied to the Jenkins server.')
    else:
        print('This is a dry run. The Jenkins configuration is not changed.')
    print('')

    config = get_index(args.config_url)
    if args.config_url.startswith('file:'):
        print(
            'WARNING: Local file system path used for configuration. ',
            'Configuration will not be accessible to jobs during execution. ',
            'Consider using a web(http) hosted configuration repository.',
            file=sys.stderr)
    ros_distro_names = sorted(config.distributions.keys())

    invalid_ros_distro_name = [
        n for n in args.ros_distro_names if n not in ros_distro_names]
    if invalid_ros_distro_name:
        parser.error(
            'The following ROS distribution names are not part of the ' +
            'buildfarm index: ' + ', '.join(sorted(invalid_ros_distro_name)))

    # try to connect to Jenkins master
    jenkins = connect(config.jenkins_url)

    configure_view(
        jenkins, 'Queue', filter_queue=False, dry_run=not args.commit)

    generate_check_agents_job(args.config_url, dry_run=not args.commit)

    if not args.ros_distro_names:
        generate_dashboard_job(args.config_url, dry_run=not args.commit)
        for doc_build_name in sorted(config.doc_builds.keys()):
            generate_doc_independent_job(
                args.config_url, doc_build_name, dry_run=not args.commit)

    selected_ros_distro_names = [
        n for n in ros_distro_names
        if not args.ros_distro_names or n in args.ros_distro_names]
    for ros_distro_name in selected_ros_distro_names:
        print(ros_distro_name)

        if not args.skip_rosdistro_cache_job:
            generate_rosdistro_cache_job(
                args.config_url, ros_distro_name, dry_run=not args.commit)

        generate_failing_jobs_job(
            args.config_url, ros_distro_name, dry_run=not args.commit)

        release_build_files = get_release_build_files(config, ros_distro_name)
        for release_build_name in release_build_files.keys():
            generate_release_status_page_job(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)
            generate_release_maintenance_jobs(
                args.config_url, ros_distro_name, release_build_name,
                dry_run=not args.commit)

        source_build_files = get_source_build_files(config, ros_distro_name)
        for source_build_name in source_build_files.keys():
            generate_devel_maintenance_jobs(
                args.config_url, ros_distro_name, source_build_name,
                dry_run=not args.commit)

        ci_build_files = get_ci_build_files(config, ros_distro_name)
        for ci_build_name in ci_build_files.keys():
            generate_ci_maintenance_jobs(
                args.config_url, ros_distro_name, ci_build_name,
                dry_run=not args.commit)

        doc_build_files = get_doc_build_files(config, ros_distro_name)
        for doc_build_name, doc_build_file in doc_build_files.items():
            if doc_build_file.documentation_type == DOC_TYPE_ROSDOC:
                generate_doc_maintenance_jobs(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            elif doc_build_file.documentation_type == DOC_TYPE_MANIFEST:
                generate_doc_metadata_job(
                    args.config_url, ros_distro_name, doc_build_name,
                    dry_run=not args.commit)
            else:
                assert False, ("Unknown documentation type '%s' in doc " +
                               "build file '%s'") % \
                    (doc_build_file.documentation_type, doc_build_name)

        generate_repos_status_page_jobs(
            args.config_url, ros_distro_name, dry_run=not args.commit)

        index = ros_distro_names.index(ros_distro_name)
        if index > 0:
            # generate compare pages for this rosdistro against all older ones
            generate_release_compare_page_job(
                args.config_url, ros_distro_name, ros_distro_names[:index],
                dry_run=not args.commit)

        generate_blocked_releases_page_job(
            args.config_url, ros_distro_name, dry_run=not args.commit)
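# Since --ros-distro-names, --skip-rosdistro-cache-job and --commit are
# declared directly in the parser above, a typical dry run followed by an
# apply could look like this sketch. The config URL is a placeholder and it
# assumes add_argument_config_url() registers the config index URL as the
# first positional argument, which is not shown in this document.
main(['https://example.com/buildfarm_config/index.yaml',
      '--ros-distro-names', 'melodic'])
main(['https://example.com/buildfarm_config/index.yaml',
      '--ros-distro-names', 'melodic', '--commit'])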
def configure_doc_jobs(config_url, rosdistro_name, doc_build_name, groovy_script=None): """ Configure all Jenkins doc jobs. L{configure_doc_job} will be invoked for doc repository and target which matches the build file criteria. """ config = get_config_index(config_url) build_files = get_doc_build_files(config, rosdistro_name) build_file = build_files[doc_build_name] index = get_index(config.rosdistro_index_url) dist_cache = None if build_file.notify_maintainers: dist_cache = get_distribution_cache(index, rosdistro_name) # get targets targets = [] for os_name in build_file.targets.keys(): for os_code_name in build_file.targets[os_name].keys(): for arch in build_file.targets[os_name][os_code_name]: targets.append((os_name, os_code_name, arch)) print('The build file contains the following targets:') for os_name, os_code_name, arch in targets: print(' -', os_name, os_code_name, arch) dist_file = get_distribution_file(index, rosdistro_name, build_file) if not dist_file: print('No distribution file matches the build file') return doc_view_name = get_doc_view_name(rosdistro_name, doc_build_name) from ros_buildfarm.jenkins import connect jenkins = connect(config.jenkins_url) views = [] views.append(configure_doc_view(jenkins, doc_view_name)) if groovy_script is not None: # all further configuration will be handled by the groovy script jenkins = False repo_names = dist_file.repositories.keys() filtered_repo_names = build_file.filter_repositories(repo_names) job_names = [] job_configs = {} for repo_name in sorted(repo_names): is_disabled = repo_name not in filtered_repo_names if is_disabled and build_file.skip_ignored_repositories: print("Skipping ignored repository '%s'" % repo_name, file=sys.stderr) continue repo = dist_file.repositories[repo_name] if not repo.doc_repository: print("Skipping repository '%s': no doc section" % repo_name) continue if not repo.doc_repository.version: print("Skipping repository '%s': no doc version" % repo_name) continue for os_name, os_code_name, arch in targets: try: job_name, job_config = configure_doc_job( config_url, rosdistro_name, doc_build_name, repo_name, os_name, os_code_name, arch, config=config, build_file=build_file, index=index, dist_file=dist_file, dist_cache=dist_cache, jenkins=jenkins, views=views, is_disabled=is_disabled, groovy_script=groovy_script) job_names.append(job_name) if groovy_script is not None: print("Configuration for job '%s'" % job_name) job_configs[job_name] = job_config except JobValidationError as e: print(e.message, file=sys.stderr) job_prefix = '%s__' % doc_view_name if groovy_script is None: # delete obsolete jobs in this view from ros_buildfarm.jenkins import remove_jobs print('Removing obsolete doc jobs') remove_jobs(jenkins, job_prefix, job_names) else: print("Writing groovy script '%s' to reconfigure %d jobs" % (groovy_script, len(job_configs))) data = { 'expected_num_jobs': len(job_configs), 'job_prefixes_and_names': { 'doc': (job_prefix, job_names), } } content = expand_template('snippet/reconfigure_jobs.groovy.em', data) write_groovy_script_and_configs(groovy_script, content, job_configs)
def configure_doc_job(
        config_url, rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch,
        config=None, build_file=None,
        index=None, dist_file=None, dist_cache=None,
        jenkins=None, views=None,
        is_disabled=False,
        groovy_script=None,
        doc_repository=None):
    """
    Configure a single Jenkins doc job.

    This includes the following steps:
    - clone the doc repository to use
    - clone the ros_buildfarm repository
    - write the distribution repository keys into files
    - invoke the run_doc_job.py script
    """
    if config is None:
        config = get_config_index(config_url)
    if build_file is None:
        build_files = get_doc_build_files(config, rosdistro_name)
        build_file = build_files[doc_build_name]

    if index is None:
        index = get_index(config.rosdistro_index_url)
    if dist_file is None:
        dist_file = get_distribution_file(index, rosdistro_name, build_file)
        if not dist_file:
            raise JobValidationError(
                'No distribution file matches the build file')

    repo_names = dist_file.repositories.keys()

    if repo_name is not None:
        if repo_name not in repo_names:
            raise JobValidationError(
                "Invalid repository name '%s' " % repo_name +
                'choose one of the following: %s' %
                ', '.join(sorted(repo_names)))

        repo = dist_file.repositories[repo_name]

        if not repo.doc_repository:
            raise JobValidationError(
                "Repository '%s' has no doc section" % repo_name)
        if not repo.doc_repository.version:
            raise JobValidationError(
                "Repository '%s' has no doc version" % repo_name)
        doc_repository = repo.doc_repository

    if os_name not in build_file.targets.keys():
        raise JobValidationError(
            "Invalid OS name '%s' " % os_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets.keys())))
    if os_code_name not in build_file.targets[os_name].keys():
        raise JobValidationError(
            "Invalid OS code name '%s' " % os_code_name +
            'choose one of the following: ' +
            ', '.join(sorted(build_file.targets[os_name].keys())))
    if arch not in build_file.targets[os_name][os_code_name]:
        raise JobValidationError(
            "Invalid architecture '%s' " % arch +
            'choose one of the following: %s' %
            ', '.join(sorted(build_file.targets[os_name][os_code_name])))

    if dist_cache is None and build_file.notify_maintainers:
        dist_cache = get_distribution_cache(index, rosdistro_name)
    if jenkins is None:
        from ros_buildfarm.jenkins import connect
        jenkins = connect(config.jenkins_url)
    if views is None:
        view_name = get_doc_view_name(rosdistro_name, doc_build_name)
        configure_doc_view(jenkins, view_name)

    job_name = get_doc_job_name(
        rosdistro_name, doc_build_name,
        repo_name, os_name, os_code_name, arch)

    job_config = _get_doc_job_config(
        config, config_url, rosdistro_name, doc_build_name,
        build_file, os_name, os_code_name, arch, doc_repository,
        repo_name, dist_cache=dist_cache, is_disabled=is_disabled)
    # jenkinsapi.jenkins.Jenkins evaluates to false if job count is zero
    if isinstance(jenkins, object) and jenkins is not False:
        from ros_buildfarm.jenkins import configure_job
        configure_job(jenkins, job_name, job_config)

    return job_name, job_config
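
# Illustrative usage sketch only: reconfiguring a single doc job for one
# repository and target. Repository, OS and architecture values are
# hypothetical; configure_doc_job() raises JobValidationError when they do
# not match the doc build file.
def _example_reconfigure_single_doc_job():
    try:
        job_name, job_config = configure_doc_job(
            'https://example.com/buildfarm_config/index.yaml',  # hypothetical
            'melodic', 'default',
            'example_repo', 'ubuntu', 'bionic', 'amd64')
        print("Configured job '%s' (%d characters of job XML)" %
              (job_name, len(job_config)))
    except JobValidationError as e:
        print(e.message, file=sys.stderr)
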
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Generate all jobs on Jenkins')
    add_argument_config_url(parser)
    parser.add_argument(
        '--ros-distro-names',
        nargs='*', metavar='ROS_DISTRO_NAME',
        default=[],
        help='The list of ROS distribution names if not generating all')
    parser.add_argument(
        '--skip-rosdistro-cache-job',
        action='store_true',
        help='Skip generating the rosdistro-cache jobs')
    args = parser.parse_args(argv)

    config = get_index(args.config_url)

    ros_distro_names = config.distributions.keys()

    invalid_ros_distro_name = [
        n for n in args.ros_distro_names if n not in ros_distro_names]
    if invalid_ros_distro_name:
        parser.error(
            'The following ROS distribution names are not part of the ' +
            'buildfarm index: ' + ', '.join(sorted(invalid_ros_distro_name)))

    # try to connect to Jenkins master
    connect(config.jenkins_url)

    generate_check_slaves_job(args.config_url)

    if not args.ros_distro_names:
        generate_dashboard_job(args.config_url)
        for doc_build_name in sorted(config.doc_builds.keys()):
            generate_doc_independent_job(args.config_url, doc_build_name)

    selected_ros_distro_names = [
        n for n in ros_distro_names
        if not args.ros_distro_names or n in args.ros_distro_names]
    for ros_distro_name in sorted(selected_ros_distro_names):
        print(ros_distro_name)

        if not args.skip_rosdistro_cache_job:
            generate_rosdistro_cache_job(args.config_url, ros_distro_name)

        release_build_files = get_release_build_files(config, ros_distro_name)
        for release_build_name in release_build_files.keys():
            generate_release_status_page_job(
                args.config_url, ros_distro_name, release_build_name)
            generate_release_maintenance_jobs(
                args.config_url, ros_distro_name, release_build_name)

        source_build_files = get_source_build_files(config, ros_distro_name)
        for source_build_name in source_build_files.keys():
            generate_devel_maintenance_jobs(
                args.config_url, ros_distro_name, source_build_name)

        doc_build_files = get_doc_build_files(config, ros_distro_name)
        for doc_build_name, doc_build_file in doc_build_files.items():
            if doc_build_file.documentation_type == DOC_TYPE_ROSDOC:
                generate_doc_maintenance_jobs(
                    args.config_url, ros_distro_name, doc_build_name)
            elif doc_build_file.documentation_type == DOC_TYPE_MANIFEST:
                generate_doc_metadata_job(
                    args.config_url, ros_distro_name, doc_build_name)
            else:
                assert False, ("Unknown documentation type '%s' in doc " +
                               "build file '%s'") % \
                    (doc_build_file.documentation_type, doc_build_name)

        generate_repos_status_page_jobs(
            args.config_url, ros_distro_name)
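
# Illustrative sketch only: invoking this entry point programmatically with
# an explicit argv instead of from the command line. The config URL and
# rosdistro name are hypothetical placeholders.
def _example_generate_jobs_for_one_rosdistro():
    return main([
        'https://example.com/buildfarm_config/index.yaml',  # hypothetical URL
        '--ros-distro-names', 'melodic',
        '--skip-rosdistro-cache-job',
    ])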