def main(argv=sys.argv[1:]):
    """Generate a 'Dockerfile' for building the Arch Linux binary package.

    Resolves the expected package version from the rosdistro index, extracts
    the build/runtime dependencies from the package's PKGBUILD and renders
    the Dockerfile template into the requested directory.
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    # fix: args.rosdistro_index_url is read below but the argument was never
    # registered (AttributeError at runtime); siblings register it required
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    # NOTE(review): currently unused, kept for parity with sibling scripts
    debian_package_name = get_debian_package_name(
        args.rosdistro_name, args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version
    debian_package_version = package_version

    # find PKGBUILD dependencies by sourcing it in bash and echoing the
    # 'makedepends' and 'depends' arrays
    pkgbuild_proc = subprocess.Popen(
        ["/bin/bash", "-c",
         "source PKGBUILD ; "
         "echo $(printf \"'%s' \" \"${makedepends[@]}\") "
         "$(printf \"'%s' \" \"${depends[@]}\")"],
        stdout=subprocess.PIPE)
    pkgbuild_out, _ = pkgbuild_proc.communicate()
    # fix: decode the captured stdout bytes; the Popen object has no .decode
    archlinux_pkg_names = pkgbuild_out.decode('ascii').split(" ")

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'uid': get_user_id(),

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'dependencies': archlinux_pkg_names,

        'rosdistro_name': args.rosdistro_name,
        'package_name': args.package_name,
        'binarydeb_dir': args.binarydeb_dir,
    }
    create_dockerfile(
        'release/binary_archlinux_task.Dockerfile.em', data,
        args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print(' -v %s:/tmp/binary_archlinux' % args.binarydeb_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that runs the 'sourcedeb' job."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'sourcedeb' job")
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_package_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    arg_parser.add_argument(
        '--source-dir',
        required=True,
        help='The directory where the package sources will be stored')
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    # seed the template data with every parsed option, then derive the rest
    template_data = copy.deepcopy(parsed.__dict__)
    template_data['arch'] = get_system_architecture()
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'release/sourcedeb_task.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'devel' job tasks."""
    arg_parser = argparse.ArgumentParser(description="Run the 'devel' job")
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'source')
    add_argument_repository_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_custom_rosdep_urls(arg_parser)
    arg_parser.add_argument(
        '--prerelease-overlay', action='store_true',
        help='Operate on two catkin workspaces')
    add_argument_env_vars(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    # start from all parsed options and add the derived template values
    template_data = copy.deepcopy(parsed.__dict__)
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['custom_rosdep_urls'] = parsed.custom_rosdep_urls
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'devel/devel_create_tasks.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'doc' job task."""
    arg_parser = argparse.ArgumentParser(description="Run the 'doc' job")
    add_argument_config_url(arg_parser)
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'doc')
    add_argument_repository_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_build_tool(arg_parser, required=True)
    add_argument_vcs_information(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_custom_rosdep_urls(arg_parser)
    add_argument_force(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    # all parsed options become template data, plus derived values
    template_data = copy.deepcopy(parsed.__dict__)
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['custom_rosdep_urls'] = parsed.custom_rosdep_urls
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'doc/doc_create_task.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'devel' job tasks."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'devel' job")
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'source')
    add_argument_repository_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_custom_rosdep_urls(arg_parser)
    arg_parser.add_argument(
        '--prerelease-overlay', action='store_true',
        help='Operate on two catkin workspaces')
    add_argument_build_tool(arg_parser, required=True)
    add_argument_ros_version(arg_parser)
    add_argument_env_vars(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    # expose every option to the template plus the derived repository keys
    template_data = copy.deepcopy(parsed.__dict__)
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['custom_rosdep_urls'] = parsed.custom_rosdep_urls
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'devel/devel_create_tasks.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'binarydeb' build task."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'binarydeb' job")
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_package_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_binarydeb_dir(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    add_argument_skip_download_sourcedeb(arg_parser)
    add_argument_append_timestamp(arg_parser)
    parsed = arg_parser.parse_args(argv)

    template_data = copy.deepcopy(parsed.__dict__)
    template_data['uid'] = get_user_id()
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['skip_download_sourcedeb'] = parsed.skip_download_sourcedeb
    # in-container locations used by the generated Dockerfile
    template_data['binarydeb_dir'] = '/tmp/binarydeb'
    template_data['dockerfile_dir'] = '/tmp/docker_build_binarydeb'

    create_dockerfile(
        'release/binarydeb_create_task.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate a 'Dockerfile' that installs the built binarydeb."""
    arg_parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for installing the binarydeb")
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_binarydeb_dir(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    repository_keys = get_distribution_repository_keys(
        parsed.distribution_repository_urls,
        parsed.distribution_repository_key_files)

    # generate Dockerfile
    create_dockerfile(
        'release/binarydeb_install_task.Dockerfile.em',
        {
            'os_name': parsed.os_name,
            'os_code_name': parsed.os_code_name,
            'arch': parsed.arch,
            'distribution_repository_urls':
                parsed.distribution_repository_urls,
            'distribution_repository_keys': repository_keys,
        },
        parsed.dockerfile_dir)

    # output hints about necessary volumes to mount
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/binarydeb:ro' % parsed.binarydeb_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that runs the 'sourcedeb' job."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'sourcedeb' job")
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_package_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    arg_parser.add_argument(
        '--source-dir',
        required=True,
        help='The directory where the package sources will be stored')
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    template_data = copy.deepcopy(parsed.__dict__)
    # NOTE(review): maintainer identity is hard coded here — confirm whether
    # it should come from configuration instead
    template_data['maintainer_email'] = '*****@*****.**'
    template_data['maintainer_name'] = 'Dirk Thomas'
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['uid'] = os.getuid()

    create_dockerfile(
        'release/sourcedeb_task.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate a 'Dockerfile' that installs the built binary package."""
    arg_parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for installing the binarydeb")
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_binarypkg_dir(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    repository_keys = get_distribution_repository_keys(
        parsed.distribution_repository_urls,
        parsed.distribution_repository_key_files)

    # generate Dockerfile
    create_dockerfile(
        'release/deb/binarypkg_install_task.Dockerfile.em',
        {
            'os_name': parsed.os_name,
            'os_code_name': parsed.os_code_name,
            'arch': parsed.arch,
            'distribution_repository_urls':
                parsed.distribution_repository_urls,
            'distribution_repository_keys': repository_keys,
        },
        parsed.dockerfile_dir)

    # output hints about necessary volumes to mount
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/binarydeb:ro' % parsed.binarypkg_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile and mock config for the 'sourcerpm' job."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'sourcerpm' job")
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_package_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_sourcepkg_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    template_data = copy.deepcopy(parsed.__dict__)
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    # the source package build does not target a binary repository
    template_data['target_repository'] = None
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'release/rpm/sourcepkg_task.Dockerfile.em', template_data,
        parsed.dockerfile_dir)

    # write the mock configuration next to the generated Dockerfile
    mock_cfg_path = os.path.join(parsed.dockerfile_dir, 'mock_config.cfg')
    with open(mock_cfg_path, 'w') as mock_cfg:
        mock_cfg.write(
            expand_template('release/rpm/mock_config.cfg.em', template_data))
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'binarydeb' build task."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'binarydeb' job")
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_package_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_binarydeb_dir(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    add_argument_skip_download_sourcedeb(arg_parser)
    add_argument_append_timestamp(arg_parser)
    parsed = arg_parser.parse_args(argv)

    template_data = copy.deepcopy(parsed.__dict__)
    template_data['uid'] = get_user_id()
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['skip_download_sourcedeb'] = parsed.skip_download_sourcedeb
    # in-container locations used by the generated Dockerfile
    template_data['binarydeb_dir'] = '/tmp/binarydeb'
    template_data['dockerfile_dir'] = '/tmp/docker_build_binarydeb'

    create_dockerfile(
        'release/binarydeb_create_task.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'doc' job task."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'doc' job")
    add_argument_config_url(arg_parser)
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'doc')
    add_argument_repository_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_vcs_information(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_force(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    # expose every option to the template plus the derived repository keys
    template_data = copy.deepcopy(parsed.__dict__)
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'doc/doc_create_task.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that runs the deb 'sourcedeb' job."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'sourcedeb' job")
    add_argument_rosdistro_index_url(arg_parser, required=True)
    add_argument_rosdistro_name(arg_parser)
    add_argument_package_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_distribution_repository_key_files(arg_parser)
    arg_parser.add_argument(
        '--source-dir',
        required=True,
        help='The directory where the package sources will be stored')
    add_argument_dockerfile_dir(arg_parser)
    parsed = arg_parser.parse_args(argv)

    template_data = copy.deepcopy(parsed.__dict__)
    # the source package is built for the architecture of this host
    template_data['arch'] = get_system_architecture()
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'release/deb/sourcepkg_task.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Build the package sourcedeb inside a logged 'Scope' section."""
    with Scope('SUBSECTION', 'build sourcedeb'):
        arg_parser = argparse.ArgumentParser(
            description='Build package sourcedeb')
        add_argument_os_name(arg_parser)
        add_argument_os_code_name(arg_parser)
        add_argument_source_dir(arg_parser)
        parsed = arg_parser.parse_args(argv)
        # propagate the build result as this script's exit status
        return build_sourcedeb(
            parsed.source_dir,
            os_name=parsed.os_name,
            os_code_name=parsed.os_code_name)
def main(argv=sys.argv[1:]):
    """Build the package sourcedeb inside a logged 'Scope' section."""
    with Scope('SUBSECTION', 'build sourcedeb'):
        arg_parser = argparse.ArgumentParser(
            description='Build package sourcedeb')
        add_argument_os_name(arg_parser)
        add_argument_os_code_name(arg_parser)
        add_argument_source_dir(arg_parser)
        parsed = arg_parser.parse_args(argv)
        # propagate the build result as this script's exit status
        return build_sourcedeb(
            parsed.source_dir,
            os_name=parsed.os_name,
            os_code_name=parsed.os_code_name)
def main(argv=sys.argv[1:]):
    """Print a standalone shell script reproducing the 'devel' job steps."""
    arg_parser = argparse.ArgumentParser(
        description="Generate a 'devel' script")
    add_argument_config_url(arg_parser)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'source')
    add_argument_repository_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    parsed_args = arg_parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                repo_spec = kwargs['locals']['repo_spec']
                checkout_path = kwargs['locals']['path']
                self.scms.append((repo_spec, checkout_path))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # render the job configuration without touching Jenkins itself
    configure_devel_job(
        parsed_args.config_url, parsed_args.rosdistro_name,
        parsed_args.source_build_name, parsed_args.repository_name,
        parsed_args.os_name, parsed_args.os_code_name, parsed_args.arch,
        jenkins=False, views=False)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        parsed_args.rosdistro_name, parsed_args.source_build_name,
        parsed_args.repository_name, parsed_args.os_name,
        parsed_args.os_code_name, parsed_args.arch)

    script = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts},
        options={BANGPATH_OPT: False})
    # use the interpreter running this script instead of a bare 'python3'
    script = script.replace('python3', sys.executable)
    print(script)
def main(argv=sys.argv[1:]):
    """Print a standalone shell script reproducing the 'devel' job steps.

    Renders the job configuration without contacting Jenkins, recording the
    SCM checkouts and shell builders via a template include hook, then
    expands them into a shell script printed on stdout.
    """
    # fix: removed the unnecessary 'global templates' declaration — the
    # module object is only mutated via attribute assignment below, never
    # rebound, so the declaration had no effect.
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            super(IncludeHook, self).__init__()
            self.scms = []      # (repo_spec, path) pairs from scm snippets
            self.scripts = []   # shell builder scripts

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'],
                     kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    templates.template_hooks = [hook]

    # render the job configuration; jenkins/views disabled so nothing is
    # actually created on a Jenkins master
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        jenkins=False, views=False)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name, args.repository_name,
        args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts},
        options={BANGPATH_OPT: False})
    print(value)
def main(argv=sys.argv[1:]):
    """Configure a single 'devel' job on Jenkins."""
    arg_parser = argparse.ArgumentParser(
        description="Generate a 'devel' job on Jenkins")
    add_argument_config_url(arg_parser)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'source')
    add_argument_repository_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    parsed = arg_parser.parse_args(argv)

    # propagate the configuration result as this script's exit status
    return configure_devel_job(
        parsed.config_url, parsed.rosdistro_name, parsed.source_build_name,
        parsed.repository_name,
        parsed.os_name, parsed.os_code_name, parsed.arch)
def main(argv=sys.argv[1:]):
    """Fetch the released sources of one package inside a 'Scope' section."""
    with Scope('SUBSECTION', 'get sources'):
        arg_parser = argparse.ArgumentParser(
            description="Get released package sources")
        add_argument_rosdistro_index_url(arg_parser)
        add_argument_rosdistro_name(arg_parser)
        add_argument_package_name(arg_parser)
        add_argument_os_name(arg_parser)
        add_argument_os_code_name(arg_parser)
        add_argument_source_dir(arg_parser)
        parsed = arg_parser.parse_args(argv)

        # propagate the result as this script's exit status
        return get_sources(
            parsed.rosdistro_index_url, parsed.rosdistro_name,
            parsed.package_name, parsed.os_name, parsed.os_code_name,
            parsed.source_dir)
def main(argv=sys.argv[1:]):
    """Configure a single 'release' job on Jenkins."""
    arg_parser = argparse.ArgumentParser(
        description="Generate a 'release' job on Jenkins")
    add_argument_config_url(arg_parser)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'release')
    add_argument_package_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    # NOTE(review): 'arch' is accepted but not forwarded below — presumably
    # the callee derives it from the build file; confirm
    add_argument_arch(arg_parser)
    parsed = arg_parser.parse_args(argv)

    return configure_release_job(
        parsed.config_url, parsed.rosdistro_name, parsed.release_build_name,
        parsed.package_name,
        parsed.os_name, parsed.os_code_name)
def main(argv=sys.argv[1:]):
    """Configure a single 'devel' job on Jenkins.

    Returns the result of configure_devel_job so callers using
    sys.exit(main()) propagate the outcome as the exit status.
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' job on Jenkins")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    args = parser.parse_args(argv)

    # fix: return the result (was dropped) for consistency with the sibling
    # entry points which propagate it as the exit code
    return configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name,
        args.os_name, args.os_code_name, args.arch)
def main(argv=sys.argv[1:]):
    """Configure a single 'release' job on Jenkins."""
    arg_parser = argparse.ArgumentParser(
        description="Generate a 'release' job on Jenkins")
    add_argument_config_url(arg_parser)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'release')
    add_argument_package_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    # NOTE(review): 'arch' is accepted but not forwarded below — presumably
    # the callee derives it from the build file; confirm
    add_argument_arch(arg_parser)
    parsed = arg_parser.parse_args(argv)

    return configure_release_job(
        parsed.config_url, parsed.rosdistro_name, parsed.release_build_name,
        parsed.package_name,
        parsed.os_name, parsed.os_code_name)
def main(argv=sys.argv[1:]):
    """Build the package sourcerpm inside a logged 'Scope' section."""
    with Scope('SUBSECTION', 'build sourcerpm'):
        arg_parser = argparse.ArgumentParser(
            description='Build package sourcerpm')
        add_argument_rosdistro_index_url(arg_parser)
        add_argument_rosdistro_name(arg_parser)
        add_argument_package_name(arg_parser)
        add_argument_os_name(arg_parser)
        add_argument_os_code_name(arg_parser)
        add_argument_source_dir(arg_parser)
        parsed = arg_parser.parse_args(argv)

        # propagate the build result as this script's exit status
        return build_sourcerpm(
            parsed.rosdistro_index_url, parsed.rosdistro_name,
            parsed.package_name, parsed.os_name, parsed.os_code_name,
            parsed.source_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'CI' job tasks."""
    parser = argparse.ArgumentParser(description="Run the 'CI' job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    # these two collect trailing remainder arguments handled separately
    package_selection_arg = add_argument_package_selection_args(parser)
    build_tool_arg = add_argument_build_tool_args(parser)
    add_argument_repos_file_urls(parser)
    add_argument_repository_names(parser, optional=True)
    add_argument_ros_version(parser)
    add_argument_skip_rosdep_keys(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--workspace-mount-point', nargs='*',
        help='Locations within the docker image where the workspace(s) '
             'will be mounted when the docker image is run.')

    remainders = extract_multiple_remainders(
        argv, (package_selection_arg, build_tool_arg))
    args = parser.parse_args(argv)
    for name, value in remainders.items():
        setattr(args, name, value)

    # at least one source of repositories must be provided
    assert args.repos_file_urls or args.repository_names

    template_data = copy.deepcopy(args.__dict__)
    template_data['distribution_repository_urls'] = \
        args.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files)
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'ci/ci_create_tasks.Dockerfile.em', template_data,
        args.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Configure a single 'CI' job on Jenkins."""
    arg_parser = argparse.ArgumentParser(
        description="Generate a 'CI' job on Jenkins")
    # Positional
    add_argument_config_url(arg_parser)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'ci')
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    parsed = arg_parser.parse_args(argv)

    configure_ci_job(
        parsed.config_url, parsed.rosdistro_name, parsed.ci_build_name,
        parsed.os_name, parsed.os_code_name, parsed.arch)
def main(argv=sys.argv[1:]):
    """Check the sync criteria; exit 0 when met, 1 otherwise."""
    arg_parser = argparse.ArgumentParser(
        description='Check if the sync criteria are matched to sync '
                    'packages from the building to the testing repo')
    add_argument_config_url(arg_parser)
    add_argument_rosdistro_name(arg_parser)
    add_argument_build_name(arg_parser, 'release')
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_cache_dir(arg_parser, '/tmp/package_repo_cache')
    parsed = arg_parser.parse_args(argv)

    ok = check_sync_criteria(
        parsed.config_url, parsed.rosdistro_name, parsed.release_build_name,
        parsed.os_name, parsed.os_code_name, parsed.arch, parsed.cache_dir)
    # map the boolean onto a conventional process exit status
    return 0 if ok else 1
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'devel' job tasks."""
    parser = argparse.ArgumentParser(
        description="Run the 'devel' job")
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_custom_rosdep_urls(parser)
    parser.add_argument(
        '--prerelease-overlay', action='store_true',
        help='Operate on two catkin workspaces')
    add_argument_build_tool(parser, required=True)
    add_argument_custom_rosdep_update_options(parser)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    # these two collect trailing remainder arguments handled separately
    build_tool_arg = add_argument_build_tool_args(parser)
    build_tool_test_arg = add_argument_build_tool_test_args(parser)

    remainders = extract_multiple_remainders(
        argv, (build_tool_arg, build_tool_test_arg))
    args = parser.parse_args(argv)
    for name, value in remainders.items():
        setattr(args, name, value)

    template_data = copy.deepcopy(args.__dict__)
    template_data['distribution_repository_urls'] = \
        args.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files)
    template_data['custom_rosdep_urls'] = args.custom_rosdep_urls
    template_data['rosdep_update_options'] = \
        args.custom_rosdep_update_options
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'devel/devel_create_tasks.Dockerfile.em', template_data,
        args.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile and mock config for the 'binarypkg' job."""
    parser = argparse.ArgumentParser(description="Run the 'binarypkg' job")
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_target_repository(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_skip_download_sourcepkg(parser)
    add_argument_append_timestamp(parser)
    add_argument_env_vars(parser)
    add_argument_binarypkg_dir(parser)
    args = parser.parse_args(argv)

    template_data = copy.deepcopy(args.__dict__)
    template_data['uid'] = get_user_id()
    template_data['distribution_repository_urls'] = \
        args.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files)
    # the source RPMs live under <target>/<os_code_name>/SRPMS
    template_data['target_repository'] = os.path.join(
        args.target_repository, args.os_code_name, 'SRPMS')
    template_data['skip_download_sourcepkg'] = args.skip_download_sourcepkg
    template_data['sourcepkg_dir'] = os.path.join(
        args.binarypkg_dir, 'source')
    template_data['build_environment_variables'] = args.env_vars

    create_dockerfile(
        'release/rpm/binarypkg_task.Dockerfile.em', template_data,
        args.dockerfile_dir)

    # write the mock configuration next to the generated Dockerfile
    mock_cfg_path = os.path.join(args.dockerfile_dir, 'mock_config.cfg')
    with open(mock_cfg_path, 'w') as mock_cfg:
        mock_cfg.write(
            expand_template('release/rpm/mock_config.cfg.em', template_data))
def main(argv=sys.argv[1:]):
    """Print a standalone shell script reproducing the 'devel' job steps.

    Renders the job configuration without contacting Jenkins, recording the
    SCM checkouts and shell builders via a template hook, then expands them
    into a shell script printed on stdout.
    """
    # fix: removed the unnecessary 'global templates' declaration — the
    # module object is only mutated via attribute assignment below, never
    # rebound, so the declaration had no effect.
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    scms = []      # (repo_spec, path) pairs from scm snippets
    scripts = []   # shell builder scripts

    def template_hook(template_name, data, content):
        if template_name == 'snippet/scm.xml.em':
            scms.append((data['repo_spec'], data['path']))
        if template_name == 'snippet/builder_shell.xml.em':
            scripts.append(data['script'])
    templates.template_hook = template_hook

    # render the job configuration; jenkins/view disabled so nothing is
    # actually created on a Jenkins master
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        jenkins=False, view=False)

    templates.template_hook = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name, args.repository_name,
        args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': scms,
            'scripts': scripts},
        options={BANGPATH_OPT: False})
    print(value)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the 'CI' job tasks."""
    arg_parser = argparse.ArgumentParser(
        description="Run the 'CI' job")
    # Positional
    add_argument_rosdistro_name(arg_parser)
    add_argument_os_name(arg_parser)
    add_argument_os_code_name(arg_parser)
    add_argument_arch(arg_parser)
    add_argument_build_ignore(arg_parser)
    add_argument_build_tool(arg_parser, required=True)
    add_argument_distribution_repository_key_files(arg_parser)
    add_argument_distribution_repository_urls(arg_parser)
    add_argument_dockerfile_dir(arg_parser)
    add_argument_env_vars(arg_parser)
    add_argument_install_packages(arg_parser)
    add_argument_package_selection_args(arg_parser)
    add_argument_repos_file_urls(arg_parser, required=True)
    add_argument_ros_version(arg_parser)
    add_argument_skip_rosdep_keys(arg_parser)
    add_argument_test_branch(arg_parser)
    arg_parser.add_argument(
        '--workspace-mount-point', nargs='*',
        help='Locations within the docker image where the workspace(s) '
             'will be mounted when the docker image is run.')
    parsed = arg_parser.parse_args(argv)

    template_data = copy.deepcopy(parsed.__dict__)
    template_data['distribution_repository_urls'] = \
        parsed.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            parsed.distribution_repository_urls,
            parsed.distribution_repository_key_files)
    template_data['uid'] = get_user_id()

    create_dockerfile(
        'ci/ci_create_tasks.Dockerfile.em', template_data,
        parsed.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile that creates the deb 'binarypkg' task."""
    parser = argparse.ArgumentParser(
        description="Run the 'binarydeb' job")
    add_argument_rosdistro_index_url(parser, required=True)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_target_repository(parser)
    add_argument_binarypkg_dir(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_skip_download_sourcepkg(parser)
    add_argument_append_timestamp(parser)
    add_argument_env_vars(parser)
    args = parser.parse_args(argv)

    # flatten the env var mapping into NAME=VALUE strings for the template
    env_strings = [
        '%s=%s' % key_value for key_value in args.env_vars.items()]

    template_data = copy.deepcopy(args.__dict__)
    template_data['uid'] = get_user_id()
    template_data['distribution_repository_urls'] = \
        args.distribution_repository_urls
    template_data['distribution_repository_keys'] = \
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files)
    template_data['target_repository'] = args.target_repository
    template_data['skip_download_sourcepkg'] = args.skip_download_sourcepkg
    # in-container locations used by the generated Dockerfile
    template_data['binarypkg_dir'] = '/tmp/binarydeb'
    template_data['build_environment_variables'] = env_strings
    template_data['dockerfile_dir'] = '/tmp/docker_build_binarydeb'

    create_dockerfile(
        'release/deb/binarypkg_create_task.Dockerfile.em', template_data,
        args.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile for the 'check_sync_criteria' job.

    os_name may be omitted for backward compatibility; a deprecation
    warning is printed and it defaults to 'ubuntu'.
    """
    parser = argparse.ArgumentParser(
        description="Run the 'check_sync_criteria' job")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    # fix: the redundant add_argument_os_name(parser) call that followed
    # this was removed — it registered a second positional with the same
    # dest and corrupted positional argument parsing
    parser.add_argument(
        'os_name', nargs='?',
        help='An OS name from the build file')
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_cache_dir(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    if args.os_name is None:
        # fix: report the program name; argv defaults to sys.argv[1:], so
        # argv[0] was the first *argument*, not the script name
        print(
            'WARNING: Calling %s without specifying os_name is deprecated' %
            sys.argv[0], file=sys.stderr)
        args.os_name = 'ubuntu'

    data = copy.deepcopy(args.__dict__)
    data.update({
        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'uid': get_user_id(),
    })
    create_dockerfile(
        'release/release_check_sync_criteria_task.Dockerfile.em', data,
        args.dockerfile_dir)
def main(argv=sys.argv[1:]): parser = argparse.ArgumentParser( description="Generate a 'Dockerfile' for the CI job") # Positional add_argument_rosdistro_name(parser) add_argument_os_name(parser) add_argument_os_code_name(parser) add_argument_arch(parser) add_argument_build_ignore(parser) add_argument_distribution_repository_key_files(parser) add_argument_distribution_repository_urls(parser) add_argument_dockerfile_dir(parser) add_argument_env_vars(parser) add_argument_package_selection_args(parser) add_argument_repos_file_urls(parser, required=True) add_argument_skip_rosdep_keys(parser) add_argument_test_branch(parser) parser.add_argument( '--workspace-root', nargs='+', help='The root path of the workspace to compile') args = parser.parse_args(argv) debian_pkg_names = [ 'git', 'python3-apt', 'python3-colcon-common-extensions', 'python3-rosdep', 'python3-vcstool', ] # get versions for build dependencies apt_cache = Cache() debian_pkg_versions = get_binary_package_versions( apt_cache, debian_pkg_names) # generate Dockerfile data = { 'os_name': args.os_name, 'os_code_name': args.os_code_name, 'arch': args.arch, 'distribution_repository_urls': args.distribution_repository_urls, 'distribution_repository_keys': get_distribution_repository_keys( args.distribution_repository_urls, args.distribution_repository_key_files), 'rosdistro_name': args.rosdistro_name, 'custom_rosdep_urls': [], 'uid': get_user_id(), 'build_environment_variables': args.env_vars, 'dependencies': debian_pkg_names, 'dependency_versions': debian_pkg_versions, 'repos_file_urls': args.repos_file_urls, 'test_branch': args.test_branch, 'skip_rosdep_keys': args.skip_rosdep_keys, 'build_ignore': args.build_ignore, 'package_selection_args': args.package_selection_args, 'workspace_root': args.workspace_root, } create_dockerfile( 'ci/create_workspace.Dockerfile.em', data, args.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile for the devel build-and-test task.

    Writes an install list for generic build dependencies next to the
    Dockerfile and prints which host volumes must be mounted into the
    container. Accepts one workspace root, or two when building an overlay
    on top of an underlay (enforced by check_len_action(1, 2)).
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")
    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    add_argument_ros_version(parser)
    add_argument_testing(parser)
    parser.add_argument(
        '--workspace-root',
        nargs='*',
        action=check_len_action(1, 2),
        help='The root path of the workspace to compile')
    args = parser.parse_args(argv)

    apt_cache = Cache()

    # always needed, plus extra colcon tooling when building with colcon
    debian_pkg_names = set(['build-essential'])
    debian_pkg_names.update(args.install_packages)
    if args.build_tool == 'colcon':
        debian_pkg_names.update([
            'python3-catkin-pkg-modules',
            'python3-colcon-ros',
            'python3-colcon-test-result',
            'python3-rosdistro-modules',
        ])

    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print(' -', debian_pkg_name)

    # the generic list is written here; the build/test lists are produced
    # by a separate step and only referenced by name
    install_list = 'install_list.txt'
    write_install_list(
        os.path.join(args.dockerfile_dir, install_list),
        debian_pkg_names, apt_cache)
    install_lists = [install_list, 'install_list_build.txt']
    if args.testing:
        install_lists.append('install_list_test.txt')

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'uid': get_user_id(),

        'build_tool': args.build_tool,
        'ros_version': args.ros_version,

        'build_environment_variables': args.env_vars,

        'install_lists': install_lists,
        'dependencies': [],
        'dependency_versions': [],

        'testing': args.testing,
        # two workspace roots means an overlay build on top of an underlay
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    if len(args.workspace_root) == 1:
        print(' -v %s:/tmp/ws' % args.workspace_root[0])
    else:
        # all but the last root are underlays (ws, ws2, ...); the last one
        # is the overlay workspace
        for i, workspace_root in enumerate(args.workspace_root[0:-1]):
            print(' -v %s:/tmp/ws%s' % (workspace_root, i or ''))
        print(' -v %s:/tmp/ws_overlay' % args.workspace_root[-1])
def main(argv=sys.argv[1:]):
    """Write apt install lists for the build and test dependencies.

    Crawls the given package roots (all but the last are underlays), maps
    the ROS packages' build / test rosdep keys to binary packages, resolves
    their versions from the apt cache and writes
    ``install_list_build.txt`` / ``install_list_test.txt`` into
    ``output_dir`` as ``name=version`` lines.
    """
    parser = argparse.ArgumentParser(
        # fixed: the two description fragments previously concatenated
        # without a separating space ("areneeded")
        description='Lists available binary packages and versions which are '
                    'needed to satisfy rosdep keys for ROS packages in the workspace')
    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_output_dir(parser)
    add_argument_skip_rosdep_keys(parser)
    parser.add_argument(
        '--package-root', nargs='+',
        help='The path to the directory containing packages')
    args = parser.parse_args(argv)

    with Scope('SUBSECTION', 'Enumerating packages needed to build'):
        # find all of the underlay packages
        underlay_pkgs = {}
        for package_root in args.package_root[0:-1]:
            print("Crawling for packages in '%s'" % package_root)
            underlay_pkgs.update(find_packages(package_root))

        underlay_pkg_names = [pkg.name for pkg in underlay_pkgs.values()]
        print('Found the following underlay packages:')
        for pkg_name in sorted(underlay_pkg_names):
            print(' -', pkg_name)

        # get direct build dependencies
        package_root = args.package_root[-1]
        print("Crawling for packages in '%s'" % package_root)
        pkgs = find_packages(package_root)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        # get build dependencies and map them to binary packages
        all_pkgs = set(pkgs.values()).union(underlay_pkgs.values())

        # evaluate conditional dependencies and expand group dependencies
        # before extracting the dependency keys
        for pkg in all_pkgs:
            pkg.evaluate_conditions(os.environ)
        for pkg in all_pkgs:
            for group_depend in pkg.group_depends:
                if group_depend.evaluated_condition:
                    group_depend.extract_group_members(all_pkgs)

        dependency_keys_build = get_dependencies(
            all_pkgs, 'build', _get_build_and_recursive_run_dependencies,
            pkgs.values())
        dependency_keys_test = get_dependencies(
            all_pkgs, 'run and test',
            _get_test_and_recursive_run_dependencies, pkgs.values())

        if args.skip_rosdep_keys:
            dependency_keys_build.difference_update(args.skip_rosdep_keys)
            dependency_keys_test.difference_update(args.skip_rosdep_keys)

        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        os_pkg_names_build = resolve_names(dependency_keys_build, **context)
        os_pkg_names_test = resolve_names(dependency_keys_test, **context)
        # test list only contains packages not already installed for building
        os_pkg_names_test -= os_pkg_names_build

    with Scope('SUBSECTION', 'Resolving packages versions using apt cache'):
        apt_cache = Cache()
        os_pkg_versions = get_binary_package_versions(
            apt_cache, os_pkg_names_build | os_pkg_names_test)

    with open(os.path.join(args.output_dir, 'install_list_build.txt'), 'w') as out_file:
        for package in sorted(os_pkg_names_build):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
    with open(os.path.join(args.output_dir, 'install_list_test.txt'), 'w') as out_file:
        for package in sorted(os_pkg_names_test):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
def main(argv=sys.argv[1:]):
    """Generate a standalone 'CI' shell script.

    Installs a template hook to capture the SCM specs, shell snippets and
    job parameters produced while configuring the CI job (without touching
    Jenkins), then expands them into a runnable ``ci_script.sh`` printed to
    stdout.
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'CI' script")
    # Positional
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'ci')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_ignore(parser)
    add_argument_build_tool(parser)
    add_argument_package_selection_args(parser)
    add_argument_repos_file_urls(parser)
    add_argument_skip_cleanup(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--underlay-source-path', nargs='*', metavar='DIR_NAME',
        help='Path to one or more install spaces to use as an underlay')
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []
            self.parameters = {}

            # CLI arguments take precedence over the job's template
            # parameter defaults collected in beforeInclude below
            if args.skip_cleanup:
                self.parameters['skip_cleanup'] = 'true'
            if args.repos_file_urls is not None:
                self.parameters['repos_file_urls'] = ' '.join(args.repos_file_urls)
            if args.test_branch is not None:
                self.parameters['test_branch'] = args.test_branch
            if args.build_ignore is not None:
                self.parameters['build_ignore'] = ' '.join(args.build_ignore)
            if args.package_selection_args is not None:
                self.parameters['package_selection_args'] = ' '.join(args.package_selection_args)

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                # honor the --build-tool override in the captured snippets
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)
            if template_path.endswith('/snippet/property_parameters-definition.xml.em'):
                # record template parameter defaults unless already set above
                for parameter in reversed(kwargs['locals']['parameters']):
                    name = parameter['name']
                    value_type = parameter['type']
                    if value_type in ['string', 'text']:
                        default_value = parameter['default_value']
                    elif value_type == 'boolean':
                        default_value = 'true' if parameter.get(
                            'default_value', False) else 'false'
                    else:
                        continue
                    self.parameters.setdefault(name, default_value)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_ci_build_files(config, args.rosdistro_name)
    build_file = build_files[args.ci_build_name]

    underlay_source_paths = [
        os.path.abspath(p) for p in args.underlay_source_path or []]

    # configure the job only to drive the templates; nothing is sent to
    # Jenkins (jenkins=False, views=False)
    configure_ci_job(
        args.config_url, args.rosdistro_name, args.ci_build_name,
        args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        jenkins=False, views=False,
        underlay_source_paths=underlay_source_paths)

    templates.template_hooks = None

    ci_job_name = get_ci_job_name(
        args.rosdistro_name, args.os_name, args.os_code_name, args.arch,
        'script')

    value = expand_template(
        'ci/ci_script.sh.em', {
            'ci_job_name': ci_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool,
            'parameters': hook.parameters},
        options={BANGPATH_OPT: False})
    # run nested invocations with the same interpreter as this process
    value = value.replace('python3 ', sys.executable + ' ')
    print(value)
def main(argv=sys.argv[1:]):
    """Generate a standalone 'doc' shell script.

    Captures the SCM specs and shell snippets produced while configuring
    the doc job (without touching Jenkins), substitutes the force flag,
    strips the server rsync steps, and prints the expanded
    ``doc_script.sh`` to stdout.
    """
    parser = argparse.ArgumentParser(description="Generate a 'doc' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_force(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # configure the job only to drive the templates; nothing is sent to
    # Jenkins (jenkins=False, views=[])
    configure_doc_job(
        args.config_url, args.rosdistro_name, args.doc_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        jenkins=False, views=[])

    templates.template_hooks = None

    scripts = hook.scripts
    doc_job_name = get_doc_job_name(
        args.rosdistro_name, args.doc_build_name, args.repository_name,
        args.os_name, args.os_code_name, args.arch)

    # set force flag: replace the first '$force' placeholder with a literal
    force_flag = '$force'
    for i, script in enumerate(scripts):
        offset = script.find(force_flag)
        if offset != -1:
            script = script[:offset] + ('true' if args.force else 'false') + \
                script[offset + len(force_flag):]
            scripts[i] = script
            break

    # remove rsync from server
    rsync_cmd = 'rsync'
    for i, script in enumerate(scripts):
        offset = script.find(rsync_cmd)
        if offset != -1:
            del scripts[i]
            break

    # remove rsync back to server
    cmd_part = '--delete'
    for i, script in enumerate(scripts):
        offset = script.find(cmd_part)
        if offset != -1:
            del scripts[i]
            break

    value = expand_template(
        'doc/doc_script.sh.em', {
            'doc_job_name': doc_job_name,
            'scms': hook.scms,
            'scripts': scripts},
        options={BANGPATH_OPT: False})
    # run nested invocations with the same interpreter as this process
    value = value.replace('python3', sys.executable)
    print(value)
def main(argv=sys.argv[1:]):
    """Generate a standalone 'doc' shell script.

    NOTE(review): this is a near-verbatim duplicate of another doc-script
    generator in this file — consider consolidating the two.

    Captures the SCM specs and shell snippets produced while configuring
    the doc job (without touching Jenkins), substitutes the force flag,
    strips the server rsync steps, and prints the expanded
    ``doc_script.sh`` to stdout.
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'doc' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_force(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # configure the job only to drive the templates; nothing is sent to
    # Jenkins (jenkins=False, views=[])
    configure_doc_job(
        args.config_url, args.rosdistro_name, args.doc_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        jenkins=False, views=[])

    templates.template_hooks = None

    scripts = hook.scripts
    doc_job_name = get_doc_job_name(
        args.rosdistro_name, args.doc_build_name, args.repository_name,
        args.os_name, args.os_code_name, args.arch)

    # set force flag: replace the first '$force' placeholder with a literal
    force_flag = '$force'
    for i, script in enumerate(scripts):
        offset = script.find(force_flag)
        if offset != -1:
            script = script[:offset] + ('true' if args.force else 'false') + \
                script[offset + len(force_flag):]
            scripts[i] = script
            break

    # remove rsync from server
    rsync_cmd = 'rsync'
    for i, script in enumerate(scripts):
        offset = script.find(rsync_cmd)
        if offset != -1:
            del scripts[i]
            break

    # remove rsync back to server
    cmd_part = '--delete'
    for i, script in enumerate(scripts):
        offset = script.find(cmd_part)
        if offset != -1:
            del scripts[i]
            break

    value = expand_template(
        'doc/doc_script.sh.em', {
            'doc_job_name': doc_job_name,
            'scms': hook.scms,
            'scripts': scripts},
        options={BANGPATH_OPT: False})
    # run nested invocations with the same interpreter as this process
    value = value.replace('python3', sys.executable)
    print(value)
def main(argv=sys.argv[1:]):
    """Generate the set of 'prerelease' shell scripts into ``output_dir``.

    Builds an underlay workspace from named / custom repositories, captures
    the devel-job shell snippets via a template hook, derives matching
    overlay-workspace snippets from them, and expands the prerelease script
    templates (prerelease.sh plus clone/build under/overlay helpers).
    Returns 1 on invalid repository arguments.
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos', nargs='*', default=[], metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch', nargs='*', type=_repository_name_and_branch,
        default=[], metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo', nargs='*',
        type=_repository_name_and_type_and_url_and_branch, default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository, '
             'e.g. "common_tutorials:git:https://github.com/ros/common_tutorials:pullrequest-1"')
    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    # Targets defined by source build file are subset of targets
    # defined by release build files. To increase the number of supported
    # pre-release targets, we combine all targets defined by all release
    # build files and use that when configuring the devel job.
    release_build_files = get_release_build_files(config, args.rosdistro_name)
    release_targets_combined = {}
    if release_build_files:
        release_targets_combined[args.os_name] = {}
        for build_name, rel_obj in release_build_files.items():
            if args.os_name not in rel_obj.targets:
                continue
            for dist_name, targets in rel_obj.targets[args.os_name].items():
                if dist_name not in release_targets_combined[args.os_name]:
                    release_targets_combined[args.os_name][dist_name] = {}
                release_targets_combined[args.os_name][dist_name].update(targets)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        if not repositories[repo_name]:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name, file=sys.stderr)
            return 1

    # --custom-branch entries reuse the distro's source entry with an
    # overridden version
    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        if not source_repo:
            print(("The repository '%s' has no source entry in the " +
                   "distribution file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    # --custom-repo entries may override previously collected repositories
    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories and repositories[repo_name]:
            print("custom_repos option overriding '%s' to pull via '%s' "
                  "from '%s' with version '%s'. " %
                  (repo_name, repo_type, repo_url, version),
                  file=sys.stderr)
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [
        (repositories[k], 'ws/src/%s' % k)
        for k in sorted(repositories.keys())]

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                # honor the --build-tool override in the captured snippets
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # use any source repo to pass to devel job template
    if index.distributions[args.rosdistro_name].get(
            'distribution_type', 'ros1') == 'ros1':
        package_name = 'catkin'
    elif index.distributions[args.rosdistro_name].get(
            'distribution_type', 'ros1') == 'ros2':
        package_name = 'ros_workspace'
    else:
        assert False, 'Unsupported ROS version ' + \
            str(index.distributions[args.rosdistro_name].get(
                'distribution_type', None))
    source_repository = deepcopy(
        dist_file.repositories[package_name].source_repository)
    if not source_repository:
        print(("The repository '%s' does not have a source entry in the distribution " +
               'file. We cannot generate a prerelease without a source entry.') % package_name,
              file=sys.stderr)
        return 1
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    # configure the job only to drive the templates; nothing is sent to
    # Jenkins (jenkins=False, views=False)
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository,
        build_targets=release_targets_combined)
    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue
        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')
        # replace mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py needs to find packages in both
        #   the underlay as well as the overlay workspace
        # - build_and_test.py needs to source the environment of
        #   the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/ws:/tmp/ws'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'ws_overlay:/tmp/ws_overlay')
        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')
        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')
        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__)),
        'build_tool': args.build_tool or build_file.build_tool})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        # make the generated script executable for the owner
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print(' cd %s' % args.output_dir)
    print(' ./prerelease.sh')
def main(argv=sys.argv[1:]):
    """Generate the set of 'prerelease' shell scripts (legacy variant).

    Builds a catkin underlay workspace from named / custom repositories,
    captures the devel-job shell snippets via a template hook, derives
    matching overlay-workspace snippets, and expands the prerelease script
    templates into ``output_dir``. Returns 1 on invalid repository
    arguments.
    """
    global templates
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_output_dir(parser, required=True)

    group = parser.add_argument_group(
        'Repositories in underlay workspace',
        description='The repositories in the underlay workspace will be ' +
                    'built and installed as well as built and tested. ' +
                    'Dependencies will be provided by binary packages.')
    group.add_argument(
        'source_repos', nargs='*', default=[], metavar='REPO_NAME',
        help="A name of a 'repository' from the distribution file")
    group.add_argument(
        '--custom-branch', nargs='*', type=_repository_name_and_branch,
        default=[], metavar='REPO_NAME:BRANCH_OR_TAG_NAME',
        help="A name of a 'repository' from the distribution file followed " +
             'by a colon and a branch / tag name')
    group.add_argument(
        '--custom-repo', nargs='*',
        type=_repository_name_and_type_and_url_and_branch, default=[],
        metavar='REPO_NAME:REPO_TYPE:REPO_URL:BRANCH_OR_TAG_NAME',
        help='The name, type, url and branch / tag name of a repository')
    add_overlay_arguments(parser)

    args = parser.parse_args(argv)

    print('Fetching buildfarm configuration...')
    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    print('Fetching rosdistro cache...')
    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for underlay workspace
    repositories = {}
    for repo_name in args.source_repos:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            repositories[repo_name] = \
                dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1

    # --custom-branch entries reuse the distro's source entry with an
    # overridden version
    for repo_name, custom_version in args.custom_branch:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        try:
            source_repo = dist_file.repositories[repo_name].source_repository
        except KeyError:
            print(("The repository '%s' was not found in the distribution " +
                   "file") % repo_name, file=sys.stderr)
            return 1
        source_repo = deepcopy(source_repo)
        source_repo.version = custom_version
        repositories[repo_name] = source_repo

    # --custom-repo entries define repositories from scratch
    for repo_name, repo_type, repo_url, version in args.custom_repo:
        if repo_name in repositories:
            print("The repository '%s' appears multiple times" % repo_name,
                  file=sys.stderr)
            return 1
        source_repo = RepositorySpecification(
            repo_name, {
                'type': repo_type,
                'url': repo_url,
                'version': version,
            })
        repositories[repo_name] = source_repo

    scms = [
        (repositories[k], 'catkin_workspace/src/%s' % k)
        for k in sorted(repositories.keys())]

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        def beforeInclude(self, *args, **kwargs):
            # removed leftover debug print of every included template path
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    templates.template_hooks = [hook]

    # use random source repo to pass to devel job template
    source_repository = deepcopy(list(repositories.values())[0])
    source_repository.name = 'prerelease'
    print('Evaluating job templates...')
    # configure the job only to drive the templates; nothing is sent to
    # Jenkins (jenkins=False, views=False)
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        None, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        index=index, dist_file=dist_file, dist_cache=dist_cache,
        jenkins=False, views=False,
        source_repository=source_repository)
    templates.template_hooks = None

    # derive scripts for overlay workspace from underlay
    overlay_scripts = []
    for script in hook.scripts:
        # skip cloning of ros_buildfarm repository
        if 'git clone' in script and '.git ros_buildfarm' in script:
            continue
        # skip build-and-install step
        if 'build and install' in script:
            continue
        # add prerelease overlay flag
        run_devel_job = '/run_devel_job.py'
        if run_devel_job in script:
            script = script.replace(
                run_devel_job, run_devel_job + ' --prerelease-overlay')
        # replace mounted workspace volume with overlay and underlay
        # used by:
        # - create_devel_task_generator.py needs to find packages in both
        #   the underlay as well as the overlay workspace
        # - catkin_make_isolated_and_test.py needs to source the environment of
        #   the underlay before building the overlay
        mount_volume = '-v $WORKSPACE/catkin_workspace:/tmp/catkin_workspace'
        if mount_volume in script:
            script = script.replace(
                mount_volume, mount_volume + ':ro ' + '-v $WORKSPACE/' +
                'catkin_workspace_overlay:/tmp/catkin_workspace_overlay')
        # relocate all docker files
        docker_path = '$WORKSPACE/docker_'
        if docker_path in script:
            script = script.replace(
                docker_path, docker_path + 'overlay_')
        # rename all docker images
        name_suffix = '_prerelease'
        if name_suffix in script:
            script = script.replace(
                name_suffix, name_suffix + '_overlay')
        overlay_scripts.append(script)

    from ros_buildfarm import __file__ as ros_buildfarm_file
    data = deepcopy(args.__dict__)
    data.update({
        'scms': scms,
        'scripts': hook.scripts,
        'overlay_scripts': overlay_scripts,
        'ros_buildfarm_python_path': os.path.dirname(
            os.path.dirname(os.path.abspath(ros_buildfarm_file))),
        'python_executable': sys.executable,
        'prerelease_script_path': os.path.dirname(os.path.abspath(__file__))})

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # generate multiple scripts
    for script_name in [
            'prerelease',
            'prerelease_build_overlay',
            'prerelease_build_underlay',
            'prerelease_clone_overlay',
            'prerelease_clone_underlay']:
        content = expand_template(
            'prerelease/%s_script.sh.em' % script_name, data,
            options={BANGPATH_OPT: False})
        script_file = os.path.join(args.output_dir, script_name + '.sh')
        with open(script_file, 'w') as h:
            h.write(content)
        # make the generated script executable for the owner
        os.chmod(script_file, os.stat(script_file).st_mode | stat.S_IEXEC)

    print('')
    print('Generated prerelease script - to execute it run:')
    if os.path.abspath(args.output_dir) != os.path.abspath(os.curdir):
        print(' cd %s' % args.output_dir)
    print(' ./prerelease.sh')
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile used by a CI job to create a workspace.

    Collects the target platform and repository sources from the command
    line, resolves the exact versions of the workspace-import tooling via
    the local apt cache and renders the
    'ci/create_workspace.Dockerfile.em' template into the requested
    Dockerfile directory.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")
    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_package_selection_args(parser)
    add_argument_repos_file_urls(parser)
    add_argument_repository_names(parser, optional=True)
    add_argument_skip_rosdep_keys(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--workspace-root',
        nargs='+',
        help='The root path of the workspace to compile')
    args = parser.parse_args(argv)

    # at least one source of repositories is required to populate the
    # workspace
    # NOTE(review): assert is stripped under `python -O`; consider
    # parser.error() for user-facing validation — confirm before changing
    assert args.repos_file_urls or args.repository_names

    # tools needed inside the container to import and select packages
    debian_pkg_names = [
        'git',
        'python3-apt',
        'python3-colcon-metadata',
        'python3-colcon-package-information',
        'python3-colcon-package-selection',
        'python3-colcon-recursive-crawl',
        'python3-colcon-ros',
        'python3-rosdep',
        'python3-vcstool',
    ]

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(
        apt_cache, debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        'custom_rosdep_urls': [],

        # build as the invoking user so generated files are not root-owned
        'uid': get_user_id(),

        # flatten the env var mapping into NAME=VALUE strings
        'build_environment_variables': [
            '%s=%s' % key_value for key_value in args.env_vars.items()],

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,

        'repos_file_urls': args.repos_file_urls,
        'repository_names': args.repository_names,
        'test_branch': args.test_branch,
        'skip_rosdep_keys': args.skip_rosdep_keys,
        'package_selection_args': args.package_selection_args,
        'workspace_root': args.workspace_root,
    }
    create_dockerfile(
        'ci/create_workspace.Dockerfile.em', data, args.dockerfile_dir)
def main(argv=sys.argv[1:]):
    """Generate a standalone 'CI' shell script.

    Re-runs the Jenkins CI job configuration with ``jenkins=False`` while a
    template hook captures the SCM checkouts, shell build steps and job
    parameters that would have gone into the job XML, then expands them
    into 'ci/ci_script.sh.em' and prints the resulting script to stdout.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(description="Generate a 'CI' script")
    # Positional
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'ci')
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    # these three return their argument objects so their remainders can be
    # extracted from argv before the regular parse below
    a1 = add_argument_package_selection_args(parser)
    a2 = add_argument_build_tool_args(parser)
    a3 = add_argument_build_tool_test_args(parser)
    add_argument_repos_file_urls(parser)
    add_argument_skip_cleanup(parser)
    add_argument_test_branch(parser)
    parser.add_argument(
        '--underlay-source-path', nargs='*', metavar='DIR_NAME',
        help='Path to one or more install spaces to use as an underlay')
    remainder_args = extract_multiple_remainders(argv, (a1, a2, a3))
    args = parser.parse_args(argv)
    # re-attach the extracted remainder values onto the parsed namespace
    for k, v in remainder_args.items():
        setattr(args, k, v)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []
            # seed the job parameters from the command line; template
            # defaults are only applied via setdefault() later
            self.parameters = {}
            if args.skip_cleanup:
                self.parameters['skip_cleanup'] = 'true'
            if args.repos_file_urls is not None:
                self.parameters['repos_file_urls'] = ' '.join(
                    args.repos_file_urls)
            if args.test_branch is not None:
                self.parameters['test_branch'] = args.test_branch
            if args.package_selection_args is not None:
                self.parameters['package_selection_args'] = ' '.join(
                    args.package_selection_args)
            if args.build_tool_args is not None:
                self.parameters['build_tool_args'] = ' '.join(
                    args.build_tool_args)
            if args.build_tool_test_args is not None:
                self.parameters['build_tool_test_args'] = ' '.join(
                    args.build_tool_test_args)

        def beforeInclude(self, *_, **kwargs):
            # called by the template engine for every included template;
            # dispatch on the included file's path
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                # override the build tool baked into the job template
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)
            if template_path.endswith(
                    '/snippet/property_parameters-definition.xml.em'):
                for parameter in reversed(kwargs['locals']['parameters']):
                    name = parameter['name']
                    value_type = parameter['type']
                    if value_type in ['string', 'text']:
                        default_value = parameter['default_value']
                    elif value_type == 'boolean':
                        default_value = 'true' if parameter.get(
                            'default_value', False) else 'false'
                    else:
                        continue
                    # command-line provided values win over template defaults
                    self.parameters.setdefault(name, default_value)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_ci_build_files(config, args.rosdistro_name)
    build_file = build_files[args.ci_build_name]

    underlay_source_paths = [
        os.path.abspath(p) for p in args.underlay_source_path or []]

    # run the job configuration purely for its template side effects;
    # jenkins=False / views=False prevents any contact with a Jenkins master
    configure_ci_job(
        args.config_url, args.rosdistro_name, args.ci_build_name,
        args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file,
        jenkins=False, views=False,
        underlay_source_paths=underlay_source_paths)

    templates.template_hooks = None

    ci_job_name = get_ci_job_name(
        args.rosdistro_name, args.os_name,
        args.os_code_name, args.arch, 'script')

    value = expand_template(
        'ci/ci_script.sh.em', {
            'ci_job_name': ci_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool,
            'parameters': hook.parameters},
        options={BANGPATH_OPT: False})
    # run nested invocations with the same interpreter as this process
    value = value.replace('python3 ', sys.executable + ' ')
    print(value)
def main(argv=sys.argv[1:]):
    """Generate a standalone 'devel' shell script.

    Configures the devel job with ``jenkins=False`` while a template hook
    captures the SCM checkouts and shell build steps, then expands
    'devel/devel_script.sh.em' with them and prints the script to stdout.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            # called by the template engine for every included template
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                # override the build tool baked into the job template
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    # run the job configuration purely for its template side effects
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file, jenkins=False, views=False)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool},
        options={BANGPATH_OPT: False})
    # run nested invocations with the same interpreter as this process
    value = value.replace('python3', sys.executable)
    print(value)
def main(argv=sys.argv[1:]):
    """Write apt install lists satisfying the workspace's rosdep keys.

    Marks deselected packages with COLCON_IGNORE files, crawls the underlay
    and workspace packages, resolves their build and test rosdep keys to
    binary package names and writes 'install_list_build.txt' and
    'install_list_test.txt' (one ``name=version`` line per package) into
    the output directory.

    Fix: the two ``description`` string fragments were concatenated without
    a separating space, rendering "areneeded" in ``--help`` output.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description='Lists available binary packages and versions which are '
                    'needed to satisfy rosdep keys for ROS packages in the '
                    'workspace')
    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_output_dir(parser)
    add_argument_package_selection_args(parser)
    add_argument_skip_rosdep_keys(parser)
    parser.add_argument(
        '--package-root', nargs='+',
        help='The path to the directory containing packages')
    args = parser.parse_args(argv)

    # the last --package-root entry is the workspace itself; all earlier
    # entries are underlays
    workspace_root = args.package_root[-1]
    os.chdir(workspace_root)

    with Scope('SUBSECTION', 'mark packages with IGNORE files'):
        all_packages = locate_packages(workspace_root)
        selected_packages = all_packages
        if args.package_selection_args:
            print(
                'Using package selection arguments:',
                args.package_selection_args)
            selected_packages = locate_packages(
                workspace_root, extra_args=args.package_selection_args)

            # packages filtered out by the selection get a COLCON_IGNORE
            # marker so later build steps skip them
            to_ignore = all_packages.keys() - selected_packages.keys()
            print('Ignoring %d packages' % len(to_ignore))
            for package in sorted(to_ignore):
                print('-', package)
                package_root = all_packages[package]
                Path(package_root, 'COLCON_IGNORE').touch()

        print(
            'There are %d packages which meet selection criteria' %
            len(selected_packages))

    with Scope('SUBSECTION', 'Enumerating packages needed to build'):
        # find all of the underlay packages
        underlay_pkgs = {}
        all_underlay_pkg_names = set()
        for package_root in args.package_root[0:-1]:
            print("Crawling for packages in '%s'" % package_root)
            underlay_pkgs.update(find_packages(package_root))

            # Check for a colcon index for non-ROS package detection
            colcon_index = os.path.join(package_root, 'colcon-core', 'packages')
            try:
                all_underlay_pkg_names.update(os.listdir(colcon_index))
            except FileNotFoundError:
                pass

        underlay_pkg_names = [pkg.name for pkg in underlay_pkgs.values()]
        print('Found the following ROS underlay packages:')
        for pkg_name in sorted(underlay_pkg_names):
            print(' -', pkg_name)

        # get direct build dependencies
        package_root = args.package_root[-1]
        print("Crawling for packages in '%s'" % package_root)
        pkgs = find_packages(package_root)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following ROS packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        # get build dependencies and map them to binary packages
        all_pkgs = set(pkgs.values()).union(underlay_pkgs.values())

        # conditions must be evaluated before group memberships are expanded
        for pkg in all_pkgs:
            pkg.evaluate_conditions(os.environ)
        for pkg in all_pkgs:
            for group_depend in pkg.group_depends:
                if group_depend.evaluated_condition:
                    group_depend.extract_group_members(all_pkgs)

        dependency_keys_build = get_dependencies(
            all_pkgs, 'build', _get_build_and_recursive_run_dependencies,
            pkgs.values())
        dependency_keys_test = get_dependencies(
            all_pkgs, 'run and test',
            _get_test_and_recursive_run_dependencies, pkgs.values())

        if args.skip_rosdep_keys:
            dependency_keys_build.difference_update(args.skip_rosdep_keys)
            dependency_keys_test.difference_update(args.skip_rosdep_keys)

        # remove all non-ROS packages and packages which are present but
        # specifically ignored
        every_package_name = all_packages.keys() | all_underlay_pkg_names
        dependency_keys_build -= every_package_name
        dependency_keys_test -= every_package_name

        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        os_pkg_names_build = resolve_names(dependency_keys_build, **context)
        os_pkg_names_test = resolve_names(dependency_keys_test, **context)
        # the test list only carries packages not already in the build list
        os_pkg_names_test -= os_pkg_names_build

    with Scope('SUBSECTION', 'Resolving packages versions using apt cache'):
        apt_cache = Cache()
        os_pkg_versions = get_binary_package_versions(
            apt_cache, os_pkg_names_build | os_pkg_names_test)

    with open(
            os.path.join(args.output_dir, 'install_list_build.txt'),
            'w') as out_file:
        for package in sorted(os_pkg_names_build):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
    with open(
            os.path.join(args.output_dir, 'install_list_test.txt'),
            'w') as out_file:
        for package in sorted(os_pkg_names_test):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
def main(argv=sys.argv[1:]):
    """Generate a standalone 'release' shell script.

    Configures the release jobs with ``jenkins=False`` while a template
    hook captures all shell build steps (with a '--' marker separating the
    sourcedeb from the binarydeb job), post-processes the binarydeb steps
    and prints the expanded 'release/release_script.sh.em' to stdout.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(description="Generate a 'release' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    parser.add_argument(
        '--skip-install', action='store_true',
        help='Skip trying to install binarydeb')
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        # NOTE(review): *args shadows the enclosing `args` namespace here,
        # but the outer name is not used inside this method
        def beforeFile(self, *args, **kwargs):
            template_path = kwargs['file'].name
            # '--' marks the boundary between sourcedeb and binarydeb steps
            if template_path.endswith('/release/binarydeb_job.xml.em'):
                self.scripts.append('--')
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # run the job configuration purely for its template side effects
    configure_release_job(
        args.config_url, args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name,
        jenkins=False, views=[],
        generate_import_package_job=False,
        generate_sync_packages_jobs=False,
        filter_arches=args.arch)

    templates.template_hooks = None

    source_job_name = get_sourcedeb_job_name(
        args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name)
    binary_job_name = get_binarydeb_job_name(
        args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name, args.arch)

    # split the captured scripts at the job boundary marker
    separator_index = hook.scripts.index('--')
    source_scripts = hook.scripts[:separator_index]
    binary_scripts = hook.scripts[separator_index + 1:]

    # inject additional argument to skip fetching sourcedeb from repo
    script_name = '/run_binarydeb_job.py '
    additional_argument = '--skip-download-sourcedeb '
    for i, script in enumerate(binary_scripts):
        offset = script.find(script_name)
        if offset != -1:
            offset += len(script_name)
            script = script[:offset] + additional_argument + script[offset:]
            binary_scripts[i] = script
            break

    # remove rm command for sourcedeb location
    rm_command = 'rm -fr $WORKSPACE/binarydeb'
    for i, script in enumerate(binary_scripts):
        offset = script.find(rm_command)
        if offset != -1:
            script = script[:offset] + script[offset + len(rm_command):]
            binary_scripts[i] = script
            break

    if args.skip_install:
        # remove install step
        script_name = '/create_binarydeb_install_task_generator.py '
        for i, script in enumerate(binary_scripts):
            offset = script.find(script_name)
            if offset != -1:
                del binary_scripts[i]
                break

    value = expand_template(
        'release/release_script.sh.em', {
            'source_job_name': source_job_name,
            'binary_job_name': binary_job_name,
            'source_scripts': source_scripts,
            'binary_scripts': binary_scripts},
        options={BANGPATH_OPT: False})
    # run nested invocations with the same interpreter as this process
    value = value.replace('python3', sys.executable)
    print(value)
def main(argv=sys.argv[1:]):
    """Generate a 'prerelease overlay' script, JSON or vcstool repos file.

    Determines the underlay package names (from ``--underlay-packages`` or
    by crawling 'ws/src'), computes the overlay package set from them, and
    prints either the expanded overlay shell script (default), the overlay
    repositories as JSON (``--json``), or a vcstool-style repos listing
    (``--vcstool``).

    Fix: dropped the dead ``global templates`` statement — ``templates`` is
    never assigned nor referenced in this function, so the declaration had
    no effect.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease overlay' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_overlay_arguments(parser)
    parser.add_argument(
        '--underlay-packages', nargs='+',
        help='Names of packages on which the overlay builds '
             '(by default package names come from packages found in '
             "'ws/src')")
    # the three output formats are mutually exclusive
    group = parser.add_mutually_exclusive_group()
    group.add_argument(
        '--json', action='store_true',
        help='Output overlay information as JSON instead of a shell script')
    group.add_argument(
        '--vcstool', action='store_true',
        help='Output overlay information as vcstool repos file')
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for overlay workspace
    underlay_package_names = args.underlay_packages
    if underlay_package_names is None:
        packages = find_packages('ws/src')
        underlay_package_names = [pkg.name for pkg in packages.values()]
    # diagnostics go to stderr so stdout stays machine-consumable
    print('Underlay workspace contains %d packages:%s' % (
        len(underlay_package_names),
        ''.join([
            '\n- %s' % pkg_name
            for pkg_name in sorted(underlay_package_names)])),
        file=sys.stderr)

    overlay_package_names = get_overlay_package_names(
        args.pkg, args.exclude_pkg, args.level,
        underlay_package_names, dist_cache.release_package_xmls, output=True)
    print('Overlay workspace will contain %d packages:%s' % (
        len(overlay_package_names),
        ''.join([
            '\n- %s' % pkg_name
            for pkg_name in sorted(overlay_package_names)])),
        file=sys.stderr)

    repositories = {}
    for pkg_name in overlay_package_names:
        repositories[pkg_name] = \
            get_repository_specification_for_released_package(
                dist_file, pkg_name)
    scms = [
        (repositories[k], 'ws_overlay/src/%s' % k)
        for k in sorted(repositories.keys())]

    if args.json:
        print(json.dumps(
            [vars(r) for r, p in scms], sort_keys=True, indent=2))
    elif args.vcstool:
        print('repositories:')
        for r, p in scms:
            print(' %s:' % p)
            print(' type: ' + r.type)
            print(' url: ' + r.url)
            print(' version: ' + r.version)
    else:
        value = expand_template(
            'prerelease/prerelease_overlay_script.sh.em',
            {'scms': scms}, options={BANGPATH_OPT: False})
        print(value)
def main(argv=sys.argv[1:]):
    """Generate a 'prerelease overlay' script or JSON description.

    Determines the underlay package names (from ``--underlay-packages`` or
    by crawling 'catkin_workspace/src'), computes the overlay package set
    from them, and prints either the expanded overlay shell script
    (default) or the overlay repositories as JSON (``--json``).

    Fix: dropped the dead ``global templates`` statement — ``templates`` is
    never assigned nor referenced in this function, so the declaration had
    no effect.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'prerelease overlay' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_overlay_arguments(parser)
    parser.add_argument(
        '--underlay-packages', nargs='+',
        help='Names of packages on which the overlay builds '
             '(by default package names come from packages found in '
             "'catkin_workspace/src')"
    )
    parser.add_argument(
        '--json', action='store_true',
        help='Output overlay information as JSON instead of a shell script'
    )
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    index = get_index(config.rosdistro_index_url)
    dist_cache = get_distribution_cache(index, args.rosdistro_name)
    dist_file = dist_cache.distribution_file

    # determine source repositories for overlay workspace
    underlay_package_names = args.underlay_packages
    if underlay_package_names is None:
        packages = find_packages('catkin_workspace/src')
        underlay_package_names = [pkg.name for pkg in packages.values()]
    # diagnostics go to stderr so stdout stays machine-consumable
    print('Underlay workspace contains %d packages:%s' % (
        len(underlay_package_names),
        ''.join([
            '\n- %s' % pkg_name
            for pkg_name in sorted(underlay_package_names)])),
        file=sys.stderr)

    overlay_package_names = get_overlay_package_names(
        args.pkg, args.exclude_pkg, args.level,
        underlay_package_names, dist_cache.release_package_xmls, output=True)
    print('Overlay workspace will contain %d packages:%s' % (
        len(overlay_package_names),
        ''.join([
            '\n- %s' % pkg_name
            for pkg_name in sorted(overlay_package_names)])),
        file=sys.stderr)

    repositories = {}
    for pkg_name in overlay_package_names:
        repositories[pkg_name] = \
            get_repository_specification_for_released_package(
                dist_file, pkg_name)
    scms = [
        (repositories[k], 'catkin_workspace_overlay/src/%s' % k)
        for k in sorted(repositories.keys())]

    if not args.json:
        value = expand_template(
            'prerelease/prerelease_overlay_script.sh.em',
            {'scms': scms}, options={BANGPATH_OPT: False})
        print(value)
    else:
        print(json.dumps(
            [vars(r) for r, p in scms], sort_keys=True, indent=2))
def main(argv=sys.argv[1:]):
    """Generate a standalone 'devel' shell script.

    Configures the devel job with ``jenkins=False`` while a template hook
    captures the SCM checkouts and shell build steps, then expands
    'devel/devel_script.sh.em' with them and prints the script to stdout.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'devel' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'source')
    add_argument_repository_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_build_tool(parser)
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scms = []
            self.scripts = []

        def beforeInclude(self, *_, **kwargs):
            # called by the template engine for every included template
            template_path = kwargs['file'].name
            if template_path.endswith('/snippet/scm.xml.em'):
                self.scms.append(
                    (kwargs['locals']['repo_spec'], kwargs['locals']['path']))
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                script = kwargs['locals']['script']
                # reuse existing ros_buildfarm folder if it exists
                if 'Clone ros_buildfarm' in script:
                    lines = script.splitlines()
                    lines.insert(0, 'if [ ! -d "ros_buildfarm" ]; then')
                    lines += [
                        'else',
                        'echo "Using existing ros_buildfarm folder"',
                        'fi',
                    ]
                    script = '\n'.join(lines)
                # override the build tool baked into the job template
                if args.build_tool and ' --build-tool ' in script:
                    script = script.replace(
                        ' --build-tool catkin_make_isolated',
                        ' --build-tool ' + args.build_tool)
                self.scripts.append(script)

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    config = get_config_index(args.config_url)
    build_files = get_source_build_files(config, args.rosdistro_name)
    build_file = build_files[args.source_build_name]

    # run the job configuration purely for its template side effects
    configure_devel_job(
        args.config_url, args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch,
        config=config, build_file=build_file, jenkins=False, views=False)

    templates.template_hooks = None

    devel_job_name = get_devel_job_name(
        args.rosdistro_name, args.source_build_name,
        args.repository_name, args.os_name, args.os_code_name, args.arch)

    value = expand_template(
        'devel/devel_script.sh.em', {
            'devel_job_name': devel_job_name,
            'scms': hook.scms,
            'scripts': hook.scripts,
            'build_tool': args.build_tool or build_file.build_tool},
        options={BANGPATH_OPT: False})
    # run nested invocations with the same interpreter as this process
    value = value.replace('python3', sys.executable)
    print(value)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile used to build and test the workspace.

    Writes an install list for the generic build dependencies, renders the
    'devel/devel_task.Dockerfile.em' template into the Dockerfile
    directory and prints the volume mounts needed to run the container.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")
    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    add_argument_build_tool_args(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    add_argument_ros_version(parser)
    add_argument_testing(parser)
    # one root builds a plain workspace, two roots build an overlay on top
    # of an underlay (see the prerelease_overlay flag below)
    parser.add_argument(
        '--workspace-root',
        nargs='*',
        action=check_len_action(1, 2),
        help='The root path of the workspace to compile')
    args = parser.parse_args(argv)

    apt_cache = Cache()

    debian_pkg_names = set(['build-essential'])
    debian_pkg_names.update(args.install_packages)
    if args.build_tool == 'colcon':
        debian_pkg_names.update([
            'python3-catkin-pkg-modules',
            'python3-colcon-output',
            'python3-colcon-parallel-executor',
            'python3-colcon-ros',
            'python3-colcon-test-result',
            'python3-rosdistro-modules',
        ])

    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print(' -', debian_pkg_name)

    install_list = 'install_list.txt'
    write_install_list(
        os.path.join(args.dockerfile_dir, install_list),
        debian_pkg_names, apt_cache)
    # the build/test lists are produced by a separate list generator and
    # are expected to already exist next to the Dockerfile
    install_lists = [install_list, 'install_list_build.txt']
    if args.testing:
        install_lists.append('install_list_test.txt')

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'rosdistro_name': args.rosdistro_name,

        # build as the invoking user so generated files are not root-owned
        'uid': get_user_id(),

        'build_tool': args.build_tool,
        'build_tool_args': args.build_tool_args,
        'ros_version': args.ros_version,

        'build_environment_variables': args.env_vars,

        'install_lists': install_lists,
        'dependencies': [],
        'dependency_versions': [],

        'testing': args.testing,
        'prerelease_overlay': len(args.workspace_root) > 1,
    }
    create_dockerfile(
        'devel/devel_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    if len(args.workspace_root) == 1:
        print(' -v %s:/tmp/ws' % args.workspace_root[0])
    else:
        # underlays mount as /tmp/ws, /tmp/ws1, ... (`i or ''` keeps the
        # first one unsuffixed); the last root is the overlay
        for i, workspace_root in enumerate(args.workspace_root[0:-1]):
            print(' -v %s:/tmp/ws%s' % (workspace_root, i or ''))
        print(' -v %s:/tmp/ws_overlay' % args.workspace_root[-1])
def main(argv=sys.argv[1:]):
    """Generate a standalone 'release' shell script.

    Configures the release jobs with ``jenkins=False`` while a template
    hook captures all shell build steps (with a '--' marker separating the
    sourcedeb from the binarydeb job), post-processes the binarydeb steps
    and prints the expanded 'release/release_script.sh.em' to stdout.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'release' script")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'release')
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    parser.add_argument(
        '--skip-install', action='store_true',
        help='Skip trying to install binarydeb')
    args = parser.parse_args(argv)

    # collect all template snippets of specific types
    class IncludeHook(Hook):

        def __init__(self):
            Hook.__init__(self)
            self.scripts = []

        # NOTE(review): *args shadows the enclosing `args` namespace here,
        # but the outer name is not used inside this method
        def beforeFile(self, *args, **kwargs):
            template_path = kwargs['file'].name
            # '--' marks the boundary between sourcedeb and binarydeb steps
            if template_path.endswith('/release/binarydeb_job.xml.em'):
                self.scripts.append('--')
            if template_path.endswith('/snippet/builder_shell.xml.em'):
                self.scripts.append(kwargs['locals']['script'])

    hook = IncludeHook()
    from ros_buildfarm import templates
    templates.template_hooks = [hook]

    # run the job configuration purely for its template side effects
    configure_release_job(
        args.config_url, args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name,
        jenkins=False, views=[],
        generate_import_package_job=False,
        generate_sync_packages_jobs=False,
        filter_arches=args.arch)

    templates.template_hooks = None

    source_job_name = get_sourcedeb_job_name(
        args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name)
    binary_job_name = get_binarydeb_job_name(
        args.rosdistro_name, args.release_build_name,
        args.package_name, args.os_name, args.os_code_name, args.arch)

    # split the captured scripts at the job boundary marker
    separator_index = hook.scripts.index('--')
    source_scripts = hook.scripts[:separator_index]
    binary_scripts = hook.scripts[separator_index + 1:]

    # inject additional argument to skip fetching sourcedeb from repo
    script_name = '/run_binarydeb_job.py '
    additional_argument = '--skip-download-sourcedeb '
    for i, script in enumerate(binary_scripts):
        offset = script.find(script_name)
        if offset != -1:
            offset += len(script_name)
            script = script[:offset] + additional_argument + script[offset:]
            binary_scripts[i] = script
            break

    # remove rm command for sourcedeb location
    rm_command = 'rm -fr $WORKSPACE/binarydeb'
    for i, script in enumerate(binary_scripts):
        offset = script.find(rm_command)
        if offset != -1:
            script = script[:offset] + script[offset + len(rm_command):]
            binary_scripts[i] = script
            break

    if args.skip_install:
        # remove install step
        script_name = '/create_binarydeb_install_task_generator.py '
        for i, script in enumerate(binary_scripts):
            offset = script.find(script_name)
            if offset != -1:
                del binary_scripts[i]
                break

    value = expand_template(
        'release/release_script.sh.em', {
            'source_job_name': source_job_name,
            'binary_job_name': binary_job_name,
            'source_scripts': source_scripts,
            'binary_scripts': binary_scripts},
        options={BANGPATH_OPT: False})
    # run nested invocations with the same interpreter as this process
    value = value.replace('python3', sys.executable)
    print(value)
def main(argv=sys.argv[1:]):
    """Generate the Dockerfile used to build a binarydeb.

    Looks up the expected package version in the rosdistro distribution
    file, collects the build dependencies from the package's .dsc file,
    resolves their versions via the local apt cache, renders
    'release/binarydeb_task.Dockerfile.em' and prints the volume mounts
    needed to run the container.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    add_argument_rosdistro_index_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    args = parser.parse_args(argv)

    debian_package_name = get_debian_package_name(
        args.rosdistro_name, args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version
    debian_package_version = package_version

    # build_binarydeb dependencies
    debian_pkg_names = ['apt-src']

    # add build dependencies from .dsc file
    dsc_file = get_dsc_file(
        args.binarydeb_dir, debian_package_name, debian_package_version)
    debian_pkg_names += sorted(get_build_depends(dsc_file))

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(
        apt_cache, debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name': args.os_name,
        'os_code_name': args.os_code_name,
        'arch': args.arch,

        # build as the invoking user so generated files are not root-owned
        'uid': get_user_id(),

        'distribution_repository_urls': args.distribution_repository_urls,
        'distribution_repository_keys': get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),

        'build_environment_variables': args.env_vars,

        'dependencies': debian_pkg_names,
        'dependency_versions': debian_pkg_versions,
        'install_lists': [],

        'rosdistro_name': args.rosdistro_name,
        'package_name': args.package_name,
        'binarydeb_dir': args.binarydeb_dir,
    }
    create_dockerfile(
        'release/binarydeb_task.Dockerfile.em', data, args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print(' -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print(' -v %s:/tmp/binarydeb' % args.binarydeb_dir)
def main(argv=sys.argv[1:]):
    """Write apt install lists satisfying the workspace's rosdep keys.

    Crawls the underlay and workspace packages, resolves their build and
    test rosdep keys to binary package names and writes
    'install_list_build.txt' and 'install_list_test.txt' (one
    ``name=version`` line per package) into the output directory.

    Fix: the two ``description`` string fragments were concatenated without
    a separating space, rendering "areneeded" in ``--help`` output.

    :param argv: command line arguments (defaults to ``sys.argv[1:]``)
    """
    parser = argparse.ArgumentParser(
        description='Lists available binary packages and versions which are '
                    'needed to satisfy rosdep keys for ROS packages in the '
                    'workspace')
    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_output_dir(parser)
    add_argument_skip_rosdep_keys(parser)
    parser.add_argument(
        '--package-root', nargs='+',
        help='The path to the directory containing packages')
    args = parser.parse_args(argv)

    with Scope('SUBSECTION', 'Enumerating packages needed to build'):
        # find all of the underlay packages; the last --package-root entry
        # is the workspace itself, all earlier entries are underlays
        underlay_pkgs = {}
        for package_root in args.package_root[0:-1]:
            print("Crawling for packages in '%s'" % package_root)
            underlay_pkgs.update(find_packages(package_root))

        underlay_pkg_names = [pkg.name for pkg in underlay_pkgs.values()]
        print('Found the following underlay packages:')
        for pkg_name in sorted(underlay_pkg_names):
            print(' -', pkg_name)

        # get direct build dependencies
        package_root = args.package_root[-1]
        print("Crawling for packages in '%s'" % package_root)
        pkgs = find_packages(package_root)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print(' -', pkg_name)

        # get build dependencies and map them to binary packages
        all_pkgs = set(pkgs.values()).union(underlay_pkgs.values())

        # conditions must be evaluated before group memberships are expanded
        for pkg in all_pkgs:
            pkg.evaluate_conditions(os.environ)
        for pkg in all_pkgs:
            for group_depend in pkg.group_depends:
                if group_depend.evaluated_condition:
                    group_depend.extract_group_members(all_pkgs)

        dependency_keys_build = get_dependencies(
            all_pkgs, 'build', _get_build_and_recursive_run_dependencies,
            pkgs.values())
        dependency_keys_test = get_dependencies(
            all_pkgs, 'run and test',
            _get_test_and_recursive_run_dependencies, pkgs.values())

        if args.skip_rosdep_keys:
            dependency_keys_build.difference_update(args.skip_rosdep_keys)
            dependency_keys_test.difference_update(args.skip_rosdep_keys)

        context = initialize_resolver(
            args.rosdistro_name, args.os_name, args.os_code_name)

        os_pkg_names_build = resolve_names(dependency_keys_build, **context)
        os_pkg_names_test = resolve_names(dependency_keys_test, **context)
        # the test list only carries packages not already in the build list
        os_pkg_names_test -= os_pkg_names_build

    with Scope('SUBSECTION', 'Resolving packages versions using apt cache'):
        apt_cache = Cache()
        os_pkg_versions = get_binary_package_versions(
            apt_cache, os_pkg_names_build | os_pkg_names_test)

    with open(
            os.path.join(args.output_dir, 'install_list_build.txt'),
            'w') as out_file:
        for package in sorted(os_pkg_names_build):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))
    with open(
            os.path.join(args.output_dir, 'install_list_test.txt'),
            'w') as out_file:
        for package in sorted(os_pkg_names_test):
            out_file.write('%s=%s\n' % (package, os_pkg_versions[package]))