Example 1
def doxygen(conf, package, deps, output_path, source_path, docs_build_path):
    # We run doxygen twice, once to generate the actual docs, and then a second time to generate
    # the tagfiles to link this documentation from other docs. See the following SO discussion
    # for this suggestion: http://stackoverflow.com/a/35640905/109517
    return [
        FunctionStage('generate_doxygen_config',
                      generate_doxygen_config,
                      conf=conf,
                      package=package,
                      recursive_build_deps=deps,
                      output_path=output_path,
                      source_path=source_path,
                      docs_build_path=docs_build_path),
        CommandStage(
            'rosdoc_doxygen',
            [which('doxygen'),
             os.path.join(docs_build_path, 'Doxyfile')],
            cwd=source_path),
        FunctionStage('generate_doxygen_config_tags',
                      generate_doxygen_config_tags,
                      conf=conf,
                      package=package,
                      source_path=source_path,
                      docs_build_path=docs_build_path),
        CommandStage(
            'rosdoc_doxygen_tags',
            [which('doxygen'),
             os.path.join(docs_build_path, 'Doxyfile_tags')],
            cwd=source_path),
        # Filter the tags XML to remove user-defined references that may appear in multiple
        # packages (like "codeapi"), since they are not namespaced.
        FunctionStage('filter_doxygen_tags',
                      filter_doxygen_tags,
                      docs_build_path=docs_build_path)
    ]
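
The filter_doxygen_tags stage deserves a note: Doxygen tagfiles can contain user-defined reference pages (like "codeapi") that collide across packages because they are not namespaced. A rough, illustrative sketch of such a filter, assuming the tagfile is written to <docs_build_path>/tags and that FunctionStage callables receive (logger, event_queue, **kwargs) as in catkin_tools; this is not the project's actual implementation:

import os
import xml.etree.ElementTree as ET

def filter_doxygen_tags(logger, event_queue, docs_build_path):
    # Hypothetical location of the tagfile produced by the Doxyfile_tags run.
    tagfile = os.path.join(docs_build_path, 'tags')
    tree = ET.parse(tagfile)
    root = tree.getroot()
    # Drop non-namespaced user-defined reference pages such as "codeapi".
    for compound in list(root):
        if compound.get('kind') == 'page' and compound.findtext('name') == 'codeapi':
            root.remove(compound)
    tree.write(tagfile)
    return 0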
Example 2
def pydoctor(conf, package, deps, doc_deps, output_path, source_path, docs_build_path, job_env):
    output_dir = os.path.join(output_path, 'html', conf.get('output_dir', ''))

    # TODO: Would be better to extract this information from the setup.py, but easier
    # for now to just codify an assumption of {pkg}/python, falling back to {pkg}/src.
    src_dir = os.path.join(source_path, 'python')
    if not os.path.isdir(src_dir):
        src_dir = os.path.join(source_path, 'src')

    command = [which('pydoctor'), '--project-name', package.name, '--html-output', output_dir]

    if 'config' in conf and 'epydoc' not in conf['config']:
        command.extend(['--config', os.path.join(source_path, conf['config'])])

    for subdir in os.listdir(src_dir):
        command.append(os.path.join(src_dir, subdir))

    # pydoctor returns error codes for minor issues we don't care about.
    wrapper_command = ['/bin/bash', '-c', '%s || true' % ' '.join(command)]

    return [
        FunctionStage(
            'mkdir_pydoctor',
            makedirs,
            path=output_dir),
        FunctionStage(
            'cache_pydoctor_output',
            write_file,
            contents=output_dir,
            dest_path=os.path.join(docs_build_path, output_dir_file('pydoctor'))),
        CommandStage(
            'rosdoc_pydoctor',
            wrapper_command,
            cwd=src_dir)
    ]
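
A possible shape for the TODO above, extracting the source directory from setup.py instead of guessing: run_setup() is standard distutils, while the helper name and fallback order are hypothetical.

import os
from distutils.core import run_setup

def guess_src_dir(source_path):
    # Ask setup.py for its package_dir mapping, if there is one.
    setup_py = os.path.join(source_path, 'setup.py')
    if os.path.isfile(setup_py):
        dist = run_setup(setup_py, stop_after='init')
        package_dir = getattr(dist, 'package_dir', None) or {}
        if '' in package_dir:
            return os.path.join(source_path, package_dir[''])
    # Fall back to the {pkg}/python then {pkg}/src convention used above.
    for candidate in ('python', 'src'):
        path = os.path.join(source_path, candidate)
        if os.path.isdir(path):
            return path
    return source_path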
Example 3
def create_cmake_clean_job(
        context,
        package,
        package_path,
        dependencies,
        dry_run,
        clean_build,
        clean_devel,
        clean_install):
    """Generate a Job to clean a cmake package"""

    # Package build space path
    build_space = context.package_build_space(package)
    # Package metadata path
    metadata_path = context.package_metadata_path(package)
    # Environment dictionary for the job, empty for a clean job
    job_env = {}

    stages = []

    if clean_install and context.install:
        installed_files = get_installed_files(context.package_metadata_path(package))
        stages.append(FunctionStage(
            'cleaninstall',
            rmfiles,
            paths=sorted(installed_files),
            remove_empty=True,
            empty_root=context.install_space_abs,
            dry_run=dry_run))

    if clean_devel and not context.install:
        installed_files = get_installed_files(context.package_metadata_path(package))
        stages.append(FunctionStage(
            'cleandevel',
            rmfiles,
            paths=sorted(installed_files),
            remove_empty=True,
            empty_root=context.devel_space_abs,
            dry_run=dry_run))

    if clean_build:
        stages.append(FunctionStage(
            'rmbuild',
            rmfiles,
            paths=[build_space],
            dry_run=dry_run))

    # Remove cached metadata
    if clean_build and clean_devel and clean_install:
        stages.append(FunctionStage(
            'rmmetadata',
            rmfiles,
            paths=[metadata_path],
            dry_run=dry_run))

    return Job(
        jid=package.name,
        deps=dependencies,
        env=job_env,
        stages=stages)
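
The clean stages delegate to an rmfiles helper. Below is a minimal sketch consistent with the keyword arguments used here (paths, remove_empty, empty_root, dry_run), assuming the logger exposes an out() method; the real implementation lives elsewhere in the project.

import os
import shutil

def rmfiles(logger, event_queue, paths, remove_empty=False, empty_root='/', dry_run=False):
    for path in paths:
        if os.path.exists(path):
            if dry_run:
                logger.out('Would remove: %s' % path)
                continue
            if os.path.isdir(path) and not os.path.islink(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        if remove_empty:
            # Prune directories left empty, walking up no higher than empty_root.
            parent = os.path.dirname(path)
            while parent.startswith(empty_root) and parent != empty_root:
                if os.path.isdir(parent) and not os.listdir(parent):
                    if not dry_run:
                        os.rmdir(parent)
                    parent = os.path.dirname(parent)
                else:
                    break
    return 0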
Example 4
def create_package_job(context, package, package_path, deps):
    docs_space = os.path.join(context.build_space_abs, '..', 'docs', package.name)
    docs_build_space = os.path.join(context.build_space_abs, 'docs', package.name)
    package_path_abs = os.path.join(context.source_space_abs, package_path)

    # Load rosdoc config, if it exists.
    rosdoc_yaml_path = os.path.join(package_path_abs, 'rosdoc.yaml')
    if os.path.exists(rosdoc_yaml_path):
        with open(rosdoc_yaml_path) as f:
            rosdoc_conf = yaml.safe_load(f)
    else:
        if os.path.exists(os.path.join(package_path_abs, 'src')) or \
                os.path.exists(os.path.join(package_path_abs, 'include')):
            rosdoc_conf = [{'builder': 'doxygen'}]
        else:
            rosdoc_conf = []

    stages = []

    # Create package docs spaces.
    stages.append(FunctionStage('mkdir_docs_build_space', makedirs, path=docs_build_space))

    # Generate msg/srv/action docs with package summary page.
    stages.append(FunctionStage('generate_messages', generate_messages,
                                package=package, package_path=package_path,
                                output_path=docs_build_space))
    stages.append(FunctionStage('generate_services', generate_services,
                                package=package, package_path=package_path,
                                output_path=docs_build_space))
    stages.append(FunctionStage('generate_package_summary', generate_package_summary,
                                package=package, package_path=package_path_abs,
                                rosdoc_conf=rosdoc_conf, output_path=docs_build_space))

    # Add steps to run native doc generators, as appropriate. This has to happen after
    # the package summary is generated, since we're going to override the subdirectory
    # index.html files produced by that sphinx run.
    for conf in rosdoc_conf:
        try:
            stages.extend(getattr(builders, conf['builder'])(
                conf, package, deps, docs_space, package_path_abs, docs_build_space))
        except AttributeError:
            log(fmt("[document] @!@{yf}Warning:@| Skipping unrecognized rosdoc builder [%s] for package [%s]" %
                (conf['builder'], package.name)))

    return Job(jid=package.name, deps=deps, env={}, stages=stages)
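
For reference, rosdoc_conf is just the parsed YAML: a list of builder dictionaries. A hypothetical rosdoc.yaml requesting two builders would load to something like the following; the keys shown ('output_dir', 'sphinx_root_dir') are among those consumed by the builder functions in these examples.

rosdoc_conf = [
    {'builder': 'doxygen', 'output_dir': 'c++'},
    {'builder': 'sphinx', 'sphinx_root_dir': 'doc', 'output_dir': 'python'},
]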
Example 5
def create_cmake_test_job(
    context,
    package,
    package_path,
    test_target,
    verbose,
):
    """Generate a job to test a cmake package"""
    # Package build space path
    build_space = context.package_build_space(package)
    # Environment dictionary for the job, which will be built
    # up by the executions in the loadenv stage.
    job_env = dict(os.environ)

    # Create job stages
    stages = []

    # Load environment for job
    stages.append(
        FunctionStage(
            'loadenv',
            loadenv,
            locked_resource=None,
            job_env=job_env,
            package=package,
            context=context,
            verbose=False,
        ))

    # Check if the test target exists.
    # `make -q <target>` returns 2 if the target does not exist; in that case we want to
    # terminate this test job. The other cases (0 = target is up to date, 1 = target
    # exists but is out of date) can be ignored.
    stages.append(
        CommandStage(
            'findtest',
            [MAKE_EXEC, '-q', test_target],
            cwd=build_space,
            early_termination_retcode=2,
            success_retcodes=(0, 1, 2),
        ))

    # Make command
    stages.append(
        CommandStage(
            'make',
            [MAKE_EXEC, test_target] + context.make_args,
            cwd=build_space,
            logger_factory=IOBufferProtocol.factory,
        ))

    return Job(
        jid=package.name,
        deps=[],
        env=job_env,
        stages=stages,
    )
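
The findtest stage relies on make's documented -q exit codes. A standalone illustration of the same probe:

import subprocess

def target_exists(build_space, target, make_exec='make'):
    # `make -q` exits 2 when the target is unknown; 0 (up to date) and
    # 1 (exists but out of date) both mean the target is present.
    result = subprocess.run(
        [make_exec, '-q', target],
        cwd=build_space,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL)
    return result.returncode in (0, 1)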
Example 6
def sphinx(conf, package, deps, doc_deps, output_path, source_path, docs_build_path, job_env):
    root_dir = os.path.join(source_path, conf.get('sphinx_root_dir', '.'))
    output_dir = os.path.join(output_path, 'html', conf.get('output_dir', ''))

    rpp = os.environ['ROS_PACKAGE_PATH'].split(':')
    rpp.insert(0, source_path)
    if os.path.isdir(os.path.join(source_path, 'src')):
        rpp.insert(0, os.path.join(source_path, 'src'))
    env = {
        'PATH': os.environ.get('PATH', ''),
        'PYTHONPATH': os.environ.get('PYTHONPATH', ''),
        'ROS_PACKAGE_PATH': ':'.join(rpp),
        'LD_LIBRARY_PATH': os.environ.get('LD_LIBRARY_PATH', '')
    }

    return [
        FunctionStage(
            'cache_sphinx_output',
            write_file,
            contents=output_dir,
            dest_path=os.path.join(docs_build_path, output_dir_file('sphinx')),
        ),
        FunctionStage(
            'job_env_set_intersphinx_mapping',
            generate_intersphinx_mapping,
            output_path=output_path,
            root_dir=root_dir,
            doc_deps=doc_deps,
            docs_build_path=docs_build_path,
            job_env=job_env),
        CommandStage(
            'rosdoc_sphinx',
            [which('sphinx-build'), '-E', root_dir, output_dir],
            cwd=root_dir,
            env=env),
        FunctionStage(
            'job_env_unset_intersphinx_mapping',
            unset_env,
            job_env=job_env,
            keys=['INTERSPHINX_MAPPING']),
    ]
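
The two job_env stages bracket the sphinx-build run with an INTERSPHINX_MAPPING environment variable. Assuming the mapping is serialized as JSON (an assumption; the actual encoding is defined by generate_intersphinx_mapping), a package's Sphinx conf.py could consume it like this:

import json
import os

extensions = ['sphinx.ext.intersphinx']

# intersphinx expects {name: (target, inventory)} tuples, so convert the
# lists that JSON decoding produces.
_raw = os.environ.get('INTERSPHINX_MAPPING', '{}')
intersphinx_mapping = {name: tuple(value) for name, value in json.loads(_raw).items()}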
Example 7
def create_summary_job(context, package_names):
    docs_space = os.path.join(context.build_space_abs, '..', 'docs')
    docs_build_space = os.path.join(context.build_space_abs, 'docs')

    stages = []

    stages.append(FunctionStage('generate_overall_summary', generate_overall_summary,
                                output_path=docs_build_space))

    # Run Sphinx for the package summary.
    stages.append(CommandStage(
        'summary_sphinx',
        [which('sphinx-build'), '-j8', '-E', '.', docs_space],
        cwd=docs_build_space
    ))

    return Job(jid='summary', deps=package_names, env={}, stages=stages)
Example 8
def epydoc(conf, package, deps, output_path, source_path, docs_build_path):
    output_dir = os.path.join(output_path, 'html', conf.get('output_dir', ''))

    command = [which('epydoc'), '--html', package.name, '-o', output_dir]
    for s in conf.get('exclude', []):
        command.extend(['--exclude', s])

    if 'config' in conf:
        command.extend(['--config', os.path.join(source_path, conf['config'])])
    else:
        # default options
        command.extend(['--inheritance', 'included', '--no-private'])

    env = {
        'PYTHONPATH': ':'.join(sys.path),
        'LD_LIBRARY_PATH': os.environ.get('LD_LIBRARY_PATH', '')
    }

    return [
        FunctionStage('mkdir_epydoc', makedirs, path=output_dir),
        CommandStage('rosdoc_epydoc', command, cwd=source_path, env=env)
    ]
Example 9
def create_package_job(context, package, package_tests):
    build_space = context.package_build_space(package)
    if not os.path.exists(os.path.join(build_space, 'Makefile')):
        raise RuntimeError(
            'Cannot test package %s: it has not been built yet.' % package.name)

    job_env = dict(os.environ)
    stages = []

    stages.append(
        FunctionStage('loadenv',
                      loadenv,
                      job_env=job_env,
                      package=package,
                      context=context))

    package_test_targets = [
        'run_tests_%s_%s' % (package.name, test_name)
        for test_name in package_tests
    ]

    make_args = handle_make_arguments(context.make_args +
                                      context.catkin_make_args +
                                      package_test_targets)

    stages.append(
        CommandStage(
            'make',
            [MAKE_EXEC] + make_args,
            cwd=build_space,
            logger_factory=CMakeMakeIOBufferProtocol.factory))

    return Job(jid=package.name, deps=[], env=job_env, stages=stages)
Example 10
def epydoc(conf, package, deps, output_path, source_path, docs_build_path):
    try:
        which('epydoc')
    except KeyError:
        # If epydoc is missing, fall back to pydoctor.
        return pydoctor(conf, package, deps, output_path, source_path, docs_build_path)

    output_dir = os.path.join(output_path, 'html', conf.get('output_dir', ''))

    command = [which('epydoc'), '--html', package.name, '-o', output_dir]
    for s in conf.get('exclude', []):
        command.extend(['--exclude', s])

    if 'config' in conf:
        command.extend(['--config', os.path.join(source_path, conf['config'])])
    else:
        # default options
        command.extend(['--inheritance', 'included', '--no-private'])

    env = {
        'PYTHONPATH': os.environ.get('PYTHONPATH', ''),
        'LD_LIBRARY_PATH': os.environ.get('LD_LIBRARY_PATH', '')
    }
    
    # Swallow errors from epydoc until we figure out a better story for Python 3.
    wrapper_command = ['/bin/bash', '-c', '%s || true' % ' '.join(command)]

    return [
        FunctionStage(
            'mkdir_epydoc',
            makedirs,
            path=output_dir),
        CommandStage(
            'rosdoc_epydoc',
            wrapper_command,
            cwd=source_path,
            env=env)
    ]
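
This variant treats a missing epydoc as a KeyError raised by which(). A minimal which() consistent with that contract (illustrative, not the project's actual helper):

import shutil

def which(program):
    # shutil.which returns None when the executable is not on the PATH.
    path = shutil.which(program)
    if path is None:
        raise KeyError('No executable found for: %s' % program)
    return path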
Example 11
def create_catkin_test_job(
    context,
    package,
    package_path,
    test_target,
    verbose,
):
    """Generate a job that tests a package"""

    # Package source space path
    pkg_dir = os.path.join(context.source_space_abs, package_path)
    # Package build space path
    build_space = context.package_build_space(package)
    # Environment dictionary for the job, which will be built
    # up by the executions in the loadenv stage.
    job_env = dict(os.environ)

    # Create job stages
    stages = []

    # Load environment for job
    stages.append(
        FunctionStage(
            'loadenv',
            loadenv,
            locked_resource=None,
            job_env=job_env,
            package=package,
            context=context,
            verbose=False,
        ))

    # Check build system command.
    # The stdout is suppressed here instead of globally because for the actual tests
    # stdout contains important information, but for CMake it is only relevant when verbose.
    stages.append(
        CommandStage('check', [MAKE_EXEC, 'cmake_check_build_system'],
                     cwd=build_space,
                     logger_factory=CMakeIOBufferProtocol.factory_factory(
                         pkg_dir, suppress_stdout=not verbose),
                     occupy_job=True))

    # Check if the test target exists.
    # `make -q <target>` returns 2 if the target does not exist; in that case we want to
    # terminate this test job. The other cases (0 = target is up to date, 1 = target
    # exists but is out of date) can be ignored.
    stages.append(
        CommandStage(
            'findtest',
            [MAKE_EXEC, '-q', test_target],
            cwd=build_space,
            early_termination_retcode=2,
            success_retcodes=(0, 1, 2),
        ))

    # Make command
    stages.append(
        CommandStage(
            'make',
            [MAKE_EXEC, test_target] + context.make_args,
            cwd=build_space,
            logger_factory=CMakeMakeRunTestsIOBufferProtocol.factory_factory(
                verbose),
        ))

    # catkin_test_results
    result_cmd = ['catkin_test_results']
    if verbose:
        result_cmd.append('--verbose')
    stages.append(
        CommandStage(
            'results',
            result_cmd,
            cwd=build_space,
            logger_factory=CatkinTestResultsIOBufferProtocol.factory,
        ))

    return Job(
        jid=package.name,
        deps=[],
        env=job_env,
        stages=stages,
    )
Example 12
def create_catkin_clean_job(context, package, package_path, dependencies,
                            dry_run, clean_build, clean_devel, clean_install):
    """Generate a Job that cleans a catkin package"""

    stages = []

    # Package build space path
    build_space = context.package_build_space(package)
    # Package metadata path
    metadata_path = context.package_metadata_path(package)
    # Environment dictionary for the job, empty for a clean job
    job_env = {}

    # Remove installed files
    if clean_install:
        installed_files = get_installed_files(
            context.package_metadata_path(package))
        install_dir = context.package_install_space(package)
        if context.merge_install:
            # Don't clean shared files in a merged install space layout.
            installed_files = [
                path for path in installed_files
                if os.path.dirname(path) != install_dir
            ]
        # If a Python package with the package name is installed, clean it too.
        python_dir = os.path.join(install_dir, get_python_install_dir(context),
                                  package.name)
        if os.path.exists(python_dir):
            installed_files.append(python_dir)
        stages.append(
            FunctionStage('cleaninstall',
                          rmfiles,
                          paths=sorted(installed_files),
                          remove_empty=True,
                          empty_root=context.install_space_abs,
                          dry_run=dry_run))

    # Remove products in develspace
    if clean_devel:
        if context.merge_devel:
            # Remove build targets from devel space
            stages.append(
                CommandStage(
                    'clean',
                    [MAKE_EXEC, 'clean'],
                    cwd=build_space,
                ))
        elif context.link_devel:
            # Remove symlinked products
            stages.append(
                FunctionStage(
                    'unlink',
                    unlink_devel_products,
                    locked_resource='symlink-collisions-file',
                    devel_space_abs=context.devel_space_abs,
                    private_devel_path=context.package_private_devel_path(
                        package),
                    metadata_path=context.metadata_path(),
                    package_metadata_path=context.package_metadata_path(
                        package),
                    dry_run=dry_run))

            # Remove devel space
            stages.append(
                FunctionStage(
                    'rmdevel',
                    rmfiles,
                    paths=[context.package_private_devel_path(package)],
                    dry_run=dry_run))
        elif context.isolate_devel:
            # Remove devel space
            stages.append(
                FunctionStage('rmdevel',
                              rmfiles,
                              paths=[context.package_devel_space(package)],
                              dry_run=dry_run))

    # Remove build space
    if clean_build:
        stages.append(
            FunctionStage('rmbuild',
                          rmfiles,
                          paths=[build_space],
                          dry_run=dry_run))

    # Remove cached metadata
    if clean_build and clean_devel and clean_install:
        stages.append(
            FunctionStage('rmmetadata',
                          rmfiles,
                          paths=[metadata_path],
                          dry_run=dry_run))

    return Job(jid=package.name, deps=dependencies, env=job_env, stages=stages)
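
get_installed_files presumably reads back the install manifest that the build job's 'register' stage cached (CMake writes an install_manifest.txt listing every installed file). A sketch under that assumption:

import os

def get_installed_files(metadata_path):
    manifest = os.path.join(metadata_path, 'install_manifest.txt')
    if not os.path.isfile(manifest):
        return set()
    with open(manifest) as f:
        return {line.strip() for line in f if line.strip()}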
Example 13
def create_catkin_build_job(context,
                            package,
                            package_path,
                            dependencies,
                            force_cmake,
                            pre_clean,
                            prebuild=False):
    """Job class for building catkin packages"""

    # Package source space path
    pkg_dir = os.path.join(context.source_space_abs, package_path)

    # Package build space path
    build_space = context.package_build_space(package)
    # Package devel space path
    devel_space = context.package_devel_space(package)
    # Package install space path
    install_space = context.package_install_space(package)
    # Package metadata path
    metadata_path = context.package_metadata_path(package)
    # Environment dictionary for the job, which will be built
    # up by the executions in the loadenv stage.
    job_env = dict(os.environ)

    # Create job stages
    stages = []

    # Load environment for job.
    stages.append(
        FunctionStage('loadenv',
                      loadenv,
                      locked_resource=None
                      if context.isolate_install else 'installspace',
                      job_env=job_env,
                      package=package,
                      context=context))

    # Create package build space
    stages.append(FunctionStage('mkdir', makedirs, path=build_space))

    # Create package metadata dir
    stages.append(FunctionStage('mkdir', makedirs, path=metadata_path))

    # Copy source manifest
    stages.append(
        FunctionStage('cache-manifest',
                      copyfiles,
                      source_paths=[
                          os.path.join(context.source_space_abs, package_path,
                                       'package.xml')
                      ],
                      dest_path=os.path.join(metadata_path, 'package.xml')))

    # Only run CMake if the Makefile doesn't exist or if --force-cmake is given
    # TODO: This would need to be different with `cmake --build`
    makefile_path = os.path.join(build_space, 'Makefile')

    if not os.path.isfile(makefile_path) or force_cmake:

        require_command('cmake', CMAKE_EXEC)

        # CMake command
        stages.append(
            CommandStage(
                'cmake',
                [
                    CMAKE_EXEC, pkg_dir, '--no-warn-unused-cli',
                    '-DCATKIN_DEVEL_PREFIX=' + devel_space,
                    '-DCMAKE_INSTALL_PREFIX=' + install_space
                ] + context.cmake_args,
                cwd=build_space,
                logger_factory=CMakeIOBufferProtocol.factory_factory(pkg_dir),
                occupy_job=True))
    else:
        # Check buildsystem command
        stages.append(
            CommandStage(
                'check', [MAKE_EXEC, 'cmake_check_build_system'],
                cwd=build_space,
                logger_factory=CMakeIOBufferProtocol.factory_factory(pkg_dir),
                occupy_job=True))

    # Filter make arguments
    make_args = handle_make_arguments(context.make_args +
                                      context.catkin_make_args)

    # Pre-clean command
    if pre_clean:
        # TODO: Remove target args from `make_args`
        stages.append(
            CommandStage(
                'preclean',
                [MAKE_EXEC, 'clean'] + make_args,
                cwd=build_space,
            ))

    require_command('make', MAKE_EXEC)

    # Make command
    stages.append(
        CommandStage('make', [MAKE_EXEC] + make_args,
                     cwd=build_space,
                     logger_factory=CMakeMakeIOBufferProtocol.factory))

    # Symlink command if using a linked develspace
    if context.link_devel:
        stages.append(
            FunctionStage(
                'symlink',
                link_devel_products,
                locked_resource='symlink-collisions-file',
                package=package,
                package_path=package_path,
                devel_manifest_path=context.package_metadata_path(package),
                source_devel_path=context.package_devel_space(package),
                dest_devel_path=context.devel_space_abs,
                metadata_path=context.metadata_path(),
                prebuild=prebuild))

    # Make install command, if installing
    if context.install:
        stages.append(
            CommandStage('install', [MAKE_EXEC, 'install'],
                         cwd=build_space,
                         logger_factory=CMakeMakeIOBufferProtocol.factory,
                         locked_resource=None
                         if context.isolate_install else 'installspace'))
        # Copy install manifest
        stages.append(
            FunctionStage(
                'register',
                copy_install_manifest,
                src_install_manifest_path=build_space,
                dst_install_manifest_path=context.package_metadata_path(
                    package)))

    return Job(jid=package.name, deps=dependencies, env=job_env, stages=stages)
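
require_command guards a job on the availability of an executable before any stage needs it. A plausible sketch:

def require_command(name, exec_path):
    # exec_path is the resolved path (e.g. CMAKE_EXEC), or a falsy value
    # if resolution failed at import time.
    if not exec_path:
        raise RuntimeError(
            "Cannot build: '%s' was not found on your PATH." % name)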
Example 14
def create_cmake_build_job(context, package, package_path, dependencies,
                           force_cmake, pre_clean):

    # Package source space path
    pkg_dir = os.path.join(context.source_space_abs, package_path)

    # Package build space path
    build_space = context.package_build_space(package)
    # Package metadata path
    metadata_path = context.package_metadata_path(package)
    # Environment dictionary for the job, which will be built
    # up by the executions in the loadenv stage.
    job_env = dict(os.environ)

    # Get actual staging path
    dest_path = context.package_dest_path(package)
    final_path = context.package_final_path(package)

    # Create job stages
    stages = []

    # Load environment for job.
    stages.append(
        FunctionStage('loadenv',
                      loadenv,
                      locked_resource='installspace',
                      job_env=job_env,
                      package=package,
                      context=context))

    # Create package build space
    stages.append(FunctionStage('mkdir', makedirs, path=build_space))

    # Create package metadata dir
    stages.append(FunctionStage('mkdir', makedirs, path=metadata_path))

    # Copy source manifest
    stages.append(
        FunctionStage('cache-manifest',
                      copyfiles,
                      source_paths=[
                          os.path.join(context.source_space_abs, package_path,
                                       'package.xml')
                      ],
                      dest_path=os.path.join(metadata_path, 'package.xml')))

    require_command('cmake', CMAKE_EXEC)

    # CMake command
    makefile_path = os.path.join(build_space, 'Makefile')
    if not os.path.isfile(makefile_path) or force_cmake:
        stages.append(
            CommandStage(
                'cmake', ([
                    CMAKE_EXEC, pkg_dir, '--no-warn-unused-cli',
                    '-DCMAKE_INSTALL_PREFIX=' + final_path
                ] + context.cmake_args),
                cwd=build_space,
                logger_factory=CMakeIOBufferProtocol.factory_factory(pkg_dir)))
    else:
        stages.append(
            CommandStage(
                'check', [MAKE_EXEC, 'cmake_check_build_system'],
                cwd=build_space,
                logger_factory=CMakeIOBufferProtocol.factory_factory(pkg_dir)))

    # Pre-clean command
    if pre_clean:
        make_args = handle_make_arguments(context.make_args +
                                          context.catkin_make_args)
        stages.append(
            CommandStage(
                'preclean',
                [MAKE_EXEC, 'clean'] + make_args,
                cwd=build_space,
            ))

    require_command('make', MAKE_EXEC)

    # Make command
    stages.append(
        CommandStage('make',
                     [MAKE_EXEC] + handle_make_arguments(context.make_args),
                     cwd=build_space,
                     logger_factory=CMakeMakeIOBufferProtocol.factory))

    # Make install command (always run on plain cmake)
    stages.append(
        CommandStage('install', [MAKE_EXEC, 'install'],
                     cwd=build_space,
                     logger_factory=CMakeMakeIOBufferProtocol.factory,
                     locked_resource='installspace'))

    # Copy install manifest
    stages.append(
        FunctionStage(
            'register',
            copy_install_manifest,
            src_install_manifest_path=build_space,
            dst_install_manifest_path=context.package_metadata_path(package)))

    # Determine the location where the setup.sh file should be created
    stages.append(
        FunctionStage('setupgen',
                      generate_setup_file,
                      context=context,
                      install_target=dest_path))

    stages.append(
        FunctionStage('envgen',
                      generate_env_file,
                      context=context,
                      install_target=dest_path))

    return Job(jid=package.name, deps=dependencies, env=job_env, stages=stages)
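
generate_env_file writes the small wrapper used to run commands inside the result space. A sketch of the kind of file it might produce; the exact contents and file name are assumptions, defined by the real implementation.

import os
import stat

ENV_TEMPLATE = """#!/usr/bin/env sh
# env.sh sketch: source the space's setup file, then exec the command.
. "{prefix}/setup.sh"
exec "$@"
"""

def generate_env_file(logger, event_queue, context, install_target):
    env_path = os.path.join(install_target, 'env.sh')
    with open(env_path, 'w') as f:
        f.write(ENV_TEMPLATE.format(prefix=install_target))
    os.chmod(env_path, os.stat(env_path).st_mode | stat.S_IEXEC)
    return 0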
Example 15
def create_package_job(context, package, package_path, deps, doc_deps):
    docs_space = os.path.join(context.docs_space_abs, package.name)
    docs_build_space = os.path.join(context.build_space_abs, 'docs',
                                    package.name)
    package_path_abs = os.path.join(context.source_space_abs, package_path)
    package_meta_path = context.package_metadata_path(package)

    # Load rosdoc config, if it exists.
    rosdoc_yaml_path = os.path.join(package_path_abs, 'rosdoc.yaml')
    for export in package.exports:
        if export.tagname == "rosdoc":
            config = export.attributes.get('config', '')
            if config:
                rosdoc_yaml_path_temp = os.path.join(package_path_abs, config)
                if os.path.isfile(rosdoc_yaml_path_temp):
                    # Stop at the first configured file that actually exists.
                    rosdoc_yaml_path = rosdoc_yaml_path_temp
                    break

    if os.path.isfile(rosdoc_yaml_path):
        with open(rosdoc_yaml_path) as f:
            rosdoc_conf = yaml.full_load(f)
    else:
        if os.path.isdir(os.path.join(package_path_abs, 'src')) or \
                os.path.isdir(os.path.join(package_path_abs, 'include')):
            rosdoc_conf = [{'builder': 'doxygen'}]
        else:
            rosdoc_conf = []

    stages = []

    # Create package docs spaces.
    stages.append(
        FunctionStage('mkdir_docs_build_space',
                      makedirs,
                      path=docs_build_space))

    # Generate msg/srv/action docs with package summary page.
    stages.append(
        FunctionStage('generate_messages',
                      generate_messages,
                      package=package,
                      package_path=package_path,
                      output_path=docs_build_space))
    stages.append(
        FunctionStage('generate_services',
                      generate_services,
                      package=package,
                      package_path=package_path,
                      output_path=docs_build_space))
    stages.append(
        FunctionStage('generate_package_summary',
                      generate_package_summary,
                      package=package,
                      package_path=package_path_abs,
                      rosdoc_conf=rosdoc_conf,
                      output_path=docs_build_space))

    # Cache document config
    stages.append(
        FunctionStage('cache_rosdoc_config',
                      yaml_dump_file,
                      contents=rosdoc_conf,
                      dest_path=os.path.join(package_meta_path,
                                             'rosdoc.yaml')))

    job_env = {}

    # Add steps to run native doc generators, as appropriate. This has to happen after
    # the package summary is generated, since we're going to override the subdirectory
    # index.html files produced by that sphinx run.
    for conf in rosdoc_conf:
        try:
            builder = conf['builder']
            if builder == 'doxygen':
                docs_space = os.path.realpath(docs_space)
                docs_build_space = os.path.realpath(docs_build_space)
                package_path_abs = os.path.realpath(package_path_abs)
            stages.extend(
                getattr(builders,
                        builder)(conf, package, deps, doc_deps, docs_space,
                                 package_path_abs, docs_build_space, job_env))
        except AttributeError:
            log(
                fmt("[document] @!@{yf}Warning:@| Skipping unrecognized rosdoc builder [%s] for package [%s]"
                    % (conf['builder'], package.name)))

    return Job(jid=package.name, deps=deps, env=job_env, stages=stages)
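
The export loop above honors the standard rosdoc export in package.xml. Using catkin_pkg (the parser behind the package objects used here), the relevant data looks like this; the file contents are a hypothetical example.

# package.xml excerpt this loop is looking for:
#
#   <export>
#     <rosdoc config="doc/rosdoc.yaml"/>
#   </export>

from catkin_pkg.package import parse_package

pkg = parse_package('/path/to/package')  # directory containing package.xml
for export in pkg.exports:
    if export.tagname == 'rosdoc':
        print(export.attributes.get('config', ''))  # -> 'doc/rosdoc.yaml'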
Example 16
def create_catkin_build_job(context,
                            package,
                            package_path,
                            dependencies,
                            force_cmake,
                            pre_clean,
                            prebuild=False):
    """Job class for building catkin packages"""

    # Package source space path
    pkg_dir = os.path.join(context.source_space_abs, package_path)

    # Package build space path
    build_space = context.package_build_space(package)
    # Package devel space path
    devel_space = context.package_devel_space(package)
    # Package install space path
    install_space = context.package_install_space(package)
    # Package metadata path
    metadata_path = context.package_metadata_path(package)

    # Create job stages
    stages = []

    # Create package build space
    stages.append(FunctionStage('mkdir', makedirs, path=build_space))

    # Create package metadata dir
    stages.append(FunctionStage('mkdir', makedirs, path=metadata_path))

    # Copy source manifest
    stages.append(
        FunctionStage('cache-manifest',
                      copyfiles,
                      source_paths=[
                          os.path.join(context.source_space_abs, package_path,
                                       'package.xml')
                      ],
                      dest_path=os.path.join(metadata_path, 'package.xml')))

    # Define test results directory
    catkin_test_results_dir = os.path.join(build_space, 'test_results')
    # Always override the CATKIN and ROS _TEST_RESULTS_DIR environment variables.
    # This is in order to avoid cross talk due to parallel builds.
    # This is only needed for ROS Hydro and earlier (the problem was addressed upstream in Indigo).
    # See: https://github.com/catkin/catkin_tools/issues/139
    ctr_env = {
        'CATKIN_TEST_RESULTS_DIR': catkin_test_results_dir,
        'ROS_TEST_RESULTS_DIR': catkin_test_results_dir
    }

    # Only run CMake if the Makefile doesn't exist or if --force-cmake is given
    # TODO: This would need to be different with `cmake --build`
    makefile_path = os.path.join(build_space, 'Makefile')

    if not os.path.isfile(makefile_path) or force_cmake:

        # Create an env-hook which clears the catkin and ros test results environment variable.
        stages.append(
            FunctionStage('ctr-nuke',
                          ctr_nuke,
                          prefix=context.package_dest_path(package)))

        # CMake command
        stages.append(
            CommandStage(
                'cmake',
                [
                    CMAKE_EXEC, pkg_dir, '--no-warn-unused-cli',
                    '-DCATKIN_DEVEL_PREFIX=' + devel_space,
                    '-DCMAKE_INSTALL_PREFIX=' + install_space
                ] + context.cmake_args,
                cwd=build_space,
                logger_factory=CMakeIOBufferProtocol.factory_factory(pkg_dir),
                occupy_job=True))
    else:
        # Check buildsystem command
        stages.append(
            CommandStage(
                'check', [MAKE_EXEC, 'cmake_check_build_system'],
                cwd=build_space,
                logger_factory=CMakeIOBufferProtocol.factory_factory(pkg_dir),
                occupy_job=True))

    # Filter make arguments
    make_args = handle_make_arguments(context.make_args +
                                      context.catkin_make_args)

    # Determine if the catkin test results env needs to be overridden
    env_overrides = ctr_env if 'test' in make_args else {}

    # Pre-clean command
    if pre_clean:
        # TODO: Remove target args from `make_args`
        stages.append(
            CommandStage(
                'preclean',
                [MAKE_EXEC, 'clean'] + make_args,
                cwd=build_space,
            ))

    # Make command
    stages.append(
        CommandStage('make', [MAKE_EXEC] + make_args,
                     cwd=build_space,
                     env_overrides=env_overrides,
                     logger_factory=CMakeMakeIOBufferProtocol.factory))

    # Symlink command if using a linked develspace
    if context.link_devel:
        stages.append(
            FunctionStage(
                'symlink',
                link_devel_products,
                locked_resource='symlink-collisions-file',
                package=package,
                package_path=package_path,
                devel_manifest_path=context.package_metadata_path(package),
                source_devel_path=context.package_devel_space(package),
                dest_devel_path=context.devel_space_abs,
                metadata_path=context.metadata_path(),
                prebuild=prebuild))

    # Make install command, if installing
    if context.install:
        stages.append(
            CommandStage('install', [MAKE_EXEC, 'install'],
                         cwd=build_space,
                         logger_factory=CMakeMakeIOBufferProtocol.factory,
                         locked_resource='installspace'))

    return Job(jid=package.name,
               deps=dependencies,
               env_loader=get_env_loader(package, context),
               stages=stages)
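
The ctr-nuke stage installs an env-hook that clears the test-results variables for anything run inside the result space, complementing the per-build ctr_env override. A sketch of what such a hook writer might look like; the hook directory and file name are assumptions:

import os

CTR_NUKE_SH = """#!/usr/bin/env sh
# Clear the test-results overrides set during the build.
unset CATKIN_TEST_RESULTS_DIR
unset ROS_TEST_RESULTS_DIR
"""

def ctr_nuke(logger, event_queue, prefix):
    hook_dir = os.path.join(prefix, 'etc', 'catkin', 'profile.d')
    os.makedirs(hook_dir, exist_ok=True)
    with open(os.path.join(hook_dir, '06-ctr-nuke.sh'), 'w') as f:
        f.write(CTR_NUKE_SH)
    return 0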
Example 17
def create_catkin_clean_job(context, package, package_path, dependencies,
                            dry_run, clean_build, clean_devel, clean_install):
    """Generate a Job that cleans a catkin package"""

    stages = []

    # Package build space path
    build_space = context.package_build_space(package)
    # Package metadata path
    metadata_path = context.package_metadata_path(package)

    # Remove installed files
    if clean_install:
        installed_files = get_installed_files(
            context.package_metadata_path(package))
        stages.append(
            FunctionStage('cleaninstall',
                          rmfiles,
                          paths=sorted(installed_files),
                          remove_empty=True,
                          empty_root=context.install_space_abs,
                          dry_run=dry_run))

    # Remove products in develspace
    if clean_devel:
        if context.merge_devel:
            # Remove build targets from devel space
            stages.append(
                CommandStage(
                    'clean',
                    [MAKE_EXEC, 'clean'],
                    cwd=build_space,
                ))
        elif context.link_devel:
            # Remove symlinked products
            stages.append(
                FunctionStage(
                    'unlink',
                    unlink_devel_products,
                    locked_resource='symlink-collisions-file',
                    devel_space_abs=context.devel_space_abs,
                    private_devel_path=context.package_private_devel_path(
                        package),
                    metadata_path=context.metadata_path(),
                    package_metadata_path=context.package_metadata_path(
                        package),
                    dry_run=dry_run))

            # Remove devel space
            stages.append(
                FunctionStage(
                    'rmdevel',
                    rmfiles,
                    paths=[context.package_private_devel_path(package)],
                    dry_run=dry_run))
        elif context.isolate_devel:
            # Remove devel space
            stages.append(
                FunctionStage('rmdevel',
                              rmfiles,
                              paths=[context.package_devel_space(package)],
                              dry_run=dry_run))

    # Remove build space
    if clean_build:
        stages.append(
            FunctionStage('rmbuild',
                          rmfiles,
                          paths=[build_space],
                          dry_run=dry_run))

    # Remove cached metadata
    if clean_build and clean_devel and clean_install:
        stages.append(
            FunctionStage('rmmetadata',
                          rmfiles,
                          paths=[metadata_path],
                          dry_run=dry_run))

    return Job(jid=package.name,
               deps=dependencies,
               env_loader=get_env_loader(package, context),
               stages=stages)
Example 18
def create_python_build_job(context, package, package_path, dependencies, force_cmake, pre_clean):

    # Package source space path
    pkg_dir = os.path.join(context.source_space_abs, package_path)

    # Package build space path
    build_space = context.package_build_space(package)

    # Package metadata path
    metadata_path = context.package_metadata_path(package)

    # Environment dictionary for the job, which will be built
    # up by the executions in the loadenv stage.
    job_env = dict(os.environ)

    # Some Python packages (in particular matplotlib) seem to struggle with
    # being built by ccache, so strip that out if present.
    def strip_ccache(cc_str):
        parts = cc_str.split()
        return ' '.join([part for part in parts if 'ccache' not in part])
    if 'CC' in job_env:
        job_env['CC'] = strip_ccache(job_env['CC'])
    if 'CXX' in job_env:
        job_env['CXX'] = strip_ccache(job_env['CXX'])

    # Get actual staging path
    dest_path = context.package_dest_path(package)
    final_path = context.package_final_path(package)


    # Determine if a Python executable has been passed in (e.g. via the CMake args).
    determine_python_exec(context.cmake_args)

    # Determine the Python version being used.
    python_version = determine_python_version()

    # Create job stages
    stages = []

    # Load environment for job.
    stages.append(FunctionStage(
        'loadenv',
        loadenv,
        locked_resource='installspace',
        job_env=job_env,
        package=package,
        context=context
    ))

    # Create package metadata dir
    stages.append(FunctionStage(
        'mkdir',
        makedirs,
        path=metadata_path
    ))

    # Copy source manifest
    stages.append(FunctionStage(
        'cache-manifest',
        copyfiles,
        source_paths=[os.path.join(context.source_space_abs, package_path, 'package.xml')],
        dest_path=os.path.join(metadata_path, 'package.xml')
    ))

    # Check if this package supports --single-version-externally-managed flag, as some old
    # distutils packages don't, notably pyyaml. The following check is fast and cheap. A more
    # comprehensive check would be to parse the results of python setup.py --help or similar,
    # but that is expensive to do, since it has to occur at the start of the build.
    with open(os.path.join(pkg_dir, 'setup.py')) as f:
        setup_file_contents = f.read()
    svem_supported = re.search('(from|import) setuptools', setup_file_contents)

    # Python setup install
    stages.append(CommandStage(
        'python',
        [PYTHON_EXEC, 'setup.py',
         'build', '--build-base', build_space,
         'install',
         '--root', build_space,
         '--prefix', 'install'] +
        (['--single-version-externally-managed'] if svem_supported else []),
        cwd=pkg_dir
    ))

    # Special path rename required only on Debian.
    python_install_dir = get_python_install_dir()
    if 'dist-packages' in python_install_dir:
        python_install_dir_site = python_install_dir.replace('dist-packages', 'site-packages')
        if python_version['major'] == 3:
            python_install_dir = python_install_dir.replace('python%s.%s' % (python_version['major'], python_version['minor']), 'python%s' % python_version['major'])

        stages.append(FunctionStage(
            'debian-fix',
            renamepath,
            source_path=os.path.join(build_space, 'install', python_install_dir_site),
            dest_path=os.path.join(build_space, 'install', python_install_dir)
        ))

    # Create package install space.
    stages.append(FunctionStage(
        'mkdir-install',
        makedirs,
        path=dest_path
    ))


    # Copy files from staging area into final install path, using rsync. Despite
    # having to spawn a process, this is much faster than copying one by one
    # with native Python.
    stages.append(CommandStage(
        'install',
        [RSYNC_EXEC, '-a',
            os.path.join(build_space, 'install', ''),
            dest_path],
        cwd=pkg_dir,
        locked_resource='installspace'))

    # Fix shebangs that point to the global Python so they use the configured executable.
    stages.append(FunctionStage(
        'fix-shebang',
        fix_shebangs,
        pkg_dir=dest_path,
        python_exec=PYTHON_EXEC,
        locked_resource=None if context.isolate_install else 'installspace'))

    # Determine the location where the setup.sh file should be created
    stages.append(FunctionStage(
        'setupgen',
        generate_setup_file,
        context=context,
        install_target=dest_path
    ))

    stages.append(FunctionStage(
        'envgen',
        generate_env_file,
        context=context,
        install_target=dest_path
    ))

    # Fix the setup.sh, which otherwise exports a PYTHONPATH inconsistent with the
    # python3 vs. python3.X install-path rename applied above.
    if python_version['major'] == 3:
        stages.append(FunctionStage(
            'fix_python3_install_space',
            fix_python3_install_space,
            install_space=dest_path,
            old_python="%s.%s" % (python_version['major'], python_version['minor']),
            new_python=python_version['major'],
            locked_resource='installspace'
        ))

    return Job(
        jid=package.name,
        deps=dependencies,
        env=job_env,
        stages=stages)
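
Finally, fix_shebangs rewrites interpreter lines in the installed scripts. A minimal sketch consistent with the arguments passed above (pkg_dir, python_exec), assuming the usual (logger, event_queue, **kwargs) stage signature:

import os

def fix_shebangs(logger, event_queue, pkg_dir, python_exec):
    for root, _dirs, files in os.walk(pkg_dir):
        for name in files:
            path = os.path.join(root, name)
            try:
                with open(path, 'r') as f:
                    lines = f.readlines()
            except UnicodeDecodeError:
                continue  # skip binary files
            if lines and lines[0].startswith('#!') and 'python' in lines[0]:
                lines[0] = '#!%s\n' % python_exec
                with open(path, 'w') as f:
                    f.writelines(lines)
    return 0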