Code Example #1
File: test_executor.py Project: stonier/colcon-core
def test_execute_jobs():
    context = Mock()
    context.args = Mock()
    context.args.event_handlers = None
    task_context = Mock()
    task_context.pkg = Mock()
    task_context.pkg.name = 'name'
    jobs = {
        'one':
        Job(identifier='id',
            dependencies=set(),
            task=None,
            task_context=task_context)
    }

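    # give the mocked event reactor context manager behavior for execute_jobs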
    event_reactor = Mock()
    event_reactor.__enter__ = lambda self: self
    event_reactor.__exit__ = lambda self, *args: None
    with patch('colcon_core.executor.create_event_reactor',
               return_value=event_reactor):
        with EntryPointContext(extension1=Extension1, extension2=Extension2):
            # no extension selected
            with pytest.raises(AssertionError):
                execute_jobs(context, jobs)

            # execute method not implemented and sending skipped job event
            context.args.executor = 'extension2'
            with patch('colcon_core.executor.logger.error') as error:
                rc = execute_jobs(context, jobs)
            assert rc == 1
            assert error.call_count == 1
            assert len(error.call_args[0]) == 1
            assert error.call_args[0][0].startswith(
                "Exception in executor extension 'extension2': \n")
            assert event_reactor.get_queue().put.call_count == 2
            assert isinstance(
                event_reactor.get_queue().put.call_args_list[0][0][0][0],
                JobQueued)
            assert isinstance(
                event_reactor.get_queue().put.call_args_list[1][0][0][0],
                JobSkipped)

            # successful execution
            event_reactor.get_queue().put.reset_mock()
            jobs['one'].returncode = 0
            extensions = get_executor_extensions()
            extensions[110]['extension2'].execute = \
                lambda args, jobs, on_error: 0
            callback = Mock()
            rc = execute_jobs(context,
                              jobs,
                              on_error=OnError.interrupt,
                              pre_execution_callback=callback)
            assert rc == 0
            assert event_reactor.get_queue().put.call_count == 1
            assert isinstance(event_reactor.get_queue().put.call_args[0][0][0],
                              JobQueued)
            assert callback.call_count == 1
Code Example #2
    def _check_installer_dependency_update(self, context, decorators,
                                           installers, path_context):
        print('Collecting dependency information...')
        logger.info('Collecting dependency information...')
        jobs = self._get_jobs(context.args, installers, decorators)
        rc = execute_jobs(context, jobs)
        if rc != 0:
            return rc

        print('Fetching and installing dependencies...')
        logger.info('Fetching and installing dependencies...')
        installer_metadata = {}
        for name, installer in installers.items():
            installer_metadata[name] = installer.install()

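        # serialize the metadata deterministically so it can be compared with the previous run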
        installer_metadata_string = json.dumps(installer_metadata,
                                               sort_keys=True)

        installer_metadata_path = path_context.installer_metadata_path()
        dependency_match = False
        if os.path.exists(installer_metadata_path):
            with open(installer_metadata_path, 'r') as f:
                previous_metadata = f.read()
                if previous_metadata == installer_metadata_string:
                    dependency_match = True

        with open(installer_metadata_path, 'w') as f:
            f.write(installer_metadata_string)

        return dependency_match
Code Example #3
File: lock.py Project: ruffsl/colcon-cache
    def main(self, *, context):  # noqa: D102
        check_and_mark_build_tool(context.args.build_base)

        self._create_paths(context.args)

        decorators = get_packages(
            context.args,
            additional_argument_names=self.task_argument_destinations,
            recursive_categories=('run', ))

        jobs, unselected_packages = self._get_jobs(context.args, decorators)

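        # report packages that were not selected as JobUnselected events before execution starts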
        def post_unselected_packages(*, event_queue):
            nonlocal unselected_packages
            names = [pkg.name for pkg in unselected_packages]
            for name in sorted(names):
                event_queue.put((JobUnselected(name), None))

        on_error = OnError.continue_
        rc = execute_jobs(context,
                          jobs,
                          on_error=on_error,
                          pre_execution_callback=post_unselected_packages)

        return rc
Code Example #4
    def main(self, *, context):  # noqa: D102
        check_and_mark_build_tool(context.args.build_base)
        check_and_mark_install_layout(context.args.install_base,
                                      merge_install=context.args.merge_install)

        self._create_paths(context.args)

        decorators = get_packages(
            context.args,
            additional_argument_names=self.task_argument_destinations,
            recursive_categories=('run', ))

        install_base = os.path.abspath(
            os.path.join(os.getcwd(), context.args.install_base))
        jobs, unselected_packages = self._get_jobs(context.args, decorators,
                                                   install_base)

        on_error = OnError.interrupt \
            if not context.args.continue_on_error else OnError.skip_downstream

        def post_unselected_packages(*, event_queue):
            nonlocal unselected_packages
            names = [pkg.name for pkg in unselected_packages]
            for name in sorted(names):
                event_queue.put((JobUnselected(name), None))

        rc = execute_jobs(context,
                          jobs,
                          on_error=on_error,
                          pre_execution_callback=post_unselected_packages)

        self._create_prefix_scripts(install_base, context.args.merge_install)

        return rc
Code Example #5
File: test.py Project: sniperkit/colcon-core
    def main(self, *, context):  # noqa: D102
        check_and_mark_build_tool(context.args.build_base)

        decorators = get_packages(
            context.args,
            additional_argument_names=self.task_argument_destinations,
            recursive_categories=('run', ))

        install_base = os.path.abspath(os.path.join(
            os.getcwd(), context.args.install_base))
        jobs = self._get_jobs(context.args, decorators, install_base)

        return execute_jobs(
            context, jobs, abort_on_error=context.args.abort_on_error)
Code Example #6
    def main(self, *, context):  # noqa: D102
        check_and_mark_build_tool(context.args.build_base)
        check_and_mark_install_layout(
            context.args.install_base,
            merge_install=context.args.merge_install)

        decorators = get_packages(
            context.args,
            additional_argument_names=self.task_argument_destinations,
            recursive_categories=('run', ))

        install_base = os.path.abspath(os.path.join(
            os.getcwd(), context.args.install_base))
        jobs = self._get_jobs(context.args, decorators, install_base)

        if context.args.return_code_on_test_failure:
            # watch published events on all jobs to detect any test failures
            any_test_failures = False

            def check_for_test_failures(put_event_into_queue):
                nonlocal any_test_failures

                def put_event_into_queue_(self, event):
                    nonlocal any_test_failures
                    nonlocal put_event_into_queue
                    if isinstance(event, TestFailure):
                        any_test_failures = True
                    return put_event_into_queue(event)

                return put_event_into_queue_

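            # wrap each job's event publisher so TestFailure events are recorded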
            for job in jobs.values():
                job.put_event_into_queue = types.MethodType(
                    check_for_test_failures(job.put_event_into_queue), job)

        on_error = OnError.continue_ \
            if not context.args.abort_on_error else OnError.interrupt
        rc = execute_jobs(context, jobs, on_error=on_error)

        if context.args.return_code_on_test_failure:
            if not rc and any_test_failures:
                return 1

        return rc
Code Example #7
    def main(self, *, context):  # noqa: D102
        check_and_mark_build_tool(context.args.build_base)

        self._create_paths(context.args)

        decorators = get_packages(
            context.args,
            additional_argument_names=self.task_argument_destinations,
            recursive_categories=('run', ))

        install_base = os.path.abspath(
            os.path.join(os.getcwd(), context.args.install_base))
        jobs = self._get_jobs(context.args, decorators, install_base)

        rc = execute_jobs(context, jobs)

        self._create_prefix_scripts(install_base, context.args.merge_install)

        return rc
Code Example #8
    def main(self, *, context):  # noqa: D102
        check_and_mark_build_tool(context.args.build_base)
        check_and_mark_install_layout(context.args.install_base,
                                      merge_install=context.args.merge_install)

        self._create_paths(context.args)

        decorators = get_packages(
            context.args,
            additional_argument_names=self.task_argument_destinations,
            recursive_categories=('run', ))

        install_base = os.path.abspath(
            os.path.join(os.getcwd(), context.args.install_base))
        jobs = self._get_jobs(context.args, decorators, install_base)

        on_error = OnError.interrupt \
            if not context.args.continue_on_error else OnError.skip_downstream
        rc = execute_jobs(context, jobs, on_error=on_error)

        self._create_prefix_scripts(install_base, context.args.merge_install)

        return rc
Code Example #9
    def _manage_dependencies(self, context,
                             path_context,
                             upgrade_deps_graph):
        destinations = self.task_argument_destinations
        decorators = get_packages(context.args,
                                  additional_argument_names=destinations,
                                  recursive_categories=('run',))
        if len(decorators) == 0:
            estr = 'We did not find any packages to add to the '\
                   'bundle. This might be because you are not '\
                   'in the right directory, or your workspace is '\
                   'not set up correctly for colcon. Please see '\
                   'https://github.com/colcon/colcon-ros-bundle/issues/13 '\
                   'for some possible suggestions. If you are still having '\
                   'trouble please post to our '\
                   'issues: https://github.com/colcon/colcon-bundle/issues '\
                   'and we will be happy to help.'
            raise RuntimeError(estr)

        self._installer_manager.setup_installers(context)

        print('Checking if dependency tarball exists...')
        logger.info('Checking if dependency tarball exists...')

        jobs = self._get_jobs(context.args,
                              self._installer_manager.installers,
                              decorators)
        rc = execute_jobs(context, jobs)
        if rc != 0:
            return rc

        direct_dependencies_changed = package_dependencies_changed(
            path_context, decorators)
        installer_parameters_changed = \
            self._installer_manager.cache_invalid()

        if not os.path.exists(path_context.dependencies_overlay_path()):
            self._installer_manager.run_installers(
                include_sources=context.args.include_sources)
            return True
        elif upgrade_deps_graph:
            print('Checking if dependency graph has changed since last '
                  'bundle...')
            logger.info('Checking if dependency graph has changed since last'
                        ' bundle...')
            if self._installer_manager.run_installers(
                    include_sources=context.args.include_sources):
                print('All dependencies in dependency graph not changed, '
                      'skipping dependencies update...')
                logger.info('All dependencies in dependency graph not changed,'
                            ' skipping dependencies update...')
                return False
        else:
            print('Checking if local dependencies have changed since last'
                  ' bundle...')
            logger.info(
                'Checking if local dependencies have changed since last'
                ' bundle...')
            if not direct_dependencies_changed and \
                    not installer_parameters_changed:
                print('Local dependencies not changed, skipping dependencies'
                      ' update...')
                logger.info(
                    'Local dependencies not changed, skipping dependencies'
                    ' update...')
                return False
            self._installer_manager.run_installers(
                include_sources=context.args.include_sources)
        return True
Code Example #10
    def main(self, *, context):
        check_and_mark_build_tool(context.args.build_base)

        lcov_base_abspath = Path(os.path.abspath(context.args.lcov_base))
        lcov_base_abspath.mkdir(exist_ok=True)

        gcc_pkgs = self._get_gcc_packages(context,
                                          additional_argument_names=['*'])

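        # build one lcov job per package, either zeroing counters or capturing coverage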
        jobs = OrderedDict()
        for pkg in gcc_pkgs:
            task_context = TaskContext(pkg=pkg,
                                       args=context.args,
                                       dependencies=OrderedDict())

            if context.args.zero_counters:
                extension = LcovZeroCountersTask()
            else:
                extension = LcovCaptureTask()
            extension.PACKAGE_TYPE = pkg.type

            job = Job(
                identifier=pkg.name,
                dependencies=set(),  # Can be generated in any order
                task=extension,
                task_context=task_context)
            jobs[pkg.name] = job

        rc = execute_jobs(context, jobs)

        if context.args.initial or context.args.zero_counters:
            return rc

        print("\nCalculating total coverage... ")
        total_output_file = str(lcov_base_abspath / 'total_coverage.info')
        if rc == 0:
            output_files = []
            for pkg in gcc_pkgs:
                output_file = os.path.abspath(
                    os.path.join(context.args.build_base, pkg.name,
                                 'coverage.info'))
                if os.stat(output_file).st_size != 0:
                    output_files.append(output_file)
            if len(output_files) == 0:
                logger.error(
                    'No valid coverage.info files found. Did you run tests?')
                return 1
            rc = lcov_add(context,
                          output_files,
                          total_output_file,
                          verbose=context.args.verbose)

        if rc != 0:
            return rc

        if context.args.filter:
            print("\nApplying filters... ")
            rc = lcov_remove(context, total_output_file)

        if rc != 0:
            return rc

        print("\nGenerating HTML: ", end='')
        # Check that genhtml exists
        if GENHTML_EXECUTABLE is None:
            raise RuntimeError("Could not find 'genhtml' executable")

        # Generate html
        cmd = [
            GENHTML_EXECUTABLE, '--quiet', '--output-directory',
            str(lcov_base_abspath), total_output_file, '--config-file',
            str(context.args.lcov_config_file)
        ]
        if CPP_FILT_EXECUTABLE is not None:
            cmd.extend(['--demangle-cpp'])
        # Strip paths to packages
        for path in context.args.base_paths:
            cmd.extend(['--prefix', str(os.path.abspath(path))])
        rc = subprocess.run(cmd).returncode
        print("Done")
        return rc
Code Example #11
    def main(self, *, context):  # noqa: D102
        check_and_mark_build_tool(context.args.build_base)
        check_and_mark_install_layout(context.args.install_base,
                                      merge_install=context.args.merge_install)

        self._create_paths(context.args)

        decorators = get_packages(
            context.args,
            additional_argument_names=self.task_argument_destinations,
            recursive_categories=('run', ))

        install_base = os.path.abspath(
            os.path.join(os.getcwd(), context.args.install_base))
        jobs, unselected_packages = self._get_jobs(context.args, decorators,
                                                   install_base)

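        # map each package installed in an underlay workspace to the prefixes providing it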
        underlay_packages = {}
        for prefix_path in get_chained_prefix_path():
            packages = find_installed_packages(Path(prefix_path))
            if packages:
                for pkg, path in packages.items():
                    if pkg not in underlay_packages:
                        underlay_packages[pkg] = []
                    underlay_packages[pkg].append(str(path))

        override_messages = {}
        for overlay_package in jobs.keys():
            if overlay_package in underlay_packages:
                if overlay_package not in context.args.allow_overriding:
                    override_messages[overlay_package] = (
                        "'{overlay_package}'".format_map(locals()) +
                        ' is in: ' +
                        ', '.join(underlay_packages[overlay_package]))

        if override_messages:
            override_msg = (
                'Some selected packages are already built in one or more'
                ' underlay workspaces:'
                '\n\t' + '\n\t'.join(override_messages.values()) +
                '\nIf a package in a merged underlay workspace is overridden'
                ' and it installs headers, then all packages in the overlay'
                ' must sort their include directories by workspace order.'
                ' Failure to do so may result in build failures or undefined'
                ' behavior at run time.'
                '\nIf the overridden package is used by another package'
                ' in any underlay, then the overriding package in the'
                ' overlay must be API and ABI compatible or undefined'
                ' behavior at run time may occur.'
                '\n\nIf you understand the risks and want to override a'
                ' package anyways, add the following to the command'
                ' line:'
                '\n\t--allow-overriding ' +
                ' '.join(sorted(override_messages.keys())))

            logger.warn(override_msg +
                        '\n\nThis may be promoted to an error in a'
                        ' future release of colcon-core.')

        on_error = OnError.interrupt \
            if not context.args.continue_on_error else OnError.skip_downstream

        def post_unselected_packages(*, event_queue):
            nonlocal unselected_packages
            names = [pkg.name for pkg in unselected_packages]
            for name in sorted(names):
                event_queue.put((JobUnselected(name), None))

        rc = execute_jobs(context,
                          jobs,
                          on_error=on_error,
                          pre_execution_callback=post_unselected_packages)

        self._create_prefix_scripts(install_base, context.args.merge_install)

        return rc
Code Example #12
    def main(self, *, context):  # noqa: D102
        build_base = context.args.build_base
        check_and_mark_build_tool(build_base)

        # Check once if the 'coverage' command is available,
        # otherwise we will need to fall back on using the Python module
        has_command = has_coverage_command()
        logger.info("'coverage' command available: {has_command}".format_map(
            locals()))

        # Get packages
        coveragepy_pkgs = self._get_coveragepy_packages(context)
        if not coveragepy_pkgs:
            logger.warning('No packages selected or found')
            return 0

        # Combine each package's .coverage files
        jobs = OrderedDict()
        for pkg in coveragepy_pkgs:
            task_context = TaskContext(
                pkg=pkg,
                args=context.args,
                dependencies=OrderedDict(),
            )
            task = CoveragePyTask(has_command)
            job = Job(
                identifier=pkg.name,
                dependencies=set(),
                task=task,
                task_context=task_context,
            )
            jobs[pkg.name] = job
        rc = execute_jobs(context, jobs)

        # Get all packages' .coverage files
        coverage_files = [
            str(
                Path(
                    CoveragePyTask.get_package_combine_dir(
                        build_base, pkg.name)) / '.coverage')
            for pkg in coveragepy_pkgs
        ]
        # Filter out non-existing files in case processing failed for some packages
        coverage_files = list(filter(os.path.exists, coverage_files))
        if 0 == len(coverage_files):
            logger.warning('No coverage files found')
            return 0
        logger.info('Coverage files: {coverage_files}'.format_map(locals()))

        # Combine .coverage files
        coveragepy_base_dir = str(os.path.abspath(
            context.args.coveragepy_base))
        Path(coveragepy_base_dir).mkdir(exist_ok=True)
        rc, stdout, _ = coverage_combine(coverage_files, coveragepy_base_dir,
                                         has_command)
        if 0 == rc and context.args.verbose:
            # Print report
            rc, stdout, _ = coverage_report(
                coveragepy_base_dir,
                context.args.coverage_report_args,
                has_command,
            )
            if 0 == rc:
                print('\n' + stdout)
        # Generate HTML report
        rc, stdout, _ = coverage_html(
            coveragepy_base_dir,
            context.args.coverage_html_args,
            has_command,
        )
        return rc
Code Example #13
    def _manage_dependencies(self, context,
                             install_base, bundle_base,
                             staging_path, installer_metadata_path):

        check_and_mark_install_layout(
            install_base, merge_install=context.args.merge_install)
        self._create_path(bundle_base)
        check_and_mark_bundle_tool(bundle_base)

        destinations = self.task_argument_destinations
        decorators = get_packages(context.args,
                                  additional_argument_names=destinations,
                                  recursive_categories=('run',))

        installers = self._setup_installers(context)

        print('Collecting dependency information...')
        jobs = self._get_jobs(context.args, installers, decorators)
        rc = execute_jobs(context, jobs)
        if rc != 0:
            return rc

        print('Fetching and installing dependencies...')
        installer_metadata = {}
        for name, installer in installers.items():
            installer_metadata[name] = installer.install()

        installer_metadata_string = json.dumps(installer_metadata,
                                               sort_keys=True)

        dependencies_changed = True
        if os.path.exists(installer_metadata_path):
            with open(installer_metadata_path, 'r') as f:
                previous_metadata = f.read()
                if previous_metadata == installer_metadata_string:
                    dependencies_changed = False

        with open(installer_metadata_path, 'w') as f:
            f.write(installer_metadata_string)

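        # archive the installers' source caches when dependencies changed and sources are requested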
        if context.args.include_sources and dependencies_changed:
            sources_tar_gz_path = os.path.join(bundle_base, 'sources.tar.gz')
            with tarfile.open(
                    sources_tar_gz_path, 'w:gz', compresslevel=5) as archive:
                for name, directory in self.installer_cache_dirs.items():
                    sources_path = os.path.join(directory, 'sources')
                    if not os.path.exists(sources_path):
                        continue
                    for filename in os.listdir(sources_path):
                        file_path = os.path.join(sources_path, filename)
                        archive.add(
                            file_path,
                            arcname=os.path.join(
                                name, os.path.basename(file_path)))

        if dependencies_changed:
            update_symlinks(staging_path)
            # TODO: Update pkgconfig files?
            update_shebang(staging_path)
            # TODO: Move this to colcon-ros-bundle
            rewrite_catkin_package_path(staging_path)

        return dependencies_changed