def _get_jobs(self, args, installers, decorators):
    """Create a bundle Job for every selected package.

    :param args: the parsed top-level command line arguments
    :param installers: mapping of installer name to installer instance
    :param decorators: package decorators in topological order
    :returns: an OrderedDict mapping package name to its Job
    """
    jobs = OrderedDict()
    workspace_package_names = [
        decorator.descriptor.name for decorator in decorators]
    logger.info(
        'Including {} in bundle...'.format(workspace_package_names))
    for decorator in decorators:
        if not decorator.selected:
            continue
        pkg = decorator.descriptor
        extension = get_task_extension(
            'colcon_bundle.task.bundle', pkg.type)
        if not extension:
            # logger.warn is a deprecated alias of logger.warning
            logger.warning(
                'No task extension to bundle a {pkg.type} package'
                .format_map(locals()))
            continue
        # Resolve the install path of each recursive dependency; with a
        # merged install space all packages share the same base path.
        recursive_dependencies = OrderedDict()
        for dep_name in decorator.recursive_dependencies:
            dep_path = args.install_base
            if not args.merge_install:
                dep_path = os.path.join(dep_path, dep_name)
            recursive_dependencies[dep_name] = dep_path
        dest = self.task_argument_destinations.values()
        package_args = BundlePackageArguments(
            pkg, installers, args, additional_destinations=dest)
        ordered_package_args = ', '.join([
            ('%s: %s' % (repr(k), repr(package_args.__dict__[k])))
            for k in sorted(package_args.__dict__.keys())
        ])
        logger.debug(
            'Bundling package {pkg.name} with the following arguments: '
            '{{{ordered_package_args}}}'.format_map(locals()))
        task_context = TaskContext(
            pkg=pkg, args=package_args,
            dependencies=recursive_dependencies)
        # The bundle tasks look up the installers on the context
        task_context.installers = installers
        job = Job(
            identifier=pkg.name,
            dependencies=set(recursive_dependencies.keys()),
            task=extension, task_context=task_context)
        jobs[pkg.name] = job
    return jobs
async def test_bundle():
    """PythonBundleTask forwards dependencies to the pip and apt installers."""
    descriptor = PackageDescriptor('some/path')
    descriptor.name = 'python_package'
    descriptor.dependencies['run'] = [
        DependencyDescriptor('pkg1', metadata={'version_eq': '1.3.2'}),
        DependencyDescriptor('pkg_in_workspace'),
        DependencyDescriptor('pkg2', metadata={'version_lt': '1.2'}),
        'ignored_pkg',
    ]
    pip_installer = MagicMock()
    apt_installer = MagicMock()
    verb_args = MagicMock(
        build_base='build/base',
        install_base='install/base',
        bundle_base='bundle/base')
    package_args = BundlePackageArguments(
        descriptor,
        {'pip3': pip_installer, 'apt': apt_installer},
        verb_args)
    task = PythonBundleTask()
    task.set_context(context=TaskContext(
        pkg=descriptor,
        args=package_args,
        dependencies={'pkg_in_workspace': 'path/to/pkg'}))

    await task.bundle()

    # The version-pinned pip dependency is forwarded with its exact version
    pip_installer.add_to_install_list.assert_called_once_with('pkg1==1.3.2')
    # apt receives exactly these two packages, in this order
    apt_calls = apt_installer.add_to_install_list.call_args_list
    assert len(apt_calls) == 2
    assert apt_calls[0][0][0] == 'libpython3-dev'
    assert apt_calls[1][0][0] == 'python3-pip'
async def test_rosdistro_not_defined():
    """bundle() must raise RuntimeError when no ROS distribution is set."""
    pkg = PackageDescriptor('package/path')
    pkg.name = 'MyPackageName'
    pkg.dependencies['run'] = {}
    installers = {'apt': MagicMock()}
    verb_args = MagicMock(
        build_base='build/base',
        install_base='install/base',
        bundle_base='bundle/base')
    args = BundlePackageArguments(pkg, installers, verb_args)
    task = RosBundle()
    task.set_context(context=TaskContext(pkg=pkg, args=args, dependencies={}))
    # Concise read on why it's patched this way.
    # http://www.voidspace.org.uk/python/mock/patch.html#where-to-patch
    with patch('colcon_ros_bundle.task.ros_bundle.RosdepWrapper') as wrapper:  # noqa: E501
        wrapper().get_rule.side_effect = _get_rule_side_effect
        wrapper().resolve.side_effect = _resolve_side_effect
        with pytest.raises(RuntimeError):
            await task.bundle()
    # Nothing may be scheduled for installation on failure
    installers['apt'].add_to_install_list.assert_not_called()
async def test_include_ros_base():
    """With exclude_ros_base False, ros-base is added to the apt list."""
    pkg = PackageDescriptor('package/path')
    pkg.name = 'MyPackageName'
    pkg.dependencies['run'] = {}
    installers = {'apt': MagicMock()}
    verb_args = MagicMock(
        build_base='build/base',
        install_base='install/base',
        bundle_base='bundle/base')
    args = BundlePackageArguments(pkg, installers, verb_args)
    args.ros_distribution = 'kinetic'
    args.exclude_ros_base = False
    task = RosBundle()
    task.set_context(context=TaskContext(pkg=pkg, args=args, dependencies={}))
    # Concise read on why it's patched this way.
    # http://www.voidspace.org.uk/python/mock/patch.html#where-to-patch
    with patch('colcon_ros_bundle.task.ros_bundle.RosdepWrapper') as wrapper:  # noqa: E501
        with patch('os.environ') as environ:
            environ.__getitem__.side_effect = access_var
            wrapper().get_rule.side_effect = _get_rule_side_effect
            wrapper().resolve.side_effect = _resolve_side_effect
            await task.bundle()
    installers['apt'].add_to_install_list.assert_called_with(
        'ros-kinetic-ros-base')
def test_build_package():
    """Build a minimal setuptools package and verify the produced trees."""
    event_loop = new_event_loop()
    asyncio.set_event_loop(event_loop)
    try:
        with TemporaryDirectory(prefix='test_colcon_') as tmp_path_str:
            tmp_path = Path(tmp_path_str)
            task = PythonBuildTask()
            package = PackageDescriptor(tmp_path / 'src')
            package.name = 'test_package'
            package.type = 'python'
            task.set_context(context=TaskContext(
                pkg=package,
                args=SimpleNamespace(
                    path=str(tmp_path / 'src'),
                    build_base=str(tmp_path / 'build'),
                    install_base=str(tmp_path / 'install'),
                    symlink_install=False,
                ),
                dependencies={},
            ))

            # Lay out a minimal package: setup.py plus one importable module
            pkg = task.context.pkg
            pkg.path.mkdir()
            (pkg.path / 'setup.py').write_text(
                'from setuptools import setup\n'
                'setup(\n'
                ' name="test_package",\n'
                ' packages=["my_module"],\n'
                ')\n'
            )
            (pkg.path / 'my_module').mkdir()
            (pkg.path / 'my_module' / '__init__.py').touch()

            src_base = Path(task.context.args.path)
            source_files_before = set(src_base.rglob('*'))
            event_loop.run_until_complete(task.build())
            source_files_after = set(src_base.rglob('*'))
            # Building must not add or remove anything in the source tree
            assert source_files_before == source_files_after

            # The module must show up once in both build and install spaces
            build_base = Path(task.context.args.build_base)
            assert 1 == len(list(build_base.rglob('my_module/__init__.py')))
            install_base = Path(task.context.args.install_base)
            assert 1 == len(list(install_base.rglob('my_module/__init__.py')))

            # Exactly one PKG-INFO, carrying the normalized project name
            pkg_info, = install_base.rglob('PKG-INFO')
            assert 'Name: test-package' in pkg_info.read_text().splitlines()
    finally:
        event_loop.close()
def _get_jobs(self, args, decorators, install_base):
    """Create a build Job for every selected package.

    Unselected packages are gathered and returned alongside the jobs.
    """
    jobs = OrderedDict()
    unselected_packages = set()
    for decorator in decorators:
        pkg = decorator.descriptor
        if not decorator.selected:
            unselected_packages.add(pkg)
            continue
        task_extension = get_task_extension(
            'colcon_core.task.build', pkg.type)
        if not task_extension:
            logger.warning(
                "No task extension to 'build' a '{pkg.type}' package"
                .format_map(locals()))
            continue
        # Resolve the install path of each recursive dependency; a merged
        # install space means all dependencies share one base path.
        recursive_dependencies = OrderedDict()
        for name in decorator.recursive_dependencies:
            if args.merge_install:
                recursive_dependencies[name] = install_base
            else:
                recursive_dependencies[name] = os.path.join(
                    install_base, name)
        package_args = BuildPackageArguments(
            pkg, args,
            additional_destinations=(
                self.task_argument_destinations.values()))
        # keys are unique, so sorting the items sorts by key
        ordered_package_args = ', '.join(
            '%s: %s' % (repr(key), repr(value))
            for key, value in sorted(package_args.__dict__.items()))
        logger.debug(
            "Building package '{pkg.name}' with the following arguments: "
            '{{{ordered_package_args}}}'.format_map(locals()))
        ctx = TaskContext(
            pkg=pkg, args=package_args,
            dependencies=recursive_dependencies)
        jobs[pkg.name] = Job(
            identifier=pkg.name,
            dependencies=set(recursive_dependencies.keys()),
            task=task_extension, task_context=ctx)
    return jobs, unselected_packages
async def test_bundle():
    """Each resolved dependency is routed to its matching installer."""
    pkg = PackageDescriptor('package/path')
    pkg.name = 'MyPackageName'
    pkg.dependencies['run'] = {
        DependencyDescriptor('source_pkg'),
        DependencyDescriptor('other_pkg'),
        DependencyDescriptor('system_pkg'),
    }
    installers = {
        name: MagicMock() for name in ('rdmanifest', 'system', 'other')}
    verb_args = MagicMock(
        build_base='build/base',
        install_base='install/base',
        bundle_base='bundle/base')
    args = BundlePackageArguments(pkg, installers, verb_args)
    args.ros_distribution = 'kinetic'
    args.exclude_ros_base = True
    task = RosBundle()
    task.set_context(context=TaskContext(pkg=pkg, args=args, dependencies={}))
    # Concise read on why it's patched this way.
    # http://www.voidspace.org.uk/python/mock/patch.html#where-to-patch
    with patch('colcon_ros_bundle.task.ros_bundle.RosdepWrapper') as wrapper:  # noqa: E501
        wrapper().get_rule.side_effect = _get_rule_side_effect
        wrapper().resolve.side_effect = _resolve_side_effect
        await task.bundle()
    installers['rdmanifest'].add_to_install_list.assert_called_with(
        'source_pkg', {'s': 'ource', 'uri': 'rdmanifest'})
    installers['other'].add_to_install_list.assert_called_with(
        'other_pkg_name')
    installers['system'].add_to_install_list.assert_called_with(
        'system_pkg_name')
async def test_task_test():
    """BazelTestTask.test() returns a truthy result for a minimal setup."""
    with TemporaryDirectory(prefix='test_colcon_') as basepath:
        task = BazelTestTask()
        desc = PackageDescriptor(basepath)
        desc.name = "test"
        verb_args = MockArgs(basepath)
        pkg_args = TestPackageArguments(desc, verb_args)
        pkg_args.path = basepath
        # Mirror the verb-level settings onto the package-level arguments
        for attr in (
                'build_base', 'install_base',
                'merge_install', 'test_result_base'):
            setattr(pkg_args, attr, getattr(verb_args, attr))
        task.set_context(context=TaskContext(
            pkg=desc, args=pkg_args, dependencies=set()))
        assert await task.test()
def test_pytest_match():
    """match() is true exactly when pytest appears in the test requirements."""
    extension = PytestPythonTestingStep()
    env = {}
    desc = PackageDescriptor('/dev/null')
    context = TaskContext(pkg=desc, args=None, dependencies=None)
    desc.name = 'pkg-name'
    desc.type = 'python'

    cases = (
        # (setup options, expected match)
        ({}, False),  # no test requirements
        ({'tests_require': ['nose']}, False),  # pytest not in tests_require
        # pytest not in extras_require.test
        ({'extras_require': {'test': ['nose']}}, False),
        ({'tests_require': ['pytest']}, True),  # pytest in tests_require
        # pytest in extras_require.test
        ({'extras_require': {'test': ['pytest']}}, True),
    )
    for options, expected in cases:
        # bind the current options as a default to avoid late binding
        desc.metadata['get_python_setup_options'] = (
            lambda env, _options=options: _options)
        result = extension.match(context, env, get_setup_data(desc, env))
        assert bool(result) == expected
def test_context_interface():
    """The base TaskContext leaves event queuing unimplemented."""
    ctx = TaskContext(pkg=None, args=None, dependencies=None)
    with pytest.raises(NotImplementedError):
        ctx.put_event_into_queue(None)
def main(self, *, context):
    """Run lcov jobs per package and aggregate the coverage results.

    Zero-counters / initial runs stop after executing the per-package
    jobs; otherwise the per-package ``coverage.info`` files are added
    together, optionally filtered, and rendered to HTML with genhtml.

    :returns: 0 on success, a non-zero return code otherwise
    """
    check_and_mark_build_tool(context.args.build_base)
    lcov_base_abspath = Path(os.path.abspath(context.args.lcov_base))
    lcov_base_abspath.mkdir(exist_ok=True)
    gcc_pkgs = self._get_gcc_packages(
        context, additional_argument_names=['*'])

    # One capture (or zero-counters) job per package
    jobs = OrderedDict()
    for pkg in gcc_pkgs:
        task_context = TaskContext(
            pkg=pkg, args=context.args, dependencies=OrderedDict())
        if context.args.zero_counters:
            extension = LcovZeroCountersTask()
        else:
            extension = LcovCaptureTask()
        extension.PACKAGE_TYPE = pkg.type
        job = Job(
            identifier=pkg.name,
            dependencies=set(),  # Can be generated in any order
            task=extension,
            task_context=task_context)
        jobs[pkg.name] = job
    rc = execute_jobs(context, jobs)
    if context.args.initial or context.args.zero_counters:
        return rc

    print("\nCalculating total coverage... ")
    total_output_file = str(lcov_base_abspath / 'total_coverage.info')
    if rc == 0:
        # Collect the non-empty per-package coverage.info files
        output_files = []
        for pkg in gcc_pkgs:
            output_file = os.path.abspath(os.path.join(
                context.args.build_base, pkg.name, 'coverage.info'))
            if os.stat(output_file).st_size != 0:
                output_files.append(output_file)
        if len(output_files) == 0:
            logger.error(
                'No valid coverage.info files found. Did you run tests?')
            return 1
        rc = lcov_add(
            context, output_files, total_output_file,
            verbose=context.args.verbose)
        if rc != 0:
            return rc

    if context.args.filter:
        # NOTE(review): this string literal was split by a stray newline
        # in the source; reconstructed as a single-line message.
        print("\nApplying filters... ")
        rc = lcov_remove(context, total_output_file)
        if rc != 0:
            return rc

    print("\nGenerating HTML: ", end='')
    # Check that genhtml exists
    if GENHTML_EXECUTABLE is None:
        raise RuntimeError("Could not find 'genhtml' executable")
    # Generate html
    cmd = [
        GENHTML_EXECUTABLE, '--quiet',
        '--output-directory', str(lcov_base_abspath),
        total_output_file,
        '--config-file', str(context.args.lcov_config_file)
    ]
    if CPP_FILT_EXECUTABLE is not None:
        cmd.extend(['--demangle-cpp'])
    # Strip paths to packages
    for path in context.args.base_paths:
        cmd.extend(['--prefix', str(os.path.abspath(path))])
    rc = subprocess.run(cmd).returncode
    print("Done")
    return rc
def main(self, *, context):  # noqa: D102
    build_base = context.args.build_base
    check_and_mark_build_tool(build_base)

    # Probe once whether the 'coverage' CLI exists; otherwise the tasks
    # fall back to invoking the Python module.
    has_command = has_coverage_command()
    logger.info("'coverage' command available: {has_command}".format_map(
        locals()))

    packages = self._get_coveragepy_packages(context)
    if not packages:
        logger.warning('No packages selected or found')
        return 0

    # Combine each package's .coverage files via one job per package
    jobs = OrderedDict()
    for pkg in packages:
        ctx = TaskContext(
            pkg=pkg, args=context.args, dependencies=OrderedDict())
        jobs[pkg.name] = Job(
            identifier=pkg.name,
            dependencies=set(),
            task=CoveragePyTask(has_command),
            task_context=ctx,
        )
    rc = execute_jobs(context, jobs)

    # Collect all packages' .coverage files, skipping packages whose
    # processing failed and produced no file
    coverage_files = [
        str(Path(CoveragePyTask.get_package_combine_dir(
            build_base, pkg.name)) / '.coverage')
        for pkg in packages
    ]
    coverage_files = [f for f in coverage_files if os.path.exists(f)]
    if not coverage_files:
        logger.warning('No coverage files found')
        return 0
    logger.info('Coverage files: {coverage_files}'.format_map(locals()))

    # Combine everything into a single data set under coveragepy_base
    coveragepy_base_dir = str(
        os.path.abspath(context.args.coveragepy_base))
    Path(coveragepy_base_dir).mkdir(exist_ok=True)
    rc, stdout, _ = coverage_combine(
        coverage_files, coveragepy_base_dir, has_command)
    if 0 == rc and context.args.verbose:
        # Print report
        rc, stdout, _ = coverage_report(
            coveragepy_base_dir,
            context.args.coverage_report_args,
            has_command,
        )
        if 0 == rc:
            print('\n' + stdout)
    # Generate HTML report
    rc, stdout, _ = coverage_html(
        coveragepy_base_dir,
        context.args.coverage_html_args,
        has_command,
    )
    return rc