def select_libc_objects(platform: Platform, native_toolchain: NativeToolchain) -> LibcObjects:
  # We use lambdas here to avoid searching for libc on osx, where it will fail.
  paths = platform.match({
    Platform.darwin: lambda: [],
    Platform.linux: lambda: native_toolchain._libc_dev.get_libc_objects(),
  })()
  yield LibcObjects(paths)
def test_gcc_version(self):
  scheduler = self._sched()
  platform = Platform.create()
  gcc_subsystem = global_subsystem_instance(GCC)
  gcc_version = gcc_subsystem.version()

  gcc_c_compiler = self.execute_expecting_one_result(
    scheduler, GCCCCompiler, self.toolchain).value
  gcc = gcc_c_compiler.c_compiler
  gcc_version_out = self._invoke_capturing_output(
    [gcc.exe_filename, '--version'],
    env=gcc.get_invocation_environment_dict(platform))
  gcc_version_regex = re.compile(
    '^gcc.*{}$'.format(re.escape(gcc_version)), flags=re.MULTILINE)
  self.assertIsNotNone(gcc_version_regex.search(gcc_version_out))

  gcc_cpp_compiler = self.execute_expecting_one_result(
    scheduler, GCCCppCompiler, self.toolchain).value
  gpp = gcc_cpp_compiler.cpp_compiler
  gpp_version_out = self._invoke_capturing_output(
    [gpp.exe_filename, '--version'],
    env=gpp.get_invocation_environment_dict(platform))
  gpp_version_regex = re.compile(
    r'^g\+\+.*{}$'.format(re.escape(gcc_version)), flags=re.MULTILINE)
  self.assertIsNotNone(gpp_version_regex.search(gpp_version_out))
def test_clang_version(self):
  scheduler = self._sched()
  platform = Platform.create()

  llvm_subsystem = global_subsystem_instance(LLVM)
  llvm_version = llvm_subsystem.version()
  llvm_version_regex = re.compile(
    '^clang version {}'.format(re.escape(llvm_version)), flags=re.MULTILINE)

  llvm_c_compiler = self.execute_expecting_one_result(
    scheduler, LLVMCCompiler, self.toolchain).value
  clang = llvm_c_compiler.c_compiler
  clang_version_out = self._invoke_capturing_output(
    [clang.exe_filename, '--version'],
    env=clang.get_invocation_environment_dict(platform))
  self.assertIsNotNone(llvm_version_regex.search(clang_version_out))

  llvm_cpp_compiler = self.execute_expecting_one_result(
    scheduler, LLVMCppCompiler, self.toolchain).value
  clangpp = llvm_cpp_compiler.cpp_compiler
  clangpp_version_out = self._invoke_capturing_output(
    [clangpp.exe_filename, '--version'],
    env=clangpp.get_invocation_environment_dict(platform))
  self.assertIsNotNone(llvm_version_regex.search(clangpp_version_out))
def _conan_os_name(self):
  return Platform.create().resolve_platform_specific({
    'darwin': lambda: 'Macos',
    'linux': lambda: 'Linux',
  })
def _get_current_platform_string(self):
  return Platform.create().resolve_platform_specific({
    'darwin': lambda: 'macosx-10.12-x86_64',
    'linux': lambda: 'linux-x86_64',
  })
def test_build_conan_cmdline_args(self):
  pkg_spec = 'test/1.0.0@conan/stable'
  cr = ConanRequirement(pkg_spec=pkg_spec)
  platform = Platform.create()
  conan_os_name = platform.resolve_platform_specific(self.CONAN_OS_NAME)
  expected = ['install', 'test/1.0.0@conan/stable',
              '-s', 'os={}'.format(conan_os_name)]
  self.assertEqual(cr.fetch_cmdline_args, expected)
def select_libc_objects(platform: Platform, native_toolchain: NativeToolchain) -> LibcObjects:
  # We use lambdas here to avoid searching for libc on osx, where it will fail.
  paths = platform.resolve_for_enum_variant({
    'darwin': lambda: [],
    'linux': lambda: native_toolchain._libc_dev.get_libc_objects(),
  })()
  yield LibcObjects(paths)
def _assert_ctypes_third_party_integration(self, toolchain_variant):
  pants_binary = self.run_pants(
    ['binary', self._binary_target_with_third_party],
    config={
      'native-build-settings': {
        'toolchain_variant': toolchain_variant,
      },
    })
  self.assert_success(pants_binary)

  # TODO(#6848): this fails when run with gcc on osx as it requires gcc's libstdc++.so.6.dylib to
  # be available on the runtime library path.
  attempt_pants_run = Platform.create().resolve_platform_specific({
    'darwin': lambda: toolchain_variant != 'gnu',
    'linux': lambda: True,
  })
  if attempt_pants_run:
    pants_run = self.run_pants(
      ['-q', 'run', self._binary_target_with_third_party],
      config={
        'native-build-settings': {
          'toolchain_variant': toolchain_variant,
        },
      })
    self.assert_success(pants_run)
    self.assertIn('Test worked!\n', pants_run.stdout_data)
def test_build_conan_cmdline_args(self):
  pkg_spec = 'test/1.0.0@conan/stable'
  cr = ConanRequirement(pkg_spec=pkg_spec)
  platform = Platform.create()
  conan_os_name = platform.resolve_platform_specific(self.CONAN_OS_NAME)
  expected = [
    'install', 'test/1.0.0@conan/stable',
    '-s', 'os={}'.format(conan_os_name),
  ]
  self.assertEqual(cr.fetch_cmdline_args, expected)
def setUp(self):
  init_subsystems([LibcDev], options={
    'libc': {
      'enable_libc_search': False,
      'libc_dir': '/does/not/exist',
    },
  })
  self.libc = global_subsystem_instance(LibcDev)
  self.platform = Platform.create()
def setUp(self):
  init_subsystems([LibcDev], options={
    'libc': {
      'enable_libc_search': True,
      'host_compiler': 'this_executable_does_not_exist',
    },
  })
  self.libc = global_subsystem_instance(LibcDev)
  self.platform = Platform.create()
def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant):
  # TODO: consider making this mock_buildroot/run_pants_with_workdir into a
  # PantsRunIntegrationTest method!
  with self.mock_buildroot(
      dirs_to_copy=[self._binary_interop_target_dir]) as buildroot, buildroot.pushd():

    # Replace strict_deps=False with nothing so we can override it (because target values for this
    # option take precedence over subsystem options).
    orig_wrapped_math_build = read_file(self._wrapped_math_build_file, binary_mode=False)
    without_strict_deps_wrapped_math_build = re.sub(
      'strict_deps=False,', '', orig_wrapped_math_build)
    safe_file_dump(self._wrapped_math_build_file,
                   without_strict_deps_wrapped_math_build, mode='w')

    # This should fail because it does not turn on strict_deps for a target which requires it.
    pants_binary_strict_deps_failure = self.run_pants_with_workdir(
      command=['binary', self._binary_target_with_interop],
      # Explicitly set to True (although this is the default).
      config={
        # TODO(#6848): don't make it possible to forget to add the toolchain_variant option!
        'native-build-settings': {
          'toolchain_variant': toolchain_variant,
          'strict_deps': True,
        },
      },
      workdir=os.path.join(buildroot.new_buildroot, '.pants.d'),
      build_root=buildroot.new_buildroot)
    self.assert_failure(pants_binary_strict_deps_failure)
    self.assertIn(self._include_not_found_message_for_variant[toolchain_variant],
                  pants_binary_strict_deps_failure.stdout_data)

    # TODO(#6848): we need to provide the libstdc++.so.6.dylib which comes with gcc on osx in the
    # DYLD_LIBRARY_PATH during the 'run' goal somehow.
    attempt_pants_run = Platform.create().resolve_platform_specific({
      'darwin': lambda: toolchain_variant != 'gnu',
      'linux': lambda: True,
    })
    if attempt_pants_run:
      pants_run_interop = self.run_pants(
        ['-q', 'run', self._binary_target_with_interop],
        config={
          'native-build-settings': {
            'toolchain_variant': toolchain_variant,
            'strict_deps': False,
          },
        })
      self.assert_success(pants_run_interop)
      self.assertEqual('x=3, f(x)=299\n', pants_run_interop.stdout_data)
def _assert_ctypes_binary_creation(self, toolchain_variant):
  with temporary_dir() as tmp_dir:
    pants_run = self.run_pants(
      command=['binary', self._binary_target],
      config={
        GLOBAL_SCOPE_CONFIG_SECTION: {
          'pants_distdir': tmp_dir,
        },
        'native-build-step': {
          'toolchain_variant': toolchain_variant,
        },
      })
    self.assert_success(pants_run)

    # Check that we have selected the appropriate compilers for our selected toolchain variant,
    # for both C and C++ compilation.
    # TODO(#6866): don't parse info logs for testing!
    for compiler_name in self._compiler_names_for_variant[toolchain_variant]:
      self.assertIn("selected compiler exe name: '{}'".format(compiler_name),
                    pants_run.stdout_data)
    for linker_name in self._linker_names_for_variant[toolchain_variant]:
      self.assertIn("selected linker exe name: '{}'".format(linker_name),
                    pants_run.stdout_data)

    # Check for the pex and for the wheel produced for our python_dist().
    pex = os.path.join(tmp_dir, 'bin.pex')
    self.assertTrue(is_executable(pex))

    # The + is because we append the target's fingerprint to the version. We test this version
    # string in test_build_local_python_distributions.py.
    wheel_glob = os.path.join(tmp_dir, 'ctypes_test-0.0.1+*.whl')
    wheel_dist_with_path = assert_single_element(glob.glob(wheel_glob))
    wheel_dist = re.sub('^{}{}'.format(re.escape(tmp_dir), os.path.sep), '', wheel_dist_with_path)

    dist_name, dist_version, wheel_platform = name_and_platform(wheel_dist)
    self.assertEqual(dist_name, 'ctypes_test')
    contains_current_platform = Platform.create().resolve_platform_specific({
      'darwin': lambda: wheel_platform.startswith('macosx'),
      'linux': lambda: wheel_platform.startswith('linux'),
    })
    self.assertTrue(contains_current_platform)

    # Verify that the wheel contains our shared libraries.
    wheel_files = ZipFile(wheel_dist_with_path).namelist()

    dist_versioned_name = '{}-{}.data'.format(dist_name, dist_version)
    for shared_lib_filename in ['libasdf-c_ctypes.so', 'libasdf-cpp_ctypes.so']:
      full_path_in_wheel = os.path.join(dist_versioned_name, 'data', shared_lib_filename)
      self.assertIn(full_path_in_wheel, wheel_files)

    # Execute the binary and ensure its output is correct.
    binary_run_output = invoke_pex_for_output(pex)
    self.assertEqual(b'x=3, f(x)=17\n', binary_run_output)
def setUp(self):
  init_subsystems([LibcDev], options={
    'libc': {
      'enable_libc_search': False,
      'libc_dir': '/does/not/exist',
    },
  })
  self.libc = global_subsystem_instance(LibcDev)
  self.platform = Platform.create()
def setUp(self):
  init_subsystems([LibcDev], options={
    'libc': {
      'enable_libc_search': True,
      'host_compiler': 'this_executable_does_not_exist',
    },
  })
  self.libc = global_subsystem_instance(LibcDev)
  self.platform = Platform.create()
def setUp(self):
  super(TestNativeToolchain, self).setUp()

  init_subsystems([LibcDev, NativeToolchain], options={
    'libc': {
      'enable_libc_search': True,
    },
  })

  self.platform = Platform.create()
  self.toolchain = global_subsystem_instance(NativeToolchain)
  self.rules = native_backend_rules()
def _compile(self, compile_request):
  """Perform the process of compilation, writing object files to the request's 'output_dir'.

  NB: This method must arrange the output files so that `collect_cached_objects()` can collect
  all of the results (or vice versa)!
  """
  sources = compile_request.sources

  if len(sources) == 0:
    # TODO: do we need this log message? Should we still have it for intentionally header-only
    # libraries (that might be a confusing message to see)?
    self.context.log.debug("no sources in request {}, skipping".format(compile_request))
    return

  compiler = compile_request.compiler
  output_dir = compile_request.output_dir

  argv = self._make_compile_argv(compile_request)
  platform = Platform.create()

  with self.context.new_workunit(
      name=self.workunit_label, labels=[WorkUnitLabel.COMPILER]) as workunit:
    try:
      process = subprocess.Popen(
        argv,
        cwd=output_dir,
        stdout=workunit.output('stdout'),
        stderr=workunit.output('stderr'),
        env=compiler.get_invocation_environment_dict(platform))
    except OSError as e:
      workunit.set_outcome(WorkUnit.FAILURE)
      raise self.NativeCompileError(
        "Error invoking '{exe}' with command {cmd} for request {req}: {err}"
        .format(exe=compiler.exe_filename, cmd=argv, req=compile_request, err=e))

    rc = process.wait()
    if rc != 0:
      workunit.set_outcome(WorkUnit.FAILURE)
      raise self.NativeCompileError(
        "Error in '{section_name}' with command {cmd} for request {req}. Exit code was: {rc}."
        .format(section_name=self.workunit_name, cmd=argv, req=compile_request, rc=rc))
def test_ctypes_binary(self):
  with temporary_dir() as tmp_dir:
    pants_run = self.run_pants(
      command=['binary', self._binary_target],
      config={
        GLOBAL_SCOPE_CONFIG_SECTION: {
          'pants_distdir': tmp_dir,
        },
      })
    self.assert_success(pants_run)

    # Check for the pex and for the wheel produced for our python_dist().
    pex = os.path.join(tmp_dir, 'bin.pex')
    self.assertTrue(is_executable(pex))

    # The + is because we append the target's fingerprint to the version. We test this version
    # string in test_build_local_python_distributions.py.
    wheel_glob = os.path.join(tmp_dir, 'ctypes_test-0.0.1+*.whl')
    wheel_dist_with_path = assert_single_element(glob.glob(wheel_glob))
    wheel_dist = re.sub('^{}{}'.format(re.escape(tmp_dir), os.path.sep), '', wheel_dist_with_path)

    dist_name, dist_version, wheel_platform = name_and_platform(wheel_dist)
    self.assertEqual(dist_name, 'ctypes_test')
    contains_current_platform = Platform.create().resolve_platform_specific({
      'darwin': lambda: wheel_platform.startswith('macosx'),
      'linux': lambda: wheel_platform.startswith('linux'),
    })
    self.assertTrue(contains_current_platform)

    # Verify that the wheel contains our shared libraries.
    wheel_files = ZipFile(wheel_dist_with_path).namelist()

    dist_versioned_name = '{}-{}.data'.format(dist_name, dist_version)
    for shared_lib_filename in ['libasdf-c.so', 'libasdf-cpp.so']:
      full_path_in_wheel = os.path.join(dist_versioned_name, 'data', shared_lib_filename)
      self.assertIn(full_path_in_wheel, wheel_files)

    # Execute the binary and ensure its output is correct.
    binary_run_output = invoke_pex_for_output(pex)
    self.assertEqual('x=3, f(x)=17\n', binary_run_output)
def setUp(self):
  super(TestNativeToolchain, self).setUp()

  init_subsystems([LibcDev, NativeToolchain], options={
    'libc': {
      'enable_libc_search': True,
    },
  })

  self.platform = Platform.create()
  self.toolchain = global_subsystem_instance(NativeToolchain)
  self.rules = native_backend_rules()

  gcc_subsystem = global_subsystem_instance(GCC)
  self.gcc_version = gcc_subsystem.version()
  llvm_subsystem = global_subsystem_instance(LLVM)
  self.llvm_version = llvm_subsystem.version()
def _execute_link_request(self, link_request):
  object_files = link_request.object_files

  if len(object_files) == 0:
    raise self.LinkSharedLibrariesError(
      "No object files were provided in request {}!".format(link_request))

  platform = Platform.create()
  linker = link_request.linker
  native_artifact = link_request.native_artifact
  output_dir = link_request.output_dir
  resulting_shared_lib_path = os.path.join(output_dir, native_artifact.as_shared_lib(platform))

  # We are executing in the results_dir, so get absolute paths for everything.
  cmd = ([linker.exe_filename] +
         self._get_shared_lib_cmdline_args(platform) +
         ['-o', os.path.abspath(resulting_shared_lib_path)] +
         [os.path.abspath(obj) for obj in object_files])

  with self.context.new_workunit(
      name='link-shared-libraries', labels=[WorkUnitLabel.LINKER]) as workunit:
    try:
      process = subprocess.Popen(
        cmd,
        cwd=output_dir,
        stdout=workunit.output('stdout'),
        stderr=workunit.output('stderr'),
        env=linker.get_invocation_environment_dict(platform))
    except OSError as e:
      workunit.set_outcome(WorkUnit.FAILURE)
      raise self.LinkSharedLibrariesError(
        "Error invoking the native linker with command {} for request {}: {}"
        .format(cmd, link_request, e), e)

    rc = process.wait()
    if rc != 0:
      workunit.set_outcome(WorkUnit.FAILURE)
      raise self.LinkSharedLibrariesError(
        "Error linking native objects with command {} for request {}. Exit code was: {}."
        .format(cmd, link_request, rc))

  return SharedLibrary(name=native_artifact.lib_name, path=resulting_shared_lib_path)
def test_pants_tests_local_dists_for_current_platform_only(self):
  platform_string = Platform.create().resolve_platform_specific({
    'darwin': lambda: 'macosx-10.12-x86_64',
    'linux': lambda: 'linux-x86_64',
  })

  # Use a platform-specific string for testing because the test goal requires the coverage
  # package and the pex resolver raises an Untranslatable error when attempting to translate
  # the coverage sdist for incompatible platforms.
  pants_ini_config = {'python-setup': {'platforms': [platform_string]}}

  # Clean all to rebuild requirements pex.
  with temporary_dir() as tmp_dir:
    command = [
      '--pants-distdir={}'.format(tmp_dir),
      'clean-all',
      'test',
      '{}:fasthello'.format(self.fasthello_tests),
    ]
    pants_run = self.run_pants(command=command, config=pants_ini_config)
    self.assert_success(pants_run)
def _make_compile_argv(self, compile_request):
  """Return a list of arguments to use to compile sources. Subclasses can override and append."""
  compiler = compile_request.compiler
  err_flags = ['-Werror'] if compile_request.fatal_warnings else []

  platform = Platform.create()
  platform_specific_flags = platform.resolve_platform_specific({
    'linux': lambda: [],
    'darwin': lambda: ['-mmacosx-version-min=10.11'],
  })

  # We are going to execute in the target output, so get absolute paths for everything.
  # TODO: If we need to produce static libs, don't add -fPIC! (could use Variants -- see #5788).
  argv = ([compiler.exe_filename] +
          platform_specific_flags +
          self.extra_compile_args() +
          err_flags +
          ['-c', '-fPIC'] +
          ['-I{}'.format(os.path.abspath(inc_dir)) for inc_dir in compile_request.include_dirs] +
          [os.path.abspath(src) for src in compile_request.sources])

  return argv
def test_binary(self):
  with temporary_dir() as tmp_dir:
    pants_run = self.run_pants(
      command=['binary', self._binary_target],
      config={
        GLOBAL_SCOPE_CONFIG_SECTION: {
          'pants_distdir': tmp_dir,
        },
      })
    self.assert_success(pants_run)

    # Check for the pex and for the wheel produced for our python_dist().
    pex = os.path.join(tmp_dir, 'bin.pex')
    self.assertTrue(is_executable(pex))

    wheel_glob = os.path.join(tmp_dir, 'ctypes_test-0.0.1-*.whl')
    globbed_wheel = glob.glob(wheel_glob)
    self.assertEqual(len(globbed_wheel), 1)
    wheel_dist = globbed_wheel[0]

    _, _, wheel_platform = name_and_platform(wheel_dist)
    contains_current_platform = Platform.create().resolve_platform_specific({
      'darwin': lambda: wheel_platform.startswith('macosx'),
      'linux': lambda: wheel_platform.startswith('linux'),
    })
    self.assertTrue(contains_current_platform)

    # Verify that the wheel contains our shared libraries.
    wheel_files = ZipFile(wheel_dist).namelist()
    for shared_lib_filename in ['libasdf-c.so', 'libasdf-cpp.so']:
      full_path_in_wheel = os.path.join('ctypes_test-0.0.1.data', 'data', shared_lib_filename)
      self.assertIn(full_path_in_wheel, wheel_files)

    # Execute the binary and ensure its output is correct.
    binary_run_output = invoke_pex_for_output(pex)
    self.assertEqual('x=3, f(x)=17\n', binary_run_output)
def wrapper(self, *args, **kwargs):
  if Platform.current == Platform(normalized_os_name):
    test_fn(self, *args, **kwargs)
def _platform(cls):
  return Platform.create()
def platform(self):
  # TODO: convert this to a v2 engine dependency injection.
  return Platform.create()
def wrapper(self, *args, **kwargs):
  # FIXME: This should be drawn from the v2 engine somehow.
  platform = Platform.create()
  if platform.normalized_os_name == normalized_os_name:
    test_fn(self, *args, **kwargs)
def fetch_cmdline_args(self):
  platform = Platform.create()
  conan_os_name = platform.resolve_platform_specific(self.CONAN_OS_NAME)
  args = ['install', self.pkg_spec, '-s', 'os={}'.format(conan_os_name)]
  return args
def wrapper(self, *args, **kwargs):
  # TODO: This should be drawn from the v2 engine somehow.
  platform = Platform.create()
  if platform.normalized_os_name == normalized_os_name:
    test_fn(self, *args, **kwargs)
def setUp(self):
  super(TestNativeToolchain, self).setUp()
  self.platform = Platform.create()
  self.toolchain = global_subsystem_instance(NativeToolchain)
  self.rules = native_backend_rules()
def _current_platform_abbreviation(cls):
  return NativeBackendPlatform.create().resolve_platform_specific({
    'darwin': lambda: 'macosx',
    'linux': lambda: 'linux',
  })