def store_stats(self):
  """Store stats about this run in local and optionally remote stats dbs."""
  stats = {
    'run_info': self.run_info.get_as_dict(),
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all(),
    'outcomes': self.outcomes
  }

  # Dump individual stat file.
  # TODO(benjy): Do we really need these, once the statsdb is mature?
  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  safe_file_dump(stats_file, json.dumps(stats))

  # Add to local stats db.
  StatsDBFactory.global_instance().get_db().insert_stats(stats)

  # Upload to remote stats db.
  stats_url = self.get_options().stats_upload_url
  if stats_url:
    pid = os.fork()
    if pid == 0:
      try:
        self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)
      finally:
        os._exit(0)

  # Write stats to local json file.
  stats_json_file_name = self.get_options().stats_local_json_file
  if stats_json_file_name:
    self.write_stats_to_json(stats_json_file_name, stats)
def test_ctypes_native_language_interop(self, toolchain_variant):
  # TODO: consider making this mock_buildroot/run_pants_with_workdir into a
  # PantsRunIntegrationTest method!
  with self.mock_buildroot(
      dirs_to_copy=[self._binary_interop_target_dir]) as buildroot, buildroot.pushd():
    # Replace strict_deps=False with nothing so we can override it (because target values for
    # this option take precedence over subsystem options).
    orig_wrapped_math_build = read_file(self._wrapped_math_build_file)
    without_strict_deps_wrapped_math_build = re.sub(
      'strict_deps=False,', '', orig_wrapped_math_build)
    safe_file_dump(self._wrapped_math_build_file, without_strict_deps_wrapped_math_build)

    # This should fail because it does not turn on strict_deps for a target which requires it.
    pants_binary_strict_deps_failure = self.run_pants_with_workdir(
      command=['binary', self._binary_target_with_interop],
      # Explicitly set to True (although this is the default).
      config={
        'native-build-step': {
          'toolchain_variant': toolchain_variant.value,
        },
        # TODO(#6848): don't make it possible to forget to add the toolchain_variant option!
        'native-build-settings': {
          'strict_deps': True,
        },
      },
      workdir=os.path.join(buildroot.new_buildroot, '.pants.d'),
      build_root=buildroot.new_buildroot)
    self.assert_failure(pants_binary_strict_deps_failure)
    self.assertIn(
      toolchain_variant.resolve_for_enum_variant({
        'gnu': "fatal error: some_math.h: No such file or directory",
        'llvm': "fatal error: 'some_math.h' file not found",
      }),
      pants_binary_strict_deps_failure.stdout_data)

  # TODO(#6848): we need to provide the libstdc++.so.6.dylib which comes with gcc on osx in the
  # DYLD_LIBRARY_PATH during the 'run' goal somehow.
  attempt_pants_run = Platform.current.resolve_for_enum_variant({
    'darwin': toolchain_variant == ToolchainVariant.llvm,
    'linux': True,
  })
  if attempt_pants_run:
    pants_run_interop = self.run_pants(
      ['-q', 'run', self._binary_target_with_interop],
      config={
        'native-build-step': {
          'toolchain_variant': toolchain_variant.value,
        },
        'native-build-settings': {
          'strict_deps': True,
        },
      })
    self.assert_success(pants_run_interop)
    self.assertEqual('x=3, f(x)=299\n', pants_run_interop.stdout_data)
def _spawn_nailgun_server(self, fingerprint, jvm_options, classpath, stdout, stderr):
  """Synchronously spawn a new nailgun server."""
  # Truncate the nailgun's stdout & stderr.
  safe_file_dump(self._ng_stdout, '')
  safe_file_dump(self._ng_stderr, '')

  jvm_options = jvm_options + [self._PANTS_NG_ARG,
                               self._create_owner_arg(self._workdir),
                               self._create_fingerprint_arg(fingerprint)]
  post_fork_child_opts = dict(fingerprint=fingerprint,
                              jvm_options=jvm_options,
                              classpath=classpath,
                              stdout=stdout,
                              stderr=stderr)

  logger.debug('Spawning nailgun server {i} with fingerprint={f}, jvm_options={j}, classpath={cp}'
               .format(i=self._identity, f=fingerprint, j=jvm_options, cp=classpath))

  self.daemon_spawn(post_fork_child_opts=post_fork_child_opts)

  # Wait for and write the port information in the parent so we can bail on exception/timeout.
  self.await_pid(self._connect_timeout)
  self.write_socket(self._await_socket(self._connect_timeout))

  logger.debug('Spawned nailgun server {i} with fingerprint={f}, pid={pid} port={port}'
               .format(i=self._identity, f=fingerprint, pid=self.pid, port=self.socket))

  client = self._create_ngclient(self.socket, stdout, stderr)
  self.ensure_connectable(client)
  return client
def harness():
  try:
    for name, content in BUILD_FILES.items():
      safe_file_dump(name, dedent(content), binary_mode=False)
    yield
  finally:
    safe_rmtree(SUBPROJ_SPEC)
def test_poll(self):
  with temporary_dir() as dir:
    class TestPantsHandler(PantsHandler):
      def __init__(self, request, client_address, server):
        # TODO(6071): BaseHTTPServer.BaseHTTPRequestHandler is an old-style class, so we must
        # invoke its __init__ like this.
        # This will become unnecessary when we no longer support python2.
        PantsHandler.__init__(
          self,
          settings=ReportingServer.Settings(
            info_dir=dir,
            template_dir=dir,
            assets_dir=dir,
            root=dir,
            allowed_clients=['ALL'],
          ),
          renderer=None,
          request=request,
          client_address=client_address,
          server=server,
        )

    safe_file_dump(os.path.join(dir, "file"), "hello")
    with http_server(TestPantsHandler) as port:
      response = requests.get("http://127.0.0.1:{}/poll?{}".format(
        port,
        urlencode({"q": json.dumps([{"id": "0", "path": "file"}])}),
      ))
    self.assertEqual(response.json(), {"0": "hello"})
def harness():
  try:
    for name, content in BUILD_FILES.items():
      safe_file_dump(name, dedent(content))
    yield
  finally:
    safe_rmtree(SUBPROJ_SPEC)
def rewrite_record_file(workspace, src_record_file, mutated_file_tuples):
  """Given a RECORD file and list of mutated file tuples, update the RECORD file in place.

  The RECORD file should always be a member of the mutated files, due to both containing
  versions, and having a version in its filename.
  """
  mutated_files = set()
  dst_record_file = None
  for src, dst in mutated_file_tuples:
    if src == src_record_file:
      dst_record_file = dst
    else:
      mutated_files.add(dst)
  if not dst_record_file:
    raise Exception('Malformed whl or bad globs: `{}` was not rewritten.'.format(src_record_file))

  output_records = []
  file_name = os.path.join(workspace, dst_record_file)
  for line in read_file(file_name).splitlines():
    filename, fingerprint_str, size_str = line.rsplit(',', 3)
    if filename in mutated_files:
      fingerprint_str, size_str = fingerprint_file(workspace, filename)
      output_line = ','.join((filename, fingerprint_str, size_str))
    else:
      output_line = line
    output_records.append(output_line)

  safe_file_dump(file_name, '\r\n'.join(output_records) + '\r\n')
def store_stats(self):
  """Store stats about this run in local and optionally remote stats dbs."""
  run_information = self.run_info.get_as_dict()
  target_data = run_information.get('target_data', None)
  if target_data:
    run_information['target_data'] = ast.literal_eval(target_data)

  stats = {
    'run_info': run_information,
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'critical_path_timings': self.get_critical_path_timings().get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all(),
    'pantsd_stats': self.pantsd_stats.get_all(),
    'outcomes': self.outcomes
  }

  # Dump individual stat file.
  # TODO(benjy): Do we really need these, once the statsdb is mature?
  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  binary_mode = False if PY3 else True
  safe_file_dump(stats_file, json.dumps(stats), binary_mode=binary_mode)

  # Add to local stats db.
  StatsDBFactory.global_instance().get_db().insert_stats(stats)

  # Upload to remote stats db.
  stats_url = self.get_options().stats_upload_url
  if stats_url:
    self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)

  # Write stats to local json file.
  stats_json_file_name = self.get_options().stats_local_json_file
  if stats_json_file_name:
    self.write_stats_to_json(stats_json_file_name, stats)
def store_stats(self):
  """Store stats about this run in local and optionally remote stats dbs."""
  stats = {
    'run_info': self.run_info.get_as_dict(),
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all()
  }

  # Dump individual stat file.
  # TODO(benjy): Do we really need these, once the statsdb is mature?
  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  safe_file_dump(stats_file, json.dumps(stats))

  # Add to local stats db.
  StatsDBFactory.global_instance().get_db().insert_stats(stats)

  # Upload to remote stats db.
  stats_url = self.get_options().stats_upload_url
  if stats_url:
    self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)

  # Write stats to local json file.
  stats_json_file_name = self.get_options().stats_local_json_file
  if stats_json_file_name:
    self.write_stats_to_json(stats_json_file_name, stats)
def setUp(self):
  """Prepare targets, context, runtime classpath."""
  super(TestBundleCreate, self).setUp()

  self.jar_artifact = self.create_artifact(org='org.example', name='foo', rev='1.0.0')
  self.zip_artifact = self.create_artifact(org='org.pantsbuild', name='bar', rev='2.0.0',
                                           ext='zip')
  self.bundle_artifact = self.create_artifact(org='org.apache', name='baz', rev='3.0.0',
                                              classifier='tests')
  self.tar_gz_artifact = self.create_artifact(org='org.gnu', name='gary', rev='4.0.0',
                                              ext='tar.gz')

  self.jar_lib = self.make_target(spec='3rdparty/jvm/org/example:foo',
                                  target_type=JarLibrary,
                                  jars=[JarDependency(org='org.example', name='foo', rev='1.0.0'),
                                        JarDependency(org='org.pantsbuild', name='bar',
                                                      rev='2.0.0', ext='zip'),
                                        JarDependency(org='org.apache', name='baz', rev='3.0.0',
                                                      classifier='tests'),
                                        JarDependency(org='org.gnu', name='gary', rev='4.0.0',
                                                      ext='tar.gz')])

  # This is so that payload fingerprint can be computed.
  safe_file_dump(os.path.join(self.build_root, 'foo/Foo.java'), '// dummy content')

  self.binary_target = self.make_target(spec='//foo:foo-binary',
                                        target_type=JvmBinary,
                                        source='Foo.java',
                                        dependencies=[self.jar_lib])

  self.app_target = self.make_target(spec='//foo:foo-app',
                                     target_type=JvmApp,
                                     basename='FooApp',
                                     dependencies=[self.binary_target])

  self.task_context = self.context(target_roots=[self.app_target])
  self._setup_classpath(self.task_context)
def test_sigint_kills_request_waiting_for_lock(self):
    """Test that, when a pailgun request is blocked waiting for another one to end, sending
    SIGINT to the blocked run will kill it."""
    config = {"GLOBAL": {"pantsd_timeout_when_multiple_invocations": -1, "level": "debug"}}
    with self.pantsd_test_context(extra_config=config) as (workdir, config, checker):
        # Run a process that will wait forever.
        first_run_handle, _, _, file_to_create = launch_waiter(workdir=workdir, config=config)

        checker.assert_started()
        checker.assert_running()

        # And another that will block on the first.
        blocking_run_handle = self.run_pants_with_workdir_without_waiting(
            command=["goals"], workdir=workdir, config=config
        )

        # Block until the second request is waiting for the lock.
        time.sleep(10)

        # Sends SIGINT to the run that is waiting.
        blocking_run_client_pid = blocking_run_handle.process.pid
        os.kill(blocking_run_client_pid, signal.SIGINT)
        blocking_run_handle.join()

        # Check that pantsd is still serving the other request.
        checker.assert_running()

        # Exit the waiting run by writing the file it is waiting for, and confirm that it
        # exited, and that pantsd is still running.
        safe_file_dump(file_to_create, "content!")
        result = first_run_handle.join()
        result.assert_success()
        checker.assert_running()
def test_dependencies_swap(self):
    template = dedent(
        """
        python_source(
          name='A',
          source='A.py',
          {a_deps}
        )

        python_source(
          name='B',
          source='B.py',
          {b_deps}
        )
        """
    )
    with self.pantsd_successful_run_context() as ctx, temporary_dir(".") as directory:
        safe_file_dump(os.path.join(directory, "A.py"), mode="w")
        safe_file_dump(os.path.join(directory, "B.py"), mode="w")

        if directory.startswith("./"):
            directory = directory[2:]

        def list_and_verify(a_deps: str, b_deps: str) -> None:
            Path(directory, "BUILD").write_text(template.format(a_deps=a_deps, b_deps=b_deps))
            result = ctx.runner(["list", f"{directory}:"])
            ctx.checker.assert_started()
            result.assert_success()
            expected_targets = {f"{directory}:{target}" for target in ("A", "B")}
            assert expected_targets == set(result.stdout.strip().split("\n"))

        list_and_verify(a_deps='dependencies = [":B"],', b_deps="")
        list_and_verify(a_deps="", b_deps='dependencies = [":A"],')
def _spawn_nailgun_server(self, fingerprint, jvm_options, classpath, stdout, stderr, stdin):
  """Synchronously spawn a new nailgun server."""
  # Truncate the nailgun's stdout & stderr.
  safe_file_dump(self._ng_stdout, '')
  safe_file_dump(self._ng_stderr, '')

  jvm_options = jvm_options + [self._PANTS_NG_BUILDROOT_ARG,
                               self._create_owner_arg(self._workdir),
                               self._create_fingerprint_arg(fingerprint)]
  post_fork_child_opts = dict(fingerprint=fingerprint,
                              jvm_options=jvm_options,
                              classpath=classpath,
                              stdout=stdout,
                              stderr=stderr)

  logger.debug('Spawning nailgun server {i} with fingerprint={f}, jvm_options={j}, classpath={cp}'
               .format(i=self._identity, f=fingerprint, j=jvm_options, cp=classpath))

  self.daemon_spawn(post_fork_child_opts=post_fork_child_opts)

  # Wait for and write the port information in the parent so we can bail on exception/timeout.
  self.await_pid(self._connect_timeout)
  self.write_socket(self._await_socket(self._connect_timeout))

  logger.debug('Spawned nailgun server {i} with fingerprint={f}, pid={pid} port={port}'
               .format(i=self._identity, f=fingerprint, pid=self.pid, port=self.socket))

  client = self._create_ngclient(self.socket, stdout, stderr, stdin)
  self.ensure_connectable(client)
  return client
def test_poll(self):
    with temporary_dir() as dir:
        class TestPantsHandler(PantsHandler):
            def __init__(self, request, client_address, server):
                super().__init__(
                    settings=ReportingServer.Settings(
                        info_dir=dir,
                        template_dir=dir,
                        assets_dir=dir,
                        root=dir,
                        allowed_clients=['ALL'],
                    ),
                    renderer=None,
                    request=request,
                    client_address=client_address,
                    server=server,
                )

        safe_file_dump(os.path.join(dir, "file"), "hello")
        with http_server(TestPantsHandler) as port:
            response = requests.get("http://127.0.0.1:{}/poll?{}".format(
                port,
                urlencode({"q": json.dumps([{"id": "0", "path": "file"}])}),
            ))
        self.assertEqual(response.json(), {"0": "hello"})
def harness() -> Iterator[None]:
    try:
        for name, content in BUILD_FILES.items():
            safe_file_dump(name, content)
        yield
    finally:
        safe_rmtree(SUBPROJ_SPEC)
def test_hackily_snapshot(self):
    with temporary_dir() as temp_dir:
        safe_file_dump(
            os.path.join(
                temp_dir,
                "bin",
                DefaultVersion.name,
                DefaultVersion.default_version,
                DefaultVersion.name,
            ),
            "content!",
        )
        context = self.context(
            for_subsystems=[DefaultVersion],
            options={GLOBAL_SCOPE: {"binaries_baseurls": [f"file:///{temp_dir}"]}},
        )
        self.maxDiff = None
        default_version_tool = DefaultVersion.global_instance()
        _, snapshot = default_version_tool.hackily_snapshot(context)
        self.assertEqual(
            "51a98706ab7458069aabe01856cb352ca97686e3edd3bf9ebd3205c2b38b2974",
            snapshot.directory_digest.fingerprint,
        )
def rewrite_record_file(workspace, src_record_file, mutated_file_tuples):
    """Given a RECORD file and list of mutated file tuples, update the RECORD file in place.

    The RECORD file should always be a member of the mutated files, due to both containing
    versions, and having a version in its filename.
    """
    mutated_files = set()
    dst_record_file = None
    for src, dst in mutated_file_tuples:
        if src == src_record_file:
            dst_record_file = dst
        else:
            mutated_files.add(dst)
    if not dst_record_file:
        raise Exception(
            "Malformed whl or bad globs: `{}` was not rewritten.".format(src_record_file)
        )

    output_records = []
    file_name = os.path.join(workspace, dst_record_file)
    for line in read_file(file_name).splitlines():
        filename, fingerprint_str, size_str = line.rsplit(",", 3)
        if filename in mutated_files:
            fingerprint_str, size_str = fingerprint_file(workspace, filename)
            output_line = ",".join((filename, fingerprint_str, size_str))
        else:
            output_line = line
        output_records.append(output_line)

    safe_file_dump(file_name, "\r\n".join(output_records) + "\r\n")
def test_wait_for_file(self):
    with temporary_dir() as td:
        test_filename = os.path.join(td, "test.out")
        safe_file_dump(test_filename, "test")
        self.pmm._wait_for_file(
            test_filename, "file to be created", "file was created", timeout=0.1
        )
def assert_dump_and_read(self, test_content, dump_kwargs, read_kwargs):
  with temporary_dir() as td:
    test_filename = os.path.join(td, 'test.out')
    # TODO(#7121): remove all deprecated usages of `binary_mode` and `mode` arguments to
    # safe_file_dump() in this file when the deprecation period is over!
    safe_file_dump(test_filename, test_content, **dump_kwargs)
    self.assertEqual(read_file(test_filename, **read_kwargs), test_content)
def write_stats_to_json(cls, file_name, stats):
  """Write stats to a local json file."""
  params = cls._json_dump_options(stats)
  mode = 'w' if PY3 else 'wb'
  try:
    safe_file_dump(file_name, params, mode=mode)
  except Exception as e:
    # Broad catch - we don't want to fail in stats related failure.
    print('WARNING: Failed to write stats to {} due to Error: {}'.format(file_name, e),
          file=sys.stderr)
def write_stats_to_json(cls, file_name: str, stats: dict) -> None:
  """Write stats to a local json file."""
  params = cls._json_dump_options(stats)
  try:
    safe_file_dump(file_name, params, mode='w')
  except Exception as e:
    # Broad catch - we don't want to fail in stats related failure.
    print(f'WARNING: Failed to write stats to {file_name} due to Error: {e!r}',
          file=sys.stderr)
def write_metadata_by_name(self, metadata_key, metadata_value) -> None:
    """Write process metadata using a named identity.

    :param string metadata_key: The metadata key (e.g. 'pid').
    :param string metadata_value: The metadata value (e.g. '1729').
    """
    safe_mkdir(self._get_metadata_dir_by_name(self.name, self._metadata_base_dir))
    file_path = self._metadata_file_path(metadata_key)
    safe_file_dump(file_path, metadata_value)
def _conan_user_home(self, conan, in_workdir=False):
  """Create the CONAN_USER_HOME for this task fingerprint and initialize the Conan remotes.

  See https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install
  for docs on configuring remotes.
  """
  # This argument is exposed so tests don't leak out of the workdir.
  if in_workdir:
    base_cache_dir = self.workdir
  else:
    base_cache_dir = get_pants_cachedir()
  user_home_base = os.path.join(base_cache_dir, 'conan-support', 'conan-user-home')
  # Locate the subdirectory of the pants shared cachedir specific to this task's option values.
  user_home = os.path.join(user_home_base, self.fingerprint)
  conan_install_base = os.path.join(user_home, '.conan')
  # Conan doesn't copy remotes.txt into the .conan subdir after the "config install" command, it
  # simply edits registry.json. However, it is valid to have this file there, and Conan won't
  # touch it, so we use its presence to detect whether we have appropriately initialized the
  # Conan installation.
  remotes_txt_sentinel = os.path.join(conan_install_base, 'remotes.txt')
  if not os.path.isfile(remotes_txt_sentinel):
    safe_mkdir(conan_install_base)
    # Conan doesn't consume the remotes.txt file just by being in the conan directory -- we need
    # to create another directory containing any selection of files detailed in
    # https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install
    # and "install" from there to our desired conan directory.
    with temporary_dir() as remotes_install_dir:
      # Create an artificial conan configuration dir containing just remotes.txt.
      remotes_txt_for_install = os.path.join(remotes_install_dir, 'remotes.txt')
      safe_file_dump(remotes_txt_for_install, self._remotes_txt_content, mode='w')
      # Configure the desired user home from this artificial config dir.
      argv = ['config', 'install', remotes_install_dir]
      workunit_factory = functools.partial(
        self.context.new_workunit,
        name='initial-conan-config',
        labels=[WorkUnitLabel.TOOL])
      env = {
        'CONAN_USER_HOME': user_home,
      }
      cmdline, exit_code = conan.run(workunit_factory, argv, env=env)
      if exit_code != 0:
        raise self.ConanConfigError(
          'Error configuring conan with argv {} and environment {}: exited non-zero ({}).'
          .format(cmdline, env, exit_code),
          exit_code=exit_code)
    # Generate the sentinel file so that we know the remotes have been successfully configured
    # for this particular task fingerprint in successive pants runs.
    safe_file_dump(remotes_txt_sentinel, self._remotes_txt_content, mode='w')

  return user_home
def make_snapshot(self, files: Mapping[str, str | bytes]) -> Snapshot:
    """Makes a snapshot from a map of file name to file content."""
    with temporary_dir() as temp_dir:
        for file_name, content in files.items():
            mode = "wb" if isinstance(content, bytes) else "w"
            safe_file_dump(os.path.join(temp_dir, file_name), content, mode=mode)
        return self.scheduler.capture_snapshots(
            (PathGlobsAndRoot(PathGlobs(("**",)), temp_dir),)
        )[0]
def write_metadata_by_name(self, name, metadata_key, metadata_value):
  """Write process metadata using a named identity.

  :param string name: The ProcessMetadataManager identity/name (e.g. 'pantsd').
  :param string metadata_key: The metadata key (e.g. 'pid').
  :param string metadata_value: The metadata value (e.g. '1729').
  """
  self._maybe_init_metadata_dir_by_name(name)
  file_path = os.path.join(self._get_metadata_dir_by_name(name), metadata_key)
  safe_file_dump(file_path, metadata_value)
def write_metadata_by_name(self, name, metadata_key, metadata_value):
  """Write process metadata using a named identity.

  :param string name: The ProcessMetadataManager identity/name (e.g. 'pantsd').
  :param string metadata_key: The metadata key (e.g. 'pid').
  :param string metadata_value: The metadata value (e.g. '1729').
  """
  self._maybe_init_metadata_dir_by_name(name)
  file_path = self._metadata_file_path(name, metadata_key)
  safe_file_dump(file_path, metadata_value)
def add_to_runtime_classpath(self, context, tgt, files_dict):
  """Creates and adds the given files to the classpath for the given target under a temp path."""
  compile_classpath = context.products.get_data('runtime_classpath', ClasspathProducts)
  # Create a temporary directory under the target id, then dump all files.
  target_dir = os.path.join(self.test_workdir, tgt.id)
  safe_mkdir(target_dir)
  classpath_dir = safe_mkdtemp(dir=target_dir)
  for rel_path, content in files_dict.items():
    safe_file_dump(os.path.join(classpath_dir, rel_path), content)
  # Add to the classpath.
  compile_classpath.add_for_target(tgt, [('default', classpath_dir)])
def make_snapshot(self, files):
  """Makes a snapshot from a collection of files.

  :param files: a dictionary, where key=filename, value=file_content where both are of type String.
  :return: a Snapshot.
  """
  with temporary_dir() as temp_dir:
    for file_name, content in files.items():
      safe_file_dump(os.path.join(temp_dir, file_name), content)
    return self.scheduler.capture_snapshots(
      (PathGlobsAndRoot(PathGlobs(('**',)), temp_dir),))[0]
def test_request_classes_by_source(self):
    """`classes_by_source` is expensive to compute: confirm that it is only computed when
    needed."""
    # Class names (with and without a method name) should not trigger.
    self.assertFalse(JUnitRun.request_classes_by_source(["com.goo.ber"]))
    self.assertFalse(JUnitRun.request_classes_by_source(["com.goo.ber#method"]))
    # Existing files (with and without the method name) should trigger.
    srcfile = os.path.join(self.test_workdir, "this.is.a.source.file.scala")
    safe_file_dump(srcfile, "content!")
    self.assertTrue(JUnitRun.request_classes_by_source([srcfile]))
    self.assertTrue(JUnitRun.request_classes_by_source(["{}#method".format(srcfile)]))
def make_snapshot(self, files):
  """Makes a snapshot from a collection of files.

  :param files: a dictionary, where key=filename, value=file_content where both are of type String.
  :return: a Snapshot.
  """
  with temporary_dir() as temp_dir:
    for file_name, content in files.items():
      safe_file_dump(os.path.join(temp_dir, file_name), content)
    return self.scheduler.capture_snapshots((
      PathGlobsAndRoot(PathGlobs(('**',)), text_type(temp_dir)),
    ))[0]
def _snapshot_resources(resources, prefix='.'):
  with temporary_dir() as root_dir:
    for filename, filecontent in resources.items():
      safe_file_dump(os.path.join(os.path.join(root_dir, prefix), filename), filecontent)

    extra_resources_relative_to_rootdir = {os.path.join(prefix, k): v
                                           for k, v in resources.items()}
    snapshot, = self.context._scheduler.capture_snapshots([
      PathGlobsAndRoot(PathGlobs(extra_resources_relative_to_rootdir), root_dir)
    ])
  return snapshot.directory_digest
def test_request_classes_by_source(self):
  """`classes_by_source` is expensive to compute: confirm that it is only computed when needed."""
  # Class names (with and without a method name) should not trigger.
  self.assertFalse(JUnitRun.request_classes_by_source(['com.goo.ber']))
  self.assertFalse(JUnitRun.request_classes_by_source(['com.goo.ber#method']))
  # Existing files (with and without the method name) should trigger.
  srcfile = os.path.join(self.test_workdir, 'this.is.a.source.file.scala')
  safe_file_dump(srcfile, 'content!', mode='w')
  self.assertTrue(JUnitRun.request_classes_by_source([srcfile]))
  self.assertTrue(JUnitRun.request_classes_by_source(['{}#method'.format(srcfile)]))
def test_ctypes_native_language_interop(self, toolchain_variant):
  # TODO: consider making this mock_buildroot/run_pants_with_workdir into a
  # PantsRunIntegrationTest method!
  with self.mock_buildroot(
      dirs_to_copy=[self._binary_interop_target_dir]) as buildroot, buildroot.pushd():
    # Replace strict_deps=False with nothing so we can override it (because target values for
    # this option take precedence over subsystem options).
    orig_wrapped_math_build = read_file(self._wrapped_math_build_file)
    without_strict_deps_wrapped_math_build = re.sub(
      'strict_deps=False,', '', orig_wrapped_math_build)
    safe_file_dump(self._wrapped_math_build_file, without_strict_deps_wrapped_math_build)

    # This should fail because it does not turn on strict_deps for a target which requires it.
    pants_binary_strict_deps_failure = self.run_pants_with_workdir(
      command=['binary', self._binary_target_with_interop],
      # Explicitly set to True (although this is the default).
      config={
        'native-build-step': {
          'toolchain_variant': toolchain_variant.value,
        },
        # TODO(#6848): don't make it possible to forget to add the toolchain_variant option!
        'native-build-settings': {
          'strict_deps': True,
        },
      },
      workdir=os.path.join(buildroot.new_buildroot, '.pants.d'),
      build_root=buildroot.new_buildroot)
    self.assert_failure(pants_binary_strict_deps_failure)
    self.assertIn(toolchain_variant.resolve_for_enum_variant({
      'gnu': "fatal error: some_math.h: No such file or directory",
      'llvm': "fatal error: 'some_math.h' file not found",
    }), pants_binary_strict_deps_failure.stdout_data)

  # TODO(#6848): we need to provide the libstdc++.so.6.dylib which comes with gcc on osx in the
  # DYLD_LIBRARY_PATH during the 'run' goal somehow.
  attempt_pants_run = Platform.current.resolve_for_enum_variant({
    'darwin': toolchain_variant == ToolchainVariant.llvm,
    'linux': True,
  })
  if attempt_pants_run:
    pants_run_interop = self.run_pants(['-q', 'run', self._binary_target_with_interop],
                                       config={
                                         'native-build-step': {
                                           'toolchain_variant': toolchain_variant.value,
                                         },
                                         'native-build-settings': {
                                           'strict_deps': True,
                                         },
                                       })
    self.assert_success(pants_run_interop)
    self.assertEqual('x=3, f(x)=299\n', pants_run_interop.stdout_data)
def log_exception(cls, msg):
  try:
    pid = os.getpid()
    fatal_error_log_entry = cls._format_exception_message(msg, pid)
    # We care more about this log than the shared log, so completely write to it first. This
    # avoids any errors with concurrent modification of the shared log affecting the per-pid log.
    safe_file_dump(cls.exceptions_log_path(for_pid=pid), fatal_error_log_entry, mode='w')
    # TODO: we should probably guard this against concurrent modification somehow.
    safe_file_dump(cls.exceptions_log_path(), fatal_error_log_entry, mode='a')
  except Exception as e:
    # TODO: If there is an error in writing to the exceptions log, we may want to consider trying
    # to write to another location (e.g. the cwd, if that is not already the destination).
    logger.error('Problem logging original exception: {}'.format(e))
def test_added_files_correctly_detected(self):
    get_added_files_script = 'build-support/bin/get_added_files.sh'
    with self._create_tiny_git_repo(copy_files=[Path(get_added_files_script)]) as (
        git,
        worktree,
        _,
    ):
        # Create a new file.
        new_file = os.path.join(worktree, 'wow.txt')
        safe_file_dump(new_file, '')
        # Stage the file.
        rel_new_file = os.path.relpath(new_file, worktree)
        git.add(rel_new_file)
        self._assert_subprocess_success_with_output(
            worktree,
            [get_added_files_script],
            # This should be the only entry in the index, and it is a newly added file.
            full_expected_output=f"{rel_new_file}\n",
        )
def test_added_files_correctly_detected(self):
  get_added_files_script = os.path.join(self.pants_repo_root,
                                        'build-support/bin/get_added_files.sh')
  with self._create_tiny_git_repo() as (git, worktree, _):
    # Create a new file.
    new_file = os.path.join(worktree, 'wow.txt')
    safe_file_dump(new_file, '')
    # Stage the file.
    rel_new_file = os.path.relpath(new_file, worktree)
    git.add(rel_new_file)
    self._assert_subprocess_success_with_output(
      worktree, [get_added_files_script],
      # This should be the only entry in the index, and it is a newly added file.
      full_expected_output="{}\n".format(rel_new_file))
def add_to_runtime_classpath(self, context, tgt, files_dict):
    """Creates and adds the given files to the classpath for the given target under a temp path.

    :API: public
    """
    runtime_classpath = self.get_runtime_classpath(context)
    # Create a temporary directory under the target id, then dump all files.
    target_dir = os.path.join(self.test_workdir, tgt.id)
    safe_mkdir(target_dir)
    classpath_dir = safe_mkdtemp(dir=target_dir)
    for rel_path, content in files_dict.items():
        safe_file_dump(os.path.join(classpath_dir, rel_path), content)
    # Add to the classpath.
    runtime_classpath.add_for_target(tgt, [("default", classpath_dir)])
def upload_stats(self):
  """Write stats to local cache, and upload to server, if needed."""
  stats = {
    'run_info': self.run_info.get_as_dict(),
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all()
  }

  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  safe_file_dump(stats_file, json.dumps(stats))

  stats_url = self.get_options().stats_upload_url
  if stats_url:
    self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)
def replace_in_file(workspace, src_file_path, from_str, to_str):
  """Replace from_str with to_str in the name and content of the given file.

  If any edits were necessary, returns the new filename (which may be the same as the old
  filename).
  """
  from_bytes = from_str.encode('ascii')
  to_bytes = to_str.encode('ascii')
  data = read_file(os.path.join(workspace, src_file_path))
  if from_bytes not in data and from_str not in src_file_path:
    return None

  dst_file_path = src_file_path.replace(from_str, to_str)
  safe_file_dump(os.path.join(workspace, dst_file_path), data.replace(from_bytes, to_bytes))
  if src_file_path != dst_file_path:
    os.unlink(os.path.join(workspace, src_file_path))
  return dst_file_path
def test_dependencies_swap(self):
    template = dedent("""
        python_library(
          name = 'A',
          source = 'A.py',
          {a_deps}
        )

        python_library(
          name = 'B',
          source = 'B.py',
          {b_deps}
        )
        """)
    with self.pantsd_successful_run_context() as (pantsd_run, checker, _, _):
        with temporary_dir(".") as directory:
            safe_file_dump(os.path.join(directory, "A.py"), mode="w")
            safe_file_dump(os.path.join(directory, "B.py"), mode="w")

            if directory.startswith("./"):
                directory = directory[2:]

            def list_and_verify():
                result = pantsd_run(["list", f"{directory}:"])
                checker.assert_started()
                self.assert_success(result)
                expected_targets = {f"{directory}:{target}" for target in ("A", "B")}
                self.assertEqual(expected_targets, set(result.stdout_data.strip().split("\n")))

            with open(os.path.join(directory, "BUILD"), "w") as f:
                f.write(template.format(a_deps='dependencies = [":B"],', b_deps=""))
            list_and_verify()

            with open(os.path.join(directory, "BUILD"), "w") as f:
                f.write(template.format(a_deps="", b_deps='dependencies = [":A"],'))
            list_and_verify()
def test_dependencies_swap(self):
  template = dedent("""
    python_library(
      name = 'A',
      source = 'A.py',
      {a_deps}
    )

    python_library(
      name = 'B',
      source = 'B.py',
      {b_deps}
    )
    """)
  with self.pantsd_successful_run_context() as (pantsd_run, checker, _, _):
    with temporary_dir('.') as directory:
      safe_file_dump(os.path.join(directory, 'A.py'), mode='w')
      safe_file_dump(os.path.join(directory, 'B.py'), mode='w')

      if directory.startswith('./'):
        directory = directory[2:]

      def list_and_verify():
        result = pantsd_run(['list', '{}:'.format(directory)])
        checker.assert_started()
        self.assert_success(result)
        expected_targets = {'{}:{}'.format(directory, target) for target in ('A', 'B')}
        self.assertEqual(expected_targets, set(result.stdout_data.strip().split('\n')))

      with open(os.path.join(directory, 'BUILD'), 'w') as f:
        f.write(template.format(a_deps='dependencies = [":B"],', b_deps=''))
      list_and_verify()

      with open(os.path.join(directory, 'BUILD'), 'w') as f:
        f.write(template.format(a_deps='', b_deps='dependencies = [":A"],'))
      list_and_verify()
def test_select_argv(self):
  """Test invoking binary_util.py as a standalone script."""
  with temporary_dir() as tmp_dir:
    config_file_loc = os.path.join(tmp_dir, 'pants.ini')
    safe_file_dump(config_file_loc, payload="""\
[GLOBAL]
allow_external_binary_tool_downloads: True
pants_bootstrapdir: {}
""".format(tmp_dir))
    expected_output_glob = os.path.join(
      tmp_dir, 'bin', 'cmake', '*', '*', '3.9.5', 'cmake')
    with environment_as(PANTS_CONFIG_FILES='[{!r}]'.format(config_file_loc)):
      # Ignore the first argument, as per sys.argv.
      output_file = select(['_', 'cmake', '3.9.5', 'cmake.tar.gz'])
    self.assertTrue(is_readable_dir(output_file))
    realized_glob = assert_single_element(glob.glob(expected_output_glob))
    self.assertEqual(os.path.realpath(output_file), os.path.realpath(realized_glob))
def test_pantsd_parse_exception_success(self):
  # This test covers the case described in #6426, where a run that is failing fast due to an
  # exception can race other completing work. We expect all runs to fail due to the error
  # that has been introduced, but none of them should hang.
  test_path = 'testprojects/3rdparty/this_is_definitely_not_a_valid_directory'
  test_build_file = os.path.join(test_path, 'BUILD')
  invalid_symbol = 'this_is_definitely_not_a_valid_symbol'

  try:
    safe_mkdir(test_path, clean=True)
    safe_file_dump(test_build_file, "{}()".format(invalid_symbol))
    for _ in range(3):
      with self.pantsd_run_context(success=False) as (pantsd_run, checker, _, _):
        result = pantsd_run(['list', 'testprojects::'])
        checker.assert_started()
        self.assertIn(invalid_symbol, result.stderr_data)
  finally:
    rm_rf(test_path)
def test_reset_interactive_output_stream(self):
  """Test redirecting the terminal output stream to a separate file."""
  lifecycle_stub_cmdline = self._lifecycle_stub_cmdline()

  failing_pants_run = self.run_pants(lifecycle_stub_cmdline)
  self.assert_failure(failing_pants_run)
  self.assertIn('erroneous!', failing_pants_run.stderr_data)

  with temporary_dir() as tmpdir:
    some_file = os.path.join(tmpdir, 'some_file')
    safe_file_dump(some_file, '')
    redirected_pants_run = self.run_pants([
      "--lifecycle-stubs-new-interactive-stream-output-file={}".format(some_file),
    ] + lifecycle_stub_cmdline)
    self.assert_failure(redirected_pants_run)
    # The Exiter prints the final error message to whatever the interactive output stream is set
    # to, so when it's redirected it won't be in stderr.
    self.assertNotIn('erroneous!', redirected_pants_run.stderr_data)
    self.assertIn('erroneous!', read_file(some_file))
def setUp(self):
  """Prepare targets, context, runtime classpath."""
  super(TestConsolidateClasspath, self).setUp()
  self.task = self.prepare_execute(self.context())

  self.jar_artifact = self.create_artifact(org='org.example', name='foo', rev='1.0.0')
  self.zip_artifact = self.create_artifact(org='org.pantsbuild', name='bar', rev='2.0.0',
                                           ext='zip')
  self.bundle_artifact = self.create_artifact(org='org.apache', name='baz', rev='3.0.0',
                                              classifier='tests')
  self.tar_gz_artifact = self.create_artifact(org='org.gnu', name='gary', rev='4.0.0',
                                              ext='tar.gz')

  self.jar_lib = self.make_target(spec='3rdparty/jvm/org/example:foo',
                                  target_type=JarLibrary,
                                  jars=[JarDependency(org='org.example', name='foo', rev='1.0.0'),
                                        JarDependency(org='org.pantsbuild', name='bar',
                                                      rev='2.0.0', ext='zip'),
                                        JarDependency(org='org.apache', name='baz', rev='3.0.0',
                                                      classifier='tests'),
                                        JarDependency(org='org.gnu', name='gary', rev='4.0.0',
                                                      ext='tar.gz')])

  safe_file_dump(os.path.join(self.build_root, 'resources/foo/file'), '// dummy content')
  self.resources_target = self.make_target('//resources:foo-resources', Resources,
                                           sources=['foo/file'])

  # This is so that payload fingerprint can be computed.
  safe_file_dump(os.path.join(self.build_root, 'foo/Foo.java'), '// dummy content')
  self.java_lib_target = self.make_target('//foo:foo-library', JavaLibrary, sources=['Foo.java'])

  self.binary_target = self.make_target(spec='//foo:foo-binary',
                                        target_type=JvmBinary,
                                        dependencies=[
                                          self.java_lib_target,
                                          self.jar_lib,
                                          self.resources_target,
                                        ])

  self.dist_root = os.path.join(self.build_root, 'dist')
def add_consolidated_bundle(self, context, tgt, files_dict):
  """Add a bundle to the classpath as if it has been consolidated already."""
  consolidated_classpath = context.products.get_data(
    'consolidated_classpath',
    init_func=ClasspathProducts.init_func(self.pants_workdir)
  )
  # Create a temporary directory under the target id, then dump all files.
  target_dir = os.path.join(self.test_workdir, tgt.id)
  safe_mkdir(target_dir)
  entry_path = safe_mkdtemp(dir=target_dir)
  classpath_dir = safe_mkdtemp(dir=target_dir)
  for rel_path, content in files_dict.items():
    safe_file_dump(os.path.join(entry_path, rel_path), content)

  # Create Jar to mimic consolidate classpath behavior.
  jarpath = os.path.join(classpath_dir, 'output-0.jar')
  with self.task.open_jar(jarpath, overwrite=True, compressed=False) as jar:
    jar.write(entry_path)
  consolidated_classpath.add_for_target(tgt, [('default', jarpath)])
def setUp(self):
    """Prepare targets, context, runtime classpath."""
    self.jar_artifact = self.create_artifact(org="org.example", name="foo", rev="1.0.0")
    self.zip_artifact = self.create_artifact(
        org="org.pantsbuild", name="bar", rev="2.0.0", ext="zip"
    )
    self.bundle_artifact = self.create_artifact(
        org="org.apache", name="baz", rev="3.0.0", classifier="tests"
    )
    self.tar_gz_artifact = self.create_artifact(
        org="org.gnu", name="gary", rev="4.0.0", ext="tar.gz"
    )

    self.jar_lib = self.make_target(
        spec="3rdparty/jvm/org/example:foo",
        target_type=JarLibrary,
        jars=[
            JarDependency(org="org.example", name="foo", rev="1.0.0"),
            JarDependency(org="org.pantsbuild", name="bar", rev="2.0.0", ext="zip"),
            JarDependency(org="org.apache", name="baz", rev="3.0.0", classifier="tests"),
            JarDependency(org="org.gnu", name="gary", rev="4.0.0", ext="tar.gz"),
        ],
    )

    safe_file_dump(os.path.join(self.build_root, "resources/foo/file"), "// dummy content")
    self.resources_target = self.make_target(
        "//resources:foo-resources", Resources, sources=["foo/file"]
    )

    # This is so that payload fingerprint can be computed.
    safe_file_dump(os.path.join(self.build_root, "foo/Foo.java"), "// dummy content")
    self.java_lib_target = self.make_target(
        "//foo:foo-library", JavaLibrary, sources=["Foo.java"]
    )

    self.binary_target = self.make_target(
        spec="//foo:foo-binary",
        target_type=JvmBinary,
        dependencies=[self.java_lib_target, self.jar_lib],
        resources=[self.resources_target.address.spec],
    )

    self.app_target = self.make_target(
        spec="//foo:foo-app", target_type=JvmApp, basename="FooApp",
        dependencies=[self.binary_target]
    )

    self.task_context = self.context(target_roots=[self.app_target])
    self._setup_classpath(self.task_context)
    self.dist_root = os.path.join(self.build_root, "dist")
def test_pantsd_invalidation_stale_sources(self):
  test_path = 'tests/python/pants_test/daemon_correctness_test_0001'
  test_build_file = os.path.join(test_path, 'BUILD')
  test_src_file = os.path.join(test_path, 'some_file.py')
  has_source_root_regex = r'"source_root": ".*/{}"'.format(test_path)
  export_cmd = ['export', test_path]

  try:
    with self.pantsd_successful_run_context() as (pantsd_run, checker, workdir, _):
      safe_mkdir(test_path, clean=True)

      pantsd_run(['help'])
      checker.assert_started()

      safe_file_dump(test_build_file,
                     "python_library(sources=globs('some_non_existent_file.py'))")
      result = pantsd_run(export_cmd)
      checker.assert_running()
      self.assertNotRegexpMatches(result.stdout_data, has_source_root_regex)

      safe_file_dump(test_build_file, "python_library(sources=globs('*.py'))")
      result = pantsd_run(export_cmd)
      checker.assert_running()
      self.assertNotRegexpMatches(result.stdout_data, has_source_root_regex)

      safe_file_dump(test_src_file, 'import this\n')
      result = pantsd_run(export_cmd)
      checker.assert_running()
      self.assertRegexpMatches(result.stdout_data, has_source_root_regex)
  finally:
    rm_rf(test_path)
def store_stats(self):
  """Store stats about this run in local and optionally remote stats dbs."""
  run_information = self.run_info.get_as_dict()
  target_data = run_information.get('target_data', None)
  if target_data:
    run_information['target_data'] = ast.literal_eval(target_data)

  stats = {
    'run_info': run_information,
    'cumulative_timings': self.cumulative_timings.get_all(),
    'self_timings': self.self_timings.get_all(),
    'artifact_cache_stats': self.artifact_cache_stats.get_all(),
    'outcomes': self.outcomes
  }

  # Dump individual stat file.
  # TODO(benjy): Do we really need these, once the statsdb is mature?
  stats_file = os.path.join(get_pants_cachedir(), 'stats',
                            '{}.json'.format(self.run_info.get_info('id')))
  safe_file_dump(stats_file, json.dumps(stats))

  # Add to local stats db.
  StatsDBFactory.global_instance().get_db().insert_stats(stats)

  # Upload to remote stats db.
  stats_url = self.get_options().stats_upload_url
  if stats_url:
    pid = os.fork()
    if pid == 0:
      try:
        self.post_stats(stats_url, stats, timeout=self.get_options().stats_upload_timeout)
      finally:
        os._exit(0)

  # Write stats to local json file.
  stats_json_file_name = self.get_options().stats_local_json_file
  if stats_json_file_name:
    self.write_stats_to_json(stats_json_file_name, stats)
def test_dependencies_swap(self):
  template = dedent("""
    python_library(
      name = 'A',
      source = 'A.py',
      {a_deps}
    )

    python_library(
      name = 'B',
      source = 'B.py',
      {b_deps}
    )
    """)
  with self.pantsd_successful_run_context() as (pantsd_run, checker, _, _):
    with temporary_dir('.') as directory:
      safe_file_dump(os.path.join(directory, 'A.py'), mode='w')
      safe_file_dump(os.path.join(directory, 'B.py'), mode='w')

      if directory.startswith('./'):
        directory = directory[2:]

      def list_and_verify():
        result = pantsd_run(['list', '{}:'.format(directory)])
        checker.assert_started()
        self.assert_success(result)
        expected_targets = {'{}:{}'.format(directory, target) for target in ('A', 'B')}
        self.assertEqual(expected_targets, set(result.stdout_data.strip().split('\n')))

      with open(os.path.join(directory, 'BUILD'), 'w') as f:
        f.write(template.format(a_deps='dependencies = [":B"],', b_deps=''))
      list_and_verify()

      with open(os.path.join(directory, 'BUILD'), 'w') as f:
        f.write(template.format(a_deps='', b_deps='dependencies = [":A"],'))
      list_and_verify()
def test_check_packages(self):
  package_check_script = os.path.join(self.pants_repo_root,
                                      'build-support/bin/check_packages.sh')
  with self._create_tiny_git_repo() as (_, worktree, _):
    init_py_path = os.path.join(worktree, 'subdir/__init__.py')

    # Check that an invalid __init__.py errors.
    safe_file_dump(init_py_path, 'asdf')
    self._assert_subprocess_error(worktree, [package_check_script, 'subdir'], """\
ERROR: All '__init__.py' files should be empty or else only contain a namespace declaration, but the following contain code:
---
subdir/__init__.py
""")

    # Check that a valid empty __init__.py succeeds.
    safe_file_dump(init_py_path, '')
    self._assert_subprocess_success(worktree, [package_check_script, 'subdir'])

    # Check that a valid __init__.py with `pkg_resources` setup succeeds.
    safe_file_dump(init_py_path, "__import__('pkg_resources').declare_namespace(__name__)")
    self._assert_subprocess_success(worktree, [package_check_script, 'subdir'])
def test_wait_for_file(self):
  with temporary_dir() as td:
    test_filename = os.path.join(td, 'test.out')
    safe_file_dump(test_filename, 'test')
    ProcessMetadataManager._wait_for_file(test_filename, timeout=.1)
def create_build_files(self, build_root):
  # Create BUILD files:
  #   build_root:
  #     BUILD
  #     BUILD.other
  #     dir_a:
  #       BUILD
  #       BUILD.other
  #       subdir:
  #         BUILD
  #     dir_b:
  #       BUILD
  dir_a = os.path.join(build_root, 'dir_a')
  dir_b = os.path.join(build_root, 'dir_b')
  dir_a_subdir = os.path.join(dir_a, 'subdir')
  safe_mkdir(dir_a)
  safe_mkdir(dir_b)
  safe_mkdir(dir_a_subdir)

  safe_file_dump(os.path.join(build_root, 'BUILD'), 'target(name="a")\ntarget(name="b")')
  safe_file_dump(os.path.join(build_root, 'BUILD.other'), 'target(name="c")')
  safe_file_dump(os.path.join(dir_a, 'BUILD'), 'target(name="a")\ntarget(name="b")')
  safe_file_dump(os.path.join(dir_a, 'BUILD.other'), 'target(name="c")')
  safe_file_dump(os.path.join(dir_b, 'BUILD'), 'target(name="a")')
  safe_file_dump(os.path.join(dir_a_subdir, 'BUILD'), 'target(name="a")')
def dump(self, directory):
  """Dump this Digest object adjacent to the given directory."""
  payload = '{}:{}'.format(self.fingerprint, self.serialized_bytes_length)
  safe_file_dump(self._path(directory), payload=payload)
def test_readwrite_file(self):
  with temporary_dir() as td:
    test_filename = os.path.join(td, 'test.out')
    test_content = '3333'
    safe_file_dump(test_filename, test_content)
    self.assertEqual(read_file(test_filename), test_content)
def test_wait_for_file(self):
  with temporary_dir() as td:
    test_filename = os.path.join(td, 'test.out')
    safe_file_dump(test_filename, 'test')
    self.pmm._wait_for_file(test_filename, timeout=.1)