def test_dumps_traceback_on_sigabrt(self):
    """Check that SIGABRT dumps a fatal traceback to the pid-specific log only."""
    # SIGABRT sends a traceback to the log file for the current process thanks to
    # faulthandler.enable().
    with self._send_signal_to_waiter_handle(signal.SIGABRT) as (workdir, waiter_run):
      # Check that the logs show an abort signal and the beginning of a traceback.
      pid_specific_log_file, shared_log_file = self._get_log_file_paths(workdir, waiter_run)
      assertRegex(self, read_file(pid_specific_log_file), """\
Fatal Python error: Aborted

Thread [^\n]+ \\(most recent call first\\):
""")
      # faulthandler.enable() only allows use of a single logging file at once for fatal tracebacks.
      self.assertEqual('', read_file(shared_log_file))
Example #2
0
def bootstrap_c_source(scheduler_bindings_path, output_dir, module_name=NATIVE_ENGINE_MODULE):
  """Bootstrap an external CFFI C source file.

  Generates CFFI C source for the scheduler bindings into a temporary directory, patches
  the generated code (rust symbol renaming, py2/py3 init handling, re-inserted
  preprocessor directives), and writes `<module_name>.c` plus a `<module_name>.cflags`
  shell fragment into `output_dir`.

  :param scheduler_bindings_path: Path to the scheduler bindings source to embed.
  :param output_dir: Directory that receives the final .c and .cflags files.
  :param module_name: Name of the generated CFFI module.
  """

  safe_mkdir(output_dir)

  with temporary_dir() as tempdir:
    temp_output_prefix = os.path.join(tempdir, module_name)
    real_output_prefix = os.path.join(output_dir, module_name)
    temp_c_file = '{}.c'.format(temp_output_prefix)
    if PY2:
      # NOTE(review): presumably cffi needs a native (byte) string path on Python 2 —
      # TODO confirm.
      temp_c_file = temp_c_file.encode('utf-8')
    c_file = '{}.c'.format(real_output_prefix)
    env_script = '{}.cflags'.format(real_output_prefix)

    # Preprocessor directives won't parse in the .cdef calls, so we have to hide them for now.
    scheduler_bindings_content = read_file(scheduler_bindings_path)
    scheduler_bindings = _hackily_rewrite_scheduler_bindings(scheduler_bindings_content)

    ffibuilder = cffi.FFI()
    ffibuilder.cdef(scheduler_bindings)
    ffibuilder.cdef(_FFISpecification.format_cffi_externs())
    ffibuilder.set_source(module_name, scheduler_bindings)
    ffibuilder.emit_c_code(temp_c_file)

    # Work around https://github.com/rust-lang/rust/issues/36342 by renaming initnative_engine to
    # wrapped_initnative_engine so that the rust code can define the symbol initnative_engine.
    #
    # If we dont do this, we end up at the mercy of the implementation details of rust's stripping
    # and LTO. In the past we have found ways to trick it into not stripping symbols which was handy
    # (it kept the binary working) but inconvenient (it was relying on unspecified behavior, it meant
    # our binaries couldn't be stripped which inflated them by 2~3x, and it reduced the amount of LTO
    # we could use, which led to unmeasured performance hits).
    #
    # We additionally remove the ifdefs that apply conditional `init` logic for Py2 vs Py3, in order
    # to define a module that is loadable by either 2 or 3.
    # TODO: Because PyPy uses the same `init` function name regardless of the python version, this
    # trick does not work there: we leave its conditional in place.
    file_content = read_file(temp_c_file)
    if CFFI_C_PATCH_BEFORE not in file_content:
      raise Exception('The patch for the CFFI generated code will not apply cleanly.')
    file_content = file_content.replace(CFFI_C_PATCH_BEFORE, CFFI_C_PATCH_AFTER)

    # Extract the preprocessor directives we had to hide to get the .cdef call to parse.
    file_content = _hackily_recreate_includes_for_bindings(file_content)

  # The patched content was fully read into memory above, so writing after the tempdir
  # has been cleaned up is safe; both destination paths live under output_dir.
  _replace_file(c_file, file_content)

  # Write a shell script to be sourced at build time that contains inherited CFLAGS.
  _replace_file(env_script, get_build_cflags())
Example #3
0
  def _await_socket(self, timeout):
    """Blocks for the nailgun subprocess to bind and emit a listening port in the nailgun stdout.

    :param timeout: Maximum number of seconds to wait for the port line.
    :returns: The port number (as a regex group string) emitted by the nailgun server.
    :raises NailgunClient.NailgunError: If no port line appears within the timeout.
    """
    with safe_open(self._ng_stdout, 'r') as ng_stdout:
      start_time = time.time()
      accumulated_stdout = ''
      while 1:
        # Wait up to _SELECT_WAIT seconds for stdout to become readable, then fall
        # through to the deadline check below.
        readable, _, _ = select.select([ng_stdout], [], [], self._SELECT_WAIT)
        if readable:
          line = ng_stdout.readline()                          # TODO: address deadlock risk here.
          try:
            # A match means the server printed its listening port; return it immediately.
            return self._NG_PORT_REGEX.match(line).group(1)
          except AttributeError:
            # .match() returned None: not the port line; keep the output for diagnostics.
            pass
          accumulated_stdout += line

        # NOTE(review): the deadline is only checked once per select round, so the total
        # wait can overshoot `timeout` by up to _SELECT_WAIT seconds.
        if (time.time() - start_time) > timeout:
          stderr = read_file(self._ng_stderr)
          raise NailgunClient.NailgunError(
            'Failed to read nailgun output after {sec} seconds!\n'
            'Stdout:\n{stdout}\nStderr:\n{stderr}'.format(
              sec=timeout,
              stdout=accumulated_stdout,
              stderr=stderr,
            )
          )
Example #4
0
  def _await_socket(self, timeout):
    """Blocks for the nailgun subprocess to bind and emit a listening port in the nailgun stdout.

    :param timeout: Maximum number of seconds to wait for the port line.
    :returns: The port number (as a regex group string) emitted by the nailgun server.
    :raises InitialNailgunConnectTimedOut: If no port line appears within the timeout.
    """
    with safe_open(self._ng_stdout, 'r') as ng_stdout:
      start_time = time.time()
      accumulated_stdout = ''
      while 1:
        # TODO: share the decreasing timeout logic here with NailgunProtocol.iter_chunks() by adding
        # a method to pants.util.contextutil!
        # NOTE(review): despite its name, this is elapsed-minus-timeout: NEGATIVE while
        # time remains, positive once the deadline has passed.
        remaining_time = time.time() - (start_time + timeout)
        if remaining_time > 0:
          # Deadline exceeded: surface everything read so far plus the nailgun stderr.
          stderr = read_file(self._ng_stderr, binary_mode=True)
          raise self.InitialNailgunConnectTimedOut(
            timeout=timeout,
            stdout=accumulated_stdout,
            stderr=stderr,
          )

        # Negate the (negative) value to get the seconds actually left for this select call.
        readable, _, _ = select.select([ng_stdout], [], [], (-1 * remaining_time))
        if readable:
          line = ng_stdout.readline()                          # TODO: address deadlock risk here.
          try:
            # A match means the server printed its listening port; return it immediately.
            return self._NG_PORT_REGEX.match(line).group(1)
          except AttributeError:
            # .match() returned None: not the port line; keep the output for diagnostics.
            pass
          accumulated_stdout += line
Example #5
0
def rewrite_record_file(workspace, src_record_file, mutated_file_tuples):
  """Given a RECORD file and list of mutated file tuples, update the RECORD file in place.

  The RECORD file should always be a member of the mutated files, due to both containing
  versions, and having a version in its filename.

  :param workspace: Directory containing the unpacked whl contents.
  :param src_record_file: Workspace-relative path of the original RECORD file.
  :param mutated_file_tuples: Iterable of (src, dst) workspace-relative path pairs.
  :raises Exception: If the RECORD file itself was not among the mutated files.
  """
  # Locate the rewritten RECORD file and collect the set of destination paths whose
  # fingerprints must be recomputed.
  mutated_files = set()
  dst_record_file = None
  for src, dst in mutated_file_tuples:
    if src == src_record_file:
      dst_record_file = dst
    else:
      mutated_files.add(dst)
  if not dst_record_file:
    raise Exception('Malformed whl or bad globs: `{}` was not rewritten.'.format(src_record_file))

  output_records = []
  file_name = os.path.join(workspace, dst_record_file)
  for line in read_file(file_name).splitlines():
    # A RECORD line is `<path>,<hash>,<size>`. Split from the right with maxsplit=2 so
    # that a filename containing commas still yields exactly three fields. (The previous
    # maxsplit of 3 would produce four values for such lines and raise ValueError.)
    filename, fingerprint_str, size_str = line.rsplit(',', 2)
    if filename in mutated_files:
      fingerprint_str, size_str = fingerprint_file(workspace, filename)
      output_line = ','.join((filename, fingerprint_str, size_str))
    else:
      output_line = line
    output_records.append(output_line)

  # Preserve the CRLF line endings the original RECORD writer used.
  safe_file_dump(file_name, '\r\n'.join(output_records) + '\r\n')
Example #6
0
def fingerprint_file(workspace, filename):
  """Given a relative filename located in a workspace, fingerprint the file.

  :param workspace: Directory containing the file.
  :param filename: Workspace-relative path of the file to fingerprint.
  :returns: A tuple of fingerprint string (`sha256=<base64 digest>`) and size string.
  """
  # Read as bytes: hashlib.sha256() rejects text (str) input on Python 3, and the size
  # recorded should be the byte length, not the decoded character count.
  content = read_file(os.path.join(workspace, filename), binary_mode=True)
  fingerprint = hashlib.sha256(content)
  # b64encode returns bytes on Python 3; decode so the formatted value is 'sha256=<digest>'
  # rather than "sha256=b'<digest>'".
  digest = base64.b64encode(fingerprint.digest()).decode('ascii')
  return 'sha256={}'.format(digest), str(len(content))
  def test_dumps_logs_on_signal(self):
    """Send signals which are handled, but don't get converted into a KeyboardInterrupt."""
    signal_names = {
      signal.SIGQUIT: 'SIGQUIT',
      signal.SIGTERM: 'SIGTERM',
    }
    for (signum, signame) in signal_names.items():
      with self._send_signal_to_waiter_handle(signum) as (workdir, waiter_run):
        # stderr should carry a timestamped graceful-exit message naming the signal.
        assertRegex(self, waiter_run.stderr_data, """\
timestamp: ([^\n]+)
Signal {signum} \\({signame}\\) was raised\\. Exiting with failure\\.
""".format(signum=signum, signame=signame))
        # Check that the logs show a graceful exit by SIGTERM.
        pid_specific_log_file, shared_log_file = self._get_log_file_paths(workdir, waiter_run)
        self._assert_graceful_signal_log_matches(
          waiter_run.pid, signum, signame, read_file(pid_specific_log_file))
        self._assert_graceful_signal_log_matches(
          waiter_run.pid, signum, signame, read_file(shared_log_file))
  def test_logs_unhandled_exception(self):
    """An unhandled exception is reported to stderr (backtrace omitted) and to both log files."""
    with temporary_dir() as tmpdir:
      pants_run = self.run_pants_with_workdir(
        ['--no-enable-pantsd', 'list', '//:this-target-does-not-exist'],
        workdir=tmpdir,
        # The backtrace should be omitted when --print-exception-stacktrace=False.
        print_exception_stacktrace=False)
      self.assert_failure(pants_run)
      assertRegex(self, pants_run.stderr_data, """\
timestamp: ([^\n]+)
Exception caught: \\(pants\\.engine\\.scheduler\\.ExecutionError\\) \\(backtrace omitted\\)
Exception message: 1 Exception encountered:
  ResolveError: "this-target-does-not-exist" was not found in namespace ""\\. Did you mean one of:
""")
      # The full exception details should also land in the pid-specific and shared logs.
      pid_specific_log_file, shared_log_file = self._get_log_file_paths(tmpdir, pants_run)
      self._assert_unhandled_exception_log_matches(
        pants_run.pid, read_file(pid_specific_log_file))
      self._assert_unhandled_exception_log_matches(
        pants_run.pid, read_file(shared_log_file))
Example #9
0
  def read_metadata_by_name(self, name, metadata_key, caster=None):
    """Read process metadata using a named identity.

    :param string name: The ProcessMetadataManager identity/name (e.g. 'pantsd').
    :param string metadata_key: The metadata key (e.g. 'pid').
    :param func caster: A casting callable to apply to the read value (e.g. `int`).
    :returns: The (possibly cast) metadata value, or None if it could not be read.
    """
    try:
      metadata_dir = self._get_metadata_dir_by_name(name)
      raw_value = read_file(os.path.join(metadata_dir, metadata_key)).strip()
      return self._maybe_cast(raw_value, caster)
    except (IOError, OSError):
      # Missing or unreadable metadata is reported as None rather than an error.
      return None
  def test_ctypes_native_language_interop(self, toolchain_variant):
    """Check that strict_deps is enforced for, then compatible with, ctypes native interop."""
    # TODO: consider making this mock_buildroot/run_pants_with_workdir into a
    # PantsRunIntegrationTest method!
    with self.mock_buildroot(
        dirs_to_copy=[self._binary_interop_target_dir]) as buildroot, buildroot.pushd():

      # Replace strict_deps=False with nothing so we can override it (because target values for this
      # option take precedence over subsystem options).
      orig_wrapped_math_build = read_file(self._wrapped_math_build_file)
      without_strict_deps_wrapped_math_build = re.sub(
        'strict_deps=False,', '', orig_wrapped_math_build)
      safe_file_dump(self._wrapped_math_build_file, without_strict_deps_wrapped_math_build)

      # This should fail because it does not turn on strict_deps for a target which requires it.
      pants_binary_strict_deps_failure = self.run_pants_with_workdir(
        command=['binary', self._binary_target_with_interop],
        # Explicitly set to True (although this is the default).
        config={
          'native-build-step': {
            'toolchain_variant': toolchain_variant.value,
          },
          # TODO(#6848): don't make it possible to forget to add the toolchain_variant option!
          'native-build-settings': {
            'strict_deps': True,
          },
        },
        workdir=os.path.join(buildroot.new_buildroot, '.pants.d'),
        build_root=buildroot.new_buildroot)
      self.assert_failure(pants_binary_strict_deps_failure)
      # gcc and clang phrase the missing-header diagnostic differently.
      self.assertIn(toolchain_variant.resolve_for_enum_variant({
        'gnu': "fatal error: some_math.h: No such file or directory",
        'llvm': "fatal error: 'some_math.h' file not found",
      }),
                    pants_binary_strict_deps_failure.stdout_data)

    # TODO(#6848): we need to provide the libstdc++.so.6.dylib which comes with gcc on osx in the
    # DYLD_LIBRARY_PATH during the 'run' goal somehow.
    attempt_pants_run = Platform.current.resolve_for_enum_variant({
      'darwin': toolchain_variant == ToolchainVariant.llvm,
      'linux': True,
    })
    if attempt_pants_run:
      # With strict_deps properly declared, the binary should build and run successfully.
      pants_run_interop = self.run_pants(['-q', 'run', self._binary_target_with_interop], config={
        'native-build-step': {
          'toolchain_variant': toolchain_variant.value,
        },
        'native-build-settings': {
          'strict_deps': True,
        },
      })
      self.assert_success(pants_run_interop)
      self.assertEqual('x=3, f(x)=299\n', pants_run_interop.stdout_data)
Example #11
0
  def _compile_vts(self, vts, target, sources, analysis_file, upstream_analysis, classpath, outdir,
                   log_file, progress_message, settings, fatal_warnings, zinc_file_manager,
                   counter):
    """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
    if not sources:
      self.context.log.warn('Skipping {} compile for targets with no sources:\n  {}'
                            .format(self.name(), vts.targets))
    else:
      # Use str(' ') for the fill character: the previous b' ' raises TypeError on
      # Python 3 where str.rjust() requires a str fillchar, while str(' ') yields a
      # native one-character str on both Python 2 (even under unicode_literals) and 3.
      counter_val = str(counter()).rjust(counter.format_length(), str(' '))
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      # Do some reporting.
      self.context.log.info(
        counter_str,
        'Compiling ',
        items_to_report_element(sources, '{} source'.format(self.name())),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        progress_message,
        ').')
      with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]) as compile_workunit:
        # The compiler may delete classfiles, then later exit on a compilation error. Then if the
        # change triggering the error is reverted, we won't rebuild to restore the missing
        # classfiles. So we force-invalidate here, to be on the safe side.
        vts.force_invalidate()
        if self.get_options().capture_classpath:
          self._record_compile_classpath(classpath, vts.targets, outdir)

        # If compiling a plugin, don't try to use it on itself.
        javac_plugins_to_exclude = (t.plugin for t in vts.targets if isinstance(t, JavacPlugin))
        try:
          self.compile(self._args, classpath, sources, outdir, upstream_analysis, analysis_file,
                       log_file, settings, fatal_warnings, zinc_file_manager,
                       javac_plugins_to_exclude)
        except TaskError:
          # On failure, optionally scan the compile logs for missing-dependency suggestions
          # before re-raising.
          if self.get_options().suggest_missing_deps:
            logs = self._find_failed_compile_logs(compile_workunit)
            if logs:
              self._find_missing_deps('\n'.join([read_file(log) for log in logs]), target)
          raise
    def test_fails_ctrl_c_ffi_extern(self):
        """A KeyboardInterrupt raised inside an ffi extern should fail the run and be logged."""
        with temporary_dir() as tmpdir:
            # The env var instructs the extern stubs to raise KeyboardInterrupt when invoked.
            with environment_as(_RAISE_KEYBOARDINTERRUPT_IN_EXTERNS="True"):
                pants_run = self.run_pants_with_workdir(
                    self._lifecycle_stub_cmdline(), workdir=tmpdir)
                self.assert_failure(pants_run)

                self.assertIn(
                    "KeyboardInterrupt: ctrl-c interrupted execution of a ffi method!",
                    pants_run.stderr,
                )

                # The same message should appear in both the pid-specific and shared logs.
                pid_specific_log_file, shared_log_file = self._get_log_file_paths(
                    tmpdir, pants_run.pid)

                self.assertIn(
                    "KeyboardInterrupt: ctrl-c interrupted execution of a ffi method!",
                    read_file(pid_specific_log_file),
                )
                self.assertIn(
                    "KeyboardInterrupt: ctrl-c interrupted execution of a ffi method!",
                    read_file(shared_log_file),
                )
Example #13
0
    def _find_missing_deps(self, compile_logs, target):
        """Scan compile logs for missing classes and log suggested dependencies for `target`.

        :param compile_logs: Paths of compile log files to scan.
        :param target: The target whose BUILD dependencies the suggestions apply to.
        """
        with self.context.new_workunit('missing-deps-suggest',
                                       labels=[WorkUnitLabel.COMPILER]):
            compile_failure_log = '\n'.join(
                read_file(log) for log in compile_logs)

            # Partition missing classes into those with candidate providers and those without.
            missing_dep_suggestions, no_suggestions = self._missing_deps_finder.find(
                compile_failure_log, target)

            if missing_dep_suggestions:
                self.context.log.info(
                    'Found the following deps from target\'s transitive '
                    'dependencies that provide the missing classes:')
                suggested_deps = set()
                for classname, candidates in missing_dep_suggestions.items():
                    # Arbitrarily pick the first candidate as the suggestion; log them all.
                    suggested_deps.add(list(candidates)[0])
                    self.context.log.info('  {}: {}'.format(
                        classname, ', '.join(candidates)))

                # We format the suggested deps with single quotes and commas so that
                # they can be easily cut/pasted into a BUILD file.
                formatted_suggested_deps = [
                    "'%s'," % dep for dep in suggested_deps
                ]
                suggestion_msg = (
                    '\nIf the above information is correct, '
                    'please add the following to the dependencies of ({}):\n  {}\n'
                    .format(
                        target.address.spec,
                        '\n  '.join(sorted(list(formatted_suggested_deps)))))

                # If buildozer is configured, also print a ready-to-run command line.
                path_to_buildozer = self.get_options().buildozer
                if path_to_buildozer:
                    suggestion_msg += (
                        "\nYou can do this by running:\n"
                        "  {buildozer} 'add dependencies {deps}' {target}".
                        format(buildozer=path_to_buildozer,
                               deps=" ".join(sorted(suggested_deps)),
                               target=target.address.spec))

                self.context.log.info(suggestion_msg)

            if no_suggestions:
                self.context.log.warn(
                    'Unable to find any deps from target\'s transitive '
                    'dependencies that provide the following missing classes:')
                no_suggestion_msg = '\n   '.join(sorted(list(no_suggestions)))
                self.context.log.warn('  {}'.format(no_suggestion_msg))
                self.context.log.warn(
                    self.get_options().missing_deps_not_found_msg)
Example #14
0
    def test_dumps_logs_on_signal(self):
        """Send signals which are handled, but don't get converted into a KeyboardInterrupt."""
        signal_names = {
            signal.SIGQUIT: 'SIGQUIT',
            signal.SIGTERM: 'SIGTERM',
        }
        for (signum, signame) in signal_names.items():
            with self._send_signal_to_waiter_handle(signum) as (workdir,
                                                                waiter_run):
                # stderr should carry a timestamped graceful-exit message naming the signal.
                self.assertRegex(
                    waiter_run.stderr_data, """\
timestamp: ([^\n]+)
Signal {signum} \\({signame}\\) was raised\\. Exiting with failure\\.
""".format(signum=signum, signame=signame))
                # Check that the logs show a graceful exit by SIGTERM.
                pid_specific_log_file, shared_log_file = self._get_log_file_paths(
                    workdir, waiter_run)
                self._assert_graceful_signal_log_matches(
                    waiter_run.pid, signum, signame,
                    read_file(pid_specific_log_file))
                self._assert_graceful_signal_log_matches(
                    waiter_run.pid, signum, signame,
                    read_file(shared_log_file))
    def test_dumps_traceback_on_sigabrt(self):
        """Check that SIGABRT dumps a fatal traceback to the pid-specific pantsd log only."""
        # SIGABRT sends a traceback to the log file for the current process thanks to
        # faulthandler.enable().
        with self.pantsd_successful_run_context() as ctx:
            ctx.runner(["help"])
            pid = ctx.checker.assert_started()
            os.kill(pid, signal.SIGABRT)

            # Give the signal handler time to flush the traceback to the log file.
            time.sleep(5)

            # Check that the logs show an abort signal and the beginning of a traceback.
            pid_specific_log_file, shared_log_file = self._get_log_file_paths(
                ctx.workdir, pid)
            self.assertRegex(
                read_file(pid_specific_log_file),
                """\
Fatal Python error: Aborted

Thread [^\n]+ \\(most recent call first\\):
""",
            )
            # faulthandler.enable() only allows use of a single logging file at once for fatal tracebacks.
            self.assertEqual("", read_file(shared_log_file))
Example #16
0
  def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant):
    """Assert strict_deps is enforced for, then compatible with, ctypes native interop.

    :param toolchain_variant: The native toolchain to test ('gnu' or 'llvm').
    """
    # TODO: consider making this mock_buildroot/run_pants_with_workdir into a
    # PantsRunIntegrationTest method!
    with self.mock_buildroot(
        dirs_to_copy=[self._binary_interop_target_dir]) as buildroot, buildroot.pushd():

      # Replace strict_deps=False with nothing so we can override it (because target values for this
      # option take precedence over subsystem options).
      orig_wrapped_math_build = read_file(self._wrapped_math_build_file, binary_mode=False)
      without_strict_deps_wrapped_math_build = re.sub(
        'strict_deps=False,', '', orig_wrapped_math_build)
      safe_file_dump(self._wrapped_math_build_file, without_strict_deps_wrapped_math_build, mode='w')

      # This should fail because it does not turn on strict_deps for a target which requires it.
      pants_binary_strict_deps_failure = self.run_pants_with_workdir(
        command=['binary', self._binary_target_with_interop],
        # Explicitly set to True (although this is the default).
        config={
          'native-build-step': {
            'toolchain_variant': toolchain_variant,
          },
          # TODO(#6848): don't make it possible to forget to add the toolchain_variant option!
          'native-build-settings': {
            'strict_deps': True,
          },
        },
        workdir=os.path.join(buildroot.new_buildroot, '.pants.d'),
        build_root=buildroot.new_buildroot)
      self.assert_failure(pants_binary_strict_deps_failure)
      # Each toolchain phrases the missing-header diagnostic differently.
      self.assertIn(self._include_not_found_message_for_variant[toolchain_variant],
                    pants_binary_strict_deps_failure.stdout_data)

    # TODO(#6848): we need to provide the libstdc++.so.6.dylib which comes with gcc on osx in the
    # DYLD_LIBRARY_PATH during the 'run' goal somehow.
    attempt_pants_run = Platform.create().resolve_platform_specific({
      'darwin': lambda: toolchain_variant != 'gnu',
      'linux': lambda: True,
    })
    if attempt_pants_run:
      # With strict_deps properly declared, the binary should build and run successfully.
      pants_run_interop = self.run_pants(['-q', 'run', self._binary_target_with_interop], config={
        'native-build-step': {
          'toolchain_variant': toolchain_variant,
        },
        'native-build-settings': {
          'strict_deps': True,
        },
      })
      self.assert_success(pants_run_interop)
      self.assertEqual('x=3, f(x)=299\n', pants_run_interop.stdout_data)
Example #17
0
    def test_logs_unhandled_exception(self):
        """An unhandled exception is reported to stderr and to both log files."""
        with temporary_dir() as tmpdir:
            pants_run = self.run_pants_with_workdir(
                [
                    "--no-enable-pantsd", "list",
                    "//:this-target-does-not-exist"
                ],
                workdir=tmpdir,
                # The backtrace should be omitted when --print-exception-stacktrace=False.
                print_exception_stacktrace=False,
            )
            self.assert_failure(pants_run)
            self.assertRegex(
                pants_run.stderr_data,
                """\
"this-target-does-not-exist" was not found in namespace ""\\. Did you mean one of:
""",
            )
            # The full exception details should also land in the pid-specific and shared logs.
            pid_specific_log_file, shared_log_file = self._get_log_file_paths(
                tmpdir, pants_run.pid)
            self._assert_unhandled_exception_log_matches(
                pants_run.pid, read_file(pid_specific_log_file))
            self._assert_unhandled_exception_log_matches(
                pants_run.pid, read_file(shared_log_file))
Example #18
0
    def test_prints_traceback_on_sigusr2(self):
        with self.pantsd_successful_run_context() as ctx:
            ctx.runner(["help"])
            pid = ctx.checker.assert_started()
            os.kill(pid, signal.SIGUSR2)

            time.sleep(5)

            ctx.checker.assert_running()
            self.assertRegex(
                read_file(os.path.join(ctx.workdir, "pantsd", "pantsd.log")),
                """\
Current thread [^\n]+ \\(most recent call first\\):
""",
            )
Example #19
0
    def test_logs_unhandled_exception(self):
        """An unhandled exception is reported to stderr (backtrace omitted) and to both logs."""
        with temporary_dir() as tmpdir:
            pants_run = self.run_pants_with_workdir(
                [
                    '--no-enable-pantsd', 'list',
                    '//:this-target-does-not-exist'
                ],
                workdir=tmpdir,
                # The backtrace should be omitted when --print-exception-stacktrace=False.
                print_exception_stacktrace=False)
            self.assert_failure(pants_run)
            self.assertRegex(
                pants_run.stderr_data, """\
timestamp: ([^\n]+)
Exception caught: \\(pants\\.engine\\.scheduler\\.ExecutionError\\) \\(backtrace omitted\\)
Exception message: 1 Exception encountered:
  ResolveError: "this-target-does-not-exist" was not found in namespace ""\\. Did you mean one of:
""")
            # The full exception details should also land in the pid-specific and shared logs.
            pid_specific_log_file, shared_log_file = self._get_log_file_paths(
                tmpdir, pants_run)
            self._assert_unhandled_exception_log_matches(
                pants_run.pid, read_file(pid_specific_log_file))
            self._assert_unhandled_exception_log_matches(
                pants_run.pid, read_file(shared_log_file))
  def test_no_py_namespace(self):
    """A thrift source without `namespace py` should raise and dump the offenders to a file."""
    no_py_namespace_target = self._target_dict()['no-py-namespace']
    with self.assertRaises(PyThriftNamespaceClashCheck.NamespaceExtractionError) as cm:
      self._run_tasks(target_roots=[no_py_namespace_target])
    # The exception message points at the dump file rather than inlining the offenders.
    self.assertEqual(str(cm.exception), """\
Python namespaces could not be extracted from some thrift sources. Declaring a `namespace py` in
thrift sources for python thrift library targets will soon become required.

1 python library target(s) contained thrift sources not declaring a python namespace. The targets
and/or files which need to be edited will be dumped to: {}
"""
                     .format(cm.exception.output_file))
    # The dump file lists each offending target with its namespace-less thrift sources.
    self.assertEqual(
      'src/py-thrift:no-py-namespace: [src/py-thrift/bad.thrift]\n',
      read_file(cm.exception.output_file))
Example #21
0
def replace_in_file(workspace, src_file_path, from_str, to_str):
  """Replace from_str with to_str in the name and content of the given file.

  If any edits were necessary, returns the new filename (which may be the same as the old filename).

  :param workspace: Directory containing the file.
  :param src_file_path: Workspace-relative path of the file to edit.
  :param from_str: ASCII string to search for.
  :param to_str: ASCII replacement string.
  :returns: The (possibly unchanged) destination path, or None if nothing matched.
  """
  from_bytes = from_str.encode('ascii')
  to_bytes = to_str.encode('ascii')
  # Read as bytes so the `from_bytes in data` containment test and the byte-level
  # replace below work on Python 3, where text-mode read_file returns str.
  data = read_file(os.path.join(workspace, src_file_path), binary_mode=True)
  if from_bytes not in data and from_str not in src_file_path:
    return None

  dst_file_path = src_file_path.replace(from_str, to_str)
  # Write back as bytes to match the binary read above.
  safe_file_dump(os.path.join(workspace, dst_file_path), data.replace(from_bytes, to_bytes),
                 mode='wb')
  if src_file_path != dst_file_path:
    os.unlink(os.path.join(workspace, src_file_path))
  return dst_file_path
Example #22
0
def replace_in_file(workspace, src_file_path, from_str, to_str):
  """Replace from_str with to_str in the name and content of the given file.

  If any edits were necessary, returns the new filename (which may be the same as the old filename).

  :param workspace: Directory containing the file.
  :param src_file_path: Workspace-relative path of the file to edit.
  :param from_str: ASCII string to search for.
  :param to_str: ASCII replacement string.
  :returns: The (possibly unchanged) destination path, or None if nothing matched.
  """
  from_bytes = from_str.encode('ascii')
  to_bytes = to_str.encode('ascii')
  # Read as bytes so the `from_bytes in data` containment test and the byte-level
  # replace below work on Python 3, where text-mode read_file returns str.
  data = read_file(os.path.join(workspace, src_file_path), binary_mode=True)
  if from_bytes not in data and from_str not in src_file_path:
    return None

  dst_file_path = src_file_path.replace(from_str, to_str)
  # Write back as bytes to match the binary read above.
  safe_file_dump(os.path.join(workspace, dst_file_path), data.replace(from_bytes, to_bytes),
                 mode='wb')
  if src_file_path != dst_file_path:
    os.unlink(os.path.join(workspace, src_file_path))
  return dst_file_path
Example #23
0
  def _compile_vts(self, vts, target, sources, analysis_file, upstream_analysis, classpath, outdir,
                   log_file, zinc_args_file, progress_message, settings, fatal_warnings,
                   zinc_file_manager, counter):
    """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
    if not sources:
      self.context.log.warn('Skipping {} compile for targets with no sources:\n  {}'
                            .format(self.name(), vts.targets))
    else:
      # Use str(' ') for the fill character: the previous b' ' raises TypeError on
      # Python 3 where str.rjust() requires a str fillchar, while str(' ') yields a
      # native one-character str on both Python 2 (even under unicode_literals) and 3.
      counter_val = str(counter()).rjust(counter.format_length(), str(' '))
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      # Do some reporting.
      self.context.log.info(
        counter_str,
        'Compiling ',
        items_to_report_element(sources, '{} source'.format(self.name())),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        progress_message,
        ').')
      with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]) as compile_workunit:
        if self.get_options().capture_classpath:
          self._record_compile_classpath(classpath, vts.targets, outdir)

        try:
          self.compile(self._args, classpath, sources, outdir, upstream_analysis, analysis_file,
                       log_file, zinc_args_file, settings, fatal_warnings, zinc_file_manager,
                       self._get_plugin_map('javac', target),
                       self._get_plugin_map('scalac', target))
        except TaskError:
          # On failure, optionally scan the compile logs for missing-dependency suggestions
          # before re-raising.
          if self.get_options().suggest_missing_deps:
            logs = self._find_failed_compile_logs(compile_workunit)
            if logs:
              self._find_missing_deps('\n'.join([read_file(log).decode('utf-8') for log in logs]), target)
          raise
Example #24
0
  def _compile_vts(self, vts, target, sources, analysis_file, upstream_analysis, classpath, outdir,
                   log_file, zinc_args_file, progress_message, settings, fatal_warnings,
                   zinc_file_manager, counter):
    """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
    if not sources:
      self.context.log.warn('Skipping {} compile for targets with no sources:\n  {}'
                            .format(self.name(), vts.targets))
    else:
      # Use str(' ') for the fill character: the previous b' ' raises TypeError on
      # Python 3 where str.rjust() requires a str fillchar, while str(' ') yields a
      # native one-character str on both Python 2 (even under unicode_literals) and 3.
      counter_val = str(counter()).rjust(counter.format_length(), str(' '))
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      # Do some reporting.
      self.context.log.info(
        counter_str,
        'Compiling ',
        items_to_report_element(sources, '{} source'.format(self.name())),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        progress_message,
        ').')
      with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]) as compile_workunit:
        if self.get_options().capture_classpath:
          self._record_compile_classpath(classpath, vts.targets, outdir)

        try:
          self.compile(self._args, classpath, sources, outdir, upstream_analysis, analysis_file,
                       log_file, zinc_args_file, settings, fatal_warnings, zinc_file_manager,
                       self._get_plugin_map('javac', target),
                       self._get_plugin_map('scalac', target))
        except TaskError:
          # On failure, optionally scan the compile logs for missing-dependency suggestions
          # before re-raising.
          if self.get_options().suggest_missing_deps:
            logs = self._find_failed_compile_logs(compile_workunit)
            if logs:
              self._find_missing_deps('\n'.join([read_file(log).decode('utf-8') for log in logs]), target)
          raise
Example #25
0
def bootstrap_c_source(output_dir, module_name=NATIVE_ENGINE_MODULE):
    """Bootstrap an external CFFI C source file."""

    safe_mkdir(output_dir)

    with temporary_dir() as scratch_dir:
        scratch_prefix = os.path.join(scratch_dir, module_name)
        output_prefix = os.path.join(output_dir, module_name)
        generated_c_path = '{}.c'.format(scratch_prefix)
        if PY2:
            # cffi on Python 2 expects a bytes filesystem path.
            generated_c_path = generated_c_path.encode('utf-8')
        c_file = '{}.c'.format(output_prefix)
        env_script = '{}.cflags'.format(output_prefix)

        # Emit the CFFI binding source into the scratch directory first; only a
        # successfully patched result is copied to the real output location.
        builder = cffi.FFI()
        for declarations in (CFFI_TYPEDEFS, CFFI_HEADERS, CFFI_EXTERNS):
            builder.cdef(declarations)
        builder.set_source(module_name, CFFI_TYPEDEFS + CFFI_HEADERS)
        builder.emit_c_code(generated_c_path)

        # Work around https://github.com/rust-lang/rust/issues/36342 by renaming initnative_engine to
        # wrapped_initnative_engine so that the rust code can define the symbol initnative_engine.
        #
        # Without the rename we depend on the implementation details of rust's stripping and LTO.
        # Previously we tricked rust into not stripping symbols, which kept the binary working but
        # relied on unspecified behavior, prevented stripping (inflating binaries 2~3x), and limited
        # how much LTO we could apply (an unmeasured performance hit).
        #
        # We additionally remove the ifdefs that apply conditional `init` logic for Py2 vs Py3, in
        # order to define a module that is loadable by either 2 or 3.
        # TODO: Because PyPy uses the same `init` function name regardless of the python version,
        # this trick does not work there: we leave its conditional in place.
        file_content = read_file(generated_c_path).decode('utf-8')
        if CFFI_C_PATCH_BEFORE not in file_content:
            raise Exception(
                'The patch for the CFFI generated code will not apply cleanly.'
            )
        file_content = file_content.replace(CFFI_C_PATCH_BEFORE, CFFI_C_PATCH_AFTER)

    _replace_file(c_file, file_content)

    # Write a shell script to be sourced at build time that contains inherited CFLAGS.
    _replace_file(env_script, get_build_cflags())
  def test_reset_interactive_output_stream(self):
    """Test redirecting the terminal output stream to a separate file."""
    stub_cmdline = self._lifecycle_stub_cmdline()

    # Without redirection, the failure message lands in stderr.
    plain_run = self.run_pants(stub_cmdline)
    self.assert_failure(plain_run)
    self.assertIn('erroneous!', plain_run.stderr_data)

    with temporary_dir() as tmpdir:
      target_file = os.path.join(tmpdir, 'some_file')
      safe_file_dump(target_file, '')
      redirect_flag = "--lifecycle-stubs-new-interactive-stream-output-file={}".format(target_file)
      redirected_run = self.run_pants([redirect_flag] + stub_cmdline)
      self.assert_failure(redirected_run)
      # The Exiter prints the final error message to whatever the interactive
      # output stream is set to, so once redirected it appears in the file
      # rather than in stderr.
      self.assertNotIn('erroneous!', redirected_run.stderr_data)
      self.assertIn('erroneous!', read_file(target_file))
Example #27
0
    def test_reset_interactive_output_stream(self):
        """Test redirecting the terminal output stream to a separate file."""
        stub_cmdline = self._lifecycle_stub_cmdline()

        # By default the failure message is written to stderr.
        unredirected = self.run_pants(stub_cmdline)
        self.assert_failure(unredirected)
        self.assertIn("erroneous!", unredirected.stderr_data)

        with temporary_dir() as tmpdir:
            output_file = os.path.join(tmpdir, "some_file")
            safe_file_dump(output_file, "")
            redirect_flag = (
                "--lifecycle-stubs-new-interactive-stream-output-file={}".format(output_file))
            redirected = self.run_pants([redirect_flag] + stub_cmdline)
            self.assert_failure(redirected)
            # The Exiter prints the final error message to whatever the
            # interactive output stream is set to, so when it's redirected the
            # message lands in the file instead of stderr.
            self.assertNotIn("erroneous!", redirected.stderr_data)
            self.assertIn("erroneous!", read_file(output_file))
Example #28
0
  def _find_missing_deps(self, compile_logs, target):
    """Scan failed compile logs and suggest dependencies that provide the missing classes."""
    with self.context.new_workunit('missing-deps-suggest', labels=[WorkUnitLabel.COMPILER]):
      # Concatenate all failed compile logs so the finder can scan them in one pass.
      failure_text = '\n'.join(read_file(log).decode('utf-8') for log in compile_logs)

      suggestions, unresolved = self._missing_deps_finder.find(failure_text, target)

      if suggestions:
        self.context.log.info('Found the following deps from target\'s transitive '
                              'dependencies that provide the missing classes:')
        chosen_deps = set()
        for classname, candidates in suggestions.items():
          # Arbitrarily pick the first candidate for the paste-ready suggestion.
          chosen_deps.add(next(iter(candidates)))
          self.context.log.info('  {}: {}'.format(classname, ', '.join(candidates)))

        # Single quotes and trailing commas make the suggestions easy to
        # cut/paste into a BUILD file.
        quoted_deps = ["'%s'," % dep for dep in chosen_deps]
        suggestion_msg = (
          '\nIf the above information is correct, '
          'please add the following to the dependencies of ({}):\n  {}\n'
            .format(target.address.spec, '\n  '.join(sorted(quoted_deps)))
        )

        buildozer_path = self.get_options().buildozer
        if buildozer_path:
          suggestion_msg += ("\nYou can do this by running {buildozer} "
                             "'add dependencies {deps}' {target}".format(
                                 buildozer=buildozer_path,
                                 deps=" ".join(sorted(chosen_deps)),
                                 target=target.address.spec))

        self.context.log.info(suggestion_msg)

      if unresolved:
        self.context.log.warn('Unable to find any deps from target\'s transitive '
                               'dependencies that provide the following missing classes:')
        self.context.log.warn('  {}'.format('\n   '.join(sorted(unresolved))))
        self.context.log.warn(self.get_options().missing_deps_not_found_msg)
Example #29
0
    def test_ctypes_native_language_interop(self):
        """Check strict_deps enforcement and end-to-end run of a ctypes interop binary."""
        # TODO: consider making this mock_buildroot/run_pants_with_workdir into a
        # PantsRunIntegrationTest method!
        with self.mock_buildroot(
                dirs_to_copy=[self._binary_interop_target_dir]
        ) as buildroot, buildroot.pushd():
            # Drop the explicit strict_deps=False from the BUILD file so we can
            # override it below (target values for this option take precedence
            # over subsystem options).
            original_build = read_file(self._wrapped_math_build_file)
            patched_build = re.sub('strict_deps=False,', '', original_build)
            safe_file_dump(self._wrapped_math_build_file, patched_build)

            # This should fail: strict_deps is on, but the target does not
            # declare the dependency providing some_math.h.
            strict_deps_failure = self.run_pants_with_workdir(
                command=['binary', self._binary_target_with_interop],
                # Explicitly set to True (although this is the default).
                config={'native-build-settings': {'strict_deps': True}},
                workdir=os.path.join(buildroot.new_buildroot, '.pants.d'),
                build_root=buildroot.new_buildroot)
            self.assert_failure(strict_deps_failure)
            self.assertIn("fatal error: 'some_math.h' file not found",
                          strict_deps_failure.stdout_data)

        # With strict_deps relaxed, the binary should build and run.
        interop_run = self.run_pants(
            ['-q', 'run', self._binary_target_with_interop],
            config={'native-build-settings': {'strict_deps': False}})
        self.assert_success(interop_run)
        self.assertEqual('x=3, f(x)=299\n', interop_run.stdout_data)
Example #30
0
 def test_readwrite_file(self):
     """A dumped file reads back with exactly the content that was written."""
     with temporary_dir() as scratch:
         target_path = os.path.join(scratch, 'test.out')
         written = '3333'
         safe_file_dump(target_path, written)
         self.assertEqual(read_file(target_path), written)
 def filecontent_for(path):
   """Wrap the on-disk bytes of `path` in a FileContent, with a text path."""
   text_path = ensure_text(path)
   raw_bytes = read_file(path, binary_mode=True)
   return FileContent(text_path, raw_bytes)
Example #32
0
 def assert_dump_and_read(self, test_content, dump_kwargs, read_kwargs):
   """Dump `test_content` via safe_file_dump and assert read_file round-trips it."""
   with temporary_dir() as scratch:
     target = os.path.join(scratch, 'test.out')
     safe_file_dump(target, test_content, **dump_kwargs)
     round_tripped = read_file(target, **read_kwargs)
     self.assertEqual(round_tripped, test_content)
Example #33
0
 def assert_dump_and_read(self, test_content, dump_kwargs, read_kwargs):
     """Round-trip `test_content` through safe_file_dump/read_file and compare."""
     with temporary_dir() as scratch:
         out_file = os.path.join(scratch, "test.out")
         safe_file_dump(out_file, test_content, **dump_kwargs)
         result = read_file(out_file, **read_kwargs)
         self.assertEqual(result, test_content)
Example #34
0
 def test_readwrite_file(self):
   """safe_file_dump followed by read_file returns the original content."""
   with temporary_dir() as scratch:
     path = os.path.join(scratch, 'test.out')
     expected = '3333'
     safe_file_dump(path, expected)
     self.assertEqual(read_file(path), expected)
Example #35
0
 def filecontent_for(path: str) -> FileContent:
     """Read `path` as raw bytes and pair them with the text form of the path."""
     normalized_path = ensure_text(path)
     content_bytes = read_file(path, binary_mode=True)
     return FileContent(normalized_path, content_bytes)
Example #36
0
    def test_ctypes_native_language_interop(self, toolchain_variant):
        """Verify strict_deps enforcement and a successful run of a ctypes interop binary.

        Parameterized over `toolchain_variant` (gnu vs llvm); each compiler reports
        the missing header with a different message, asserted below.
        """
        # TODO: consider making this mock_buildroot/run_pants_with_workdir into a
        # PantsRunIntegrationTest method!
        with self.mock_buildroot(
                dirs_to_copy=[self._binary_interop_target_dir
                              ]) as buildroot, buildroot.pushd():

            # Replace strict_deps=False with nothing so we can override it (because target values for this
            # option take precedence over subsystem options).
            orig_wrapped_math_build = read_file(self._wrapped_math_build_file)
            without_strict_deps_wrapped_math_build = re.sub(
                "strict_deps=False,", "", orig_wrapped_math_build)
            safe_file_dump(self._wrapped_math_build_file,
                           without_strict_deps_wrapped_math_build)

            # This should fail because it does not turn on strict_deps for a target which requires it.
            pants_binary_strict_deps_failure = self.run_pants_with_workdir(
                command=["binary", self._binary_target_with_interop],
                # Explicitly set to True (although this is the default).
                config={
                    "native-build-step": {
                        "toolchain_variant": toolchain_variant.value,
                    },
                    # TODO(#6848): don't make it possible to forget to add the toolchain_variant option!
                    "native-build-settings": {
                        "strict_deps": True,
                    },
                },
                workdir=os.path.join(buildroot.new_buildroot, ".pants.d"),
            )
            self.assert_failure(pants_binary_strict_deps_failure)
            # gcc and clang word the missing-header diagnostic differently, so the
            # expected substring is selected per toolchain variant.
            self.assertIn(
                match(
                    toolchain_variant,
                    {
                        ToolchainVariant.gnu:
                        "fatal error: some_math.h: No such file or directory",
                        ToolchainVariant.llvm:
                        "fatal error: 'some_math.h' file not found",
                    },
                ),
                pants_binary_strict_deps_failure.stdout_data,
            )

        # TODO(#6848): we need to provide the libstdc++.so.6.dylib which comes with gcc on osx in the
        # DYLD_LIBRARY_PATH during the 'run' goal somehow.
        # On darwin the 'run' goal is only attempted for the llvm variant.
        attempt_pants_run = match(
            Platform.current,
            {
                Platform.darwin: toolchain_variant == ToolchainVariant.llvm,
                Platform.linux: True,
            },
        )
        if attempt_pants_run:
            pants_run_interop = self.run_pants(
                ["-q", "run", self._binary_target_with_interop],
                config={
                    "native-build-step": {
                        "toolchain_variant": toolchain_variant.value,
                    },
                    "native-build-settings": {
                        "strict_deps": True,
                    },
                },
            )
            self.assert_success(pants_run_interop)
            self.assertEqual("x=3, f(x)=299\n", pants_run_interop.stdout_data)
Example #37
0
 def filecontent_for(path):
   """Build a FileContent for `path`, reading its content as raw bytes.

   Reading with binary_mode=True matches the other filecontent_for variants in
   this file and avoids decode errors (or implicit text decoding) on files that
   are not valid UTF-8; FileContent receives the path as text and the content
   as bytes.
   """
   return FileContent(ensure_text(path), read_file(path, binary_mode=True))