Example #1
 def test_ensure_binary(self) -> None:
     unicode_val = "快"
     self.assertEqual(bytearray([0xE5, 0xBF, 0xAB]),
                      ensure_binary(unicode_val))
     with self.assertRaises(TypeError):
         ensure_binary(
             45)  # type: ignore[arg-type] # intended to fail type check
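Judging from the assertions above, ensure_binary UTF-8-encodes str values, leaves bytes untouched, and rejects anything else with a TypeError. A minimal sketch of that behavior (my own approximation for illustration, not the pants.util.strutil source):

def ensure_binary_sketch(text_or_binary):
    # bytes pass through unchanged; str is encoded to UTF-8.
    if isinstance(text_or_binary, bytes):
        return text_or_binary
    if isinstance(text_or_binary, str):
        return text_or_binary.encode("utf8")
    # Anything else (e.g. the int 45 in the test above) is rejected.
    raise TypeError(f"Argument is neither bytes nor str: {text_or_binary!r}")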
Example #2
    def join(self,
             stdin_data: bytes | str | None = None,
             tee_output: bool = False) -> PantsResult:
        """Wait for the pants process to complete, and return a PantsResult for it."""

        communicate_fn = self.process.communicate
        if tee_output:
            # TODO: MyPy complains that SubprocessProcessHandler.communicate_teeing_stdout_and_stderr does
            # not have the same type signature as subprocess.Popen.communicate.
            # It's possibly not worth trying to fix this because the type stubs for subprocess.Popen are
            # very complex and also not very precise, given how many different configurations Popen can
            # take.
            communicate_fn = SubprocessProcessHandler(
                self.process
            ).communicate_teeing_stdout_and_stderr  # type: ignore[assignment]
        if stdin_data is not None:
            stdin_data = ensure_binary(stdin_data)
        (stdout, stderr) = communicate_fn(stdin_data)

        if self.process.returncode != PANTS_SUCCEEDED_EXIT_CODE:
            render_logs(self.workdir)

        return PantsResult(
            command=self.command,
            exit_code=self.process.returncode,
            stdout=stdout.decode(),
            stderr=stderr.decode(),
            workdir=self.workdir,
            pid=self.process.pid,
        )
Example #3
  def _modify_thrift(self, source):
    """
    Replaces the python keywords in the thrift file

    Finds all python keywords in each thrift file and appends a trailing underscore.
    For example, 'from' will be converted to 'from_'.
    """
    rewrites = []
    # Use binary strings here as data read from files is binary, and mixing
    # binary and text can cause problems
    renames = dict((ensure_binary(kw), b'%s_' % kw) for kw in keyword.kwlist)
    token_regex = re.compile(r'\b(%s)\b' % '|'.join(renames.keys()), re.MULTILINE)

    def token_replace(match):
      return renames[match.group(1)]

    def replace_tokens(contents):
      return token_regex.sub(token_replace, contents)

    rewrites.append(replace_tokens)
    with open(source) as contents:
      modified = functools.reduce(lambda txt, rewrite: rewrite(txt), rewrites, contents.read())
      contents.close()
      with open(source, 'w') as thrift:
        thrift.write(modified)
    return source
Example #4
    def exit(self, result=PANTS_SUCCEEDED_EXIT_CODE, msg=None, out=None):
        """Exits the runtime.

    :param result: The exit status. Typically either PANTS_SUCCEEDED_EXIT_CODE or
                   PANTS_FAILED_EXIT_CODE, but can be a string as well. (Optional)
    :param msg: A string message to print to stderr or another custom file descriptor before exiting.
                (Optional)
    :param out: The file descriptor to emit `msg` to. (Optional)
    """
        if msg:
            out = out or sys.stderr
            if hasattr(out, 'buffer'):
                out = out.buffer

            msg = ensure_binary(msg)
            try:
                out.write(msg)
                out.write(b'\n')
                # TODO: Determine whether this call is a no-op because the stream gets flushed on exit, or
                # if we could lose what we just printed, e.g. if we get interrupted by a signal while
                # exiting and the stream is buffered like stdout.
                out.flush()
            except Exception as e:
                # If the file is already closed, or any other error occurs, just log it and continue to
                # exit.
                if msg:
                    logger.warning(
                        "Encountered error when trying to log this message: {}, \n "
                        "exception: {} \n out: {}".format(msg, e, out))
                    # In pantsd, this won't go anywhere, because there's really nowhere for us to log if we
                    # can't log :(
                    # Not in pantsd, this will end up in sys.stderr.
                    traceback.print_stack()
                logger.exception(e)
        self._exit(result)
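The hasattr(out, 'buffer') check above matters because ensure_binary returns bytes, and Python 3 text streams such as sys.stderr only accept str; the underlying .buffer object accepts bytes. A small standalone illustration of that distinction (my own snippet, not pants code):

import sys

payload = "fatal: exiting".encode("utf8")  # what ensure_binary would hand back for a str message
try:
    sys.stderr.write(payload)  # TextIOWrapper.write() requires str, so this raises TypeError
except TypeError:
    sys.stderr.buffer.write(payload + b"\n")  # the raw binary buffer takes bytes directly
    sys.stderr.buffer.flush()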
Example #5
  def exit(self, result=0, msg=None, out=None):
    """Exits the runtime.

    :param result: The exit status. Typically a 0 indicating success or a 1 indicating failure, but
                   can be a string as well. (Optional)
    :param msg: A string message to print to stderr or another custom file descriptor before exiting.
                (Optional)
    :param out: The file descriptor to emit `msg` to. (Optional)
    """
    if msg:
      out = out or sys.stderr
      if PY3 and hasattr(out, 'buffer'):
        out = out.buffer

      msg = ensure_binary(msg)
      try:
        out.write(msg)
        out.write(b'\n')
        # TODO: Determine whether this call is a no-op because the stream gets flushed on exit, or
        # if we could lose what we just printed, e.g. if we get interrupted by a signal while
        # exiting and the stream is buffered like stdout.
        out.flush()
      except Exception as e:
        # If the file is already closed, or any other error occurs, just log it and continue to
        # exit.
        logger.exception(e)
    self._exit(result)
Example #6
    def _modify_thrift(self, source):
        """
    Replaces the python keywords in the thrift file

    Finds all python keywords in each thrift file and appends a trailing underscore.
    For example, 'from' will be converted to 'from_'.
    """
        rewrites = []
        # Use binary strings here as data read from files is binary, and mixing
        # binary and text can cause problems
        renames = dict(
            (ensure_binary(kw), b'%s_' % kw) for kw in keyword.kwlist)
        token_regex = re.compile(r'\b(%s)\b' % '|'.join(renames.keys()),
                                 re.MULTILINE)

        def token_replace(match):
            return renames[match.group(1)]

        def replace_tokens(contents):
            return token_regex.sub(token_replace, contents)

        rewrites.append(replace_tokens)
        with open(source) as contents:
            modified = functools.reduce(lambda txt, rewrite: rewrite(txt),
                                        rewrites, contents.read())
            contents.close()
            with open(source, 'w') as thrift:
                thrift.write(modified)
        return source
Example #7
def combine_hashes(hashes):
    """A simple helper function to combine other hashes.  Sorts the hashes before rolling them in."""
    hasher = sha1()
    for h in sorted(hashes):
        h = ensure_binary(h)
        hasher.update(h)
    return hasher.hexdigest()
Example #8
def hash_all(strs: typing.Iterable[Union[bytes, str]]) -> str:
    """Returns a hash of the concatenation of all the strings in strs using sha1."""
    digest = hashlib.sha1()
    for s in strs:
        s = ensure_binary(s)
        digest.update(s)
    return digest.hexdigest()
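Because every element is routed through ensure_binary before digest.update(), hash_all accepts a mix of str and bytes and hashes the UTF-8 concatenation of all of them. A quick usage sketch (assuming hashlib is imported and hash_all is defined as above):

# Mixed str/bytes input collapses to the sha1 of the concatenated bytes.
assert hash_all(["abc", b"def"]) == hashlib.sha1(b"abcdef").hexdigest()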
Example #9
 def report(self, xml_dir, report_dir):
     testsuites = self.parse_xml_files(xml_dir)
     safe_mkdir(report_dir)
     report_file_path = os.path.join(report_dir, 'junit-report.html')
     with open(report_file_path, 'w') as fp:
         fp.write(ensure_binary(self.generate_html(testsuites)))
     return report_file_path
Example #10
 def test_keyword_replaced(self):
     # These are ensure_binary because python's read() does not do decoding
     thrift_contents = dedent('''
   # This file contains UTF-8: Anasûrimbor Kellhus
   namespace py gen.twitter.tweetypie.tweet
   struct UrlEntity {
     1: i16 from
   }
 ''').encode('utf-8')
     expected_replaced_contents = ensure_binary(
         dedent('''
   # This file contains UTF-8: Anasûrimbor Kellhus
   namespace py gen.twitter.tweetypie.tweet
   struct UrlEntity {
     1: i16 from_
   }
 ''').encode('utf-8'))
     m = mock_open(read_data=thrift_contents)
     with patch('__builtin__.open', m, create=True):
         replace_python_keywords_in_file('thrift_dummmy.thrift')
         expected_open_call_list = [
             call('thrift_dummmy.thrift'),
             call('thrift_dummmy.thrift', 'w')
         ]
         self.assertEqual(m.call_args_list, expected_open_call_list)
         mock_file_handle = m()
         mock_file_handle.write.assert_called_once_with(
             expected_replaced_contents)
Example #11
  def exit(self, result=PANTS_SUCCEEDED_EXIT_CODE, msg=None, out=None):
    """Exits the runtime.

    :param result: The exit status. Typically either PANTS_SUCCEEDED_EXIT_CODE or
                   PANTS_FAILED_EXIT_CODE, but can be a string as well. (Optional)
    :param msg: A string message to print to stderr or another custom file descriptor before exiting.
                (Optional)
    :param out: The file descriptor to emit `msg` to. (Optional)
    """
    if msg:
      out = out or sys.stderr
      if PY3 and hasattr(out, 'buffer'):
        out = out.buffer

      msg = ensure_binary(msg)
      try:
        out.write(msg)
        out.write(b'\n')
        # TODO: Determine whether this call is a no-op because the stream gets flushed on exit, or
        # if we could lose what we just printed, e.g. if we get interrupted by a signal while
        # exiting and the stream is buffered like stdout.
        out.flush()
      except Exception as e:
        # If the file is already closed, or any other error occurs, just log it and continue to
        # exit.
        if msg:
          logger.warning("Encountered error when trying to log this message: {}".format(msg))
          # In pantsd, this won't go anywhere, because there's really nowhere for us to log if we
          # can't log :(
          # Not in pantsd, this will end up in sys.stderr.
          traceback.print_stack()
        logger.exception(e)
    self._exit(result)
Example #12
def combine_hashes(hashes):
  """A simple helper function to combine other hashes.  Sorts the hashes before rolling them in."""
  hasher = sha1()
  for h in sorted(hashes):
    h = ensure_binary(h)
    hasher.update(h)
  return hasher.hexdigest()
Example #13
 def test_keyword_replaced(self):
   # These are ensure_binary because python's read() does not do decoding
   thrift_contents = dedent('''
     # This file contains UTF-8: Anasûrimbor Kellhus
     namespace py gen.twitter.tweetypie.tweet
     struct UrlEntity {
       1: i16 from
     }
   ''').encode('utf-8')
   expected_replaced_contents = ensure_binary(dedent('''
     # This file contains UTF-8: Anasûrimbor Kellhus
     namespace py gen.twitter.tweetypie.tweet
     struct UrlEntity {
       1: i16 from_
     }
   ''').encode('utf-8'))
   builder = PythonThriftBuilder(target=self.target('test_thrift_replacement:one'),
                                 root_dir=self.build_root,
                                 config=create_config(sample_ini=sample_ini_test))
   m = mock_open(read_data=thrift_contents)
   with patch('__builtin__.open', m, create=True):
     builder = PythonThriftBuilder(target=self.target('test_thrift_replacement:one'),
                                 root_dir=self.build_root,
                                 config=create_config(sample_ini=sample_ini_test))
     builder._modify_thrift('thrift_dummmy.thrift')
     expected_open_call_list = [call('thrift_dummmy.thrift'), call('thrift_dummmy.thrift', 'w')]
     self.assertEqual(m.call_args_list, expected_open_call_list)
     mock_file_handle = m()
     mock_file_handle.write.assert_called_once_with(expected_replaced_contents)
Example #14
 def report(self, xml_dir, report_dir):
     testsuites = self.parse_xml_files(xml_dir)
     safe_mkdir(report_dir)
     report_file_path = os.path.join(report_dir, "junit-report.html")
     with open(report_file_path, "w") as fp:
         fp.write(ensure_binary(self.generate_html(testsuites)))
     return report_file_path
Example #15
 def report(self):
   self._logger.debug('Generating JUnit HTML report...')
   testsuites = self._parse_xml_files(self._xml_dir)
   safe_mkdir_for(self._report_file_path)
   with open(self._report_file_path, 'wb') as fp:
     fp.write(ensure_binary(self._generate_html(testsuites)))
   self._logger.debug('JUnit HTML report generated to {}'.format(self._report_file_path))
   return self._report_file_path
Example #16
 def emit(self, s, dest=ReporterDestination.OUT):
   s = ensure_binary(s)
   if dest == ReporterDestination.OUT:
     self.settings.outfile.write(s)
   elif dest == ReporterDestination.ERR:
     self.settings.errfile.write(s)
   else:
     raise Exception('Invalid {}: {}'.format(ReporterDestination, dest))
Example #17
 def emit(self, s, dest=ReporterDestination.OUT):
     s = ensure_binary(s)
     if dest == ReporterDestination.OUT:
         self.settings.outfile.write(s)
     elif dest == ReporterDestination.ERR:
         self.settings.errfile.write(s)
     else:
         raise Exception("Invalid {}: {}".format(ReporterDestination, dest))
Example #18
 def _compute_fingerprint(self, field_keys):
   hasher = sha1()
   empty_hash = True
   for key in sorted(field_keys):
     field = self._fields[key]
     if field is not None:
       fp = field.fingerprint()
       if fp is not None:
         empty_hash = False
         fp = ensure_binary(fp)
         key = ensure_binary(key)
         key_sha1 = sha1(key).hexdigest().encode('utf-8')
         hasher.update(key_sha1)
         hasher.update(fp)
   if empty_hash:
     return None
   else:
     return hasher.hexdigest() if PY3 else hasher.hexdigest().decode('utf-8')
Example #19
 def _compute_fingerprint(self, field_keys):
     hasher = sha1()
     empty_hash = True
     for key in sorted(field_keys):
         field = self._fields[key]
         if field is not None:
             fp = field.fingerprint()
             if fp is not None:
                 empty_hash = False
                 fp = ensure_binary(fp)
                 key = ensure_binary(key)
                 key_sha1 = sha1(key).hexdigest().encode('utf-8')
                 hasher.update(key_sha1)
                 hasher.update(fp)
     if empty_hash:
         return None
     else:
         return hasher.hexdigest()
Example #20
def hash_all(strs: typing.Iterable[Union[bytes, str]], digest: Optional[Digest] = None) -> str:
  """Returns a hash of the concatenation of all the strings in strs.

  If a hashlib message digest is not supplied a new sha1 message digest is used.
  """
  digest = digest or hashlib.sha1()
  for s in strs:
    s = ensure_binary(s)
    digest.update(s)
  return digest.hexdigest()
Example #21
 def report(self, output_dir):
   self._logger.debug('Generating JUnit HTML report...')
   testsuites = self._parse_xml_files()
   report_file_path = os.path.join(output_dir, 'reports', 'junit-report.html')
   safe_mkdir_for(report_file_path)
   with open(report_file_path, 'wb') as fp:
     fp.write(ensure_binary(self._generate_html(testsuites)))
   self._logger.debug('JUnit HTML report generated to {}'.format(report_file_path))
   if self._open_report:
     return report_file_path
Example #22
def hash_all(strs, digest=None):
    """Returns a hash of the concatenation of all the strings in strs.

  If a hashlib message digest is not supplied a new sha1 message digest is used.
  """
    digest = digest or hashlib.sha1()
    for s in strs:
        s = ensure_binary(s)
        digest.update(s)
    return digest.hexdigest()
Example #23
def hash_all(strs, digest=None):
  """Returns a hash of the concatenation of all the strings in strs.

  If a hashlib message digest is not supplied a new sha1 message digest is used.
  """
  digest = digest or hashlib.sha1()
  for s in strs:
    s = ensure_binary(s)
    digest.update(s)
  return digest.hexdigest()
Example #24
 def _write_flush(self, fd, payload=None):
     """Write a payload to a given fd (if provided) and flush the fd."""
     try:
         if payload:
             fd.write(ensure_binary(payload))
         fd.flush()
     except (IOError, OSError) as e:
         # If a `Broken Pipe` is encountered during a stdio fd write, we're headless - bail.
         if e.errno == errno.EPIPE and self._exit_on_broken_pipe:
             sys.exit()
         # Otherwise, re-raise.
         raise
Example #25
 def test_invalid_unicode_in_build_file(self):
   """Demonstrate that unicode characters causing parse errors raise real parse errors."""
   self.add_to_build_file('BUILD', ensure_binary(dedent(
     """
     jvm_binary(name = ‘hello’,  # Parse error due to smart quotes (non ascii characters)
       source = 'HelloWorld.java'
       main = 'foo.HelloWorld',
     )
     """
   )))
   build_file = self.create_buildfile('BUILD')
   self.assert_parser_error(build_file, 'invalid syntax')
Example #26
  def join(self, stdin_data=None, tee_output=False):
    """Wait for the pants process to complete, and return a PantsResult for it."""

    communicate_fn = self.process.communicate
    if tee_output:
      communicate_fn = SubprocessProcessHandler(self.process).communicate_teeing_stdout_and_stderr
    if stdin_data is not None:
      stdin_data = ensure_binary(stdin_data)
    (stdout_data, stderr_data) = communicate_fn(stdin_data)

    return PantsResult(self.command, self.process.returncode, stdout_data.decode("utf-8"),
                       stderr_data.decode("utf-8"), self.workdir)
Example #27
 def test_invalid_unicode_in_build_file(self):
   """Demonstrate that unicode characters causing parse errors raise real parse errors."""
   self.add_to_build_file('BUILD', ensure_binary(dedent(
     """
     jvm_binary(name = ‘hello’,  # Parse error due to smart quotes (non ascii characters)
       source = 'HelloWorld.java'
       main = 'foo.HelloWorld',
     )
     """
   )))
   build_file = self.create_buildfile('BUILD')
   self.assert_parser_error(build_file, 'invalid syntax')
Example #28
 def _write_flush(self, fd, payload=None):
   """Write a payload to a given fd (if provided) and flush the fd."""
   try:
     if payload:
       fd.write(ensure_binary(payload))
     fd.flush()
   except (IOError, OSError) as e:
     # If a `Broken Pipe` is encountered during a stdio fd write, we're headless - bail.
     if e.errno == errno.EPIPE and self._exit_on_broken_pipe:
       sys.exit()
     # Otherwise, re-raise.
     raise
Example #29
 def test_unicode_string_in_build_file(self):
   """Demonstrates that a string containing unicode should work in a BUILD file."""
   self.add_to_build_file('BUILD', ensure_binary(dedent(
       """
       java_library(
         name='foo',
         sources=['א.java']
       )
       """
   )))
   build_file = FilesystemBuildFile(self.build_root, 'BUILD')
   self.build_file_parser.parse_build_file(build_file)
Example #30
 def test_invalid_unicode_in_build_file(self):
   """Demonstrate that unicode characters causing parse errors raise real parse errors."""
   self.add_to_build_file('BUILD', ensure_binary(dedent(
     """
     jvm_binary(name = ‘hello’,  # Parse error due to smart quotes (non ascii characters)
       source = 'HelloWorld.java'
       main = 'foo.HelloWorld',
     )
     """
   )))
   build_file = FilesystemBuildFile(self.build_root, 'BUILD')
   with self.assertRaises(BuildFileParser.BuildFileParserError):
     self.build_file_parser.parse_build_file(build_file)
Example #31
 def output(self, args, stdin_payload=None, binary_mode=False, **kwargs):
   process = self._pex.run(args,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           with_chroot=False,
                           blocking=False,
                           **kwargs)
   if stdin_payload is not None:
     stdin_payload = ensure_binary(stdin_payload)
   (stdout, stderr) = process.communicate(input=stdin_payload)
   if not binary_mode:
     stdout = stdout.decode('utf-8')
     stderr = stderr.decode('utf-8')
   return (stdout, stderr, process.returncode, self._pretty_cmdline(args))
Example #32
 def output(self, args, stdin_payload=None, binary_mode=False, **kwargs):
   process = self._pex.run(args,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           with_chroot=False,
                           blocking=False,
                           **kwargs)
   if stdin_payload is not None:
     stdin_payload = ensure_binary(stdin_payload)
   (stdout, stderr) = process.communicate(input=stdin_payload)
   if not binary_mode:
     stdout = stdout.decode()
     stderr = stderr.decode()
   return (stdout, stderr, process.returncode, self._pretty_cmdline(args))
Example #33
 def _assert_subprocess_error_with_input(self, worktree, cmd, stdin_payload, expected_excerpt):
   proc = subprocess.Popen(
     cmd,
     cwd=worktree,
     stdin=subprocess.PIPE,
     stdout=subprocess.PIPE,
     stderr=subprocess.PIPE,
   )
   (stdout_data, stderr_data) = proc.communicate(input=ensure_binary(stdin_payload))
   # Attempting to call '{}\n{}'.format(...) on bytes in python 3 gives you the string:
   #   "b'<the first string>'\nb'<the second string>'"
   # So we explicitly decode both stdout and stderr here.
   stdout_data = stdout_data.decode('utf-8')
   stderr_data = stderr_data.decode('utf-8')
   self.assertNotEqual(0, proc.returncode)
   all_output = '{}\n{}'.format(stdout_data, stderr_data)
   self.assertIn(expected_excerpt, all_output)
Example #34
    def join(self, stdin_data: bytes | str | None = None) -> PantsResult:
        """Wait for the pants process to complete, and return a PantsResult for it."""
        if stdin_data is not None:
            stdin_data = ensure_binary(stdin_data)
        (stdout, stderr) = self.process.communicate(stdin_data)

        if self.process.returncode != PANTS_SUCCEEDED_EXIT_CODE:
            render_logs(self.workdir)

        return PantsResult(
            command=self.command,
            exit_code=self.process.returncode,
            stdout=stdout.decode(),
            stderr=stderr.decode(),
            workdir=self.workdir,
            pid=self.process.pid,
        )
Example #35
 def test_unicode_string_in_build_file(self):
     """Demonstrates that a string containing unicode should work in a BUILD file."""
     self.add_to_build_file(
         "BUILD",
         ensure_binary(
             dedent(
                 """
     java_library(
       name='foo',
       sources=['א.java']
     )
     """
             )
         ),
     )
     build_file = self.create_buildfile("BUILD")
     self.build_file_parser.parse_build_file(build_file)
Example #36
 def _assert_subprocess_error_with_input(self, worktree, cmd, stdin_payload,
                                         expected_excerpt):
     proc = subprocess.Popen(
         cmd,
         cwd=worktree,
         stdin=subprocess.PIPE,
         stdout=subprocess.PIPE,
         stderr=subprocess.PIPE,
     )
     (stdout_data,
      stderr_data) = proc.communicate(input=ensure_binary(stdin_payload))
     # Attempting to call '{}\n{}'.format(...) on bytes in python 3 gives you the string:
     #   "b'<the first string>'\nb'<the second string>'"
     # So we explicitly decode both stdout and stderr here.
     stdout_data = stdout_data.decode('utf-8')
     stderr_data = stderr_data.decode('utf-8')
     self.assertNotEqual(0, proc.returncode)
     all_output = '{}\n{}'.format(stdout_data, stderr_data)
     self.assertIn(expected_excerpt, all_output)
Example #37
  def test_unicode_comments(self):
    """We had a bug where unicode characters in comments would cause the option parser to fail.

    Without the fix to the option parser, this test case reproduces the error:

    UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 44:
                       ordinal not in range(128)
    """
    self._do_test(
      ['Hi there!', 'This is an element in a list of strings.'],
      ensure_binary(dedent(u"""
      [
        'Hi there!',
        # This is a comment with ‘sneaky‘ unicode characters.
        'This is an element in a list of strings.',
        # This is a comment with an obvious unicode character ☺.
        ]
      """).strip()),
    )
Example #38
  def _get_perl6_subproc_os_env(self, perl6_env):
    # NB: These source file containing directory paths are assumed to have been de-duped.
    source_lib_containing_dirs = list(perl6_env.source_lib_entries.containing_lib_dirs)
    zef_install_specs = [r.install_spec for r in perl6_env.zef_resolve_results]

    # NB: put the thirdparty resolve at the end.
    all_lib_entries = source_lib_containing_dirs + zef_install_specs
    perl6lib_joined = ensure_binary(self.PERL6LIB_SEP.join(map(ensure_binary, all_lib_entries)))

    full_path_var = create_path_env_var(self._rakudo_moar.path_entries, os.environ.copy(),
                                        prepend=True)

    invocation_env = os.environ.copy()
    invocation_env.update({
      'PERL6LIB': perl6lib_joined,
      'PATH': full_path_var,
    })

    return invocation_env
Example #39
  def join(self, stdin_data=None, tee_output=False):
    """Wait for the pants process to complete, and return a PantsResult for it."""

    communicate_fn = self.process.communicate
    if tee_output:
      communicate_fn = SubprocessProcessHandler(self.process).communicate_teeing_stdout_and_stderr
    if stdin_data is not None:
      stdin_data = ensure_binary(stdin_data)
    (stdout_data, stderr_data) = communicate_fn(stdin_data)

    if self.process.returncode != PANTS_SUCCEEDED_EXIT_CODE:
      render_logs(self.workdir)

    return PantsResult(
      self.command,
      self.process.returncode,
      stdout_data.decode("utf-8"),
      stderr_data.decode("utf-8"),
      self.workdir,
      self.process.pid)
Example #40
    def write_setup(self, root_target, reduced_dependencies, chroot):
        """Write the setup.py of a target.

        Must be run after writing the contents to the chroot.
        """
        setup_keywords = root_target.provides.setup_py_keywords.copy()

        package_dir = {"": self.SOURCE_ROOT}
        packages, namespace_packages, resources = self.find_packages(
            root_target, chroot)

        if namespace_packages:
            setup_keywords["namespace_packages"] = list(
                sorted(namespace_packages))

        if packages:
            setup_keywords.update(
                package_dir=package_dir,
                packages=list(sorted(packages)),
                package_data=dict((str(package), list(map(str, rs)))
                                  for (package, rs) in resources.items()),
            )

        setup_keywords["install_requires"] = list(
            self.install_requires(reduced_dependencies))

        for binary_name, entry_point in self.iter_entry_points(root_target):
            if "entry_points" not in setup_keywords:
                setup_keywords["entry_points"] = {}
            if "console_scripts" not in setup_keywords["entry_points"]:
                setup_keywords["entry_points"]["console_scripts"] = []
            setup_keywords["entry_points"]["console_scripts"].append(
                "{} = {}".format(binary_name, entry_point))

        setup_py = self._setup_boilerplate().format(
            setup_dict=distutils_repr(setup_keywords),
            setup_target=root_target.address.reference())
        chroot.write(ensure_binary(setup_py), "setup.py")

        # Make sure that `setup.py` is included.
        chroot.write("include *.py", "MANIFEST.in", mode="w")
Example #41
    def join(self,
             stdin_data: Optional[Union[bytes, str]] = None,
             tee_output: bool = False) -> PantsResult:
        """Wait for the pants process to complete, and return a PantsResult for it."""

        communicate_fn = self.process.communicate
        if tee_output:
            communicate_fn = SubprocessProcessHandler(
                self.process).communicate_teeing_stdout_and_stderr
        if stdin_data is not None:
            stdin_data = ensure_binary(stdin_data)
        (stdout_data, stderr_data) = communicate_fn(stdin_data)

        if self.process.returncode != PANTS_SUCCEEDED_EXIT_CODE:
            render_logs(self.workdir)

        return PantsResult(command=self.command,
                           returncode=self.process.returncode,
                           stdout_data=stdout_data.decode(),
                           stderr_data=stderr_data.decode(),
                           workdir=self.workdir,
                           pid=self.process.pid)
Example #42
 def test_keyword_replaced(self):
   # These are ensure_binary because python's read() does not do decoding
   thrift_contents = dedent('''
     # This file contains UTF-8: Anasûrimbor Kellhus
     namespace py gen.twitter.tweetypie.tweet
     struct UrlEntity {
       1: i16 from
     }
   ''').encode('utf-8')
   expected_replaced_contents = ensure_binary(dedent('''
     # This file contains UTF-8: Anasûrimbor Kellhus
     namespace py gen.twitter.tweetypie.tweet
     struct UrlEntity {
       1: i16 from_
     }
   ''').encode('utf-8'))
   m = mock_open(read_data=thrift_contents)
   with patch('__builtin__.open', m, create=True):
     replace_python_keywords_in_file('thrift_dummmy.thrift')
     expected_open_call_list = [call('thrift_dummmy.thrift'), call('thrift_dummmy.thrift', 'w')]
     self.assertEqual(m.call_args_list, expected_open_call_list)
     mock_file_handle = m()
     mock_file_handle.write.assert_called_once_with(expected_replaced_contents)
Example #43
 def test_keyword_replaced(self):
     # These are ensure_binary because python's read() does not do decoding
     thrift_contents = dedent('''
   # This file contains UTF-8: Anasûrimbor Kellhus
   namespace py gen.twitter.tweetypie.tweet
   struct UrlEntity {
     1: i16 from
   }
 ''').encode('utf-8')
     expected_replaced_contents = ensure_binary(
         dedent('''
   # This file contains UTF-8: Anasûrimbor Kellhus
   namespace py gen.twitter.tweetypie.tweet
   struct UrlEntity {
     1: i16 from_
   }
 ''').encode('utf-8'))
     builder = PythonThriftBuilder(
         target=self.target('test_thrift_replacement:one'),
         root_dir=self.build_root,
         config=create_config(sample_ini=sample_ini_test))
     m = mock_open(read_data=thrift_contents)
     with patch('__builtin__.open', m, create=True):
         builder = PythonThriftBuilder(
             target=self.target('test_thrift_replacement:one'),
             root_dir=self.build_root,
             config=create_config(sample_ini=sample_ini_test))
         builder._modify_thrift('thrift_dummmy.thrift')
         expected_open_call_list = [
             call('thrift_dummmy.thrift'),
             call('thrift_dummmy.thrift', 'w')
         ]
         self.assertEqual(m.call_args_list, expected_open_call_list)
         mock_file_handle = m()
         mock_file_handle.write.assert_called_once_with(
             expected_replaced_contents)
Example #44
  def write_setup(self, root_target, reduced_dependencies, chroot):
    """Write the setup.py of a target.

    Must be run after writing the contents to the chroot.
    """
    setup_keywords = root_target.provides.setup_py_keywords.copy()

    package_dir = {'': self.SOURCE_ROOT}
    packages, namespace_packages, resources = self.find_packages(root_target, chroot)

    if namespace_packages:
      setup_keywords['namespace_packages'] = list(sorted(namespace_packages))

    if packages:
      setup_keywords.update(
          package_dir=package_dir,
          packages=list(sorted(packages)),
          package_data=dict((str(package), list(map(str, rs)))
                            for (package, rs) in resources.items()))

    setup_keywords['install_requires'] = list(self.install_requires(reduced_dependencies))

    for binary_name, entry_point in self.iter_entry_points(root_target):
      if 'entry_points' not in setup_keywords:
        setup_keywords['entry_points'] = {}
      if 'console_scripts' not in setup_keywords['entry_points']:
        setup_keywords['entry_points']['console_scripts'] = []
      setup_keywords['entry_points']['console_scripts'].append(
          '{} = {}'.format(binary_name, entry_point))

    setup_py = self._setup_boilerplate().format(setup_dict=distutils_repr(setup_keywords),
                                                setup_target=root_target.address.reference())
    chroot.write(ensure_binary(setup_py), 'setup.py')

    # Make sure that `setup.py` is included.
    chroot.write('include *.py', 'MANIFEST.in', mode='w')
Example #45
import os
import StringIO
import subprocess
import traceback
from contextlib import contextmanager

from pants.scm.scm import Scm
from pants.util.contextutil import pushd
from pants.util.strutil import ensure_binary

# 40 is Linux's hard-coded limit for total symlinks followed when resolving a path.
MAX_SYMLINKS_IN_REALPATH = 40
GIT_HASH_LENGTH = 20
# Precompute these because ensure_binary is slow and we'll need them a lot
SLASH = ensure_binary('/')
NUL = ensure_binary('\0')
SPACE = ensure_binary(' ')
NEWLINE = ensure_binary('\n')
EMPTY_STRING = ensure_binary("")


class Git(Scm):
    """An Scm implementation backed by git."""
    @classmethod
    def detect_worktree(cls, binary='git', subdir=None):
        """Detect the git working tree above cwd and return it; else, return None.

    :param string binary: The path to the git binary to use, 'git' by default.
    :param string subdir: The path to start searching for a git repo.
    :returns: path to the directory where the git working tree is rooted.
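The module-level constants above (SLASH, NUL, SPACE, NEWLINE, EMPTY_STRING) are precomputed byte strings so that tight parsing loops never pay for repeated ensure_binary calls. A hedged sketch of how such constants are typically used against git plumbing output, which arrives as bytes (my own illustration, not the Git class internals):

import subprocess

# `git ls-files -z` NUL-terminates each path, so the raw bytes are split on NUL.
raw = subprocess.check_output(['git', 'ls-files', '-z'])
paths = [entry for entry in raw.split(NUL) if entry != EMPTY_STRING]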
Example #46
 def fingerprint(self):
   hasher = sha1()
   self.test_value = ensure_binary(self.test_value)
   hasher.update(self.test_value)
   return hasher.hexdigest()
Example #47
 def test_ensure_bytes(self):
     unicode_val = "快"
     self.assertEquals(bytearray([0xE5, 0xBF, 0xAB]), ensure_binary(unicode_val))
     with self.assertRaises(TypeError):
         ensure_binary(45)
Example #48
 def fingerprint(self):
     hasher = sha1()
     self.test_value = ensure_binary(self.test_value)
     hasher.update(self.test_value)
     return hasher.hexdigest()
Example #49
 def test_ensure_binary(self):
   unicode_val = u'快'
   self.assertEqual(bytearray([0xe5, 0xbf, 0xab]), ensure_binary(unicode_val))
   with self.assertRaises(TypeError):
     ensure_binary(45)
Example #50
import logging
import StringIO
import traceback
from contextlib import contextmanager

from pants.scm.scm import Scm
from pants.util.contextutil import pushd
from pants.util.memo import memoized_method
from pants.util.process_handler import subprocess
from pants.util.strutil import ensure_binary


# 40 is Linux's hard-coded limit for total symlinks followed when resolving a path.
MAX_SYMLINKS_IN_REALPATH = 40
GIT_HASH_LENGTH = 20
# Precompute these because ensure_binary is slow and we'll need them a lot
SLASH = ensure_binary('/')
NUL = ensure_binary('\0')
SPACE = ensure_binary(' ')
NEWLINE = ensure_binary('\n')
EMPTY_STRING = ensure_binary("")


logger = logging.getLogger(__name__)


class Git(Scm):
  """An Scm implementation backed by git."""

  @classmethod
  def detect_worktree(cls, binary='git', subdir=None):
    """Detect the git working tree above cwd and return it; else, return None.
Example #51
 def write(stream_name, data):
   stream = workunit.output(stream_name)
   stream.write(ensure_binary(data))
   stream.flush()