Example #1
  def Link(self, srcs):
    """Link these objects with predetermined options and output name."""
    out = self.LinkOutputName()
    self.Log('\nLink %s' % out)
    bin_name = self.GetCXXCompiler()
    srcs_flags = []
    if not self.empty:
      srcs_flags += srcs
    srcs_flags += self.link_options
    # Handle an IRT link specially, using a separate script.
    if self.irt_linker:
      if self.tls_edit is None:
        raise Error('Linking the IRT requires tls_edit')
      irt_link_cmd = [sys.executable, self.irt_linker,
                      '--output=' + out,
                      '--tls-edit=' + self.tls_edit,
                      '--link-cmd=' + bin_name,
                      '--readelf-cmd=' + self.GetReadElf()]
      if self.commands_are_scripts:
        irt_link_cmd += ['--commands-are-scripts']
      if self.arch == 'x86-64':
        irt_link_cmd += ['--sandbox-base-hiding-check',
                         '--objdump-cmd=' + self.GetObjDump()]
      irt_link_cmd += srcs_flags
      err = self.Run(irt_link_cmd, normalize_slashes=False)
      if err:
        raise Error('FAILED with %d: %s' % (err, ' '.join(irt_link_cmd)))
      return out

    MakeDir(os.path.dirname(out))
    cmd_line = [bin_name, '-o', out, '-Wl,--as-needed']
    cmd_line += srcs_flags

    self.RunLink(cmd_line, out)
    return out
Example #2
    def GetIRTLayout(self, irt_file):
        """Check if the IRT's data and text segment fit layout constraints and
       get sizes of the IRT's text and data segments.

    Returns a tuple containing:
      * whether the IRT data/text top addresses fit within the max limit
      * current data/text top addrs
      * size of text and data segments
    """
        cmd_line = [self.readelf_cmd, '-W', '--segments', irt_file]
        # Put LC_ALL=C in the environment for readelf, so that its messages
        # will reliably match what we're looking for rather than being in some
        # other language and/or character set.
        env = dict(os.environ)
        env['LC_ALL'] = 'C'
        segment_info = self.Run(cmd_line, get_output=True, env=env)
        lines = segment_info.splitlines()
        ph_start = -1
        for i, line in enumerate(lines):
            if line == 'Program Headers:':
                ph_start = i + 1
                break
        if ph_start == -1:
            raise Error('Could not find Program Headers start: %s\n' % lines)
        seg_lines = lines[ph_start:]
        text_bottom = 0
        text_top = 0
        data_bottom = 0
        data_top = 0
        for line in seg_lines:
            pieces = line.split()
            # Type, Offset, Vaddr, Paddr, FileSz, MemSz, Flg(multiple), Align
            if len(pieces) >= 8 and pieces[0] == 'LOAD':
                # Vaddr + MemSz
                segment_bottom = int(pieces[2], 16)
                segment_top = segment_bottom + int(pieces[5], 16)
                if pieces[6] == 'R' and pieces[7] == 'E':
                    text_top = max(segment_top, text_top)
                    if text_bottom == 0:
                        text_bottom = segment_bottom
                    else:
                        text_bottom = min(segment_bottom, text_bottom)
                    continue
                if pieces[6] == 'R' or pieces[6] == 'RW':
                    data_top = max(segment_top, data_top)
                    if data_bottom == 0:
                        data_bottom = segment_bottom
                    else:
                        data_bottom = min(segment_bottom, data_bottom)
                    continue
        if text_top == 0 or data_top == 0 or text_bottom == 0 or data_bottom == 0:
            raise Error(
                'Could not parse IRT Layout: text_top=0x%x text_bottom=0x%x\n'
                '                            data_top=0x%x data_bottom=0x%x\n'
                'readelf output: %s\n' %
                (text_top, text_bottom, data_top, data_bottom, lines))
        return ((text_top <= self.irt_text_max
                 and data_top <= self.irt_data_max), text_top, data_top,
                text_top - text_bottom, data_top - data_bottom)
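
The parser above keys off the column layout of readelf's Program Headers table. As a minimal sketch (not part of the original file; the addresses and sizes below are made up), this is how the Vaddr and MemSz columns of a hypothetical LOAD line combine into a segment top:

sample = '  LOAD  0x000080 0x10020000 0x10020000 0x289c0 0x289c0 R E 0x10000'
pieces = sample.split()
# The 'R E' flags split into pieces[6] and pieces[7], giving len(pieces) == 9,
# which satisfies the len(pieces) >= 8 check above.
segment_bottom = int(pieces[2], 16)                 # Vaddr
segment_top = segment_bottom + int(pieces[5], 16)   # Vaddr + MemSz
print('text segment: 0x%x-0x%x' % (segment_bottom, segment_top))
# -> text segment: 0x10020000-0x100489c0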
Example #3
def CheckObjectSize(path):
  # The object file should exist at this point, but on Windows we sometimes
  # fail to read it. Any failure ultimately raises an error, but we keep
  # retrying the size check for a while so that the error also tells us
  # whether the problem resolved itself after some time.
  retry = 0
  error_messages = []

  path = FixPath(path)

  while retry < 5:
    try:
      st = os.stat(path)
      if st.st_size != 0:
        break
      error_messages.append(
          'file size of object %s is 0 (try=%d)' % (path, retry))
    except Exception as e:
      error_messages.append(
          'failed to stat() for %s (try=%d): %s' % (path, retry, e))

    time.sleep(1)
    retry += 1

  if error_messages:
    raise Error('\n'.join(error_messages))
Example #4
  def Generate(self, srcs, obj_to_src=None):
    """Generate final output file.

    Link or archive the final output file from the compiled sources.
    """
    if self.outtype in ['nexe', 'pexe', 'nso']:
      out = self.Link(srcs)
      if self.is_pnacl_toolchain and self.finalize_pexe:
        # Note: pnacl-finalize also does stripping.
        self.Finalize(out)
      elif self.strip_all or self.strip_debug:
        self.Strip(out)
    elif self.outtype in ['nlib', 'plib']:
      out = self.Archive(srcs, obj_to_src)
      if self.strip_debug:
        self.Strip(out)
      elif self.strip_all:
        raise Error('FAILED: --strip-all on libs will result in unusable libs.')
    else:
      raise Error('FAILED: Unknown outtype: %s' % (self.outtype))
Example #5
  def Translate(self, src):
    """Translate a pexe to a nexe."""
    out = self.TranslateOutputName()
    self.Log('\nTranslate %s' % out)
    bin_name = self.GetBinName('translate')
    cmd_line = [bin_name, '-arch', self.arch, src, '-o', out]
    cmd_line += self.link_options

    err = self.Run(cmd_line)
    if err:
      raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))
    return out
Example #6
  def Finalize(self, src):
    """Finalize the PEXE"""
    self.Log('\nFinalize %s' % src)

    out = self.StripOutputName()
    self.CleanOutput(out)
    bin_name = self.GetPnaclFinalize()
    cmd_line = [bin_name, src, '-o', out]
    err = self.Run(cmd_line)
    if err:
      raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))
    return out
Example #7
  def Strip(self, src):
    """Strip the NEXE"""
    self.Log('\nStrip %s' % src)

    out = self.StripOutputName()
    pre_debug_tagging = self.UntaggedName()
    self.CleanOutput(out)
    self.CleanOutput(pre_debug_tagging)

    # Strip from foo.debug to foo.untagged.
    strip_name = self.GetStrip()
    strip_option = '--strip-all' if self.strip_all else '--strip-debug'
    # pnacl does not have an objcopy, so there is no way to embed a debug link.
    if self.is_pnacl_toolchain:
      cmd_line = [strip_name, strip_option, src, '-o', out]
      err = self.Run(cmd_line)
      if err:
        raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))
    else:
      cmd_line = [strip_name, strip_option, src, '-o', pre_debug_tagging]
      err = self.Run(cmd_line)
      if err:
        raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))

      # Tag with a debug link to foo.debug copying from foo.untagged to foo.
      objcopy_name = self.GetObjCopy()
      cmd_line = [objcopy_name, '--add-gnu-debuglink', src,
                  pre_debug_tagging, out]
      err = self.Run(cmd_line)
      if err:
        raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))

      # Drop the untagged intermediate.
      self.CleanOutput(pre_debug_tagging)

    return out
Example #8
    def Link(self, link_args):
        """Link the IRT with the given link_args."""
        out = self.output
        self.Log('\nLinking IRT: %s' % out)
        pre_tls_edit_out = out + '.raw'

        MakeDir(os.path.dirname(pre_tls_edit_out))

        cmd_line = [self.link_cmd, '-o', pre_tls_edit_out, '-Wl,--as-needed']
        cmd_line += link_args

        # Do an initial link of the IRT, without segment layout parameters
        # to determine the segment sizes.
        self.RunLink(cmd_line, pre_tls_edit_out)

        # Then grab the segment sizes and re-link w/ the right layout.
        # 'fits' is ignored after the first link, since correct layout parameters
        # were not present in the command line.
        (fits, text_top, data_top, text_size,
         data_size) = self.GetIRTLayout(pre_tls_edit_out)
        cmd_line += self.GetIRTLayoutFlags(text_size, data_size)
        self.RunLink(cmd_line, pre_tls_edit_out)
        (fits, text_top, data_top, text_size,
         data_size) = self.GetIRTLayout(pre_tls_edit_out)
        if not fits:
            raise Error('Already re-linked IRT and it still does not fit:\n'
                        'text_top=0x%x and data_top=0x%x\n' %
                        (text_top, data_top))
        self.Log('IRT layout fits: text_top=0x%x and data_top=0x%x' %
                 (text_top, data_top))

        tls_edit_cmd = [FixPath(self.tls_edit), pre_tls_edit_out, out]
        tls_edit_err = self.Run(tls_edit_cmd, possibly_script=False)
        if tls_edit_err:
            raise Error('FAILED with %d: %s' %
                        (tls_edit_err, ' '.join(tls_edit_cmd)))
Example #9
def GetIntegerEnv(flag_name, default=0):
  """Parses and returns integer environment variable.

  Args:
    flag_name: a string name of a flag.
    default: default return value if the flag is not set.

  Returns:
    Integer value of the flag.
  """
  flag_value = os.environ.get(flag_name)
  if flag_value is None:
    return default
  try:
    return int(flag_value)
  except ValueError:
    raise Error('Invalid ' + flag_name + ': ' + flag_value)
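
A minimal usage sketch (the environment variable name here is hypothetical, not taken from the original script):

# Read an integer flag from the environment, falling back to a default.
num_processes = GetIntegerEnv('NACL_BUILD_PROCESSES', default=4)
print('using %d build processes' % num_processes)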
Example #10
    def SandboxBaseCheck(self):
        """Check that the sandbox base address is not revealed.

        This is a kind of lint check to ensure that the LLVM assembler's option
        for hiding the sandbox base address on x86-64 is being used in all code
        compiled into the IRT. It is only a heuristic intended to prevent
        accidental changes in the IRT or toolchain build, and is not
        exhaustive. It is a stopgap until we can fix
        https://code.google.com/p/nativeclient/issues/detail?id=3596
        """
        cmd = [self.objdump_cmd, '-d', self.output]
        output = self.Run(cmd, get_output=True)
        # Disallow callq, all movs variants, all stos variants
        # (objdump always disassembles 'call' as 'callq' in x86-64)
        test_regex = r'\scallq\s|\smovs[bwlq]\s|\sstos[bwlq]\s'
        # Disallow reads/pushes from rsp (other than %rsp,%rbp), and from rbp
        test_regex += r'|[^(]%rsp,(?!%rbp)|[^(]%rbp,|push\s+%r[sb]p'
        # Disallow reads from %r11 or uses as a base register
        test_regex += r'|%r11,'
        # All indirect jumps must be through r11
        test_regex += r'|jmpq\s+\*%r(?!11)'
        matched = re.search(test_regex, output)
        if matched:
            print(
                'The following instructions may reveal the sandbox base address:'
            )
            lines_printed = 0
            lines_printed_limit = 50
            for line in output.splitlines():
                match = re.search(test_regex, line)
                if match and lines_printed < lines_printed_limit:
                    lines_printed += 1
                    print(line)
            if lines_printed == lines_printed_limit:
                print('(additional lines not printed)')
            print('ran', cmd)
            raise Error('IRT sandbox base address hiding lint check failed')

        else:
            self.Log('Sandbox base address hiding lint check passed')
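
To make the lint heuristic concrete, here is a small sketch run against two fabricated objdump lines; only the regular expression is taken from the code above:

import re

test_regex = (r'\scallq\s|\smovs[bwlq]\s|\sstos[bwlq]\s'
              r'|[^(]%rsp,(?!%rbp)|[^(]%rbp,|push\s+%r[sb]p'
              r'|%r11,'
              r'|jmpq\s+\*%r(?!11)')

# Establishing the frame pointer via 'mov %rsp,%rbp' is explicitly allowed
# by the (?!%rbp) lookahead.
allowed = ' 401000:\t48 89 e5             \tmov    %rsp,%rbp'
# A direct call matches the '\scallq\s' alternative and would be flagged.
flagged = ' 4005d0:\te8 0b 00 00 00       \tcallq  4005e0 <foo>'

assert re.search(test_regex, allowed) is None
assert re.search(test_regex, flagged) is not None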
Example #11
  def ListInvalidObjectsInArchive(self, archive_file, verbose=False):
    """Check the object size from the result of 'ar tv foo.a'.

    'ar tv foo.a' shows information like the following:
    rw-r--r-- 0/0  1024 Jan  1 09:00 1970 something1.o
    rw-r--r-- 0/0 12023 Jan  1 09:00 1970 something2.o
    rw-r--r-- 0/0  1124 Jan  1 09:00 1970 something3.o

    The third column is the size of the object file. We parse it and verify
    that the object size is not 0.

    Args:
      archive_file: a path to archive file to be verified.
      verbose: print information if True.

    Returns:
      A list of the names of 0-byte object files.
    """

    cmd_line = [self.GetAr(), 'tv', archive_file]
    output = self.Run(cmd_line, get_output=True)

    if verbose:
      print output

    result = []
    for line in output.splitlines():
      xs = line.split()
      if len(xs) < 3:
        raise Error('Unexpected string: %s' % line)

      object_size = xs[2]
      if object_size == '0':
        result.append(xs[-1])

    return result
Example #12
  def RunLink(self, cmd_line, link_out):
    self.CleanOutput(link_out)
    err = self.Run(cmd_line)
    if err:
      raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))
Example #13
def Main(argv):
  parser = OptionParser()
  parser.add_option('--empty', dest='empty', default=False,
                    help='Do not pass sources to library.', action='store_true')
  parser.add_option('--no-suffix', dest='suffix', default=True,
                    help='Do not append arch suffix.', action='store_false')
  parser.add_option('--strip-debug', dest='strip_debug', default=False,
                    help='Strip the NEXE for debugging', action='store_true')
  parser.add_option('--strip-all', dest='strip_all', default=False,
                    help='Strip the NEXE for production', action='store_true')
  parser.add_option('--strip', dest='strip', default='',
                    help='Strip the filename')
  parser.add_option('--nonstable-pnacl', dest='finalize_pexe', default=True,
                    help='Do not finalize pnacl bitcode for ABI stability',
                    action='store_false')
  parser.add_option('--source-list', dest='source_list',
                    help='Filename to load a source list from')
  parser.add_option('--tls-edit', dest='tls_edit', default=None,
                    help='tls_edit location if TLS should be modified for IRT')
  parser.add_option('--irt-linker', dest='irt_linker', default=None,
                    help='linker tool to use if linking the IRT')
  parser.add_option('-a', '--arch', dest='arch',
                    help='Set target architecture')
  parser.add_option('-c', '--compile', dest='compile_only', default=False,
                    help='Compile only.', action='store_true')
  parser.add_option('-i', '--include-dirs', dest='incdirs',
                    help='Set include directories.')
  parser.add_option('-l', '--lib-dirs', dest='libdirs',
                    help='Set library directories.')
  parser.add_option('-n', '--name', dest='name',
                    help='Base path and name of the nexe.')
  parser.add_option('-o', '--objdir', dest='objdir',
                    help='Base path of the object output dir.')
  parser.add_option('-r', '--root', dest='root',
                    help='Set the root directory of the sources')
  parser.add_option('--product-directory', dest='product_directory',
                    help='Set the root directory of the build')
  parser.add_option('-b', '--build', dest='build',
                    help='Set build type (<toolchain>_<outtype>, ' +
                    'where toolchain is newlib or glibc and outtype is ' +
                    'one of nexe, nlib, nso, pexe, or translate)')
  parser.add_option('--compile_flags', dest='compile_flags',
                    help='Set compile flags.')
  parser.add_option('--defines', dest='defines',
                    help='Set defines')
  parser.add_option('--link_flags', dest='link_flags',
                    help='Set link flags.')
  parser.add_option('-v', '--verbose', dest='verbose', default=False,
                    help='Enable verbosity', action='store_true')
  parser.add_option('-t', '--toolpath', dest='toolpath',
                    help='Set the path of the toolchains.')
  parser.add_option('--config-name', dest='build_config',
                    help='GYP build configuration name (Release/Debug)')
  parser.add_option('--gomadir', dest='gomadir',
                    help='Path of the goma directory.')
  options, files = parser.parse_args(argv[1:])

  if options.name is None:
    parser.error('--name is required!')
  if options.build_config is None:
    parser.error('--config-name is required!')
  if options.root is None:
    parser.error('--root is required!')
  if options.arch is None:
    parser.error('--arch is required!')
  if options.build is None:
    parser.error('--build is required!')

  if not argv:
    parser.print_help()
    return 1

  # Compare command-line options to last run, and force a rebuild if they
  # have changed.
  options.cmd_file = options.name + '.cmd'
  UpdateBuildArgs(argv, options.cmd_file)

  if options.product_directory is None:
    parser.error('--product-directory is required')
  product_dir = options.product_directory
  # Normalize to forward slashes because re.sub interprets backslashes
  # as escape characters. This also simplifies the subsequent regexes.
  product_dir = product_dir.replace('\\', '/')
  # Remove fake child that may be appended to the path.
  # See untrusted.gypi.
  product_dir = re.sub(r'/+xyz$', '', product_dir)

  build = None
  try:
    if options.source_list:
      source_list_handle = open(options.source_list, 'r')
      source_list = source_list_handle.read().splitlines()
      source_list_handle.close()

      for file_name in source_list:
        file_name = RemoveQuotes(file_name)
        if "$" in file_name:
          # The "make" backend can have an "obj" interpolation variable.
          file_name = re.sub(r'\$!?[({]?obj[)}]?', product_dir + '/obj',
                             file_name)
          # Expected patterns:
          # $!PRODUCT_DIR in ninja.
          # $(builddir) in make.
          # $(OutDir) in MSVC.
          # $(BUILT_PRODUCTS_DIR) in xcode.
          # Also strip off and re-add the trailing directory separator because
          # different platforms are inconsistent about whether it's there.
          # HACK: assume the product directory is the only var left.
          file_name = re.sub(r'\$!?[({]?\w+[)}]?/?', product_dir + '/',
                             file_name)
          assert "$" not in file_name, file_name
        files.append(file_name)

    # Use a set instead of a list so that the same file is not compiled twice.
    # The set is deliberately not converted back to a list, as a reminder that
    # the order of files may differ from the .gypcmd file.
    # Duplicated files can cause a race condition when compiling during a
    # parallel build using goma.
    # TODO(sbc): remove the duplication and turn it into an error.
    files = set(files)

    # Fix slash style to insulate invoked toolchains.
    options.toolpath = os.path.normpath(options.toolpath)

    build = Builder(options)
    objs = []

    if build.outtype == 'translate':
      # Just translate a pexe to a nexe
      if len(files) != 1:
        parser.error('Pexe translation requires exactly one input file.')
      build.Translate(list(files)[0])
      return 0

    obj_to_src = {}
    if build.IsGomaParallelBuild():
      inputs = multiprocessing.Queue()
      returns = multiprocessing.Queue()

      # Don't limit the number of processes in burst mode.
      if build.goma_burst:
        num_processes = len(files)
      else:
        num_processes = min(build.goma_processes, len(files))

      # Start parallel build.
      build_processes = []
      for _ in xrange(num_processes):
        process = multiprocessing.Process(target=CompileProcess,
                                          args=(build, inputs, returns))
        process.start()
        build_processes.append(process)

      # Start sender process. We cannot send tasks from here, because
      # if the input queue is stuck, no one can receive output.
      sender_process = multiprocessing.Process(
          target=SenderProcess,
          args=(files, num_processes, inputs))
      sender_process.start()

      # Wait for results.
      src_to_obj = {}
      for _ in files:
        out = returns.get()
        # An exception raised in the process may come through the queue.
        # Raise it again here.
        if (isinstance(out, tuple) and len(out) == 3 and
            isinstance(out[1], Exception)):
          # TODO(shinyak): out[2] contains a stringified traceback. It's just
          # a string, so we cannot pass it to raise; log it here and pass
          # None as the traceback.
          build.Log(out[2])
          raise out[0], out[1], None
        elif out and len(out) == 2:
          src_to_obj[out[0]] = out[1]
          # Sometimes out[1] is None.
          if out[1]:
            basename = os.path.basename(out[1])
            if basename in obj_to_src:
              raise Error('multiple same name objects detected: %s' % basename)
            obj_to_src[basename] = out[0]
        else:
          raise Error('Unexpected element in CompileProcess output_queue %s' %
                      out)

      # Keep the input files ordering consistent for link phase to ensure
      # determinism.
      for filename in files:
        # build.Compile returns None for input files it cannot handle.
        if src_to_obj[filename]:
          obj_name = src_to_obj[filename]
          objs.append(obj_name)
          # TODO(shinyak): In a goma environment, it turned out that an archive
          # might contain a 0-byte object even though the object file itself
          # is not 0 bytes on disk. There are several possibilities:
          # (1) the archiver failed to read the object file.
          # (2) the object file was written after the archiver opened it.
          # I don't know what is happening, but check the object file size
          # here anyway.
          CheckObjectSize(obj_name)

      # Wait until all processes have stopped and verify that there are no more
      # results.
      for process in build_processes:
        process.join()
      sender_process.join()

      assert inputs.empty()
      assert returns.empty()

    else:  # slow path.
      for filename in files:
        out = build.Compile(filename)
        if out:
          basename = os.path.basename(out)
          if basename in obj_to_src:
            raise Error('multiple same name objects detected: %s' % basename)
          obj_to_src[basename] = out
          objs.append(out)

    # Do not link if building an object. However, we still want the output
    # file to be what was specified in options.name.
    if options.compile_only:
      if len(objs) > 1:
        raise Error('--compile mode cannot be used with multiple sources')
      shutil.copy(objs[0], options.name)
    else:
      build.Generate(objs, obj_to_src)
    return 0
  except Error as e:
    sys.stderr.write('%s\n' % e)
    if build is not None:
      build.EmitDeferredLog()
    return 1
  except:
    if build is not None:
      build.EmitDeferredLog()
    raise
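
As an illustration of the source-list variable expansion in Main above, a small sketch with a made-up ninja-style path (product_dir and the file name are hypothetical):

import re

product_dir = 'out/Release'
file_name = '$!PRODUCT_DIR/gen/tls_params.c'
# 'obj' interpolation first, then any remaining build-system variable.
file_name = re.sub(r'\$!?[({]?obj[)}]?', product_dir + '/obj', file_name)
file_name = re.sub(r'\$!?[({]?\w+[)}]?/?', product_dir + '/', file_name)
print(file_name)  # -> out/Release/gen/tls_params.c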
Example #14
  def NeedsRebuild(self, outd, out, src, rebuilt=False):
    if not IsFile(self.toolstamp):
      if rebuilt:
        raise Error('Could not find toolchain stamp file %s.' % self.toolstamp)
      return True
    if not IsFile(self.cmd_file):
      if rebuilt:
        raise Error('Could not find cmd file %s.' % self.cmd_file)
      return True
    if not IsFile(outd):
      if rebuilt:
        raise Error('Could not find dependency file %s.' % outd)
      return True
    if not IsFile(out):
      if rebuilt:
        raise Error('Could not find output file %s.' % out)
      return True

    inputs = [__file__, self.toolstamp, src, self.cmd_file]
    outputs = [out, outd]

    # Find their timestamps if any.
    input_times = [(GetMTime(f), f) for f in inputs]
    output_times = [(GetMTime(f), f) for f in outputs]

    # All inputs must exist.
    missing_inputs = [p[1] for p in input_times if p[0] is None]
    if missing_inputs:
      raise Error('Missing inputs: %s' % str(missing_inputs))

    # Rebuild if any outputs are missing.
    missing_outputs = [p[1] for p in output_times if p[0] is None]
    if missing_outputs:
      if rebuilt:
        raise Error('Outputs missing after rebuild: %s' % str(missing_outputs))
      return True

    newest_input = max(input_times)
    oldest_output = min(output_times)

    if IsStale(oldest_output[0], newest_input[0], rebuilt):
      if rebuilt:
      raise Error('Output %s is older than input %s' % (
            oldest_output[1], newest_input[1]))
      return True

    # Decode emitted makefile.
    with open(FixPath(outd), 'r') as fh:
      deps = fh.read()
    # Remove line continuations
    deps = deps.replace('\\\n', ' ')
    deps = deps.replace('\n', '')
    # The dependencies are whitespace delimited following the first ':'
    # (that is not part of a windows drive letter)
    deps = deps.split(':', 1)
    if pynacl.platform.IsWindows() and len(deps[0]) == 1:
      # The path has a drive letter, find the next ':'
      deps = deps[1].split(':', 1)[1]
    else:
      deps = deps[1]
    deps = deps.split()
    if pynacl.platform.IsWindows():
      deps = [self.FixWindowsPath(d) for d in deps]
    # Check if any input has changed.
    for filename in deps:
      file_tm = GetMTime(filename)
      if IsStale(oldest_output[0], file_tm, rebuilt):
        if rebuilt:
          raise Error('Dependency %s is older than output %s.' % (
              filename, oldest_output[1]))
        return True
    return False
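
For reference, a minimal sketch of the .d-file parsing in NeedsRebuild above, using a made-up dependency file and ignoring the Windows drive-letter case:

deps_text = 'obj/foo.o: src/foo.c \\\n include/foo.h\n'
deps_text = deps_text.replace('\\\n', ' ')    # remove line continuations
deps_text = deps_text.replace('\n', '')
deps = deps_text.split(':', 1)[1].split()     # dependencies follow the first ':'
print(deps)  # -> ['src/foo.c', 'include/foo.h']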
Example #15
  def Compile(self, src):
    """Compile the source with pre-determined options."""

    compile_options = self.compile_options[:]
    _, ext = os.path.splitext(src)
    if ext in ['.c', '.S']:
      bin_name = self.GetCCompiler()
      compile_options.append('-std=gnu99')
      if self.is_pnacl_toolchain and ext == '.S':
        compile_options.append('-arch')
        compile_options.append(self.arch)
    elif ext in ['.cc', '.cpp']:
      compile_options.append('-std=gnu++0x')
      compile_options.append('-Wno-deprecated-register')
      bin_name = self.GetCXXCompiler()
    else:
      if ext != '.h':
        self.Log('Skipping unknown type %s for %s.' % (ext, src))
      return None

    # This option is only applicable to C, and C++ compilers warn if
    # it is present, so remove it for C++ to avoid the warning.
    if ext != '.c' and '-Wstrict-prototypes' in compile_options:
      compile_options.remove('-Wstrict-prototypes')

    self.Log('\nCompile %s' % src)

    out = self.GetObjectName(src)

    # The pnacl and nacl-clang toolchains are not able to handle output paths
    # where PWD + filename is longer than 255 characters, even if the
    # normalised path would be < 255.  This change also exists in the pnacl
    # python driver but is duplicated here so we get meaningful error messages
    # from nacl-clang too.
    if pynacl.platform.IsWindows() and (self.is_pnacl_toolchain or
        self.is_nacl_clang):
      full_out = os.path.join(os.getcwd(), out)
      if len(full_out) > 255:
        # Try normalising the full path and see if that brings us under the
        # limit.  In this case we will be passing the full path of the .o file
        # to the compiler, which will change the first line of the .d file.
        # However, the .d file is only consumed by build_nexe itself so it
        # should not have any adverse effects.
        out = os.path.normpath(full_out)
        if len(out) > 255:
          raise Error('Output path too long (%s): %s' % (len(out), out))

    outd = os.path.splitext(out)[0] + '.d'

    # Don't rebuild if not needed.
    if not self.NeedsRebuild(outd, out, src):
      return out

    MakeDir(os.path.dirname(out))
    self.CleanOutput(out)
    self.CleanOutput(outd)
    cmd_line = [bin_name, '-c', src, '-o', out,
                '-MD', '-MF', outd] + compile_options
    if self.gomacc:
      cmd_line.insert(0, self.gomacc)
    err = self.Run(cmd_line)
    if err:
      self.CleanOutput(outd)
      raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))
    else:
      try:
        self.NeedsRebuild(outd, out, src, True)
      except Error as e:
        raise Error('Failed to compile %s to %s with deps %s and cmdline:\t%s'
                    '\nNeedsRebuild returned error: %s' % (
                        src, out, outd, ' '.join(cmd_line), e))
    return out
Example #16
  def __init__(self, options):
    super(Builder, self).__init__(options)
    arch = options.arch
    self.arch = arch
    build_type = options.build.split('_')
    toolname = build_type[0]
    self.outtype = build_type[1]
    self.osname = pynacl.platform.GetOS()

    # The pnacl toolchain can be selected in three different ways:
    # 1. by specifying --arch=pnacl directly to generate
    #    pexe targets.
    # 2. by specifying --build=newlib_translate to generate a
    #    nexe via translation.
    # 3. by specifying --build=newlib_{nexe,nlib}_pnacl to use the pnacl
    #    toolchain in native mode (e.g. the IRT shim).
    self.is_pnacl_toolchain = False
    if self.outtype == 'translate':
      self.is_pnacl_toolchain = True

    if len(build_type) > 2 and build_type[2] == 'pnacl':
      self.is_pnacl_toolchain = True

    self.is_nacl_clang = len(build_type) > 2 and build_type[2] == 'clang'

    if arch.endswith('-nonsfi'):
      arch = arch[:-len('-nonsfi')]

    if arch in ['x86-32', 'x86-64']:
      mainarch = 'x86'
      self.tool_prefix = 'x86_64-nacl-'
    elif arch == 'arm':
      self.tool_prefix = 'arm-nacl-'
      mainarch = 'arm'
    elif arch == 'mips':
      self.tool_prefix = 'mipsel-nacl-'
      mainarch = 'mipsel'
    elif arch == 'pnacl':
      self.is_pnacl_toolchain = True
    else:
      raise Error('Toolchain architecture %s not supported.' % arch)

    if toolname not in ['newlib', 'glibc']:
      raise Error('Toolchain of type %s not supported.' % toolname)

    if arch == 'mips' and toolname == 'glibc':
      raise Error('mips glibc not supported.')

    if arch == 'pnacl' and toolname == 'glibc':
      raise Error('pnacl glibc not yet supported.')

    if self.is_pnacl_toolchain:
      self.tool_prefix = 'pnacl-'
      tool_subdir = 'pnacl_newlib'
    elif self.is_nacl_clang:
      tool_subdir = 'pnacl_newlib'
    else:
      tool_subdir = 'nacl_%s_%s' % (mainarch, toolname)
    # The pnacl-clang, etc. tools are scripts. Record that for the
    # CommandRunner so that it knows whether a shell is needed.
    self.SetCommandsAreScripts(self.is_pnacl_toolchain)

    build_arch = pynacl.platform.GetArch()
    tooldir = os.path.join('%s_%s' % (self.osname, build_arch), tool_subdir)

    self.root_path = options.root
    self.nacl_path = os.path.join(self.root_path, 'native_client')

    project_path, project_name = os.path.split(options.name)
    self.outdir = options.objdir

    # Set the toolchain directories
    self.toolchain = os.path.join(options.toolpath, tooldir)
    self.toolbin = os.path.join(self.toolchain, 'bin')
    self.toolstamp = os.path.join(self.toolchain, tool_subdir + '.json')
    if not IsFile(self.toolstamp):
      raise Error('Could not find toolchain prep stamp file: ' + self.toolstamp)

    self.inc_paths = ArgToList(options.incdirs)
    self.lib_paths = ArgToList(options.libdirs)
    self.define_list = ArgToList(options.defines)

    self.name = options.name
    self.cmd_file = options.cmd_file
    self.BuildCompileOptions(
        options.compile_flags, self.define_list, options.arch)
    self.BuildLinkOptions(options.link_flags)
    self.BuildArchiveOptions()
    self.strip = options.strip
    self.empty = options.empty
    self.strip_all = options.strip_all
    self.strip_debug = options.strip_debug
    self.tls_edit = options.tls_edit
    self.finalize_pexe = options.finalize_pexe and arch == 'pnacl'
    goma_config = self.GetGomaConfig(options.gomadir, arch, toolname)
    self.gomacc = goma_config.get('gomacc', '')
    self.goma_burst = goma_config.get('burst', False)
    self.goma_processes = goma_config.get('processes', 1)

    # Define NDEBUG for Release builds.
    if options.build_config.startswith('Release'):
      self.compile_options.append('-DNDEBUG')

    # Use unoptimized native objects for debug IRT builds for faster compiles.
    if (self.is_pnacl_toolchain
        and (self.outtype == 'nlib'
             or self.outtype == 'nexe')
        and self.arch != 'pnacl'):
      if (options.build_config is not None
          and options.build_config.startswith('Debug')):
        self.compile_options.extend(['--pnacl-allow-translate',
                                     '--pnacl-allow-native',
                                     '-arch', self.arch])

    self.irt_linker = options.irt_linker
    self.Log('Compile options: %s' % self.compile_options)
    self.Log('Linker options: %s' % self.link_options)
Example #17
    def RunArchive():
      self.CleanOutput(out)
      err = self.Run(cmd_line)
      if err:
        raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))
Example #18
  def Archive(self, srcs, obj_to_src=None):
    """Archive these objects with predetermined options and output name."""
    out = self.ArchiveOutputName()
    self.Log('\nArchive %s' % out)

    needs_verify = False
    if '-r' in self.link_options:
      bin_name = self.GetCXXCompiler()
      cmd_line = [bin_name, '-o', out, '-Wl,--as-needed']
      if not self.empty:
        cmd_line += srcs
      cmd_line += self.link_options
    else:
      bin_name = self.GetAr()
      cmd_line = [bin_name, '-rc', out]
      if not self.empty:
        cmd_line += srcs
      if self.IsGomaParallelBuild() and pynacl.platform.IsWindows():
        needs_verify = True

    MakeDir(os.path.dirname(out))

    def RunArchive():
      self.CleanOutput(out)
      err = self.Run(cmd_line)
      if err:
        raise Error('FAILED with %d: %s' % (err, ' '.join(cmd_line)))

    RunArchive()

    # HACK(shinyak): Verify the archive file on Windows if goma is used.
    # When using goma on Windows, the archive sometimes contains a 0-byte
    # object even though the object file itself is not 0 bytes on disk, so
    # verify the contents of the archive. If it contains a 0-byte object,
    # retry archiving. I'm not sure this fixes the problem, but it might give
    # us some hints.
    # See also: http://crbug.com/390764
    if needs_verify:
      ok = False
      for retry in xrange(3):
        invalid_obj_names = self.ListInvalidObjectsInArchive(out)
        if not invalid_obj_names:
          ok = True
          break

        print ('WARNING: found 0 byte objects in %s. '
               'Recompile them without goma (try=%d)'
               % (out, retry + 1))

        time.sleep(1)
        if obj_to_src:
          for invalid_obj_name in invalid_obj_names:
            src = obj_to_src.get(invalid_obj_name)

            if not src:
              print ('Couldn\'t find the corresponding src for %s' %
                     invalid_obj_name)
              raise Error('ERROR archive is corrupted: %s' % out)

            print 'Recompile without goma:', src
            self.gomacc = None
            self.Compile(src)

        RunArchive()

      if not ok:
        # Show the contents of archive if not ok.
        self.ListInvalidObjectsInArchive(out, verbose=True)
        raise Error('ERROR: archive is corrupted: %s' % out)

    return out