Example #1
    def get_crash_report(self):
        assert self._crash_addr is not None
        for binary_name, start_addr, end_addr in self._text_segments:
            if start_addr > self._crash_addr or self._crash_addr >= end_addr:
                continue

            addr = self._crash_addr
            # For PIC or PIE, we need to subtract the load bias.
            if binary_name.endswith('.so') or OPTIONS.is_bare_metal_build():
                addr -= start_addr

            if os.path.exists(binary_name):
                binary_filename = binary_name
            else:
                self.init_binary_map()
                if binary_name not in self._binary_map:
                    return '%s %x (binary file not found)\n' % (binary_name,
                                                                addr)
                binary_filename = self._binary_map[binary_name]

            pipe = subprocess.Popen([
                toolchain.get_tool(OPTIONS.target(), 'addr2line'), '-e',
                binary_filename
            ],
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
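            # Feed the hex crash address to addr2line via stdin; it prints
            # the corresponding source file and line.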
            addr2line_result = pipe.communicate('%x\n' % addr)[0]

            # We can always get a clean result by using a 32-byte aligned
            # start address, since NaCl instructions never cross a 32-byte
            # boundary.
            objdump_start_addr = (addr & ~31) - 32
            objdump_end_addr = addr + 64
            pipe = subprocess.Popen([
                toolchain.get_tool(OPTIONS.target(), 'objdump'), '-SC',
                binary_filename, '--start-address',
                '0x%x' % objdump_start_addr, '--stop-address',
                '0x%x' % objdump_end_addr
            ],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            objdump_result = pipe.communicate()[0]

            if self._is_annotating:
                report = ('[[ %s 0x%x %s ]]' %
                          (binary_filename, addr, addr2line_result.strip()))
                # The result of objdump is too verbose for annotation.
            else:
                report = '%s 0x%x\n' % (binary_filename, addr)
                report += addr2line_result
                report += objdump_result

            return report
        return 'Failed to retrieve a crash report\n'
Example #2
  def get_crash_report(self):
    assert self._crash_addr is not None
    for binary_name, start_addr, end_addr in self._text_segments:
      if start_addr > self._crash_addr or self._crash_addr >= end_addr:
        continue

      addr = self._crash_addr
      # For PIC or PIE, we need to subtract the load bias.
      if binary_name.endswith('.so') or OPTIONS.is_bare_metal_build():
        addr -= start_addr

      if os.path.exists(binary_name):
        binary_filename = binary_name
      else:
        self.init_binary_map()
        if binary_name not in self._binary_map:
          return '%s %x (binary file not found)\n' % (binary_name, addr)
        binary_filename = self._binary_map[binary_name]

      pipe = subprocess.Popen([toolchain.get_tool(OPTIONS.target(),
                                                  'addr2line'),
                               '-e', binary_filename],
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT)
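      # Feed the hex crash address to addr2line via stdin; it prints the
      # corresponding source file and line.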
      addr2line_result = pipe.communicate('%x\n' % addr)[0]

      # We can always get a clean result by using a 32-byte aligned start
      # address, since NaCl instructions never cross a 32-byte boundary.
      objdump_start_addr = (addr & ~31) - 32
      objdump_end_addr = addr + 64
      pipe = subprocess.Popen([toolchain.get_tool(OPTIONS.target(),
                                                  'objdump'),
                               '-SC', binary_filename,
                               '--start-address', '0x%x' % objdump_start_addr,
                               '--stop-address', '0x%x' % objdump_end_addr],
                              stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
      objdump_result = pipe.communicate()[0]

      if self._is_annotating:
        report = ('[[ %s 0x%x %s ]]' %
                  (binary_filename, addr, addr2line_result.strip()))
        # The result of objdump is too verbose for annotation.
      else:
        report = '%s 0x%x\n' % (binary_filename, addr)
        report += addr2line_result
        report += objdump_result

      return report
    return 'Failed to retrieve a crash report\n'
Example #3
def _get_generate_libvpx_asm_ninja():
    if not OPTIONS.is_arm():
        return None

    gen_asm_ninja = ninja_generator.NinjaGenerator('libvpx_asm')
    # Translate RVCT format assembly code into GNU Assembler format.
    gen_asm_ninja.rule(
        _GEN_LIBVPX_ASM_RULE,
        command=_ADS2GAS + ' < $in > $out.tmp && (mv $out.tmp $out)')

    # Translate C source code into assembly code and run grep to
    # generate a list of constants. Assembly code generated by this rule
    # will be included from other assembly code. See
    # third_party/android/external/libvpx/libvpx.mk for corresponding
    # rules written in Makefile.
    asm_include_paths = [
        '-I' + staging.as_staging('android/external/libvpx/armv7a-neon'),
        '-I' + staging.as_staging('android/external/libvpx/libvpx')
    ]
    gen_asm_ninja.rule(
        _GEN_LIBVPX_OFFSETS_ASM_RULE,
        command=('%s -DINLINE_ASM %s -S $in -o $out.s && '
                 'grep \'^[a-zA-Z0-9_]* EQU\' $out.s | '
                 'tr -d \'$$\\#\' | '
                 '%s > $out.tmp && (mv $out.tmp $out)' % (toolchain.get_tool(
                     OPTIONS.target(), 'cc'), ' '.join(asm_include_paths),
                                                          _ADS2GAS)))

    return gen_asm_ninja
Example #4
def _get_generate_libvpx_asm_ninja():
    if not OPTIONS.is_arm():
        return None

    gen_asm_ninja = ninja_generator.NinjaGenerator('libvpx_asm')
    # Translate RVCT format assembly code into GNU Assembler format.
    gen_asm_ninja.rule(_GEN_LIBVPX_ASM_RULE,
                       command=_ADS2GAS +
                       ' < $in > $out.tmp && (mv $out.tmp $out)')

    # Translate C source code into assembly code and run grep to
    # generate a list of constants. Assembly code generated by this rule
    # will be included from other assembly code. See
    # third_party/android/external/libvpx/libvpx.mk for corresponding
    # rules written in Makefile.
    asm_include_paths = [
        '-I' + staging.as_staging('android/external/libvpx/armv7a-neon'),
        '-I' + staging.as_staging('android/external/libvpx/libvpx')
    ]
    gen_asm_ninja.rule(_GEN_LIBVPX_OFFSETS_ASM_RULE,
                       command=('%s -DINLINE_ASM %s -S $in -o $out.s && '
                                'grep \'^[a-zA-Z0-9_]* EQU\' $out.s | '
                                'tr -d \'$$\\#\' | '
                                '%s > $out.tmp && (mv $out.tmp $out)' %
                                (toolchain.get_tool(OPTIONS.target(), 'cc'),
                                 ' '.join(asm_include_paths), _ADS2GAS)))

    return gen_asm_ninja
Example #5
def _run_gdb_for_bare_metal_arm(runner_args, test_args):
  gdb = toolchain.get_tool(build_options.OPTIONS.target(), 'gdb')
  bare_metal_loader_index = runner_args.index(
      toolchain.get_nonsfi_loader())

  # For Bare Metal ARM, we use qemu's remote debugging interface.
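  # The '-g 4014' option below makes qemu wait for a GDB connection on port
  # 4014, which the 'target remote :4014' command attaches to.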
  args = (runner_args[:bare_metal_loader_index] +
          ['-g', '4014'] +
          runner_args[bare_metal_loader_index:] + test_args)
  # Create a new session using setsid. See the comment in
  # _run_gdb_for_nacl for details.
  qemu_arm_proc = subprocess.Popen(args, stderr=subprocess.STDOUT,
                                   preexec_fn=os.setsid)

  gdb_command = _get_gdb_command_to_inject_bare_metal_gdb_py(test_args[0])

  args = ([gdb, '-ex', 'target remote :4014'] +
          gdb_command +
          gdb_util.get_args_for_stlport_pretty_printers() +
          ['-ex',
           'echo \n*** Type \'continue\' or \'c\' to start debugging ***\n\n',
           toolchain.get_nonsfi_loader()])
  subprocess.call(args)

  qemu_arm_proc.kill()
Example #6
def main():
    description = 'Tool to manipulate symbol list files.'
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        '--dump-defined',
        action='store_true',
        help='Dump defined symbols from the given shared object.')
    parser.add_argument(
        '--dump-undefined',
        action='store_true',
        help='Dump undefined symbols from the given shared object.')
    parser.add_argument('--clean',
                        action='store_true',
                        help='Copy symbols file with comments stripped.')
    parser.add_argument(
        '--verify',
        action='store_true',
        help='Verify that file 1 does not contain symbols listed in file 2.')
    parser.add_argument('args', nargs=argparse.REMAINDER)

    args = parser.parse_args()

    OPTIONS.parse_configure_file()
    nm = toolchain.get_tool(OPTIONS.target(), 'nm')

    if args.dump_defined:
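        # POSIX-format nm output is '<name> <type> <value> <size>'; the sed
        # expression keeps only the symbol names and 'sort -u' deduplicates
        # them.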
        command = (nm + ' --defined-only --extern-only --format=posix %s | '
                   'sed -n \'s/^\(.*\) [A-Za-z].*$/\\1/p\' | '
                   'LC_ALL=C sort -u' % args.args[0])
        return subprocess.check_call(command, shell=True)

    elif args.dump_undefined:
        command = (nm + ' --undefined-only --format=posix %s | '
                   'sed -n \'s/^\(.*\) U.*$/\\1/p\' | '
                   'LC_ALL=C sort -u' % args.args[0])
        return subprocess.check_call(command, shell=True)

    elif args.clean:
        command = ('egrep -ve "^#" %s | LC_ALL=C sort' % args.args[0])
        return subprocess.check_call(command, shell=True)

    elif args.verify:
        command = ('LC_ALL=C comm -12 %s %s' % (args.args[0], args.args[1]))
        try:
            diff = subprocess.check_output(command,
                                           shell=True,
                                           stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            # This can happen if files are not sorted
            print e.output.rstrip()
            return 1
        if diff:
            print '%s has disallowed symbols: ' % (args.args[0])
            print diff.rstrip()
            return 1
        return 0

    print 'No command specified.'
    return 1
Example #7
    def build_gms_core_or_use_prebuilt(self):
        if OPTIONS.enable_art_aot():
            # Rule for pre-optimizing gms-core apk.
            boot_image_dir = os.path.join(build_common.get_android_fs_root(),
                                          'system/framework',
                                          build_common.get_art_isa())
            self.rule(
                'gms_core_apk_preoptimize',
                'src/build/gms_core_apk_preoptimize.py --input $in --output $out',
                description='Preoptimizing gmscore sub apks contained in $in')
            self.build(GmsCoreNinjaGenerator._APK_PATH,
                       'gms_core_apk_preoptimize',
                       GmsCoreNinjaGenerator._ORIGINAL_APK_PATH,
                       implicit=[
                           toolchain.get_tool('java', 'dex2oat'),
                           os.path.join(boot_image_dir, 'boot.art'),
                           os.path.join(boot_image_dir, 'boot.oat')
                       ])

        if not OPTIONS.internal_apks_source_is_internal():
            return

        flags = '--eng' if OPTIONS.is_debug_code_enabled() else ''
        build_log = os.path.join('out/gms-core-build/build.log')
        command = ('internal/build/build.py gms-core %s > %s 2>&1 || '
                   '(cat %s; exit 1)') % (flags, build_log, build_log)

        if OPTIONS.internal_apks_source() == 'internal-dev':
            # Only for local development.  play-services.apk depends on the
            # jars below just to use ARC-specific features such as
            # ArcMessageBridge and Tracing.  This dependency is a must-have
            # for a clean build, but it lets an unrelated framework change
            # trigger a rebuild of play-services.apk, which is very slow.
            # With this option, engineers manage the dependency themselves,
            # and it is almost always satisfied.
            jars = []
        else:
            # Simply make these jars dependencies of gms-core-build, which
            # references ArcMessage and ArcMessageBridge in the jar.  Note
            # that these jars change often and are likely to cause an
            # unnecessary rebuild of gms-core, which is very slow.  We may
            # want to find a way to minimize the dependency.
            #
            # See also: internal/mods/gms-core/vendor/unbundled_google/packages/ \
            #     OneUp/package/Android.mk
            #     OneUp/package/generate_package.mk
            jars = [
                build_common.get_build_path_for_jar('arc-services-framework',
                                                    subpath='classes.jar'),
                build_common.get_build_path_for_jar('framework',
                                                    subpath='classes.jar'),
            ]

        self.build(GmsCoreNinjaGenerator._ALL_OUTPUTS,
                   'run_shell_command',
                   implicit=['src/build/DEPS.arc-int'] + jars,
                   variables={'command': command})
Example #8
def _get_defined_functions(library):
    nm = toolchain.get_tool(OPTIONS.target(), 'nm')
    nm_output = subprocess.check_output([nm, '-D', '--defined-only', library])

    functions = []
    for line in nm_output.splitlines():
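        # Each nm line looks like '<address> <type> <name>'; keep symbols
        # whose type is T (text) or W (weak).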
        matched = re.search(r' [TW] (\w+)', line)
        if matched:
            functions.append(matched.group(1))
    return functions
Example #9
def main():
  description = 'Tool to manipulate symbol list files.'
  parser = argparse.ArgumentParser(description=description)
  parser.add_argument(
      '--dump-defined', action='store_true',
      help='Dump defined symbols from the given shared object.')
  parser.add_argument(
      '--dump-undefined', action='store_true',
      help='Dump undefined symbols from the given shared object.')
  parser.add_argument(
      '--clean', action='store_true',
      help='Copy symbols file with comments stripped.')
  parser.add_argument(
      '--verify', action='store_true',
      help='Verify that file 1 does not contain symbols listed in file 2.')
  parser.add_argument('args', nargs=argparse.REMAINDER)

  args = parser.parse_args()

  OPTIONS.parse_configure_file()
  nm = toolchain.get_tool(OPTIONS.target(), 'nm')

  if args.dump_defined:
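    # POSIX-format nm output is '<name> <type> <value> <size>'; the sed
    # expression keeps only the symbol names and 'sort -u' deduplicates them.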
    command = (nm + ' --defined-only --extern-only --format=posix %s | '
               'sed -n \'s/^\(.*\) [A-Za-z].*$/\\1/p\' | '
               'LC_ALL=C sort -u' % args.args[0])
    return subprocess.check_call(command, shell=True)

  elif args.dump_undefined:
    command = (nm + ' --undefined-only --format=posix %s | '
               'sed -n \'s/^\(.*\) U.*$/\\1/p\' | '
               'LC_ALL=C sort -u' % args.args[0])
    return subprocess.check_call(command, shell=True)

  elif args.clean:
    command = ('egrep -ve "^#" %s | LC_ALL=C sort' % args.args[0])
    return subprocess.check_call(command, shell=True)

  elif args.verify:
    command = ('LC_ALL=C comm -12 %s %s' % (args.args[0], args.args[1]))
    try:
      diff = subprocess.check_output(command, shell=True,
                                     stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
      # This can happen if files are not sorted
      print e.output.rstrip()
      return 1
    if diff:
      print '%s has disallowed symbols: ' % (args.args[0])
      print diff.rstrip()
      return 1
    return 0

  print 'No command specified.'
  return 1
Example #10
def _launch_plugin_gdb(gdb_args, gdb_type):
  """Launches GDB for a plugin process."""
  gdb = toolchain.get_tool(OPTIONS.target(), 'gdb')
  if gdb_type == 'xterm':
    # For "xterm" mode, just run the gdb process.
    command = _get_xterm_gdb('plugin', gdb, gdb_args)
    subprocess.Popen(command)
  elif gdb_type == 'screen':
    command = _get_screen_gdb('plugin', gdb, gdb_args)
    subprocess.Popen(command)
    print '''

=====================================================================

Now gdb should be running in another screen. Set breakpoints as you
like and start debugging by

(gdb) continue

=====================================================================
'''
  elif gdb_type == 'emacsclient':
    command = _get_emacsclient_gdb('plugin', gdb, gdb_args)
    subprocess.Popen(command)
    print '''

=====================================================================

Now gdb should be running in your emacs session. Set breakpoints as you
like and start debugging by

(gdb) continue

=====================================================================
'''
  else:
    # For "wait" mode, we create a shell script and let the user know.
    command_file = _create_command_file([gdb] + gdb_args)
    print '''

=====================================================================

Now you can attach GDB. Run the following command in another shell.

$ cd /path/to/arc
$ sh %s

Then, set breakpoints as you like and start debugging by

(gdb) continue

=====================================================================

''' % command_file.name
Example #11
def _launch_plugin_gdb(gdb_args, gdb_type):
    """Launches GDB for a plugin process."""
    gdb = toolchain.get_tool(OPTIONS.target(), 'gdb')
    if gdb_type == 'xterm':
        # For "xterm" mode, just run the gdb process.
        command = _get_xterm_gdb('plugin', gdb, gdb_args)
        subprocess.Popen(command)
    elif gdb_type == 'screen':
        command = _get_screen_gdb('plugin', gdb, gdb_args)
        subprocess.Popen(command)
        print '''

=====================================================================

Now gdb should be running in another screen. Set breakpoints as you
like and start debugging by

(gdb) continue

=====================================================================
'''
    elif gdb_type == 'emacsclient':
        command = _get_emacsclient_gdb('plugin', gdb, gdb_args)
        subprocess.Popen(command)
        print '''

=====================================================================

Now gdb should be running in your emacs session. Set breakpoints as you
like and start debugging by

(gdb) continue

=====================================================================
'''
    else:
        # For "wait" mode, we create a shell script and let the user know.
        command_file = _create_command_file([gdb] + gdb_args)
        print '''

=====================================================================

Now you can attach GDB. Run the following command in another shell.

$ cd /path/to/arc
$ sh %s

Then, set breakpoints as you like and start debugging by

(gdb) continue

=====================================================================

''' % command_file.name
Example #12
  def build_gms_core_or_use_prebuilt(self):
    if OPTIONS.enable_art_aot():
      # Rule for pre-optimizing gms-core apk.
      boot_image_dir = os.path.join(build_common.get_android_fs_root(),
                                    'system/framework',
                                    build_common.get_art_isa())
      self.rule(
          'gms_core_apk_preoptimize',
          'src/build/gms_core_apk_preoptimize.py --input $in --output $out',
          description='Preoptimizing gmscore sub apks contained in $in')
      self.build(GmsCoreNinjaGenerator._APK_PATH,
                 'gms_core_apk_preoptimize',
                 GmsCoreNinjaGenerator._ORIGINAL_APK_PATH,
                 implicit=[toolchain.get_tool('java', 'dex2oat'),
                           os.path.join(boot_image_dir, 'boot.art'),
                           os.path.join(boot_image_dir, 'boot.oat')])

    if not OPTIONS.internal_apks_source_is_internal():
      return

    flags = '--eng' if OPTIONS.is_debug_code_enabled() else ''
    build_log = os.path.join('out/gms-core-build/build.log')
    command = ('internal/build/build.py gms-core %s > %s 2>&1 || '
               '(cat %s; exit 1)') % (flags, build_log, build_log)

    if OPTIONS.internal_apks_source() == 'internal-dev':
      # Only for local development.  play-services.apk depends on the jars
      # below just to use ARC-specific features such as ArcMessageBridge and
      # Tracing.  This dependency is a must-have for a clean build, but it
      # lets an unrelated framework change trigger a rebuild of
      # play-services.apk, which is very slow.  With this option, engineers
      # manage the dependency themselves, and it is almost always satisfied.
      jars = []
    else:
      # Simply make these jars dependencies of gms-core-build, which
      # references ArcMessage and ArcMessageBridge in the jar.  Note that
      # these jars change often and are likely to cause an unnecessary
      # rebuild of gms-core, which is very slow.  We may want to find a way
      # to minimize the dependency.
      #
      # See also: internal/mods/gms-core/vendor/unbundled_google/packages/ \
      #     OneUp/package/Android.mk
      #     OneUp/package/generate_package.mk
      jars = [
          build_common.get_build_path_for_jar('arc-services-framework',
                                              subpath='classes.jar'),
          build_common.get_build_path_for_jar('framework',
                                              subpath='classes.jar'),
      ]

    self.build(GmsCoreNinjaGenerator._ALL_OUTPUTS,
               'run_shell_command',
               implicit=['src/build/DEPS.arc-int'] + jars,
               variables={'command': command})
Example #13
def _preoptimize_subapk(src_apk, dest_apk, work_dir):
  # Extract inner apks from |src_apk|.
  # Note that we cannot use Python zipfile module for handling apk.
  # See: https://bugs.python.org/issue14315.
  subprocess.call(['unzip', '-q', src_apk, _SUBAPK_PATTERN, '-d', work_dir])
  inner_apk_list = file_util.glob(os.path.join(work_dir, _SUBAPK_PATTERN))

  # Optimize each apk and place the output odex next to the apk.
  odex_files = []
  for apk_path in inner_apk_list:
    apk_name = os.path.basename(apk_path)
    odex_name = re.sub(r'\.apk$', '.odex', apk_name)
    odex_path_in_apk = os.path.join(_SUBAPK_PATH, odex_name)
    odex_path = os.path.join(work_dir, odex_path_in_apk)
    odex_files.append(odex_path_in_apk)
    install_path = _get_apk_install_location(_calc_sha1(apk_path), apk_name)

    dex2oat_cmd = [
        'src/build/filter_dex2oat_warnings.py',
        toolchain.get_tool('java', 'dex2oat')
    ] + build_common.get_dex2oat_for_apk_flags(
        apk_path=apk_path,
        apk_install_path=install_path,
        output_odex_path=odex_path)
    if subprocess.call(dex2oat_cmd, cwd=_ARC_ROOT) != 0:
      print 'ERROR: preoptimize failed for %s.' % apk_path
      return False

  # Prepare |dest_apk|.
  shutil.copyfile(src_apk, dest_apk)

  # Add odex files to |dest_apk| by using aapt.
  if odex_files:
    aapt_add_cmd = [os.path.join(_ARC_ROOT, toolchain.get_tool('java', 'aapt')),
                    'add', os.path.join(_ARC_ROOT, dest_apk)] + odex_files
    with open(os.devnull, 'w') as devnull:
      if subprocess.call(aapt_add_cmd, cwd=work_dir, stdout=devnull) != 0:
        print 'ERROR: adding odex files to %s failed.' % dest_apk
        file_util.remove_file_force(dest_apk)
        return False
  return True
Example #14
def _get_nacl_irt_path(parsed_args):
  if not OPTIONS.is_nacl_build():
    return None
  chrome_path = _get_chrome_path(parsed_args)
  irt = toolchain.get_tool(OPTIONS.target(), 'irt')
  nacl_irt_path = os.path.join(os.path.dirname(chrome_path), irt)
  nacl_irt_debug_path = nacl_irt_path + '.debug'
  # Use the debug version of nacl_irt if it exists.
  if os.path.exists(nacl_irt_debug_path):
    return nacl_irt_debug_path
  else:
    return nacl_irt_path
Example #15
def get_defined_symbols(filename):
  output = subprocess.check_output([toolchain.get_tool(OPTIONS.target(), 'nm'),
                                    '--defined-only', '-D', filename])
  syms = set()
  for line in output.splitlines():
    toks = line.split()
    # Empty lines or a filename line.
    if len(toks) <= 1:
      continue
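    # Remaining lines have the form '<address> <type> <name>'.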
    addr, sym_type, name = line.split()
    syms.add(name)
  return syms
Example #16
    def __init__(self, suite_runner, additional_launch_chrome_opts=None, rebuild_crx=False):
        if additional_launch_chrome_opts is None:
            additional_launch_chrome_opts = []
        self._suite_runner = suite_runner
        self._name = suite_runner.name
        self._additional_launch_chrome_opts = additional_launch_chrome_opts[:]
        if not rebuild_crx:
            self._additional_launch_chrome_opts.append("--nocrxbuild")

        self._adb = toolchain.get_tool("host", "adb")
        self._has_error = False
        self._thread = None
Example #17
  def prepare(self, unused_test_methods_to_run):
    """Builds test jar files for a test."""
    shutil.rmtree(self._work_dir, ignore_errors=True)

    # Copy the source directory to the working directory.
    # Note that we must not copy the files with Python's own utilities here,
    # such as shutil.copy or manually written loops, because that can cause
    # ETXTBSY in run_subprocess called below when we run this on multiple
    # threads. Here is the scenario:
    # Suppose there are two test cases, A and B, and, to simplify, what we do
    # here is 1) copy the "{A_src,B_src}/build" files to "{A,B}/build", and
    # then 2) fork() and exec() "{A,B}/build". Each case runs on a different
    # thread, named thread-A and thread-B.
    # 1) on thread-A, "A_src/build" is copied to "A/build".
    # 2) on thread-B, "B_src/build" starts to be copied to "B/build". For that
    #    purpose, "B/build" is opened with the "write" flag.
    # 3) on thread-A, the process is fork()'ed *before* the copy of "B/build"
    #    completes, so subprocess-A keeps the FD of "B/build" open for
    #    writing.
    # 4) on thread-B, "B/build" is copied, close()'ed, and then fork()'ed.
    # 5) on subprocess-B, it tries to exec "B/build". However, the file is
    #    still held open by subprocess-A, so ETXTBSY is reported.
    # Ideally, such an issue would be handled by the framework
    # (crbug.com/345667), but that seems to need more investigation. So,
    # instead, we copy the files in another process.
    subprocess.check_call(['cp', '-Lr', self._source_dir, self._work_dir])

    build_script = os.path.abspath(os.path.join(self._work_dir, 'build'))
    if not os.path.isfile(build_script):
      # If not found, use the default-build script.
      # Note: do not use a python function here, such as shutil.copy directly.
      # See above comment for details.
      subprocess.check_call(
          ['cp', os.path.join(self.get_source_root(), 'etc', 'default-build'),
           build_script])
    # Ensure that the executable bit is set.
    os.chmod(build_script, stat.S_IRWXU)

    env = {
        'DX': 'dx',
        'NEED_DEX': 'true',
        'TEST_NAME': self._suite_name,
        'JAVAC': toolchain.get_tool('java', 'javac'),
        'PATH': ':'.join([
            os.path.join(build_common.get_arc_root(),
                         toolchain.get_android_sdk_build_tools_dir()),
            # Put PATH in the end to prevent shadowing previous path.
            os.environ['PATH']
        ])
    }
    subprocess.check_call([build_script], env=env, cwd=self._work_dir)

    args = self.get_system_mode_launch_chrome_command(self._name)
    prep_launch_chrome.prepare_crx_with_raw_args(args)
Example #18
 def _run_aapt(self):
     apk_path = self._apk_path
     aapt_path = toolchain.get_tool('java', 'aapt')
     output = subprocess.check_output([aapt_path, 'd', 'badging', apk_path])
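     # 'aapt d badging' prints a first line like:
     #   package: name='...' versionCode='...' versionName='...'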
     m = self._package_re.search(output)
     if not m:
         sys.exit('Cannot find package in aapt output for ' + apk_path)
     self.package_name = m.group(1)
     if m.group(2) == '':
         self.version_code = 0
     else:
         self.version_code = int(m.group(2))
     self.version_name = m.group(3)
Example #19
 def _run_aapt(self):
   apk_path = self._apk_path
   aapt_path = toolchain.get_tool('java', 'aapt')
   output = subprocess.check_output([aapt_path, 'd', 'badging', apk_path])
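   # 'aapt d badging' prints a first line like:
   #   package: name='...' versionCode='...' versionName='...'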
   m = self._package_re.search(output)
   if not m:
     sys.exit('Cannot find package in aapt output for ' + apk_path)
   self.package_name = m.group(1)
   if m.group(2) == '':
     self.version_code = 0
   else:
     self.version_code = int(m.group(2))
   self.version_name = m.group(3)
Example #20
def get_defined_symbols(filename):
    output = subprocess.check_output([
        toolchain.get_tool(OPTIONS.target(), 'nm'), '--defined-only', '-D',
        filename
    ])
    syms = set()
    for line in output.splitlines():
        toks = line.split()
        # Empty lines or a filename line.
        if len(toks) <= 1:
            continue
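        # Remaining lines have the form '<address> <type> <name>'.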
        addr, sym_type, name = line.split()
        syms.add(name)
    return syms
Example #21
    def __init__(self,
                 suite_runner,
                 additional_launch_chrome_opts=None,
                 rebuild_crx=False):
        if additional_launch_chrome_opts is None:
            additional_launch_chrome_opts = []
        self._suite_runner = suite_runner
        self._name = suite_runner.name
        self._additional_launch_chrome_opts = additional_launch_chrome_opts[:]
        if not rebuild_crx:
            self._additional_launch_chrome_opts.append('--nocrxbuild')

        self._adb = toolchain.get_tool('host', 'adb')
        self._has_error = False
        self._thread = None
Example #22
def _run_chrome(parsed_args, **kwargs):
  if parsed_args.logcat is not None:
    # The adb process will be terminated by the atexit handler registered
    # in signal_util.setup().
    subprocess.Popen(
        [toolchain.get_tool('host', 'adb'), 'logcat'] + parsed_args.logcat)

  params = _compute_chrome_params(parsed_args)
  gdb_util.create_or_remove_bare_metal_gdb_lock_dir(parsed_args.gdb)

  # As with the adb subprocess, using atexit has a timing issue. See the
  # comment above for details.
  chrome_timeout = _select_chrome_timeout(parsed_args)
  for i in xrange(parsed_args.chrome_flakiness_retry + 1):
    if i > 0:
      logging.error('Chrome is flaky. Retrying...: %d', i)

    p = chrome_process.ChromeProcess(params, timeout=chrome_timeout)
    atexit.register(_terminate_chrome, p)

    gdb_util.maybe_launch_gdb(parsed_args.gdb, parsed_args.gdb_type, p.pid)
    jdb_util.maybe_launch_jdb(parsed_args.jdb_port, parsed_args.jdb_type)

    # Write the PID to a file, so that other launch_chrome processes sharing
    # the same user data can find this process. In the common case, the file
    # will be removed by _terminate_chrome() defined above.
    file_util.makedirs_safely(_USER_DATA_DIR)
    with open(_CHROME_PID_PATH, 'w') as pid_file:
      pid_file.write('%d\n' % p.pid)

    stats = startup_stats.StartupStats()
    handler = _select_output_handler(parsed_args, stats, p, **kwargs)

    # Wait for the process to finish or us to be interrupted.
    try:
      returncode = p.handle_output(handler)
    except output_handler.ChromeFlakinessError:
      # Chrome is terminated due to its flakiness. Retry.
      continue

    if returncode:
      sys.exit(returncode)
    return stats

  # Here, the Chrome flakiness failure has continued too many times.
  # Terminate the script.
  logging.error('Chrome is too flaky; the retry limit has been reached.')
  sys.exit(1)
Example #23
def _launch_gdb(title, pid_string, gdb_type):
    """Launches GDB for a non-plugin process."""
    host_gdb = toolchain.get_tool('host', 'gdb')
    command = ['-p', pid_string]
    if title in ('gpu', 'renderer'):
        command.extend(['-ex', r'echo To start: signal SIGUSR1\n'])
    if gdb_type == 'xterm':
        command = _get_xterm_gdb(title, host_gdb, command)
    elif gdb_type == 'screen':
        command = _get_screen_gdb(title, host_gdb, command)
    elif gdb_type == 'emacsclient':
        command = _get_emacsclient_gdb(title, host_gdb, command)
    gdb_process = subprocess.Popen(command)

    if gdb_type == 'xterm':
        _run_gdb_watch_thread(gdb_process)
Example #24
def _launch_gdb(title, pid_string, gdb_type):
  """Launches GDB for a non-plugin process."""
  host_gdb = toolchain.get_tool('host', 'gdb')
  command = ['-p', pid_string]
  if title in ('gpu', 'renderer'):
    command.extend(['-ex', r'echo To start: signal SIGUSR1\n'])
  if gdb_type == 'xterm':
    command = _get_xterm_gdb(title, host_gdb, command)
  elif gdb_type == 'screen':
    command = _get_screen_gdb(title, host_gdb, command)
  elif gdb_type == 'emacsclient':
    command = _get_emacsclient_gdb(title, host_gdb, command)
  gdb_process = subprocess.Popen(command)

  if gdb_type == 'xterm':
    _run_gdb_watch_thread(gdb_process)
Example #25
def make_table_of_contents(target, input_so_path):
    # List only external dynamic symbols, as requested by '-g' and '-D'.
    # Use the POSIX output format, requested by "-f p", since it is the
    # easiest to parse for our purposes.
    external_symbols = subprocess.check_output(
        [toolchain.get_tool(target, 'nm'), '-gD', '-f', 'p', input_so_path])
    symbols = []
    for line in external_symbols.splitlines():
        # |line| should contain:
        # <symbol name> <symbol type> <address>
        # Put symbol names and symbol types into the TOC file.
        # Drop the address part, since changing it does not require relinking
        # binaries that are dynamically linked against |input_so_path|.
        symbols.append(' '.join(line.split(' ')[:2]))

    return '\n'.join(symbols)
Example #26
def _run_gdb_for_bare_metal(runner_args, test_args):
  gdb = toolchain.get_tool(build_options.OPTIONS.target(), 'gdb')
  bare_metal_loader_index = runner_args.index(
      toolchain.get_nonsfi_loader())

  gdb_command = _get_gdb_command_to_inject_bare_metal_gdb_py(test_args[0])

  args = (runner_args[:bare_metal_loader_index] +
          [gdb] +
          gdb_command +
          gdb_util.get_args_for_stlport_pretty_printers() +
          ['-ex',
           'echo \n*** Type \'run\' or \'r\' to start debugging ***\n\n',
           '--args'] +
          runner_args[bare_metal_loader_index:] +
          test_args)
  subprocess.call(args)
Example #27
def make_table_of_contents(target, input_so_path):
  # List only external dynamic symbols, as requested by '-g' and '-D'.
  # Use the POSIX output format, requested by "-f p", since it is the easiest
  # to parse for our purposes.
  external_symbols = subprocess.check_output([
      toolchain.get_tool(target, 'nm'), '-gD', '-f', 'p', input_so_path
  ])
  symbols = []
  for line in external_symbols.splitlines():
    # |line| should contain:
    # <symbol name> <symbol type> <address>
    # Put symbol names and symbol types into the TOC file.
    # Drop the address part, since changing it does not require relinking
    # binaries that are dynamically linked against |input_so_path|.
    symbols.append(' '.join(line.split(' ')[:2]))

  return '\n'.join(symbols)
Example #28
def _run_gdb_for_nacl(args, test_args):
  runnable_ld = args[-1]
  assert 'runnable-ld.so' in runnable_ld
  # Insert -g flag before -a to let sel_ldr wait for GDB.
  a_index = args.index('-a')
  assert 'sel_ldr' in args[a_index - 1]
  args.insert(a_index, '-g')
  args.extend(test_args)
  # The child process calls setsid(2) to create a new session so that
  # sel_ldr does not die on Ctrl-C either. Note that ignoring SIGINT
  # does not work for sel_ldr, because sel_ldr overrides the setting.
  sel_ldr_proc = subprocess.Popen(args, stderr=subprocess.STDOUT,
                                  preexec_fn=os.setsid)

  gdb = toolchain.get_tool(build_options.OPTIONS.target(), 'gdb')
  irt = toolchain.get_nacl_irt_core(build_options.OPTIONS.get_target_bitsize())

  # The Bionic loader only provides the base name of each loaded binary in L
  # for 32-bit platforms due to a compatibility issue.
  # ARC keeps the behavior and provides full path information for debugging
  # explicitly. See SEARCH_NAME() in mods/android/bionic/linker/linker.cpp.
  # DSOs are covered by build_common.get_load_library_path(), but full path
  # information for test main binary should be specified separately.
  #
  # Note GDB uses NaCl manifest for arc.nexe so we do not need the library
  # search paths for launch_chrome.
  solib_paths = [build_common.get_load_library_path_for_test(),
                 build_common.get_load_library_path(),
                 os.path.dirname(test_args[0])]

  args = [
      gdb,
      '-ex', 'target remote :4014',
      '-ex', 'nacl-irt %s' % irt,
      # Bionic does not pass the full path of a shared object to the
      # debugger. Fixing this issue by modifying the Bionic loader
      # would need a bunch of ARC MOD. We work around the issue by
      # passing the paths of shared objects here.
      '-ex', 'set solib-search-path %s' % ':'.join(solib_paths),
      '-ex',
      'echo \n*** Type \'continue\' or \'c\' to start debugging ***\n\n',
      runnable_ld]
  subprocess.call(args)
  sel_ldr_proc.kill()
Example #29
def _check_javac_version():
    # Stamp file should keep the last modified time of the java binary.
    javac_path = distutils.spawn.find_executable(
        toolchain.get_tool('java', 'javac'))
    stamp_file = build_common.StampFile(
        '%s %f' % (javac_path, os.path.getmtime(javac_path)),
        build_common.get_javac_revision_file())
    if stamp_file.is_up_to_date():
        return

    want_version = '1.7.'
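    # javac writes its version string to stderr, so merge stderr into stdout
    # to capture it.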
    javac_version = subprocess.check_output([javac_path, '-version'],
                                            stderr=subprocess.STDOUT)
    if want_version not in javac_version:
        print '\nWARNING: You are not using Java 7.',
        print 'Installed version:', javac_version.strip()
        print 'See docs/getting-java.md.\n'
    else:
        stamp_file.update()
Example #30
def _check_javac_version():
  # Stamp file should keep the last modified time of the java binary.
  javac_path = distutils.spawn.find_executable(
      toolchain.get_tool('java', 'javac'))
  stamp_file = build_common.StampFile(
      '%s %f' % (javac_path, os.path.getmtime(javac_path)),
      build_common.get_javac_revision_file())
  if stamp_file.is_up_to_date():
    return

  want_version = '1.7.'
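  # javac writes its version string to stderr, so merge stderr into stdout
  # to capture it.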
  javac_version = subprocess.check_output(
      [javac_path, '-version'], stderr=subprocess.STDOUT)
  if want_version not in javac_version:
    print '\nWARNING: You are not using Java 7.',
    print 'Installed version:', javac_version.strip()
    print 'See docs/getting-java.md.\n'
  else:
    stamp_file.update()
Example #31
def _parse_apk(apk_path):
  """Parses the XML meta data of classes.dex in the given .apk file."""
  parsed_data = subprocess.check_output([
      toolchain.get_tool('java', 'dexdump'), apk_path, '-lxml'])
  return cElementTree.fromstring(parsed_data)
Example #32
def _parse_apk(apk_path):
    """Parses the XML meta data of classes.dex in the given .apk file."""
    parsed_data = subprocess.check_output(
        [toolchain.get_tool('java', 'dexdump'), apk_path, '-lxml'])
    return cElementTree.fromstring(parsed_data)