Example #1
0
def DownloadAndRunCTS(args, test_runner_args):
  """Fetches the CTS APK if needed, then runs it via the test runner.

  The APK is looked for under args.apk_dir when given; otherwise it is
  downloaded from Google Storage into a throwaway temp directory that is
  removed afterwards.

  Args:
    args: Parsed options providing arch, platform, apk_dir and
        skip_expected_failures.
    test_runner_args: List of extra test-runner arguments; extended in place.

  Returns:
    Exit code of the instrumentation test-runner invocation.

  Raises:
    Exception: If downloading the CTS APK from Google Storage fails.
  """
  cts_root = None
  cleanup_needed = False
  try:
    relative_cts_path = GetCtsPath(args.arch, args.platform)

    if args.apk_dir:
      cts_root = args.apk_dir
    else:
      cts_root = tempfile.mkdtemp()
      cleanup_needed = True

    local_cts_path = os.path.join(cts_root, relative_cts_path)
    remote_cts_path = '%s/%s' % (_CTS_BUCKET, relative_cts_path)

    # Download CTS APK if needed.
    if not os.path.exists(local_cts_path):
      download_cmd = [_GSUTIL_PATH, 'cp', remote_cts_path, local_cts_path]
      if cmd_helper.RunCmd(download_cmd):
        raise Exception('Error downloading CTS from Google Storage.')

    test_runner_args.extend(['--test-apk', local_cts_path])
    # TODO(mikecase): This doesn't work at all with the
    # --gtest-filter test runner option currently. The
    # filter options will just override eachother.
    if args.skip_expected_failures:
      test_runner_args.extend(['-f=-%s' % ':'.join(GetExpectedFailures())])
    runner_cmd = [_TEST_RUNNER_PATH, 'instrumentation'] + test_runner_args
    return cmd_helper.RunCmd(runner_cmd)
  finally:
    if cleanup_needed and cts_root:
      shutil.rmtree(cts_root)
Example #2
0
def main(argv):
    """Runs (or re-prints results for) sharded perf steps via test_runner.py.

    Returns the exit code of the underlying test_runner.py invocation.
    """
    parser = optparse.OptionParser()
    parser.add_option('-s',
                      '--steps',
                      help='A JSON file containing all the steps to be '
                      'sharded.')
    parser.add_option('--flaky_steps',
                      help='A JSON file containing steps that are flaky and '
                      'will have its exit code ignored.')
    parser.add_option('-p',
                      '--print_results',
                      help='Only prints the results for the previously '
                      'executed step, do not run it again.')
    options, _ = parser.parse_args(argv)

    runner = 'build/android/test_runner.py'
    # Print-only mode: just re-emit the results of a prior step.
    if options.print_results:
        return cmd_helper.RunCmd(
            [runner, 'perf', '--print-step', options.print_results])

    cmd = [runner, 'perf', '-v', '--steps', options.steps]
    if options.flaky_steps:
        cmd += ['--flaky-steps', options.flaky_steps]
    return cmd_helper.RunCmd(cmd)
Example #3
0
  def testPushAndDeleteFiles_noSubDir(self):
    """Push with stale deletion mirrors a flat host dir onto the device.

    A modified host file should be updated on the device and a host-deleted
    file should be removed from the device.
    """
    host_tmp_dir = tempfile.mkdtemp()
    (host_file_path1, file_name1) = self._MakeTempFileGivenDir(
        host_tmp_dir, _OLD_CONTENTS)
    (host_file_path2, file_name2) = self._MakeTempFileGivenDir(
        host_tmp_dir, _OLD_CONTENTS)

    device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
    device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
    self.adb.Push(host_file_path1, device_file_path1)
    self.adb.Push(host_file_path2, device_file_path2)

    # Mutate the host state: rewrite file 1, delete file 2.
    with open(host_file_path1, 'w') as f:
      f.write(_NEW_CONTENTS)
    cmd_helper.RunCmd(['rm', host_file_path2])

    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
                                   delete_device_stale=True)
    # File 1 on the device should now have the new contents.
    result = self.device.RunShellCommand(
        ['cat', device_file_path1], check_return=True, single_line=True)
    self.assertEqual(_NEW_CONTENTS, result)

    # File 2 should have been removed from the device as stale.
    filenames = self.device.ListDirectory(_DEVICE_DIR)
    self.assertEqual([file_name1], filenames)

    # Clean up host and device temp state.
    cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
    self.device.RemovePath(_DEVICE_DIR, recursive=True, force=True)
    def testPushAndDeleteFiles_SubDir(self):
        """Stale deletion works across nested sub-directories.

        Pushes files at the top level and in two sub-directories, then
        verifies PushChangedFiles(delete_device_stale=True) updates a
        changed file and removes host-deleted files at every depth.
        """
        host_tmp_dir = tempfile.mkdtemp()
        host_sub_dir1 = "%s/%s" % (host_tmp_dir, _SUB_DIR1)
        host_sub_dir2 = "%s/%s/%s" % (host_tmp_dir, _SUB_DIR, _SUB_DIR2)
        cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir1])
        cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir2])

        (host_file_path1,
         file_name1) = self._MakeTempFileGivenDir(host_tmp_dir, _OLD_CONTENTS)
        (host_file_path2,
         file_name2) = self._MakeTempFileGivenDir(host_tmp_dir, _OLD_CONTENTS)
        (host_file_path3,
         file_name3) = self._MakeTempFileGivenDir(host_sub_dir1, _OLD_CONTENTS)
        (host_file_path4,
         file_name4) = self._MakeTempFileGivenDir(host_sub_dir2, _OLD_CONTENTS)

        device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
        device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
        device_file_path3 = "%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR1, file_name3)
        device_file_path4 = "%s/%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR, _SUB_DIR2,
                                             file_name4)

        self.adb.Push(host_file_path1, device_file_path1)
        self.adb.Push(host_file_path2, device_file_path2)
        self.adb.Push(host_file_path3, device_file_path3)
        self.adb.Push(host_file_path4, device_file_path4)

        # Mutate host state: rewrite file 1; delete file 2 (top level) and
        # file 4 (nested sub-directory).
        with open(host_file_path1, 'w') as f:
            f.write(_NEW_CONTENTS)
        cmd_helper.RunCmd(['rm', host_file_path2])
        cmd_helper.RunCmd(['rm', host_file_path4])

        self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
                                     delete_device_stale=True)
        result = self.device.RunShellCommand(['cat', device_file_path1],
                                             check_return=True,
                                             single_line=True)
        self.assertEqual(_NEW_CONTENTS, result)

        # Top level keeps file 1 plus both sub-dirs; file 2 is gone.
        filenames = self.device.ListDirectory(_DEVICE_DIR)
        self.assertIn(file_name1, filenames)
        self.assertIn(_SUB_DIR1, filenames)
        self.assertIn(_SUB_DIR, filenames)
        self.assertEqual(3, len(filenames))

        # Untouched nested file 3 keeps its original contents.
        result = self.device.RunShellCommand(['cat', device_file_path3],
                                             check_return=True,
                                             single_line=True)
        self.assertEqual(_OLD_CONTENTS, result)

        # The deepest directory should now be empty (file 4 was stale).
        filenames = self.device.ListDirectory(
            posixpath.join(_DEVICE_DIR, _SUB_DIR, _SUB_DIR2))
        self.assertEqual([], filenames)

        cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
        self.device.RemovePath(_DEVICE_DIR, recursive=True, force=True)
    def testPushAndDeleteFiles_SubDir(self):
        """Stale deletion works across nested sub-directories.

        Older variant of the test that inspects the device with raw
        `ls`/`cat` shell commands instead of the ListDirectory/RemovePath
        helpers used by the newer version.
        """
        host_tmp_dir = tempfile.mkdtemp()
        host_sub_dir1 = "%s/%s" % (host_tmp_dir, _SUB_DIR1)
        host_sub_dir2 = "%s/%s/%s" % (host_tmp_dir, _SUB_DIR, _SUB_DIR2)
        cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir1])
        cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir2])

        (host_file_path1,
         file_name1) = self._MakeTempFileGivenDir(host_tmp_dir, _OLD_CONTENTS)
        (host_file_path2,
         file_name2) = self._MakeTempFileGivenDir(host_tmp_dir, _OLD_CONTENTS)
        (host_file_path3,
         file_name3) = self._MakeTempFileGivenDir(host_sub_dir1, _OLD_CONTENTS)
        (host_file_path4,
         file_name4) = self._MakeTempFileGivenDir(host_sub_dir2, _OLD_CONTENTS)

        device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
        device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
        device_file_path3 = "%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR1, file_name3)
        device_file_path4 = "%s/%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR, _SUB_DIR2,
                                             file_name4)

        self.adb.Push(host_file_path1, device_file_path1)
        self.adb.Push(host_file_path2, device_file_path2)
        self.adb.Push(host_file_path3, device_file_path3)
        self.adb.Push(host_file_path4, device_file_path4)

        # Mutate host state: rewrite file 1; delete files 2 and 4.
        with open(host_file_path1, 'w') as f:
            f.write(_NEW_CONTENTS)
        cmd_helper.RunCmd(['rm', host_file_path2])
        cmd_helper.RunCmd(['rm', host_file_path4])

        self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
                                     delete_device_stale=True)
        result = self.device.RunShellCommand(['cat', device_file_path1],
                                             single_line=True)
        self.assertEqual(_NEW_CONTENTS, result)

        # Top level keeps file 1 plus both sub-dirs; file 2 is gone.
        result = self.device.RunShellCommand(['ls', _DEVICE_DIR])
        self.assertIn(file_name1, result)
        self.assertIn(_SUB_DIR1, result)
        self.assertIn(_SUB_DIR, result)
        self.assertEqual(3, len(result))

        # Untouched nested file 3 keeps its original contents.
        result = self.device.RunShellCommand(['cat', device_file_path3],
                                             single_line=True)
        self.assertEqual(_OLD_CONTENTS, result)

        # The deepest directory should now be empty (file 4 was stale).
        result = self.device.RunShellCommand(
            ["ls", "%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR, _SUB_DIR2)],
            single_line=True)
        self.assertEqual('', result)

        self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
        cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
Example #6
0
  def ResizeAndWipeAvd(self, storage_size):
    """Wipes old AVD and creates new AVD of size |storage_size|.

    This serves as a work around for '-partition-size' and '-wipe-data'
    """
    avd_dir = os.path.join(_BASE_AVD_DIR, '%s.avd' % self.avd_name)
    userdata_img = os.path.join(avd_dir, 'userdata.img')
    userdata_qemu_img = os.path.join(avd_dir, 'userdata-qemu.img')

    logging.info('Resizing userdata.img to ideal size')
    cmd_helper.RunCmd(['resize2fs', userdata_img, '%s' % storage_size])

    logging.info('Replacing userdata-qemu.img with the new userdata.img')
    cmd_helper.RunCmd(['cp', userdata_img, userdata_qemu_img])
Example #7
0
def Dump(jar_path):
    """Dumps class and method information from a JAR into a dict via proguard.

    Args:
      jar_path: An absolute path to the JAR file to dump.
    Returns:
      A dict in the following format:
        {
          'classes': [
            {
              'class': '',
              'superclass': '',
              'annotations': {},
              'methods': [
                {
                  'method': '',
                  'annotations': {},
                },
                ...
              ],
            },
            ...
          ],
        }
    """

    with tempfile.NamedTemporaryFile() as proguard_output:
        # Run proguard in pass-through mode (all -dont* flags) purely to
        # produce a -dump of the JAR's class structure into the temp file.
        # The subprocess writes via the file *name*; our handle still reads
        # from offset 0 afterwards, so Parse sees the full dump.
        cmd_helper.RunCmd([
            'java', '-jar', _PROGUARD_PATH, '-injars', jar_path, '-dontshrink',
            '-dontoptimize', '-dontobfuscate', '-dontpreverify', '-dump',
            proguard_output.name
        ])
        return Parse(proguard_output)
Example #8
0
def exists(name, bucket):
  """Returns True iff gs://<bucket>/<name> exists.

  `gsutil stat` exits with 0 only when the object is present.
  """
  gs_path = 'gs://%s/%s' % (_format_bucket_name(bucket), name)
  return cmd_helper.RunCmd([_GSUTIL_PATH, '-q', 'stat', gs_path]) == 0
Example #9
0
def upload(name, filepath, bucket, gs_args=None, command_args=None,
           content_type=None, authenticated_link=True):
  """Uploads data to Google Storage.

  Args:
    name: Name of the file on Google Storage.
    filepath: Path to file you want to upload.
    bucket: Bucket to upload file to.
    gs_args: Optional list of extra top-level gsutil arguments (inserted
        before the `cp` subcommand).
    command_args: Optional list of extra arguments for the `cp` subcommand.
    content_type: Content type to upload as. If not specified, Google storage
        will attempt to infer content type from file extension.
    authenticated_link: Whether to return a link that requires user to
        authenticate with a Google account. Setting this to false will return
        a link that does not require user to be signed into Google account but
        will only work for completely public storage buckets.
  Returns:
    Web link to item uploaded to Google Storage bucket.
  """
  bucket = _format_bucket_name(bucket)

  gs_path = 'gs://%s/%s' % (bucket, name)
  logging.info('Uploading %s to %s', filepath, gs_path)

  # Builds: gsutil -q [gs_args] [-h Content-Type:<type>] cp [command_args]
  #         <filepath> <gs_path>
  cmd = [_GSUTIL_PATH, '-q']
  cmd.extend(gs_args or [])
  if content_type:
    cmd.extend(['-h', 'Content-Type:%s' % content_type])
  cmd.extend(['cp'] + (command_args or []) + [filepath, gs_path])

  cmd_helper.RunCmd(cmd)

  return get_url_link(name, bucket, authenticated_link)
    def RunTest(self, _test):
        """Runs junit tests from |self._test_suite|.

        Args:
          _test: Unused; present to satisfy the runner interface.

        Returns:
          Tuple of (parsed results list, subprocess return code).
        """
        with tempfile.NamedTemporaryFile() as json_file:
            java_script = os.path.join(constants.GetOutDirectory(), 'bin',
                                       'helper', self._test_suite)
            command = [java_script]

            # Add Jar arguments.
            jar_args = [
                '-test-jars', self._test_suite + '.jar', '-json-results-file',
                json_file.name
            ]
            if self._test_filter:
                jar_args.extend(['-gtest-filter', self._test_filter])
            if self._package_filter:
                jar_args.extend(['-package-filter', self._package_filter])
            if self._runner_filter:
                jar_args.extend(['-runner-filter', self._runner_filter])
            if self._sdk_version:
                jar_args.extend(['-sdk-version', self._sdk_version])
            # Jar args are passed to the helper script as one quoted string.
            command.extend(['--jar-args', '"%s"' % ' '.join(jar_args)])

            # Add JVM arguments.
            jvm_args = []
            if self._coverage_dir:
                jvm_args.append('-Demma.coverage.out.file=%s' %
                                self._coverage_dir)
            if jvm_args:
                command.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])

            # The helper writes JSON results to |json_file.name|; parse them
            # after the process exits.
            return_code = cmd_helper.RunCmd(command)
            results_list = json_results.ParseResultsFromJson(
                json.loads(json_file.read()))
            return (results_list, return_code)
Example #11
0
def cipd_download(cipd, version, download_dir):
  """Downloads CIPD package files.

  This is different from cipd ensure in that actual files will exist at
  download_dir instead of symlinks.

  Args:
    cipd: CTSCIPDYaml object
    version: Version of package
    download_dir: Destination directory

  Raises:
    IOError: If copying a downloaded file into download_dir fails.
  """
  package = cipd.get_package()
  download_dir_abs = os.path.abspath(download_dir)
  if not os.path.isdir(download_dir_abs):
    os.makedirs(download_dir_abs)
  # Ensure into a scratch directory, then copy real files out of it into
  # download_dir (creating any intermediate directories as needed).
  with tempfile_ext.NamedTemporaryDirectory() as workDir, chdir(workDir):
    cipd_ensure(package, version, '.')
    for file_name in cipd.get_files():
      src_path = os.path.join(_ENSURE_SUBDIR, file_name)
      dest_path = os.path.join(download_dir_abs, file_name)
      dest_dir = os.path.dirname(dest_path)
      if not os.path.isdir(dest_dir):
        os.makedirs(dest_dir)
      # --reflink=never forces a full byte copy rather than a CoW clone,
      # so the result is an independent regular file.
      ret = cmd_helper.RunCmd(['cp', '--reflink=never', src_path, dest_path])
      if ret:
        raise IOError('Error file copy from ' + file_name + ' to ' + dest_path)
Example #12
0
File: avd.py Project: wzis/chromium
  def _EditConfigs(self):
    """Updates the AVD's config.ini with SD-card settings.

    Loads the existing config.ini (if any), forces hw.sdCard on, creates an
    SD-card image with mksdcard when a size is configured and the image does
    not already exist, then rewrites config.ini.
    """
    android_avd_home = os.path.join(self._emulator_home, 'avd')
    avd_dir = os.path.join(android_avd_home, '%s.avd' % self._config.avd_name)

    config_path = os.path.join(avd_dir, 'config.ini')
    if os.path.exists(config_path):
      with open(config_path) as config_file:
        config_contents = ini.load(config_file)
    else:
      # No existing config: start from an empty mapping.
      config_contents = {}

    config_contents['hw.sdCard'] = 'true'
    if self.avd_settings.sdcard.size:
      sdcard_path = os.path.join(avd_dir, 'cr-sdcard.img')
      if not os.path.exists(sdcard_path):
        # mksdcard ships alongside the emulator binary.
        mksdcard_path = os.path.join(
            os.path.dirname(self._emulator_path), 'mksdcard')
        mksdcard_cmd = [
            mksdcard_path,
            self.avd_settings.sdcard.size,
            sdcard_path,
        ]
        cmd_helper.RunCmd(mksdcard_cmd)

      config_contents['hw.sdCard.path'] = sdcard_path

    with open(config_path, 'w') as config_file:
      ini.dump(config_contents, config_file)
Example #13
0
def Dump(apk_path):
  """Dumps class and method information from a APK into a dict via dexdump.

  Args:
    apk_path: An absolute path to an APK file to dump.
  Returns:
    A dict in the following format:
      {
        <package_name>: {
          'classes': {
            <class_name>: {
              'methods': [<method_1>, <method_2>]
            }
          }
        }
      }
  """
  # TODO(mikecase): Support multi-dex
  # Create the temp dir *before* entering the try block. In the original
  # code mkdtemp() was inside the try, so a mkdtemp failure reached the
  # finally clause with |dexfile_dir| unbound and raised a NameError that
  # masked the real error.
  dexfile_dir = tempfile.mkdtemp()
  try:
    # Python zipfile module is unable to unzip APKs.
    cmd_helper.RunCmd(['unzip', apk_path, 'classes.dex'], cwd=dexfile_dir)
    dexfile = os.path.join(dexfile_dir, 'classes.dex')
    output_xml = cmd_helper.GetCmdOutput([DEXDUMP_PATH, '-l', 'xml', dexfile])
    return _ParseRootNode(ElementTree.fromstring(output_xml))
  finally:
    shutil.rmtree(dexfile_dir)
  def testPushWithStaleDirectories(self):
    """A directory removed on the host is removed from the device on push."""
    # Make a few files and directories to push.
    host_tmp_dir = tempfile.mkdtemp()
    host_sub_dir1 = '%s/%s' % (host_tmp_dir, _SUB_DIR1)
    host_sub_dir2 = "%s/%s/%s" % (host_tmp_dir, _SUB_DIR, _SUB_DIR2)
    os.makedirs(host_sub_dir1)
    os.makedirs(host_sub_dir2)

    self._MakeTempFileGivenDir(host_sub_dir1, _OLD_CONTENTS)
    self._MakeTempFileGivenDir(host_sub_dir2, _OLD_CONTENTS)

    # Push all our created files/directories and verify they're on the device.
    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
                                 delete_device_stale=True)
    top_level_dirs = self.device.ListDirectory(_DEVICE_DIR)
    self.assertIn(_SUB_DIR1, top_level_dirs)
    self.assertIn(_SUB_DIR, top_level_dirs)
    sub_dir = self.device.ListDirectory('%s/%s' % (_DEVICE_DIR, _SUB_DIR))
    self.assertIn(_SUB_DIR2, sub_dir)

    # Remove one of the directories on the host and push again.
    cmd_helper.RunCmd(['rm', '-rf', host_sub_dir2])
    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
                                 delete_device_stale=True)

    # Verify that the directory we removed is no longer on the device, but the
    # other directories still are.
    top_level_dirs = self.device.ListDirectory(_DEVICE_DIR)
    self.assertIn(_SUB_DIR1, top_level_dirs)
    self.assertIn(_SUB_DIR, top_level_dirs)
    sub_dir = self.device.ListDirectory('%s/%s' % (_DEVICE_DIR, _SUB_DIR))
    self.assertEqual([], sub_dir)
Example #15
0
def RunCTS(test_runner_args, local_cts_dir, apk, test_filter,
           skip_expected_failures=True, json_results_file=None):
  """Run tests in apk using test_runner script at _TEST_RUNNER_PATH.

  Returns the script result code,
  tests expected to fail will be skipped unless skip_expected_failures
  is set to False, test results will be stored in
  the json_results_file file if specified
  """
  # Work on a copy so the caller's list is never mutated.
  runner_args = test_runner_args + ['--test-apk',
                                    os.path.join(local_cts_dir, apk)]

  # TODO(mikecase): This doesn't work at all with the
  # --gtest-filter test runner option currently. The
  # filter options will just override eachother.
  if skip_expected_failures:
    runner_args.append('-f=-%s' % ':'.join(GetExpectedFailures()))
  # The preferred method is to specify test filters per release in
  # the CTS_GCS path file.  It will override any
  # previous filters, including ones in expected failures
  # file.
  if test_filter:
    runner_args.append('-f=' + test_filter)
  if json_results_file:
    runner_args.append('--json-results-file=%s' % json_results_file)
  return cmd_helper.RunCmd([_TEST_RUNNER_PATH, 'instrumentation'] +
                           runner_args)
Example #16
0
  def testDeleteFiles(self):
    """A file deleted on the host is removed from the device on push."""
    host_tmp_dir = tempfile.mkdtemp()
    (host_file_path, file_name) = self._MakeTempFileGivenDir(
        host_tmp_dir, _OLD_CONTENTS)

    device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
    self.adb.Push(host_file_path, device_file_path)

    # Delete the host copy, then push with stale deletion enabled; the
    # device directory should end up empty.
    cmd_helper.RunCmd(['rm', host_file_path])
    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
                                 delete_device_stale=True)
    filenames = self.device.ListDirectory(_DEVICE_DIR)
    self.assertEqual([], filenames)

    # Clean up host and device temp state.
    cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
    self.device.RemovePath(_DEVICE_DIR, recursive=True, force=True)
def InstallKVM():
  """Installs KVM packages."""
  if cmd_helper.RunCmd(['sudo', 'apt-get', 'install', 'kvm']):
    logging.critical('ERROR: Did not install KVM. Make sure hardware '
                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
                     'AMD SVM).')
  # TODO(navabi): Use modprobe kvm-amd on AMD processors.
  if cmd_helper.RunCmd(['sudo', 'modprobe', 'kvm-intel']):
    logging.critical('ERROR: Did not add KVM module to Linux Kernel. Make sure '
                     'hardware virtualization is enabled in BIOS.')
  # Now check to ensure KVM acceleration can be used.
  if not RunKvmOk():
    logging.critical('ERROR: Can not use KVM acceleration. Make sure hardware '
                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
                     'AMD SVM).')
def GetSDK():
  """Download the SDK and unzip it into EMULATOR_SDK_ROOT."""
  logging.info('Download Android SDK.')
  sdk_url = '%s/%s' % (SDK_BASE_URL, SDK_ZIP)
  try:
    cmd_helper.RunCmd(['curl', '-o', '/tmp/sdk.zip', sdk_url])
    print 'curled unzipping...'
    rc = cmd_helper.RunCmd(['unzip', '-o', '/tmp/sdk.zip', '-d', '/tmp/'])
    if rc:
      raise Exception('ERROR: could not download/unzip Android SDK.')
    # Get the name of the sub-directory that everything will be extracted to.
    dirname, _ = os.path.splitext(SDK_ZIP)
    zip_dir = '/tmp/%s' % dirname
    # Move the extracted directory to EMULATOR_SDK_ROOT
    shutil.move(zip_dir, constants.EMULATOR_SDK_ROOT)
  finally:
    os.unlink('/tmp/sdk.zip')
    def testDeleteFiles(self):
        """A file deleted on the host is removed from the device on push.

        Older variant that inspects the device with a raw `ls` shell
        command instead of the ListDirectory helper.
        """
        host_tmp_dir = tempfile.mkdtemp()
        (host_file_path,
         file_name) = self._MakeTempFileGivenDir(host_tmp_dir, _OLD_CONTENTS)

        device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
        self.adb.Push(host_file_path, device_file_path)

        # Delete the host copy, then push with stale deletion enabled; the
        # device directory should end up empty.
        cmd_helper.RunCmd(['rm', host_file_path])
        self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
                                     delete_device_stale=True)
        result = self.device.RunShellCommand(['ls', _DEVICE_DIR],
                                             single_line=True)
        self.assertEqual('', result)

        # Clean up host and device temp state.
        cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
        self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
def main():
  """Generates an EMMA HTML coverage report from .ec/.em files.

  Collects coverage (.ec) and metadata (.em) files from the given
  directories, resolves source paths from the per-metadata _sources.txt
  files, invokes `emma report`, and optionally cleans up runtime coverage
  files. Returns the process exit code.
  """
  option_parser = optparse.OptionParser()
  option_parser.add_option('--output', help='HTML output filename.')
  option_parser.add_option('--coverage-dir', default=None,
                           help=('Root of the directory in which to search for '
                                 'coverage data (.ec) files.'))
  option_parser.add_option('--metadata-dir', default=None,
                           help=('Root of the directory in which to search for '
                                 'coverage metadata (.em) files.'))
  option_parser.add_option('--cleanup', action='store_true',
                           help=('If set, removes coverage files generated at '
                                 'runtime.'))
  options, _ = option_parser.parse_args()

  devil_chromium.Initialize()

  if not (options.coverage_dir and options.metadata_dir and options.output):
    option_parser.error('One or more mandatory options are missing.')

  coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec')
  metadata_files = _GetFilesWithExt(options.metadata_dir, 'em')
  # Filter out zero-length files. These are created by emma_instr.py when a
  # target has no classes matching the coverage filter.
  metadata_files = [f for f in metadata_files if os.path.getsize(f)]
  print 'Found coverage files: %s' % str(coverage_files)
  print 'Found metadata files: %s' % str(metadata_files)

  # Each metadata file has a sibling <name>_sources.txt listing the source
  # paths (relative to the source root) that it covers.
  sources = []
  for f in metadata_files:
    sources_file = os.path.splitext(f)[0] + '_sources.txt'
    with open(sources_file, 'r') as sf:
      sources.extend(json.load(sf))
  sources = [os.path.join(host_paths.DIR_SOURCE_ROOT, s) for s in sources]
  print 'Sources: %s' % sources

  # emma takes each input file as a separate "-in <file>" pair.
  input_args = []
  for f in coverage_files + metadata_files:
    input_args.append('-in')
    input_args.append(f)

  output_args = ['-Dreport.html.out.file', options.output]
  source_args = ['-sp', ','.join(sources)]

  exit_code = cmd_helper.RunCmd(
      ['java', '-cp',
       os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'lib', 'emma.jar'),
       'emma', 'report', '-r', 'html']
      + input_args + output_args + source_args)

  if options.cleanup:
    for f in coverage_files:
      os.remove(f)

  # Command tends to exit with status 0 when it actually failed.
  if not exit_code and not os.path.exists(options.output):
    exit_code = 1

  return exit_code
Example #21
0
def Dump(jar_path):
    """Dumps class and method information from a JAR into a dict via proguard.

    Args:
      jar_path: An absolute path to the JAR file to dump.
    Returns:
      A dict in the following format:
        {
          'classes': [
            {
              'class': '',
              'superclass': '',
              'annotations': {/* dict -- see below */},
              'methods': [
                {
                  'method': '',
                  'annotations': {/* dict -- see below */},
                },
                ...
              ],
            },
            ...
          ],
        }

      Annotations dict format:
        {
          'empty-annotation-class-name': None,
          'annotation-class-name': {
            'field': 'primitive-value',
            'field': [ 'array-item-1', 'array-item-2', ... ],
            'field': {
              /* Object value */
              'field': 'primitive-value',
              'field': [ 'array-item-1', 'array-item-2', ... ],
              'field': { /* Object value */ }
            }
          }
        }

      Note that for top-level annotations their class names are used for
      identification, whereas for any nested annotations the corresponding
      field names are used.

      One drawback of this approach is that an array containing empty
      annotation classes will be represented as an array of 'None' values,
      thus it will not be possible to find out annotation class names.
      On the other hand, storing both annotation class name and the field name
      would produce a very complex JSON.
    """

    with tempfile.NamedTemporaryFile() as proguard_output:
        # Run proguard in pass-through mode (all -dont* flags) purely to
        # produce a -dump of the JAR's class structure into the temp file,
        # then parse that dump.
        cmd_helper.RunCmd([
            'java', '-jar', _PROGUARD_PATH, '-injars', jar_path, '-dontshrink',
            '-dontoptimize', '-dontobfuscate', '-dontpreverify', '-dump',
            proguard_output.name
        ])
        return Parse(proguard_output)
def _UploadFiles(upload_dir, files):
  """Upload files to the render tests GS bucket."""
  if not files:
    return
  destination = os.path.join(_RENDER_TEST_BUCKET, upload_dir)
  gsutil = os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gsutil.py')
  # gsutil -m cp <file>... <destination>  (parallel copy of all files)
  cmd_helper.RunCmd([gsutil, '-m', 'cp'] + files + [destination])
Example #23
0
  def update_testing_json(self):
    """Runs generate_buildbot_json.py from the repository root.

    Raises:
      IOError: If generation failed.
    """
    with chdir(self._root_dir):
      if cmd_helper.RunCmd(['python', _GENERATE_BUILDBOT_JSON]):
        raise IOError('Error while generating_buildbot_json.py')
Example #24
0
    def stage_cipd_update(self):
        """Stage CIPD package for update by combining CIPD and filtered CTS files.

        It is an error to call this if filtered/ and cipd/ do not already
        exist under work_dir, or if staged already exists under work_dir.

        Raises:
          DirExistsError: If staged/ already exists in work_dir.
          MissingDirError: If filtered/ or cipd/ does not exist in work_dir.
        """
        if not os.path.isdir(self._filter_dir):
            raise MissingDirError(self._filter_dir)
        if not os.path.isdir(self._cipd_dir):
            raise DirExistsError(self._stage_dir) if False else MissingDirError(self._cipd_dir)
        if os.path.isdir(self._stage_dir):
            raise DirExistsError(self._stage_dir)
        os.makedirs(self._stage_dir)
        filtered = os.listdir(self._filter_dir)
        self._CIPDYaml.clear_files()
        # For every (platform, arch) pair, stage its CIPD zip: prefer the
        # freshly filtered download when present, otherwise reuse the copy
        # from the existing CIPD package.
        for p, a in self._CTSConfig.iter_platform_archs():
            origin_base = self._CTSConfig.get_origin_zip(p, a)
            cipd_zip = self._CTSConfig.get_cipd_zip(p, a)
            dest_path = os.path.join(self._stage_dir, cipd_zip)
            if not os.path.isdir(os.path.dirname(dest_path)):
                os.makedirs(os.path.dirname(dest_path))
            self._CIPDYaml.append_file(cipd_zip)
            if origin_base in filtered:
                logging.info(
                    'Staging downloaded and filtered version of %s to %s.',
                    origin_base, dest_path)
                cmd_helper.RunCmd([
                    'cp',
                    os.path.join(self._filter_dir, origin_base), dest_path
                ])
            else:
                logging.info('Staging reused %s to %s/',
                             os.path.join(self._cipd_dir, cipd_zip), dest_path)
                cmd_helper.RunCmd(
                    ['cp',
                     os.path.join(self._cipd_dir, cipd_zip), dest_path])
        # Write the regenerated cipd.yaml alongside the staged zips.
        self._CIPDYaml.write(
            os.path.join(self._stage_dir, self._CIPDYaml.get_file_basename()))
Example #25
0
 def _DeleteAVD(self):
   """Delete the AVD of this emulator."""
   delete_cmd = [
       self.android,
       '--silent',
       'delete',
       'avd',
       '--name',
       self.avd_name,
   ]
   logging.info('Delete AVD command: %s', ' '.join(delete_cmd))
   cmd_helper.RunCmd(delete_cmd)
Example #26
0
  def testPushChangedFiles_noFileChange(self):
    """Pushing an unchanged file leaves the device copy intact."""
    (host_file_path, file_name) = self._MakeTempFile(_OLD_CONTENTS)
    device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
    self.adb.Push(host_file_path, device_file_path)
    # Push again without modifying the host file; contents must not change.
    self.device.PushChangedFiles([(host_file_path, device_file_path)])
    result = self.device.RunShellCommand(
        ['cat', device_file_path], check_return=True, single_line=True)
    self.assertEqual(_OLD_CONTENTS, result)

    # Clean up host and device temp state.
    cmd_helper.RunCmd(['rm', host_file_path])
    self.device.RemovePath(_DEVICE_DIR, recursive=True, force=True)
Example #27
0
def Dump(jar_path):
    """Dumps class and method information from a JAR into a dict via proguard.

    Args:
      jar_path: An absolute path to the JAR file to dump.

    Returns:
      A dict in the following format:
        {
          'classes': [
            {
              'class': '',
              'superclass': '',
              'annotations': {/* dict -- see below */},
              'methods': [
                {
                  'method': '',
                  'annotations': {/* dict -- see below */},
                },
                ...
              ],
            },
            ...
          ],
        }

      Annotations dict format:
        {
          'empty-annotation-class-name': None,
          'annotation-class-name': {
            'field': 'primitive-value',
            'field': [ 'array-item-1', 'array-item-2', ... ],
            /* Object fields are not supported yet, coming soon! */
            'field': {
              /* Object value */
              'field': 'primitive-value',
              'field': [ 'array-item-1', 'array-item-2', ... ],
              'field': { /* Object value */ }
            }
          }
        }

      Note that for top-level annotations their class names are used for
      identification, whereas for any nested annotations the corresponding
      field names are used.

    Raises:
      Exception: If the proguard invocation exits with a nonzero status.
    """
    with tempfile.NamedTemporaryFile() as proguard_output:
        cmd = [
            'java', '-jar', _PROGUARD_PATH, '-injars', jar_path, '-dontshrink',
            '-dontoptimize', '-dontobfuscate', '-dontpreverify', '-dump',
            proguard_output.name
        ]
        # Previously the exit code was ignored, so a failed proguard run would
        # silently hand Parse an empty or truncated dump file. Fail loudly.
        exit_code = cmd_helper.RunCmd(cmd)
        if exit_code:
            raise Exception('proguard failed with exit code %d' % exit_code)
        return Parse(proguard_output)
Example #28
0
    def update_repository(self):
        """Update chromium checkout with changes for this update.

    After this is called, git add -u && git commit && git cl upload
    will still be needed to generate the CL.

    Raises:
      MissingFileError: If CIPD has not yet been staged or updated.
      UncommittedChangeException: If repo files have uncommitted changes.
      InconsistentFilesException: If errors are detected in staged config files.
    """
        # Both the version file and the staged CIPD yaml are prerequisites;
        # their absence means the earlier staging steps have not run.
        if not os.path.exists(self._version_file):
            raise MissingFileError(self._version_file)

        staged_yaml_path = os.path.join(self._stage_dir,
                                        self._CIPDYaml.get_file_basename())

        if not os.path.exists(staged_yaml_path):
            raise MissingFileError(staged_yaml_path)

        # New CIPD package version to pin the checkout to.
        # NOTE(review): the value is used verbatim (not .strip()ed) -- assumes
        # the file contains no trailing newline; confirm with its writer.
        with open(self._version_file) as vf:
            new_cipd_version = vf.read()
            logging.info('Read in new CIPD version %s from %s',
                         new_cipd_version, vf.name)

        repo_cipd_yaml = self._CIPDYaml.get_file_path()
        # Refuse to proceed if any file this update will touch already has
        # local modifications, so the CL stays limited to this update.
        for f in self._repo_helper.cipd_referrers + [repo_cipd_yaml]:
            git_status = self._repo_helper.git_status(f)
            if git_status:
                raise UncommittedChangeException(f)

        # Sanity check: the package named in the checkout's DEPS must match
        # the package in the staged yaml before anything is overwritten.
        repo_cipd_package = self._repo_helper.cts_cipd_package
        staged_yaml = cts_utils.CTSCIPDYaml(file_path=staged_yaml_path)
        if repo_cipd_package != staged_yaml.get_package():
            raise InconsistentFilesException(
                'Inconsistent CTS package name, {} in {}, but {} in {}'.format(
                    repo_cipd_package, cts_utils.DEPS_FILE,
                    staged_yaml.get_package(), staged_yaml.get_file_path()))

        logging.info('Updating files that reference %s under %s.',
                     cts_utils.CTS_DEP_PACKAGE, self._repo_root)
        # Bump the pinned CIPD revision everywhere it is referenced, then
        # regenerate derived config and install the staged yaml in the repo.
        self._repo_helper.update_cts_cipd_rev(new_cipd_version)
        logging.info('Regenerate buildbot json files under %s.',
                     self._repo_root)
        self._repo_helper.update_testing_json()
        logging.info('Copy staged %s to  %s.', staged_yaml_path,
                     repo_cipd_yaml)
        cmd_helper.RunCmd(['cp', staged_yaml_path, repo_cipd_yaml])
        logging.info('Ensure CIPD CTS package at %s to the new version %s',
                     repo_cipd_yaml, new_cipd_version)
        cts_utils.cipd_ensure(self._CIPDYaml.get_package(), new_cipd_version,
                              os.path.dirname(repo_cipd_yaml))
    def RunTests(self):
        """Run the suite via its generated wrapper script and parse results."""
        with tempfile.NamedTemporaryFile() as json_file:
            wrapper_script = os.path.join(constants.GetOutDirectory(), 'bin',
                                          'helper', self._test_instance.suite)
            command = [wrapper_script]

            # Arguments forwarded to the test jar.
            jar_args = [
                '-test-jars', self._test_instance.suite + '.jar',
                '-json-results-file', json_file.name
            ]
            filter_flags = [
                ('-gtest-filter', self._test_instance.test_filter),
                ('-package-filter', self._test_instance.package_filter),
                ('-runner-filter', self._test_instance.runner_filter),
            ]
            for flag, value in filter_flags:
                if value:
                    jar_args.extend([flag, value])
            command.extend(['--jar-args', '"%s"' % ' '.join(jar_args)])

            # Arguments for the JVM itself.
            # TODO(mikecase): Add a --robolectric-dep-dir arg to test runner.
            # Have this arg set by GN in the generated test runner scripts.
            jvm_args = [
                '-Drobolectric.dependency.dir=%s' %
                os.path.join(constants.GetOutDirectory(), 'lib.java',
                             'third_party', 'robolectric')
            ]
            coverage_dir = self._test_instance.coverage_dir
            if coverage_dir:
                if not os.path.exists(coverage_dir):
                    os.makedirs(coverage_dir)
                elif not os.path.isdir(coverage_dir):
                    raise Exception(
                        '--coverage-dir takes a directory, not file path.')
                jvm_args.append(
                    '-Demma.coverage.out.file=%s' %
                    os.path.join(coverage_dir,
                                 '%s.ec' % self._test_instance.suite))
            if jvm_args:
                command.extend(['--jvm-args', '"%s"' % ' '.join(jvm_args)])

            cmd_helper.RunCmd(command)
            parsed_results = json_results.ParseResultsFromJson(
                json.loads(json_file.read()))

            run_results = base_test_result.TestRunResults()
            run_results.AddResults(parsed_results)

            return [run_results]
def BuildTargetsForCpu(targets, cpu, output_dir):
    """Run `gn gen` for |cpu| and build |targets| into |output_dir|."""
    logging.info('Building %s', cpu)

    args_string = ' '.join([
        'ffmpeg_branding="Chrome"',
        'is_component_build=false',
        'is_debug=false',
        'proprietary_codecs=true',
        'symbol_level=1',
        'target_cpu="%s"' % cpu,
        'target_os="android"',
        'use_goma=true',
    ])

    gen_cmd = ['gn', 'gen', '--args=%s' % args_string, output_dir]
    gen_rc = cmd_helper.RunCmd(gen_cmd)
    if gen_rc:
        raise base_error.BaseError('%s failed with %d' % (gen_cmd, gen_rc))

    build_rc = cmd_helper.RunCmd(['autoninja', '-C', output_dir] + targets)
    if build_rc:
        raise base_error.BaseError('building %s failed with %d' %
                                   (cpu, build_rc))