Example #1
  def testGSUtilCopyDirWithQuietFlag(
      self,  # pylint: disable=no-self-use
      run_command_mock):
    slave_utils.GSUtilCopyDir('foo', 'bar', add_quiet_flag=True)
    run_command_mock.assert_called_with([
        '/mock/gsutil',
        '-m',
        '-q',
        'cp',
        '-R',
        'foo',
        'bar',
    ])

  def testGSUtilCopyCacheControl(
      self,  # pylint: disable=R0201
      run_command_mock,
      gs_util_setup_mock):
    slave_utils.GSUtilCopyFile('foo', 'bar', cache_control='mock_cache')
    run_command_mock.assert_called_with([
        '/mock/gsutil', '-h', 'Cache-Control:mock_cache', 'cp',
        'file://foo', 'file://bar/foo'
    ])
    slave_utils.GSUtilCopyDir('foo', 'bar', cache_control='mock_cache')
    run_command_mock.assert_called_with([
        '/mock/gsutil', '-m', '-h', 'Cache-Control:mock_cache', 'cp', '-R',
        'foo', 'bar'
    ])
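The snippets above are test methods whose mocks are injected by decorators that the excerpt omits. Below is a minimal sketch of what such a harness could look like, using the Python 2 `mock` package; the patch targets (`slave_utils.GSUtilSetup`, `slave_utils.chromium_utils.RunCommand`) are assumptions for illustration and may not match the real module layout.

import unittest

import mock  # Python 2 backport; unittest.mock in Python 3

import slave_utils


class GSUtilCopyTest(unittest.TestCase):

  # Decorators apply bottom-up, so the innermost patch (RunCommand) becomes
  # the first mock argument after self. Both targets are hypothetical here.
  @mock.patch('slave_utils.GSUtilSetup', return_value='/mock/gsutil')
  @mock.patch('slave_utils.chromium_utils.RunCommand')
  def testGSUtilCopyDirWithQuietFlag(
      self, run_command_mock, gs_util_setup_mock):
    slave_utils.GSUtilCopyDir('foo', 'bar', add_quiet_flag=True)
    run_command_mock.assert_called_with([
        '/mock/gsutil', '-m', '-q', 'cp', '-R', 'foo', 'bar',
    ])


if __name__ == '__main__':
  unittest.main()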
Example #3
  def Run(self):
    """Run the SkPicturePlayback BuildStep."""

    # Ensure the right .boto file is used by gsutil.
    if not self._skip_all_gs_access and old_gs_utils.read_timestamp_file(
        timestamp_file_name=old_gs_utils.TIMESTAMP_COMPLETED_FILENAME,
        gs_base=self._dest_gsbase,
        gs_relative_dir=posixpath.join(ROOT_PLAYBACK_DIR_NAME,
                                       SKPICTURES_DIR_NAME)) == "0":
      raise Exception(
          'Missing .boto file or .boto does not have the right credentials. '
          'Please see https://docs.google.com/a/google.com/document/d/1ZzHP6M5q'
          'ACA9nJnLqOZr2Hl0rjYqE4yQsQWAfVjKCzs/edit '
          '(may have to request access). The .boto file will need to be placed '
          'under third_party/chromium_buildbot/site_config/')

    # Get the credentials file: prompt the user to create one when GS access
    # is skipped, otherwise download it if it is not already present.
    if self._skip_all_gs_access:
      print """\n\nPlease create a %s file that contains:
      {
        "google": {
          "username": "******",
          "password": "******"
        },
        "facebook": {
          "username": "******",
          "password": "******"
        }
      }\n\n""" % CREDENTIALS_FILE_PATH
      raw_input("Please press Enter when you are ready to proceed...")
    elif not os.path.isfile(CREDENTIALS_FILE_PATH):
      # Download the credentials.json file from Google Storage.
      slave_utils.GSUtilDownloadFile(
          src=CREDENTIALS_GS_LOCATION, dst=CREDENTIALS_FILE_PATH)

    # Delete any left over data files in the data directory.
    for archive_file in glob.glob(
        os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, 'skia_*')):
      os.remove(archive_file)

    # Delete the local root directory if it already exists.
    if os.path.exists(LOCAL_PLAYBACK_ROOT_DIR):
      shutil.rmtree(LOCAL_PLAYBACK_ROOT_DIR)

    # Create the required local storage directories.
    self._CreateLocalStorageDirs()

    # Start the timer.
    start_time = time.time()

    # Loop through all page_sets.
    for page_set in self._page_sets:

      page_set_basename = os.path.basename(page_set).split('.')[0] + '.json'
      wpr_data_file = os.path.basename(page_set).split('.')[0] + '_000.wpr'

      if self._record:
        # Record a fresh archive of the specified webpages when
        # '--record=True' is passed.
        record_wpr_cmd = (
          'DISPLAY=%s' % X11_DISPLAY,
          os.path.join(TELEMETRY_BINARIES_DIR, 'record_wpr'),
          '--extra-browser-args=--disable-setuid-sandbox',
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          page_set
        )
        for _ in range(RETRY_RECORD_WPR_COUNT):
          output = shell_utils.run(' '.join(record_wpr_cmd), shell=True)
          if RECORD_FAILURE_MSG in output:
            print output
          else:
            # Break out of the retry loop since there were no errors.
            break
        else:
          # If we get here then record_wpr did not succeed and thus did not
          # break out of the loop.
          raise Exception('record_wpr failed for page_set: %s' % page_set)

      else:
        if not self._skip_all_gs_access:
          # Get the webpages archive so that it can be replayed.
          self._DownloadWebpagesArchive(wpr_data_file, page_set_basename)

      run_measurement_cmd = (
          'DISPLAY=%s' % X11_DISPLAY,
          'timeout', '300',
          os.path.join(TELEMETRY_BINARIES_DIR, 'run_measurement'),
          '--extra-browser-args=--disable-setuid-sandbox',
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          SKP_BENCHMARK,
          page_set,
          '-o',
          '/tmp/test.skp',
          '--skp-outdir=%s' % TMP_SKP_DIR
      )
      for _ in range(RETRY_RUN_MEASUREMENT_COUNT):
        try:
          print '\n\n=======Capturing SKP of %s=======\n\n' % page_set
          shell_utils.run(' '.join(run_measurement_cmd), shell=True)
        except shell_utils.CommandFailedException:
          # skpicture_printer sometimes fails with AssertionError but the
          # captured SKP is still valid. This is a known issue.
          pass

        if self._record:
          # Move over the created archive into the local webpages archive
          # directory.
          shutil.move(
              os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, wpr_data_file),
              self._local_record_webpages_archive_dir)
          shutil.move(
              os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
                           page_set_basename),
              self._local_record_webpages_archive_dir)

        # Rename generated SKP files into more descriptive names.
        try:
          self._RenameSkpFiles(page_set)
          # Break out of the retry loop since there were no errors.
          break
        except Exception:
          # There was a failure; continue with the retry loop.
          traceback.print_exc()
          print '\n\n=======Retrying %s=======\n\n' % page_set
          time.sleep(10)
      else:
        # If we get here then run_measurement did not succeed and thus did not
        # break out of the loop.
        raise Exception('run_measurement failed for page_set: %s' % page_set)

    print '\n\n=======Capturing SKP files took %s seconds=======\n\n' % (
        time.time() - start_time)

    if self._skia_tools:
      render_pictures_cmd = [
          os.path.join(self._skia_tools, 'render_pictures'),
          '-r', self._local_skp_dir
      ]
      render_pdfs_cmd = [
          os.path.join(self._skia_tools, 'render_pdfs'),
          self._local_skp_dir
      ]

      for tools_cmd in (render_pictures_cmd, render_pdfs_cmd):
        print '\n\n=======Running %s=======' % ' '.join(tools_cmd)
        proc = subprocess.Popen(tools_cmd)
        (code, _) = shell_utils.log_process_after_completion(proc, echo=False)
        if code != 0:
          raise Exception('%s failed!' % ' '.join(tools_cmd))

      if not self._non_interactive:
        print '\n\n=======Running debugger======='
        os.system('%s %s' % (os.path.join(self._skia_tools, 'debugger'),
                             os.path.join(self._local_skp_dir, '*')))

    print '\n\n'

    if not self._skip_all_gs_access and self._upload_to_gs:
      print '\n\n=======Uploading to Google Storage=======\n\n'
      # Copy the directory structure in the root directory into Google Storage.
      dest_dir_name = ROOT_PLAYBACK_DIR_NAME
      if self._alternate_upload_dir:
        dest_dir_name = self._alternate_upload_dir
      gs_status = slave_utils.GSUtilCopyDir(
          src_dir=LOCAL_PLAYBACK_ROOT_DIR,
          gs_base=self._dest_gsbase,
          dest_dir=dest_dir_name,
          gs_acl=gs_utils.GSUtils.PLAYBACK_CANNED_ACL)
      if gs_status != 0:
        raise Exception(
            'ERROR: GSUtilCopyDir error %d. "%s" -> "%s/%s"' % (
                gs_status, LOCAL_PLAYBACK_ROOT_DIR, self._dest_gsbase,
                dest_dir_name))
      self._SetGoogleReadACLs(
          posixpath.join(self._dest_gsbase, dest_dir_name, SKPICTURES_DIR_NAME))

      # Add a timestamp file to the SKP directory in Google Storage so we can
      # use directory-level rsync-like functionality.
      old_gs_utils.write_timestamp_file(
          timestamp_file_name=old_gs_utils.TIMESTAMP_COMPLETED_FILENAME,
          timestamp_value=time.time(),
          gs_base=self._dest_gsbase,
          gs_relative_dir=posixpath.join(dest_dir_name, SKPICTURES_DIR_NAME),
          gs_acl=gs_utils.GSUtils.PLAYBACK_CANNED_ACL,
          local_dir=LOCAL_PLAYBACK_ROOT_DIR)

      print '\n\n=======New SKPs have been uploaded to %s =======\n\n' % (
          posixpath.join(self._dest_gsbase, dest_dir_name, SKPICTURES_DIR_NAME))
    else:
      print '\n\n=======Not Uploading to Google Storage=======\n\n'
      print 'Generated resources are available in %s\n\n' % (
          LOCAL_PLAYBACK_ROOT_DIR)

    return 0
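Both retry loops in Run() above rely on Python's for/else idiom: the else suite runs only when the loop finishes without hitting break, i.e. when every attempt failed. A distilled sketch of the pattern, with illustrative names:

import time

MAX_ATTEMPTS = 3
RETRY_DELAY_SECS = 10


def run_with_retries(action, description):
  """Calls action() up to MAX_ATTEMPTS times; raises if all attempts fail."""
  for attempt in range(MAX_ATTEMPTS):
    try:
      action()
      break  # Success: the else suite below is skipped.
    except Exception:
      print '%s failed (attempt %d of %d); retrying' % (
          description, attempt + 1, MAX_ATTEMPTS)
      time.sleep(RETRY_DELAY_SECS)
  else:
    # Reached only if the loop never executed 'break'.
    raise Exception('%s failed after %d attempts' % (description, MAX_ATTEMPTS))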
def archive_layout(options, args):
  logging.basicConfig(level=logging.INFO,
                      format='%(asctime)s %(filename)s:%(lineno)-3d'
                             ' %(levelname)s %(message)s',
                      datefmt='%y%m%d %H:%M:%S')
  chrome_dir = os.path.abspath(options.build_dir)
  if options.results_dir is not None:
    options.results_dir = os.path.abspath(os.path.join(options.build_dir,
                                                       options.results_dir))
  else:
    options.results_dir = chromium_utils.FindUpward(chrome_dir, RESULT_DIR)
  # Compute the basename only after results_dir has been resolved, so this
  # also works when --results-dir is not passed.
  results_dir_basename = os.path.basename(options.results_dir)
  print 'Archiving results from %s' % options.results_dir
  staging_dir = options.staging_dir or slave_utils.GetStagingDir(chrome_dir)
  print 'Staging in %s' % staging_dir
  if not os.path.exists(staging_dir):
    os.makedirs(staging_dir)

  (actual_file_list, diff_file_list) = _CollectArchiveFiles(options.results_dir)
  zip_file = chromium_utils.MakeZip(staging_dir,
                                    results_dir_basename,
                                    actual_file_list,
                                    options.results_dir)[1]
  # TODO(ojan): Stop separately uploading full_results.json once garden-o-matic
  # switches to using failing_results.json.
  full_results_json = os.path.join(options.results_dir, 'full_results.json')
  failing_results_json = os.path.join(options.results_dir,
      'failing_results.json')

  # Extract the build name of this slave (e.g., 'chrome-release') from its
  # configuration file if not provided as a param.
  build_name = options.builder_name or slave_utils.SlaveBuildName(chrome_dir)
  build_name = re.sub('[ .()]', '_', build_name)

  wc_dir = os.path.dirname(chrome_dir)
  last_change = slave_utils.GetHashOrRevision(wc_dir)

  # TODO(dpranke): Is it safe to assume build_number is not blank? Should we
  # assert() this?
  build_number = str(options.build_number)
  print 'last change: %s' % last_change
  print 'build name: %s' % build_name
  print 'build number: %s' % build_number
  print 'host name: %s' % socket.gethostname()

  if options.gs_bucket:
    # Create a file containing last_change revision. This file will be uploaded
    # after all layout test results are uploaded so the client can check this
    # file to see if the upload for the revision is complete.
    # See crbug.com/574272 for more details.
    last_change_file = os.path.join(staging_dir, 'LAST_CHANGE')
    with open(last_change_file, 'w') as f:
      f.write(last_change)

    # Copy the results to a directory archived by build number.
    gs_base = '/'.join([options.gs_bucket, build_name, build_number])
    gs_acl = options.gs_acl
    # These files never change; cache them for a year.
    cache_control = "public, max-age=31556926"
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)
    slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)

    # TODO(dpranke): Remove these two lines once clients are fetching the
    # files from the layout-test-results dir.
    slave_utils.GSUtilCopyFile(full_results_json, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)
    slave_utils.GSUtilCopyFile(failing_results_json, gs_base, gs_acl=gs_acl,
      cache_control=cache_control)

    slave_utils.GSUtilCopyFile(last_change_file,
      gs_base + '/' + results_dir_basename, gs_acl=gs_acl,
      cache_control=cache_control)

    # Also copy to the 'results' directory to provide the 'latest' results,
    # and make sure they are not cached at all (Cloud Storage defaults to
    # caching with max-age=3600).
    gs_base = '/'.join([options.gs_bucket, build_name, 'results'])
    cache_control = 'no-cache'
    slave_utils.GSUtilCopyFile(zip_file, gs_base, gs_acl=gs_acl,
        cache_control=cache_control)
    slave_utils.GSUtilCopyDir(options.results_dir, gs_base, gs_acl=gs_acl,
        cache_control=cache_control)

    slave_utils.GSUtilCopyFile(last_change_file,
        gs_base + '/' + results_dir_basename, gs_acl=gs_acl,
        cache_control=cache_control)

  else:
    # Where to save layout test results.
    dest_parent_dir = os.path.join(archive_utils.Config.www_dir_base,
        results_dir_basename.replace('-', '_'), build_name)
    dest_dir = os.path.join(dest_parent_dir, last_change)

    _MaybeMakeDirectoryOnArchiveHost(dest_dir)
    _CopyFileToArchiveHost(zip_file, dest_dir)
    _CopyFileToArchiveHost(full_results_json, dest_dir)
    _CopyFileToArchiveHost(failing_results_json, dest_dir)
    # Archiving full layout test results is not supported on Google Storage
    # yet.
    _ArchiveFullLayoutTestResults(staging_dir, dest_parent_dir, diff_file_list,
                                  options)
  return 0
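As a closing note, the assertions in Example #1 indicate how the keyword arguments of GSUtilCopyDir used throughout these examples map onto a gsutil command line. A minimal sketch of that mapping, assuming it mirrors the asserted commands (the real slave_utils implementation may differ):

def _build_gsutil_copy_dir_cmd(gsutil, src_dir, dest_dir,
                               add_quiet_flag=False, cache_control=None):
  """Builds a 'gsutil cp -R' command line mirroring Example #1's assertions.

  Hypothetical helper for illustration only.
  """
  cmd = [gsutil, '-m']  # '-m' enables parallel (multi-threaded) copies.
  if add_quiet_flag:
    cmd.append('-q')    # Suppress per-file progress output.
  if cache_control:
    # Sets the Cache-Control header on uploaded objects, e.g. 'no-cache'
    # or 'public, max-age=31556926' as used in archive_layout() above.
    cmd += ['-h', 'Cache-Control:%s' % cache_control]
  cmd += ['cp', '-R', src_dir, dest_dir]
  return cmd

# Matches the quiet-flag assertion in Example #1:
# _build_gsutil_copy_dir_cmd('/mock/gsutil', 'foo', 'bar', add_quiet_flag=True)
# -> ['/mock/gsutil', '-m', '-q', 'cp', '-R', 'foo', 'bar']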