Example 1
  def Run(self):
    """Run the SkPicturePlayback BuildStep."""

    # Download the credentials file if it was not previously downloaded.
    if not os.path.isfile(CREDENTIALS_FILE_PATH):
      # Download the credentials.json file from Google Storage.
      self.gs.download_file(CREDENTIALS_GS_PATH, CREDENTIALS_FILE_PATH)

    if not os.path.isfile(CREDENTIALS_FILE_PATH):
      print """\n\nCould not locate credentials file in the storage.
      Please create a %s file that contains:
      {
        "google": {
          "username": "******",
          "password": "******"
        },
        "facebook": {
          "username": "******",
          "password": "******"
        }
      }\n\n""" % CREDENTIALS_FILE_PATH
      raw_input("Please press a key when you are ready to proceed...")

    # Delete any leftover data files in the data directory.
    for archive_file in glob.glob(
        os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, 'skia_*')):
      os.remove(archive_file)

    # Delete the local root directory if it already exists.
    if os.path.exists(LOCAL_PLAYBACK_ROOT_DIR):
      shutil.rmtree(LOCAL_PLAYBACK_ROOT_DIR)

    # Create the required local storage directories.
    self._CreateLocalStorageDirs()

    # Start the timer.
    start_time = time.time()

    # Loop through all page_sets.
    for page_set in self._page_sets:

      page_set_basename = os.path.basename(page_set).split('.')[0]
      page_set_json_name = page_set_basename + '.json'
      wpr_data_file = page_set.split(os.path.sep)[-1].split('.')[0] + '_000.wpr'
      page_set_dir = os.path.dirname(page_set)

      if self._IsChromiumPageSet(page_set):
        print 'Using Chromium\'s captured archives for Chromium\'s page sets.'
      elif self._record:
        # Create an archive of the specified webpages if '--record=True' is
        # specified.
        record_wpr_cmd = (
          'PYTHONPATH=%s:$PYTHONPATH' % page_set_dir,
          'DISPLAY=%s' % X11_DISPLAY,
          os.path.join(self._telemetry_binaries_dir, 'record_wpr'),
          '--extra-browser-args=--disable-setuid-sandbox',
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          '%s_page_set' % page_set_basename,
          '--page-set-base-dir=%s' % page_set_dir
        )
        for _ in range(RETRY_RECORD_WPR_COUNT):
          try:
            shell_utils.run(' '.join(record_wpr_cmd), shell=True)

            # Move over the created archive into the local webpages archive
            # directory.
            shutil.move(
              os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, wpr_data_file),
              self._local_record_webpages_archive_dir)
            shutil.move(
              os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
                           page_set_json_name),
              self._local_record_webpages_archive_dir)

            # Break out of the retry loop since there were no errors.
            break
          except Exception:
            # There was a failure; continue with the loop.
            traceback.print_exc()
        else:
          # If we get here then record_wpr did not succeed and thus did not
          # break out of the loop.
          raise Exception('record_wpr failed for page_set: %s' % page_set)

      else:
        # Get the webpages archive so that it can be replayed.
        self._DownloadWebpagesArchive(wpr_data_file, page_set_json_name)

      run_benchmark_cmd = (
          'PYTHONPATH=%s:$PYTHONPATH' % page_set_dir,
          'DISPLAY=%s' % X11_DISPLAY,
          'timeout', '300',
          os.path.join(self._telemetry_binaries_dir, 'run_benchmark'),
          '--extra-browser-args=--disable-setuid-sandbox',
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          SKP_BENCHMARK,
          '--page-set-name=%s' % page_set_basename,
          '--page-set-base-dir=%s' % page_set_dir,
          '--skp-outdir=%s' % TMP_SKP_DIR,
          '--also-run-disabled-tests'
      )

      for _ in range(RETRY_RUN_MEASUREMENT_COUNT):
        try:
          print '\n\n=======Capturing SKP of %s=======\n\n' % page_set
          shell_utils.run(' '.join(run_benchmark_cmd), shell=True)
        except shell_utils.CommandFailedException:
          # skpicture_printer sometimes fails with AssertionError but the
          # captured SKP is still valid. This is a known issue.
          pass

        # Rename generated SKP files into more descriptive names.
        try:
          self._RenameSkpFiles(page_set)
          # Break out of the retry loop since there were no errors.
          break
        except Exception:
          # There was a failure; continue with the loop.
          traceback.print_exc()
          print '\n\n=======Retrying %s=======\n\n' % page_set
          time.sleep(10)
      else:
        # If we get here then run_benchmark did not succeed and thus did not
        # break out of the loop.
        raise Exception('run_benchmark failed for page_set: %s' % page_set)

    print '\n\n=======Capturing SKP files took %s seconds=======\n\n' % (
        time.time() - start_time)

    if self._skia_tools:
      render_pictures_cmd = [
          os.path.join(self._skia_tools, 'render_pictures'),
          '-r', self._local_skp_dir
      ]
      render_pdfs_cmd = [
          os.path.join(self._skia_tools, 'render_pdfs'),
          '-r', self._local_skp_dir
      ]

      for tools_cmd in (render_pictures_cmd, render_pdfs_cmd):
        print '\n\n=======Running %s=======' % ' '.join(tools_cmd)
        proc = subprocess.Popen(tools_cmd)
        (code, _) = shell_utils.log_process_after_completion(proc, echo=False)
        if code != 0:
          raise Exception('%s failed!' % ' '.join(tools_cmd))

      if not self._non_interactive:
        print '\n\n=======Running debugger======='
        os.system('%s %s' % (os.path.join(self._skia_tools, 'debugger'),
                             self._local_skp_dir))

    print '\n\n'

    if self._upload:
      print '\n\n=======Uploading to %s=======\n\n' % self.gs.target_type()
      # Copy the directory structure in the root directory into Google Storage.
      dest_dir_name = ROOT_PLAYBACK_DIR_NAME
      if self._alternate_upload_dir:
        dest_dir_name = self._alternate_upload_dir

      self.gs.upload_dir_contents(
          LOCAL_PLAYBACK_ROOT_DIR, dest_dir=dest_dir_name,
          upload_if=gs_utils.GSUtils.UploadIf.IF_MODIFIED,
          predefined_acl=GS_PREDEFINED_ACL,
          fine_grained_acl_list=GS_FINE_GRAINED_ACL_LIST)

      print '\n\n=======New SKPs have been uploaded to %s =======\n\n' % (
          posixpath.join(self.gs.target_name(), dest_dir_name,
                         SKPICTURES_DIR_NAME))
    else:
      print '\n\n=======Not Uploading to %s=======\n\n' % self.gs.target_type()
      print 'Generated resources are available in %s\n\n' % (
          LOCAL_PLAYBACK_ROOT_DIR)

    return 0
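
Both retry loops in the example above rely on Python's for/else construct: the else branch runs only when the loop completes without hitting break, which is exactly the "all retries failed" case the comments describe. A minimal sketch of that pattern in isolation (RETRY_COUNT, flaky_step and the attempt counter are hypothetical placeholders, not names from the original module):

import traceback

RETRY_COUNT = 3      # hypothetical stand-in for RETRY_RECORD_WPR_COUNT
_attempts = {'n': 0}

def flaky_step():
  """Placeholder for a step that can fail transiently (e.g. record_wpr)."""
  _attempts['n'] += 1
  if _attempts['n'] < 3:
    raise RuntimeError('transient failure on attempt %d' % _attempts['n'])

for _ in range(RETRY_COUNT):
  try:
    flaky_step()
    # Success: 'break' skips the for-loop's 'else' clause below.
    break
  except Exception:
    # Log the failure and let the next iteration retry.
    traceback.print_exc()
else:
  # Runs only if no iteration hit 'break', i.e. every attempt failed.
  raise Exception('flaky_step failed after %d attempts' % RETRY_COUNT)
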
Example 2
    def Run(self):
        """Run the SkPicturePlayback BuildStep."""

        # Download the credentials file if it was not previously downloaded.
        if not os.path.isfile(CREDENTIALS_FILE_PATH):
            # Download the credentials.json file from Google Storage.
            self.gs.download_file(CREDENTIALS_GS_PATH, CREDENTIALS_FILE_PATH)

        if not os.path.isfile(CREDENTIALS_FILE_PATH):
            print """\n\nCould not locate credentials file in the storage.
      Please create a %s file that contains:
      {
        "google": {
          "username": "******",
          "password": "******"
        },
        "facebook": {
          "username": "******",
          "password": "******"
        }
      }\n\n""" % CREDENTIALS_FILE_PATH
            raw_input("Please press a key when you are ready to proceed...")

        # Delete any leftover data files in the data directory.
        for archive_file in glob.glob(
                os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, 'skia_*')):
            os.remove(archive_file)

        # Delete the local root directory if it already exists.
        if os.path.exists(LOCAL_PLAYBACK_ROOT_DIR):
            shutil.rmtree(LOCAL_PLAYBACK_ROOT_DIR)

        # Create the required local storage directories.
        self._CreateLocalStorageDirs()

        # Start the timer.
        start_time = time.time()

        # Loop through all page_sets.
        for page_set in self._page_sets:

            page_set_basename = os.path.basename(page_set).split('.')[0]
            page_set_json_name = page_set_basename + '.json'
            wpr_data_file = page_set.split(
                os.path.sep)[-1].split('.')[0] + '_000.wpr'
            page_set_dir = os.path.dirname(page_set)

            if self._IsChromiumPageSet(page_set):
                print 'Using Chromium\'s captured archives for Chromium\'s page sets.'
            elif self._record:
                # Create an archive of the specified webpages if '--record=True' is
                # specified.
                record_wpr_cmd = (
                    'PYTHONPATH=%s:$PYTHONPATH' % page_set_dir,
                    'DISPLAY=%s' % X11_DISPLAY,
                    os.path.join(self._telemetry_binaries_dir, 'record_wpr'),
                    '--extra-browser-args=--disable-setuid-sandbox',
                    '--browser=exact',
                    '--browser-executable=%s' % self._browser_executable,
                    '%s_page_set' % page_set_basename,
                    '--page-set-base-dir=%s' % page_set_dir)
                for _ in range(RETRY_RECORD_WPR_COUNT):
                    try:
                        shell_utils.run(' '.join(record_wpr_cmd), shell=True)

                        # Move over the created archive into the local webpages archive
                        # directory.
                        shutil.move(
                            os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
                                         wpr_data_file),
                            self._local_record_webpages_archive_dir)
                        shutil.move(
                            os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
                                         page_set_json_name),
                            self._local_record_webpages_archive_dir)

                        # Break out of the retry loop since there were no errors.
                        break
                    except Exception:
                        # There was a failure; continue with the loop.
                        traceback.print_exc()
                else:
                    # If we get here then record_wpr did not succeed and thus did not
                    # break out of the loop.
                    raise Exception('record_wpr failed for page_set: %s' %
                                    page_set)

            else:
                # Get the webpages archive so that it can be replayed.
                self._DownloadWebpagesArchive(wpr_data_file,
                                              page_set_json_name)

            run_benchmark_cmd = (
                'PYTHONPATH=%s:$PYTHONPATH' % page_set_dir,
                'DISPLAY=%s' % X11_DISPLAY, 'timeout', '300',
                os.path.join(self._telemetry_binaries_dir, 'run_benchmark'),
                '--extra-browser-args=--disable-setuid-sandbox',
                '--browser=exact',
                '--browser-executable=%s' % self._browser_executable,
                SKP_BENCHMARK, '--page-set-name=%s' % page_set_basename,
                '--page-set-base-dir=%s' % page_set_dir,
                '--skp-outdir=%s' % TMP_SKP_DIR, '--also-run-disabled-tests')

            for _ in range(RETRY_RUN_MEASUREMENT_COUNT):
                try:
                    print '\n\n=======Capturing SKP of %s=======\n\n' % page_set
                    shell_utils.run(' '.join(run_benchmark_cmd), shell=True)
                except shell_utils.CommandFailedException:
                    # skpicture_printer sometimes fails with AssertionError but the
                    # captured SKP is still valid. This is a known issue.
                    pass

                # Rename generated SKP files into more descriptive names.
                try:
                    self._RenameSkpFiles(page_set)
                    # Break out of the retry loop since there were no errors.
                    break
                except Exception:
                    # There was a failure; continue with the loop.
                    traceback.print_exc()
                    print '\n\n=======Retrying %s=======\n\n' % page_set
                    time.sleep(10)
            else:
                # If we get here then run_benchmark did not succeed and thus did not
                # break out of the loop.
                raise Exception('run_benchmark failed for page_set: %s' %
                                page_set)

        print '\n\n=======Capturing SKP files took %s seconds=======\n\n' % (
            time.time() - start_time)

        if self._skia_tools:
            render_pictures_cmd = [
                os.path.join(self._skia_tools, 'render_pictures'), '-r',
                self._local_skp_dir
            ]
            render_pdfs_cmd = [
                os.path.join(self._skia_tools, 'render_pdfs'), '-r',
                self._local_skp_dir
            ]

            for tools_cmd in (render_pictures_cmd, render_pdfs_cmd):
                print '\n\n=======Running %s=======' % ' '.join(tools_cmd)
                proc = subprocess.Popen(tools_cmd)
                (code,
                 _) = shell_utils.log_process_after_completion(proc,
                                                               echo=False)
                if code != 0:
                    raise Exception('%s failed!' % ' '.join(tools_cmd))

            if not self._non_interactive:
                print '\n\n=======Running debugger======='
                os.system('%s %s' % (os.path.join(
                    self._skia_tools, 'debugger'), self._local_skp_dir))

        print '\n\n'

        if self._upload:
            print '\n\n=======Uploading to %s=======\n\n' % self.gs.target_type(
            )
            # Copy the directory structure in the root directory into Google Storage.
            dest_dir_name = ROOT_PLAYBACK_DIR_NAME
            if self._alternate_upload_dir:
                dest_dir_name = self._alternate_upload_dir

            self.gs.upload_dir_contents(
                LOCAL_PLAYBACK_ROOT_DIR,
                dest_dir=dest_dir_name,
                upload_if=gs_utils.GSUtils.UploadIf.IF_MODIFIED,
                predefined_acl=GS_PREDEFINED_ACL,
                fine_grained_acl_list=GS_FINE_GRAINED_ACL_LIST)

            print '\n\n=======New SKPs have been uploaded to %s =======\n\n' % (
                posixpath.join(self.gs.target_name(), dest_dir_name,
                               SKPICTURES_DIR_NAME))
        else:
            print '\n\n=======Not Uploading to %s=======\n\n' % self.gs.target_type(
            )
            print 'Generated resources are available in %s\n\n' % (
                LOCAL_PLAYBACK_ROOT_DIR)

        return 0
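
The record_wpr and run_benchmark invocations above are assembled as tuples whose first elements are shell environment assignments (PYTHONPATH=..., DISPLAY=...); joining the tuple with spaces and running it with shell=True makes those assignments apply to that single command only. A rough, standalone sketch of the same technique, assuming a POSIX shell and the coreutils timeout binary; the paths and values are illustrative, not the build step's real constants:

import subprocess

# Illustrative stand-ins for page_set_dir / X11_DISPLAY / the telemetry binary.
page_set_dir = '/tmp/page_sets'
display = ':0'

cmd = (
    'PYTHONPATH=%s:$PYTHONPATH' % page_set_dir,  # per-command env override
    'DISPLAY=%s' % display,
    'timeout', '300',   # kill the command if it runs longer than 300 seconds
    'env',              # stand-in for run_benchmark; just prints its environment
)

# The shell interprets the leading VAR=value assignments, so they affect
# only this invocation and not the rest of the build step.
subprocess.call(' '.join(cmd), shell=True)
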
Example 3
  def Run(self):
    """Run the SkPicturePlayback BuildStep."""

    # Delete any leftover data files in the data directory.
    for archive_file in glob.glob(
        os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, 'skia_*')):
      os.remove(archive_file)

    # Delete the local root directory if it already exists.
    if os.path.exists(LOCAL_PLAYBACK_ROOT_DIR):
      shutil.rmtree(LOCAL_PLAYBACK_ROOT_DIR)

    # Create the required local storage directories.
    self._CreateLocalStorageDirs()

    # Start the timer.
    start_time = time.time()

    # Loop through all page_sets.
    for page_set in self._page_sets:

      page_set_basename = os.path.basename(page_set).split('.')[0] + '.json'
      wpr_data_file = page_set.split(os.path.sep)[-1].split('.')[0] + '_000.wpr'

      if self._record:
        # Create an archive of the specified webpages if '--record=True' is
        # specified.
        record_wpr_cmd = (
          'DISPLAY=%s' % X11_DISPLAY,
          os.path.join(self._telemetry_binaries_dir, 'record_wpr'),
          '--extra-browser-args=--disable-setuid-sandbox',
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          page_set
        )
        for _ in range(RETRY_RECORD_WPR_COUNT):
          output = shell_utils.run(' '.join(record_wpr_cmd), shell=True)
          if RECORD_FAILURE_MSG in output:
            print output
          else:
            # Break out of the retry loop since there were no errors.
            break
        else:
          # If we get here then record_wpr did not succeed and thus did not
          # break out of the loop.
          raise Exception('record_wpr failed for page_set: %s' % page_set)

      else:
        if not self._skip_all_gs_access:
          # Get the webpages archive so that it can be replayed.
          self._DownloadWebpagesArchive(wpr_data_file, page_set_basename)

      page_set_name = os.path.basename(page_set).split('.')[0]
      run_benchmark_cmd = (
          'DISPLAY=%s' % X11_DISPLAY,
          'timeout', '300',
          os.path.join(self._telemetry_binaries_dir, 'run_benchmark'),
          '--extra-browser-args=--disable-setuid-sandbox',
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          SKP_BENCHMARK,
          page_set_name,
          '-o',
          '/tmp/test.skp',
          '--skp-outdir=%s' % TMP_SKP_DIR
      )
      page_set_dst = os.path.join(self._telemetry_binaries_dir, 'page_sets',
                                  os.path.basename(page_set))
      wpr_dst = os.path.join(self._telemetry_binaries_dir, 'page_sets', 'data',
                             wpr_data_file)
      json_dst = os.path.join(self._telemetry_binaries_dir, 'page_sets', 'data',
                              page_set_basename)
      copied_page_set = False
      if not os.path.exists(page_set_dst):
        print 'Copying %s to %s' % (page_set, page_set_dst)
        shutil.copyfile(page_set, page_set_dst)
        wpr_src = os.path.join(os.path.dirname(page_set), 'data',
                               wpr_data_file)
        print 'Copying %s to %s' % (wpr_src, wpr_dst)
        shutil.copyfile(wpr_src, wpr_dst)
        json_src = os.path.join(os.path.dirname(page_set), 'data',
                                page_set_basename)
        print 'Copying %s to %s' % (json_src, json_dst)
        shutil.copyfile(json_src, json_dst)
        copied_page_set = True

      for _ in range(RETRY_RUN_MEASUREMENT_COUNT):
        try:
          print '\n\n=======Capturing SKP of %s=======\n\n' % page_set
          shell_utils.run(' '.join(run_benchmark_cmd), shell=True)
        except shell_utils.CommandFailedException:
          # skpicture_printer sometimes fails with AssertionError but the
          # captured SKP is still valid. This is a known issue.
          pass

        if self._record:
          # Move over the created archive into the local webpages archive
          # directory.
          shutil.move(
              os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, wpr_data_file),
              self._local_record_webpages_archive_dir)
          shutil.move(
              os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
                           page_set_basename),
              self._local_record_webpages_archive_dir)

        # Rename generated SKP files into more descriptive names.
        try:
          self._RenameSkpFiles(page_set)
          # Break out of the retry loop since there were no errors.
          break
        except Exception:
          # There was a failure; continue with the loop.
          traceback.print_exc()
          print '\n\n=======Retrying %s=======\n\n' % page_set
          time.sleep(10)
      else:
        if copied_page_set:
          os.remove(page_set_dst)
          os.remove(wpr_dst)
          os.remove(json_dst)
        # If we get here then run_benchmark did not succeed and thus did not
        # break out of the loop.
        raise Exception('run_benchmark failed for page_set: %s' % page_set)

      if copied_page_set:
        os.remove(page_set_dst)
        os.remove(wpr_dst)
        os.remove(json_dst)

    print '\n\n=======Capturing SKP files took %s seconds=======\n\n' % (
        time.time() - start_time)

    if self._skia_tools:
      render_pictures_cmd = [
          os.path.join(self._skia_tools, 'render_pictures'),
          '-r', self._local_skp_dir
      ]
      render_pdfs_cmd = [
          os.path.join(self._skia_tools, 'render_pdfs'),
          self._local_skp_dir
      ]

      for tools_cmd in (render_pictures_cmd, render_pdfs_cmd):
        print '\n\n=======Running %s=======' % ' '.join(tools_cmd)
        proc = subprocess.Popen(tools_cmd)
        (code, output) = shell_utils.log_process_after_completion(proc,
                                                                  echo=False)
        if code != 0:
          raise Exception('%s failed!' % ' '.join(tools_cmd))

      if not self._non_interactive:
        print '\n\n=======Running debugger======='
        os.system('%s %s' % (os.path.join(self._skia_tools, 'debugger'),
                             os.path.join(self._local_skp_dir, '*')))

    print '\n\n'

    if not self._skip_all_gs_access and self._upload_to_gs:
      print '\n\n=======Uploading to Google Storage=======\n\n'
      # Copy the directory structure in the root directory into Google Storage.
      dest_dir_name = ROOT_PLAYBACK_DIR_NAME
      if self._alternate_upload_dir:
        dest_dir_name = self._alternate_upload_dir

      # str.lstrip() strips a set of characters rather than a prefix, so
      # remove the leading GS_PREFIX explicitly.
      gs_bucket = self._dest_gsbase
      if gs_bucket.startswith(gs_utils.GS_PREFIX):
        gs_bucket = gs_bucket[len(gs_utils.GS_PREFIX):]
      gs_utils.GSUtils().upload_dir_contents(
          LOCAL_PLAYBACK_ROOT_DIR, gs_bucket, dest_dir_name,
          upload_if=gs_utils.GSUtils.UploadIf.IF_MODIFIED,
          predefined_acl=GS_PREDEFINED_ACL,
          fine_grained_acl_list=GS_FINE_GRAINED_ACL_LIST)

      print '\n\n=======New SKPs have been uploaded to %s =======\n\n' % (
          posixpath.join(self._dest_gsbase, dest_dir_name, SKPICTURES_DIR_NAME))
    else:
      print '\n\n=======Not Uploading to Google Storage=======\n\n'
      print 'Generated resources are available in %s\n\n' % (
          LOCAL_PLAYBACK_ROOT_DIR)

    return 0
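
Example 3 copies the page set, its WPR archive, and the JSON file into telemetry's page_sets directory before running the benchmark, then deletes the copies on both the failure path and the success path. A hedged sketch of the same cleanup idea written once with try/finally; the helper name and the throwaway files are hypothetical, not part of the build step:

import os
import shutil
import tempfile

def run_with_copied_inputs(src_files, dst_dir, run):
  """Copy src_files into dst_dir, call run(), and always remove the copies."""
  copies = []
  try:
    for src in src_files:
      dst = os.path.join(dst_dir, os.path.basename(src))
      if not os.path.exists(dst):
        shutil.copyfile(src, dst)
        copies.append(dst)
    run()
  finally:
    # Remove only the files this helper created, whether run() succeeded or not.
    for dst in copies:
      os.remove(dst)

# Tiny usage example with throwaway files.
src_dir = tempfile.mkdtemp()
dst_dir = tempfile.mkdtemp()
src = os.path.join(src_dir, 'example.json')
open(src, 'w').close()
run_with_copied_inputs([src], dst_dir, run=lambda: None)
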