Example #1
def DownloadLatestFile(base_url, partial_name, dst):
    """Get the latest archived object with the given base url and partial name.

    Args:
      base_url: Base Google Storage archive URL (gs://...) containing the build.
      partial_name: Partial name of the archive file to download.
      dst: Destination file/directory where the file will be downloaded.

    Raises:
      Exception: If unable to find or download a file.
    """
    base_url_glob = '%s/**' % base_url.rstrip('/')
    result = slave_utils.GSUtilListBucket(base_url_glob, ['-l'])

    # GSUtilListBucket returns a (status, output) pair; a nonzero status
    # means the listing failed.
    if not result or result[0]:
        raise Exception('Could not find any archived files.')

    # Each 'gsutil ls -l' line looks like '<size> <timestamp> <gs://url>'.
    files = [b.split()[2] for b in result[1].split('\n') if partial_name in b]

    if not files:
        raise Exception('Could not find any matching files.')

    # Compare as LooseVersions so numeric path components sort numerically.
    files = [distutils.version.LooseVersion(x) for x in files]
    newest_file = str(max(files))
    slave_utils.GSUtilDownloadFile(newest_file, dst)
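A minimal usage sketch; the bucket and archive names below are placeholders, not taken from the original code:

DownloadLatestFile(
    base_url='gs://example-archive/builds',
    partial_name='full-build-linux',
    dst='/tmp/latest_build.zip')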
Example #2
def DownloadFileWithPolling(url,
                            dst,
                            poll_interval=_POLL_INTERVAL_DEFAULT,
                            timeout=_TIMEOUT_DEFAULT):
    """Download the archived file with given |url|.

    If not found, keep trying at |poll_interval| until we reach |timeout|.

    Args:
      url: Google Storage URL (gs://...).
      dst: Destination file/directory where the file will be downloaded.
      poll_interval: Polling interval in seconds.
      timeout: Timeout in seconds.

    Raises:
      Exception: If there is a timeout.
    """
    start_time = time.time()
    time_passed = 0
    while time_passed < timeout:
        if not slave_utils.GSUtilDownloadFile(url, dst):
            return
        print 'Retrying in %d seconds...' % poll_interval
        time.sleep(poll_interval)
        time_passed = time.time() - start_time
    raise Exception('Timed out trying to download %s' % url)
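A hedged usage sketch, assuming _POLL_INTERVAL_DEFAULT and _TIMEOUT_DEFAULT are module-level constants in seconds (the URL is a placeholder):

# Poll every 15 seconds for up to 5 minutes.
DownloadFileWithPolling(
    url='gs://example-bucket/archive.zip',
    dst='/tmp/archive.zip',
    poll_interval=15,
    timeout=300)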
Example #3
  def _DownloadWebpagesArchive(self, wpr_data_file, page_set_basename):
    """Downloads the webpages archive and its required page set from GS."""
    wpr_source = posixpath.join(
        self._dest_gsbase, ROOT_PLAYBACK_DIR_NAME, 'webpages_archive',
        wpr_data_file)
    page_set_source = posixpath.join(
        self._dest_gsbase, ROOT_PLAYBACK_DIR_NAME, 'webpages_archive',
        page_set_basename)
    if (old_gs_utils.does_storage_object_exist(wpr_source) and
        old_gs_utils.does_storage_object_exist(page_set_source)):
      slave_utils.GSUtilDownloadFile(
          src=wpr_source, dst=LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR)
      slave_utils.GSUtilDownloadFile(
          src=page_set_source, dst=LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR)
    else:
      raise Exception('%s and %s do not exist in Google Storage!' % (
          wpr_source, page_set_source))
Example #4
def _MaybeUploadReleaseNotes(version):
    """Upload release notes if conditions are right."""
    # Check if the current version has already been released.
    notes_name = 'notes.txt'
    notes_url = '%s/%s/%s' % (GS_CHROMEDRIVER_BUCKET, version, notes_name)
    prev_version = '.'.join(
        [version.split('.')[0],
         str(int(version.split('.')[1]) - 1)])
    prev_notes_url = '%s/%s/%s' % (GS_CHROMEDRIVER_BUCKET, prev_version,
                                   notes_name)

    result, _ = slave_utils.GSUtilListBucket(notes_url, [])
    if result == 0:
        # Listing succeeded, so the notes already exist; nothing to do.
        return

    fixed_issues = []
    query = ('https://code.google.com/p/chromedriver/issues/csv?'
             'can=1&q=label%%3AChromeDriver-%s&colspec=ID%%20Summary' %
             version)
    # Drop the CSV header row before parsing.
    issues = StringIO.StringIO(_GetWebPageContent(query).split('\n', 1)[1])
    for issue in csv.reader(issues):
        if not issue:
            continue
        issue_id = issue[0]
        desc = issue[1]
        labels = issue[2].split(', ')
        labels.remove('ChromeDriver-%s' % version)
        if 'Hotlist-GoodFirstBug' in labels:
            labels.remove('Hotlist-GoodFirstBug')
        fixed_issues += [
            'Resolved issue %s: %s [%s]' % (issue_id, desc, labels)
        ]

    old_notes = ''
    temp_notes_fname = tempfile.mkstemp()[1]
    # GSUtilDownloadFile returns 0 on success.
    if not slave_utils.GSUtilDownloadFile(prev_notes_url, temp_notes_fname):
        with open(temp_notes_fname, 'rb') as f:
            old_notes = f.read()

    new_notes = '----------ChromeDriver v%s (%s)----------\n%s\n%s\n\n%s' % (
        version, datetime.date.today().isoformat(),
        'Supports Chrome v%s-%s' % _GetSupportedChromeVersions(),
        '\n'.join(fixed_issues), old_notes)
    with open(temp_notes_fname, 'w') as f:
        f.write(new_notes)

    if slave_utils.GSUtilCopy(temp_notes_fname,
                              notes_url,
                              mimetype='text/plain'):
        util.MarkBuildStepError()
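Because prev_version is built by decrementing the second dotted component, the function assumes at least a 'major.minor' version string. A hypothetical call:

# Uploads notes for v2.10 only if <bucket>/2.10/notes.txt does not already exist.
_MaybeUploadReleaseNotes('2.10')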
Example #5
def read_timestamp_file(timestamp_file_name, gs_base, gs_relative_dir):
    """Reads the specified TIMESTAMP file from the specified GS dir.

    Returns "0" if the file is empty or does not exist.
    """
    src = posixpath.join(gs_base, gs_relative_dir, timestamp_file_name)
    temp_file = tempfile.mkstemp()[1]
    # A failed download leaves temp_file empty, which maps to "0" below.
    slave_utils.GSUtilDownloadFile(src=src, dst=temp_file)

    storage_file_obj = open(temp_file, 'r')
    try:
        timestamp_value = storage_file_obj.read().strip()
        return timestamp_value if timestamp_value else "0"
    finally:
        storage_file_obj.close()
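Several of these examples call tempfile.mkstemp()[1], which leaks the open file descriptor that mkstemp returns alongside the path. A sketch of the tighter pattern, identical in behavior otherwise:

import os
import tempfile

fd, temp_file = tempfile.mkstemp()
os.close(fd)  # mkstemp opens the file for us; release the unused descriptor.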
Example #6
def download_directory_contents_if_changed(gs_base, gs_relative_dir,
                                           local_dir):
    """Compares the TIMESTAMP_LAST_UPLOAD_COMPLETED and downloads if different.

    The goal of download_directory_contents_if_changed and
    upload_directory_contents_if_changed is to attempt to replicate directory
    level rsync functionality to the Google Storage directories we care about.
    """
    if _are_timestamps_equal(gs_base, gs_relative_dir, local_dir):
        print '\n\n=======Local directory is current=======\n\n'
    else:
        file_utils.create_clean_local_dir(local_dir)
        gs_source = posixpath.join(gs_base, gs_relative_dir, '*')
        slave_utils.GSUtilDownloadFile(src=gs_source, dst=local_dir)
        if not _are_timestamps_equal(gs_base, gs_relative_dir, local_dir):
            raise Exception('Failed to download from GS: %s' % gs_source)
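A hypothetical call, assuming the bucket already contains a TIMESTAMP_LAST_UPLOAD_COMPLETED file written by the matching upload function (all paths are placeholders):

download_directory_contents_if_changed(
    gs_base='gs://example-bucket',
    gs_relative_dir='playback/skps',
    local_dir='/tmp/skps')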
Example #7
def _GetTestResultsLog(platform):
  """Gets the test results log for the given platform.

  Returns:
    A dictionary where the keys are SVN revisions and the values are booleans
    indicating whether the tests passed.
  """
  temp_log = tempfile.mkstemp()[1]
  log_name = TEST_LOG_FORMAT % platform
  result = slave_utils.GSUtilDownloadFile(
      '%s/%s' % (GS_CHROMEDRIVER_DATA_BUCKET, log_name), temp_log)
  if result:
    return {}
  with open(temp_log, 'rb') as log_file:
    json_dict = json.load(log_file)
  # Workaround for json encoding dictionary keys as strings.
  return dict([(int(v[0]), v[1]) for v in json_dict.items()])
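The final dict(...) conversion is needed because JSON object keys are always strings, so integer revisions round-trip as strings:

import json
json.loads(json.dumps({12345: True}))  # {u'12345': True} under Python 2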
Example #8
def _are_timestamps_equal(gs_base, gs_relative_dir, local_dir):
    """Compares the local TIMESTAMP with the TIMESTAMP from Google Storage."""

    local_timestamp_file = os.path.join(local_dir,
                                        TIMESTAMP_COMPLETED_FILENAME)
    # Make sure that the local TIMESTAMP file exists.
    if not os.path.exists(local_timestamp_file):
        return False

    # Get the timestamp file from Google Storage.
    src = posixpath.join(gs_base, gs_relative_dir,
                         TIMESTAMP_COMPLETED_FILENAME)
    temp_file = tempfile.mkstemp()[1]
    slave_utils.GSUtilDownloadFile(src=src, dst=temp_file)

    local_file_obj = open(local_timestamp_file, 'r')
    storage_file_obj = open(temp_file, 'r')
    try:
        local_timestamp = local_file_obj.read().strip()
        storage_timestamp = storage_file_obj.read().strip()
        return local_timestamp == storage_timestamp
    finally:
        local_file_obj.close()
        storage_file_obj.close()
Example #9
  def Run(self):
    """Run the SkPicturePlayback BuildStep."""

    # Ensure the right .boto file is used by gsutil.
    if not self._skip_all_gs_access and old_gs_utils.read_timestamp_file(
        timestamp_file_name=old_gs_utils.TIMESTAMP_COMPLETED_FILENAME,
        gs_base=self._dest_gsbase,
        gs_relative_dir=posixpath.join(ROOT_PLAYBACK_DIR_NAME,
                                       SKPICTURES_DIR_NAME)) == "0":
      raise Exception(
          'Missing .boto file or .boto does not have the right credentials. '
          'Please see https://docs.google.com/a/google.com/document/d/1ZzHP6M5q'
          'ACA9nJnLqOZr2Hl0rjYqE4yQsQWAfVjKCzs/edit '
          '(may have to request access). The .boto file will need to be placed '
          'under third_party/chromium_buildbot/site_config/')

    # Get credentials: either prompt the user to create the file by hand, or
    # download it from Google Storage if it is not already present locally.
    if self._skip_all_gs_access:
      print """\n\nPlease create a %s file that contains:
      {
        "google": {
          "username": "******",
          "password": "******"
        },
        "facebook": {
          "username": "******",
          "password": "******"
        }
      }\n\n""" % CREDENTIALS_FILE_PATH
      raw_input("Please press a key when you are ready to proceed...")
    elif not os.path.isfile(CREDENTIALS_FILE_PATH):
      # Download the credentials.json file from Google Storage.
      slave_utils.GSUtilDownloadFile(
          src=CREDENTIALS_GS_LOCATION, dst=CREDENTIALS_FILE_PATH)

    # Delete any left over data files in the data directory.
    for archive_file in glob.glob(
        os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, 'skia_*')):
      os.remove(archive_file)

    # Delete the local root directory if it already exists.
    if os.path.exists(LOCAL_PLAYBACK_ROOT_DIR):
      shutil.rmtree(LOCAL_PLAYBACK_ROOT_DIR)

    # Create the required local storage directories.
    self._CreateLocalStorageDirs()

    # Start the timer.
    start_time = time.time()

    # Loop through all page_sets.
    for page_set in self._page_sets:

      page_set_basename = os.path.basename(page_set).split('.')[0] + '.json'
      wpr_data_file = page_set.split(os.path.sep)[-1].split('.')[0] + '_000.wpr'

      if self._record:
        # Create an archive of the specified webpages if '--record=True' is
        # specified.
        record_wpr_cmd = (
          'DISPLAY=%s' % X11_DISPLAY,
          os.path.join(TELEMETRY_BINARIES_DIR, 'record_wpr'),
          '--extra-browser-args=--disable-setuid-sandbox',
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          page_set
        )
        for _ in range(RETRY_RECORD_WPR_COUNT):
          output = shell_utils.run(' '.join(record_wpr_cmd), shell=True)
          if RECORD_FAILURE_MSG in output:
            print output
          else:
            # Break out of the retry loop since there were no errors.
            break
        else:
          # If we get here then record_wpr did not succeed and thus did not
          # break out of the loop.
          raise Exception('record_wpr failed for page_set: %s' % page_set)

      else:
        if not self._skip_all_gs_access:
          # Get the webpages archive so that it can be replayed.
          self._DownloadWebpagesArchive(wpr_data_file, page_set_basename)

      run_measurement_cmd = (
          'DISPLAY=%s' % X11_DISPLAY,
          'timeout', '300',
          os.path.join(TELEMETRY_BINARIES_DIR, 'run_measurement'),
          '--extra-browser-args=--disable-setuid-sandbox',
          '--browser=exact',
          '--browser-executable=%s' % self._browser_executable,
          SKP_BENCHMARK,
          page_set,
          '-o',
          '/tmp/test.skp',
          '--skp-outdir=%s' % TMP_SKP_DIR
      )
      for _ in range(RETRY_RUN_MEASUREMENT_COUNT):
        try:
          print '\n\n=======Capturing SKP of %s=======\n\n' % page_set
          shell_utils.run(' '.join(run_measurement_cmd), shell=True)
        except shell_utils.CommandFailedException:
          # skpicture_printer sometimes fails with AssertionError but the
          # captured SKP is still valid. This is a known issue.
          pass

        if self._record:
          # Move over the created archive into the local webpages archive
          # directory.
          shutil.move(
              os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR, wpr_data_file),
              self._local_record_webpages_archive_dir)
          shutil.move(
              os.path.join(LOCAL_REPLAY_WEBPAGES_ARCHIVE_DIR,
                           page_set_basename),
              self._local_record_webpages_archive_dir)

        # Rename generated SKP files into more descriptive names.
        try:
          self._RenameSkpFiles(page_set)
          # Break out of the retry loop since there were no errors.
          break
        except Exception:
          # There was a failure; continue with the retry loop.
          traceback.print_exc()
          print '\n\n=======Retrying %s=======\n\n' % page_set
          time.sleep(10)
      else:
        # If we get here then run_measurement did not succeed and thus did not
        # break out of the loop.
        raise Exception('run_measurement failed for page_set: %s' % page_set)

    print '\n\n=======Capturing SKP files took %s seconds=======\n\n' % (
        time.time() - start_time)

    if self._skia_tools:
      render_pictures_cmd = [
          os.path.join(self._skia_tools, 'render_pictures'),
          '-r', self._local_skp_dir
      ]
      render_pdfs_cmd = [
          os.path.join(self._skia_tools, 'render_pdfs'),
          self._local_skp_dir
      ]

      for tools_cmd in (render_pictures_cmd, render_pdfs_cmd):
        print '\n\n=======Running %s=======' % ' '.join(tools_cmd)
        proc = subprocess.Popen(tools_cmd)
        (code, output) = shell_utils.log_process_after_completion(proc,
                                                                  echo=False)
        if code != 0:
          raise Exception('%s failed!' % ' '.join(tools_cmd))

      if not self._non_interactive:
        print '\n\n=======Running debugger======='
        os.system('%s %s' % (os.path.join(self._skia_tools, 'debugger'),
                             os.path.join(self._local_skp_dir, '*')))

    print '\n\n'

    if not self._skip_all_gs_access and self._upload_to_gs:
      print '\n\n=======Uploading to Google Storage=======\n\n'
      # Copy the directory structure in the root directory into Google Storage.
      dest_dir_name = ROOT_PLAYBACK_DIR_NAME
      if self._alternate_upload_dir:
        dest_dir_name = self._alternate_upload_dir
      gs_status = slave_utils.GSUtilCopyDir(
          src_dir=LOCAL_PLAYBACK_ROOT_DIR,
          gs_base=self._dest_gsbase,
          dest_dir=dest_dir_name,
          gs_acl=gs_utils.GSUtils.PLAYBACK_CANNED_ACL)
      if gs_status != 0:
        raise Exception(
            'ERROR: GSUtilCopyDir error %d. "%s" -> "%s/%s"' % (
                gs_status, LOCAL_PLAYBACK_ROOT_DIR, self._dest_gsbase,
                dest_dir_name))
      self._SetGoogleReadACLs(
          posixpath.join(self._dest_gsbase, dest_dir_name, SKPICTURES_DIR_NAME))

      # Add a timestamp file to the SKP directory in Google Storage so we can
      # use directory level rsync like functionality.
      old_gs_utils.write_timestamp_file(
          timestamp_file_name=old_gs_utils.TIMESTAMP_COMPLETED_FILENAME,
          timestamp_value=time.time(),
          gs_base=self._dest_gsbase,
          gs_relative_dir=posixpath.join(dest_dir_name, SKPICTURES_DIR_NAME),
          gs_acl=gs_utils.GSUtils.PLAYBACK_CANNED_ACL,
          local_dir=LOCAL_PLAYBACK_ROOT_DIR)

      print '\n\n=======New SKPs have been uploaded to %s =======\n\n' % (
          posixpath.join(self._dest_gsbase, dest_dir_name, SKPICTURES_DIR_NAME))
    else:
      print '\n\n=======Not Uploading to Google Storage=======\n\n'
      print 'Generated resources are available in %s\n\n' % (
          LOCAL_PLAYBACK_ROOT_DIR)

    return 0
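Both retry loops in Run() use Python's for/else: the else suite runs only when the loop completes without hitting break, which is what turns "all retries exhausted" into an exception. A minimal sketch of the pattern (do_work is a hypothetical helper returning True on success):

def do_work():
    # Hypothetical helper; returns True on success.
    return True

for _ in range(3):
    if do_work():
        break  # Success skips the else suite below.
else:
    raise Exception('all retries failed')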
Example #10
def DownloadFromBucket(source_gsurl, dest):
    status = slave_utils.GSUtilDownloadFile(source_gsurl, dest)
    if status != 0:
        raise Exception('ERROR: GSUtilDownloadFile error %d. "%s" -> "%s"' %
                        (status, source_gsurl, dest))
    return 0
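A hypothetical call; the wrapper just converts GSUtilDownloadFile's nonzero exit status into an exception:

DownloadFromBucket('gs://example-bucket/archive.zip', '/tmp/archive.zip')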
Example #11
def upload_directory_contents_if_changed(gs_base,
                                         gs_relative_dir,
                                         gs_acl,
                                         local_dir,
                                         force_upload=False,
                                         upload_chunks=False,
                                         files_to_upload=None):
    """Compares the TIMESTAMP_LAST_UPLOAD_COMPLETED and uploads if different.

  Args:
    gs_base: str - The Google Storage base. Eg: gs://rmistry.
    gs_relative_dir: str - Relative directory to the Google Storage base.
    gs_acl: str - ACL to use when uploading to Google Storage.
    local_dir: str - The local directory to upload.
    force_upload: bool - Whether upload should be done regardless of timestamps
        matching or not.
    upload_chunks: bool - Whether upload should be done in chunks or in a single
        command.
    files_to_upload: str seq - Specific files that should be uploaded, if not
        specified then all files in local_dir are uploaded. If upload_chunks is
        True then files will be uploaded in chunks else they will be uploaded
        one at a time. The Google Storage directory is not cleaned before upload
        if files_to_upload is specified.

  The goal of download_directory_contents_if_changed and
  upload_directory_contents_if_changed is to attempt to replicate directory
  level rsync functionality to the Google Storage directories we care about.

  Returns True if contents were uploaded, else returns False.
  """
    if not force_upload and _are_timestamps_equal(gs_base, gs_relative_dir,
                                                  local_dir):
        print '\n\n=======Local directory is current=======\n\n'
        return False
    else:
        local_src = os.path.join(local_dir, '*')
        gs_dest = posixpath.join(gs_base, gs_relative_dir)
        timestamp_value = time.time()

        if not files_to_upload:
            print '\n\n=======Delete Storage directory before uploading=======\n\n'
            delete_storage_object(gs_dest)

        print '\n\n=======Writing new TIMESTAMP_LAST_UPLOAD_STARTED=======\n\n'
        write_timestamp_file(timestamp_file_name=TIMESTAMP_STARTED_FILENAME,
                             timestamp_value=timestamp_value,
                             gs_base=gs_base,
                             gs_relative_dir=gs_relative_dir,
                             local_dir=local_dir,
                             gs_acl=gs_acl)

        if upload_chunks:
            if files_to_upload:
                local_files = [
                    os.path.join(local_dir, local_file)
                    for local_file in files_to_upload
                ]
            else:
                local_files = [
                    os.path.join(local_dir, local_file)
                    for local_file in os.listdir(local_dir)
                ]
            for files_chunk in _get_chunks(local_files, FILES_CHUNK):
                gsutil = slave_utils.GSUtilSetup()
                command = [gsutil, 'cp'] + files_chunk + [gs_dest]
                try:
                    shell_utils.run(command)
                except shell_utils.CommandFailedException:
                    raise Exception(
                        'Could not upload the chunk to Google Storage! The chunk: %s'
                        % files_chunk)
        else:
            # GSUtilDownloadFile presumably wraps a plain 'gsutil cp', which is
            # why it can be used here for local-to-GS copies as well.
            if files_to_upload:
                for file_to_upload in files_to_upload:
                    src = os.path.join(local_dir, file_to_upload)
                    if slave_utils.GSUtilDownloadFile(src=src,
                                                      dst=gs_dest) != 0:
                        raise Exception(
                            'Could not upload %s to Google Storage!' %
                            file_to_upload)
            else:
                if slave_utils.GSUtilDownloadFile(src=local_src,
                                                  dst=gs_dest) != 0:
                    raise Exception('Could not upload %s to Google Storage!' %
                                    local_src)

        print '\n\n=======Writing new TIMESTAMP_LAST_UPLOAD_COMPLETED=======\n\n'
        write_timestamp_file(timestamp_file_name=TIMESTAMP_COMPLETED_FILENAME,
                             timestamp_value=timestamp_value,
                             gs_base=gs_base,
                             gs_relative_dir=gs_relative_dir,
                             local_dir=local_dir,
                             gs_acl=gs_acl)
        return True
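A hypothetical call mirroring the download example in Example #6 (paths and ACL are placeholders):

uploaded = upload_directory_contents_if_changed(
    gs_base='gs://example-bucket',
    gs_relative_dir='playback/skps',
    gs_acl='public-read',
    local_dir='/tmp/skps')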