Example #1
def _CheckWprShaFiles(input_api, output_api):
  """Check whether the wpr sha files have matching URLs."""
  old_sys_path = sys.path
  try:
    perf_dir = input_api.PresubmitLocalPath()
    py_utils_path = os.path.abspath(os.path.join(
        perf_dir, '..', '..', 'third_party', 'catapult', 'common', 'py_utils'))
    sys.path.insert(1, py_utils_path)
    from py_utils import cloud_storage  # pylint: disable=import-error
  finally:
    sys.path = old_sys_path

  results = []
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    filename = affected_file.AbsoluteLocalPath()
    if not filename.endswith('wpr.sha1'):
      continue
    expected_hash = cloud_storage.ReadHash(filename)
    is_wpr_file_uploaded = any(
        cloud_storage.Exists(bucket, expected_hash)
        # .values() (Python 3) replaces Python 2's .itervalues().
        for bucket in cloud_storage.BUCKET_ALIASES.values())
    if not is_wpr_file_uploaded:
      wpr_filename = filename[:-5]
      results.append(output_api.PresubmitError(
          'The file matching %s is not in Cloud Storage yet.\n'
          'You can upload your new WPR archive file with the command:\n'
          'depot_tools/upload_to_google_storage.py --bucket '
          '<Your pageset\'s bucket> %s.\nFor more info: see '
          'http://www.chromium.org/developers/telemetry/'
          'record_a_page_set#TOC-Upload-the-recording-to-Cloud-Storage' %
          (filename, wpr_filename)))
  return results
Example #2
def FetchAndExtractSymbolsMac(symbol_base_directory, version):
    def GetLocalPath(base_dir, version):
        return os.path.join(base_dir, version + ".tar.bz2")

    def GetSymbolsPath(version):
        return "desktop-*/" + version + "/mac64/Google Chrome.dSYM.tar.bz2"

    def ExtractSymbolTarFile(symbol_sub_dir, symbol_tar_file):
        os.makedirs(symbol_sub_dir)
        with tarfile.open(os.path.expanduser(symbol_tar_file), "r:bz2") as tar:
            tar.extractall(symbol_sub_dir)

    symbol_sub_dir = os.path.join(symbol_base_directory, version)
    if os.path.isdir(symbol_sub_dir):
        return True

    bzip_path = GetLocalPath(symbol_base_directory, version)
    if not os.path.isfile(bzip_path):
        cloud_storage_bucket = "chrome-unsigned"
        if not cloud_storage.Exists(cloud_storage_bucket,
                                    GetSymbolsPath(version)):
            print("Can't find symbols on GCS.")
            return False
        print("Downloading symbols files from GCS, please wait.")
        cloud_storage.Get(cloud_storage_bucket, GetSymbolsPath(version),
                          bzip_path)

    ExtractSymbolTarFile(symbol_sub_dir, bzip_path)
    return True
Example #3
def _QueuePlatformUpdate(binary, platform, version_info, config, channel):
  """ platform: the name of the platform for the browser to
      be downloaded & updated from cloud storage. """
  platform_info = _PLATFORM_MAP[platform]

  if binary == 'chrome':
    remote_path = _ResolveChromeRemotePath(platform_info, version_info)
  elif binary == 'chromium':
    remote_path = _ResolveChromiumRemotePath(channel, platform, version_info)
  else:
    raise ValueError('binary must be \'chrome\' or \'chromium\'')

  if not cloud_storage.Exists(remote_path.bucket, remote_path.path):
    cloud_storage_path = 'gs://%s/%s' % (remote_path.bucket, remote_path.path)
    logging.warning('Failed to find %s build for version %s at path %s.',
                    platform, version_info.version, cloud_storage_path)
    logging.warning('Skipping this update for this platform/channel.')
    return

  reference_builds_folder = os.path.join(
      os.path.dirname(os.path.abspath(__file__)), 'chrome_telemetry_build',
      'reference_builds', binary, channel)
  if not os.path.exists(reference_builds_folder):
    os.makedirs(reference_builds_folder)
  local_dest_path = os.path.join(reference_builds_folder,
                                 platform,
                                 platform_info.zip_name)
  cloud_storage.Get(remote_path.bucket, remote_path.path, local_dest_path)
  _ModifyBuildIfNeeded(binary, local_dest_path, platform)
  config.AddCloudStorageDependencyUpdateJob('%s_%s' % (binary, channel),
      platform, local_dest_path, version=version_info.version,
      execute_job=False)
Example #4
def FetchFromCloudStorage(bucket_name, source_path, destination_dir):
  """Fetches a file from Google Cloud Storage.

  As a side-effect, this prints messages to stdout about what's happening.

  Args:
    bucket_name: Google Storage bucket name.
    source_path: Source file path.
    destination_dir: Destination directory path.

  Returns:
    Local file path of downloaded file if it was downloaded. If the file does
    not exist in the given bucket, or if there was an error while downloading,
    None is returned.
  """
  target_file = os.path.join(destination_dir, os.path.basename(source_path))
  gs_url = 'gs://%s/%s' % (bucket_name, source_path)
  try:
    if cloud_storage.Exists(bucket_name, source_path):
      logging.info('Fetching file from %s...', gs_url)
      cloud_storage.Get(bucket_name, source_path, target_file)
      if os.path.exists(target_file):
        return target_file
    else:
      logging.info('File %s not found in cloud storage.', gs_url)
      return None
  except Exception as e:
    logging.warning('Exception while fetching from cloud storage: %s', e)
    if os.path.exists(target_file):
      os.remove(target_file)
  return None
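
For reference, here is a minimal, hypothetical call site for FetchFromCloudStorage above; the bucket name, remote path, and local directory are illustrative values, not taken from any real configuration:

    # Hypothetical usage: returns the local path on success, or None if the
    # file is missing in the bucket or the download failed.
    local_trace = FetchFromCloudStorage('chrome-telemetry-output',
                                        '20201004T094119_6100/trace.html',
                                        '/tmp/traces')
    if local_trace is None:
        logging.error('Trace not available; aborting.')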
Example #5
 # subprocess_mock is injected by a mock.patch decorator in the original test.
 def testExistsReturnsFalse(self, subprocess_mock):
     p_mock = mock.Mock()
     subprocess_mock.Popen.return_value = p_mock
     p_mock.communicate.return_value = (
         '', 'CommandException: One or more URLs matched no objects.\n')
     p_mock.returncode = 1
     self.assertFalse(
         cloud_storage.Exists('fake bucket', 'fake remote path'))
Example #6
def FindProtoTracePath(html_url):
    """Finds the proto trace path given an html trace url.

    In the simple case foo/bar/trace.pb is the proto trace for
    foo/bar/trace.html. But sometimes that's not available, so we have to
    look for a .pb.gz file in a special directory."""
    subpath = _GetSubpathInBucket(html_url)
    if subpath.endswith('trace.html'):
        proto_path = subpath.replace('trace.html', 'trace.pb')
        if cloud_storage.Exists(cloud_storage.TELEMETRY_OUTPUT, proto_path):
            return proto_path
        proto_path += '.gz'
        if cloud_storage.Exists(cloud_storage.TELEMETRY_OUTPUT, proto_path):
            return proto_path

    directory_path = '/'.join(subpath.split('/')[:-1])
    return _GetProtoTraceLinkFromTraceEventsDir(directory_path)
Example #7
def BuildIsAvailable(bucket_name, remote_path):
  """Checks whether a build is currently archived at the given location."""
  logging.info('Checking existence: gs://%s/%s', bucket_name, remote_path)
  try:
    exists = cloud_storage.Exists(bucket_name, remote_path)
    logging.info('Exists? %s', exists)
    return exists
  except cloud_storage.CloudStorageError:
    return False
Example #8
def ParseGSLinksFromHTTPLink(http_link):
    """Parses gs:// links to traces from HTTP link.

  The link to HTML trace can be obtained by substituting the part of http_link
  ending with /o/ with 'gs://chrome-telemetry-output/'.

  The link to the proto trace can, in the simplest case, be obtained from the
  HTML trace link by changing the extension from 'html' to 'pb'. If this
  approach does not work, the proto trace link can be found in the
  trace/traceEvents subdirectory.
  For example, the first approach works for
  https://console.developers.google.com/m/cloudstorage/b/chrome-telemetry-output/o/20201004T094119_6100/rendering.desktop/animometer_webgl_attrib_arrays/retry_0/trace.html:
  The cloud storage paths to HTML and proto traces are:
  20201004T094119_6100/rendering.desktop/animometer_webgl_attrib_arrays/retry_0/trace.html
  20201004T094119_6100/rendering.desktop/animometer_webgl_attrib_arrays/retry_0/trace.pb,
  but doesn't work for
  https://console.developers.google.com/m/cloudstorage/b/chrome-telemetry-output/o/20200928T183503_42028/v8.browsing_desktop/browse_social_tumblr_infinite_scroll_2018/retry_0/trace.html:
  The cloud storage paths to HTML and proto traces are:
  20200928T183503_42028/v8.browsing_desktop/browse_social_tumblr_infinite_scroll_2018/
  retry_0/trace.html,
  20200928T183503_42028/v8.browsing_desktop/browse_social_tumblr_infinite_scroll_2018/
  retry_0/trace/traceEvents/tmpTq5XNv.pb.gz
  """
    html_link_suffix = '/trace.html'
    assert http_link.endswith(html_link_suffix), (
        'Link passed to ParseGSLinksFromHTTPLink ("%s") is invalid. '
        'The link must end with "%s".' % (http_link, html_link_suffix))

    html_link = http_link.split('/o/')[1]
    if not cloud_storage.Exists(cloud_storage.TELEMETRY_OUTPUT, html_link):
        raise cloud_storage.NotFoundError(
            'HTML trace link %s not found in cloud storage.' % html_link)

    proto_link = os.path.splitext(html_link)[0] + '.pb'

    if not cloud_storage.Exists(cloud_storage.TELEMETRY_OUTPUT, proto_link):
        link_prefix = html_link[:-len(html_link_suffix)]
        proto_link = GetProtoTraceLinkFromTraceEventsDir(link_prefix)
    return html_link, proto_link
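
To make the docstring's transformation concrete, here is a short sketch using the first example path from the docstring (resulting values shown in comments):

    import os

    http_link = ('https://console.developers.google.com/m/cloudstorage/b/'
                 'chrome-telemetry-output/o/20201004T094119_6100/'
                 'rendering.desktop/animometer_webgl_attrib_arrays/retry_0/'
                 'trace.html')
    html_link = http_link.split('/o/')[1]
    # '20201004T094119_6100/rendering.desktop/animometer_webgl_attrib_arrays/retry_0/trace.html'
    proto_link = os.path.splitext(html_link)[0] + '.pb'
    # '20201004T094119_6100/rendering.desktop/animometer_webgl_attrib_arrays/retry_0/trace.pb'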
Example #9
 def _ConditionallyUploadToCloudStorage(self, img_name, page, tab, screenshot):
   """Uploads the screenshot to cloud storage as the reference image
   for this test, unless it already exists. Returns True if the
   upload was actually performed."""
   if not self.options.refimg_cloud_storage_bucket:
     raise Exception('--refimg-cloud-storage-bucket argument is required')
   cloud_name = self._FormatReferenceImageName(img_name, page, tab)
   if not cloud_storage.Exists(self.options.refimg_cloud_storage_bucket,
                               cloud_name):
     self._UploadBitmapToCloudStorage(self.options.refimg_cloud_storage_bucket,
                                      cloud_name,
                                      screenshot)
     return True
   return False
Example #10
def _FetchGCSFile(cloud_storage_bucket, gcs_file, output_file):
    """Fetch a file from GCS and save it to |output_file|.

    Args:
      cloud_storage_bucket: bucket in cloud storage where symbols reside.
      gcs_file: path to the file in GCS.
      output_file: local file in which to store the fetched GCS file.

    Returns:
      True if the file is fetched successfully; False otherwise.
    """
    if cloud_storage.Exists(cloud_storage_bucket, gcs_file):
        logging.info('Downloading file from GCS: %s', gcs_file)
        cloud_storage.Get(cloud_storage_bucket, gcs_file, output_file)
        logging.info('Saved file locally to: %s', output_file)
        return True
    return False
Example #11
    def Upload(self, force=False):
        """Upload all pending files and then write the updated config to disk.

        Will attempt to copy files existing in the upload location to a backup
        location in the same bucket in cloud storage if |force| is True.

        Args:
          force: True if files should be uploaded to cloud storage even if a
              file already exists in the upload location.

        Raises:
          CloudStorageUploadConflictError: If |force| is False and the
              potential upload location of a file already exists.
          CloudStorageError: If copying an existing file to the backup location
              or uploading the new file fails.
        """
        if cloud_storage.Exists(self._cs_bucket, self._cs_remote_path):
            if not force:
                #pylint: disable=nonstandard-exception
                raise exceptions.CloudStorageUploadConflictError(
                    self._cs_bucket, self._cs_remote_path)
                #pylint: enable=nonstandard-exception
            logging.debug(
                'A file already exists at upload path %s in bucket %s',
                self._cs_remote_path, self._cs_bucket)
            try:
                cloud_storage.Copy(self._cs_bucket, self._cs_bucket,
                                   self._cs_remote_path, self._cs_backup_path)
                self._backed_up = True
            except cloud_storage.CloudStorageError:
                logging.error(
                    'Failed to copy existing file %s in cloud storage bucket '
                    '%s to backup location %s', self._cs_remote_path,
                    self._cs_bucket, self._cs_backup_path)
                raise

        try:
            cloud_storage.Insert(self._cs_bucket, self._cs_remote_path,
                                 self._local_path)
        except cloud_storage.CloudStorageError:
            logging.error(
                'Failed to upload %s to %s in cloud_storage bucket %s',
                self._local_path, self._cs_remote_path, self._cs_bucket)
            raise
        self._updated = True
Example #12
def DownloadHtmlTrace(html_url, download_dir=DEFAULT_TRACE_DIR):
    """Downloads the html trace given the url. Returns the local path.

    Skips downloading if the file was already downloaded once."""
    local_filename = os.path.join(download_dir,
                                  GetLocalTraceFileName(html_url))
    local_path = local_filename + '.html'
    if os.path.exists(local_path):
        logging.info('%s already downloaded. Skipping.', local_path)
        return local_path

    remote_path = _GetSubpathInBucket(html_url)
    if not cloud_storage.Exists(cloud_storage.TELEMETRY_OUTPUT, remote_path):
        raise cloud_storage.NotFoundError(
            'HTML trace %s not found in cloud storage.' % html_url)

    cloud_storage.Get(cloud_storage.TELEMETRY_OUTPUT, remote_path, local_path)
    return local_path
Example #13
def UploadHostBinary(binary_name, binary_path, version):
    """Upload the binary to the cloud.

    This function uploads the host binary (e.g. trace_processor_shell) to the
    cloud and updates the 'latest' file for the host platform to point to the
    newly uploaded file. Note that it doesn't modify the config, so it doesn't
    affect which binaries are downloaded by FetchHostBinary.
    """
    filename = os.path.basename(binary_path)
    platform = _GetHostPlatform()
    remote_path = posixpath.join(BINARY_CS_FOLDER, binary_name, platform,
                                 version, filename)
    if not cloud_storage.Exists(BINARY_BUCKET, remote_path):
        cloud_storage.Insert(BINARY_BUCKET,
                             remote_path,
                             binary_path,
                             publicly_readable=True)
    _SetLatestPathForBinary(binary_name, platform, remote_path)
Example #14
def UploadAndSwitchDataFile(data_file_name, data_file_path, version):
    """Upload the data file to the cloud and update the config to use it."""
    filename = os.path.basename(data_file_path)
    remote_path = posixpath.join(DATA_CS_FOLDER, data_file_name, version,
                                 filename)
    if not cloud_storage.Exists(DATA_BUCKET, remote_path):
        cloud_storage.Insert(DATA_BUCKET,
                             remote_path,
                             data_file_path,
                             publicly_readable=False)

    with open(CONFIG_PATH) as f:
        config = json.load(f)
    config[data_file_name]['remote_path'] = remote_path
    config[data_file_name]['hash'] = cloud_storage.CalculateHash(
        data_file_path)
    with open(CONFIG_PATH, 'w') as f:
        json.dump(config, f, indent=4, separators=(',', ': '))
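
As a rough illustration, after this runs, the entry written to the JSON file at CONFIG_PATH would look something like the following; the file name, remote path, and hash are made-up placeholder values:

    {
        "example_data_file": {
            "remote_path": "data/example_data_file/1.2.3/example.json",
            "hash": "da39a3ee5e6b4b0d3255bfef95601890afd80709"
        }
    }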
Example #15
 def DownloadAndExtractZipFile(zip_path, source, destination):
     if not os.path.isfile(zip_path):
         # cloud_storage_bucket is defined at module scope in the original source.
         if not cloud_storage.Exists(cloud_storage_bucket, source):
             print("Can't find symbols on GCS.")
             return False
         print("Downloading symbols files from GCS, please wait.")
         cloud_storage.Get(cloud_storage_bucket, source, zip_path)
         if not os.path.isfile(zip_path):
             print("Can't download symbols from GCS.")
             return False
     with zipfile.ZipFile(zip_path, "r") as zip_file:
         for member in zip_file.namelist():
             filename = os.path.basename(member)
             # Skip directories.
             if not filename:
                 continue
             # Extract archived files.
             source = zip_file.open(member)
             target = open(os.path.join(destination, filename), "wb")
             with source, target:
                 shutil.copyfileobj(source, target)
Example #16
 def DependencyExistsInCloudStorage(self):
     return cloud_storage.Exists(self._cs_bucket, self._cs_remote_path)
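
Finally, a minimal sketch of the pattern these examples share: check existence, then download, and treat CloudStorageError as absence. The helper name and its arguments are hypothetical; only cloud_storage.Exists, cloud_storage.Get, and cloud_storage.CloudStorageError come from the examples above:

    import os

    def _SafeFetch(bucket, remote_path, local_path):
        # Hypothetical helper: returns local_path on success, None otherwise.
        try:
            if not cloud_storage.Exists(bucket, remote_path):
                return None
            cloud_storage.Get(bucket, remote_path, local_path)
            return local_path if os.path.exists(local_path) else None
        except cloud_storage.CloudStorageError:
            return None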