def download_build(cp_num, revision_map, zip_file_name, context):
    """Download a single build corresponding to the cp_num and context."""
    remote_file_path = '%s/%s_%s.zip' % (context.original_remote_path,
                                         context.file_prefix,
                                         revision_map[cp_num])
    cloud_storage.Get(context.original_gs_bucket, remote_file_path,
                      zip_file_name)
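A minimal usage sketch for the function above. The namedtuple, bucket, prefix, and revision values are all hypothetical stand-ins, modeled only on the attributes download_build dereferences:

import collections

# Hypothetical stand-in for the real context object.
DownloadContext = collections.namedtuple(
    'DownloadContext',
    ['original_remote_path', 'file_prefix', 'original_gs_bucket'])

context = DownloadContext(original_remote_path='official-by-commit',
                          file_prefix='full-build-linux',
                          original_gs_bucket='chrome-test-builds')
revision_map = {870763: 'abc123def'}  # commit position -> revision (assumed)
download_build(870763, revision_map, 'build_870763.zip', context)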
    def testDisableCloudStorageIo(self, unused_lock_mock):
        """Checks that every I/O entry point raises CloudStorageIODisabled
        when the DISABLE_CLOUD_STORAGE_IO environment variable is set."""
        os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
        dir_path = 'real_dir_path'
        self.fs.CreateDirectory(dir_path)
        file_path = os.path.join(dir_path, 'file1')
        file_path_sha = file_path + '.sha1'

        def CleanTimeStampFile():
            os.remove(file_path + '.fetchts')

        self.CreateFiles([file_path, file_path_sha])
        with open(file_path_sha, 'w') as f:
            f.write('hash1234')
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1',
                               'remote_path2')
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.Get('bucket', 'foo', file_path)
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.GetIfChanged(file_path, 'foo')
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.GetIfHashChanged('bar', file_path, 'bucket',
                                           'hash1234')
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.Insert('bucket', 'foo', file_path)

        CleanTimeStampFile()
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.GetFilesInDirectoryIfChanged(dir_path, 'bucket')
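The test above expects every cloud_storage entry point to raise CloudStorageIODisabled while DISABLE_CLOUD_STORAGE_IO is set to '1'. A minimal sketch of that guard pattern; the helper name is an assumption, not the library's actual internals:

import os

class CloudStorageIODisabled(Exception):
    """Raised when cloud storage I/O is disabled via the environment."""

def _EnsureIOAllowed():
    # Assumed guard: each public operation would call this before any I/O.
    if os.environ.get('DISABLE_CLOUD_STORAGE_IO') == '1':
        raise CloudStorageIODisabled('Cloud storage I/O is disabled.')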
Example #3
def _QueuePlatformUpdate(binary, platform, version_info, config, channel):
  """ platform: the name of the platform for the browser to
      be downloaded & updated from cloud storage. """
  platform_info = _PLATFORM_MAP[platform]

  if binary == 'chrome':
    remote_path = _ResolveChromeRemotePath(platform_info, version_info)
  elif binary == 'chromium':
    remote_path = _ResolveChromiumRemotePath(channel, platform, version_info)
  else:
    raise ValueError('binary must be \'chrome\' or \'chromium\'')

  if not cloud_storage.Exists(remote_path.bucket, remote_path.path):
    cloud_storage_path = 'gs://%s/%s' % (remote_path.bucket, remote_path.path)
    logging.warning('Failed to find %s build for version %s at path %s.',
                    platform, version_info.version, cloud_storage_path)
    logging.warning('Skipping this update for this platform/channel.')
    return

  reference_builds_folder = os.path.join(
      os.path.dirname(os.path.abspath(__file__)), 'chrome_telemetry_build',
      'reference_builds', binary, channel)
  if not os.path.exists(reference_builds_folder):
    os.makedirs(reference_builds_folder)
  local_dest_path = os.path.join(reference_builds_folder,
                                 platform,
                                 platform_info.zip_name)
  cloud_storage.Get(remote_path.bucket, remote_path.path, local_dest_path)
  _ModifyBuildIfNeeded(binary, local_dest_path, platform)
  config.AddCloudStorageDependencyUpdateJob('%s_%s' % (binary, channel),
      platform, local_dest_path, version=version_info.version,
      execute_job=False)
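The _ResolveChromeRemotePath and _ResolveChromiumRemotePath helpers are not shown; the code only assumes they return an object exposing .bucket and .path. A hypothetical shape:

import collections

# Assumed return shape for the _Resolve*RemotePath helpers.
RemotePath = collections.namedtuple('RemotePath', ['bucket', 'path'])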
Example #4
def FetchAndExtractSymbolsMac(symbol_base_directory, version):
    def GetLocalPath(base_dir, version):
        return os.path.join(base_dir, version + ".tar.bz2")

    def GetSymbolsPath(version):
        return "desktop-*/" + version + "/mac64/Google Chrome.dSYM.tar.bz2"

    def ExtractSymbolTarFile(symbol_sub_dir, symbol_tar_file):
        os.makedirs(symbol_sub_dir)
        with tarfile.open(os.path.expanduser(symbol_tar_file), "r:bz2") as tar:
            tar.extractall(symbol_sub_dir)

    symbol_sub_dir = os.path.join(symbol_base_directory, version)
    if os.path.isdir(symbol_sub_dir):
        return True

    bzip_path = GetLocalPath(symbol_base_directory, version)
    if not os.path.isfile(bzip_path):
        cloud_storage_bucket = "chrome-unsigned"
        if not cloud_storage.Exists(cloud_storage_bucket,
                                    GetSymbolsPath(version)):
            print("Can't find symbols on GCS.")
            return False
        print("Downloading symbols files from GCS, please wait.")
        cloud_storage.Get(cloud_storage_bucket, GetSymbolsPath(version),
                          bzip_path)

    ExtractSymbolTarFile(symbol_sub_dir, bzip_path)
    return True
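A hedged usage sketch; the base directory and version string are placeholders:

# Hypothetical call: fetches and unpacks the dSYMs for one Chrome version.
if FetchAndExtractSymbolsMac('/tmp/chrome_symbols', '96.0.4664.45'):
    print('Symbols extracted under /tmp/chrome_symbols/96.0.4664.45')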
Example #5
def FetchFromCloudStorage(bucket_name, source_path, destination_dir):
  """Fetches file(s) from the Google Cloud Storage.

  As a side-effect, this prints messages to stdout about what's happening.

  Args:
    bucket_name: Google Storage bucket name.
    source_path: Source file path.
    destination_dir: Destination directory path.

  Returns:
    Local file path of downloaded file if it was downloaded. If the file does
    not exist in the given bucket, or if there was an error while downloading,
    None is returned.
  """
  target_file = os.path.join(destination_dir, os.path.basename(source_path))
  gs_url = 'gs://%s/%s' % (bucket_name, source_path)
  try:
    if cloud_storage.Exists(bucket_name, source_path):
      logging.info('Fetching file from %s...', gs_url)
      cloud_storage.Get(bucket_name, source_path, target_file)
      if os.path.exists(target_file):
        return target_file
    else:
      logging.info('File %s not found in cloud storage.', gs_url)
      return None
  except Exception as e:
    logging.warning('Exception while fetching from cloud storage: %s', e)
    if os.path.exists(target_file):
      os.remove(target_file)
  return None
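A hypothetical call; the bucket and object path are placeholders:

local = FetchFromCloudStorage('chromium-telemetry', 'builds/build_1234.zip',
                              '/tmp')
if local is None:
    print('File missing in bucket or download failed.')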
def GetLatestPath(binary_name, platform):
    """Reads the LATEST marker for a binary and platform from cloud storage."""
    with tempfile_ext.NamedTemporaryFile() as latest_file:
        latest_file.close()
        remote_path = posixpath.join(BINARY_CS_FOLDER, binary_name, platform,
                                     LATEST_FILENAME)
        cloud_storage.Get(BINARY_BUCKET, remote_path, latest_file.name)
        with open(latest_file.name) as latest:
            return latest.read()
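GetLatestPath and _CalculateHash (at the end of this collection) read module-level constants that are not shown; illustrative stand-ins, with values that are assumptions rather than the real ones:

BINARY_BUCKET = 'chromium-telemetry'      # assumed bucket name
BINARY_CS_FOLDER = 'binary_dependencies'  # assumed folder prefix
LATEST_FILENAME = 'LATEST'                # assumed marker file name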
def CreateTraceFile(trace_link_prefix, traces_dir, extension):
    trace_link = '%s.%s' % (trace_link_prefix, extension)
    with tempfile.NamedTemporaryFile(dir=traces_dir,
                                     suffix='_trace.%s' % extension,
                                     delete=False) as trace_file:
        logging.debug('Downloading trace to %s\ntrace_link: %s',
                      trace_file.name, trace_link)
        cloud_storage.Get(cloud_storage.TELEMETRY_OUTPUT, trace_link,
                          trace_file.name)
        return trace_file.name
Example #8
def download_build(cp_num, revision_map, zip_file_name, context):
    """Download a single build corresponding to the cp_num and context."""
    remote_file_path = '%s/%s_%s.zip' % (context.original_remote_path,
                                         context.file_prefix,
                                         revision_map[cp_num])
    try:
        cloud_storage.Get(context.original_gs_bucket, remote_file_path,
                          zip_file_name)
        return True
    except Exception as e:
        logging.warning('Failed to download: %s, error: %s', zip_file_name, e)
        return False
Example #9
  def _DownloadFromCloudStorage(self, img_name, page, tab):
    """Downloads the reference image for the given test from cloud
    storage, returning it as a Telemetry Bitmap object."""
    # TODO(kbr): there's a race condition between the deletion of the
    # temporary file and gsutil's overwriting it.
    if not self.options.refimg_cloud_storage_bucket:
      raise Exception('--refimg-cloud-storage-bucket argument is required')
    temp_file = tempfile.NamedTemporaryFile(suffix='.png').name
    cloud_storage.Get(self.options.refimg_cloud_storage_bucket,
                      self._FormatReferenceImageName(img_name, page, tab),
                      temp_file)
    return image_util.FromPngFile(temp_file)
def FetchDataFile(data_file_name):
    """Download the file from the cloud."""
    with open(CONFIG_PATH) as f:
        config = json.load(f)
    remote_path = config[data_file_name]['remote_path']
    expected_hash = config[data_file_name]['hash']
    filename = posixpath.basename(remote_path)
    local_path = os.path.join(LOCAL_STORAGE_FOLDER, filename)
    cloud_storage.Get(DATA_BUCKET, remote_path, local_path)
    if cloud_storage.CalculateHash(local_path) != expected_hash:
        raise RuntimeError('The downloaded data file has the wrong hash.')
    return local_path
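The JSON config read above maps each data file name to a remote path and an expected hash; a sketch of its assumed shape, with hypothetical values:

EXAMPLE_CONFIG = {
    'example_data_file': {
        'remote_path': 'data/example_data_file_1a2b3c.bin',  # hypothetical
        'hash': '1a2b3c',  # expected content hash of the downloaded file
    },
}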
Example #11
def DownloadTraceFile(trace_link, traces_dir):
    trace_link_extension = os.path.splitext(trace_link)[1]
    if trace_link.startswith('/'):
        trace_link = trace_link[1:]
    if not os.path.exists(traces_dir):
        os.mkdir(traces_dir, 0o755)
    with tempfile.NamedTemporaryFile(dir=traces_dir,
                                     suffix='_trace%s' % trace_link_extension,
                                     delete=False) as trace_file:
        logging.debug('Downloading trace to %s\ntrace_link: %s.',
                      trace_file.name, trace_link)
        cloud_storage.Get(cloud_storage.TELEMETRY_OUTPUT, trace_link,
                          trace_file.name)
        return trace_file.name
def _FetchGCSFile(cloud_storage_bucket, gcs_file, output_file):
    """Fetch and save file from GCS to |output_file|.

  Args:
    cloud_storage_bucket: bucket in cloud storage where symbols reside.
    gcs_file: path to file in GCS.
    output_file: local file to store fetched GCS file.

  Returns:
    True if successfully fetches file; False, otherwise.
  """
    if cloud_storage.Exists(cloud_storage_bucket, gcs_file):
        logging.info('Downloading file from GCS: %s', gcs_file)
        cloud_storage.Get(cloud_storage_bucket, gcs_file, output_file)
        logging.info('Saved file locally to: %s', output_file)
        return True
    return False
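Checking Exists before Get is convenient but racy: the object can disappear between the two calls. A sketch of a variant that relies on the download itself failing instead, assuming the cloud_storage.NotFoundError type used elsewhere in this collection:

def _FetchGCSFileNoRace(cloud_storage_bucket, gcs_file, output_file):
    # Hypothetical variant: skip the pre-check and let Get raise instead.
    try:
        cloud_storage.Get(cloud_storage_bucket, gcs_file, output_file)
        return True
    except cloud_storage.NotFoundError:
        return False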
def DownloadProtoTrace(html_url, download_dir=DEFAULT_TRACE_DIR):
    """Downloads the associated proto trace for html trace url. Returns path.

  Skips downloading if file was already downloaded once."""
    local_filename = os.path.join(download_dir,
                                  GetLocalTraceFileName(html_url))
    for local_path in [local_filename + '.pb', local_filename + '.pb.gz']:
        if os.path.exists(local_path):
            logging.info('%s already downloaded. Skipping.', local_path)
            return local_path

    remote_path = FindProtoTracePath(html_url)
    extension = GetFileExtension(remote_path)
    local_path = local_filename + extension

    cloud_storage.Get(cloud_storage.TELEMETRY_OUTPUT, remote_path, local_path)
    return local_path
def DownloadHtmlTrace(html_url, download_dir=DEFAULT_TRACE_DIR):
    """Downloads html trace given the url. Returns local path.

  Skips downloading if file was already downloaded once."""
    local_filename = os.path.join(download_dir,
                                  GetLocalTraceFileName(html_url))
    local_path = local_filename + '.html'
    if os.path.exists(local_path):
        logging.info('%s already downloaded. Skipping.', local_path)
        return local_path

    remote_path = _GetSubpathInBucket(html_url)
    if not cloud_storage.Exists(cloud_storage.TELEMETRY_OUTPUT, remote_path):
        raise cloud_storage.NotFoundError(
            'HTML trace %s not found in cloud storage.' % html_url)

    cloud_storage.Get(cloud_storage.TELEMETRY_OUTPUT, remote_path, local_path)
    return local_path
def FetchHostBinary(binary_name):
    """Download the binary from the cloud.

  This function fetches the binary for the host platform from the cloud.
  The cloud path is read from the config.
  """
    with open(CONFIG_PATH) as f:
        config = json.load(f)
    platform = _GetHostPlatform()
    remote_path = config[binary_name][platform]['remote_path']
    expected_hash = config[binary_name][platform]['hash']
    filename = posixpath.basename(remote_path)
    local_path = os.path.join(LOCAL_STORAGE_FOLDER, filename)
    cloud_storage.Get(BINARY_BUCKET, remote_path, local_path)
    if cloud_storage.CalculateHash(local_path) != expected_hash:
        raise RuntimeError('The downloaded binary has the wrong hash.')
    mode = os.stat(local_path).st_mode
    os.chmod(local_path, mode | stat.S_IXUSR)
    return local_path
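_GetHostPlatform is not shown; a hypothetical sketch of the shape the config lookup assumes, mapping the interpreter's platform to the keys used in the config:

import sys

def _GetHostPlatform():
    # Assumed helper: the real key names may differ.
    if sys.platform.startswith('linux'):
        return 'linux'
    if sys.platform == 'darwin':
        return 'mac'
    return 'win'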
Example #16
def DownloadAndExtractZipFile(zip_path, source, destination):
    # cloud_storage_bucket comes from the surrounding scope in the original.
    if not os.path.isfile(zip_path):
        if not cloud_storage.Exists(cloud_storage_bucket, source):
            print("Can't find symbols on GCS.")
            return False
        print("Downloading symbols files from GCS, please wait.")
        cloud_storage.Get(cloud_storage_bucket, source, zip_path)
        if not os.path.isfile(zip_path):
            print("Can't download symbols from GCS.")
            return False
    with zipfile.ZipFile(zip_path, "r") as zip_file:
        for member in zip_file.namelist():
            filename = os.path.basename(member)
            # Skip directories.
            if not filename:
                continue
            # Extract archived files, flattening any directory structure.
            member_file = zip_file.open(member)
            target = open(os.path.join(destination, filename), "wb")
            with member_file, target:
                shutil.copyfileobj(member_file, target)
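Note that the extraction loop above deliberately flattens the archive: os.path.basename drops directory components, so every member lands directly in destination. When the archive layout should be preserved instead, the standard zipfile API does it in one call:

with zipfile.ZipFile(zip_path, "r") as zip_file:
    zip_file.extractall(destination)  # keeps the archive's directory layout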
Example #17
    def _DownloadRemoteExtensions(self, remote_bucket, local_extensions_dir):
        """Downloads and unzips archive of common extensions to disk.

    Args:
        remote_bucket: bucket to download remote archive from.
        local_extensions_dir: destination extensions directory.

    Raises:
        InvalidExtensionArchiveError if remote archive is not found.
    """
        # Force Unix directory separator for remote path.
        remote_zip_path = '%s/%s' % (REMOTE_DIR, ZIP_NAME)
        local_zip_path = os.path.join(local_extensions_dir, ZIP_NAME)
        try:
            cloud_storage.Get(remote_bucket, remote_zip_path, local_zip_path)
        except cloud_storage.ServerError:
            raise InvalidExtensionArchiveError(
                'Can\'t find archive at gs://%s/%s.' %
                (remote_bucket, remote_zip_path))
        try:
            with zipfile.ZipFile(local_zip_path, 'r') as extensions_zip:
                extensions_zip.extractall(local_extensions_dir)
        finally:
            os.remove(local_zip_path)
Example #18
def download_revision_map(context):
    """Downloads the revision map in original_gs_url in context."""
    download_file = '%s/%s' % (context.repackage_remote_path,
                               REVISION_MAP_FILE)
    cloud_storage.Get(context.repackage_gs_bucket, download_file,
                      context.revision_file)
def _CalculateHash(remote_path):
    """Downloads the file at remote_path and returns its content hash."""
    with tempfile_ext.NamedTemporaryFile() as f:
        f.close()
        cloud_storage.Get(BINARY_BUCKET, remote_path, f.name)
        return cloud_storage.CalculateHash(f.name)