Example #1
def _SyncFilesToCloud(input_api, output_api):
    """Searches for .sha1 files and uploads them to Cloud Storage.

    It validates all the hashes and skips upload if not necessary.
    """

    cloud_storage = LoadSupport(input_api)

    # Look in both buckets, in case the user uploaded the file manually. But this
    # script focuses on WPR archives, so it only uploads to the internal bucket.
    hashes_in_cloud_storage = cloud_storage.List(cloud_storage.INTERNAL_BUCKET)
    hashes_in_cloud_storage += cloud_storage.List(cloud_storage.PUBLIC_BUCKET)

    results = []
    for affected_file in input_api.AffectedFiles(include_deletes=False):
        hash_path = affected_file.AbsoluteLocalPath()
        file_path, extension = os.path.splitext(hash_path)
        if extension != '.sha1':
            continue

        with open(hash_path, 'rb') as f:
            file_hash = f.read(1024).rstrip()
        if file_hash in hashes_in_cloud_storage:
            results.append(
                output_api.PresubmitNotifyResult(
                    'File already in Cloud Storage, skipping upload: %s' %
                    hash_path))
            continue

        if not re.match('^([A-Za-z0-9]{40})$', file_hash):
            results.append(
                output_api.PresubmitError(
                    'Hash file does not contain a valid SHA-1 hash: %s' %
                    hash_path))
            continue
        if not os.path.exists(file_path):
            results.append(
                output_api.PresubmitError(
                    'Hash file exists, but file not found: %s' % hash_path))
            continue
        if cloud_storage.GetHash(file_path) != file_hash:
            results.append(
                output_api.PresubmitError(
                    'Hash file does not match file\'s actual hash: %s' %
                    hash_path))
            continue

        try:
            cloud_storage.Insert(cloud_storage.INTERNAL_BUCKET, file_hash,
                                 file_path)
            results.append(
                output_api.PresubmitNotifyResult(
                    'Uploaded file to Cloud Storage: %s' % hash_path))
        except cloud_storage.CloudStorageError as e:
            results.append(
                output_api.PresubmitError(
                    'Unable to upload to Cloud Storage: %s\n\n%s' %
                    (hash_path, e)))

    return results
Example #2
def _SyncFilesToCloud(input_api, output_api):
    """Searches for .sha1 files and uploads them to Cloud Storage.

    It validates all the hashes and skips upload if not necessary.
    """
    # Because this script will be called from a magic PRESUBMIT demon,
    # avoid angering it; don't pollute its sys.path.
    old_sys_path = sys.path
    try:
        sys.path = [os.path.join(os.pardir, os.pardir, 'telemetry')] + sys.path
        from telemetry.page import cloud_storage
    finally:
        sys.path = old_sys_path

    hashes_in_cloud_storage = cloud_storage.List(cloud_storage.DEFAULT_BUCKET)

    results = []
    for hash_path in input_api.AbsoluteLocalPaths():
        file_path, extension = os.path.splitext(hash_path)
        if extension != '.sha1':
            continue

        with open(hash_path, 'rb') as f:
            file_hash = f.read(1024).rstrip()
        if file_hash in hashes_in_cloud_storage:
            results.append(
                output_api.PresubmitNotifyResult(
                    'File already in Cloud Storage, skipping upload: %s' %
                    hash_path))
            continue

        if not re.match('^([A-Za-z0-9]{40})$', file_hash):
            results.append(
                output_api.PresubmitError(
                    'Hash file does not contain a valid SHA-1 hash: %s' %
                    hash_path))
            continue
        if not os.path.exists(file_path):
            results.append(
                output_api.PresubmitError(
                    'Hash file exists, but file not found: %s' % hash_path))
            continue
        if cloud_storage.GetHash(file_path) != file_hash:
            results.append(
                output_api.PresubmitError(
                    'Hash file does not match file\'s actual hash: %s' %
                    hash_path))
            continue

        try:
            cloud_storage.Insert(cloud_storage.DEFAULT_BUCKET, file_hash,
                                 file_path)
        except cloud_storage.CloudStorageError:
            results.append(
                output_api.PresubmitError(
                    'Unable to upload to Cloud Storage: %s' % hash_path))

    return results
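Both versions of _SyncFilesToCloud above rely on the same sidecar-file convention: for every payload file foo there is a foo.sha1 holding the 40-character hex SHA-1 digest of foo, and cloud_storage.GetHash is expected to return that digest as a hex string. As a rough standard-library illustration of the value being compared (a sketch, not the telemetry implementation; _Sha1HexOfFile is a hypothetical name):

import hashlib

def _Sha1HexOfFile(path, chunk_size=1024 * 1024):
    # Hypothetical stand-in for cloud_storage.GetHash: stream the file so
    # large WPR archives do not have to fit in memory, and return the
    # 40-character hex digest that the '^([A-Za-z0-9]{40})$' check expects.
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()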
Example #3
    def AddRecordedPages(self, urls):
        (target_wpr_file, target_wpr_file_path) = self._NextWprFileName()
        for url in urls:
            self._SetWprFileForPage(url, target_wpr_file)
        shutil.move(self.temp_target_wpr_file_path, target_wpr_file_path)

        # Update the hash file.
        with open(target_wpr_file_path + '.sha1', 'wb') as f:
            f.write(cloud_storage.GetHash(target_wpr_file_path))
            f.flush()

        self._WriteToFile()
        self._DeleteAbandonedWprFiles()
Example #4
def _SyncFilesToCloud(input_api, output_api):
  """Searches for .sha1 files and uploads them to Cloud Storage.

  It validates all the hashes and skips upload if not necessary.
  """

  cloud_storage = LoadSupport(input_api)

  results = []
  for hash_path, file_hash in _GetFilesNotInCloud(input_api):
    file_path, _ = os.path.splitext(hash_path)

    if not re.match('^([A-Za-z0-9]{40})$', file_hash):
      results.append(output_api.PresubmitError(
          'Hash file does not contain a valid SHA-1 hash: %s' % hash_path))
      continue
    if not os.path.exists(file_path):
      results.append(output_api.PresubmitError(
          'Hash file exists, but file not found: %s' % hash_path))
      continue
    if cloud_storage.GetHash(file_path) != file_hash:
      results.append(output_api.PresubmitError(
          'Hash file does not match file\'s actual hash: %s' % hash_path))
      continue

    try:
      bucket_input = raw_input('Uploading to Cloud Storage: %s\n'
                               'Is this file [P]ublic or Google-[i]nternal?'
                               % file_path).lower()
      if 'public'.startswith(bucket_input):
        bucket = cloud_storage.PUBLIC_BUCKET
      elif ('internal'.startswith(bucket_input) or
            'google-internal'.startswith(bucket_input)):
        bucket = cloud_storage.INTERNAL_BUCKET
      else:
        results.append(output_api.PresubmitError(
            'Response was neither "public" nor "internal": %s' % bucket_input))
        return results

      cloud_storage.Insert(bucket, file_hash, file_path)
      results.append(output_api.PresubmitNotifyResult(
          'Uploaded file to Cloud Storage: %s' % file_path))
    except cloud_storage.CloudStorageError as e:
      results.append(output_api.PresubmitError(
          'Unable to upload to Cloud Storage: %s\n\n%s' % (file_path, e)))

  return results
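The interactive prompt in this example accepts any prefix of 'public' or 'internal'/'google-internal', case-insensitively; since 'public'.startswith('') is true, simply pressing Enter also selects the public bucket. A small, hypothetical standalone version of that decision (the bucket values below are placeholders; the real constants come from the cloud_storage module):

PUBLIC_BUCKET = 'public-bucket-name'      # placeholder values; the real
INTERNAL_BUCKET = 'internal-bucket-name'  # constants live in cloud_storage

def _ChooseBucket(response):
    # Map a free-form prompt reply to a bucket, mirroring the prefix
    # matching in the presubmit example above. Returns None if the reply
    # is neither a prefix of 'public' nor of '(google-)internal'.
    response = response.lower()
    if 'public'.startswith(response):
        return PUBLIC_BUCKET
    if ('internal'.startswith(response) or
            'google-internal'.startswith(response)):
        return INTERNAL_BUCKET
    return None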
Example #5
    def assertCorrectHashFile(self, file_path):
        self.assertTrue(os.path.exists(file_path + '.sha1'))
        with open(file_path + '.sha1', 'rb') as f:
            self.assertEquals(cloud_storage.GetHash(file_path), f.read())
Example #6
def _UpdateHashFile(file_path):
    with open(file_path + '.sha1', 'wb') as f:
        f.write(cloud_storage.GetHash(file_path))
        f.flush()
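A minimal usage sketch of _UpdateHashFile, assuming the telemetry cloud_storage module is importable as in the examples above and using a throwaway scratch file in place of a real WPR archive:

import os
import tempfile

# Hypothetical scratch file standing in for a recorded archive.
fd, wpr_path = tempfile.mkstemp(suffix='.wpr')
with os.fdopen(fd, 'wb') as f:
    f.write(b'recorded page data')

_UpdateHashFile(wpr_path)

# Read the sidecar back and compare it against the recomputed digest,
# mirroring the check the presubmit hooks above perform.
with open(wpr_path + '.sha1', 'r') as f:
    assert f.read().rstrip() == cloud_storage.GetHash(wpr_path)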