Example #1
  def AddRecordedStories(self, stories, upload_to_cloud_storage=False):
    if not stories:
      os.remove(self.temp_target_wpr_file_path)
      return

    (target_wpr_file, target_wpr_file_path) = self._NextWprFileName()
    for story in stories:
      self._SetWprFileForStory(story.display_name, target_wpr_file)
    shutil.move(self.temp_target_wpr_file_path, target_wpr_file_path)

    # Update the hash file.
    target_wpr_file_hash = cloud_storage.CalculateHash(target_wpr_file_path)
    with open(target_wpr_file_path + '.sha1', 'w') as f:
      f.write(target_wpr_file_hash)
      f.flush()

    self._WriteToFile()
    self._DeleteAbandonedWprFiles()

    # Upload to cloud storage
    if upload_to_cloud_storage:
      if not self._bucket:
        logging.warning('StorySet must have bucket specified to upload '
                        'stories to cloud storage.')
        return
      try:
        cloud_storage.Insert(self._bucket, target_wpr_file_hash,
                             target_wpr_file_path)
      except cloud_storage.CloudStorageError as e:
        logging.warning('Failed to upload wpr file %s to cloud storage. '
                        'Error: %s', target_wpr_file_path, e)
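The `.sha1` sidecar written above implies that `cloud_storage.CalculateHash` produces a SHA-1 digest of the archive contents. As a point of reference, here is a minimal stand-in, assuming the hash is the hex digest of the file read in chunks (the function name and chunk size are illustrative):

import hashlib

def calculate_hash(file_path):
    # Hash the file in 1 MiB chunks so large WPR archives do not need
    # to fit in memory; return the hex digest written to the sidecar.
    sha1 = hashlib.sha1()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            sha1.update(chunk)
    return sha1.hexdigest()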
Example #2
  def assertCorrectHashFile(self, file_path):
    old_ch = cloud_storage.CalculateHash
    cloud_storage.CalculateHash = self.overrides.cloud_storage.CalculateHash
    try:
      self.assertTrue(os.path.exists(file_path + '.sha1'))
      with open(file_path + '.sha1', 'rb') as f:
        self.assertEqual(cloud_storage.CalculateHash(file_path), f.read())
    finally:
      cloud_storage.CalculateHash = old_ch
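The save/patch/restore dance above can also be written with `unittest.mock.patch.object`, which undoes the override even when an assertion fails; a sketch assuming the same `self.overrides` test fixture:

from unittest import mock

def assertCorrectHashFile(self, file_path):
    # patch.object restores cloud_storage.CalculateHash on exit,
    # replacing the manual try/finally bookkeeping.
    with mock.patch.object(cloud_storage, 'CalculateHash',
                           self.overrides.cloud_storage.CalculateHash):
        self.assertTrue(os.path.exists(file_path + '.sha1'))
        with open(file_path + '.sha1', 'rb') as f:
            self.assertEqual(cloud_storage.CalculateHash(file_path), f.read())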
Example #3
  def AddCloudStorageDependencyUpdateJob(
      self, dependency, platform, dependency_path, version=None,
      execute_job=True):
    """Update the file downloaded from cloud storage for a dependency/platform.

    Upload a new file to cloud storage for the given dependency and platform
    pair and update the cloud storage hash and the version for the given pair.

    Example usage:
      The following updates the default platform for 'dep_name':
          AddCloudStorageDependencyUpdateJob(
              'dep_name', 'default', 'path/to/file')

      The following updates both the mac and win platforms for 'dep_name',
      or neither if either update fails:
          AddCloudStorageDependencyUpdateJob(
              'dep_name', 'mac_x86_64', 'path/to/mac/file', execute_job=False)
          AddCloudStorageDependencyUpdateJob(
              'dep_name', 'win_AMD64', 'path/to/win/file', execute_job=False)
          ExecuteUpdateJobs()

    Args:
      dependency: The dependency to update.
      platform: The platform to update the dependency info for.
      dependency_path: Path to the new dependency to be used.
      version: Version of the updated dependency, for checking future updates
          against.
      execute_job: True if the config should be written to disk and the file
          should be uploaded to cloud storage after the update. False if
          multiple updates should be performed atomically. Must call
          ExecuteUpdateJobs after all non-executed jobs are added to complete
          the update.

    Raises:
      ReadWriteError: If the config was not initialized as writable, or if
          |execute_job| is True but the config has update jobs still pending
          execution.
      ValueError: If no information exists in the config for |dependency| on
          |platform|.
    """
    self._ValidateIsConfigUpdatable(
        execute_job=execute_job, dependency=dependency, platform=platform)
    cs_hash = cloud_storage.CalculateHash(dependency_path)
    if version:
      self._SetPlatformData(dependency, platform, 'version_in_cs', version)
    self._SetPlatformData(dependency, platform, 'cloud_storage_hash', cs_hash)

    cs_base_folder = self._GetPlatformData(
        dependency, platform, 'cloud_storage_base_folder')
    cs_bucket = self._GetPlatformData(
        dependency, platform, 'cloud_storage_bucket')
    cs_remote_path = self._CloudStorageRemotePath(
        dependency, cs_hash, cs_base_folder)
    self._pending_uploads.append(uploader.CloudStorageUploader(
        cs_bucket, cs_remote_path, dependency_path))
    if execute_job:
      self.ExecuteUpdateJobs()
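Tying the docstring's batch scenario to code, a hedged usage sketch: it assumes the method lives on a writable dependency config (the `BaseConfig` class name and the file paths here are assumptions, not taken from the source):

# Hypothetical writable config; nothing is written or uploaded until
# ExecuteUpdateJobs runs, so the two platform updates land together.
config = base_config.BaseConfig('path/to/config.json', writable=True)
config.AddCloudStorageDependencyUpdateJob(
    'dep_name', 'mac_x86_64', 'path/to/mac/file', execute_job=False)
config.AddCloudStorageDependencyUpdateJob(
    'dep_name', 'win_AMD64', 'path/to/win/file', execute_job=False)
config.ExecuteUpdateJobs()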
Example #4
def FetchDataFile(data_file_name):
    """Download the file from the cloud."""
    with open(CONFIG_PATH) as f:
        config = json.load(f)
    remote_path = config[data_file_name]['remote_path']
    expected_hash = config[data_file_name]['hash']
    filename = posixpath.basename(remote_path)
    local_path = os.path.join(LOCAL_STORAGE_FOLDER, filename)
    cloud_storage.Get(DATA_BUCKET, remote_path, local_path)
    if cloud_storage.CalculateHash(local_path) != expected_hash:
        raise RuntimeError('The downloaded data file has the wrong hash.')
    return local_path
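For reference, the lookups above imply a config shape like the following; the entry name and values are hypothetical, only the `remote_path` and `hash` keys come from the code:

# Plausible contents of the JSON file at CONFIG_PATH.
EXAMPLE_CONFIG = {
    'my_data_file': {
        'remote_path': 'data/my_data_file/1.0/data.bin',
        'hash': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
    },
}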
Example #5
def UploadAndSwitchDataFile(data_file_name, data_file_path, version):
    """Upload the data file to the cloud and update the config to use the
    new version."""
    filename = os.path.basename(data_file_path)
    remote_path = posixpath.join(DATA_CS_FOLDER, data_file_name, version,
                                 filename)
    if not cloud_storage.Exists(DATA_BUCKET, remote_path):
        cloud_storage.Insert(DATA_BUCKET,
                             remote_path,
                             data_file_path,
                             publicly_readable=False)

    with open(CONFIG_PATH) as f:
        config = json.load(f)
    config[data_file_name]['remote_path'] = remote_path
    config[data_file_name]['hash'] = cloud_storage.CalculateHash(
        data_file_path)
    with open(CONFIG_PATH, 'w') as f:
        json.dump(config, f, indent=4, separators=(',', ': '))
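A possible round trip using the two helpers together (the names and paths are hypothetical): publish a new version, then fetch it back through the updated config. Note the `Exists` check above makes the upload idempotent, since the remote path encodes both the version and the filename.

# Hypothetical usage: after the upload, the config's remote_path and
# hash point at the new version, so FetchDataFile picks it up.
UploadAndSwitchDataFile('my_data_file', '/tmp/data.bin', version='1.1')
local_path = FetchDataFile('my_data_file')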
Example #6
def FetchHostBinary(binary_name):
    """Download the binary from the cloud.

    This function fetches the binary for the host platform from the cloud.
    The cloud path is read from the config.
    """
    with open(CONFIG_PATH) as f:
        config = json.load(f)
    platform = _GetHostPlatform()
    remote_path = config[binary_name][platform]['remote_path']
    expected_hash = config[binary_name][platform]['hash']
    filename = posixpath.basename(remote_path)
    local_path = os.path.join(LOCAL_STORAGE_FOLDER, filename)
    cloud_storage.Get(BINARY_BUCKET, remote_path, local_path)
    if cloud_storage.CalculateHash(local_path) != expected_hash:
        raise RuntimeError('The downloaded binary has the wrong hash.')
    mode = os.stat(local_path).st_mode
    os.chmod(local_path, mode | stat.S_IXUSR)
    return local_path
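`_GetHostPlatform` is not shown in this example; here is a plausible sketch, assuming it builds an '<os>_<arch>' key matching the platform names seen in Example #3 ('mac_x86_64', 'win_AMD64'):

import platform
import sys

def _GetHostPlatform():
    # Map sys.platform to the short OS names used as config keys, and
    # append the machine architecture (e.g. 'x86_64', 'AMD64').
    os_name = {'darwin': 'mac', 'win32': 'win'}.get(sys.platform, 'linux')
    return '%s_%s' % (os_name, platform.machine())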
Example #7
    def AddRecordedStories(self,
                           stories,
                           upload_to_cloud_storage=False,
                           target_platform=_DEFAULT_PLATFORM):
        if not stories:
            os.remove(self.temp_target_wpr_file_path)
            return

        target_wpr_file_hash = cloud_storage.CalculateHash(
            self.temp_target_wpr_file_path)
        (target_wpr_file,
         target_wpr_file_path) = self._NextWprFileName(target_wpr_file_hash)
        for story in stories:
            # Check to see if the platform has been manually overridden.
            if not story.platform_specific:
                current_target_platform = _DEFAULT_PLATFORM
            else:
                current_target_platform = target_platform
            self._SetWprFileForStory(story.name, target_wpr_file,
                                     current_target_platform)
        shutil.move(self.temp_target_wpr_file_path, target_wpr_file_path)

        # Update the hash file.
        with open(target_wpr_file_path + '.sha1', 'w') as f:
            f.write(target_wpr_file_hash)
            f.flush()

        self._WriteToFile()

        # Upload to cloud storage
        if upload_to_cloud_storage:
            if not self._bucket:
                logging.warning(
                    'StorySet must have bucket specified to upload '
                    'stories to cloud storage.')
                return
            try:
                cloud_storage.Insert(self._bucket, target_wpr_file_hash,
                                     target_wpr_file_path)
            except cloud_storage.CloudStorageError as e:
                logging.warning(
                    'Failed to upload wpr file %s to cloud storage. '
                    'Error: %s', target_wpr_file_path, e)
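Unlike Example #1, this version hashes the archive before naming it, so `_NextWprFileName` can derive the filename from the content hash. A hypothetical sketch of such a helper (the attribute names and truncation length are assumptions):

def _NextWprFileName(self, wpr_file_hash):
    # Embedding a hash prefix in the name means re-recording identical
    # content maps to the same archive file.
    wpr_file = '%s_%s.wprgo' % (self._base_name, wpr_file_hash[:10])
    return wpr_file, os.path.join(self._archive_directory, wpr_file)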
Example #8
def _CalculateHash(remote_path):
    with tempfile_ext.NamedTemporaryFile() as f:
        f.close()
        cloud_storage.Get(BINARY_BUCKET, remote_path, f.name)
        return cloud_storage.CalculateHash(f.name)
Example #9
def _Base64Hash(file_path):
    return base64.b64encode(cloud_storage.CalculateHash(file_path))
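Note that under Python 3, `base64.b64encode` requires bytes; if `CalculateHash` returns a hex string (as the `.sha1` sidecars in the other examples suggest), the digest must be encoded first. A Python 3-safe variant (the function name is illustrative):

import base64

def _Base64HashPy3(file_path):
    # Encode the hex digest to bytes for b64encode, then decode the
    # result back to str for use in text contexts.
    digest = cloud_storage.CalculateHash(file_path)
    return base64.b64encode(digest.encode('ascii')).decode('ascii')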