def AddRecordedStories(self, stories, upload_to_cloud_storage=False):
  """Commit a finished WPR recording for the given stories.

  Moves the temporary recording into its permanent WPR archive file,
  points each story at that archive, refreshes the `.sha1` sidecar hash
  file, rewrites the archive info file, prunes abandoned archives, and
  optionally uploads the new archive to cloud storage.

  Args:
    stories: Iterable of recorded stories. If empty (or falsy), the
        temporary recording is deleted and nothing else happens.
    upload_to_cloud_storage: If True, upload the new WPR file to the
        story set's cloud storage bucket. Logs a warning and skips the
        upload if no bucket is configured.
  """
  if not stories:
    # Nothing was recorded; discard the temporary capture file.
    os.remove(self.temp_target_wpr_file_path)
    return

  (target_wpr_file, target_wpr_file_path) = self._NextWprFileName()
  for story in stories:
    self._SetWprFileForStory(story.display_name, target_wpr_file)
  shutil.move(self.temp_target_wpr_file_path, target_wpr_file_path)

  # Update the hash file.
  target_wpr_file_hash = cloud_storage.CalculateHash(target_wpr_file_path)
  with open(target_wpr_file_path + '.sha1', 'wb') as f:
    f.write(target_wpr_file_hash)
    f.flush()

  self._WriteToFile()
  self._DeleteAbandonedWprFiles()

  # Upload to cloud storage.
  if upload_to_cloud_storage:
    if not self._bucket:
      logging.warning(
          'StorySet must have bucket specified to upload '
          'stories to cloud storage.')
      return
    try:
      cloud_storage.Insert(self._bucket, target_wpr_file_hash,
                           target_wpr_file_path)
    except cloud_storage.CloudStorageError as e:
      # BUG FIX: the original applied '%' to a single bare value for a
      # two-placeholder format string, raising "not enough arguments for
      # format string" instead of logging. Use logging's lazy %-args.
      logging.warning(
          'Failed to upload wpr file %s to cloud storage. Error:%s',
          target_wpr_file_path, e)
def assertCorrectHashFile(self, file_path):
  """Assert that file_path has a `.sha1` sidecar matching its contents.

  Temporarily routes cloud_storage.CalculateHash through the test
  override so the comparison uses the same stubbed implementation that
  produced the sidecar file; the real function is restored afterwards.
  """
  real_calculate_hash = cloud_storage.CalculateHash
  cloud_storage.CalculateHash = self.overrides.cloud_storage.CalculateHash
  try:
    hash_path = file_path + '.sha1'
    self.assertTrue(os.path.exists(hash_path))
    with open(hash_path, 'rb') as hash_file:
      recorded_hash = hash_file.read()
    self.assertEquals(cloud_storage.CalculateHash(file_path), recorded_hash)
  finally:
    # Always undo the monkey-patch, even if an assertion fails.
    cloud_storage.CalculateHash = real_calculate_hash
def AddCloudStorageDependencyUpdateJob(
    self, dependency, platform, dependency_path, version=None,
    execute_job=True):
  """Update the file downloaded from cloud storage for a dependency/platform.

  Queues (and optionally executes) an upload of a new file to cloud
  storage for the given dependency/platform pair, updating the stored
  cloud storage hash — and, if given, the version — for that pair.

  Example usage:
    The following should update the default platform for 'dep_name':
      UpdateCloudStorageDependency('dep_name', 'default', 'path/to/file')

    The following should update both the mac and win platforms for
    'dep_name', or neither if either update fails:
      UpdateCloudStorageDependency(
          'dep_name', 'mac_x86_64', 'path/to/mac/file', execute_job=False)
      UpdateCloudStorageDependency(
          'dep_name', 'win_AMD64', 'path/to/win/file', execute_job=False)
      ExecuteUpdateJobs()

  Args:
    dependency: The dependency to update.
    platform: The platform to update the dependency info for.
    dependency_path: Path to the new dependency to be used.
    version: Version of the updated dependency, for checking future
        updates against.
    execute_job: True if the config should be written to disk and the
        file should be uploaded to cloud storage after the update. False
        if multiple updates should be performed atomically. Must call
        ExecuteUpdateJobs after all non-executed jobs are added to
        complete the update.

  Raises:
    ReadWriteError: If the config was not initialized as writable, or if
        |execute_job| is True but the config has update jobs still
        pending execution.
    ValueError: If no information exists in the config for |dependency|
        on |platform|.
  """
  self._ValidateIsConfigUpdatable(
      execute_job=execute_job, dependency=dependency, platform=platform)
  self._is_dirty = True

  # Look up where this platform's files live in cloud storage.
  base_folder = self._GetPlatformData(
      dependency, platform, 'cloud_storage_base_folder')
  bucket = self._GetPlatformData(
      dependency, platform, 'cloud_storage_bucket')

  new_hash = cloud_storage.CalculateHash(dependency_path)
  if version:
    self._SetPlatformData(dependency, platform, 'version_in_cs', version)
  self._SetPlatformData(dependency, platform, 'cloud_storage_hash', new_hash)

  remote_path = self._CloudStorageRemotePath(
      dependency, new_hash, base_folder)
  self._pending_uploads.append(
      uploader.CloudStorageUploader(bucket, remote_path, dependency_path))
  if execute_job:
    self.ExecuteUpdateJobs()
def _Base64Hash(file_path):
  """Return the base64-encoded cloud storage hash of the file at file_path."""
  raw_hash = cloud_storage.CalculateHash(file_path)
  return base64.b64encode(raw_hash)