def testInitWithArchiveInfo(self):
    """All constructor arguments are stored when archive info is supplied."""
    zip_info = archive_info.ArchiveInfo(
        'download_path', 'unzip_location', 'path_within_archive')
    cs_info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'download_path', 'cs_remote_path',
        archive_info=zip_info)
    # Every private attribute should mirror its constructor argument.
    self.assertEqual('cs_hash', cs_info._cs_hash)
    self.assertEqual('cs_bucket', cs_info._cs_bucket)
    self.assertEqual('cs_remote_path', cs_info._cs_remote_path)
    self.assertEqual('download_path', cs_info._download_path)
    self.assertEqual(zip_info, cs_info._archive_info)
    # No version was given, so none should be recorded.
    self.assertFalse(cs_info._version_in_cs)
def testInitWithArchiveInfoErrors(self):
    """Incomplete cloud-storage data raises ValueError even with an archive."""
    zip_info = archive_info.ArchiveInfo(
        'download_path', 'unzip_location', 'path_within_archive')
    # All cloud-storage fields missing.
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo,
        None, None, None, None, archive_info=zip_info)
    # A remote path alone is not enough.
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo,
        None, None, None, 'cs_remote_path', archive_info=zip_info)
    # Missing the download path.
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo,
        'cs_bucket', 'cs_hash', None, 'cs_remote_path',
        archive_info=zip_info)
    # A version without a complete set of cloud-storage fields.
    self.assertRaises(
        ValueError, cloud_storage_info.CloudStorageInfo,
        'cs_bucket', 'cs_hash', 'cs_remote_path', None,
        version_in_cs='version', archive_info=zip_info)
def testGetRemotePathWithArchive(self, cs_get_mock, unzip_mock):
    """GetRemotePath downloads and unzips once, then reuses the extraction."""
    def _GetIfHashChangedMock(cs_path, download_path, bucket, file_hash):
        # Simulate a cloud-storage fetch by materializing the archive file
        # in the fake filesystem.
        del cs_path, bucket, file_hash
        if not os.path.exists(download_path):
            self.fs.CreateFile(
                download_path, contents='1010001010101010110101')
    cs_get_mock.side_effect = _GetIfHashChangedMock

    unzip_path = os.path.join(
        os.path.dirname(self.download_path), 'unzip_dir')
    path_within_archive = os.path.join('path', 'within', 'archive')
    dep_path = os.path.join(unzip_path, path_within_archive)

    def _UnzipFileMock(archive_file, unzip_location, tmp_location=None):
        # Simulate extraction: create the dependency plus unrelated extras.
        del archive_file, tmp_location
        self.fs.CreateFile(dep_path)
        self.fs.CreateFile(os.path.join(unzip_location, 'extra', 'path'))
        self.fs.CreateFile(
            os.path.join(unzip_location, 'another_extra_path'))
    unzip_mock.side_effect = _UnzipFileMock

    self.assertFalse(os.path.exists(dep_path))
    zip_info = archive_info.ArchiveInfo(
        self.download_path, unzip_path, path_within_archive)
    self.cs_info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket', 'cs_hash', self.download_path, 'cs_remote_path',
        version_in_cs='1.2.3.4', archive_info=zip_info)

    self.assertFalse(unzip_mock.called)
    self.assertEqual(
        os.path.abspath(dep_path), self.cs_info.GetRemotePath())
    self.assertTrue(os.path.exists(dep_path))
    # The extracted file must carry owner read/write/execute permission bits.
    self.assertTrue(
        stat.S_IMODE(os.stat(os.path.abspath(dep_path)).st_mode) &
        (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR))
    unzip_mock.assert_called_once_with(self.download_path, unzip_path)

    # Should not need to unzip a second time, but should return the same path.
    unzip_mock.reset_mock()
    self.assertTrue(os.path.exists(dep_path))
    self.assertEqual(
        os.path.abspath(dep_path), self.cs_info.GetRemotePath())
    self.assertTrue(
        stat.S_IMODE(os.stat(os.path.abspath(dep_path)).st_mode) &
        (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR))
    self.assertFalse(unzip_mock.called)
def IterDependencyInfo(self):
    """ Yields a DependencyInfo for each dependency/platform pair.

    Raises:
        ReadWriteError: If called when the config is writable.
        ConfigError: If a platform has cloud storage info but no hash.
        ValueError: If any of the dependencies contain partial information
            for downloading from cloud_storage. (See dependency_info.py)
    """
    if self._writable:
        raise exceptions.ReadWriteError(
            'Trying to read dependency info from a writable config. File for '
            'config: %s' % self._config_path)
    base_path = os.path.dirname(self._config_path)
    for dependency in self._config_data:
        dependency_dict = self._config_data.get(dependency)
        platforms_dict = dependency_dict.get('file_info', {})
        for platform in platforms_dict:
            platform_info = platforms_dict.get(platform)

            # Build the local-path lookup info, if any paths are listed.
            local_info = None
            local_paths = platform_info.get('local_paths', [])
            if local_paths:
                paths = [
                    os.path.abspath(
                        os.path.join(base_path, self._FormatPath(path)))
                    for path in local_paths]
                local_info = local_path_info.LocalPathInfo(paths)

            cs_info = None
            cs_bucket = dependency_dict.get('cloud_storage_bucket')
            cs_base_folder = dependency_dict.get(
                'cloud_storage_base_folder', '')
            download_path = platform_info.get('download_path')
            if download_path:
                download_path = self._FormatPath(download_path)
                download_path = os.path.abspath(
                    os.path.join(base_path, download_path))
                cs_hash = platform_info.get('cloud_storage_hash')
                if not cs_hash:
                    # BUG FIX: the format args were previously passed as
                    # extra constructor arguments (logging style), so the
                    # %s placeholders were never interpolated into the
                    # raised message. Interpolate them explicitly.
                    raise exceptions.ConfigError(
                        'Dependency %s has cloud storage info on platform '
                        '%s, but is missing a cloud storage hash.' % (
                            dependency, platform))
                cs_remote_path = self._CloudStorageRemotePath(
                    dependency, cs_hash, cs_base_folder)
                version_in_cs = platform_info.get('version_in_cs')

                # Archived dependencies are unzipped next to the download
                # into a directory unique to (dependency, platform, hash).
                zip_info = None
                path_within_archive = platform_info.get(
                    'path_within_archive')
                if path_within_archive:
                    unzip_path = os.path.abspath(
                        os.path.join(
                            os.path.dirname(download_path),
                            '%s_%s_%s' % (dependency, platform, cs_hash)))
                    zip_info = archive_info.ArchiveInfo(
                        download_path, unzip_path, path_within_archive)

                cs_info = cloud_storage_info.CloudStorageInfo(
                    cs_bucket, cs_hash, download_path, cs_remote_path,
                    version_in_cs=version_in_cs, archive_info=zip_info)

            dep_info = dependency_info.DependencyInfo(
                dependency, platform, self._config_path,
                local_path_info=local_info, cloud_storage_info=cs_info)
            yield dep_info
def testUpdateMaxCloudStorageInfo(self):
    """Update adopts cloud-storage info from a new config, keeps it across a
    config without one, and rejects a second config that also provides one.
    """
    dep_info1 = dependency_info.DependencyInfo(
        'dep1', 'platform1', 'config_path1')
    zip_info2 = archive_info.ArchiveInfo(
        'archive_path2', 'unzip_path2', 'path_withing_archive2')
    cs_info2 = cloud_storage_info.CloudStorageInfo(
        'cs_bucket2', 'cs_hash2', 'download_path2', 'cs_remote_path2',
        version_in_cs='2.1.1', archive_info=zip_info2)
    dep_info2 = dependency_info.DependencyInfo(
        'dep1', 'platform1', 'config_path2', cloud_storage_info=cs_info2)
    dep_info3 = dependency_info.DependencyInfo(
        'dep1', 'platform1', 'config_path3')
    zip_info4 = archive_info.ArchiveInfo(
        'archive_path4', 'unzip_path4', 'path_withing_archive4')
    cs_info4 = cloud_storage_info.CloudStorageInfo(
        'cs_bucket4', 'cs_hash4', 'download_path4', 'cs_remote_path4',
        version_in_cs='4.2.1', archive_info=zip_info4)
    dep_info4 = dependency_info.DependencyInfo(
        'dep1', 'platform1', 'config_path4', cloud_storage_info=cs_info4)

    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(['config_path1'], dep_info1.config_paths)

    # Updating with a config that has cloud-storage info adopts that info.
    dep_info1.Update(dep_info2)
    self.assertFalse(dep_info1.local_paths)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(
        ['config_path1', 'config_path2'], dep_info1.config_paths)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    # FIX: this assertion appeared twice in a row; the duplicate is removed.
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)

    # Updating with a config lacking cloud-storage info keeps the old info.
    dep_info1.Update(dep_info3)
    self.assertEqual('dep1', dep_info1.dependency)
    self.assertEqual('platform1', dep_info1.platform)
    self.assertEqual(
        ['config_path1', 'config_path2', 'config_path3'],
        dep_info1.config_paths)
    self.assertFalse(dep_info1.local_paths)
    cs_info = dep_info1._cloud_storage_info
    self.assertEqual(cs_info, cs_info2)
    self.assertEqual('cs_bucket2', cs_info._cs_bucket)
    self.assertEqual('cs_hash2', cs_info._cs_hash)
    self.assertEqual('download_path2', cs_info._download_path)
    self.assertEqual('cs_remote_path2', cs_info._cs_remote_path)

    # Two configs both supplying cloud-storage info is an error.
    self.assertRaises(ValueError, dep_info1.Update, dep_info4)