def testInitWithArchiveInfoErrors(self):
    """An ArchiveInfo is only valid alongside complete cloud storage info.

    Each construction below omits at least one required cloud storage
    field while supplying archive_info, and must raise ValueError.
    """
    zip_info = archive_info.ArchiveInfo(
        'download_path', 'unzip_location', 'path_within_archive')
    # No cloud storage fields at all.
    with self.assertRaises(ValueError):
        cloud_storage_info.CloudStorageInfo(
            None, None, None, None, archive_info=zip_info)
    # Only the remote path is given.
    with self.assertRaises(ValueError):
        cloud_storage_info.CloudStorageInfo(
            None, None, None, 'cs_remote_path', archive_info=zip_info)
    # Download path is missing.
    with self.assertRaises(ValueError):
        cloud_storage_info.CloudStorageInfo(
            'cs_bucket', 'cs_hash', None, 'cs_remote_path',
            archive_info=zip_info)
    # Remote path is missing.
    with self.assertRaises(ValueError):
        cloud_storage_info.CloudStorageInfo(
            'cs_bucket', 'cs_hash', 'cs_remote_path', None,
            version_in_cs='version', archive_info=zip_info)
def testGetRemotePathWithArchive(self, cs_get_mock, unzip_mock):
    """GetRemotePath on an archived dep downloads, unzips once, and cleans up.

    cs_get_mock and unzip_mock are injected by mock.patch decorators on this
    method (declared outside this chunk). Uses a fake filesystem (self.fs,
    presumably pyfakefs — TODO confirm) so no real files are touched.
    """
    # Simulate cloud storage: "download" by creating the file on the fake fs.
    def _GetIfHashChangedMock(cs_path, download_path, bucket, file_hash):
        del cs_path, bucket, file_hash
        if not os.path.exists(download_path):
            self.fs.CreateFile(download_path, contents='1010001010101010110101')
    cs_get_mock.side_effect = _GetIfHashChangedMock

    unzip_path = os.path.join(
        os.path.dirname(self.download_path), 'unzip_dir')
    path_within_archive = os.path.join('path', 'within', 'archive')
    dep_path = os.path.join(unzip_path, path_within_archive)

    # Simulate unzipping: create the dependency plus some extra files so the
    # test verifies the dep is located among unrelated extracted content.
    def _UnzipFileMock(archive_file, unzip_location, tmp_location=None):
        del archive_file, tmp_location
        self.fs.CreateFile(dep_path)
        self.fs.CreateFile(os.path.join(unzip_location, 'extra', 'path'))
        self.fs.CreateFile(os.path.join(unzip_location, 'another_extra_path'))
    unzip_mock.side_effect = _UnzipFileMock

    # Create a stale directory that's expected to get deleted
    stale_unzip_path_glob = os.path.join(
        os.path.dirname(self.download_path), 'unzip_dir_*')
    stale_path = os.path.join(
        os.path.dirname(self.download_path), 'unzip_dir_stale')
    self.fs.CreateDirectory(stale_path)
    self.fs.CreateFile(os.path.join(stale_path, 'some_file'))

    self.assertFalse(os.path.exists(dep_path))
    zip_info = archive_info.ArchiveInfo(
        self.download_path, unzip_path, path_within_archive,
        stale_unzip_path_glob)
    self.cs_info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket', 'cs_hash', self.download_path, 'cs_remote_path',
        version_in_cs='1.2.3.4', archive_info=zip_info)

    # Construction alone must not trigger an unzip; only GetRemotePath does.
    self.assertFalse(unzip_mock.called)
    self.assertEqual(
        os.path.abspath(dep_path),
        self.cs_info.GetRemotePath())
    self.assertTrue(os.path.exists(dep_path))
    # The extracted dependency must be user readable/writable/executable.
    self.assertTrue(stat.S_IMODE(os.stat(os.path.abspath(dep_path)).st_mode) &
                    (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR))
    unzip_mock.assert_called_once_with(self.download_path, unzip_path)

    # Stale directory should have been deleted
    self.assertFalse(os.path.exists(stale_path))

    # Should not need to unzip a second time, but should return the same path.
    unzip_mock.reset_mock()
    self.assertTrue(os.path.exists(dep_path))
    self.assertEqual(
        os.path.abspath(dep_path),
        self.cs_info.GetRemotePath())
    self.assertTrue(stat.S_IMODE(os.stat(os.path.abspath(dep_path)).st_mode) &
                    (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR))
    self.assertFalse(unzip_mock.called)
def testInitWithArchiveInfo(self):
    """Constructor stores every argument in the expected private attribute."""
    archive = archive_info.ArchiveInfo(
        'download_path', 'unzip_location', 'path_within_archive')
    info = cloud_storage_info.CloudStorageInfo(
        'cs_bucket', 'cs_hash', 'download_path', 'cs_remote_path',
        archive_info=archive)
    self.assertEqual('cs_bucket', info._cs_bucket)
    self.assertEqual('cs_hash', info._cs_hash)
    self.assertEqual('download_path', info._download_path)
    self.assertEqual('cs_remote_path', info._cs_remote_path)
    self.assertEqual(archive, info._archive_info)
    # version_in_cs was not supplied, so it must be unset/falsy.
    self.assertFalse(info._version_in_cs)
def IterDependencyInfo(self):
    """Yields a DependencyInfo for each dependency/platform pair.

    Raises:
        ReadWriteError: If called when the config is writable.
        ConfigError: If a dependency has cloud storage info on a platform
            but is missing its cloud storage hash.
        ValueError: If any of the dependencies contain partial information
            for downloading from cloud_storage. (See dependency_info.py)
    """
    if self._writable:
        raise exceptions.ReadWriteError(
            'Trying to read dependency info from a writable config. File for '
            'config: %s' % self._config_path)
    # All relative paths in the config are resolved against its directory.
    base_path = os.path.dirname(self._config_path)
    for dependency in self._config_data:
        dependency_dict = self._config_data.get(dependency)
        platforms_dict = dependency_dict.get('file_info', {})
        for platform in platforms_dict:
            platform_info = platforms_dict.get(platform)

            local_info = None
            local_paths = platform_info.get('local_paths', [])
            if local_paths:
                paths = []
                for path in local_paths:
                    path = self._FormatPath(path)
                    paths.append(os.path.abspath(os.path.join(base_path, path)))
                local_info = local_path_info.LocalPathInfo(paths)

            cs_info = None
            cs_bucket = dependency_dict.get('cloud_storage_bucket')
            cs_base_folder = dependency_dict.get(
                'cloud_storage_base_folder', '')
            download_path = platform_info.get('download_path')
            if download_path:
                download_path = self._FormatPath(download_path)
                download_path = os.path.abspath(
                    os.path.join(base_path, download_path))

                cs_hash = platform_info.get('cloud_storage_hash')
                if not cs_hash:
                    # Fix: the message was previously passed logging-style
                    # (format string plus separate args to the exception
                    # constructor), so the %s placeholders were never
                    # interpolated. Format it explicitly, matching the
                    # ReadWriteError raise above.
                    raise exceptions.ConfigError(
                        'Dependency %s has cloud storage info on platform %s, '
                        'but is missing a cloud storage hash.' % (
                            dependency, platform))
                cs_remote_path = self._CloudStorageRemotePath(
                    dependency, cs_hash, cs_base_folder)
                version_in_cs = platform_info.get('version_in_cs')

                zip_info = None
                path_within_archive = platform_info.get('path_within_archive')
                if path_within_archive:
                    # Unzip into a directory keyed by the hash so a new hash
                    # gets a fresh directory; the 40-hex-char glob matches
                    # directories left over from any previous hash so they
                    # can be cleaned up as stale.
                    unzip_path = os.path.abspath(
                        os.path.join(
                            os.path.dirname(download_path),
                            '%s_%s_%s' % (dependency, platform, cs_hash)))
                    stale_unzip_path_glob = os.path.abspath(
                        os.path.join(
                            os.path.dirname(download_path),
                            '%s_%s_%s' % (dependency, platform,
                                          '[0-9a-f]' * 40)))
                    zip_info = archive_info.ArchiveInfo(
                        download_path, unzip_path, path_within_archive,
                        stale_unzip_path_glob)

                cs_info = cloud_storage_info.CloudStorageInfo(
                    cs_bucket, cs_hash, download_path, cs_remote_path,
                    version_in_cs=version_in_cs, archive_info=zip_info)

            dep_info = dependency_info.DependencyInfo(
                dependency, platform, self._config_path,
                local_path_info=local_info, cloud_storage_info=cs_info)
            yield dep_info