  def testReadFiles(self):
    file_system = CachingFileSystem(_CreateLocalFs(),
                                    ObjectStoreCreator.Factory())
    expected = {
      './test1.txt': 'test1\n',
      './test2.txt': 'test2\n',
      './test3.txt': 'test3\n',
    }
    self.assertEqual(
        expected,
        file_system.Read(['./test1.txt', './test2.txt', './test3.txt']).Get())
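
# _CreateLocalFs() above is assumed to be defined earlier in this file. The
# helper below is a minimal sketch of what it could look like, assuming a
# LocalFileSystem rooted at a hypothetical 'test_data/file_system/' directory
# (the name _CreateLocalFsSketch and the path are illustrative guesses, not
# the real implementation):
def _CreateLocalFsSketch():
  return LocalFileSystem('test_data/file_system/')
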
class TestGithubFileSystem(unittest.TestCase):
  def setUp(self):
    self._gfs = GithubFileSystem.ForTest(
        'repo/', partial(FakeURLFSFetcher, LocalFileSystem('')))
    # Start and finish the repository load.
    self._cgfs = CachingFileSystem(self._gfs, ObjectStoreCreator.ForTest())

  def testReadDirectory(self):
    self._gfs.Refresh().Get()
    self.assertEqual(
        sorted(['requirements.txt', '.gitignore', 'README.md', 'src/']),
        sorted(self._gfs.ReadSingle('').Get()))
    self.assertEqual(
        sorted(['__init__.notpy', 'hello.notpy']),
        sorted(self._gfs.ReadSingle('src/').Get()))

  def testReadFile(self):
    self._gfs.Refresh().Get()
    expected = (
        '# Compiled Python files\n'
        '*.pyc\n'
    )
    self.assertEqual(expected, self._gfs.ReadSingle('.gitignore').Get())

  def testMultipleReads(self):
    self._gfs.Refresh().Get()
    self.assertEqual(
        self._gfs.ReadSingle('requirements.txt').Get(),
        self._gfs.ReadSingle('requirements.txt').Get())

  def testReads(self):
    self._gfs.Refresh().Get()
    expected = {
      'src/': sorted(['hello.notpy', '__init__.notpy']),
      '': sorted(['requirements.txt', '.gitignore', 'README.md', 'src/'])
    }
    read = self._gfs.Read(['', 'src/']).Get()
    self.assertEqual(expected['src/'], sorted(read['src/']))
    self.assertEqual(expected[''], sorted(read['']))

  def testStat(self):
    # This is the hash value from the zip on disk.
    real_hash = 'c36fc23688a9ec9e264d3182905dc0151bfff7d7'
    self._gfs.Refresh().Get()
    dir_stat = StatInfo(real_hash, {
      'hello.notpy': StatInfo(real_hash),
      '__init__.notpy': StatInfo(real_hash)
    })
    self.assertEqual(StatInfo(real_hash), self._gfs.Stat('README.md'))
    self.assertEqual(StatInfo(real_hash), self._gfs.Stat('src/hello.notpy'))
    self.assertEqual(dir_stat, self._gfs.Stat('src/'))

  def testBadReads(self):
    self._gfs.Refresh().Get()
    self.assertRaises(FileNotFoundError, self._gfs.Stat, 'DONT_README.md')
    self.assertRaises(FileNotFoundError,
                      self._gfs.ReadSingle('DONT_README.md').Get)

  def testCachingFileSystem(self):
    self._cgfs.Refresh().Get()
    initial_cgfs_read_one = self._cgfs.ReadSingle('src/hello.notpy').Get()
    self.assertEqual(initial_cgfs_read_one,
                     self._gfs.ReadSingle('src/hello.notpy').Get())
    self.assertEqual(initial_cgfs_read_one,
                     self._cgfs.ReadSingle('src/hello.notpy').Get())
    initial_cgfs_read_two = self._cgfs.Read(
        ['README.md', 'requirements.txt']).Get()
    self.assertEqual(
        initial_cgfs_read_two,
        self._gfs.Read(['README.md', 'requirements.txt']).Get())
    self.assertEqual(
        initial_cgfs_read_two,
        self._cgfs.Read(['README.md', 'requirements.txt']).Get())

  def testWithoutRefresh(self):
    # Without refreshing it will still read the content from blobstore, and
    # it does this via the magic of the FakeURLFSFetcher.
    self.assertEqual(['__init__.notpy', 'hello.notpy'],
                     sorted(self._gfs.ReadSingle('src/').Get()))

  def testRefresh(self):
    test_bundle = _TestBundle()
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()

    # It shouldn't fetch until Refresh does so; then it will do 2 fetches,
    # one for the stat, and another for the read.
    self.assertTrue(*fetcher.CheckAndReset())
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1,
                                           fetch_async_count=1,
                                           fetch_resolve_count=1))

    # Refresh is just an alias for Read('').
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset())

    initial_dir_read = sorted(gfs.ReadSingle('').Get())
    initial_file_read = gfs.ReadSingle('dir/file1').Get()

    version, data = test_bundle.Mutate()

    # Check that the changes have not affected the file system yet.
    self.assertEqual(initial_dir_read, sorted(gfs.ReadSingle('').Get()))
    self.assertEqual(initial_file_read, gfs.ReadSingle('dir/file1').Get())
    self.assertNotEqual(StatInfo(version), gfs.Stat(''))

    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1,
                                           fetch_async_count=1,
                                           fetch_resolve_count=1))

    # Check that the changes have affected the file system.
    self.assertEqual(data, gfs.ReadSingle('new-file').Get())
    self.assertEqual(test_bundle.files['zipfile/dir/file1'],
                     gfs.ReadSingle('dir/file1').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('new-file'))

    # Regression test: ensure that reading the data after it's been mutated,
    # but before Refresh() has been realised, still returns the correct data.
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    version, data = test_bundle.Mutate()

    refresh_future = gfs.Refresh()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1, fetch_async_count=1))

    self.assertEqual(data, gfs.ReadSingle('new-file').Get())
    self.assertEqual(test_bundle.files['zipfile/dir/file1'],
                     gfs.ReadSingle('dir/file1').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('new-file'))

    refresh_future.Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_resolve_count=1))

  def testGetThenRefreshOnStartup(self):
    # Regression test: Test that calling Get() but never resolving the
    # future, then Refresh()ing the data, causes the data to be refreshed.
    test_bundle = _TestBundle()
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    self.assertTrue(*fetcher.CheckAndReset())

    # Get a predictable version.
    version, data = test_bundle.Mutate()

    read_future = gfs.ReadSingle('hello.txt')
    # Fetch for the Stat(), async-fetch for the Read().
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1, fetch_async_count=1))

    refresh_future = gfs.Refresh()
    self.assertTrue(*fetcher.CheckAndReset())

    self.assertEqual(data, read_future.Get())
    self.assertTrue(*fetcher.CheckAndReset(fetch_resolve_count=1))
    self.assertEqual(StatInfo(version), gfs.Stat('hello.txt'))
    self.assertTrue(*fetcher.CheckAndReset())

    # The fetch will already have been resolved, so resolving the Refresh
    # won't affect anything.
    refresh_future.Get()
    self.assertTrue(*fetcher.CheckAndReset())

    # Read data should not have changed.
    self.assertEqual(data, gfs.ReadSingle('hello.txt').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('hello.txt'))
    self.assertTrue(*fetcher.CheckAndReset())
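
# The testRefresh and testGetThenRefreshOnStartup methods above rely on a
# _TestBundle helper defined elsewhere in this file. The skeleton below is a
# hedged sketch of the interface those tests assume; it is named
# _TestBundleSketch so it does not shadow the real helper, and the file
# contents, version strings, and Mutate() behaviour are illustrative guesses.
class _TestBundleSketch(object):
  def __init__(self):
    # Paths inside the served zipball, mirroring the layout used by the
    # second testRefresh further below.
    self.files = {
      'zipfile/': '',
      'zipfile/hello.txt': 'world',
      'zipfile/dir/file1': 'contents',
    }
    self._version = 0

  def CreateGfsAndFetcher(self):
    # Assumed to return a fresh GithubFileSystem plus a fetch-counting
    # fetcher serving self.files; the real wiring (TestFileSystem and
    # FakeURLFSFetcher, as in the second testRefresh below) is omitted here.
    raise NotImplementedError()

  def Mutate(self):
    # Assumed to change the underlying data and return the new
    # (version, data) pair that the tests compare against.
    self._version += 1
    data = 'new contents %s' % self._version
    self.files['zipfile/new-file'] = data
    self.files['zipfile/dir/file1'] = 'changed file'
    return str(self._version), data
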
class TestGithubFileSystem(unittest.TestCase):
  def setUp(self):
    self._gfs = GithubFileSystem.ForTest('repo',
                                         FakeURLFSFetcher.CreateLocal())
    # Start and finish the repository load.
    self._cgfs = CachingFileSystem(self._gfs, ObjectStoreCreator.ForTest())

  def testReadDirectory(self):
    self._gfs.Refresh().Get()
    self.assertEqual(
        sorted(['requirements.txt', '.gitignore', 'README.md', 'src/']),
        sorted(self._gfs.ReadSingle('').Get()))
    self.assertEqual(
        sorted(['__init__.notpy', 'hello.notpy']),
        sorted(self._gfs.ReadSingle('src/').Get()))

  def testReadFile(self):
    self._gfs.Refresh().Get()
    expected = (
        '# Compiled Python files\n'
        '*.pyc\n'
    )
    self.assertEqual(expected, self._gfs.ReadSingle('.gitignore').Get())

  def testMultipleReads(self):
    self._gfs.Refresh().Get()
    self.assertEqual(
        self._gfs.ReadSingle('requirements.txt').Get(),
        self._gfs.ReadSingle('requirements.txt').Get())

  def testReads(self):
    self._gfs.Refresh().Get()
    expected = {
      'src/': sorted(['hello.notpy', '__init__.notpy']),
      '': sorted(['requirements.txt', '.gitignore', 'README.md', 'src/'])
    }
    read = self._gfs.Read(['', 'src/']).Get()
    self.assertEqual(expected['src/'], sorted(read['src/']))
    self.assertEqual(expected[''], sorted(read['']))

  def testStat(self):
    self._gfs.Refresh().Get()
    dir_stat = StatInfo(FAKE_HASH, {
      'hello.notpy': StatInfo(FAKE_HASH),
      '__init__.notpy': StatInfo(FAKE_HASH)
    })
    self.assertEqual(StatInfo(FAKE_HASH), self._gfs.Stat('README.md'))
    self.assertEqual(StatInfo(FAKE_HASH), self._gfs.Stat('src/hello.notpy'))
    self.assertEqual(dir_stat, self._gfs.Stat('src/'))

  def testBadReads(self):
    self._gfs.Refresh().Get()
    self.assertRaises(FileNotFoundError, self._gfs.Stat, 'DONT_README.md')
    self.assertRaises(FileNotFoundError,
                      self._gfs.ReadSingle('DONT_README.md').Get)

  def testCachingFileSystem(self):
    self._cgfs.Refresh().Get()
    initial_cgfs_read_one = self._cgfs.ReadSingle('src/hello.notpy').Get()
    self.assertEqual(initial_cgfs_read_one,
                     self._gfs.ReadSingle('src/hello.notpy').Get())
    self.assertEqual(initial_cgfs_read_one,
                     self._cgfs.ReadSingle('src/hello.notpy').Get())
    initial_cgfs_read_two = self._cgfs.Read(
        ['README.md', 'requirements.txt']).Get()
    self.assertEqual(
        initial_cgfs_read_two,
        self._gfs.Read(['README.md', 'requirements.txt']).Get())
    self.assertEqual(
        initial_cgfs_read_two,
        self._cgfs.Read(['README.md', 'requirements.txt']).Get())

  def testWithoutRefresh(self):
    self.assertRaises(FileNotFoundError, self._gfs.ReadSingle('src/').Get)

  def testRefresh(self):
    def make_sha_json(hash_value):
      from copy import deepcopy
      commit_json = json.loads(deepcopy(LocalFileSystem('').ReadSingle(
          'test_data/github_file_system/test_owner/repo/commits/HEAD').Get()))
      # Derive the tree sha from the given hash so that different hash values
      # yield different versions.
      commit_json['commit']['tree']['sha'] = 'hash' + hash_value[4:]
      return json.dumps(commit_json)

    files = {
      'zipfile/': '',
      'zipfile/hello.txt': 'world',
      'zipfile/readme': 'test zip',
      'zipfile/dir/file1': 'contents',
      'zipfile/dir/file2': 'more contents'
    }
    string = _ZipFromFiles(files)
    test_files = {
      'test_owner': {
        'changing-repo': {
          'commits': {
            'HEAD': make_sha_json(FAKE_HASH)
          },
          'zipball': string
        }
      }
    }

    test_file_system = TestFileSystem(test_files)
    gfs = GithubFileSystem.ForTest(
        'changing-repo', FakeURLFSFetcher.Create(test_file_system), path='')

    gfs.Refresh().Get()
    initial_dir_read = sorted(gfs.ReadSingle('').Get())
    initial_file_read = gfs.ReadSingle('dir/file1').Get()

    # Change the underlying data.
    files['zipfile/hello.txt'] = 'hello world again'
    files['zipfile/new-file'] = 'a new file'
    files['zipfile/dir/file1'] = 'changed file'
    test_files['test_owner']['changing-repo']['zipball'] = _ZipFromFiles(
        files)
    test_files['test_owner']['changing-repo']['commits']['HEAD'] = (
        make_sha_json(FAKE_HASH + 'hash'))

    # Check that the changes have not affected the file system yet.
    self.assertEqual(initial_dir_read, sorted(gfs.ReadSingle('').Get()))
    self.assertEqual(initial_file_read, gfs.ReadSingle('dir/file1').Get())

    gfs.Refresh().Get()

    # Check that the changes have affected the file system.
    self.assertTrue('new-file' in gfs.ReadSingle('').Get())
    self.assertEqual(files['zipfile/dir/file1'],
                     gfs.ReadSingle('dir/file1').Get())
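
# _ZipFromFiles() used in testRefresh above is defined elsewhere in this file.
# The function below is a minimal sketch of the assumed behaviour: building an
# in-memory zip from a {path: contents} dict with the standard library. It is
# named _ZipFromFilesSketch so it does not shadow the real helper.
def _ZipFromFilesSketch(file_dict):
  import io
  import zipfile
  buf = io.BytesIO()
  zip_file = zipfile.ZipFile(buf, 'w')
  for filename, contents in sorted(file_dict.items()):
    zip_file.writestr(filename, contents)
  zip_file.close()
  return buf.getvalue()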