def testCachedStat(self):
  """Stat results should be cached per-directory and served from the cache.

  Uses a MockFileSystem wrapper to count how many stat calls reach the
  underlying file system.
  """
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents'
    }
  })
  mock_fs = MockFileSystem(test_fs)
  file_system = self._CreateCachingFileSystem(mock_fs, start_empty=False)
  # First stat hits the underlying file system exactly once.
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  # Second stat of the same path is served entirely from the cache.
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Caching happens on a directory basis, so reading other files from that
  # directory won't result in a stat.
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob1'))
  self.assertEqual(
      StatInfo('0', child_versions={'bob0': '0', 'bob1': '0'}),
      file_system.Stat('bob/'))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Even though the stat is bumped, the object store still has it cached so
  # this won't update.
  test_fs.IncrementStat()
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob0'))
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob1'))
  self.assertEqual(
      StatInfo('0', child_versions={'bob0': '0', 'bob1': '0'}),
      file_system.Stat('bob/'))
  self.assertTrue(*mock_fs.CheckAndReset())
def testSidenavDataSource(self):
  """The sidenav is built from a single JSON file and annotates the tree.

  Checks that unqualified hrefs are qualified (with a warning logged), that
  selected/child_selected/parent annotations are applied for the current
  request path, and that building the sidenav reads only one file.
  """
  file_system = MockFileSystem(
      TestFileSystem({
        'chrome_sidenav.json': json.dumps([{
          'title': 'H1',
          'href': 'H1.html',  # NOTE: deliberately unqualified (no leading /).
          'items': [{
            'title': 'H2',
            'href': '/H2.html'
          }]
        }])
      }, relative_to=JSON_TEMPLATES))
  # The expected tree: levels assigned, hrefs qualified, and the entry
  # matching the request ('/H2.html') marked selected with its parent chain
  # marked child_selected.
  expected = [{
    'level': 2,
    'child_selected': True,
    'title': 'H1',
    'href': '/H1.html',
    'items': [{
      'level': 3,
      'selected': True,
      'related': True,
      'title': 'H2',
      'href': '/H2.html',
      'parent': { 'href': '/H1.html', 'title': 'H1' }
    }]
  }]
  sidenav_data_source = SidenavDataSource(
      ServerInstance.ForTest(file_system), Request.ForTest('/H2.html'))
  # Constructing the data source alone must not touch the file system.
  self.assertTrue(*file_system.CheckAndReset())
  # The unqualified 'H1.html' href should trigger exactly one warning.
  log_output = CaptureLogging(
      lambda: self.assertEqual(expected, sidenav_data_source.get('chrome')))
  self.assertEqual(1, len(log_output))
  self.assertTrue(log_output[0].msg.startswith(
      'Paths in sidenav must be qualified.'))
  # Test that only a single file is read when creating the sidenav, so that
  # we can be confident in the compiled_file_system.SingleFile annotation.
  self.assertTrue(*file_system.CheckAndReset(
      read_count=1, stat_count=1, read_resolve_count=1))
def testSkipNotFound(self):
  """GetFromFile(skip_not_found=True) resolves missing files to None.

  The None result is cached (no re-read on a second request), but reading
  the same path without skip_not_found still raises FileNotFoundError.
  """
  mock_fs = MockFileSystem(TestFileSystem(_TEST_DATA))
  compiled_fs = CompiledFileSystem.Factory(
      ObjectStoreCreator.ForTest()).Create(
          mock_fs, Cache(lambda path, contents: contents), type(self))
  future = compiled_fs.GetFromFile('no_file', skip_not_found=True)
  # Creating the Future issues the read and stat eagerly, even for a file
  # that turns out not to exist.
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  # With skip_not_found, resolving yields None rather than raising.
  self.assertEqual(None, future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  future = compiled_fs.GetFromFile('no_file', skip_not_found=True)
  # Only a stat this time: the not-found outcome is already cached.
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  self.assertEqual(None, future.Get())
  # The result for a non-existent file should still be cached.
  self.assertTrue(*mock_fs.CheckAndReset())
  # Without skip_not_found the missing file is an error on resolve.
  future = compiled_fs.GetFromFile('no_file')
  self.assertRaises(FileNotFoundError, future.Get)
def testCorrectFutureBehaviour(self):
  """Reads are issued eagerly but only resolved when Get() is called.

  Tests that the underlying FileSystem's Read Future has had Get() called
  on it before the Future is resolved, but the underlying Future isn't
  resolved until Get is.
  """
  mock_fs = MockFileSystem(TestFileSystem(_TEST_DATA))
  compiled_fs = CompiledFileSystem.Factory(
      ObjectStoreCreator.ForTest()).Create(
          mock_fs, lambda path, contents: contents, type(self))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Single file: read+stat issued on creation, resolved on Get().
  future = compiled_fs.GetFromFile('404.html')
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1, read_count=1))
  future.Get()
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  future = compiled_fs.GetFromFileListing('apps/')
  # Current behaviour is to have read=2 and read_resolve=1 because the first
  # level is read eagerly, then all of the second is read (in parallel). If
  # it weren't eager (and it may be worth experimenting with that) then it'd
  # be read=1 and read_resolve=0.
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1,
                                         read_count=2,
                                         read_resolve_count=1))
  future.Get()
  # It's doing 1 more level 'deeper' (already read 'fakedir' and 'deepdir'
  # though not resolved), so that's 1 more read/resolve + the resolve from
  # the first read.
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, read_resolve_count=2))
  # Even though the directory is 1 layer deep the caller has no way of
  # determining that ahead of time (though perhaps the API could give some
  # kind of clue, if we really cared).
  future = compiled_fs.GetFromFileListing('extensions/')
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1,
                                         read_count=1,
                                         read_resolve_count=1))
  future.Get()
  self.assertTrue(*mock_fs.CheckAndReset())
  # Similar configuration to the 'apps/' case but deeper.
  future = compiled_fs.GetFromFileListing('')
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1,
                                         read_count=2,
                                         read_resolve_count=1))
  future.Get()
  self.assertTrue(*mock_fs.CheckAndReset(read_count=2, read_resolve_count=3))
def testWalkCaching(self):
  """Walking the tree populates the cache; later walks do no I/O.

  The first Walk reads/stats each of the 5 directories; subsequent walks,
  including walks rooted at a subdirectory, are served from the cache.
  """
  test_fs = TestFileSystem({
    'root': {
      'file1': 'file1',
      'file2': 'file2',
      'dir1': {
        'dir1_file1': 'dir1_file1',
        'dir2': {},
        'dir3': {
          'dir3_file1': 'dir3_file1',
          'dir3_file2': 'dir3_file2'
        }
      }
    }
  })
  mock_fs = MockFileSystem(test_fs)
  file_system = self._CreateCachingFileSystem(mock_fs, start_empty=True)
  # First full walk: one read/resolve/stat per directory ('' , root, dir1,
  # dir2, dir3 = 5).
  for walkinfo in file_system.Walk(''):
    pass
  self.assertTrue(*mock_fs.CheckAndReset(
      read_resolve_count=5, read_count=5, stat_count=5))
  all_dirs, all_files = [], []
  for root, dirs, files in file_system.Walk(''):
    all_dirs.extend(dirs)
    all_files.extend(files)
  self.assertEqual(sorted(['root/', 'dir1/', 'dir2/', 'dir3/']),
                   sorted(all_dirs))
  self.assertEqual(
      sorted(['file1', 'file2', 'dir1_file1', 'dir3_file1', 'dir3_file2']),
      sorted(all_files))
  # All data should be cached.
  self.assertTrue(*mock_fs.CheckAndReset())
  # Starting from a different root should still pull cached data.
  for walkinfo in file_system.Walk('root/dir1/'):
    pass
  self.assertTrue(*mock_fs.CheckAndReset())
def testCaching(self):
  """Read results are cached and invalidated by stat version changes.

  This variant uses the synchronous ReadSingle API (returns contents
  directly rather than a Future). Covers: cache hits, same-version reuse
  across fresh caches, refetch on version bump, per-directory stat caching,
  and that a newer parent directory doesn't force child refetches.
  """
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents',
      'bob2': 'bob/bob2 contents',
      'bob3': 'bob/bob3 contents',
    }
  })
  mock_fs = MockFileSystem(test_fs)
  def create_empty_caching_fs():
    # Fresh caching layer over the same mock, so object-store caches start
    # empty but the underlying data (and its stat versions) persist.
    return self._CreateCachingFileSystem(mock_fs, start_empty=True)
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  # Resource has been cached, so test resource is not re-fetched.
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test if the Stat version is the same the resource is not re-fetched.
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  # Test if there is a newer version, the resource is re-fetched.
  file_system = create_empty_caching_fs()
  test_fs.IncrementStat()
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  # Test directory and subdirectory stats are cached.
  file_system = create_empty_caching_fs()
  file_system._stat_object_store.Del('bob/bob0')
  file_system._read_object_store.Del('bob/bob0')
  file_system._stat_object_store.Del('bob/bob1')
  test_fs.IncrementStat()
  # Two reads (both cache entries deleted) but only one stat: the directory
  # stat covers both children.
  self.assertEqual('bob/bob1 contents', file_system.ReadSingle('bob/bob1'))
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=2, stat_count=1))
  self.assertEqual('bob/bob1 contents', file_system.ReadSingle('bob/bob1'))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test a more recent parent directory doesn't force a refetch of children.
  file_system = create_empty_caching_fs()
  file_system._read_object_store.Del('bob/bob0')
  file_system._read_object_store.Del('bob/bob1')
  self.assertEqual('bob/bob1 contents', file_system.ReadSingle('bob/bob1'))
  self.assertEqual('bob/bob2 contents', file_system.ReadSingle('bob/bob2'))
  self.assertEqual('bob/bob3 contents', file_system.ReadSingle('bob/bob3'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=3, stat_count=1))
  test_fs.IncrementStat(path='bob/')
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob1 contents', file_system.ReadSingle('bob/bob1'))
  self.assertEqual('bob/bob2 contents', file_system.ReadSingle('bob/bob2'))
  self.assertEqual('bob/bob3 contents', file_system.ReadSingle('bob/bob3'))
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  file_system = create_empty_caching_fs()
  file_system._stat_object_store.Del('bob/bob0')
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset())
def testCheckAndReset(self):
  """Sanity-check MockFileSystem.CheckAndReset itself.

  CheckAndReset returns a (success, message) pair — hence the * unpacking
  into assertTrue/assertFalse — and resets its tallies on every call, so a
  failed check followed by a zero-count check passes.
  """
  fs = MockFileSystem(TestFileSystem(deepcopy(_TEST_DATA)))
  self.assertTrue(*fs.CheckAndReset())
  self.assertFalse(*fs.CheckAndReset(read_count=1))
  self.assertFalse(*fs.CheckAndReset(stat_count=1))
  # Read counted when the Future is created; resolve counted on Get().
  future = fs.ReadSingle('apps/')
  self.assertTrue(*fs.CheckAndReset(read_count=1))
  future.Get()
  self.assertTrue(*fs.CheckAndReset(read_resolve_count=1))
  self.assertFalse(*fs.CheckAndReset(read_count=1))
  self.assertTrue(*fs.CheckAndReset())
  future = fs.ReadSingle('apps/')
  self.assertFalse(*fs.CheckAndReset(read_count=2))
  future.Get()
  self.assertFalse(*fs.CheckAndReset(read_resolve_count=2))
  fs.ReadSingle('extensions/').Get()
  fs.ReadSingle('extensions/').Get()
  self.assertTrue(*fs.CheckAndReset(read_count=2, read_resolve_count=2))
  self.assertFalse(*fs.CheckAndReset(read_count=2, read_resolve_count=2))
  self.assertTrue(*fs.CheckAndReset())
  fs.ReadSingle('404.html').Get()
  self.assertTrue(*fs.CheckAndReset(read_count=1, read_resolve_count=1))
  # A failed resolve (FileNotFoundError) does not count as a read-resolve.
  future = fs.Read(['notfound.html', 'apps/'])
  self.assertTrue(*fs.CheckAndReset(read_count=1))
  self.assertRaises(FileNotFoundError, future.Get)
  self.assertTrue(*fs.CheckAndReset(read_resolve_count=0))
  fs.Stat('404.html')
  fs.Stat('404.html')
  fs.Stat('apps/')
  # Deliberately wrong expectation (42): fails, but still resets the
  # tallies, so the following zero-count check succeeds.
  self.assertFalse(*fs.CheckAndReset(stat_count=42))
  self.assertFalse(*fs.CheckAndReset(stat_count=42))
  self.assertTrue(*fs.CheckAndReset())
  fs.ReadSingle('404.html').Get()
  fs.Stat('404.html')
  fs.Stat('apps/')
  self.assertTrue(
      *fs.CheckAndReset(read_count=1, read_resolve_count=1, stat_count=2))
  self.assertTrue(*fs.CheckAndReset())
class APIModelsTest(unittest.TestCase):
  """Tests for APIModels: name resolution, model lookup and file access."""

  def setUp(self):
    # Wrap the test data in a MockFileSystem so tests can assert exactly how
    # many reads/stats each operation performs.
    object_store_creator = ObjectStoreCreator.ForTest()
    compiled_fs_factory = CompiledFileSystem.Factory(object_store_creator)
    self._mock_file_system = MockFileSystem(
        TestFileSystem(_TEST_DATA, relative_to=CHROME_EXTENSIONS))
    features_bundle = FeaturesBundle(self._mock_file_system,
                                     compiled_fs_factory,
                                     object_store_creator,
                                     'extensions')
    self._api_models = APIModels(features_bundle,
                                 compiled_fs_factory,
                                 self._mock_file_system,
                                 object_store_creator,
                                 'extensions',
                                 SchemaProcessorFactoryForTest())

  def testGetNames(self):
    # Both 'app' and 'app.runtime' appear here because 'app.runtime' has
    # noparent:true, but 'app.runtime.foo' etc doesn't so it's a sub-feature of
    # 'app.runtime' not a separate API. 'devtools.inspectedWindow' is an API
    # because there is no 'devtools'.
    self.assertEqual(
        ['alarms', 'app', 'app.runtime', 'declarativeWebRequest',
         'devtools.inspectedWindow', 'input', 'storage'],
        sorted(self._api_models.GetNames()))

  def testGetModel(self):
    """GetModel accepts an API name, a bare filename, or a full path."""
    def get_model_name(api_name):
      return self._api_models.GetModel(api_name).Get().name
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('devtools.inspectedWindow'))
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('devtools/inspected_window.json'))
    self.assertEqual(
        'devtools.inspectedWindow',
        get_model_name(CHROME_API + 'devtools/inspected_window.json'))
    self.assertEqual('alarms', get_model_name('alarms'))
    self.assertEqual('alarms', get_model_name('alarms.idl'))
    self.assertEqual('alarms', get_model_name(CHROME_API + 'alarms.idl'))
    self.assertEqual('input.ime', get_model_name('input.ime'))
    self.assertEqual('input.ime', get_model_name('input_ime.json'))
    self.assertEqual('input.ime',
                     get_model_name(CHROME_API + 'input_ime.json'))
    self.assertEqual('pageAction', get_model_name('pageAction'))
    self.assertEqual('pageAction', get_model_name('page_action.json'))
    self.assertEqual('pageAction',
                     get_model_name(CHROME_API + 'page_action.json'))

  def testGetNonexistentModel(self):
    """Missing APIs and wrong-extension paths raise FileNotFoundError."""
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel('declarativeWebRequest').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel('declarative_web_request.json').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(
            CHROME_API + 'declarative_web_request.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('notfound').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('notfound.json').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(CHROME_API + 'notfound.json').Get)
    # 'alarms' exists as IDL, not JSON, so the .json path must fail.
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(CHROME_API + 'alarms.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('storage').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(CHROME_API + 'storage.json').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(CHROME_API + 'storage.idl').Get)

  def testSingleFile(self):
    # 2 stats (1 for JSON and 1 for IDL) for each available API path.
    # 1 read (for IDL file which existed).
    future = self._api_models.GetModel('alarms')
    self.assertTrue(*self._mock_file_system.CheckAndReset(
        read_count=1, stat_count=len(API_PATHS) * 2))
    # 1 read-resolve (for the IDL file).
    #
    # The important part here and above is that it's only doing a single read;
    # any more would break the contract that only a single file is accessed -
    # see the SingleFile annotation in api_models._CreateAPIModel.
    future.Get()
    self.assertTrue(*self._mock_file_system.CheckAndReset(
        read_resolve_count=1))
    # 2 stats (1 for JSON and 1 for IDL) for each available API path.
    # No reads (still cached).
    future = self._api_models.GetModel('alarms')
    self.assertTrue(*self._mock_file_system.CheckAndReset(
        stat_count=len(API_PATHS) * 2))
    future.Get()
    self.assertTrue(*self._mock_file_system.CheckAndReset())
def testCaching(self):
  """Read caching with the Future-based API; stats are counted on resolve.

  In this variant the stat is tallied when the Future is resolved (Get()),
  not when it is created. Uses the _stat_cache/_read_cache attribute names.
  Also covers skip_not_found caching of missing files.
  """
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents',
      'bob2': 'bob/bob2 contents',
      'bob3': 'bob/bob3 contents',
    }
  })
  mock_fs = MockFileSystem(test_fs)
  def create_empty_caching_fs():
    # Fresh caching layer over the same mock; caches start empty but the
    # underlying data and stat versions persist.
    return self._CreateCachingFileSystem(mock_fs, start_empty=True)
  file_system = create_empty_caching_fs()
  # The stat/read should happen before resolving the Future, and resolving
  # the future shouldn't do any additional work.
  get_future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1))
  self.assertEqual('bob/bob0 contents', get_future.Get())
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=1, stat_count=1))
  # Resource has been cached, so test resource is not re-fetched.
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test if the Stat version is the same the resource is not re-fetched.
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  # Test if there is a newer version, the resource is re-fetched.
  file_system = create_empty_caching_fs()
  test_fs.IncrementStat()
  future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  # Test directory and subdirectory stats are cached.
  file_system = create_empty_caching_fs()
  file_system._stat_cache.Del('bob/bob0')
  file_system._read_cache.Del('bob/bob0')
  file_system._stat_cache.Del('bob/bob1')
  test_fs.IncrementStat()
  futures = (file_system.ReadSingle('bob/bob1'),
             file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=2))
  self.assertEqual(('bob/bob1 contents', 'bob/bob0 contents'),
                   tuple(future.Get() for future in futures))
  # Two reads but only one stat: the directory stat covers both children.
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=2, stat_count=1))
  self.assertEqual('bob/bob1 contents',
                   file_system.ReadSingle('bob/bob1').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test a more recent parent directory doesn't force a refetch of children.
  file_system = create_empty_caching_fs()
  file_system._read_cache.Del('bob/bob0')
  file_system._read_cache.Del('bob/bob1')
  futures = (file_system.ReadSingle('bob/bob1'),
             file_system.ReadSingle('bob/bob2'),
             file_system.ReadSingle('bob/bob3'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=3))
  self.assertEqual(
      ('bob/bob1 contents', 'bob/bob2 contents', 'bob/bob3 contents'),
      tuple(future.Get() for future in futures))
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=3, stat_count=1))
  test_fs.IncrementStat(path='bob/bob0')
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob1 contents',
                   file_system.ReadSingle('bob/bob1').Get())
  self.assertEqual('bob/bob2 contents',
                   file_system.ReadSingle('bob/bob2').Get())
  self.assertEqual('bob/bob3 contents',
                   file_system.ReadSingle('bob/bob3').Get())
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  file_system = create_empty_caching_fs()
  file_system._stat_cache.Del('bob/bob0')
  future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1))
  self.assertEqual('bob/bob0 contents', future.Get())
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test skip_not_found caching behavior.
  file_system = create_empty_caching_fs()
  future = file_system.ReadSingle('bob/no_file', skip_not_found=True)
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1))
  self.assertEqual(None, future.Get())
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=1, stat_count=1))
  future = file_system.ReadSingle('bob/no_file', skip_not_found=True)
  # There shouldn't be another read/stat from the file system;
  # we know the file is not there.
  self.assertTrue(*mock_fs.CheckAndReset())
  future = file_system.ReadSingle('bob/no_file')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1))
  # Even though we cached information about non-existent files,
  # trying to read one without specifying skip_not_found should
  # still raise an error.
  self.assertRaises(FileNotFoundError, future.Get)
def testCaching(self):
  """Read caching with the Future-based API; stats are counted on creation.

  In this variant both the read and the stat are tallied when the Future is
  created, and resolving (Get()) only adds a read-resolve. Uses the
  _stat_object_store/_read_object_store attribute names.

  Covers: cache hits, same-version reuse across fresh caches, refetch on
  version bump, per-directory stat caching, and that a newer parent
  directory doesn't force child refetches.
  """
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents',
      'bob2': 'bob/bob2 contents',
      'bob3': 'bob/bob3 contents',
    }
  })
  mock_fs = MockFileSystem(test_fs)
  def create_empty_caching_fs():
    # Fresh caching layer over the same mock; object-store caches start
    # empty but the underlying data and stat versions persist.
    return self._CreateCachingFileSystem(mock_fs, start_empty=True)
  file_system = create_empty_caching_fs()
  # The stat/read should happen before resolving the Future, and resolving
  # the future shouldn't do any additional work.
  get_future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', get_future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  # Resource has been cached, so test resource is not re-fetched.
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test if the Stat version is the same the resource is not re-fetched.
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  # Test if there is a newer version, the resource is re-fetched.
  file_system = create_empty_caching_fs()
  test_fs.IncrementStat()  # semicolon removed; not idiomatic Python
  future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  # Test directory and subdirectory stats are cached.
  file_system = create_empty_caching_fs()
  file_system._stat_object_store.Del('bob/bob0')
  file_system._read_object_store.Del('bob/bob0')
  file_system._stat_object_store.Del('bob/bob1')
  test_fs.IncrementStat()  # semicolon removed; not idiomatic Python
  futures = (file_system.ReadSingle('bob/bob1'),
             file_system.ReadSingle('bob/bob0'))
  # Two reads (both cache entries deleted) but only one stat: the directory
  # stat covers both children.
  self.assertTrue(*mock_fs.CheckAndReset(read_count=2, stat_count=1))
  self.assertEqual(('bob/bob1 contents', 'bob/bob0 contents'),
                   tuple(future.Get() for future in futures))
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=2))
  self.assertEqual('bob/bob1 contents',
                   file_system.ReadSingle('bob/bob1').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test a more recent parent directory doesn't force a refetch of children.
  file_system = create_empty_caching_fs()
  file_system._read_object_store.Del('bob/bob0')
  file_system._read_object_store.Del('bob/bob1')
  futures = (file_system.ReadSingle('bob/bob1'),
             file_system.ReadSingle('bob/bob2'),
             file_system.ReadSingle('bob/bob3'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=3, stat_count=1))
  self.assertEqual(
      ('bob/bob1 contents', 'bob/bob2 contents', 'bob/bob3 contents'),
      tuple(future.Get() for future in futures))
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=3))
  test_fs.IncrementStat(path='bob/')
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob1 contents',
                   file_system.ReadSingle('bob/bob1').Get())
  self.assertEqual('bob/bob2 contents',
                   file_system.ReadSingle('bob/bob2').Get())
  self.assertEqual('bob/bob3 contents',
                   file_system.ReadSingle('bob/bob3').Get())
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  file_system = create_empty_caching_fs()
  file_system._stat_object_store.Del('bob/bob0')
  future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
class APIModelsTest(unittest.TestCase):
  """Tests for APIModels (older API variant: path-relative 'api/' prefix)."""

  def setUp(self):
    # Wrap the test data in a MockFileSystem so tests can assert exactly how
    # many reads/stats each operation performs.
    object_store_creator = ObjectStoreCreator.ForTest()
    compiled_fs_factory = CompiledFileSystem.Factory(object_store_creator)
    self._mock_file_system = MockFileSystem(TestFileSystem(_TEST_DATA))
    features_bundle = FeaturesBundle(self._mock_file_system,
                                     compiled_fs_factory,
                                     object_store_creator)
    self._api_models = APIModels(features_bundle,
                                 compiled_fs_factory,
                                 self._mock_file_system)

  def testGetNames(self):
    # Both 'app' and 'app.runtime' appear here because 'app.runtime' has
    # noparent:true, but 'app.runtime.experimental' etc doesn't so it's a
    # sub-feature of 'app.runtime' not a separate API.
    # 'devtools.inspectedWindow' is an API because there is no 'devtools'.
    self.assertEqual(
        ['alarms', 'app', 'app.runtime', 'declarativeWebRequest',
         'devtools.inspectedWindow', 'experimental.accessibility', 'storage'],
        sorted(self._api_models.GetNames()))

  def testGetModel(self):
    """GetModel accepts an API name, a bare filename, or an 'api/' path."""
    def get_model_name(api_name):
      return self._api_models.GetModel(api_name).Get().name
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('devtools.inspectedWindow'))
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('devtools/inspected_window.json'))
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('api/devtools/inspected_window.json'))
    self.assertEqual('alarms', get_model_name('alarms'))
    self.assertEqual('alarms', get_model_name('alarms.idl'))
    self.assertEqual('alarms', get_model_name('api/alarms.idl'))
    self.assertEqual('declarativeWebRequest',
                     get_model_name('declarativeWebRequest'))
    self.assertEqual('declarativeWebRequest',
                     get_model_name('declarative_web_request.json'))
    self.assertEqual('declarativeWebRequest',
                     get_model_name('api/declarative_web_request.json'))
    self.assertEqual('experimental.accessibility',
                     get_model_name('experimental.accessibility'))
    self.assertEqual('experimental.accessibility',
                     get_model_name('experimental_accessibility.json'))
    self.assertEqual('experimental.accessibility',
                     get_model_name('api/experimental_accessibility.json'))
    self.assertEqual('pageAction', get_model_name('pageAction'))
    self.assertEqual('pageAction', get_model_name('page_action.json'))
    self.assertEqual('pageAction', get_model_name('api/page_action.json'))

  def testGetNonexistentModel(self):
    """Missing APIs and wrong-extension paths raise FileNotFoundError."""
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('notfound').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('notfound.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('api/notfound.json').Get)
    # 'alarms' exists as IDL, not JSON, so the .json path must fail.
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('api/alarms.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('storage').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('api/storage.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('api/storage.idl').Get)

  def testSingleFile(self):
    # 2 stats (1 for JSON and 1 for IDL), 1 read (for IDL file which existed).
    future = self._api_models.GetModel('alarms')
    self.assertTrue(
        *self._mock_file_system.CheckAndReset(read_count=1, stat_count=2))
    # 1 read-resolve (for the IDL file).
    #
    # The important part here and above is that it's only doing a single read;
    # any more would break the contract that only a single file is accessed -
    # see the SingleFile annotation in api_models._CreateAPIModel.
    future.Get()
    self.assertTrue(*self._mock_file_system.CheckAndReset(
        read_resolve_count=1))
    # 2 stats (1 for JSON and 1 for IDL), no reads (still cached).
    future = self._api_models.GetModel('alarms')
    self.assertTrue(*self._mock_file_system.CheckAndReset(stat_count=2))
    future.Get()
    self.assertTrue(*self._mock_file_system.CheckAndReset())