def create_file_system(revision=None):
  '''Creates a MockFileSystem at |revision| by applying that many |updates|
  to it. If |revision| is None, all updates are applied (head state).
  '''
  mock_file_system = MockFileSystem(TestFileSystem(test_data))
  # Handle None explicitly rather than relying on updates[:None] slicing to
  # the full list, and coerce |revision| to int so string-typed revisions
  # (common for revision identifiers) also work. This matches the sibling
  # create_file_system implementations elsewhere in this file.
  updates_for_revision = (updates if revision is None
                          else updates[:int(revision)])
  for update in updates_for_revision:
    mock_file_system.Update(update)
  return mock_file_system
def setUp(self):
  """Builds the APIModels under test on top of a mock file system."""
  store_creator = ObjectStoreCreator.ForTest()
  fs_factory = CompiledFileSystem.Factory(store_creator)
  self._mock_file_system = MockFileSystem(TestFileSystem(_TEST_DATA))
  bundle = FeaturesBundle(self._mock_file_system,
                          fs_factory,
                          store_creator)
  self._api_models = APIModels(bundle, fs_factory, self._mock_file_system)
def create_file_system(revision=None):
  '''Builds a MockFileSystem representing the state at |revision| by
  replaying that many |updates| on top of the base test data. A |revision|
  of None yields the head state (all updates applied).
  '''
  fs = MockFileSystem(TestFileSystem(test_data))
  if revision is None:
    applicable_updates = updates
  else:
    applicable_updates = updates[:int(revision)]
  for update in applicable_updates:
    fs.Update(update)
  return fs
def create_file_system(commit=None):
  '''Builds a MockFileSystem representing the state at |commit| by replaying
  that many |updates| on top of the base test data. A |commit| of None
  yields the head state (all updates applied).
  '''
  fs = MockFileSystem(TestFileSystem(test_data))
  if commit is None:
    applicable_updates = updates
  else:
    applicable_updates = updates[:int(commit)]
  for update in applicable_updates:
    fs.Update(update)
  return fs
def setUp(self):
  """Builds the extensions-platform APIModels on top of a mock file system
  rooted at CHROME_EXTENSIONS."""
  store_creator = ObjectStoreCreator.ForTest()
  fs_factory = CompiledFileSystem.Factory(store_creator)
  self._mock_file_system = MockFileSystem(
      TestFileSystem(_TEST_DATA, relative_to=CHROME_EXTENSIONS))
  bundle = FeaturesBundle(
      self._mock_file_system, fs_factory, store_creator, 'extensions')
  self._api_models = APIModels(
      bundle, fs_factory, self._mock_file_system, store_creator,
      'extensions')
def testSidenavDataSource(self):
  """Renders the sidenav for /H2.html and verifies the rendered structure,
  the warning logged for unqualified paths, and that rendering reads exactly
  one file.
  """
  # Note the unqualified 'H1.html' href; this should be flagged in the logs.
  file_system = MockFileSystem(TestFileSystem({
    'chrome_sidenav.json': json.dumps([{
      'title': 'H1',
      'href': 'H1.html',
      'items': [{
        'title': 'H2',
        'href': '/H2.html'
      }]
    }])
  }, relative_to=JSON_TEMPLATES))
  # Expected rendering: levels assigned, H1 marked child_selected, H2 marked
  # selected/related with a back-reference to its parent, hrefs qualified.
  expected = [{
    'level': 2,
    'child_selected': True,
    'title': 'H1',
    'href': '/H1.html',
    'items': [{
      'level': 3,
      'selected': True,
      'related': True,
      'title': 'H2',
      'href': '/H2.html',
      'parent': {'href': '/H1.html', 'title': 'H1'}
    }]
  }]
  sidenav_data_source = SidenavDataSource(
      ServerInstance.ForTest(file_system), Request.ForTest('/H2.html'))
  # Construction alone should not touch the file system.
  self.assertTrue(*file_system.CheckAndReset())
  # Rendering should log exactly one warning about the unqualified path.
  log_output = CaptureLogging(
      lambda: self.assertEqual(expected, sidenav_data_source.get('chrome')))
  self.assertEqual(1, len(log_output))
  self.assertTrue(log_output[0].msg.startswith(
      'Paths in sidenav must be qualified.'))
  # Test that only a single file is read when creating the sidenav, so that
  # we can be confident in the compiled_file_system.SingleFile annotation.
  self.assertTrue(*file_system.CheckAndReset(
      read_count=1, stat_count=1, read_resolve_count=1))
def testEverything(self):
  """Runs the cron twice and verifies the second run re-Stats but never
  re-Reads (nothing changed between runs)."""
  # All these tests are dependent (see above comment) so lump everything in
  # the one test.
  delegate = _TestDelegate(
      lambda _: MockFileSystem(LocalFileSystem.Create()))
  # Test that the cron runs successfully.
  response = CronServlet(Request.ForTest('trunk'),
                         delegate_for_test=delegate).Get()
  self.assertEqual(200, response.status)
  # Save the file systems created, start with a fresh set for the next run.
  first_run_file_systems = delegate.file_systems[:]
  delegate.file_systems[:] = []
  # When re-running, all file systems should be Stat()d the same number of
  # times, but the second round shouldn't have been re-Read() since the
  # Stats haven't changed.
  response = CronServlet(Request.ForTest('trunk'),
                         delegate_for_test=delegate).Get()
  self.assertEqual(200, response.status)
  self.assertEqual(len(first_run_file_systems), len(delegate.file_systems))
  # Pairwise comparison: each second-run file system must have done zero
  # reads and exactly as many stats as its first-run counterpart.
  for i, second_run_file_system in enumerate(delegate.file_systems):
    self.assertTrue(*second_run_file_system.CheckAndReset(
        read_count=0,
        stat_count=first_run_file_systems[i].GetStatCount()))
def testFreshStat(self):
  """A start_empty caching file system should stat the directory exactly
  once per instance and serve child stats from that cached listing."""
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents'
    }
  })
  mock_fs = MockFileSystem(test_fs)
  def run_expecting_stat(stat):
    # Run twice to show the behavior is identical for repeated fresh
    # instances at the same underlying version.
    def run():
      file_system = self._CreateCachingFileSystem(mock_fs, start_empty=True)
      # Statting the directory costs exactly one underlying stat...
      self.assertEqual(
          StatInfo(stat, child_versions={
            'bob0': stat,
            'bob1': stat
          }),
          file_system.Stat('bob/'))
      self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
      # ...after which child stats are answered from the cached listing.
      self.assertEqual(StatInfo(stat), file_system.Stat('bob/bob0'))
      self.assertEqual(StatInfo(stat), file_system.Stat('bob/bob0'))
      self.assertTrue(*mock_fs.CheckAndReset())
    run()
    run()
  run_expecting_stat('0')
  # Bumping the version must be visible to a fresh caching instance.
  test_fs.IncrementStat()
  run_expecting_stat('1')
def testCachedStat(self):
  """Stats are cached per directory; once cached they are served without
  touching the underlying file system, even after the version bumps."""
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents'
    }
  })
  mock_fs = MockFileSystem(test_fs)
  file_system = self._CreateCachingFileSystem(mock_fs, start_empty=False)
  # First stat misses the cache: exactly one underlying stat.
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  # Second stat of the same path is fully cached.
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Caching happens on a directory basis, so reading other files from that
  # directory won't result in a stat.
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob1'))
  self.assertEqual(
      StatInfo('0', child_versions={'bob0': '0', 'bob1': '0'}),
      file_system.Stat('bob/'))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Even though the stat is bumped, the object store still has it cached so
  # this won't update.
  test_fs.IncrementStat()
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob0'))
  self.assertEqual(StatInfo('0'), file_system.Stat('bob/bob1'))
  self.assertEqual(
      StatInfo('0', child_versions={'bob0': '0', 'bob1': '0'}),
      file_system.Stat('bob/'))
  self.assertTrue(*mock_fs.CheckAndReset())
def testWalkCaching(self):
  """Walk() results should be fully cached after the first traversal,
  including walks rooted at a subdirectory."""
  test_fs = TestFileSystem({
    'root': {
      'file1': 'file1',
      'file2': 'file2',
      'dir1': {
        'dir1_file1': 'dir1_file1',
        'dir2': {},
        'dir3': {
          'dir3_file1': 'dir3_file1',
          'dir3_file2': 'dir3_file2'
        }
      }
    }
  })
  mock_fs = MockFileSystem(test_fs)
  file_system = self._CreateCachingFileSystem(mock_fs, start_empty=True)
  # First walk populates the cache; 5 reads/stats/resolves — presumably one
  # per directory level ('', root, dir1, dir2, dir3). TODO confirm.
  for walkinfo in file_system.Walk(''):
    pass
  self.assertTrue(*mock_fs.CheckAndReset(
      read_resolve_count=5, read_count=5, stat_count=5))
  # A second walk yields the full tree contents...
  all_dirs, all_files = [], []
  for root, dirs, files in file_system.Walk(''):
    all_dirs.extend(dirs)
    all_files.extend(files)
  self.assertEqual(sorted(['root/', 'dir1/', 'dir2/', 'dir3/']),
                   sorted(all_dirs))
  self.assertEqual(
      sorted(['file1', 'file2', 'dir1_file1', 'dir3_file1', 'dir3_file2']),
      sorted(all_files))
  # All data should be cached.
  self.assertTrue(*mock_fs.CheckAndReset())
  # Starting from a different root should still pull cached data.
  for walkinfo in file_system.Walk('root/dir1/'):
    pass
  self.assertTrue(*mock_fs.CheckAndReset())
def testVersionedStat(self):
  """Multiple CachingFileSystem instances over one versioned FileSystem
  should share a single stat cache."""
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents'
    }
  })
  # Create a versioned FileSystem and verify that multiple CachingFileSystem
  # instances wrapping it will share the same stat cache.
  mock_fs = MockFileSystem(test_fs)
  mock_fs.SetVersion('abcdefg')
  def stat_all_expecting(paths, stat_count=0):
    # Each call builds a brand-new (empty) caching layer over |mock_fs|.
    fs = self._CreateCachingFileSystem(mock_fs, start_empty=True)
    for path in paths:
      fs.Stat(path)
    self.assertTrue(*mock_fs.CheckAndReset(stat_count=stat_count))
  # First instance pays a single underlying stat; the second pays none.
  stat_all_expecting(['bob/', 'bob/bob0', 'bob/bob1'], stat_count=1)
  stat_all_expecting(['bob/', 'bob/bob0', 'bob/bob1'], stat_count=0)
def testFreshStat(self):
  """A start_empty caching file system stats the directory once per
  instance and serves child stats from that cached listing."""
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents'
    }
  })
  mock_fs = MockFileSystem(test_fs)
  def run_expecting_stat(stat):
    # Run twice: repeated fresh instances at the same version behave alike.
    def run():
      file_system = self._CreateCachingFileSystem(mock_fs, start_empty=True)
      # One underlying stat for the directory listing...
      self.assertEqual(
          StatInfo(stat, child_versions={
            'bob0': stat,
            'bob1': stat
          }),
          file_system.Stat('bob/'))
      self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
      # ...then child stats come from the cached listing.
      self.assertEqual(StatInfo(stat), file_system.Stat('bob/bob0'))
      self.assertEqual(StatInfo(stat), file_system.Stat('bob/bob0'))
      self.assertTrue(*mock_fs.CheckAndReset())
    run()
    run()
  run_expecting_stat('0')
  # A version bump must be visible to a fresh caching instance.
  test_fs.IncrementStat()
  run_expecting_stat('1')

def testSkipNotFound(self):
  """Read(skip_not_found=True) should omit missing paths from the result
  rather than raising FileNotFoundError."""
  caching_fs = self._CreateCachingFileSystem(TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents'
    }
  }))
  def read_skip_not_found(paths):
    return caching_fs.Read(paths, skip_not_found=True).Get()
  # Entirely-missing path sets yield an empty result, not an error.
  self.assertEqual({}, read_skip_not_found(('grub',)))
  self.assertEqual({}, read_skip_not_found(('bob/bob2',)))
  # Mixed sets yield only the entries that exist.
  self.assertEqual({
    'bob/bob0': 'bob/bob0 contents',
  }, read_skip_not_found(('bob/bob0', 'bob/bob2')))
def testEverything(self):
  """Runs the cron twice; the second run must Stat() as often as the first
  but never re-Read() since nothing changed."""
  # All these tests are dependent (see above comment) so lump everything in
  # the one test.
  delegate = _TestDelegate(
      lambda _: MockFileSystem(LocalFileSystem.Create()))
  # Test that the cron runs successfully.
  response = CronServlet(Request.ForTest('trunk'),
                         delegate_for_test=delegate).Get()
  self.assertEqual(1, len(delegate.file_systems))
  self.assertEqual(200, response.status)
  # When re-running, all file systems should be Stat()d the same number of
  # times, but the second round shouldn't have been re-Read() since the
  # Stats haven't changed.
  response = CronServlet(Request.ForTest('trunk'),
                         delegate_for_test=delegate).Get()
  self.assertEqual(2, len(delegate.file_systems))
  self.assertTrue(*delegate.file_systems[1].CheckAndReset(
      read_count=0,
      stat_count=delegate.file_systems[0].GetStatCount()))
def testSkipNotFound(self):
  """GetFromFile(skip_not_found=True) resolves to None for a missing file,
  caches that None result, and only raises without skip_not_found."""
  mock_fs = MockFileSystem(TestFileSystem(_TEST_DATA))
  compiled_fs = CompiledFileSystem.Factory(
      ObjectStoreCreator.ForTest()).Create(
          mock_fs, Cache(lambda path, contents: contents), type(self))
  future = compiled_fs.GetFromFile('no_file', skip_not_found=True)
  # If the file doesn't exist, then the file system is not read.
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual(None, future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  # A second fetch only re-stats; no read is issued for the cached miss.
  future = compiled_fs.GetFromFile('no_file', skip_not_found=True)
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  self.assertEqual(None, future.Get())
  # The result for a non-existent file should still be cached.
  self.assertTrue(*mock_fs.CheckAndReset())
  # Without skip_not_found the missing file surfaces as FileNotFoundError.
  future = compiled_fs.GetFromFile('no_file')
  self.assertRaises(FileNotFoundError, future.Get)
def testCaching(self):
  """Read caching: identical stat versions are served from cache, version
  bumps force re-reads, and directory stats are shared between children."""
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents',
      'bob2': 'bob/bob2 contents',
      'bob3': 'bob/bob3 contents',
    }
  })
  mock_fs = MockFileSystem(test_fs)
  def create_empty_caching_fs():
    # Fresh caching layer (empty object stores) over the same mock.
    return self._CreateCachingFileSystem(mock_fs, start_empty=True)
  file_system = create_empty_caching_fs()
  # Cold read: one underlying read and one stat.
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  # Resource has been cached, so test resource is not re-fetched.
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test if the Stat version is the same the resource is not re-fetched.
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  # Test if there is a newer version, the resource is re-fetched.
  file_system = create_empty_caching_fs()
  test_fs.IncrementStat()
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  # Test directory and subdirectory stats are cached.
  file_system = create_empty_caching_fs()
  file_system._stat_object_store.Del('bob/bob0')
  file_system._read_object_store.Del('bob/bob0')
  file_system._stat_object_store.Del('bob/bob1')
  test_fs.IncrementStat()
  # Two reads but only a single stat: the directory stat covers both files.
  self.assertEqual('bob/bob1 contents', file_system.ReadSingle('bob/bob1'))
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=2, stat_count=1))
  self.assertEqual('bob/bob1 contents', file_system.ReadSingle('bob/bob1'))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test a more recent parent directory doesn't force a refetch of children.
  file_system = create_empty_caching_fs()
  file_system._read_object_store.Del('bob/bob0')
  file_system._read_object_store.Del('bob/bob1')
  self.assertEqual('bob/bob1 contents', file_system.ReadSingle('bob/bob1'))
  self.assertEqual('bob/bob2 contents', file_system.ReadSingle('bob/bob2'))
  self.assertEqual('bob/bob3 contents', file_system.ReadSingle('bob/bob3'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=3, stat_count=1))
  test_fs.IncrementStat(path='bob/')
  file_system = create_empty_caching_fs()
  # Children unchanged despite the newer parent: stats only, no reads.
  self.assertEqual('bob/bob1 contents', file_system.ReadSingle('bob/bob1'))
  self.assertEqual('bob/bob2 contents', file_system.ReadSingle('bob/bob2'))
  self.assertEqual('bob/bob3 contents', file_system.ReadSingle('bob/bob3'))
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  file_system = create_empty_caching_fs()
  file_system._stat_object_store.Del('bob/bob0')
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset())
def testUpdates(self):
  """Update() bumps the version of changed entries and every directory on
  the path to them, while untouched siblings keep their old versions."""
  fs = MockFileSystem(TestFileSystem(deepcopy(_TEST_DATA)))
  # Everything starts at version 0.
  self.assertEqual(StatInfo('0', child_versions={
    '404.html': '0',
    'apps/': '0',
    'extensions/': '0'
  }), fs.Stat(''))
  self.assertEqual(StatInfo('0'), fs.Stat('404.html'))
  self.assertEqual(StatInfo('0', child_versions={
    'a11y.html': '0',
    'about_apps.html': '0',
    'fakedir/': '0',
  }), fs.Stat('apps/'))
  self.assertEqual('404.html contents', fs.ReadSingle('404.html').Get())
  fs.Update({
    '404.html': 'New version!'
  })
  # Changed file and the root go to version 1; untouched subtrees stay at 0.
  self.assertEqual(StatInfo('1', child_versions={
    '404.html': '1',
    'apps/': '0',
    'extensions/': '0'
  }), fs.Stat(''))
  self.assertEqual(StatInfo('1'), fs.Stat('404.html'))
  self.assertEqual(StatInfo('0', child_versions={
    'a11y.html': '0',
    'about_apps.html': '0',
    'fakedir/': '0',
  }), fs.Stat('apps/'))
  self.assertEqual('New version!', fs.ReadSingle('404.html').Get())
  fs.Update({
    '404.html': 'Newer version!',
    'apps': {
      'fakedir': {
        'file.html': 'yo'
      }
    }
  })
  # Directories along the updated paths get the new version (2); files
  # inside them that weren't touched (a11y.html, about_apps.html) do not.
  self.assertEqual(StatInfo('2', child_versions={
    '404.html': '2',
    'apps/': '2',
    'extensions/': '0'
  }), fs.Stat(''))
  self.assertEqual(StatInfo('2'), fs.Stat('404.html'))
  self.assertEqual(StatInfo('2', child_versions={
    'a11y.html': '0',
    'about_apps.html': '0',
    'fakedir/': '2',
  }), fs.Stat('apps/'))
  self.assertEqual(StatInfo('0'), fs.Stat('apps/a11y.html'))
  self.assertEqual(StatInfo('2', child_versions={
    'file.html': '2'
  }), fs.Stat('apps/fakedir/'))
  self.assertEqual(StatInfo('2'), fs.Stat('apps/fakedir/file.html'))
  self.assertEqual(StatInfo('0', child_versions={
    'activeTab.html': '0',
    'alarms.html': '0'
  }), fs.Stat('extensions/'))
  self.assertEqual('Newer version!', fs.ReadSingle('404.html').Get())
  self.assertEqual('yo', fs.ReadSingle('apps/fakedir/file.html').Get())
def testCorrectFutureBehaviour(self):
  """Verifies when underlying reads are issued vs. resolved relative to the
  compiled file system's Futures."""
  # Tests that the underlying FileSystem's Read Future has had Get() called
  # on it before the Future is resolved, but the underlying Future isn't
  # resolved until Get is.
  mock_fs = MockFileSystem(TestFileSystem(_TEST_DATA))
  compiled_fs = CompiledFileSystem.Factory(
      ObjectStoreCreator.ForTest()).Create(
          mock_fs, lambda path, contents: contents, type(self))
  self.assertTrue(*mock_fs.CheckAndReset())
  # Single file: read issued eagerly, resolved only on Get().
  future = compiled_fs.GetFromFile('404.html')
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1, read_count=1))
  future.Get()
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  future = compiled_fs.GetFromFileListing('apps/')
  # Current behaviour is to have read=2 and read_resolve=1 because the first
  # level is read eagerly, then all of the second is read (in parallel). If
  # it weren't eager (and it may be worth experimenting with that) then it'd
  # be read=1 and read_resolve=0.
  self.assertTrue(*mock_fs.CheckAndReset(
      stat_count=1, read_count=2, read_resolve_count=1))
  future.Get()
  # It's doing 1 more level 'deeper' (already read 'fakedir' and 'deepdir'
  # though not resolved), so that's 1 more read/resolve + the resolve from
  # the first read.
  self.assertTrue(
      *mock_fs.CheckAndReset(read_count=1, read_resolve_count=2))
  # Even though the directory is 1 layer deep the caller has no way of
  # determining that ahead of time (though perhaps the API could give some
  # kind of clue, if we really cared).
  future = compiled_fs.GetFromFileListing('extensions/')
  self.assertTrue(*mock_fs.CheckAndReset(
      stat_count=1, read_count=1, read_resolve_count=1))
  future.Get()
  self.assertTrue(*mock_fs.CheckAndReset())
  # Similar configuration to the 'apps/' case but deeper.
  future = compiled_fs.GetFromFileListing('')
  self.assertTrue(*mock_fs.CheckAndReset(
      stat_count=1, read_count=2, read_resolve_count=1))
  future.Get()
  self.assertTrue(
      *mock_fs.CheckAndReset(read_count=2, read_resolve_count=3))
def host_file_system_constructor(branch, commit=None):
  """Constructs a MockFileSystem at |commit| on the 'master' branch by
  replaying that many updates onto the base test data."""
  self.assertEqual('master', branch)
  self.assertTrue(commit is not None)
  base_fs = TestFileSystem(test_data, relative_to=SERVER2)
  applied_updates = updates[:commit]
  return MockFileSystem.Create(base_fs, applied_updates)
def testCheckAndReset(self):
  """Exercises CheckAndReset() itself: counts must match exactly, and every
  call (pass or fail) resets the counters."""
  fs = MockFileSystem(TestFileSystem(deepcopy(_TEST_DATA)))
  # No activity: the zero-expectation check passes, non-zero ones fail.
  self.assertTrue(*fs.CheckAndReset())
  self.assertFalse(*fs.CheckAndReset(read_count=1))
  self.assertFalse(*fs.CheckAndReset(stat_count=1))
  # Reads are counted when issued; resolves when the Future is Get()d.
  future = fs.ReadSingle('apps/')
  self.assertTrue(*fs.CheckAndReset(read_count=1))
  future.Get()
  self.assertTrue(*fs.CheckAndReset(read_resolve_count=1))
  self.assertFalse(*fs.CheckAndReset(read_count=1))
  self.assertTrue(*fs.CheckAndReset())
  # Over-counting expectations fail.
  future = fs.ReadSingle('apps/')
  self.assertFalse(*fs.CheckAndReset(read_count=2))
  future.Get()
  self.assertFalse(*fs.CheckAndReset(read_resolve_count=2))
  # Multiple reads/resolves accumulate until checked.
  fs.ReadSingle('extensions/').Get()
  fs.ReadSingle('extensions/').Get()
  self.assertTrue(*fs.CheckAndReset(read_count=2, read_resolve_count=2))
  self.assertFalse(*fs.CheckAndReset(read_count=2, read_resolve_count=2))
  self.assertTrue(*fs.CheckAndReset())
  fs.ReadSingle('404.html').Get()
  self.assertTrue(*fs.CheckAndReset(read_count=1, read_resolve_count=1))
  # A read whose Get() raises counts the read but not the resolve.
  future = fs.Read(['notfound.html', 'apps/'])
  self.assertTrue(*fs.CheckAndReset(read_count=1))
  self.assertRaises(FileNotFoundError, future.Get)
  self.assertTrue(*fs.CheckAndReset(read_resolve_count=0))
  # Failed checks still reset the counters: after two failing checks the
  # plain zero-expectation check passes.
  fs.Stat('404.html')
  fs.Stat('404.html')
  fs.Stat('apps/')
  self.assertFalse(*fs.CheckAndReset(stat_count=42))
  self.assertFalse(*fs.CheckAndReset(stat_count=42))
  self.assertTrue(*fs.CheckAndReset())
  # Mixed reads and stats are counted independently.
  fs.ReadSingle('404.html').Get()
  fs.Stat('404.html')
  fs.Stat('apps/')
  self.assertTrue(
      *fs.CheckAndReset(read_count=1, read_resolve_count=1, stat_count=2))
  self.assertTrue(*fs.CheckAndReset())
def host_file_system_constructor(branch, revision=None):
  """Constructs a MockFileSystem at |revision| on trunk by replaying that
  many updates onto the base test data."""
  self.assertEqual('trunk', branch)
  self.assertTrue(revision is not None)
  base_fs = TestFileSystem(test_data)
  applied_updates = updates[:revision]
  return MockFileSystem.Create(base_fs, applied_updates)
class APIModelsTest(unittest.TestCase):
  """Tests APIModels name resolution, model lookup, failure modes, and its
  single-file read contract, over a mock extensions file system."""

  def setUp(self):
    object_store_creator = ObjectStoreCreator.ForTest()
    compiled_fs_factory = CompiledFileSystem.Factory(object_store_creator)
    self._mock_file_system = MockFileSystem(
        TestFileSystem(_TEST_DATA, relative_to=CHROME_EXTENSIONS))
    features_bundle = FeaturesBundle(self._mock_file_system,
                                     compiled_fs_factory,
                                     object_store_creator,
                                     'extensions')
    self._api_models = APIModels(features_bundle,
                                 compiled_fs_factory,
                                 self._mock_file_system,
                                 object_store_creator,
                                 'extensions',
                                 SchemaProcessorFactoryForTest())

  def testGetNames(self):
    # Both 'app' and 'app.runtime' appear here because 'app.runtime' has
    # noparent:true, but 'app.runtime.foo' etc doesn't so it's a sub-feature
    # of 'app.runtime' not a separate API. 'devtools.inspectedWindow' is an
    # API because there is no 'devtools'.
    self.assertEqual(['alarms', 'app', 'app.runtime', 'declarativeWebRequest',
                      'devtools.inspectedWindow', 'input', 'storage'],
                     sorted(self._api_models.GetNames()))

  def testGetModel(self):
    # Each API should resolve from its dotted name, its schema file name,
    # and the full CHROME_API-prefixed path.
    def get_model_name(api_name):
      return self._api_models.GetModel(api_name).Get().name
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('devtools.inspectedWindow'))
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('devtools/inspected_window.json'))
    self.assertEqual(
        'devtools.inspectedWindow',
        get_model_name(CHROME_API + 'devtools/inspected_window.json'))
    self.assertEqual('alarms', get_model_name('alarms'))
    self.assertEqual('alarms', get_model_name('alarms.idl'))
    self.assertEqual('alarms', get_model_name(CHROME_API + 'alarms.idl'))
    self.assertEqual('input.ime', get_model_name('input.ime'))
    self.assertEqual('input.ime', get_model_name('input_ime.json'))
    self.assertEqual('input.ime',
                     get_model_name(CHROME_API + 'input_ime.json'))
    self.assertEqual('pageAction', get_model_name('pageAction'))
    self.assertEqual('pageAction', get_model_name('page_action.json'))
    self.assertEqual('pageAction',
                     get_model_name(CHROME_API + 'page_action.json'))

  def testGetNonexistentModel(self):
    # APIs without schema files (and entirely unknown names) must raise
    # FileNotFoundError regardless of the lookup form used.
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('declarativeWebRequest').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel('declarative_web_request.json').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(
            CHROME_API + 'declarative_web_request.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('notfound').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('notfound.json').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(CHROME_API + 'notfound.json').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(CHROME_API + 'alarms.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('storage').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(CHROME_API + 'storage.json').Get)
    self.assertRaises(
        FileNotFoundError,
        self._api_models.GetModel(CHROME_API + 'storage.idl').Get)

  def testSingleFile(self):
    # 2 stats (1 for JSON and 1 for IDL) for each available API path.
    # 1 read (for IDL file which existed).
    future = self._api_models.GetModel('alarms')
    self.assertTrue(*self._mock_file_system.CheckAndReset(
        read_count=1, stat_count=len(API_PATHS) * 2))
    # 1 read-resolve (for the IDL file).
    #
    # The important part here and above is that it's only doing a single read;
    # any more would break the contract that only a single file is accessed -
    # see the SingleFile annotation in api_models._CreateAPIModel.
    future.Get()
    self.assertTrue(*self._mock_file_system.CheckAndReset(
        read_resolve_count=1))
    # 2 stats (1 for JSON and 1 for IDL) for each available API path.
    # No reads (still cached).
    future = self._api_models.GetModel('alarms')
    self.assertTrue(*self._mock_file_system.CheckAndReset(
        stat_count=len(API_PATHS) * 2))
    future.Get()
    self.assertTrue(*self._mock_file_system.CheckAndReset())
def GetHostFileSystemForRevision(self, revision):
  """Returns a MockFileSystem reflecting the state at |revision| (that many
  updates applied over the base test data)."""
  assert_true(revision is not None)
  assert_true(revision >= 0)
  base_fs = TestFileSystem(test_data)
  applied_updates = updates[:revision]
  return MockFileSystem.Create(base_fs, applied_updates)
def testInstanceMethods(self):
  """End-to-end check of AppYamlHelper: IsUpToDate and
  GetFirstRevisionGreaterThan track app.yaml across successive revisions."""
  test_data = {
    'server2': {
      'app.yaml': _GenerateAppYaml('1-0'),
      'app_yaml_helper.py': 'Copyright notice etc'
    }
  }
  updates = []
  # Pass a specific file system at head to the HostFileSystemProvider so that
  # we know it's always going to be backed by a MockFileSystem. The Provider
  # may decide to wrap it in caching etc.
  file_system_at_head = MockFileSystem(TestFileSystem(test_data))
  def apply_update(update):
    # Advance head and remember the update so historical revisions can be
    # reconstructed by host_file_system_constructor.
    file_system_at_head.Update(update)
    updates.append(update)
  def host_file_system_constructor(branch, revision=None):
    self.assertEqual('trunk', branch)
    self.assertTrue(revision is not None)
    return MockFileSystem.Create(TestFileSystem(test_data),
                                 updates[:revision])
  object_store_creator = ObjectStoreCreator.ForTest()
  host_file_system_provider = HostFileSystemProvider(
      object_store_creator,
      default_trunk_instance=file_system_at_head,
      constructor_for_test=host_file_system_constructor)
  helper = AppYamlHelper('server2/app.yaml',
                         object_store_creator,
                         host_file_system_provider)
  def assert_is_up_to_date(version):
    # An up-to-date version has no revision greater than it, so asking for
    # one is a ValueError.
    self.assertTrue(helper.IsUpToDate(version),
                    '%s is not up to date' % version)
    self.assertRaises(ValueError,
                      helper.GetFirstRevisionGreaterThan, version)
  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  assert_is_up_to_date('1-0-0')
  assert_is_up_to_date('1-5-0')
  # Revision 1.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('1-5-0')}})
  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  self.assertEqual(1, helper.GetFirstRevisionGreaterThan('1-0-0'))
  assert_is_up_to_date('1-5-0')
  assert_is_up_to_date('2-5-0')
  # Revision 2. Only a non-app.yaml file changes, so version queries are
  # unaffected.
  apply_update({'server2': {'app_yaml_helper.py': 'fixed a bug'}})
  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  self.assertEqual(1, helper.GetFirstRevisionGreaterThan('1-0-0'))
  assert_is_up_to_date('1-5-0')
  assert_is_up_to_date('2-5-0')
  # Revision 3.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('1-6-0')}})
  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  self.assertEqual(1, helper.GetFirstRevisionGreaterThan('1-0-0'))
  self.assertEqual(3, helper.GetFirstRevisionGreaterThan('1-5-0'))
  assert_is_up_to_date('2-5-0')
  # Revision 4.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('1-8-0')}})
  # Revision 5.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('2-0-0')}})
  # Revision 6.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('2-2-0')}})
  # Revision 7.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('2-4-0')}})
  # Revision 8.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('2-6-0')}})
  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  self.assertEqual(1, helper.GetFirstRevisionGreaterThan('1-0-0'))
  self.assertEqual(3, helper.GetFirstRevisionGreaterThan('1-5-0'))
  self.assertEqual(5, helper.GetFirstRevisionGreaterThan('1-8-0'))
  self.assertEqual(6, helper.GetFirstRevisionGreaterThan('2-0-0'))
  self.assertEqual(6, helper.GetFirstRevisionGreaterThan('2-1-0'))
  self.assertEqual(7, helper.GetFirstRevisionGreaterThan('2-2-0'))
  self.assertEqual(7, helper.GetFirstRevisionGreaterThan('2-3-0'))
  self.assertEqual(8, helper.GetFirstRevisionGreaterThan('2-4-0'))
  self.assertEqual(8, helper.GetFirstRevisionGreaterThan('2-5-0'))
  assert_is_up_to_date('2-6-0')
  assert_is_up_to_date('2-7-0')
def testCaching(self):
  """Read caching with Futures: reads/stats happen on issue, resolves on
  Get(); matching stat versions are served from cache and version bumps
  force re-reads."""
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents',
      'bob2': 'bob/bob2 contents',
      'bob3': 'bob/bob3 contents',
    }
  })
  mock_fs = MockFileSystem(test_fs)
  def create_empty_caching_fs():
    # Fresh caching layer (empty object stores) over the same mock.
    return self._CreateCachingFileSystem(mock_fs, start_empty=True)
  file_system = create_empty_caching_fs()
  # The stat/read should happen before resolving the Future, and resolving
  # the future shouldn't do any additional work.
  get_future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', get_future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  # Resource has been cached, so test resource is not re-fetched.
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test if the Stat version is the same the resource is not re-fetched.
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  # Test if there is a newer version, the resource is re-fetched.
  file_system = create_empty_caching_fs()
  test_fs.IncrementStat();
  future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  # Test directory and subdirectory stats are cached.
  file_system = create_empty_caching_fs()
  file_system._stat_object_store.Del('bob/bob0')
  file_system._read_object_store.Del('bob/bob0')
  file_system._stat_object_store.Del('bob/bob1')
  test_fs.IncrementStat();
  # Two reads but a single stat: the directory stat covers both files.
  futures = (file_system.ReadSingle('bob/bob1'),
             file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=2, stat_count=1))
  self.assertEqual(('bob/bob1 contents', 'bob/bob0 contents'),
                   tuple(future.Get() for future in futures))
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=2))
  self.assertEqual('bob/bob1 contents',
                   file_system.ReadSingle('bob/bob1').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
  # Test a more recent parent directory doesn't force a refetch of children.
  file_system = create_empty_caching_fs()
  file_system._read_object_store.Del('bob/bob0')
  file_system._read_object_store.Del('bob/bob1')
  futures = (file_system.ReadSingle('bob/bob1'),
             file_system.ReadSingle('bob/bob2'),
             file_system.ReadSingle('bob/bob3'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=3, stat_count=1))
  self.assertEqual(
      ('bob/bob1 contents', 'bob/bob2 contents', 'bob/bob3 contents'),
      tuple(future.Get() for future in futures))
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=3))
  test_fs.IncrementStat(path='bob/')
  file_system = create_empty_caching_fs()
  # Children unchanged despite the newer parent: stat only, no reads.
  self.assertEqual('bob/bob1 contents',
                   file_system.ReadSingle('bob/bob1').Get())
  self.assertEqual('bob/bob2 contents',
                   file_system.ReadSingle('bob/bob2').Get())
  self.assertEqual('bob/bob3 contents',
                   file_system.ReadSingle('bob/bob3').Get())
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))
  file_system = create_empty_caching_fs()
  file_system._stat_object_store.Del('bob/bob0')
  future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset())
class APIModelsTest(unittest.TestCase):
  """Tests APIModels name resolution, model lookup, failure modes, and its
  single-file read contract, over a mock file system."""

  def setUp(self):
    object_store_creator = ObjectStoreCreator.ForTest()
    compiled_fs_factory = CompiledFileSystem.Factory(object_store_creator)
    self._mock_file_system = MockFileSystem(TestFileSystem(_TEST_DATA))
    features_bundle = FeaturesBundle(self._mock_file_system,
                                     compiled_fs_factory,
                                     object_store_creator)
    self._api_models = APIModels(features_bundle,
                                 compiled_fs_factory,
                                 self._mock_file_system)

  def testGetNames(self):
    # Both 'app' and 'app.runtime' appear here because 'app.runtime' has
    # noparent:true, but 'app.runtime.experimental' etc doesn't so it's a
    # sub-feature of 'app.runtime' not a separate API.
    # 'devtools.inspectedWindow' is an API because there is no 'devtools'.
    self.assertEqual(['alarms', 'app', 'app.runtime', 'declarativeWebRequest',
                      'devtools.inspectedWindow',
                      'experimental.accessibility', 'storage'],
                     sorted(self._api_models.GetNames()))

  def testGetModel(self):
    # Each API should resolve from its dotted name, its schema file name,
    # and the api/-prefixed path.
    def get_model_name(api_name):
      return self._api_models.GetModel(api_name).Get().name
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('devtools.inspectedWindow'))
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('devtools/inspected_window.json'))
    self.assertEqual('devtools.inspectedWindow',
                     get_model_name('api/devtools/inspected_window.json'))
    self.assertEqual('alarms', get_model_name('alarms'))
    self.assertEqual('alarms', get_model_name('alarms.idl'))
    self.assertEqual('alarms', get_model_name('api/alarms.idl'))
    self.assertEqual('declarativeWebRequest',
                     get_model_name('declarativeWebRequest'))
    self.assertEqual('declarativeWebRequest',
                     get_model_name('declarative_web_request.json'))
    self.assertEqual('declarativeWebRequest',
                     get_model_name('api/declarative_web_request.json'))
    self.assertEqual('experimental.accessibility',
                     get_model_name('experimental.accessibility'))
    self.assertEqual('experimental.accessibility',
                     get_model_name('experimental_accessibility.json'))
    self.assertEqual('experimental.accessibility',
                     get_model_name('api/experimental_accessibility.json'))
    self.assertEqual('pageAction', get_model_name('pageAction'))
    self.assertEqual('pageAction', get_model_name('page_action.json'))
    self.assertEqual('pageAction', get_model_name('api/page_action.json'))

  def testGetNonexistentModel(self):
    # Unknown names, and schema paths that don't exist, must raise
    # FileNotFoundError in every lookup form.
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('notfound').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('notfound.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('api/notfound.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('api/alarms.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('storage').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('api/storage.json').Get)
    self.assertRaises(FileNotFoundError,
                      self._api_models.GetModel('api/storage.idl').Get)

  def testSingleFile(self):
    # 2 stats (1 for JSON and 1 for IDL), 1 read (for IDL file which existed).
    future = self._api_models.GetModel('alarms')
    self.assertTrue(
        *self._mock_file_system.CheckAndReset(read_count=1, stat_count=2))
    # 1 read-resolve (for the IDL file).
    #
    # The important part here and above is that it's only doing a single read;
    # any more would break the contract that only a single file is accessed -
    # see the SingleFile annotation in api_models._CreateAPIModel.
    future.Get()
    self.assertTrue(*self._mock_file_system.CheckAndReset(
        read_resolve_count=1))
    # 2 stats (1 for JSON and 1 for IDL), no reads (still cached).
    future = self._api_models.GetModel('alarms')
    self.assertTrue(*self._mock_file_system.CheckAndReset(stat_count=2))
    future.Get()
    self.assertTrue(*self._mock_file_system.CheckAndReset())
def GetBranch(self, branch):
  '''Returns a MockFileSystem over the test data registered for |branch|.

  |branch| is coerced to a string key into |self._file_system_data|.
  '''
  branch_data = self._file_system_data[str(branch)]
  return MockFileSystem(TestFileSystem(branch_data))
def testCaching(self):
  '''Verifies read/stat accounting of the caching file system: cached reads
  do no underlying work, stat-version bumps force refetches, and not-found
  results are cached when skip_not_found=True.
  '''
  test_fs = TestFileSystem({
    'bob': {
      'bob0': 'bob/bob0 contents',
      'bob1': 'bob/bob1 contents',
      'bob2': 'bob/bob2 contents',
      'bob3': 'bob/bob3 contents',
    }
  })
  mock_fs = MockFileSystem(test_fs)
  def create_empty_caching_fs():
    # start_empty=True: a fresh cache with no carried-over entries.
    return self._CreateCachingFileSystem(mock_fs, start_empty=True)

  file_system = create_empty_caching_fs()
  # The stat/read should happen before resolving the Future, and resolving
  # the future shouldn't do any additional work.
  get_future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1))
  self.assertEqual('bob/bob0 contents', get_future.Get())
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=1, stat_count=1))

  # Resource has been cached, so test resource is not re-fetched.
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset())

  # Test if the Stat version is the same the resource is not re-fetched.
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))

  # Test if there is a newer version, the resource is re-fetched.
  file_system = create_empty_caching_fs()
  test_fs.IncrementStat()
  future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents', future.Get())
  self.assertTrue(*mock_fs.CheckAndReset(read_resolve_count=1))

  # Test directory and subdirectory stats are cached.
  file_system = create_empty_caching_fs()
  file_system._stat_cache.Del('bob/bob0')
  file_system._read_cache.Del('bob/bob0')
  file_system._stat_cache.Del('bob/bob1')
  test_fs.IncrementStat()
  # Two reads are issued, but only a single stat: the directory stat
  # covers both children.
  futures = (file_system.ReadSingle('bob/bob1'),
             file_system.ReadSingle('bob/bob0'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=2))
  self.assertEqual(('bob/bob1 contents', 'bob/bob0 contents'),
                   tuple(future.Get() for future in futures))
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=2, stat_count=1))
  self.assertEqual('bob/bob1 contents',
                   file_system.ReadSingle('bob/bob1').Get())
  self.assertTrue(*mock_fs.CheckAndReset())

  # Test a more recent parent directory doesn't force a refetch of children.
  file_system = create_empty_caching_fs()
  file_system._read_cache.Del('bob/bob0')
  file_system._read_cache.Del('bob/bob1')
  futures = (file_system.ReadSingle('bob/bob1'),
             file_system.ReadSingle('bob/bob2'),
             file_system.ReadSingle('bob/bob3'))
  self.assertTrue(*mock_fs.CheckAndReset(read_count=3))
  self.assertEqual(
      ('bob/bob1 contents', 'bob/bob2 contents', 'bob/bob3 contents'),
      tuple(future.Get() for future in futures))
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=3, stat_count=1))

  # Only bob/bob0's version is bumped; its siblings stay cached.
  test_fs.IncrementStat(path='bob/bob0')
  file_system = create_empty_caching_fs()
  self.assertEqual('bob/bob1 contents',
                   file_system.ReadSingle('bob/bob1').Get())
  self.assertEqual('bob/bob2 contents',
                   file_system.ReadSingle('bob/bob2').Get())
  self.assertEqual('bob/bob3 contents',
                   file_system.ReadSingle('bob/bob3').Get())
  self.assertTrue(*mock_fs.CheckAndReset(stat_count=1))

  file_system = create_empty_caching_fs()
  file_system._stat_cache.Del('bob/bob0')
  future = file_system.ReadSingle('bob/bob0')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1))
  self.assertEqual('bob/bob0 contents', future.Get())
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=1, stat_count=1))
  self.assertEqual('bob/bob0 contents',
                   file_system.ReadSingle('bob/bob0').Get())
  self.assertTrue(*mock_fs.CheckAndReset())

  # Test skip_not_found caching behavior.
  file_system = create_empty_caching_fs()
  future = file_system.ReadSingle('bob/no_file', skip_not_found=True)
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1))
  self.assertEqual(None, future.Get())
  self.assertTrue(
      *mock_fs.CheckAndReset(read_resolve_count=1, stat_count=1))
  future = file_system.ReadSingle('bob/no_file', skip_not_found=True)
  # There shouldn't be another read/stat from the file system;
  # we know the file is not there.
  self.assertTrue(*mock_fs.CheckAndReset())
  future = file_system.ReadSingle('bob/no_file')
  self.assertTrue(*mock_fs.CheckAndReset(read_count=1))
  # Even though we cached information about non-existent files,
  # trying to read one without specifying skip_not_found should
  # still raise an error.
  self.assertRaises(FileNotFoundError, future.Get)
def testInstanceMethods(self):
  '''Drives AppYamlHelper through a sequence of app.yaml version bumps and
  checks IsUpToDate/GetFirstRevisionGreaterThan at each revision.
  '''
  test_data = {
    'server2': {
      'app.yaml': _GenerateAppYaml('1-0'),
      'app_yaml_helper.py': 'Copyright notice etc'
    }
  }

  updates = []
  # |file_system_at_head| tracks HEAD; |updates| records the history so
  # |constructor| can rebuild the file system at any past revision.
  file_system_at_head = MockFileSystem(TestFileSystem(test_data))

  def apply_update(update):
    file_system_at_head.Update(update)
    updates.append(update)

  def constructor(branch, revision=None):
    # revision=None slices to the full list, i.e. HEAD.
    return MockFileSystem.Create(TestFileSystem(test_data),
                                 updates[:revision])
  host_file_system_creator = HostFileSystemCreator(
      ObjectStoreCreator.ForTest(),
      constructor_for_test=constructor)

  helper = AppYamlHelper('server2/app.yaml',
                         file_system_at_head,
                         ObjectStoreCreator.ForTest(disable_wrappers=False),
                         host_file_system_creator)

  def assert_is_up_to_date(version):
    # An up-to-date version has no "first revision greater than" it, so
    # asking for one is a ValueError.
    self.assertTrue(helper.IsUpToDate(version),
                    '%s is not up to date' % version)
    self.assertRaises(ValueError,
                      helper.GetFirstRevisionGreaterThan, version)

  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  assert_is_up_to_date('1-0-0')
  assert_is_up_to_date('1-5-0')

  # Revision 1.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('1-5-0')}})
  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  self.assertEqual(1, helper.GetFirstRevisionGreaterThan('1-0-0'))
  assert_is_up_to_date('1-5-0')
  assert_is_up_to_date('2-5-0')

  # Revision 2. Note: not an app.yaml change, so version queries are
  # unaffected.
  apply_update({'server2': {'app_yaml_helper.py': 'fixed a bug'}})
  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  self.assertEqual(1, helper.GetFirstRevisionGreaterThan('1-0-0'))
  assert_is_up_to_date('1-5-0')
  assert_is_up_to_date('2-5-0')

  # Revision 3.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('1-6-0')}})
  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  self.assertEqual(1, helper.GetFirstRevisionGreaterThan('1-0-0'))
  self.assertEqual(3, helper.GetFirstRevisionGreaterThan('1-5-0'))
  assert_is_up_to_date('2-5-0')

  # Revision 4.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('1-8-0')}})
  # Revision 5.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('2-0-0')}})
  # Revision 6.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('2-2-0')}})
  # Revision 7.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('2-4-0')}})
  # Revision 8.
  apply_update({'server2': {'app.yaml': _GenerateAppYaml('2-6-0')}})

  self.assertEqual(0, helper.GetFirstRevisionGreaterThan('0-5-0'))
  self.assertEqual(1, helper.GetFirstRevisionGreaterThan('1-0-0'))
  self.assertEqual(3, helper.GetFirstRevisionGreaterThan('1-5-0'))
  self.assertEqual(5, helper.GetFirstRevisionGreaterThan('1-8-0'))
  self.assertEqual(6, helper.GetFirstRevisionGreaterThan('2-0-0'))
  self.assertEqual(6, helper.GetFirstRevisionGreaterThan('2-1-0'))
  self.assertEqual(7, helper.GetFirstRevisionGreaterThan('2-2-0'))
  self.assertEqual(7, helper.GetFirstRevisionGreaterThan('2-3-0'))
  self.assertEqual(8, helper.GetFirstRevisionGreaterThan('2-4-0'))
  self.assertEqual(8, helper.GetFirstRevisionGreaterThan('2-5-0'))
  assert_is_up_to_date('2-6-0')
  assert_is_up_to_date('2-7-0')
def constructor(branch, revision=None):
  '''Creates a MockFileSystem at |revision| by applying that many |updates|
  on top of |test_data|.

  Args:
    branch: unused here; accepted to satisfy the constructor_for_test
        interface.
    revision: how many updates to apply. None means all of them (HEAD).
        A numeric string is accepted and coerced to int; previously
        updates[:revision] would raise TypeError for a string revision.
  '''
  # Explicit None check (rather than relying on updates[:None] slicing to
  # the end) makes the HEAD case obvious and lets us coerce other values.
  updates_for_revision = (updates if revision is None
                          else updates[:int(revision)])
  return MockFileSystem.Create(TestFileSystem(test_data),
                               updates_for_revision)
def setUp(self):
  '''Builds a platform bundle from a ServerInstance backed by _TEST_DATA.'''
  test_fs = TestFileSystem(_TEST_DATA, relative_to=CHROME_EXTENSIONS)
  server_instance = ServerInstance.ForTest(
      file_system=MockFileSystem(test_fs))
  self._platform_bundle = server_instance.platform_bundle