def setUp(self):
  """Builds a BranchUtility backed by fake fetcher data for each test."""
  config_path = os.path.join('branch_utility', 'first.json')
  default_branches = {'extensions': 'stable', 'apps': 'trunk'}
  fetcher = FakeUrlFetcher(os.path.join(sys.path[0], 'test_data'))
  self._branch_util = BranchUtility(config_path,
                                    default_branches,
                                    fetcher,
                                    InMemoryObjectStore(''))
def setUp(self):
  """Creates one fake factory per data source plus an in-memory store."""
  self._base_path = os.path.join(
      sys.path[0], 'test_data', 'template_data_source')
  # Each data source gets its own independent fake factory instance.
  for attr in ('_fake_api_data_source_factory',
               '_fake_api_list_data_source_factory',
               '_fake_intro_data_source_factory',
               '_fake_samples_data_source_factory'):
    setattr(self, attr, _FakeFactory())
  self._object_store = InMemoryObjectStore('fake_branch')
def setUp(self):
  """Points the fake fetchers at test data and builds a GithubFileSystem."""
  test_root = sys.path[0]
  ConfigureFakeFetchers(os.path.join(test_root, os.pardir))
  self._base_path = os.path.join(test_root, 'test_data', 'github_file_system')
  fetcher = AppEngineUrlFetcher(url_constants.GITHUB_URL)
  self._file_system = GithubFileSystem(fetcher,
                                       InMemoryObjectStore('github'),
                                       AppEngineBlobstore())
def setUp(self):
  """Wires a memcache-backed local file system into an ExampleZipper."""
  store = InMemoryObjectStore('')
  local_fs = LocalFileSystem(os.path.join(sys.path[0], 'test_data'))
  self._file_system = MemcacheFileSystem(local_fs, store)
  compiled_fs_factory = CompiledFileSystem.Factory(self._file_system, store)
  self._example_zipper = ExampleZipper(self._file_system,
                                       compiled_fs_factory,
                                       'example_zipper')
def _GetInstanceForBranch(channel_name, local_path): branch = BRANCH_UTILITY.GetBranchNumberForChannelName(channel_name) # The key for the server is a tuple of |channel_name| with |branch|, since # sometimes stable and beta point to the same branch. instance_key = _MakeInstanceKey(channel_name, branch) instance = SERVER_INSTANCES.get(instance_key, None) if instance is not None: return instance branch_memcache = InMemoryObjectStore(branch) if branch == 'local': file_system = LocalFileSystem(local_path) else: file_system = _CreateMemcacheFileSystem(branch, branch_memcache) cache_factory = CompiledFileSystem.Factory(file_system, branch_memcache) api_list_data_source_factory = APIListDataSource.Factory(cache_factory, file_system, API_PATH, PUBLIC_TEMPLATE_PATH) intro_data_source_factory = IntroDataSource.Factory( cache_factory, [INTRO_PATH, ARTICLE_PATH]) samples_data_source_factory = SamplesDataSource.Factory( channel_name, file_system, GITHUB_FILE_SYSTEM, cache_factory, GITHUB_COMPILED_FILE_SYSTEM, api_list_data_source_factory, EXAMPLES_PATH) api_data_source_factory = APIDataSource.Factory(cache_factory, API_PATH, samples_data_source_factory) template_data_source_factory = TemplateDataSource.Factory( channel_name, api_data_source_factory, api_list_data_source_factory, intro_data_source_factory, samples_data_source_factory, KNOWN_ISSUES_DATA_SOURCE, cache_factory, PUBLIC_TEMPLATE_PATH, PRIVATE_TEMPLATE_PATH) example_zipper = ExampleZipper(file_system, cache_factory, DOCS_PATH) instance = ServerInstance(template_data_source_factory, example_zipper, cache_factory) SERVER_INSTANCES[instance_key] = instance return instance
def Create(self, version=None, category=None):
  '''Builds an object store under this factory's top-level namespace.

  The namespace is extended with |category| (for classes that need several
  stores, e.g. one for stat and one for read) and then with |version|,
  joined by '/'. Incrementing |version| effectively invalidates old data.
  '''
  parts = [self._name]
  if category is not None:
    # Digits are reserved for the version component of the namespace.
    assert not any(c.isdigit() for c in category)
    parts.append(category)
  if version is not None:
    assert isinstance(version, int)
    parts.append(str(version))
  namespace = '/'.join(parts)
  if self._store_type is None:
    return InMemoryObjectStore(MemcacheObjectStore(namespace))
  return self._store_type(namespace)
def _HandleCron(self, path):
  '''Renders stale pages for the channel named at the end of |path|.

  Fix: the re-render loop used a variable named |path|, shadowing this
  method's |path| parameter; the loop variable is renamed |watched_path|.
  '''
  # Cache population strategy:
  #
  # We could list all files in PUBLIC_TEMPLATE_PATH then render them. However,
  # this would be inefficient in the common case where files haven't changed
  # since the last cron.
  #
  # Instead, let the CompiledFileSystem give us clues when to re-render: we
  # use the CFS to check whether the templates, examples, or API folders have
  # been changed. If there has been a change, the compilation function will
  # be called. The same is then done separately with the apps samples page,
  # since it pulls its data from Github.
  channel = path.split('/')[-1]
  branch = BRANCH_UTILITY.GetBranchNumberForChannelName(channel)
  logging.info('Running cron job for %s.' % branch)
  branch_memcache = InMemoryObjectStore(branch)
  file_system = _CreateMemcacheFileSystem(branch, branch_memcache)
  factory = CompiledFileSystem.Factory(file_system, branch_memcache)
  needs_render = self._ValueHolder(False)
  # The compilation function flips |needs_render| whenever a watched folder
  # has changed since the last cron (bumping _VERSION forces this too).
  invalidation_cache = factory.Create(lambda _: needs_render.Set(True),
                                      compiled_fs.CRON_INVALIDATION,
                                      version=_VERSION)
  for watched_path in [TEMPLATE_PATH, EXAMPLES_PATH, API_PATH]:
    invalidation_cache.GetFromFile(watched_path + '/')
  if needs_render.Get():
    file_listing_cache = factory.Create(lambda x: x,
                                        compiled_fs.CRON_FILE_LISTING)
    self._Render(
        file_listing_cache.GetFromFileListing(PUBLIC_TEMPLATE_PATH),
        channel)
  else:
    # If |needs_render| was True, this page was already rendered, and we don't
    # need to render again. Here only the Github-backed apps samples page may
    # need refreshing; creating the cache registers the invalidation callback.
    github_invalidation_cache = GITHUB_COMPILED_FILE_SYSTEM.Create(
        lambda _: needs_render.Set(True),
        compiled_fs.CRON_GITHUB_INVALIDATION)
    if needs_render.Get():
      self._Render([PUBLIC_TEMPLATE_PATH + '/apps/samples.html'], channel)
    # It's good to keep the extensions samples page fresh, because if it
    # gets dropped from the cache ALL the extensions pages time out.
    self._Render([PUBLIC_TEMPLATE_PATH + '/extensions/samples.html'], channel)
  self.response.out.write('Success')
def DISABLED_testSimple(self):
  """Checks that the same API dict is served under all three name forms."""
  cache_factory = CompiledFileSystem.Factory(
      LocalFileSystem(self._base_path),
      InMemoryObjectStore('fake_branch'))
  data_source_factory = APIDataSource.Factory(cache_factory,
                                              '.',
                                              FakeSamplesDataSource())
  data_source = data_source_factory.Create({})
  # Take the dict out of the list.
  expected = json.loads(self._ReadLocalFile('expected_test_file.json'))
  expected['permissions'] = None
  # All three spellings of the file name must resolve to the same data.
  for key in ('test_file', 'testFile', 'testFile.html'):
    actual = data_source[key]
    actual.pop('samples')
    self.assertEqual(expected, actual)
  self.assertRaises(FileNotFoundError, data_source.get, 'junk')
from third_party.json_schema_compiler.model import UnixName
import url_constants

# The branch that the server will default to when no branch is specified in the
# URL. This is necessary because it is not possible to pass flags to the script
# handler.
# Production settings:
DEFAULT_BRANCHES = {'extensions': 'stable', 'apps': 'trunk'}
# Dev settings:
# DEFAULT_BRANCHES = { 'extensions': 'local', 'apps': 'local' }

# Increment this version to force the server to reload all pages in the first
# cron job that is run.
_VERSION = 0

# Branch lookups are shared process-wide and cached in their own namespace.
BRANCH_UTILITY_MEMCACHE = InMemoryObjectStore('branch_utility')
BRANCH_UTILITY = BranchUtility(url_constants.OMAHA_PROXY_URL,
                               DEFAULT_BRANCHES,
                               AppEngineUrlFetcher(None),
                               BRANCH_UTILITY_MEMCACHE)

# Apps samples are served out of Github rather than the Chromium tree.
GITHUB_MEMCACHE = InMemoryObjectStore('github')
GITHUB_FILE_SYSTEM = GithubFileSystem(
    AppEngineUrlFetcher(url_constants.GITHUB_URL),
    GITHUB_MEMCACHE,
    AppEngineBlobstore())
GITHUB_COMPILED_FILE_SYSTEM = CompiledFileSystem.Factory(
    GITHUB_FILE_SYSTEM,
    GITHUB_MEMCACHE)

# Paths relative to the repository / docs roots.
EXTENSIONS_PATH = 'chrome/common/extensions'
DOCS_PATH = 'docs'
API_PATH = 'api'
TEMPLATE_PATH = DOCS_PATH + '/templates'
def setUp(self):
  """Creates a fresh object store and a local FS rooted at the test data."""
  self._object_store = InMemoryObjectStore('')
  test_data_root = os.path.join(sys.path[0], 'test_data', 'file_system')
  self._local_fs = LocalFileSystem(test_data_root)
class MemcacheFileSystemTest(unittest.TestCase):
  """Tests for MemcacheFileSystem's read/stat caching behavior."""

  def setUp(self):
    self._object_store = InMemoryObjectStore('')
    self._local_fs = LocalFileSystem(os.path.join(sys.path[0],
                                                  'test_data',
                                                  'file_system'))

  # Helpers that poke the cache directly, bypassing the file system.
  def _SetReadCacheItem(self, key, value, stat):
    self._object_store.Set(key, (value, stat), object_store.FILE_SYSTEM_READ)

  def _SetStatCacheItem(self, key, value):
    self._object_store.Set(key, value, object_store.FILE_SYSTEM_STAT)

  def _DeleteReadCacheItem(self, key):
    self._object_store.Delete(key, object_store.FILE_SYSTEM_READ)

  def _DeleteStatCacheItem(self, key):
    self._object_store.Delete(key, object_store.FILE_SYSTEM_STAT)

  def testReadFiles(self):
    # Reads through the cache layer must match the underlying files.
    file_system = MemcacheFileSystem(self._local_fs, self._object_store)
    expected = {
      './test1.txt': 'test1\n',
      './test2.txt': 'test2\n',
      './test3.txt': 'test3\n',
    }
    self.assertEqual(
        expected,
        file_system.Read(['./test1.txt', './test2.txt', './test3.txt']).Get())

  def testListDir(self):
    file_system = MemcacheFileSystem(self._local_fs, self._object_store)
    expected = ['dir/']
    for i in range(7):
      expected.append('file%d.html' % i)
    # Seed the read cache with the current stat version, so the listing is
    # served from cache rather than from disk.
    self._SetReadCacheItem('list/', expected, file_system.Stat('list/').version)
    self.assertEqual(expected, sorted(file_system.ReadSingle('list/')))
    expected.remove('file0.html')
    self._SetReadCacheItem('list/', expected, file_system.Stat('list/').version)
    self.assertEqual(expected, sorted(file_system.ReadSingle('list/')))

  def testCaching(self):
    # NOTE: statement order is significant throughout — each CheckAndReset
    # asserts exactly how many reads/stats the previous calls performed.
    fake_fs = _FakeFileSystem()
    file_system = MemcacheFileSystem(fake_fs, self._object_store)
    self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
    self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))
    # Resource has been cached, so test resource is not re-fetched.
    self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
    self.assertTrue(fake_fs.CheckAndReset())
    # Test if the Stat version is the same the resource is not re-fetched.
    self._DeleteStatCacheItem('bob/bob0')
    self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
    self.assertTrue(fake_fs.CheckAndReset(stat_count=1))
    # Test if there is a newer version, the resource is re-fetched.
    self._DeleteStatCacheItem('bob/bob0')
    fake_fs.stat_value += 1
    self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
    self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))
    # Test directory and subdirectory stats are cached.
    self._DeleteStatCacheItem('bob/bob0')
    self._DeleteReadCacheItem('bob/bob0')
    self._DeleteStatCacheItem('bob/bob1')
    self.assertEqual('bob/bob1', file_system.ReadSingle('bob/bob1'))
    self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
    self.assertTrue(fake_fs.CheckAndReset(read_count=2, stat_count=1))
    self.assertEqual('bob/bob1', file_system.ReadSingle('bob/bob1'))
    self.assertTrue(fake_fs.CheckAndReset())
    # Test a more recent parent directory doesn't force a refetch of children.
    self._DeleteReadCacheItem('bob/bob0')
    self._DeleteReadCacheItem('bob/bob1')
    self.assertEqual('bob/bob1', file_system.ReadSingle('bob/bob1'))
    self.assertEqual('bob/bob2', file_system.ReadSingle('bob/bob2'))
    self.assertEqual('bob/bob3', file_system.ReadSingle('bob/bob3'))
    self.assertTrue(fake_fs.CheckAndReset(read_count=3))
    self._SetStatCacheItem('bob/', 10)
    self.assertEqual('bob/bob1', file_system.ReadSingle('bob/bob1'))
    self.assertEqual('bob/bob2', file_system.ReadSingle('bob/bob2'))
    self.assertEqual('bob/bob3', file_system.ReadSingle('bob/bob3'))
    self.assertTrue(fake_fs.CheckAndReset())
    self._DeleteStatCacheItem('bob/bob0')
    self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
    self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))
    self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
    self.assertTrue(fake_fs.CheckAndReset())
def _CreateRefResolver(self, filename):
  """Returns a ReferenceResolver built from the JSON fixture |filename|."""
  source = FakeAPIAndListDataSource(self._LoadJSON(filename))
  factory = ReferenceResolver.Factory(source, source, InMemoryObjectStore(''))
  return factory.Create()
def setUp(self):
  """Builds a BranchUtility over fake fetcher data for each test."""
  config_path = os.path.join('branch_utility', 'first.json')
  fetcher = FakeUrlFetcher(os.path.join(sys.path[0], 'test_data'))
  self._branch_util = BranchUtility(config_path,
                                    fetcher,
                                    InMemoryObjectStore(''))
def testGetLink(self):
  """Exercises GetLink/ResolveAllLinks against the fake API fixture.

  GetLink(ref, namespace) returns a dict with 'href', 'text' and 'name',
  or None when the reference cannot be resolved from |namespace|.
  """
  data_source = FakeAPIDataSource(
      json.loads(self._ReadLocalFile('fake_data_source.json')))
  resolver = ReferenceResolver(data_source,
                               data_source,
                               InMemoryObjectStore(''))
  # Fully-qualified references resolved from an unrelated namespace.
  self.assertEqual({
    'href': 'foo.html#type-foo_t1',
    'text': 'foo.foo_t1',
    'name': 'foo_t1'
  }, resolver.GetLink('foo.foo_t1', 'baz'))
  self.assertEqual({
    'href': 'baz.html#event-baz_e1',
    'text': 'baz_e1',
    'name': 'baz_e1'
  }, resolver.GetLink('baz.baz_e1', 'baz'))
  # Unqualified reference resolved within its own namespace.
  self.assertEqual({
    'href': 'baz.html#event-baz_e1',
    'text': 'baz_e1',
    'name': 'baz_e1'
  }, resolver.GetLink('baz_e1', 'baz'))
  self.assertEqual({
    'href': 'foo.html#method-foo_f1',
    'text': 'foo.foo_f1',
    'name': 'foo_f1'
  }, resolver.GetLink('foo.foo_f1', 'baz'))
  self.assertEqual({
    'href': 'foo.html#property-foo_p3',
    'text': 'foo.foo_p3',
    'name': 'foo_p3'
  }, resolver.GetLink('foo.foo_p3', 'baz'))
  # Nested ("bar.bon") namespaces.
  self.assertEqual({
    'href': 'bar.bon.html#type-bar_bon_t3',
    'text': 'bar.bon.bar_bon_t3',
    'name': 'bar_bon_t3'
  }, resolver.GetLink('bar.bon.bar_bon_t3', 'baz'))
  self.assertEqual({
    'href': 'bar.bon.html#property-bar_bon_p3',
    'text': 'bar_bon_p3',
    'name': 'bar_bon_p3'
  }, resolver.GetLink('bar_bon_p3', 'bar.bon'))
  self.assertEqual({
    'href': 'bar.bon.html#property-bar_bon_p3',
    'text': 'bar_bon_p3',
    'name': 'bar_bon_p3'
  }, resolver.GetLink('bar.bon.bar_bon_p3', 'bar.bon'))
  self.assertEqual({
    'href': 'bar.html#event-bar_e2',
    'text': 'bar_e2',
    'name': 'bar_e2'
  }, resolver.GetLink('bar.bar_e2', 'bar'))
  self.assertEqual({
    'href': 'bar.html#type-bon',
    'text': 'bon',
    'name': 'bon'
  }, resolver.GetLink('bar.bon', 'bar'))
  # Members of nested types resolve through the containing type.
  self.assertEqual({
    'href': 'foo.html#event-foo_t3-foo_t3_e1',
    'text': 'foo_t3.foo_t3_e1',
    'name': 'foo_t3_e1'
  }, resolver.GetLink('foo_t3.foo_t3_e1', 'foo'))
  self.assertEqual({
    'href': 'foo.html#event-foo_t3-foo_t3_e1',
    'text': 'foo_t3.foo_t3_e1',
    'name': 'foo_t3_e1'
  }, resolver.GetLink('foo.foo_t3.foo_t3_e1', 'foo'))
  # Resolution may also pass through a property whose type declares the event.
  self.assertEqual({
    'href': 'foo.html#event-foo_t3-foo_t3_e1',
    'text': 'foo_t3.foo_t3_e1',
    'name': 'foo_t3_e1'
  }, resolver.GetLink('foo.foo_p1.foo_t3_e1', 'foo'))
  self.assertEqual({
    'href': 'bar.html#property-bar_t1-bar_t1_p1',
    'text': 'bar.bar_t1.bar_t1_p1',
    'name': 'bar_t1_p1'
  }, resolver.GetLink('bar.bar_p3.bar_t1_p1', 'foo'))
  self.assertEqual({
    'href': 'bar.html#property-bar_t1-bar_t1_p1',
    'text': 'bar_t1.bar_t1_p1',
    'name': 'bar_t1_p1'
  }, resolver.GetLink('bar_p3.bar_t1_p1', 'bar'))
  # Unresolvable references return None.
  self.assertEqual(None, resolver.GetLink('bar.bar_p3.bar_t2_p1', 'bar'))
  self.assertEqual(None, resolver.GetLink('bar.bon.bar_e3', 'bar'))
  self.assertEqual(None, resolver.GetLink('bar_p3', 'baz.bon'))
  self.assertEqual(None, resolver.GetLink('falafel.faf', 'a'))
  self.assertEqual(None, resolver.GetLink('bar_p3', 'foo'))
  # ResolveAllLinks rewrites every $ref: occurrence in a block of text.
  self.assertEqual(
      'Hello <a href="bar.bon.html#property-bar_bon_p3">bar_bon_p3</a>, '
      '<a href="bar.bon.html#property-bar_bon_p3">Bon Bon</a>, '
      '<a href="bar.bon.html#property-bar_bon_p3">bar_bon_p3</a>',
      resolver.ResolveAllLinks(
          'Hello $ref:bar_bon_p3, $ref:[bar_bon_p3 Bon Bon], $ref:bar_bon_p3',
          'bar.bon'))
  self.assertEqual(
      'I like <a href="bar.html#property-bar_t1-bar_t1_p1">food</a>.',
      resolver.ResolveAllLinks(
          'I like $ref:[bar.bar_p3.bar_t1_p1 food].',
          'foo'))
  self.assertEqual('Ref <a href="bar.html#type-bon">bon</a>',
                   resolver.ResolveAllLinks('Ref $ref:[bar.bon]', 'bar'))
def testGetLink(self):
  """Exercises GetLink/ResolveAllLinks (keyword-namespace API variant).

  GetLink(ref, namespace=...) returns a dict with 'href', 'text' and
  'name', or None when the reference cannot be resolved.
  """
  data_source = FakeAPIDataSource(
      json.loads(self._ReadLocalFile('fake_data_source.json')))
  resolver = ReferenceResolver(data_source,
                               data_source,
                               InMemoryObjectStore(''))
  # A bare API name links to that API's page.
  self.assertEqual({
    'href': 'foo.html',
    'text': 'foo',
    'name': 'foo'
  }, resolver.GetLink('foo', namespace='baz'))
  self.assertEqual({
    'href': 'foo.html#type-foo_t1',
    'text': 'foo.foo_t1',
    'name': 'foo_t1'
  }, resolver.GetLink('foo.foo_t1', namespace='baz'))
  self.assertEqual({
    'href': 'baz.html#event-baz_e1',
    'text': 'baz_e1',
    'name': 'baz_e1'
  }, resolver.GetLink('baz.baz_e1', namespace='baz'))
  # Unqualified reference resolved within its own namespace.
  self.assertEqual({
    'href': 'baz.html#event-baz_e1',
    'text': 'baz_e1',
    'name': 'baz_e1'
  }, resolver.GetLink('baz_e1', namespace='baz'))
  self.assertEqual({
    'href': 'foo.html#method-foo_f1',
    'text': 'foo.foo_f1',
    'name': 'foo_f1'
  }, resolver.GetLink('foo.foo_f1', namespace='baz'))
  self.assertEqual({
    'href': 'foo.html#property-foo_p3',
    'text': 'foo.foo_p3',
    'name': 'foo_p3'
  }, resolver.GetLink('foo.foo_p3', namespace='baz'))
  # Nested ("bar.bon") namespaces.
  self.assertEqual({
    'href': 'bar.bon.html#type-bar_bon_t3',
    'text': 'bar.bon.bar_bon_t3',
    'name': 'bar_bon_t3'
  }, resolver.GetLink('bar.bon.bar_bon_t3', namespace='baz'))
  self.assertEqual({
    'href': 'bar.bon.html#property-bar_bon_p3',
    'text': 'bar_bon_p3',
    'name': 'bar_bon_p3'
  }, resolver.GetLink('bar_bon_p3', namespace='bar.bon'))
  self.assertEqual({
    'href': 'bar.bon.html#property-bar_bon_p3',
    'text': 'bar_bon_p3',
    'name': 'bar_bon_p3'
  }, resolver.GetLink('bar.bon.bar_bon_p3', namespace='bar.bon'))
  self.assertEqual({
    'href': 'bar.html#event-bar_e2',
    'text': 'bar_e2',
    'name': 'bar_e2'
  }, resolver.GetLink('bar.bar_e2', namespace='bar'))
  self.assertEqual({
    'href': 'bar.html#type-bon',
    'text': 'bon',
    'name': 'bon'
  }, resolver.GetLink('bar.bon', namespace='bar'))
  # Members of nested types resolve through the containing type.
  self.assertEqual({
    'href': 'foo.html#event-foo_t3-foo_t3_e1',
    'text': 'foo_t3.foo_t3_e1',
    'name': 'foo_t3_e1'
  }, resolver.GetLink('foo_t3.foo_t3_e1', namespace='foo'))
  self.assertEqual({
    'href': 'foo.html#event-foo_t3-foo_t3_e1',
    'text': 'foo_t3.foo_t3_e1',
    'name': 'foo_t3_e1'
  }, resolver.GetLink('foo.foo_t3.foo_t3_e1', namespace='foo'))
  # Resolution may also pass through a property whose type declares the event.
  self.assertEqual({
    'href': 'foo.html#event-foo_t3-foo_t3_e1',
    'text': 'foo_t3.foo_t3_e1',
    'name': 'foo_t3_e1'
  }, resolver.GetLink('foo.foo_p1.foo_t3_e1', namespace='foo'))
  self.assertEqual({
    'href': 'bar.html#property-bar_t1-bar_t1_p1',
    'text': 'bar.bar_t1.bar_t1_p1',
    'name': 'bar_t1_p1'
  }, resolver.GetLink('bar.bar_p3.bar_t1_p1', namespace='foo'))
  self.assertEqual({
    'href': 'bar.html#property-bar_t1-bar_t1_p1',
    'text': 'bar_t1.bar_t1_p1',
    'name': 'bar_t1_p1'
  }, resolver.GetLink('bar_p3.bar_t1_p1', namespace='bar'))
  # Unresolvable references return None.
  self.assertEqual(
      None, resolver.GetLink('bar.bar_p3.bar_t2_p1', namespace='bar'))
  self.assertEqual(
      None, resolver.GetLink('bar.bon.bar_e3', namespace='bar'))
  self.assertEqual(
      None, resolver.GetLink('bar_p3', namespace='baz.bon'))
  self.assertEqual(
      None, resolver.GetLink('falafel.faf', namespace='a'))
  self.assertEqual(
      None, resolver.GetLink('bar_p3', namespace='foo'))
  # ResolveAllLinks rewrites every $ref: occurrence in a block of text.
  self.assertEqual(
      'Hello <a href="bar.bon.html#property-bar_bon_p3">bar_bon_p3</a>, '
      '<a href="bar.bon.html#property-bar_bon_p3">Bon Bon</a>, '
      '<a href="bar.bon.html#property-bar_bon_p3">bar_bon_p3</a>',
      resolver.ResolveAllLinks(
          'Hello $ref:bar_bon_p3, $ref:[bar_bon_p3 Bon Bon], $ref:bar_bon_p3',
          namespace='bar.bon'))
  self.assertEqual(
      'I like <a href="bar.html#property-bar_t1-bar_t1_p1">food</a>.',
      resolver.ResolveAllLinks('I like $ref:[bar.bar_p3.bar_t1_p1 food].',
                               namespace='foo'))
  self.assertEqual(
      'Ref <a href="foo.html">It\'s foo!</a>',
      resolver.ResolveAllLinks('Ref $ref:[foo It\'s foo!]',
                               namespace='bar'))
  self.assertEqual(
      'Ref <a href="bar.html#type-bon">Bon</a>',
      resolver.ResolveAllLinks('Ref $ref:[bar.bon Bon]',
                               namespace='bar'))
  # Different kinds of whitespace can be significant inside <pre> tags.
  self.assertEqual(
      '<pre><a href="bar.html#type-bon">bar.bon</a>({\nkey: value})',
      resolver.ResolveAllLinks('<pre>$ref:[bar.bon]({\nkey: value})',
                               namespace='baz'))
  # Allow bare "$ref:foo.bar." at the end of a string.
  self.assertEqual(
      '<a href="bar.html#type-bon">bar.bon</a>.',
      resolver.ResolveAllLinks('$ref:bar.bon.',
                               namespace='baz'))
def setUp(self):
  """Creates a CompiledFileSystem factory over the sidenav test data."""
  self._base_path = os.path.join(sys.path[0],
                                 'test_data',
                                 'sidenav_data_source')
  local_fs = LocalFileSystem(self._base_path)
  self._cache_factory = CompiledFileSystem.Factory(
      local_fs, InMemoryObjectStore('fake_branch'))
from samples_data_source import SamplesDataSource
from server_instance import ServerInstance
from subversion_file_system import SubversionFileSystem
from template_data_source import TemplateDataSource
from third_party.json_schema_compiler.model import UnixName
import url_constants

# The branch that the server will default to when no branch is specified in the
# URL. This is necessary because it is not possible to pass flags to the script
# handler.
# Production settings:
DEFAULT_BRANCHES = {'extensions': 'stable', 'apps': 'trunk'}
# Dev settings:
# DEFAULT_BRANCHES = { 'extensions': 'local', 'apps': 'local' }

# Branch lookups are shared process-wide and cached in their own namespace.
BRANCH_UTILITY_MEMCACHE = InMemoryObjectStore('branch_utility')
BRANCH_UTILITY = BranchUtility(url_constants.OMAHA_PROXY_URL,
                               DEFAULT_BRANCHES,
                               AppEngineUrlFetcher(None),
                               BRANCH_UTILITY_MEMCACHE)

# Apps samples are served out of Github rather than the Chromium tree.
GITHUB_MEMCACHE = InMemoryObjectStore('github')
GITHUB_FILE_SYSTEM = GithubFileSystem(
    AppEngineUrlFetcher(url_constants.GITHUB_URL),
    GITHUB_MEMCACHE,
    AppEngineBlobstore())
GITHUB_COMPILED_FILE_SYSTEM = CompiledFileSystem.Factory(GITHUB_FILE_SYSTEM,
                                                         GITHUB_MEMCACHE)

# Paths relative to the repository / docs roots.
EXTENSIONS_PATH = 'chrome/common/extensions'
DOCS_PATH = 'docs'
API_PATH = 'api'
TEMPLATE_PATH = DOCS_PATH + '/templates'
def testGetLink(self):
  """Exercises GetLink(namespace, ref) against the fake API fixture.

  Resolvable references yield a dict with 'href' and 'text'; anything
  that cannot be resolved yields None.
  """
  data_source = FakeAPIDataSource(
      json.loads(self._ReadLocalFile('fake_data_source.json')))
  resolver = ReferenceResolver(data_source,
                               data_source,
                               InMemoryObjectStore(''))
  # (namespace, ref, expected href, expected text) for resolvable refs.
  resolvable = [
    ('baz', 'foo.foo_t1', 'foo.html#type-foo_t1', 'foo.foo_t1'),
    ('baz', 'baz.baz_e1', 'baz.html#event-baz_e1', 'baz_e1'),
    ('baz', 'baz_e1', '#event-baz_e1', 'baz_e1'),
    ('baz', 'foo.foo_f1', 'foo.html#method-foo_f1', 'foo.foo_f1'),
    ('baz', 'foo.foo_p3', 'foo.html#property-foo_p3', 'foo.foo_p3'),
    ('baz', 'bar.bon.bar_bon_t3', 'bar.bon.html#type-bar_bon_t3',
     'bar.bon.bar_bon_t3'),
    ('bar.bon', 'bar_bon_p3', '#property-bar_bon_p3', 'bar_bon_p3'),
    ('bar.bon', 'bar.bon.bar_bon_p3', 'bar.bon.html#property-bar_bon_p3',
     'bar_bon_p3'),
    ('bar', 'bar.bar_e2', 'bar.html#event-bar_e2', 'bar_e2'),
    ('bar', 'bar.bon', 'bar.html#type-bon', 'bon'),
    ('foo', 'foo_t3.foo_t3_e1', '#event-foo_t3-foo_t3_e1',
     'foo_t3.foo_t3_e1'),
    ('foo', 'foo.foo_t3.foo_t3_e1', 'foo.html#event-foo_t3-foo_t3_e1',
     'foo_t3.foo_t3_e1'),
  ]
  for namespace, ref, href, text in resolvable:
    self.assertEqual({'href': href, 'text': text},
                     resolver.GetLink(namespace, ref))
  # Unresolvable references return None.
  for namespace, ref in (('bar', 'bar.bon.bar_e3'),
                         ('baz.bon', 'bar_p3'),
                         ('a', 'falafel.faf'),
                         ('foo', 'bar_p3')):
    self.assertEqual(None, resolver.GetLink(namespace, ref))
from samples_data_source import SamplesDataSource
from server_instance import ServerInstance
from sidenav_data_source import SidenavDataSource
from subversion_file_system import SubversionFileSystem
from template_data_source import TemplateDataSource
from third_party.json_schema_compiler.model import UnixName
import url_constants

# Increment this version to force the server to reload all pages in the first
# cron job that is run.
_VERSION = 1

# The default channel to serve docs for if no channel is specified.
_DEFAULT_CHANNEL = 'stable'

# Branch lookups are shared process-wide and cached in their own namespace.
BRANCH_UTILITY_MEMCACHE = InMemoryObjectStore('branch_utility')
BRANCH_UTILITY = BranchUtility(url_constants.OMAHA_PROXY_URL,
                               AppEngineUrlFetcher(None),
                               BRANCH_UTILITY_MEMCACHE)

# Apps samples are served out of Github rather than the Chromium tree.
GITHUB_MEMCACHE = InMemoryObjectStore('github')
GITHUB_FILE_SYSTEM = GithubFileSystem(
    AppEngineUrlFetcher(url_constants.GITHUB_URL),
    GITHUB_MEMCACHE,
    AppEngineBlobstore())
GITHUB_COMPILED_FILE_SYSTEM = CompiledFileSystem.Factory(GITHUB_FILE_SYSTEM,
                                                         GITHUB_MEMCACHE)

# Paths relative to the repository / docs roots.
EXTENSIONS_PATH = 'chrome/common/extensions'
DOCS_PATH = 'docs'
API_PATH = 'api'