def GetIdentity(self):
  # NOTE: no revision here, since it would mess up the caching of reads. It
  # probably doesn't matter since all the caching classes will use the result
  # of Stat to decide whether to re-read - and Stat has a ceiling of the
  # revision - so when the revision changes, so might Stat. That is enough.
  return '@'.join((self.__class__.__name__, StringIdentity(self._svn_path)))

def __init__(self, file_system, compiled_fs_factory, object_store_creator,
             platform):
  def create_features_cache(features_type, feature_file, *extra_paths):
    return _FeaturesCache(file_system,
                          compiled_fs_factory,
                          [Join(path, feature_file) for path in API_PATHS],
                          extra_paths,
                          self._platform,
                          features_type)

  if platform not in GetExtensionTypes():
    self._platform = PlatformToExtensionType(platform)
  else:
    self._platform = platform

  self._caches = {
    'api': create_features_cache('api', _API_FEATURES),
    'manifest': create_features_cache('manifest', _MANIFEST_FEATURES,
                                      Join(JSON_TEMPLATES, 'manifest.json')),
    'permission': create_features_cache(
        'permission', _PERMISSION_FEATURES,
        Join(JSON_TEMPLATES, 'permissions.json'))
  }
  # Namespace the object store by the file system ID because this class is
  # used by the availability finder cross-channel.
  self._object_store = object_store_creator.Create(
      _FeaturesCache,
      category=StringIdentity(file_system.GetIdentity(), self._platform))

def GetIdentity(self):
  # NOTE: Do not use commit information to create the string identity.
  # Doing so will mess up caching.
  if self._commit is None and self._branch != 'master':
    str_id = GITILES_BRANCH_BASE
  else:
    str_id = GITILES_BASE
  return '@'.join((self.__class__.__name__, StringIdentity(str_id)))

def GetIdentity(self):
  if self._branch == 'master':
    # A master FS always carries the same identity even if pinned to a commit.
    str_id = 'master'
  elif self._commit is not None:
    str_id = self._commit
  else:
    str_id = '%s/%s' % (GITILES_BRANCHES_PATH, self._branch)
  return '@'.join((self.__class__.__name__,
                   StringIdentity('%s/%s/%s' % (GITILES_BASE,
                                                GITILES_SRC_ROOT,
                                                str_id))))

def Create(self, file_system, populate_function, cls, category=None):
  return ChainedCompiledFileSystem(
      # Chain of CompiledFileSystem instances.
      tuple(CompiledFileSystem.Factory(self._object_store).Create(
                fs, populate_function, cls, category=category)
            for fs in [file_system] + self._file_system_chain),
      # Identity, as computed by all file systems.
      StringIdentity(*(fs.GetIdentity() for fs in self._file_system_chain)))

def __init__(self, server_instance, _):
  self._features_bundle = server_instance.features_bundle
  self._api_models = server_instance.api_models
  self._object_store = server_instance.object_store_creator.Create(
      # Update the model when the API or Features model updates.
      APIListDataSource,
      category=StringIdentity(self._features_bundle.GetIdentity(),
                              self._api_models.GetIdentity()))
  self._api_categorizer = server_instance.api_categorizer
  self._availability_finder = server_instance.availability_finder

def testStringIdentity(self):
  # The important part really is that these are all different.
  self.assertEqual('C+7Hteo/', StringIdentity('foo'))
  self.assertEqual('Ys23Ag/5', StringIdentity('bar'))
  self.assertEqual('T5FOBOjX', StringIdentity('foo', 'bar'))
  self.assertEqual('K7XzI1GD', StringIdentity('bar', 'foo'))
  self.assertEqual('CXypceHn', StringIdentity('foo', 'bar', 'baz'))
  self.assertEqual('gGo0GTF6', StringIdentity('foo', 'baz', 'bar'))

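# The expected values in the test above are consistent with StringIdentity
# returning the first eight characters of a base64-encoded SHA-1 digest
# (base64(sha1('foo')) starts with 'C+7Hteo/'), with each extra argument folded
# in by re-hashing - which would explain why ('foo', 'bar') and ('bar', 'foo')
# differ. A minimal sketch along those lines follows; it is a reconstruction
# from the test expectations and an assumption, not the actual implementation.
import base64
import hashlib

def StringIdentity(first, *more):
  def encode(string):
    # Hash the UTF-8 bytes and return the digest as a base64 str; works on
    # both Python 2 and Python 3.
    digest = hashlib.sha1(string.encode('utf-8')).digest()
    return base64.b64encode(digest).decode('ascii')
  identity = encode(first)
  for extra in more:
    # Fold each additional argument into the running identity, so argument
    # order matters.
    identity = encode(identity + extra)
  return identity[:8]
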
def __init__(self, server_instance, request):
  file_system = server_instance.host_file_system_provider.GetMaster()
  self._json_cache = server_instance.compiled_fs_factory.ForJson(file_system)
  self._template_cache = server_instance.compiled_fs_factory.ForTemplates(
      file_system)
  self._platform_bundle = server_instance.platform_bundle
  self._view_cache = server_instance.object_store_creator.Create(
      APIDataSource,
      # Update the models when any of templates, APIs, or Features change.
      category=StringIdentity(self._json_cache.GetIdentity(),
                              self._template_cache.GetIdentity(),
                              self._platform_bundle.GetIdentity()))
  # This caches the result of _LoadEventByName.
  self._event_byname_futures = {}
  self._request = request

def __init__(self, server_instance, request):
  file_system = server_instance.host_file_system_provider.GetTrunk()
  self._json_cache = server_instance.compiled_fs_factory.ForJson(
      file_system)
  self._template_cache = server_instance.compiled_fs_factory.ForTemplates(
      file_system)
  self._availability_finder = server_instance.availability_finder
  self._api_models = server_instance.api_models
  self._features_bundle = server_instance.features_bundle
  self._model_cache = server_instance.object_store_creator.Create(
      APIDataSource,
      # Update the models when any of templates, APIs, or Features change.
      category=StringIdentity(self._json_cache.GetIdentity(),
                              self._template_cache.GetIdentity(),
                              self._api_models.GetIdentity(),
                              self._features_bundle.GetIdentity()))
  # This caches the result of _LoadEventByName.
  self._event_byname = None
  self._samples = server_instance.samples_data_source_factory.Create(
      request)

def GetIdentity(self):
  return '%s@%s' % (self.__class__.__name__, StringIdentity(self._url))

def GetIdentity(self):
  return '@'.join((self.__class__.__name__,
                   StringIdentity(self._base_path)))

def GetIdentity(self):
  # NOTE: no revision here, consider it just an implementation detail of the
  # file version that is handled by Stat.
  return '@'.join((self.__class__.__name__, StringIdentity(self._svn_path)))

def GetIdentity(self):
  return '%s' % StringIdentity(self.__class__.__name__ + self._repo_key)

def GetIdentity(self):
  return StringIdentity(
      '%s/%s' % (self._file_system.GetIdentity(), self._root))

def GetIdentity(self):
  return '@'.join((self.__class__.__name__, StringIdentity(self._bucket)))