def get_commits(self, start=None):
    """Returns a list of commits.

    This is paginated via the 'start' parameter. Any exceptions are
    expected to be handled by the caller.
    """
    service = self.hosting_service
    cache_key = make_cache_key('repository-commits:%s:%s'
                               % (self.pk, start))

    if service:
        def fetch_commits():
            return service.get_commits(self, start)
    else:
        def fetch_commits():
            return self.get_scmtool().get_commits(start)

    # We cache both the entire list for 'start', as well as each individual
    # commit. This allows us to reduce API load when people are looking at
    # the "new review request" page more frequently than they're pushing
    # code, and will usually save 1 API request when they go to actually
    # create a new review request.
    commits = cache_memoize(cache_key, fetch_commits)

    for commit in commits:
        cache.set(self.get_commit_cache_key(commit.id),
                  commit,
                  self.COMMITS_CACHE_PERIOD)

    return commits
def get_file_exists(self, path, revision, base_commit_id=None,
                    request=None):
    """Returns whether or not a file exists in the repository.

    If the repository is backed by a hosting service, this will go
    through that. Otherwise, it will attempt to directly access the
    repository.

    The result of this call will be cached, making future lookups
    of this path and revision on this repository faster.
    """
    key = self._make_file_exists_cache_key(path, revision, base_commit_id)
    cached_value = cache.get(make_cache_key(key))

    # A cached '1' means a previous lookup confirmed the file exists.
    if cached_value == '1':
        return True

    exists = self._get_file_exists_uncached(path, revision,
                                            base_commit_id, request)

    # Only positive results are cached, so that a file which appears
    # later isn't permanently reported as missing.
    if exists:
        cache_memoize(key, lambda: '1')

    return exists
def _get_file_exists_uncached(self, path, revision, request):
    """Internal function for checking that a file exists.

    This is called by get_file_exists if the file isn't already in the
    cache. This function is smart enough to check if the file exists
    in cache, and will use that for the result instead of making a
    separate call.
    """
    # First we check to see if we've fetched the file before. If so,
    # it's in there and we can just return that we have it.
    file_cache_key = make_cache_key(self._make_file_cache_key(path,
                                                              revision))

    # Note: cache.has_key() is deprecated in Django; 'in' is the
    # supported membership test on cache backends.
    if file_cache_key in cache:
        exists = True
    else:
        # We didn't have that in the cache, so check from the repository.
        checking_file_exists.send(sender=self,
                                  path=path,
                                  revision=revision,
                                  request=request)

        hosting_service = self.hosting_service

        if hosting_service:
            exists = hosting_service.get_file_exists(self, path, revision)
        else:
            exists = self.get_scmtool().file_exists(path, revision)

        checked_file_exists.send(sender=self,
                                 path=path,
                                 revision=revision,
                                 request=request,
                                 exists=exists)

    # We're expected to return a string for cache_memoize, so serialize
    # this as small as possible.
    if exists:
        return "1"
    else:
        return "0"
def get_branches(self):
    """Returns a list of branches."""
    service = self.hosting_service
    key = make_cache_key('repository-branches:%s' % self.pk)

    # Dispatch to the hosting service when one backs this repository;
    # otherwise fall back to the SCMTool's own branch listing.
    if service:
        fetch_branches = lambda: service.get_branches(self)
    else:
        fetch_branches = self.get_scmtool().get_branches

    return cache_memoize(key, fetch_branches, self.BRANCHES_CACHE_PERIOD)
def __init__(self, key):
    """Initializes the manager and registers it globally."""
    self.key = key
    self.pkg_resources = None

    # Loaded extension classes and their live instances, keyed by
    # extension.
    self._extension_classes = dict()
    self._extension_instances = dict()

    # State synchronization across processes: the cache key holds a
    # generation counter; None means we haven't synced yet.
    self._sync_key = make_cache_key('extensionmgr:%s:gen' % key)
    self._last_sync_gen = None

    self.dynamic_urls = DynamicURLResolver()

    # Register last, once the manager is fully constructed.
    _extension_managers.append(self)
def _get_file_exists_uncached(self, path, revision, base_commit_id,
                              request):
    """Internal function for checking that a file exists.

    This is called by get_file_exists if the file isn't already in the
    cache. This function is smart enough to check if the file exists
    in cache, and will use that for the result instead of making a
    separate call.
    """
    # First we check to see if we've fetched the file before. If so,
    # it's in there and we can just return that we have it.
    file_cache_key = make_cache_key(
        self._make_file_cache_key(path, revision, base_commit_id))

    # Note: cache.has_key() is deprecated in Django; 'in' is the
    # supported membership test on cache backends.
    if file_cache_key in cache:
        exists = True
    else:
        # We didn't have that in the cache, so check from the repository.
        checking_file_exists.send(sender=self,
                                  path=path,
                                  revision=revision,
                                  base_commit_id=base_commit_id,
                                  request=request)

        hosting_service = self.hosting_service

        if hosting_service:
            exists = hosting_service.get_file_exists(
                self, path, revision,
                base_commit_id=base_commit_id)
        else:
            exists = self.get_scmtool().file_exists(path, revision)

        checked_file_exists.send(sender=self,
                                 path=path,
                                 revision=revision,
                                 base_commit_id=base_commit_id,
                                 request=request,
                                 exists=exists)

    return exists
def get_file_exists(self, path, revision, request=None):
    """Returns whether or not a file exists in the repository.

    If the repository is backed by a hosting service, this will go
    through that. Otherwise, it will attempt to directly access the
    repository.

    The result of this call will be cached, making future lookups
    of this path and revision on this repository faster.
    """
    key = self._make_file_exists_cache_key(path, revision)

    # A cached '1' means a previous lookup confirmed the file exists.
    if cache.get(make_cache_key(key)) == '1':
        return True

    exists = self._get_file_exists_uncached(path, revision, request)

    # Only positive results are cached, so that a file which appears
    # later isn't permanently reported as missing.
    if exists:
        cache_memoize(key, lambda: '1')

    return exists
def _delete_widget_cache(*args, **kwargs):
    """Clear the cache to keep the admin dashboard up to date."""
    # Widget cache keys are suffixed with today's date, so only
    # today's entries need to be invalidated.
    today = str(datetime.date.today())
    keys = [
        make_cache_key(widget + today)
        for widget in _CACHED_WIDGETS
    ]

    cache.delete_many(keys)