Esempio n. 1
0
    def history(self):
        """Get page history.

        History can take a long time to generate for repositories with many commits.
        This returns an iterator to avoid having to load them all at once, and caches
        as it goes.

        The cached history is a list of rev dicts in which a
        ``{'_cache_missing': True}`` placeholder marks a gap whose revisions
        still need to be generated.

        :return: iter -- Iterator over dicts

        """
        cache_head = []
        # Seed with a single "missing" marker when nothing is cached yet;
        # that forces a full generation pass in the loop below.
        cache_tail = cache.get(self._cache_key('history')) or [{'_cache_missing': True}]
        while True:
            if not cache_tail:
                return
            # Yield cached revisions until the first missing-marker is hit.
            # NOTE: `index` and `cached_rev` deliberately leak out of this
            # loop; if no marker is present they refer to the last element.
            for index, cached_rev in enumerate(cache_tail):
                if cached_rev.get("_cache_missing"):
                    break
                else:
                    yield cached_rev
            # Move the already-yielded prefix to the head; the element at
            # `index` (normally the marker) is dropped and regenerated below.
            cache_head.extend(cache_tail[:index])
            cache_tail = cache_tail[index+1:]
            start_sha = cached_rev.get('sha')
            end_sha = cache_tail[0].get('sha') if cache_tail else None
            # Fill the gap between start_sha and end_sha, persisting progress
            # to the cache after every revision so an interrupted iteration
            # can resume later without regenerating what was already done.
            for rev in self._iter_revs(start_sha=start_sha, end_sha=end_sha, filename=cached_rev.get('filename')):
                cache_head.append(rev)
                placeholder = {
                    '_cache_missing': True,
                    'sha': rev['sha'],
                    'filename': rev['new_filename']
                }
                cache.set(self._cache_key('history'), cache_head + [placeholder] + cache_tail)
                yield rev
            cache.set(self._cache_key('history'), cache_head + cache_tail)
Esempio n. 2
0
    def get_page(self, name, sha='HEAD'):
        """Get page data, partials, commit info.

        :param name: Name of page.
        :param sha: Commit sha.
        :return: dict or None -- ``None`` when the page (or ``HEAD``) does
            not exist.

        """
        cached = cache.get(self._cache_key(name, sha))
        if cached:
            return cached

        filename = cname_to_filename(name).encode('utf8')
        sha = sha.encode('latin-1')

        # Keep the try body minimal: only the repository lookup legitimately
        # raises KeyError (e.g. HEAD doesn't exist yet).  The original
        # wrapped the whole method body, which also masked KeyErrors raised
        # by the metadata/history handling below.
        try:
            data = self.gittle.get_commit_files(sha,
                                                paths=[filename]).get(filename)
        except KeyError:
            # HEAD doesn't exist yet
            return None

        if not data:
            return None
        # Resolve pages listed under the 'import' meta key so templates can
        # include them as partials.
        partials = {}
        if data.get('data'):
            meta = self.get_meta(data['data'])
            if meta and 'import' in meta:
                for partial_name in meta['import']:
                    partials[partial_name] = self.get_page(partial_name)
        data['partials'] = partials
        data['info'] = self.get_history(name, limit=1)[0]
        # NOTE(review): the cache key is computed here with the *encoded*
        # sha, while the read at the top used the original string — on
        # Python 3 these keys may differ; confirm _cache_key normalizes.
        cache.set(self._cache_key(name, sha), data)
        return data
Esempio n. 3
0
    def history(self):
        """Iterate over this page's revision history.

        Generating history for a repository with many commits is slow, so
        revisions are yielded lazily and written back to the cache as they
        are produced.  A ``{'_cache_missing': True}`` entry in the cached
        list marks a gap that still has to be generated.

        :return: iter -- Iterator over dicts

        """
        done = []
        pending = cache.get(self._cache_key('history')) or [{'_cache_missing': True}]
        while pending:
            # Serve cached revisions up to the first gap marker.  When the
            # list holds no marker, `pos`/`entry` end on the last element,
            # mirroring the leaked loop variables of the cached scan.
            pos, entry = 0, pending[0]
            for pos, entry in enumerate(pending):
                if entry.get("_cache_missing"):
                    break
                yield entry
            done.extend(pending[:pos])
            pending = pending[pos + 1:]
            next_sha = pending[0].get('sha') if pending else None
            # Regenerate the gap, persisting after each revision so a later
            # call can resume from wherever this one stops.
            revs = self._iter_revs(start_sha=entry.get('sha'),
                                   end_sha=next_sha,
                                   filename=entry.get('filename'))
            for rev in revs:
                done.append(rev)
                marker = {
                    '_cache_missing': True,
                    'sha': rev['sha'],
                    'filename': rev['new_filename']
                }
                cache.set(self._cache_key('history'), done + [marker] + pending)
                yield rev
            cache.set(self._cache_key('history'), done + pending)
Esempio n. 4
0
    def get_page(self, name, sha='HEAD'):
        """Fetch a page's data together with its partials and commit info.

        :param name: Name of page.
        :param sha: Commit sha.
        :return: dict

        """
        hit = cache.get(self._cache_key(name, sha))
        if hit:
            return hit

        filename = cname_to_filename(name).encode('utf8')
        sha = sha.encode('latin-1')

        try:
            files = self.gittle.get_commit_files(sha, paths=[filename])
            data = files.get(filename)
            if not data:
                return None
            # Pull in any pages named under the 'import' meta key so they
            # can be rendered as partials.
            partials = {}
            raw = data.get('data')
            if raw:
                meta = self.get_meta(raw)
                if meta and 'import' in meta:
                    for partial_name in meta['import']:
                        partials[partial_name] = self.get_page(partial_name)
            data['partials'] = partials
            data['info'] = self.get_history(name, limit=1)[0]
            cache.set(self._cache_key(name, sha), data)
            return data
        except KeyError:
            # HEAD doesn't exist yet
            return None
Esempio n. 5
0
 def _invalidate_cache(self, save_history=None):
     """Drop this page's cached data, optionally retaining its history.

     :param save_history: Revision list to keep as the cached history.
         A ``_cache_missing`` marker is prepended (unless one already
         leads the list) so the next history pass regenerates the gap.
     """
     cache.delete(self._cache_key('data'))
     if not save_history:
         cache.delete(self._cache_key('history'))
         return
     if not save_history[0].get('_cache_missing'):
         save_history = [{'_cache_missing': True}] + save_history
     cache.set(self._cache_key('history'), save_history)
Esempio n. 6
0
 def _invalidate_cache(self, save_history=None):
     """Invalidate this page's cached data and (optionally) its history.

     :param save_history: Optional revision list to keep as the cached
         history instead of deleting it outright.
     """
     cache.delete(self._cache_key('data'))
     if save_history:
         # Ensure a gap marker leads the list so a later history pass
         # knows there may be newer revisions to regenerate.
         if not save_history[0].get('_cache_missing'):
             save_history = [{'_cache_missing': True}] + save_history
         cache.set(self._cache_key('history'), save_history)
     else:
         cache.delete(self._cache_key('history'))
Esempio n. 7
0
    def data(self):
        """Return this page revision's raw file contents, caching the result."""
        key = self._cache_key('data')
        hit = cache.get(key)
        if hit:
            return hit

        files = self.wiki.gittle.get_commit_files(self.sha, paths=[self.filename])
        payload = files.get(self.filename).get('data')
        cache.set(key, payload)
        return payload
Esempio n. 8
0
    def data(self):
        """Return the page blob's raw contents at this revision, with caching."""
        key = self._cache_key('data')
        stored = cache.get(key)
        if stored:
            return stored

        # Resolve the blob for this page's path inside the commit's tree,
        # then read its contents straight from the repository.
        tree = self.wiki.repo[self.sha].tree
        _mode, blob_sha = tree_lookup_path(self.wiki.repo.get_object,
                                           tree,
                                           self.filename.encode())
        blob = self.wiki.repo[blob_sha].data
        cache.set(key, blob)
        return blob
Esempio n. 9
0
    def data(self):
        """Return the raw blob contents of this page at ``self.sha``, caching the result."""
        cache_key = self._cache_key('data')
        cached = cache.get(cache_key)
        if cached:
            return cached

        # Resolve the blob sha for this page's path within the commit's
        # tree (tree paths are bytes, hence the encode), then read the blob
        # contents directly from the repo's object store.
        mode, sha = tree_lookup_path(self.wiki.repo.get_object, self.wiki.repo[self.sha].tree, self.filename.encode())
        data = self.wiki.repo[sha].data
        cache.set(cache_key, data)
        return data
Esempio n. 10
0
    def geojson(self):
        """Return the companion ``.geojson`` blob for this page, with caching.

        The geojson file is expected to sit next to the page file, with the
        page's 3-character extension (e.g. ``.md``) replaced by ``.geojson``.

        :return: Raw geojson file contents (blob data).
        """
        cache_key = self._cache_key('geojson')
        cached = cache.get(cache_key)
        if cached:
            return cached

        # self.filename is a str here (it is sliced and concatenated with a
        # str), but dulwich's tree_lookup_path takes a bytes path — encode
        # it, matching how the sibling data() encodes self.filename.
        filename = (self.filename[:-3] + '.geojson').encode()
        mode, sha = tree_lookup_path(self.wiki.repo.get_object,
                                     self.wiki.repo[self.sha].tree, filename)
        data = self.wiki.repo[sha].data
        cache.set(cache_key, data)
        return data
Esempio n. 11
0
    def data(self):
        """Return the raw blob contents of this page at ``self.sha``.

        The result is cached under the page's ``data`` cache key; the blob
        is resolved by looking the file path up in the commit's tree.
        (Commented-out debug prints removed.)

        :return: Raw file contents (blob data).
        """
        cache_key = self._cache_key('data')
        cached = cache.get(cache_key)
        if cached:
            return cached

        # NOTE(review): sibling implementations encode the path before the
        # lookup — confirm self.filename is already bytes here.
        mode, sha = tree_lookup_path(self.wiki.repo.get_object,
                                     self.wiki.repo[self.sha].tree,
                                     self.filename)
        data = self.wiki.repo[sha].data
        cache.set(cache_key, data)
        return data