def history(self):
    """Get page history.

    History can take a long time to generate for repositories with many commits.
    This returns an iterator to avoid having to load them all at once, and caches
    as it goes.

    :return: iter -- Iterator over dicts
    """
    cache_head = []
    cache_tail = cache.get(self._cache_key('history')) or [{'_cache_missing': True}]
    while True:
        if not cache_tail:
            return
        for index, cached_rev in enumerate(cache_tail):
            if cached_rev.get('_cache_missing'):
                break
            else:
                yield cached_rev
        cache_head.extend(cache_tail[:index])
        cache_tail = cache_tail[index+1:]
        start_sha = cached_rev.get('sha')
        end_sha = cache_tail[0].get('sha') if cache_tail else None
        for rev in self._iter_revs(start_sha=start_sha, end_sha=end_sha,
                                   filename=cached_rev.get('filename')):
            cache_head.append(rev)
            placeholder = {
                '_cache_missing': True,
                'sha': rev['sha'],
                'filename': rev['new_filename']
            }
            cache.set(self._cache_key('history'), cache_head + [placeholder] + cache_tail)
            yield rev
        cache.set(self._cache_key('history'), cache_head + cache_tail)

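# Hypothetical usage sketch; `page` is assumed to be an instance of the class
# these methods belong to. Because history() is a generator backed by the
# cache above, taking only the first few revisions is cheap, and a later call
# resumes from the stored '_cache_missing' placeholder instead of re-walking
# the whole commit log.
from itertools import islice

recent_revs = list(islice(page.history(), 5))
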
def get_page(self, name, sha="HEAD"): cached = cache.get(name) if cached: return cached # commit = gittle.utils.git.commit_info(self.repo[sha]) name = self.cname_to_filename(name).encode("latin-1") sha = sha.encode("latin-1") try: data = self.repo.get_commit_files(sha, paths=[name]).get(name) if not data: return None partials = {} if data.get("data"): meta = self.get_meta(data["data"]) if meta and "import" in meta: for partial_name in meta["import"]: partials[partial_name] = self.get_page(partial_name) data["partials"] = partials return data except KeyError: # HEAD doesn't exist yet return None
def get_page(self, name, sha='HEAD'):
    """Get page data, partials, commit info.

    :param name: Name of page.
    :param sha: Commit sha.
    :return: dict
    """
    cached = cache.get(self._cache_key(name, sha))
    if cached:
        return cached
    # commit = gittle.utils.git.commit_info(self.repo[sha])
    filename = cname_to_filename(name).encode('utf8')
    sha = sha.encode('latin-1')
    try:
        data = self.gittle.get_commit_files(sha, paths=[filename]).get(filename)
        if not data:
            return None
        partials = {}
        if data.get('data'):
            meta = self.get_meta(data['data'])
            if meta and 'import' in meta:
                for partial_name in meta['import']:
                    partials[partial_name] = self.get_page(partial_name)
        data['partials'] = partials
        data['info'] = self.get_history(name, limit=1)[0]
        cache.set(self._cache_key(name, sha), data)
        return data
    except KeyError:
        # HEAD doesn't exist yet
        return None

def write(self, content, message=None, username=None, email=None):
    """Write page to git repo

    :param content: Content of page.
    :param message: Commit message.
    :param username: Commit Name.
    :param email: Commit Email.
    :return: Git commit sha1.
    """
    assert self.sha == b'HEAD'
    dirname = posixpath.join(self.wiki.path, posixpath.dirname(self.filename))
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    with open(self.wiki.path + "/" + self.filename, 'w') as f:
        f.write(content)
    if not message:
        message = "Updated %s" % self.name
    username, email = self._get_user(username, email)
    ret = self.wiki.commit(name=username,
                           email=email,
                           message=message,
                           files=[self.filename])
    old_history = cache.get(self._cache_key('history'))
    self._invalidate_cache(save_history=old_history)
    return ret

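# Hypothetical call sketch for write(); `page` and the commit details are
# assumptions for illustration. The method asserts the page is checked out at
# HEAD and returns the sha of the commit it creates.
sha = page.write(u"# Home\n\nWelcome to the wiki.",
                 message="Updated Home",
                 username="jdoe",
                 email="jdoe@example.com")
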
def get_page(self, name, sha='HEAD'):
    """Get page data, partials, and commit info at a commit."""
    cached = cache.get(name)
    if cached:
        return cached
    # commit = gittle.utils.git.commit_info(self.repo[sha])
    filename = cname_to_filename(name).encode('latin-1')
    sha = sha.encode('latin-1')
    try:
        data = self.gittle.get_commit_files(sha, paths=[filename]).get(filename)
        if not data:
            return None
        partials = {}
        if data.get('data'):
            meta = self.get_meta(data['data'])
            if meta and 'import' in meta:
                for partial_name in meta['import']:
                    partials[partial_name] = self.get_page(partial_name)
        data['partials'] = partials
        data['info'] = self.get_history(name, limit=1)[0]
        return data
    except KeyError:
        # HEAD doesn't exist yet
        return None

def data(self):
    """Page content at this commit (cached)."""
    cache_key = self._cache_key('data')
    cached = cache.get(cache_key)
    if cached:
        return cached
    data = self.wiki.gittle.get_commit_files(
        self.sha, paths=[self.filename]).get(self.filename).get('data')
    cache.set(cache_key, data)
    return data

def data(self):
    """Page content at this commit (cached)."""
    cache_key = self._cache_key('data')
    cached = cache.get(cache_key)
    if cached:
        return cached
    mode, sha = tree_lookup_path(self.wiki.repo.get_object,
                                 self.wiki.repo[self.sha].tree,
                                 self.filename.encode())
    data = self.wiki.repo[sha].data
    cache.set(cache_key, data)
    return data

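# For context, a minimal standalone sketch of the dulwich calls used above:
# tree_lookup_path() walks a commit's tree and returns the (mode, sha) of the
# blob at a given path, and the blob's .data attribute holds the raw bytes.
# The repository path and filename are assumptions for illustration.
from dulwich.repo import Repo
from dulwich.object_store import tree_lookup_path

repo = Repo('.')                      # assumed: a git repo in the current directory
head_commit = repo[repo.head()]       # commit object at HEAD
mode, blob_sha = tree_lookup_path(repo.get_object, head_commit.tree, b'Home.md')
content = repo[blob_sha].data         # raw bytes of Home.md at HEAD
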
def history_cache(self):
    """Get info about the history cache.

    :return: tuple -- (cached items, cache complete?)
    """
    cached_revs = cache.get(self._cache_key('history'))
    if not cached_revs:
        return 0, False
    elif any(rev.get('_cache_missing') for rev in cached_revs):
        return len(cached_revs) - 1, False
    return len(cached_revs), True

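# Hypothetical progress readout built on history_cache(); `page` is assumed
# to be an instance of the class these methods belong to.
cached_count, complete = page.history_cache()
if complete:
    print("history fully cached: %d revisions" % cached_count)
else:
    print("%d revisions cached so far; more to fetch" % cached_count)
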
def geojson(self):
    """GeoJSON sidecar for this page at this commit (cached)."""
    cache_key = self._cache_key('geojson')
    cached = cache.get(cache_key)
    if cached:
        return cached
    filename = self.filename[:-3] + '.geojson'
    # tree_lookup_path expects a bytes path
    mode, sha = tree_lookup_path(self.wiki.repo.get_object,
                                 self.wiki.repo[self.sha].tree,
                                 filename.encode())
    data = self.wiki.repo[sha].data
    cache.set(cache_key, data)
    return data

def get_page(self, name, sha='HEAD'):
    """Get page data, partials, commit info.

    :param name: Name of page.
    :param sha: Commit sha.
    :return: dict
    """
    cached = cache.get(name)
    if cached:
        return cached
    # commit = gittle.utils.git.commit_info(self.repo[sha])
    filename = cname_to_filename(name).encode('latin-1')
    sha = sha.encode('latin-1')
    namespace_path = os.path.join(self.path, os.path.splitext(filename)[0])
    namespace_cname = to_canonical(os.path.splitext(filename)[0])
    # If the page itself doesn't exist but a directory with its name does,
    # return an automatically generated index of the pages in that namespace.
    if not os.path.exists(os.path.join(self.path, filename)) and os.path.isdir(namespace_path):
        files = ["[%s](%s_%s)" % (x, namespace_cname, filename_to_cname(x))
                 for x in os.listdir(namespace_path)]
        return {'data': "# Namespace %s \n\n This is an automatically generated list "
                        "of pages in this namespace.\n\n %s"
                        % (os.path.splitext(filename)[0], '\n'.join(files))}
    try:
        data = self.gittle.get_commit_files(sha, paths=[filename]).get(filename)
        if not data:
            return None
        partials = {}
        if data.get('data'):
            meta = self.get_meta(data['data'])
            if meta and 'import' in meta:
                for partial_name in meta['import']:
                    partials[partial_name] = self.get_page(partial_name)
        data['partials'] = partials
        data['info'] = self.get_history(name, limit=1)[0]
        return data
    except KeyError:
        # HEAD doesn't exist yet
        return None

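# The namespace branch above relies on cname_to_filename / filename_to_cname,
# which are not shown in this section. A minimal sketch of the mapping they
# are assumed to perform (canonical page name <-> on-disk markdown filename):
import os


def cname_to_filename(cname):
    # Assumed: canonical name 'Home' -> file 'Home.md'
    return u"{0}.md".format(cname)


def filename_to_cname(filename):
    # Assumed: file 'Home.md' -> canonical name 'Home'
    return os.path.splitext(filename)[0]
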
def data(self):
    """Page content at this commit (cached)."""
    cache_key = self._cache_key('data')
    cached = cache.get(cache_key)
    if cached:
        return cached
    mode, sha = tree_lookup_path(self.wiki.repo.get_object,
                                 self.wiki.repo[self.sha].tree,
                                 self.filename)
    data = self.wiki.repo[sha].data
    cache.set(cache_key, data)
    return data

def rename(self, new_name, username=None, email=None, message=None):
    """Rename page.

    :param new_name: New name of page.
    :param username: Committer name
    :param email: Committer email
    :return: str -- Commit sha1
    """
    assert self.sha == 'HEAD'
    old_filename, new_filename = self.filename, cname_to_filename(new_name)
    if old_filename not in self.wiki.gittle.index:
        # old doesn't exist
        return None
    elif old_filename == new_filename:
        return None
    else:
        # file is being overwritten, but that is ok, it's git!
        pass
    username, email = self._get_user(username, email)
    if not message:
        message = "Moved %s to %s" % (self.name, new_name)
    os.rename(os.path.join(self.wiki.path, old_filename),
              os.path.join(self.wiki.path, new_filename))
    self.wiki.gittle.add(new_filename)
    self.wiki.gittle.rm(old_filename)
    commit = self.wiki.commit(name=username,
                              email=email,
                              message=message,
                              files=[old_filename, new_filename])
    old_history = cache.get(self._cache_key('history'))
    self._invalidate_cache()
    self.name = new_name
    self.filename = new_filename
    # We need to clear the cache for the new name as well as the old
    self._invalidate_cache(save_history=old_history)
    return commit

def rename(self, new_name, username=None, email=None, message=None):
    """Rename page.

    :param new_name: New name of page.
    :param username: Committer name
    :param email: Committer email
    :return: str -- Commit sha1
    """
    assert self.sha == 'HEAD'
    old_filename, new_filename = self.filename, cname_to_filename(new_name)
    if old_filename not in self.wiki.repo.open_index():
        # old doesn't exist
        return None
    elif old_filename == new_filename:
        return None
    else:
        # file is being overwritten, but that is ok, it's git!
        pass
    username, email = self._get_user(username, email)
    if not message:
        message = "Moved %s to %s" % (self.name, new_name)
    os.rename(os.path.join(self.wiki.path, old_filename),
              os.path.join(self.wiki.path, new_filename))
    commit = self.wiki.commit(name=username,
                              email=email,
                              message=message,
                              files=[old_filename, new_filename])
    old_history = cache.get(self._cache_key('history'))
    self._invalidate_cache()
    self.name = new_name
    self.filename = new_filename
    # We need to clear the cache for the new name as well as the old
    self._invalidate_cache(save_history=old_history)
    return commit

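# Hypothetical rename call; `page` is assumed to be checked out at HEAD.
# Returns None when the source file is not in the index or the name is
# unchanged, otherwise the sha of the commit recording the move.
commit_sha = page.rename("NewHome",
                         username="jdoe",
                         email="jdoe@example.com",
                         message="Moved Home to NewHome")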