def run(self, name):
    """Look up a Tag by name, aborting with 404 when it cannot be found.

    Only names that are already pure ASCII are looked up: if
    ``_force_ascii`` alters the string, the name is treated as invalid
    and we fall through to the 404.

    :param name: candidate tag name (may be empty/None).
    :returns: the Tag object on success; otherwise aborts the request.
    """
    if name:
        cleaned = _force_ascii(name)
        if cleaned == name:
            try:
                return Tag._by_name(cleaned)
            except Exception:
                # Lookup failed (unknown tag) -- fall through to 404.
                # NOTE(review): this was a bare `except:` before, which
                # also swallowed SystemExit/KeyboardInterrupt.
                pass
    abort(404, 'page not found')
def run(self, tag_field):
    """Split a comma-delimited tag field into a list of ASCII-safe tags.

    A tag is kept only when ``_force_ascii`` leaves it unchanged (i.e. it
    was already pure ASCII).  An empty or None field yields ``[]``.
    """
    if not tag_field:
        return []
    # Tags are comma delimited; drop any that are not already ASCII.
    candidates = self.comma_sep.split(tag_field)
    return [tag for tag in candidates if _force_ascii(tag) == tag]
def sequences_for_article_url(self, url):
    """Return the article sequences extracted from the wiki export.

    For each sequence in ``self.data['sequences']`` that contains the
    given URL's path, the result maps the sequence title to a dict with
    the title, the previous/next article paths (None at the ends), and
    the article's index within the sequence.  Results are memoized in
    ``g.permacache`` under a key derived from ``self.cache_key``.

    :param url: article URL, or None (returns ``{}``).
    :returns: dict keyed by sequence title.
    """
    if url is None:
        return {}
    from pylons import g

    parsed = UrlParser(url)
    path = parsed.path
    cache_key = _force_ascii(self.cache_key + path)

    cached = g.permacache.get(cache_key)
    if cached is not None:
        return cached

    result = {}
    for sequence in self.data['sequences']:
        articles = sequence['articles']
        # Find where (if anywhere) this URL sits in the sequence.
        try:
            idx = articles.index(path)
        except ValueError:
            continue
        # Neighbouring articles in the sequence, when they exist.
        next_in_seq = articles[idx + 1] if idx + 1 < len(articles) else None
        prev_in_seq = articles[idx - 1] if idx > 0 else None
        title = sequence['title']
        result[title] = {
            'title': title,
            'next': next_in_seq,
            'prev': prev_in_seq,
            'index': idx,
        }
    g.permacache.set(cache_key, result)
    return result
def run(self, tag_field):
    """Parse a comma-separated tag field, keeping only ASCII-safe tags.

    Returns an empty list for an empty/None field; otherwise each
    candidate is retained only if ``_force_ascii`` leaves it unchanged.
    """
    tags = []
    if tag_field:
        # Tags are comma delimited
        for candidate in self.comma_sep.split(tag_field):
            if candidate == _force_ascii(candidate):
                tags.append(candidate)
    return tags