def dayone_title_replacer(cfg, pagename, request, **kw):
    """Render a link for *pagename* using its Day One adapter title.

    Registered as a title-replacer hook: when the page is served by the
    'do' (Day One) middleware, return a link whose visible text comes from
    the page's adapter (``_get_adpt().text()`` — presumably the entry's
    title; confirm against the adapter implementation).  For any other
    middleware type return ``None`` so the caller falls back to the
    default title rendering.

    @param cfg: wiki configuration object (unused here, part of the hook
                signature).
    @param pagename: name of the page being rendered.
    @param request: current request object.
    @keyword link_only: accepted for hook-API compatibility; currently
                        ignored by this replacer.
    @return: HTML link string for 'do' pages, else ``None``.
    """
    # Kept for interface compatibility with other replacer hooks, even
    # though this implementation does not use it.
    link_only = kw.get('link_only', False)

    if get_middleware_type(request, pagename) == 'do':
        page = Page(request, pagename)
        return page.link_to(request, text=page._get_adpt().text())
    return None
def logchain(request, log1):
    """Merge the wiki's recent-changes log with every storage backend's
    history into one stream, yielding entries newest-first.

    Each source is an iterator of log entries (objects exposing at least
    ``pagename`` and ``ed_time_usecs``).  Entries whose page is not
    actually handled by the storage that reported them are skipped, so a
    change is never yielded twice from different logs.

    @param request: current request; provides ``storage`` (mapping of
                    storage name -> backend) and ``user.getTime``.
    @param log1: iterator over the default wiki recent-changes log.
    @return: generator of log entries, most recent first.
    """
    # (entry-iterator, middleware/storage name) pairs; the plain wiki
    # recent-changes log is always present under the name 'wiki'.
    logs = [(log1, 'wiki')]
    for name, storage in request.storage.iteritems():
        logs.append((storage.history(request), name))

    # One look-ahead slot per log: the next not-yet-yielded entry, or
    # None when the slot is empty / the log is exhausted.
    next_data = [None] * len(logs)

    while True:
        # Refill every empty slot, skipping entries whose page belongs to
        # a different middleware than the log that reported them.
        for idx, (entries, storage_name) in enumerate(logs):
            try:
                if next_data[idx] is None:
                    next_data[idx] = next(entries)
                while get_middleware_type(
                        request, next_data[idx].pagename) != storage_name:
                    next_data[idx] = next(entries)
            except StopIteration:
                # This log ran out (possibly mid-skip); mark it exhausted.
                next_data[idx] = None

        # Stop once every log is exhausted.  An explicit None check is
        # used instead of max()/truthiness so a falsy-but-real entry can
        # never terminate the merge early.
        if not any(entry is not None for entry in next_data):
            break

        # Pick the most recent head entry among all logs.  Exhausted
        # slots contribute 0, which sorts below any real timestamp tuple.
        times = []
        for entry in next_data:
            if entry is None:
                times.append(0)
            else:
                times.append(request.user.getTime(
                    wikiutil.version2timestamp(entry.ed_time_usecs))[:5])
        idx = times.index(max(times))
        ydata = next_data[idx]
        next_data[idx] = None  # consume: slot is refilled next iteration
        yield ydata