def do_chart19(self, wikis):
    """Compute wiki commentors, projectwise
    chart19_data,
        { project.id : [ [ wikipath, [ [commentor, count], ... ] ], ... ],
          ... }
    """
    from zeta.lib.base import BaseController
    wdets = []
    allcmtrs = {}
    cntlr = BaseController()
    # Count comments per commentor for each wiki page.
    for w in wikis:
        cmtrs = h.computecount(w.comments, lambda x: x.commentby.username)
        allcmtrs.setdefault(w.project.id, []).extend(cmtrs.keys())
        wdets.append([w.project.id, h.wiki_parseurl(w.wikiurl), cmtrs])
    # Unique, sorted commentors per project.
    allcmtrs = dict([(p, sorted(set(allcmtrs[p]))) for p in allcmtrs])
    # Normalise each wiki's counts over the project's full commentor list.
    for w in wdets:
        pid = w[0]
        cmtrs = w[2]
        w[2] = [[u, cmtrs.get(u, 0)] for u in allcmtrs[pid]]
    chart19_usrs = dict([(pid, [[u, cntlr.url_user(u)] for u in users])
                         for pid, users in allcmtrs.iteritems()])
    chart19_data = {}
    [chart19_data.setdefault(w[0], []).append(w[1:]) for w in wdets]
    return chart19_data, chart19_usrs
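# For reference, a minimal sketch of what h.computecount() is assumed to do,
# judging from its use above and in do_chart18() (bucket-count items by a key
# function); this is an illustration, not the project's actual helper.
def computecount_sketch(items, keyfn):
    counts = {}
    for item in items:
        k = keyfn(item)                     # e.g. the commentor's username
        counts[k] = counts.get(k, 0) + 1
    return counts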
def do_chart20(self, tags):
    """Compute chart for wiki Vs tags
    chart20_data,
        { project.id : [ [ tagname, [ [ wiki.id, wikipath, wiki.wikiurl ],
                                      ... ] ],
                         ... ],
          ... }
    chart20_tags,
        { project.id : [ tagname, ... ], ... }
    """
    data = {}
    for tag in tags:
        for w in tag.wikipages:
            data.setdefault(w.project.id, {}).setdefault(
                tag.tagname, []
            ).append([w.id, h.wiki_parseurl(w.wikiurl), w.wikiurl])
    # Flatten each project's tag dict into [tagname, wikis] pairs, sorted
    # by tagname.
    data = dict([(p, sorted([[k, v] for k, v in data[p].iteritems()],
                            key=lambda x: x[0]))
                 for p in data])
    chart20_data = data
    chart20_tags = {}
    for p, tlist in chart20_data.iteritems():
        chart20_tags[p] = sorted(map(lambda x: x[0], tlist))
    return chart20_data, chart20_tags
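# Illustrative (hypothetical ids/names) shapes of the chart20 structures:
#   chart20_data == { 1: [ ['doc',  [[10, u'FrontPage', u'/p/x/wiki/FrontPage']]],
#                          ['todo', [[11, u'RoadMap',   u'/p/x/wiki/RoadMap']]] ] }
#   chart20_tags == { 1: ['doc', 'todo'] }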
def do_chart18(self, wikicomp, wikis):
    """Compute wiki authors, projectwise
    chart18_data,
        { project.id : [ [ wikipath, [ [author, edits], ... ] ], ... ],
          ... }
    """
    from zeta.lib.base import BaseController
    wdets = []
    allauthors = {}
    cntlr = BaseController()
    for w in wikis:
        wcnts = wikicomp.get_content(w, all=True)
        authors = h.computecount(wcnts, lambda x: x.author)
        allauthors.setdefault(w.project.id, []).extend(authors.keys())
        wdets.append([w.project.id, h.wiki_parseurl(w.wikiurl), authors])
    allauthors = dict([(p, sorted(set(allauthors[p]))) for p in allauthors])
    for w in wdets:
        pid = w[0]
        authors = w[2]
        w[2] = [[u, authors.get(u, 0)] for u in allauthors[pid]]
    chart18_usrs = dict([(pid, [[u, cntlr.url_user(u)] for u in users])
                         for pid, users in allauthors.iteritems()])
    chart18_data = {}
    [chart18_data.setdefault(w[0], []).append(w[1:]) for w in wdets]
    return chart18_data, chart18_usrs
def list_wiki( self, projectname ) :
    """List wiki urls for project `projectname`"""
    from zeta.config.environment import wikicomp
    wikipages = [ h.wiki_parseurl( wu )
                  for wid, wu in wikicomp.wikiurls( projectname ) ]
    return ( True, wikipages, '' )
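# Hedged usage sketch; `xicomp` mirrors the component name the test below
# uses, and the return value follows the (rc, result, errmsg) convention:
#   rc, wikipages, msg = xicomp.list_wiki( u'myproject' )
#   # wikipages, e.g. : [ u'FrontPage', u'RoadMap' ]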
def test_7_listwiki(self):
    """Testing method list_wiki()"""
    log.info("Testing method list_wiki()")
    projects = projcomp.get_project()
    # For valid project
    p = choice(projects)
    rc, wikipages, msg = xicomp.list_wiki(p.projectname)
    assert_equal(sorted(wikipages),
                 sorted([h.wiki_parseurl(w.wikiurl) for w in p.wikis]),
                 'Mismatch for valid call to list_wiki()')
    # For invalid project
    rc, wikipages, msg = xicomp.list_wiki(u'invalidproject')
    assert_equal(wikipages, [], 'Mismatch for invalid call to list_wiki()')
def do_chart17(self, wikis):
    """Compute wiki votes, projectwise
    chart17_data,
        { project.id : [ [ wikipath, upvotes, downvotes ], ... ], ... }
    """
    wdets = sorted([[w.project.id,
                     w.wikiurl,
                     h.wiki_parseurl(w.wikiurl),
                     sum([1 for v in w.votes if v.votedas == 'up']),
                     sum([1 for v in w.votes if v.votedas == 'down'])]
                    for w in wikis],
                   key=lambda x: x[2])
    chart17_data = {}
    [chart17_data.setdefault(w[0], []).append(w[2:]) for w in wdets]
    return chart17_data
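# Illustrative (hypothetical values) shape of chart17_data:
#   { 1: [ [u'FrontPage', 4, 1],       # wikipath, upvotes, downvotes
#          [u'RoadMap',   2, 0] ] }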
def do_chart16(self, wikis):
    """Compute wiki edits and comments, projectwise
    chart16_data,
        { project.id : [ [ wikipath, versioncount, commentscount ], ... ],
          ... }
    """
    wdets = sorted([[w.project.id,
                     w.wikiurl,
                     h.wiki_parseurl(w.wikiurl),
                     w.latest_version,
                     len(w.comments)]
                    for w in wikis],
                   key=lambda x: x[2])
    chart16_wiki = {}
    chart16_data = {}
    [chart16_data.setdefault(w[0], []).append(w[2:]) for w in wdets]
    [chart16_wiki.setdefault(w[0], []).append([w[1], w[2]]) for w in wdets]
    return chart16_data, chart16_wiki
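# Illustrative (hypothetical values) shapes of the chart16 structures:
#   chart16_data == { 1: [[u'FrontPage', 7, 3]] }   # path, versions, comments
#   chart16_wiki == { 1: [[u'/p/x/wiki/FrontPage', u'FrontPage']] }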
def titleindex(self, environ, projectname):
    """Title index of all Project wiki pages.
    URLS :
        /p/{projectname}/wiki/titleindex
        /p/{projectname}/wiki/TitleIndex
    """
    from zeta.config.environment import projcomp, wikicomp

    c.rclose = h.ZResp()

    # Setup context for page generation
    c.projsummary = c.project.summary
    wikiurls = wikicomp.wikiurls(c.project)
    c.wikipagenames = self.wikipagename(wikiurls)
    fn = lambda wu: (wu[0], h.wiki_parseurl(wu[1]))
    c.titlepages = sorted(map(fn, wikiurls), key=lambda x: x[1])
    c.wikipagename = None
    c.wa = ca.get_analyticobj('wiki')
    c.wsnippets = getattr(c.wa, 'pagesnippets', {}).get(c.project.id, {})
    c.title = '%s:titleindex' % projectname

    # HTML page generation
    c.rclose.append(render('/derived/projects/wikitindex.html'))
    return c.rclose
def do_wdets(self, wikis):
    """Map wiki.id to [ projectname, wikipath ] for each wiki page"""
    wdets = dict([(w.id, [w.project.projectname, h.wiki_parseurl(w.wikiurl)])
                  for w in wikis])
    return wdets
def wikipagenames(self, wikiurls):
    """Map (id, wikiurl) tuples to [ wikiurl, wikipath ] pairs"""
    fn = lambda wu: [wu[1], h.wiki_parseurl(wu[1])]
    pagenames = map(fn, wikiurls)
    return pagenames
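# h.wiki_parseurl() is used throughout to turn a stored wiki URL into its
# display path. A rough sketch of the assumed behaviour (an illustration,
# not the project's actual helper):
def wiki_parseurl_sketch(wikiurl):
    # assumed : '/p/<project>/wiki/<pagepath>' -> '<pagepath>'
    return wikiurl.rsplit('/wiki/', 1)[-1]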
def documentof(self, wiki, search='xapian'):
    """Make a document for 'wiki' entry to create a searchable index
    [ metadata, attributes, document ], where
        metadata   : { key, value } pairs to be associated with document
        attributes : searchable { key, value } pairs to be associated with
                     document
        document   : [ list, of, ... ] document strings, with increasing
                     weightage
    """
    wiki = self.get_wiki(wiki,
                         attrload=['type', 'project', 'creator', 'tags'])
    wcnts = self.get_content(wiki, all=True)

    # Collect commentor usernames and comment texts for this wiki page.
    q = select(
        [t_user.c.username, t_wiki_comment.c.text], bind=meta.engine
    ).select_from(
        t_wiki.outerjoin(t_wiki_comment).outerjoin(
            at_wiki_commentors
        ).outerjoin(
            t_user, at_wiki_commentors.c.commentorid == t_user.c.id)
    ).where(t_wiki.c.id == wiki.id)
    cmtusers = []
    cmttexts = []
    for tup in q.execute().fetchall():
        tup[0] and cmtusers.append(tup[0])
        tup[1] and cmttexts.append(tup[1])

    wikiusers = [ wiki.creator.username ] + \
                [ wcnt.author for wcnt in wcnts ] + cmtusers
    tagnames = [t.tagname for t in wiki.tags]
    url = h.wiki_parseurl(wiki.wikiurl)
    projname = getattr(wiki.project, 'projectname', '')

    metadata = {
        'doctype': 'wiki',
        'id': wiki.id,
        'projectname': projname,
    }
    attributes = \
        search == 'xapian' and \
        [ 'XID:wiki_%s' % wiki.id,          # id
          'XCLASS:wiki',                    # class
          'XPROJECT:%s' % projname,         # project
        ] + \
        [ 'XUSER:%s' % u                    # user
          for u in wikiusers ] + \
        [ 'XTAG:%s' % t                     # tag
          for t in tagnames ] \
        or \
        []
    attrs = ' '.join([ projname, wiki.type.wiki_typename ] +
                     wikiusers + tagnames)
    sourceurl = wiki.sourceurl or u''
    # Document strings, in increasing order of weightage.
    document = [ ' '.join([wcnt.text for wcnt in wcnts] + cmttexts),
                 ' '.join([url, wiki.summary, sourceurl]),
                 attrs ]
    return [metadata, attributes, document]
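# Illustrative (hypothetical ids/values) shape of documentof()'s return value:
#   [ {'doctype': 'wiki', 'id': 42, 'projectname': u'zeta'},        # metadata
#     ['XID:wiki_42', 'XCLASS:wiki', 'XPROJECT:zeta',
#      'XUSER:alice', 'XTAG:doc'],                                  # attributes
#     [u'<page and comment texts>',
#      u'<wikipath summary sourceurl>',
#      u'<project type users tags>'] ]                              # document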
def test_1_tag(self):
    """Testing Tag analytics"""
    log.info("Testing Tag analytics")
    ta = ca.get_analyticobj('tags')
    ta.analyse()
    ta.cacheme()
    ta = ca.get_analyticobj('tags')
    attrs = ['attachments', 'licenses', 'projects', 'tickets',
             'reviews', 'wikipages']
    # chart1_data
    for t, v in ta.chart1_data.iteritems():
        t = tagcomp.get_tag(t)
        data = [[a, len(getattr(t, a, []))] for a in attrs]
        assert_equal(sorted(v, key=lambda x: x[0]),
                     sorted(data, key=lambda x: x[0]),
                     'Mismatch in chart1_data')
    # chart1_rtags
    # TODO : not testing the percentile of related tags.
    for t, v in ta.chart1_rtags.iteritems():
        t = tagcomp.get_tag(t)
        ref = []
        for a in attrs:
            objs = getattr(t, a)
            ref.extend([rtag.tagname
                        for obj in objs
                        for rtag in getattr(obj, 'tags', [])
                        if rtag.tagname != t.tagname])
        ref = list(set(ref))
        data = map(lambda x: x[0], v)
        assert_equal(sorted(data), sorted(ref), 'Mismatch in chart1_rtag')
    # chart4_data
    for t, atts in ta.chart4_data:
        t = tagcomp.get_tag(t)
        ref = sorted([[a.id, a.filename] for a in t.attachments],
                     key=lambda x: x[0])
        data = sorted(atts, key=lambda x: x[0])
        assert_equal(data, ref, 'Mismatch in chart4_data')
    # chart4_tags
    data = []
    for a in attachcomp.get_attach(attrload=['tags']):
        data.extend([t.tagname for t in a.tags])
    data = list(set(data))
    assert_equal(sorted(data), sorted(ta.chart4_tags),
                 'Mismatch in chart4_tag')
    # chart7_data
    for t, lics in ta.chart7_data:
        t = tagcomp.get_tag(t)
        ref = sorted([[l.id, l.licensename] for l in t.licenses],
                     key=lambda x: x[0])
        data = sorted(lics, key=lambda x: x[0])
        assert_equal(data, ref, 'Mismatch in chart7_data')
    # chart7_tags
    data = []
    for l in liccomp.get_license(attrload=['tags']):
        data.extend([t.tagname for t in l.tags])
    data = list(set(data))
    assert_equal(sorted(data), sorted(ta.chart7_tags),
                 'Mismatch in chart7_tag')
    # chart20_data
    for p, v in ta.chart20_data.iteritems():
        p = projcomp.get_project(p)
        for t, wikis in v:
            t = tagcomp.get_tag(t)
            ref = sorted([[w.id, h.wiki_parseurl(w.wikiurl), w.wikiurl]
                          for w in t.wikipages if w.project == p],
                         key=lambda x: x[0])
            data = sorted(wikis, key=lambda x: x[0])
            assert_equal(data, ref, 'mismatch in chart20_data')
    # chart20_tag
    data = []
    projects = projcomp.get_project(attrload=['wikis'],
                                    attrload_all=['wikis.tags'])
    for p in projects:
        ref = sorted(list(set([t.tagname
                               for w in p.wikis for t in w.tags])))
        assert_equal(ref, ta.chart20_tags[p.id], 'mismatch in chart20_tag')