def correlate_note_search_with_num_notes_created(users):
    """Correlate each user's note count with their number of search actions.

    Builds two parallel R vectors — notes created per user and reduced
    search-log entries per user — and runs R's cor.test on them.

    Returns:
        (rsize, rsearch, cor.test result) where rsize/rsearch are R vectors.
    """
    rsize = r.c()
    rsearch = r.c()
    for u in users:
        # Collapse repeated log entries so a burst of identical searches
        # counts once (semantics of reduceRepeatLogsValues2 — TODO confirm).
        zoo = wuw.reduceRepeatLogsValues2(
            u.activitylog_set.filter(action='search').values())
        rsize = r.c(rsize, u.note_owner.count())
        rsearch = r.c(rsearch, len(zoo))
    # BUG FIX: the original returned r('cor.test')(notes, searches), but
    # `notes` and `searches` are undefined here; the accumulated vectors
    # are rsize and rsearch (matching correlate_note_search_with_mean_alive).
    return rsize, rsearch, r('cor.test')(rsize, rsearch)
def correlate_note_search_with_num_notes_created(users):
    """Correlate each user's note count with their number of search actions.

    Builds two parallel R vectors — notes created per user and reduced
    search-log entries per user — and runs R's cor.test on them.

    Returns:
        (rsize, rsearch, cor.test result) where rsize/rsearch are R vectors.
    """
    rsize = r.c()
    rsearch = r.c()
    for u in users:
        # Collapse repeated log entries so a burst of identical searches
        # counts once (semantics of reduceRepeatLogsValues2 — TODO confirm).
        zoo = wuw.reduceRepeatLogsValues2(
            u.activitylog_set.filter(action='search').values())
        rsize = r.c(rsize, u.note_owner.count())
        rsearch = r.c(rsearch, len(zoo))
    # BUG FIX: the original returned r('cor.test')(notes, searches), but
    # `notes` and `searches` are undefined here; the accumulated vectors
    # are rsize and rsearch (matching correlate_note_search_with_mean_alive).
    return rsize, rsearch, r('cor.test')(rsize, rsearch)
def searchterms(users):
    """Aggregate search-term frequencies across all users.

    Returns:
        (totalfreq, times, total): an nltk.FreqDist over every non-empty
        query, the 'when' timestamps of the LAST user's reduced search
        log, and the total number of queries seen.
    """
    totalfreq = nltk.FreqDist()
    total = 0
    # FIX: initialize zoo so the return does not raise NameError when
    # `users` is empty.
    zoo = []
    for u in users:
        zoo = wuw.reduceRepeatLogsValues2(
            u.activitylog_set.filter(action='search').values())
        terms = [getQuery(z) for z in zoo if getQuery(z) is not None]
        total = total + len(terms)
        totalfreq = totalfreq + nltk.FreqDist(terms)
    # NOTE(review): `zoo` here holds only the last user's entries, so the
    # timestamps returned cover one user only — if the intent was all
    # users' search times, they should be accumulated inside the loop.
    return totalfreq, [z['when'] for z in zoo], total
def times_search_used(users):
    """Map each user's id to the number of distinct, non-empty searches
    that user performed (users with no search activity are omitted)."""
    counts = {}
    for user in users:
        searches = user.activitylog_set.filter(action='search')
        if searches.count() == 0:
            continue
        reduced = wuw.reduceRepeatLogsValues2(searches.values())
        counts[user.id] = sum(
            1 for entry in reduced if getQuery(entry) is not None)
    return counts
def searchterms(users):
    """Aggregate search-term frequencies across all users.

    Returns:
        (totalfreq, times, total): an nltk.FreqDist over every non-empty
        query, the 'when' timestamps of the LAST user's reduced search
        log, and the total number of queries seen.
    """
    totalfreq = nltk.FreqDist()
    total = 0
    # FIX: initialize zoo so the return does not raise NameError when
    # `users` is empty.
    zoo = []
    for u in users:
        zoo = wuw.reduceRepeatLogsValues2(
            u.activitylog_set.filter(action='search').values())
        terms = [getQuery(z) for z in zoo if getQuery(z) is not None]
        total = total + len(terms)
        totalfreq = totalfreq + nltk.FreqDist(terms)
    # NOTE(review): `zoo` here holds only the last user's entries, so the
    # timestamps returned cover one user only — if the intent was all
    # users' search times, they should be accumulated inside the loop.
    return totalfreq, [z['when'] for z in zoo], total
def correlate_note_search_with_mean_alive(users):
    """Correlate each user's mean-alive statistic with their number of
    search actions.

    Users with no reduced search-log entries are skipped; failures while
    fetching the mean-alive value are logged and skipped (best-effort).

    Returns:
        (rsize, rsearch, cor.test result) where rsize/rsearch are R vectors.
    """
    rsize = r.c()
    rsearch = r.c()
    for u in users:
        print(u)
        zoo = wuw.reduceRepeatLogsValues2(
            list(u.activitylog_set.filter(action='search').values()))
        if len(zoo) == 0:
            print("warning zero")
            continue
        try:
            # user_mean_alive appears to return a one-entry mapping;
            # take its single value — TODO confirm against wuw.
            rsize = r.c(rsize, wuw.user_mean_alive(u.id).values()[0])
            rsearch = r.c(rsearch, len(zoo))
        except Exception:
            # FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt; keep the best-effort logging.
            print(sys.exc_info())
    return rsize, rsearch, r('cor.test')(rsize, rsearch)
def get_search_hits(users):
    """Map each user's id to the list of "hits" counts parsed from their
    search-log entries.

    Entries with no query, an empty 'search' payload, or an unparseable
    JSON payload are skipped; users with no usable entries are omitted.
    """
    us = {}
    for u in users:
        usearches = []
        zoo = [z for z in wuw.reduceRepeatLogsValues2(
                   u.activitylog_set.filter(action='search').values())
               if getQuery(z) is not None]
        for z in zoo:
            s = z.get("search", '')
            # The logged value itself may be None, which .get's default
            # does not cover — hence the explicit None check.
            if s is None or len(s.strip()) == 0:
                continue
            try:
                usearches.append(json.loads(s)["hits"])
            except Exception:
                # FIX: was a bare `except:`; Exception preserves the
                # best-effort skip while letting KeyboardInterrupt escape.
                import sys
                print(sys.exc_info())
        if len(usearches) > 0:
            us[u.id] = usearches
    return us
def correlate_note_search_with_mean_alive(users):
    """Correlate each user's mean-alive statistic with their number of
    search actions.

    Users with no reduced search-log entries are skipped; failures while
    fetching the mean-alive value are logged and skipped (best-effort).

    Returns:
        (rsize, rsearch, cor.test result) where rsize/rsearch are R vectors.
    """
    rsize = r.c()
    rsearch = r.c()
    for u in users:
        print(u)
        zoo = wuw.reduceRepeatLogsValues2(
            list(u.activitylog_set.filter(action='search').values()))
        if len(zoo) == 0:
            print("warning zero")
            continue
        try:
            # user_mean_alive appears to return a one-entry mapping;
            # take its single value — TODO confirm against wuw.
            rsize = r.c(rsize, wuw.user_mean_alive(u.id).values()[0])
            rsearch = r.c(rsearch, len(zoo))
        except Exception:
            # FIX: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt; keep the best-effort logging.
            print(sys.exc_info())
    return rsize, rsearch, r('cor.test')(rsize, rsearch)
def get_search_hits(users):
    """Map each user's id to the list of "hits" counts parsed from their
    search-log entries.

    Entries with no query, an empty 'search' payload, or an unparseable
    JSON payload are skipped; users with no usable entries are omitted.
    """
    us = {}
    for u in users:
        usearches = []
        zoo = [z for z in wuw.reduceRepeatLogsValues2(
                   u.activitylog_set.filter(action='search').values())
               if getQuery(z) is not None]
        for z in zoo:
            s = z.get("search", '')
            # The logged value itself may be None, which .get's default
            # does not cover — hence the explicit None check.
            if s is None or len(s.strip()) == 0:
                continue
            try:
                usearches.append(json.loads(s)["hits"])
            except Exception:
                # FIX: was a bare `except:`; Exception preserves the
                # best-effort skip while letting KeyboardInterrupt escape.
                import sys
                print(sys.exc_info())
        if len(usearches) > 0:
            us[u.id] = usearches
    return us
def times_search_used(users):
    """Map each user's id to the number of distinct, non-empty searches
    that user performed (users with no search activity are omitted)."""
    counts = {}
    for user in users:
        searches = user.activitylog_set.filter(action='search')
        if searches.count() == 0:
            continue
        reduced = wuw.reduceRepeatLogsValues2(searches.values())
        counts[user.id] = sum(
            1 for entry in reduced if getQuery(entry) is not None)
    return counts