def creations(users): print "bar" cusave = [wuw.reduceRepeatLogsValues(list(cu.activitylog_set.filter(action__in=['notecapture-focus','note-add']).values())) for cu in users] means = [] varss = [] userstimes = [] for u_i in xrange(len(cusave)): print u_i user = users[u_i] print user uals = cusave[u_i] uals.sort(key=lambda x: long(x["when"])) if len(uals) == 0: continue print len(uals) thisal = uals[0] usertimes = [] for al in uals[1:]: if thisal["action"] == 'notecapture-focus' and al["action"] == 'note-add': elapsed = long(al["when"]) - long(thisal["when"]) usertimes.append(long(al["when"]) - long(thisal["when"])) thisal = al userstimes.append(usertimes) try: means.append(mean(usertimes)) varss.append(variance(usertimes)) except: import sys print sys.exc_info() return reduce(lambda x,y:x+y, userstimes),userstimes,means,varss
def user_search(user, days_ago=None, nosmoothing=False):
    """Extract 'search' and 'clear-search' events from a user's activity logs.

    Repeated events within a 10-second window are collapsed unless
    nosmoothing is True. Each retained log dict is annotated with 'query'
    (the query string, '' for clear-search) and, for searches, 'hits'.
    Results are memoized in the module-level search_cache and
    search_query_cache, keyed by user.id.

    Returns (searches, freqdist): the annotated log dicts and an
    nltk.FreqDist over the query strings.
    """
    from jv3.study.content_analysis import activity_logs_for_user
    global search_cache
    global search_query_cache
    alogs = wuw.reduceRepeatLogsValues(
        activity_logs_for_user(user, None, days_ago))
    searches = []
    queries = []
    last_time = 0
    for al in alogs:
        if al["action"] == 'search':
            try:
                query = JSONDecoder().decode(al["search"])
            except Exception:
                continue  # unparseable search payload -- skip it
            if type(query) == dict:
                key = None
                if 'search' in query:
                    key = 'search'
                if 'query' in query:
                    key = 'query'
                if key is not None:
                    # no empty searches pls.
                    # BUGFIX: the original condition read
                    #   nonempty and nosmoothing or time_check
                    # which, because `and` binds tighter than `or`, let
                    # EMPTY queries through whenever the 10-second window
                    # had elapsed. Parenthesize so non-emptiness always
                    # gates acceptance.
                    if len(query[key].strip()) > 0 and (
                            nosmoothing or
                            long(al['when']) - long(last_time) > (10 * 1000)):
                        queries.append(query[key])
                        al['query'] = query[key]
                        al['hits'] = query.get('hits', [])
                        searches.append(al)
                        last_time = al['when']
        elif al["action"] == 'clear-search' and (
                nosmoothing or
                long(al['when']) - long(last_time) > (10 * 1000)):
            al['query'] = ''
            searches.append(al)
            last_time = al["when"]
    search_cache[user.id] = searches
    search_query_cache[user.id] = queries
    return searches, nltk.FreqDist(queries)
def user_search(user, days_ago=None, nosmoothing=False):
    """Extract 'search' and 'clear-search' events from a user's activity logs.

    Repeated events within a 10-second window are collapsed unless
    nosmoothing is True. Each retained log dict is annotated with 'query'
    (the query string, '' for clear-search) and, for searches, 'hits'.
    Results are memoized in the module-level search_cache and
    search_query_cache, keyed by user.id.

    Returns (searches, freqdist): the annotated log dicts and an
    nltk.FreqDist over the query strings.

    NOTE(review): this is a duplicate definition of user_search -- it
    shadows the earlier one at import time; consider deleting one copy.
    """
    from jv3.study.content_analysis import activity_logs_for_user
    global search_cache
    global search_query_cache
    alogs = wuw.reduceRepeatLogsValues(
        activity_logs_for_user(user, None, days_ago))
    searches = []
    queries = []
    last_time = 0
    for al in alogs:
        if al["action"] == 'search':
            try:
                query = JSONDecoder().decode(al["search"])
            except Exception:
                continue  # unparseable search payload -- skip it
            if type(query) == dict:
                key = None
                if 'search' in query:
                    key = 'search'
                if 'query' in query:
                    key = 'query'
                if key is not None:
                    # no empty searches pls.
                    # BUGFIX: the original condition read
                    #   nonempty and nosmoothing or time_check
                    # which, because `and` binds tighter than `or`, let
                    # EMPTY queries through whenever the 10-second window
                    # had elapsed. Parenthesize so non-emptiness always
                    # gates acceptance.
                    if len(query[key].strip()) > 0 and (
                            nosmoothing or
                            long(al['when']) - long(last_time) > (10 * 1000)):
                        queries.append(query[key])
                        al['query'] = query[key]
                        al['hits'] = query.get('hits', [])
                        searches.append(al)
                        last_time = al['when']
        elif al["action"] == 'clear-search' and (
                nosmoothing or
                long(al['when']) - long(last_time) > (10 * 1000)):
            al['query'] = ''
            searches.append(al)
            last_time = al["when"]
    search_cache[user.id] = searches
    search_query_cache[user.id] = queries
    return searches, nltk.FreqDist(queries)
def creations(users): print "bar" cusave = [ wuw.reduceRepeatLogsValues( list( cu.activitylog_set.filter( action__in=['notecapture-focus', 'note-add']).values())) for cu in users ] means = [] varss = [] userstimes = [] for u_i in xrange(len(cusave)): print u_i user = users[u_i] print user uals = cusave[u_i] uals.sort(key=lambda x: long(x["when"])) if len(uals) == 0: continue print len(uals) thisal = uals[0] usertimes = [] for al in uals[1:]: if thisal["action"] == 'notecapture-focus' and al[ "action"] == 'note-add': elapsed = long(al["when"]) - long(thisal["when"]) usertimes.append(long(al["when"]) - long(thisal["when"])) thisal = al userstimes.append(usertimes) try: means.append(mean(usertimes)) varss.append(variance(usertimes)) except: import sys print sys.exc_info() return reduce(lambda x, y: x + y, userstimes), userstimes, means, varss