import hashlib
import pickle
from datetime import timedelta

from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage

# NOTE (assumption): project-level names used below (event_logger, response_cache,
# run_query, exceptions, conditions, get_user_condition, and the QueryTime,
# LinkTime and UserProfile models) are defined or imported elsewhere in the app.


def record_query(user, now):
    str_u_ID = str(user.id)
    try:
        qt_obj = QueryTime.objects.get(user=user)
    except QueryTime.DoesNotExist:
        qt_obj = QueryTime.objects.create(user=user, last_query_time=now)
    last_query = QueryTime.objects.get(user=user).last_query_time.replace(
        tzinfo=None, microsecond=0)
    time = now - last_query - timedelta(hours=1)
    qt_obj.last_query_time = now
    qt_obj.save()
    if time <= timedelta(seconds=0):
        event_logger.info(str_u_ID + ' 1Q')
    elif time < timedelta(minutes=10):
        event_logger.info(str_u_ID + ' RP ' + str(time))
    elif time < timedelta(hours=1):
        event_logger.info(str_u_ID + ' TO')
    else:
        event_logger.info(str_u_ID + ' S1')

def paginated_search(request, query, user):
    condition = get_user_condition(request)
    user_id = str(user.id)
    q_len = str(len(query))
    page = request.REQUEST.get('page')
    hq = hashlib.sha1(query)
    hash_q = hq.hexdigest()
    if query:
        if not response_cache.get(query):
            try:
                response = run_query(query, condition=1)
                response_cache.set(query, pickle.dumps(response), 600)
            except exceptions.EngineConnectionException:
                response = 'Unable to connect to the search engine at this time. Please try again later.'
            profile = UserProfile.objects.get(user=user)
            query_num = int(profile.num_query)
            profile.num_query = query_num + 1
            profile.save()
            event_logger.info(user_id + ' QL ' + q_len + ' HQ ' + hash_q + ' CA ')
        else:
            response = pickle.loads(response_cache.get(query))
            event_logger.info(user_id + ' HQ ' + hash_q + ' PA ' + str(page) + ' RR')
    if type(response) == str:
        results = response
    else:
        paginator = Paginator(response.results, 10)
        try:
            results = paginator.page(page)
        except PageNotAnInteger:
            # if page is not an integer, deliver the first page
            results = paginator.page(1)
        except EmptyPage:
            # if page is out of range, deliver the last page of results
            results = paginator.page(paginator.num_pages)
    return results

def record_link(user, now, url, rank):
    """
    Records data about links visited by the user.

    :param user: (User) the current user instance
    :param now: (datetime) the current time
    :param url: (unicode) the url clicked by the user
    :param rank: (unicode) the rank of the url on the page
    :return: None
    """
    str_u_ID = str(user.id)
    hl = hashlib.sha1(url)
    url_visited = hl.hexdigest()
    url_rank = str(rank)
    profile = UserProfile.objects.get(user=user)
    link_num = int(profile.links_visited)
    profile.links_visited = link_num + 1
    profile.save()
    try:
        lt_obj = LinkTime.objects.get(user=user)
    except LinkTime.DoesNotExist:
        lt_obj = LinkTime.objects.create(user=user, last_link_time=now)
    last_link = LinkTime.objects.get(user=user).last_link_time.replace(
        tzinfo=None, microsecond=0)
    time = now - last_link - timedelta(hours=1)
    lt_obj.last_link_time = now
    lt_obj.save()
    if time <= timedelta(seconds=0):
        event_logger.info(str_u_ID + ' LV ' + url_visited + ' LR ' + url_rank + ' 1L')
    elif time < timedelta(minutes=10):
        event_logger.info(str_u_ID + ' LV ' + url_visited + ' LR ' + url_rank + ' LT ' + str(time))
    elif time < timedelta(hours=1):
        event_logger.info(str_u_ID + ' LV ' + url_visited + ' LR ' + url_rank + ' TO')
    else:
        event_logger.info(str_u_ID + ' LV ' + url_visited + ' LR ' + url_rank + ' S1')

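# --- Usage sketch (not part of the original module) -------------------------
# record_link would presumably be called from a click-tracking view just before
# the user is redirected to the external result. The view below is a
# hypothetical illustration only; the view name and the 'url' and 'rank' GET
# parameter names are assumptions.
def example_link_click_view(request):
    from datetime import datetime
    from django.http import HttpResponseRedirect
    url = request.GET.get('url', '')
    rank = request.GET.get('rank', '')
    # log the click, then send the user on to the result they selected
    record_link(request.user, datetime.now().replace(microsecond=0), url, rank)
    return HttpResponseRedirect(url)
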
def record_query(user, now):
    """
    Records details of user query activity in the log file.

    :param user: (User) the user instance
    :param now: (datetime) the current time
    :return: None
    """
    str_u_ID = str(user.id)
    try:
        qt_obj = QueryTime.objects.get(user=user)
    except QueryTime.DoesNotExist:
        qt_obj = QueryTime.objects.create(user=user, last_query_time=now)
    last_query = QueryTime.objects.get(user=user).last_query_time.replace(
        tzinfo=None, microsecond=0)
    time_spent = now - last_query - timedelta(hours=1)
    qt_obj.last_query_time = now
    qt_obj.save()
    if time_spent <= timedelta(milliseconds=0):
        event_logger.info(str_u_ID + ' 1Q')
    elif time_spent < timedelta(minutes=10):
        event_logger.info(str_u_ID + ' RP ' + str(time_spent))
    # if the user spends > 10 min on a results page, assume they have wandered off
    elif time_spent < timedelta(hours=1):
        event_logger.info(str_u_ID + ' TO')
    # if it's been over an hour, assume it's their first query of the session
    else:
        event_logger.info(str_u_ID + ' S1')

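# --- Usage sketch (not part of the original module) -------------------------
# record_query takes the current user and a naive datetime for 'now'; a search
# view would presumably call it as in the hypothetical helper below. The helper
# name is an assumption for illustration only.
def _example_record_query_call(request):
    from datetime import datetime
    # naive, second-resolution timestamp, matching how the stored value is compared
    now = datetime.now().replace(microsecond=0)
    record_query(request.user, now)
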
def record_link(user, now, url, rank):
    str_u_ID = str(user.id)
    hl = hashlib.sha1(url)
    url_visited = hl.hexdigest()
    url_rank = str(rank)
    profile = UserProfile.objects.get(user=user)
    link_num = int(profile.num_links)
    profile.num_links = link_num + 1
    profile.save()
    try:
        lt_obj = LinkTime.objects.get(user=user)
    except LinkTime.DoesNotExist:
        lt_obj = LinkTime.objects.create(user=user, last_link_time=now)
    last_link = LinkTime.objects.get(user=user).last_link_time.replace(
        tzinfo=None, microsecond=0)
    time = now - last_link - timedelta(hours=1)
    lt_obj.last_link_time = now
    lt_obj.save()
    if time <= timedelta(seconds=0):
        event_logger.info(str_u_ID + ' LV ' + url_visited + ' LR ' + url_rank + ' 1L')
    elif time < timedelta(minutes=10):
        event_logger.info(str_u_ID + ' LV ' + url_visited + ' LR ' + url_rank + ' LT ' + str(time))
    elif time < timedelta(hours=1):
        event_logger.info(str_u_ID + ' LV ' + url_visited + ' LR ' + url_rank + ' TO')
    else:
        event_logger.info(str_u_ID + ' LV ' + url_visited + ' LR ' + url_rank + ' S1')

def paginated_search(query, condition, u_ID, page, user):
    """
    Performs a paginated search based on a given query and records data about
    the search to a log file.

    :param query: (str) the search query input by the user
    :param condition: (int) the condition assigned to the user
    :param u_ID: (int) the user id
    :param page: (unicode) the page number of results to be displayed
    :param user: (User) the user instance
    :return: (Page) paginated results of the search
    """
    q_len = str(len(query))
    cnd = condition
    str_u_ID = str(u_ID)
    h1 = hashlib.sha1(query)
    q_hash = h1.hexdigest()
    profile = UserProfile.objects.get(user=user)
    queries = int(profile.queries_submitted)
    if query:
        # Run our Bing function to get the results list!
        if not response_cache.get(query):
            event_logger.info(str_u_ID + ' QL ' + 'HQ: ' + q_hash + ' ' + q_len + ' CA ')
            try:
                result_list = run_query(query, cnd)
                response_cache.set(query, pickle.dumps(result_list), 300)
            except exceptions.EngineConnectionException:
                result_list = 'Unable to connect to the search engine at this time. Please try again later.'
            profile.queries_submitted = queries + 1
            profile.save()
        elif page is None:
            # a new query has been submitted
            event_logger.info(str_u_ID + ' RR ' + 'HQ: ' + q_hash + ' ' + q_len)
            result_list = pickle.loads(response_cache.get(query))
            mod = conditions[condition]
            result_list = mod(result_list)
            profile.queries_submitted = queries + 1
            profile.save()
        else:
            event_logger.info(str_u_ID + ' PA' + str(page) + ' RR')
            result_list = pickle.loads(response_cache.get(query))
    if type(result_list) == str:
        contacts = result_list
    else:
        paginator = Paginator(result_list.results, 10)  # show 10 results per page
        try:
            contacts = paginator.page(page)
        except PageNotAnInteger:
            # if page is not an integer, deliver the first page
            contacts = paginator.page(1)
        except EmptyPage:
            # if page is out of range, deliver the last page of results
            contacts = paginator.page(paginator.num_pages)
    if contacts is None:
        contacts = "No results."
    # right before the results are returned, log the fact that the search is completed.
    # this time difference can be used to calculate the lag from running the query through Bing
    event_logger.info(str_u_ID + ' RPL')
    return contacts

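# --- Usage sketch (not part of the original module) -------------------------
# A hypothetical view that ties the logging and search helpers together: log
# the query event, run the paginated search, and render the page of results.
# The view name, template path, GET parameter names, and the 'condition' field
# on UserProfile are assumptions for illustration only.
def example_search_view(request):
    from datetime import datetime
    from django.shortcuts import render
    user = request.user
    query = request.GET.get('query', '')
    page = request.GET.get('page')
    record_query(user, datetime.now().replace(microsecond=0))  # log when the query arrived
    condition = UserProfile.objects.get(user=user).condition   # assumed field name
    results = paginated_search(query, condition, user.id, page, user)
    return render(request, 'search/results.html',
                  {'results': results, 'query': query})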