def recordHistory(request):
    """Persist one user-history row posted by the client.

    Expects POST fields ``user_phone``, ``history_id`` and ``history_type``
    (raises ``KeyError``/``MultiValueDictKeyError`` if any is absent, as the
    original did).

    Returns an HttpResponse whose body is a JSON object with
    ``status`` (bool) and ``message`` (str).
    """
    userPhone = request.POST['user_phone']
    history_module_id = request.POST['history_id']
    history_module_type = request.POST['history_type']
    uh = UserHistory()
    uh.user_phone = userPhone
    uh.history_module_type = history_module_type
    uh.history_module_id = history_module_id
    try:
        uh.save()
        data = {'status': True, 'message': '历史记录保存成功'}
    except Exception:
        # Fixed: was `except BaseException`, which also swallows
        # KeyboardInterrupt/SystemExit; those must propagate. The bound
        # name (`reason`) was unused, so it is dropped.
        data = {'status': False, 'message': '历史记录保存失败'}
    result = json.dumps(data)
    return HttpResponse(result)
def trail_page(request):
    """Render the "trail" view for a single target ad.

    Fetches the target ad (GET param ``targetAd``) and up to TRAIL_LIMIT
    related "trail" ads over XML-RPC, enriches each result with geo
    coordinates and the current user's flagged status, aggregates
    phone-number / location / image statistics, optionally records the view
    in the user's history, and renders ``trail.html``.
    """
    # check if get is available; without query params there is nothing to trail
    if len(request.GET.values())==0: return redirect('/search/')
    s = xmlrpclib.Server(serverPort, allow_none=True)
    # get user preferences
    user_obj = User.objects.get(id=request.user.id)
    user_pref = UserPreferences.objects.get(user=user_obj)
    # get trail. last argument is the amount of trail results you want to return at most
    targetAdIndex = int(request.GET.get("targetAd"))
    trail_list = s.QueryInterfaceImpl.rpcGetTrailResults(dataset_id, targetAdIndex, TRAIL_LIMIT)
    # get details for trailed ad
    targetAd = s.QueryInterfaceImpl.rpcGetAdByAdIndex(dataset_id, targetAdIndex)
    targetAdObject = createObjList(targetAd)[0]
    # generate list of trail result and get unique locations within
    obj_list,locations = createObjListPlusLocationSet(trail_list)
    if len(obj_list)==0:
        # no trail results: render immediately with empty aggregates
        return render(request, "trail.html", {"preferences":user_pref,"resultAds":obj_list, "targetAd":targetAdObject, "phoneCount":0, "phoneDict":{}, "locDict":{}, "locCount":0, "trailtotal":0, "imageList":[],"resultAdsJSON":"{}"})
    #load locations that already have geo coordinates
    currentLocations = LocationsGeo.objects.filter(location__in=list(locations)).values()
    missingLocations = frozenset(LocationsMissing.objects.all().values_list('location', flat=True))
    # locations that are neither geocoded nor already recorded as missing
    missing = frozenset(locations).difference(missingLocations,frozenset(currentLocations.values_list('location', flat=True)))
    if len(missing)>0:
        # record newly-seen un-geocoded locations in one bulk insert
        insert_list = []
        for x in missing:
            insert_list.append(LocationsMissing(location=x))
        LocationsMissing.objects.bulk_create(insert_list)
    #create a geo lookup dictionary: location (UTF-8 bytes) -> [lat, lon]
    geo_lookup={}
    for obj in currentLocations:
        geo_lookup[obj['location'].encode('UTF8')]=[obj['lat'],obj['lon']]
    # ad indexes this user has flagged, for O(1) membership tests below
    flaggedAds=frozenset(UserFlaggedAds.objects.filter(user=user_obj).values_list('ad_id', flat=True))
    phone_dict = {}
    loc_dict = {}
    images = {}
    for ad in obj_list:
        #add geoinfo
        if ad.location in geo_lookup:
            ad.lat=float(geo_lookup[ad.location][0])
            ad.lon=float(geo_lookup[ad.location][1])
        #check for flagged ad
        if long(ad.adIndex) in flaggedAds:
            ad.flagged=1
        #calc phone numbers (frequency count across the trail)
        for phonenum in ad.phonenumbers:
            if phonenum in phone_dict:
                phone_dict[phonenum] += 1
            else:
                phone_dict[phonenum] = 1
        #calc locations (frequency count across the trail)
        if ad.displaylocation in loc_dict:
            loc_dict[ad.displaylocation] += 1
        else:
            loc_dict[ad.displaylocation] = 1
        #get (unique) images and associated ad IDs
        for img in ad.images:
            if not img in images:
                images[img]=[ad.adIndex]
            else:
                images[img].append(ad.adIndex)
    #paginate (fixed page size of 100 trail results)
    paginator = Paginator(obj_list, 100)
    page = request.GET.get("page")
    # strip any page=N from the query string so templates can append their own
    pageGet=re.sub(r'(page=\d+\&{0,1})','',request.GET.urlencode())
    # NOTE(review): `page` was already read above; this re-read only adds the
    # default of 1 when the parameter is absent
    if 'page' in request.GET:
        page = request.GET.get("page")
    else:
        page=1
    #create history entry
    # NOTE(review): the 'Al' GET flag appears to suppress history recording —
    # confirm its meaning with the front-end
    if 'Al' not in request.GET:
        permHistory = UserHistoryPermanent(user_id=request.user.id,searchget=request.get_full_path(),type="trail")
        permHistory.save()
        # user-visible history is only kept when the preference allows it
        if user_pref.save_history==1:
            targetAdString="location: <i>"+targetAdObject.displaylocation+"</i> phone: <i>"+",".join(targetAdObject.phonenumbers)+"</i> age: <i>"+targetAdObject.age+"</i> posted: <i>"+targetAdObject.date+"</i>"
            newHistory = UserHistory(user_id=request.user.id, searchtext=targetAdString,searchget=request.get_full_path(),type="trail")
            newHistory.save()
    resultAds = paginator.page(page)
    #trailResultList for map and graph
    AdsJSON=json.dumps([ads.__dict__ for ads in obj_list])
    #render to response
    c= {}
    c.update(csrf(request))
    c.update({"preferences":user_pref,"resultAds":resultAds, "targetAd":targetAdObject, "phoneCount":len(phone_dict), "phoneDict":phone_dict, "locDict":loc_dict,'pageGet':pageGet, "locCount":len(loc_dict), "trailtotal":len(obj_list), "imageList":images,"resultAdsJSON":AdsJSON})
    return render(request, "trail.html",c)
def result_page(request):
    """Run a search against the XML-RPC data server and render the results.

    Parses the search criteria from GET via ``getDataFromRequest``, executes
    the search, enriches map statistics with geo coordinates, paginates,
    records the search in the user's history on first display, and renders
    ``newresult.html``. Redirects to ``/search/`` when GET is empty or the
    request cannot be parsed.
    """
    s = xmlrpclib.Server(serverPort, allow_none=True)
    #get user preferences
    user_obj = User.objects.get(id=request.user.id)
    user_pref = UserPreferences.objects.get(user=user_obj)
    #attrName_list = ["State", "Age", "Size", "Location"]
    attrName_list = ["Age", "Size", "Location"]
    attr_dict = s.QueryInterfaceImpl.rpcGetAttributes(attrName_list, dataset_id)
    #remove none form dict
    #attr_dict["State"]=attr_dict["State"][1:]
    #list of keywords available from dropdown
    regions=Regions.objects.defer("id").order_by('short')#.values_list('short', flat=True)
    regions_lookup={}
    for r in regions:
        regions_lookup[r.short]=r.long
    regions_lookup=json.dumps(regions_lookup)
    keyword_list = s.QueryInterfaceImpl.rpcGetKeywordsAttributes(dataset_id)
    #get dates for which data is available
    date_dict = s.QueryInterfaceImpl.rpcGetDatasetDates(dataset_id)
    # check if get is available
    if len(request.GET.values())==0:
        return redirect('/search/')
    #get search request
    search_request_dict = getDataFromRequest(request,date_dict) # get the search criteria the user enters
    #return to search page if request could not be parsed
    if not search_request_dict:
        return redirect('/search/')
    #make json of searchrequest so it can be sent back with results
    search_request_json=json.dumps(search_request_dict)
    #make request to data server
    result_dict = s.QueryInterfaceImpl.rpcGetSearchResult(dataset_id, search_request_dict["Age"],
                                                          search_request_dict["Size"],
                                                          search_request_dict["PhoneNumber"],
                                                          search_request_dict["State"],
                                                          search_request_dict["Location"],
                                                          search_request_dict["Keyword"],
                                                          search_request_dict["AnyKeyword"],
                                                          search_request_dict["StartDate"],
                                                          search_request_dict["EndDate"])
    #result_dict["resultStat"]=[]
    obj_list = createObjList(result_dict["searchResult"])
    obj_list = checkForFlaggedAds(obj_list,request.user.id)
    # adStat[0] carries the total hit count when present
    if len(result_dict["adStat"])>0:
        total=result_dict["adStat"][0]
        shown=len(obj_list)
    else:
        total=0
        shown=0
    # statistics for map display and graph
    jsonStr="".join(result_dict["resultStat"])
    geoloc = []
    if len(jsonStr) > 0 :
        # sparse {location: observation-count} mapping from the data server
        statsJsonSparse=json.loads(jsonStr)
        #get all locations first so database call is only executed once
        locations = [location.encode('UTF8') for location in statsJsonSparse]
        currentLocations = LocationsGeo.objects.filter(location__in=locations)
        missingLocations = frozenset(LocationsMissing.objects.all().values_list('location', flat=True))
        # locations neither geocoded nor already recorded as missing
        missing = frozenset(locations).difference(missingLocations,frozenset(currentLocations.values_list('location', flat=True)))
        if len(missing)>0:
            insert_list = []
            for x in missing:
                insert_list.append(LocationsMissing(location=x))
            LocationsMissing.objects.bulk_create(insert_list)
        # hoist the bound method for the tight loop below
        gappend=geoloc.append
        for loc in currentLocations:
            gappend({'lat': float(loc.lat), 'lon' : float(loc.lon),'obs':statsJsonSparse[loc.location]})
    geoloc=json.dumps(geoloc)
    # pagination
    paginator = Paginator(obj_list, page_size)
    page = request.GET.get("page")
    squerydict=request.GET.copy()
    # ensure EndDate is present so the saved GET string reproduces this search
    squerydict.setdefault('EndDate',search_request_dict["EndDate"])
    searchGet='?'+squerydict.urlencode()
    # strip any page=N so templates can append their own page parameter
    pageGet=re.sub(r'(page=\d+\&{0,1})','',squerydict.urlencode())
    try:
        resultAds = paginator.page(page)
    except PageNotAnInteger:
        # results shown for first time
        resultAds = paginator.page(1)
        # create text description of search for history, then save
        # only when the result is shown for the 1st time
        if len(obj_list) >0:
            permHistory = UserHistoryPermanent(user_id=request.user.id,searchget=request.get_full_path(),type="search")
            permHistory.save()
            if user_pref.save_history==1:
                #set end date to today if it is empty
                search=request.path+searchGet
                # build a human-readable summary of the non-empty criteria
                # (-2 is the sentinel for "no age filter" — TODO confirm against getDataFromRequest)
                if search_request_dict["Age"] != -2:
                    keywordStr = "Age:" + str(search_request_dict["Age"]) + " "
                else:
                    keywordStr = ""
                if search_request_dict["Size"] !="":
                    keywordStr = keywordStr + "Size:" + search_request_dict["Size"] + " "
                if search_request_dict["PhoneNumber"] is not None and len(search_request_dict["PhoneNumber"]) > 0:
                    keywordStr = keywordStr + "PhoneNumber:" + search_request_dict["PhoneNumber"] + " "
                if len(search_request_dict["State"]) > 0:
                    keywordStr = keywordStr + "State:" + ", ".join(search_request_dict["State"]) + " "
                if len(search_request_dict["Location"])>0:
                    keywordStr = keywordStr + "Location:" + ", ".join(search_request_dict["Location"]) + " "
                if len(search_request_dict["Keyword"]) > 0:
                    keywordStr = keywordStr + "Keyword:" + search_request_dict["Keyword"][0] + " "
                if len(search_request_dict["AnyKeyword"]) > 0:
                    keywordStr = keywordStr + "AnyKeyword:" + search_request_dict["AnyKeyword"] + " "
                keywordStr = keywordStr + "StartDate:" + search_request_dict["StartDate"] + " " + "EndDate:" + \
                    search_request_dict["EndDate"]
                newHistory = UserHistory(user_id=request.user.id, searchtext=keywordStr,searchget=search,type="search")
                newHistory.save()
    except EmptyPage:
        # page number beyond the last page: clamp to the last page
        resultAds = paginator.page(paginator.num_pages)
    # replace empty list [] with "", for building GET request in html
    if len(search_request_dict["State"]) == 0:
        search_request_dict["State"] = ""
    else:
        search_request_dict["State"] = search_request_dict["State"][0]
    if len(search_request_dict["Location"]) == 0:
        search_request_dict["Location"] = ""
    else:
        search_request_dict["Location"] = search_request_dict["Location"][0]
    if len(search_request_dict["Keyword"]) == 0:
        search_request_dict["Keyword"] = ""
    else:
        search_request_dict["Keyword"] = search_request_dict["Keyword"][0]
    if search_request_dict["Size"] is None:
        search_request_dict["Size"] = ""
    return render(request,"newresult.html", {"preferences":user_pref,"resultAds":resultAds, "Age":search_request_dict["Age"], "Size":search_request_dict["Size"], "PhoneNumber":search_request_dict["PhoneNumber"], "State":search_request_dict["State"], "Location":search_request_dict["Location"], "Keyword":search_request_dict["Keyword"], "AnyKeyword":search_request_dict["AnyKeyword"], "StartDate":date_dict["StartDate"], "EndDate":date_dict["EndDate"], "regions":regions,"regions_lookup":regions_lookup,"age_list":attr_dict["Age"], "size_list":attr_dict["Size"], "location_list":attr_dict["Location"], "keyword_list":keyword_list, "total":total,"searchGet":searchGet,'pageGet':pageGet, "shown":shown,"Last_Request":search_request_json,"count_data":geoloc})
def get(self, request):
    """Handle a search request (class-based view GET handler).

    Reads the query (``q``) and search type (``s_type``) from GET. For
    authenticated users searching blogs/courses it records the keyword in
    the per-user history (cache + database) and the per-user keyword table,
    updates the global hot-search counters, runs the search and renders
    ``main.html``. Any other ``s_type`` gets an error response.

    NOTE(review): block nesting was reconstructed from whitespace-mangled
    source; the chosen nesting keeps ``histories`` defined for
    unauthenticated users — confirm against the original file.
    """
    # keyword entered by the user on the front end
    keywords_raw = request.GET.get("q", "")
    # strip characters that could break the query / markup
    keywords_raw = re.sub(r',|;|<|>', '', keywords_raw)
    # trim surrounding whitespace
    keywords = keywords_raw.strip()
    # requested search type
    s_type = request.GET.get("s_type", "")
    if s_type == "blog" or s_type == "course":
        if request.user.is_authenticated:
            user = request.user
            # push the keyword into the per-user search history kept in the redis cache
            self.set_history(keywords, user)
            # read the (keyword, score) pairs back out of the cache
            history_zip = self.get_history(user)
            # mirror the cached history into the database
            for history, history_score in history_zip:
                # if a row for (user, keyword) exists update its score, else insert
                try:
                    user_history = UserHistory.objects.get(
                        Q(user=user) & Q(keyword=history))
                except Exception as e:
                    user_history = None
                if user_history:
                    # update existing row
                    user_history.user = user
                    user_history.keyword = history
                    user_history.history_score = history_score
                    user_history.save()
                else:
                    # insert new row
                    user_history = UserHistory()
                    user_history.user = user
                    user_history.keyword = history
                    user_history.history_score = history_score
                    user_history.save()
            # top 10 history entries by score, for display
            history_objects = UserHistory.objects.filter(
                user=user).order_by("-history_score")[:10]
            histories = []
            for i in history_objects:
                histories.append(i.keyword)
            # record (keyword, search type) for this user
            if s_type == 'blog':
                keyword_type = 1
                try:
                    user_keywords = UserKeyWord.objects.get(
                        user=user, keyword=keywords, keyword_type=keyword_type)
                except Exception as e:
                    user_keywords = None
                if user_keywords:
                    user_keywords.user = user
                    user_keywords.keyword = keywords
                    user_keywords.keyword_type = keyword_type
                    # existing keyword: bump its score
                    user_keywords.score = user_keywords.score + 1
                    user_keywords.save()
                else:
                    # new keyword row (score presumably defaults in the model — confirm)
                    user_keywords = UserKeyWord()
                    user_keywords.user = user
                    user_keywords.keyword = keywords
                    user_keywords.keyword_type = keyword_type
                    user_keywords.save()
            elif s_type == 'course':
                keyword_type = 2
                try:
                    user_keywords = UserKeyWord.objects.get(
                        user=user, keyword=keywords, keyword_type=keyword_type)
                except Exception as e:
                    user_keywords = None
                if user_keywords:
                    user_keywords.user = user
                    user_keywords.keyword = keywords
                    user_keywords.keyword_type = keyword_type
                    # same row: bump its score
                    user_keywords.score = user_keywords.score + 1
                    user_keywords.save()
                else:
                    user_keywords = UserKeyWord()
                    user_keywords.user = user
                    user_keywords.keyword = keywords
                    user_keywords.keyword_type = keyword_type
                    user_keywords.save()
        elif s_type == 'graph':
            # NOTE(review): appears unreachable — the enclosing branch only
            # admits "blog"/"course"; placement reconstructed, confirm intent
            pass
        else:
            # anonymous user: no per-user history to show
            histories = None
        # fold the keyword into the global hot-search counters in the cache
        set_topn(keywords)
        # read the hot-search list back out of the cache
        topn_zip = get_topn_zip()
        # current page number, defaulting to 1 on anything non-numeric
        current_page = request.GET.get("p", "1")
        try:
            current_page = int(current_page)
        except:
            current_page = 1
        # run the actual search and collect result/paging/timing data
        hit_list, total_numbers, page_numbers, query_time = search(
            s_type, keywords, current_page)
        # keyword phrase / taxonomy data for the result page
        phrase_dicts = PhrasePipeline().process_phrase(keyword=keywords)
        # refresh the per-user hot-keyword statistics
        TopUserPhrasePipeline().update_users_keywords()
        return render(
            request, 'main.html', {
                "hit_list": hit_list,
                "keywords": keywords,
                "keywords_raw": keywords_raw,
                "histories": histories,
                "current_page": current_page,
                "total_numbers": total_numbers,
                "page_numbers": page_numbers,
                "query_time": query_time,
                "topn_zip": topn_zip,
                "s_type": s_type,
                "phrase_dicts": phrase_dicts
            })
    else:
        return HttpResponse("搜索异常,请输入正确的搜索类型")