def item(request, pid):
    """Render the product-detail page for product *pid*.

    Looks up the product, the session user's profile, and (best-effort)
    a Taobaoke affiliate click-through URL for the item.

    NOTE(review): this function body appears truncated in this file —
    no template render / HttpResponse is visible here.
    """
    product_collection = db['product']
    db_size = product_collection.count()
    suggestion = search_suggestion(product_collection)
    hot_keys = get_hot_keys(15)
    visit_counts()

    # Session-scoped user identity (all optional).
    access_token = request.session.get('access_token', None)
    expires_in = request.session.get('expires_in', None)
    uid = request.session.get('uid', None)

    profile = db['user'].find({"_id": uid})
    if profile.count() == 0:
        profile = None
    else:
        profile = profile[0]

    template = loader.get_template('item.html')
    item_id = pid  # formerly request.GET['id'].encode('utf-8')
    item_visit_counts(item_id)
    itemInfo = product_collection.find({"ProductID": item_id})[0]

    insert_result = None
    taokeurl = None
    try:
        # Cached affiliate info keyed by the numeric taobao item id.
        taokeinfo = db['taoke'].find({"num_iid": int(item_id)})
        if taokeinfo.count() == 0:
            taokeinfo = None
        if not taokeinfo:
            # Cache miss: fetch from the Taobaoke conversion API and store it.
            taoke = taobaoke(
                'taobao.taobaoke.items.convert',
                itemInfo['ProductID'],
                'num_iid,nick,click_url,commission,commission_rate,'
                'commission_num,commission_volume,shop_click_url,'
                'seller_credit_score,volume')
            # NOTE(review): pymongo's insert() returns the inserted _id, not a
            # cursor, so taokeinfo[0] below will raise on the cache-miss path
            # and taokeurl stays None — probably a latent bug; verify intent.
            taokeinfo = db['taoke'].insert(
                taoke['taobaoke_items']['taobaoke_item'][0])
        try:
            taokeurl = taokeinfo[0]['click_url']
        except Exception as what:
            print(what)
            print('error get clickurl')
    except Exception as what:
        print(what)
        print('error get taobaoke info')
from pymongo import Connection
from pymmseg import mmseg
from search_suggestion import search_suggestion
from django.core.paginator import PageNotAnInteger, Paginator, InvalidPage, EmptyPage

# mongodb connection
connection = Connection('localhost', 27017)
db = connection['focusense']
version = settings.VERSION

# HACK: Python 2 workaround so implicit str<->unicode conversions default to
# UTF-8 (needed for the Chinese product data). reload() restores the
# setdefaultencoding attribute that site.py deletes.
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

# Precomputed instant-search suggestion index over the product collection.
suggestion = search_suggestion(db['product'])


def index(request):
    """Render the landing page.

    NOTE(review): this function body appears truncated in this file —
    no template render / HttpResponse is visible here.
    """
    visit_counts()

    # Session-scoped user identity (all optional).
    access_token = request.session.get('access_token', None)
    expires_in = request.session.get('expires_in', None)
    uid = request.session.get('uid', None)

    # Best-effort profile lookup: the page renders fine without one.
    try:
        profile = db['user'].find({"_id": uid})
        if profile.count() == 0:
            profile = None
        else:
            profile = profile[0]
    except Exception:
        # Was a bare `except: pass`; narrowed so KeyboardInterrupt/SystemExit
        # are no longer swallowed. Failure is still deliberately ignored.
        pass
def search(request):
    """Handle a product search request and render search.html.

    Segments the query (mmseg for long CJK input, whitespace split
    otherwise), does a strict AND regex match against ProductName with a
    loose OR fallback, records keyword stats in redis, paginates the
    results, and attaches "most liked" recommendations.
    """
    hot_keys = get_hot_keys(40)
    r.incr('visit:search:count')
    visit_counts()
    suggestion = search_suggestion(db['product'])
    mmseg_keys = []
    collection = db['product']

    # Session-scoped user identity (all optional).
    access_token = request.session.get('access_token', None)
    expires_in = request.session.get('expires_in', None)
    uid = request.session.get('uid', None)
    profile = db['user'].find({"_id": uid})
    if profile.count() == 0:
        profile = None
    else:
        profile = profile[0]

    # chinese word segmentation engine
    mmseg.Dictionary.load_dictionaries()
    db_size = collection.count()
    template = loader.get_template('search.html')
    keywords = request.GET['keys'].encode('utf-8')
    current_url = request.path + '?keys=' + keywords

    seg_keys = []
    patterns = []
    search_str = ''
    mmseg_keys_temp = mmseg.Algorithm(keywords)
    for tok in mmseg_keys_temp:
        mmseg_keys.append(tok.text)

    # Long queries (> 30 bytes of UTF-8) are assumed to be unsegmented CJK
    # text and go through mmseg; short ones are split on spaces.
    if len(keywords) > 30:
        algor = mmseg.Algorithm(keywords)
        for tok in algor:
            seg_keys.append(tok.text)
            patterns.append(re.compile('.*%s.*' % tok.text))
            search_str = search_str + '.*' + tok.text + '.*|'
    else:
        algor = keywords.split(' ')
        for tok in algor:
            # add to redis server statics
            seg_keys.append(tok.strip())
            patterns.append(re.compile('.*%s.*' % tok.strip()))
            search_str = search_str + '.*' + tok + '.*|'

    # Strict search: every token must match ProductName.
    result_list = collection.find({"ProductName": {"$all": patterns}})
    if result_list.count() == 0:
        # Strict search returned nothing; fall back to loose OR matching.
        search_str = search_str.rstrip('|')
        pattern = re.compile(search_str)
        result_list = collection.find({"ProductName": pattern})
    if keywords.strip() == '':
        result_list = None

    if result_list and result_list.count() >= 1:
        # Record each successful keyword in redis (best-effort).
        algor = keywords.split(' ')
        for tok in algor:
            try:
                if tok.strip() != '':
                    r.zincrby('search_keywords', tok)
            except Exception:
                print('error redis search static')

        # Pagination with a sliding window of page links around the
        # current page.
        after_range_num = 3
        befor_range_num = 4
        try:
            page = int(request.GET.get("page", 1))
            if page < 1:
                page = 1
        except ValueError:
            page = 1
        paginator = Paginator(result_list, 10)
        try:
            search_result = paginator.page(page)
        except (EmptyPage, InvalidPage, PageNotAnInteger):
            search_result = paginator.page(paginator.num_pages)
        if page >= after_range_num:
            page_range = paginator.page_range[page - after_range_num:page + befor_range_num]
        else:
            page_range = paginator.page_range[0:int(page) + befor_range_num]
    else:
        # No hits: record the missing keywords for later analysis.
        algor = keywords.split(' ')
        for tok in algor:
            r.zincrby('search_keywords_not_exist', tok)
        search_result = None
        page_range = None

    # "Most liked" recommendations; items missing from the product
    # collection are silently skipped.
    most_like_item = get_most_like_items()
    MostLikeList = []
    for mll in most_like_item:
        try:
            rresult = db['product'].find({"ProductID": mll})[0]
        except Exception:
            rresult = None
        if rresult:
            recommend = {
                "pid": rresult['ProductID'],
                "cover": rresult['MorePhotos'],
                "title": rresult['ProductName'],
                "price": rresult['ProductPrice']
            }
            MostLikeList.append(recommend)
    if len(MostLikeList) == 0:
        MostLikeList = None

    params = Context({
        "MostLikeList": MostLikeList,
        "mmseg_keys": mmseg_keys,
        "hotkeys": hot_keys,
        "current_url": current_url,
        'page_range': page_range,
        'userProfile': profile,
        'result_list': search_result,
        'instant_search': suggestion,
        'search_key_words': seg_keys,
        'system_version': version,
        'database_size': db_size
    })
    return HttpResponse(template.render(params))
# NOTE(review): this is a byte-for-byte duplicate of the `search` view defined
# earlier in this module; being defined later, THIS copy is the one bound at
# import time. One of the two definitions should be deleted.
def search(request):
    """Handle a product search request and render search.html.

    Segments the query (mmseg for long CJK input, whitespace split
    otherwise), does a strict AND regex match against ProductName with a
    loose OR fallback, records keyword stats in redis, paginates the
    results, and attaches "most liked" recommendations.
    """
    hot_keys = get_hot_keys(40)
    r.incr('visit:search:count')
    visit_counts()
    suggestion = search_suggestion(db['product'])
    mmseg_keys = []
    collection = db['product']

    # Session-scoped user identity (all optional).
    access_token = request.session.get('access_token', None)
    expires_in = request.session.get('expires_in', None)
    uid = request.session.get('uid', None)
    profile = db['user'].find({"_id": uid})
    if profile.count() == 0:
        profile = None
    else:
        profile = profile[0]

    # chinese word segmentation engine
    mmseg.Dictionary.load_dictionaries()
    db_size = collection.count()
    template = loader.get_template('search.html')
    keywords = request.GET['keys'].encode('utf-8')
    current_url = request.path + '?keys=' + keywords

    seg_keys = []
    patterns = []
    search_str = ''
    mmseg_keys_temp = mmseg.Algorithm(keywords)
    for tok in mmseg_keys_temp:
        mmseg_keys.append(tok.text)

    # Long queries (> 30 bytes of UTF-8) are assumed to be unsegmented CJK
    # text and go through mmseg; short ones are split on spaces.
    if len(keywords) > 30:
        algor = mmseg.Algorithm(keywords)
        for tok in algor:
            seg_keys.append(tok.text)
            patterns.append(re.compile('.*%s.*' % tok.text))
            search_str = search_str + '.*' + tok.text + '.*|'
    else:
        algor = keywords.split(' ')
        for tok in algor:
            # add to redis server statics
            seg_keys.append(tok.strip())
            patterns.append(re.compile('.*%s.*' % tok.strip()))
            search_str = search_str + '.*' + tok + '.*|'

    # Strict search: every token must match ProductName.
    result_list = collection.find({"ProductName": {"$all": patterns}})
    if result_list.count() == 0:
        # Strict search returned nothing; fall back to loose OR matching.
        search_str = search_str.rstrip('|')
        pattern = re.compile(search_str)
        result_list = collection.find({"ProductName": pattern})
    if keywords.strip() == '':
        result_list = None

    if result_list and result_list.count() >= 1:
        # Record each successful keyword in redis (best-effort).
        algor = keywords.split(' ')
        for tok in algor:
            try:
                if tok.strip() != '':
                    r.zincrby('search_keywords', tok)
            except Exception:
                print('error redis search static')

        # Pagination with a sliding window of page links around the
        # current page.
        after_range_num = 3
        befor_range_num = 4
        try:
            page = int(request.GET.get("page", 1))
            if page < 1:
                page = 1
        except ValueError:
            page = 1
        paginator = Paginator(result_list, 10)
        try:
            search_result = paginator.page(page)
        except (EmptyPage, InvalidPage, PageNotAnInteger):
            search_result = paginator.page(paginator.num_pages)
        if page >= after_range_num:
            page_range = paginator.page_range[page - after_range_num:page + befor_range_num]
        else:
            page_range = paginator.page_range[0:int(page) + befor_range_num]
    else:
        # No hits: record the missing keywords for later analysis.
        algor = keywords.split(' ')
        for tok in algor:
            r.zincrby('search_keywords_not_exist', tok)
        search_result = None
        page_range = None

    # "Most liked" recommendations; items missing from the product
    # collection are silently skipped.
    most_like_item = get_most_like_items()
    MostLikeList = []
    for mll in most_like_item:
        try:
            rresult = db['product'].find({"ProductID": mll})[0]
        except Exception:
            rresult = None
        if rresult:
            recommend = {
                "pid": rresult['ProductID'],
                "cover": rresult['MorePhotos'],
                "title": rresult['ProductName'],
                "price": rresult['ProductPrice']
            }
            MostLikeList.append(recommend)
    if len(MostLikeList) == 0:
        MostLikeList = None

    params = Context({
        "MostLikeList": MostLikeList,
        "mmseg_keys": mmseg_keys,
        "hotkeys": hot_keys,
        "current_url": current_url,
        'page_range': page_range,
        'userProfile': profile,
        'result_list': search_result,
        'instant_search': suggestion,
        'search_key_words': seg_keys,
        'system_version': version,
        'database_size': db_size
    })
    return HttpResponse(template.render(params))