def sen_insert():
    """Apply a mutation to the caller's sentence index.

    Maps the HTTP method to an index operation (POST=insert, PUT=update,
    DELETE=delete), parses the request body as JSON and hands it to the
    sentence indexer.

    Returns:
        JSON-encoded result template on success, or a ``(json, 500)``
        tuple when the index operation raises.
    """
    if request.method == 'POST':
        op = 'insert'
    elif request.method == 'PUT':
        op = 'update'
    elif request.method == 'DELETE':
        op = 'delete'
    # NOTE(review): any other method leaves `op` unbound (NameError).
    # Presumably the route restricts methods to these three — confirm.
    cr = copy.deepcopy(result)
    try:
        get_eng_indexer(session['qq_openid'], 'sen').doc_op(json.loads(request.data), op)
    except Exception as e:
        cr['message'] = e.message
        return json.dumps(cr), 500
    # Bug fix: the original had no success return, so a successful doc_op
    # yielded None — an error under Flask. Report success explicitly.
    return json.dumps(cr)
def sen_search():
    """Search the caller's sentence index and return the hits as JSON.

    Query parameters (field, words, page_num, page_size) are taken from
    the request via ``args_parse``. Each hit is flattened to the fixed
    schema, with unicode values encoded as UTF-8 byte strings and all
    other values stringified.
    """
    args = args_parse(request)
    cr = copy.deepcopy(result)
    utils.logger.debug(' '.join(['Search Sentence', args['field'], args['words']]))
    sen_searcher = Searcher(get_eng_indexer(session['qq_openid'], 'sen'))
    with sen_searcher.open() as searcher:
        r = sen_searcher.search(searcher, args['field'], args['words'],
                                args['page_num'], args['page_size'])
        cr['data']['schema'] = ['id', 'content', 'tags', 'categories', 'date', 'comments']
        for h in r:
            # Idiom fix: isinstance() instead of `type(...) is unicode`.
            cr['data']['hits'].append({
                s: h[s].encode('utf-8') if isinstance(h[s], unicode) else str(h[s])
                for s in cr['data']['schema']
            })
    return json.dumps(cr)
def iter_search(index_type, field, words):
    """Yield every hit for *words* in *field*, paging through the index.

    Opens the caller's indexer of the given type and walks pages of
    DEFAULT_PAGE_SIZE, yielding hits one at a time. Stops after the
    first page that comes back short.
    """
    paged = Searcher(get_eng_indexer(session['qq_openid'], index_type))
    with paged.open() as handle:
        page = 1
        while True:
            seen = 0
            for hit in paged.search(handle, field, words, page, DEFAULT_PAGE_SIZE):
                seen += 1
                yield hit
            # A short page means the index is exhausted.
            if seen < DEFAULT_PAGE_SIZE:
                return
            page += 1
def doc_search():
    """Search the caller's document index and return the hits as JSON.

    Like ``sen_search`` but for the 'doc' index: each hit's metadata is
    flattened to the fixed schema, then the document body is loaded from
    the file at ``h['path']`` (a JSON file with a 'content' key) and
    attached to the hit. 'content' is appended to the schema once, after
    the loop.
    """
    args = args_parse(request)
    cr = copy.deepcopy(result)
    utils.logger.debug(' '.join(['Search Document', args['field'], args['words']]))
    doc_search = Searcher(get_eng_indexer(session['qq_openid'], 'doc'))
    with doc_search.open() as searcher:
        r = doc_search.search(searcher, args['field'], args['words'],
                              args['page_num'], args['page_size'])
        cr['data']['schema'] = ['id', 'title', 'comments', 'categories', 'tags', 'date']
        for h in r:
            # Idiom fix: isinstance() instead of `type(...) is unicode`.
            hr = {
                s: h[s].encode('utf-8') if isinstance(h[s], unicode) else str(h[s])
                for s in cr['data']['schema']
            }
            # Renamed from `file`, which shadowed the Python 2 builtin.
            with open(h['path'], 'r') as fp:
                hr['content'] = json.loads(fp.read())['content']
            cr['data']['hits'].append(hr)
        cr['data']['schema'].append('content')
    return json.dumps(cr)