def main():
    """CLI entry point: parse arguments and dispatch to insert or search.

    Positional: command (insert|search), host, port, total_requests, interval.
    Optional: -n/--name index name (default 'sweep'), -a/--args extra args as a
    dict-like string, -o/--output-file-path log destination file.
    """
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument("command", choices=["insert", "search"])
        parser.add_argument("host", help="Host address")
        parser.add_argument("port", help="Port on the host", type=int)
        parser.add_argument("total_requests", help="Total requests", type=int)
        parser.add_argument("interval", help="Interval in seconds between requests",
                            type=int)
        parser.add_argument("-n", "--name",
                            help="Starting name of the index. If not given 'sweep' will be used",
                            default="sweep")
        parser.add_argument("-a", "--args",
                            help="Other args in key value format e.g {'key':value, ...}",
                            default=None)
        parser.add_argument("-o", "--output-file-path", help="Output file path",
                            default=None)
        command = parser.parse_args()

        # Log to the requested file when given, otherwise raise the root
        # logger to INFO so messages reach the console.
        if command.output_file_path is not None:
            logging.basicConfig(filename=command.output_file_path,
                                level=logging.INFO)
        else:
            logging.getLogger().setLevel(logging.INFO)

        if command.command == "insert":
            insert.do_insert(command.host, str(command.port),
                             command.total_requests, command.interval,
                             command.name, command.args)
        elif command.command == "search":
            # Was a bare no-op comparison statement before the call; now a
            # real branch guard (choices= already restricts to these two).
            search.do_search(command.host, str(command.port),
                             command.total_requests, command.interval,
                             command.name, command.args)
    except Exception:
        # Top-level boundary: report the full traceback rather than crash silently.
        traceback.print_exc()
def search(request):
    """Render paginated search results for GET parameter ``q``.

    Falls back to a plain message when no query was submitted.
    """
    if 'q' not in request.GET:
        message = 'You submitted an empty form.'
        return HttpResponse(message)

    q = request.GET['q']
    page = request.GET['page'] if 'page' in request.GET else 1

    start = clock()
    search_res = do_search(q, page)
    total_hits = search_res["total"]
    results = [hit["_source"] for hit in search_res["hits"]]
    end = clock()

    context = {
        'results': results,
        'query': q,
        'count': total_hits,
        'time': end - start,
        'page': page,
        'total_page': total_hits / PAGE_SIZE,
        'host': request.META['SERVER_NAME'],
        'port': request.META['SERVER_PORT'],
        'nextpage': int(page) + 1,
    }
    return render(request, 'res_search.html', context)
def search(self, editable):
    '''Search'''
    # NOTE(review): reads self.searchEntry rather than the `editable`
    # argument — presumably intentional; confirm against sibling handlers.
    content = self.searchEntry.get_chars(0, -1)
    keywords = content.split()
    if keywords:
        # Keep only hits that are uninstallable according to the repo cache.
        pkgList = filter(lambda n: n in self.repoCache.uninstallablePkgs,
                         search.do_search(keywords))
        self.entrySearchCallback(PAGE_UNINSTALL, content, pkgList)
def update(self, pkgName):
    '''Update.'''
    # Materialize as a list: filter() is a lazy iterator on Python 3, which
    # would break the len() calls below. On Python 2 filter() already
    # returns a list, so this is behavior-identical there.
    self.pkgList = list(filter(lambda n: n in self.repoCache.uninstallablePkgs,
                               search.do_search(self.keywords)))
    self.topbar.searchCompletion.hide()
    self.topbar.updateTopbar(self.content, len(self.pkgList))
    self.searchView.update(len(self.pkgList))
def search(request):
    """Run a search over the request's GET parameters and render results."""
    query = request.GET
    context = {
        'query': query.get('q'),
        'results': do_search(query),
    }
    return render_to_response('search.html', context, RequestContext(request))
def search(self, editable):
    '''Search'''
    content = editable.get_chars(0, -1)
    terms = content.split()
    if terms:
        hits = search.do_search(terms)
        self.pkgList = hits
        self.topbar.searchCompletion.hide()
        self.topbar.updateTopbar(content, len(hits))
        self.searchView.updateSearch(len(hits))
def search(self, editable):
    '''Search'''
    self.content = editable.get_chars(0, -1)
    self.keywords = self.content.split()
    if len(self.keywords) != 0:
        # list() so the len() calls below work on Python 3, where filter()
        # returns a lazy iterator; identical behavior on Python 2.
        pkgList = list(filter(lambda n: n in self.repoCache.uninstallablePkgs,
                              search.do_search(self.keywords)))
        self.pkgList = pkgList
        self.topbar.searchCompletion.hide()
        self.topbar.updateTopbar(self.content, len(pkgList))
        self.searchView.updateSearch(len(pkgList))
def export(request):
    """ Export should simply be like search, only with XML results. """
    results = do_search(request.GET, return_objects=True)
    rendered = render_to_string('export.xml', {'papers': results['papers']})
    response = HttpResponse(rendered, mimetype='text/xml')
    # Served inline; enabling the header below would force a download instead.
    # response['Content-Disposition'] = 'attachment; filename="brainspell.xml"'
    return response
def isClicked():
    """Search-button handler: look up the entry text and display the results.

    Writes hits (or a not-found message) into the UI via textVar.
    """
    print('Clicked Search')
    query = entryInput.get()
    print(query)
    # Dropped the dead `docs = []` pre-assignment that was immediately
    # overwritten, and the commented-out string-building code.
    docs = search.do_search(query)
    if docs:
        print(docs)
        textVar.set(docs)
    else:
        textVar.set("Bulunamadi!")  # "not found" message shown in the UI
    sys.stdout.flush()
def results():
    """Handle the search form POST; any other method bounces to the index."""
    if request.method != 'POST':
        return redirect(url_for('index'))
    try:
        search_text = request.form['search-text']
        in_order = request.form.get('in-order')
        if not search_text.isalpha():
            return render_template(
                'index.html',
                help_text="Only alphabetic characters are allowed.")
        matches = do_search(search_text, in_order)
        return render_template('index.html',
                               results="results.html",
                               matches=matches,
                               search_text=search_text)
    except Exception as e:
        print(e)
        return render_template('index.html', text="Something went wrong.")
async def subject(ctx, *, query):
    """Search subjects matching *query*; reply with a single embed, a
    detailed embed for one hit, or a paginated embed for many hits."""
    query = query.strip()
    if len(query) > 50:
        raise ValidationError(f"Argument too long, exceeds 50 character limit")

    matches = do_search(query)
    title = f"Displaying search result(s) for '{query}'"

    if not matches:
        await ctx.send(embed=discord.Embed(title=title,
                                           description="No results found",
                                           color=UoM_blue))
        return
    if len(matches) == 1:
        await ctx.send(embed=get_subject_embed_detailed(subject=matches[0]))
        return

    fields = subject_list_to_fields(matches)
    paginator = EmbedPaginator(title=title,
                               description=f"{len(matches)} result(s) found",
                               fields=fields)
    add_paginator(ctx.author, paginator)
    await ctx.send(embed=paginator.make_embed(ctx, page=1))
def search():
    """Full-text search endpoint: renders res_search.html for query ``q``."""
    query = request.args.get('q')
    page = request.args['page'] if 'page' in request.args else 1

    start = time.time()
    search_res = do_search(query, page)
    total_hits = search_res["total"]

    results = []
    for hit in search_res["hits"]:
        source = hit["_source"]
        link = source["people_link"]
        # Derive the display name from the profile link; anonymous otherwise.
        source["people"] = link.split("/")[-1].split("?")[0] if link else u"匿名"
        # Prefer highlighted fragments over the raw field values.
        for field in hit["highlight"].keys():
            source[field] = hit["highlight"][field][0]
        results.append(source)
    end = time.time()

    # NOTE(review): true division — on Python 3 this is a float, which the
    # range() calls below would reject; presumably runs on Python 2. Confirm.
    total_page = total_hits / PAGE_SIZE
    if total_page < 10:
        page_list = range(1, total_page)
    elif int(page) + 10 < total_page:
        page_list = range(int(page), int(page) + 10)
    else:
        page_list = range(page, total_page)
    next_page = 0 if int(page) + 1 >= total_page else int(page) + 1

    return render_template('res_search.html', data={
        'results': results,
        'query': query,
        'count': total_hits,
        'time': end - start,
        'page': page,
        'total_page': total_page or 1,
        'page_list': page_list,
        'previous_page': int(page) - 1,
        'next_page': next_page,
    })
def search():
    """Search endpoint (duplicate of the variant above): render res_search.html
    for query ``q`` with pagination metadata."""
    query = request.args.get('q')
    page = request.args['page'] if 'page' in request.args else 1

    start = time.time()
    search_res = do_search(query, page)
    total_hits = search_res["total"]

    results = []
    for hit in search_res["hits"]:
        src = hit["_source"]
        # Display name comes from the profile link when present.
        src["people"] = (src["people_link"].split("/")[-1].split("?")[0]
                         if src["people_link"] else u"匿名")
        # Swap in highlighted fragments where available.
        for key in hit["highlight"].keys():
            src[key] = hit["highlight"][key][0]
        results.append(src)
    end = time.time()

    # NOTE(review): true division — a float here would break range() on
    # Python 3; presumably this targets Python 2. Confirm.
    total_page = total_hits / PAGE_SIZE
    if total_page < 10:
        page_list = range(1, total_page)
    elif int(page) + 10 < total_page:
        page_list = range(int(page), int(page) + 10)
    else:
        page_list = range(page, total_page)
    next_page = 0 if int(page) + 1 >= total_page else int(page) + 1

    data = {
        'results': results,
        'query': query,
        'count': total_hits,
        'time': end - start,
        'page': page,
        'total_page': total_page or 1,
        'page_list': page_list,
        'previous_page': int(page) - 1,
        'next_page': next_page,
    }
    return render_template('res_search.html', data=data)
def search():
    """Render the search page for GET parameter ``q`` (expanded results)."""
    now = datetime.utcnow()
    hits = do_search(request.args.get('q', u''), expand=True)
    return render_template('search.html', results=hits, now=now,
                           newlimit=newlimit)
def search_word(word):
    """Thin wrapper: delegate the lookup of *word* to search.do_search."""
    result = search.do_search(word)
    return result
def search():
    """Render search results for ``q``, newest first by result datetime
    (results lacking a datetime attribute sort as 'now')."""
    now = datetime.utcnow()
    hits = sorted(do_search(request.args.get('q', u''), expand=True),
                  key=lambda r: getattr(r, 'datetime', now))
    # Sort ascending then reverse in place, matching the original's tie
    # ordering (reverse=True on a stable sort would order ties differently).
    hits.reverse()
    return render_template('search.html', results=hits, now=now,
                           newlimit=newlimit)