def make_app():
    """Assemble the Tornado Application: Mongo via Motor, Elasticsearch, routes."""
    es_index = "feed"
    mongo_client = motor.motor_tornado.MotorClient("mongodb://mongodb:27017")
    database = mongo_client.test
    search_conn = ESConnection("elasticsearch", 9200)

    routes = [
        (r"/", MainHandler),
        (r"/news/new", NewsCreateHandler),
        (r"/news/create", NewsCreateHandler),
        (r"/news/search", NewsSearchHandler),
        (r"/news/([^/]+)", NewsShowHandler),
    ]

    # db/es/es_index are exposed through Application settings so handlers
    # can reach them via self.settings.
    app_settings = {
        "template_path": os.path.join(os.path.dirname(__file__), "templates"),
        "static_path": os.path.join(os.path.dirname(__file__), "static"),
        "xsrf_cookies": True,
        "cookie_secret": "__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
        "debug": True,
        "db": database,
        "es": search_conn,
        "es_index": es_index,
    }
    return tornado.web.Application(routes, **app_settings)
def setUp(self):
    """Wire an ESConnection and the ESSearch helper, then run the async setup coroutine."""
    super(TestUserSearch, self).setUp()
    loop = self.io_loop
    self.es_connection = ESConnection("localhost", 9200, loop)
    self.us = ESSearch(
        index_mapping=user_index_mapping,
        index_name="index",
        type_name="user",
        analyze_fields=user_analyze_fields,
        none_analyze_fields=user_none_analyze_fields,
        io_loop=loop,
    )
    # Block until the index fixtures created by setup_coro are in place.
    loop.run_sync(self.setup_coro)
class SearchHandler(tornado.web.RequestHandler):
    """Proxy a match_all Elasticsearch query and return the raw JSON response."""

    # One shared connection for every request served by this handler class.
    es_connection = ESConnection(ES_HOST, ES_PORT)

    # NOTE(review): the original stacked @tornado.web.asynchronous on top of
    # @coroutine; the two are redundant together and the decorator was removed
    # in Tornado 6, so it is dropped here. @coroutine alone keeps the request
    # open until the coroutine completes.
    @coroutine
    def get(self, indice="index", tipo="user"):
        """Search *indice*/*tipo* with a match_all query and write the body back.

        Parameters come from the URL route; both have defaults so the bare
        endpoint still works.
        """
        query = {"query": {"match_all": {}}}
        response = yield self.es_connection.search(
            index=indice, type=tipo, source=query
        )
        self.write(json.loads(response.body))
        self.finish()
def test_use_of_custom_http_clients(self):
    """A custom_client handed to ESConnection must receive the fetch() call."""
    fake_client = Mock()
    fake_client.fetch = Mock()
    connection = ESConnection(
        "localhost", "9200", self.io_loop, custom_client=fake_client
    )
    connection.search(
        callback=self.stop,
        source={"query": {"term": {"ID": "171171"}}},
        type="materia",
        index="teste",
    )
    fake_client.fetch.assert_called()
def __init__(self, config, db=None, authnz_wrapper=None, io_loop=None):
    """Keep the config and build both the blocking and the async ES clients.

    ``db`` is only bound when provided; ``authnz_wrapper`` is accepted for
    interface compatibility but not stored here.
    """
    self.debug = False
    self.config = config
    if db is not None:
        self.db = db
    # Blocking client URL is built from the same config keys the async
    # client receives individually below.
    sync_url = (
        '%(ELASTIC_SEARCH_PROTOCOL)s://%(ELASTIC_SEARCH_HOST)s'
        ':%(ELASTIC_SEARCH_PORT)s' % config
    )
    self.syncES = ElasticSearch(sync_url)
    self.asyncES = ESConnection(
        host=config.get('ELASTIC_SEARCH_HOST'),
        port=config.get('ELASTIC_SEARCH_PORT'),
        io_loop=io_loop,
        protocol=config.get('ELASTIC_SEARCH_PROTOCOL'),
    )
    self.index = config.get('ELASTIC_SEARCH_INDEX')
    self.max_retries = config.get('ELASTIC_SEARCH_MAX_RETRIES')
def setUp(self):
    """Open an ESConnection to the local node and record the server version."""
    super(ESConnectionTestBase, self).setUp()
    connection = ESConnection("localhost", "9200", self.io_loop)
    self.es_connection = connection
    self._set_version()
import json

import tornado.ioloop
from tornado import web

from tornadoes import ESConnection

# Module-level connection shared by all requests.
es = ESConnection('local.elasticsearch.com', '9200')


class MainHandler(tornado.web.RequestHandler):
    """Render templates/index.xml from every hit in the sitemap_g1 index."""

    @web.gen.coroutine
    def get(self):
        result = yield es.search(index='sitemap_g1',
                                 body={"query": {"match_all": {}}})
        payload = json.loads(result.body.decode('utf-8'))
        self.render('templates/index.xml', sitemaps=payload['hits']['hits'])


def make_app():
    """Return the Tornado application with its single root route."""
    return web.Application([
        (r"/", MainHandler),
    ])


# Created at import time so the module exposes `app`; the server itself
# only starts when run as a script.
app = make_app()

if __name__ == "__main__":
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()
from greplin import scales
from greplin.scales.meter import MeterStat
import json
from hashlib import md5
import re
import unicodedata

STATS = scales.collection('/index', MeterStat('docs'))

BASE_PATH = '/Users/jisaacso/Documents/projects/bayes-impact/team-thorn/data/escort_all'
FBDUMP = os.path.join(BASE_PATH, 'escort_all.tsv')

# Long timeouts: bulk indexing requests against a local node can be slow.
es = ESConnection('localhost', 9200)
es.httprequest_kwargs = {
    'request_timeout': 1500.00,
    'connect_timeout': 1500.00
}

# Collapses runs of spaces to a single space.
wspaceNuker = re.compile(' +')


def fold_accents(raw):
    """Return *raw* with accents stripped: NFKD-normalize, then drop non-ASCII.

    Accepts either a byte string (decoded as UTF-8 first) or a unicode
    string. This module is Python 2 code (it uses the ``unicode`` builtin).
    """
    # Fixed: was `type(raw) == str` — isinstance is the correct check, and
    # `bytes` (== str on Python 2) states the intent: decode raw bytes.
    if isinstance(raw, bytes):
        raw = unicode(raw, 'utf-8')
    # Fixed: the original joined the encoded string character-by-character,
    # which just rebuilds the same string; encode() already returns it.
    return unicodedata.normalize('NFKD', raw).encode('ascii', 'ignore')
def setUp(self):
    """Create a fresh IOLoop and bind an ESConnection to it."""
    loop = self.get_new_ioloop()
    self.io_loop = loop
    self.es_connection = ESConnection("localhost", "9200", loop)
def setUp(self):
    """Prepare the ESConnection used by the tornado.gen-based tests."""
    super(TestESConnectionWithTornadoGen, self).setUp()
    connection = ESConnection("localhost", "9200", self.io_loop)
    self.es_connection = connection