def __init__(self, host, index, type="query", max_size=10, batch_size=10, kwargs=None):
    """
    settings ARE FOR THE ELASTICSEARCH INDEX

    Create (or reuse) the query-storage index described by `kwargs`, then
    insert a dummy record so the document type exists for later probing.
    """
    es = Cluster(kwargs).get_or_create_index(
        schema=convert.json2value(convert.value2json(SCHEMA), leaves=True),
        limit_replicas=True,
        kwargs=kwargs
    )
    #ENSURE THE TYPE EXISTS FOR PROBING
    try:
        es.add({
            "id": "dummy",
            "value": {
                "hash": "dummy",
                "create_time": Date.now(),
                "last_used": Date.now(),
                "query": {}
            }
        })
    except Exception as e:
        # best-effort: the dummy insert failing is logged, not fatal
        # (was Python 2-only `except Exception, e:`; `as` form matches the
        # rest of this file and is valid on Python 2.6+ and 3)
        Log.warning("Problem saving query", cause=e)
def open_test_instance(name, filename=None, es=None, kwargs=None):
    """
    Return a test container.

    When `filename` is given, return a FakeES backed by that file;
    otherwise delete any leftover index on the cluster described by `es`
    and return a freshly created one, aliased to es.index.
    """
    if filename is not None:
        # template placeholder repaired: was a garbled `{(unknown)}`,
        # must be {{filename}} to match the keyword argument below
        Log.note(
            "Using {{filename}} as {{type}}",
            filename=filename,
            type=name
        )
        return FakeES(filename=filename)
    else:
        Log.note(
            "Using ES cluster at {{host}} as {{type}}",
            host=es.host,
            type=name
        )
        cluster = Cluster(es)
        try:
            # remove any index left over from a previous test run
            old_index = cluster.get_index(es)
            cluster.delete_index(old_index.settings.index)
        except Exception as e:
            # a missing index is expected on first run; anything else is fatal
            if "Can not find index" not in e:
                Log.error("unexpected", cause=e)

        output = cluster.create_index(limit_replicas=True, limit_replicas_warning=False, kwargs=es)
        output.delete_all_but_self()
        output.add_alias(es.index)
        return output
def __init__(
    self,
    host,
    index,
    port=9200,
    type="log",
    queue_size=1000,
    batch_size=100,
    kwargs=None,
):
    """
    settings ARE FOR THE ELASTICSEARCH INDEX

    Create (or reuse) the typed log index, alias it, and start the
    background worker that drains the debug-log queue into ES.
    """
    # normalize timeout/retry settings (durations -> seconds) before
    # handing them to the cluster
    kwargs.timeout = Duration(coalesce(kwargs.timeout, "30second")).seconds
    kwargs.retry.times = coalesce(kwargs.retry.times, 3)
    kwargs.retry.sleep = Duration(coalesce(kwargs.retry.sleep, MINUTE)).seconds

    schema = json2value(value2json(SCHEMA), leaves=True)
    self.es = Cluster(kwargs).get_or_create_index(
        schema=schema,
        limit_replicas=True,
        typed=True,
        kwargs=kwargs,
    )
    self.batch_size = batch_size

    alias = coalesce(kwargs.alias, kwargs.index)
    self.es.add_alias(alias)

    self.queue = Queue("debug logs to es", max=queue_size, silent=True)
    self.worker = Thread.run("add debug logs to es", self._insert_loop)
def open_test_instance(name, settings):
    """
    Return a test container.

    When settings.filename is set, return a Fake_ES backed by that file;
    otherwise drop any existing index and create a fresh one on the
    cluster at settings.host.
    """
    if settings.filename:
        # template placeholder repaired: was a garbled `{(unknown)}`,
        # must be {{filename}} to match the keyword argument
        Log.note("Using {{filename}} as {{type}}", filename=settings.filename, type=name)
        return Fake_ES(settings)
    else:
        Log.note("Using ES cluster at {{host}} as {{type}}", host=settings.host, type=name)
        # remove any index left over from a previous test run, then recreate
        Index(settings).delete()
        es = Cluster(settings).create_index(settings, limit_replicas=True)
        return es
def __init__(self, host, index, type="log", max_size=1000, batch_size=100, settings=None):
    """
    settings ARE FOR THE ELASTICSEARCH INDEX

    Create (or reuse) the log index, alias it, fill in retry defaults,
    and start the background thread that drains the debug-log queue.
    """
    schema = convert.json2value(convert.value2json(SCHEMA), leaves=True)
    self.es = Cluster(settings).get_or_create_index(
        schema=schema,
        limit_replicas=True,
        tjson=True,
        settings=settings
    )
    self.batch_size = batch_size
    self.es.add_alias(coalesce(settings.alias, settings.index))
    self.queue = Queue("debug logs to es", max=max_size, silent=True)

    # default retry policy: 3 attempts, one-minute sleep between them
    self.es.settings.retry.times = coalesce(self.es.settings.retry.times, 3)
    self.es.settings.retry.sleep = Duration(coalesce(self.es.settings.retry.sleep, MINUTE))

    Thread.run("add debug logs to es", self._insert_loop)
def __init__(self, host, index, type="log", max_size=1000, batch_size=100, settings=None):
    """
    settings ARE FOR THE ELASTICSEARCH INDEX

    Create (or reuse) the log index and expose a threaded queue that
    batches inserts into it.
    """
    schema = convert.json2value(convert.value2json(SCHEMA), paths=True)
    cluster = Cluster(settings)
    self.es = cluster.get_or_create_index(
        schema=schema,
        limit_replicas=True,
        settings=settings
    )
    self.queue = self.es.threaded_queue(max_size=max_size, batch_size=batch_size)
def __init__(self, host, index, type=DATA_TYPE, max_size=10, batch_size=10, kwargs=None):
    """
    settings ARE FOR THE ELASTICSEARCH INDEX

    Create (or reuse) the untyped index, alias it to `index`, expose a
    threaded insert queue, and wrap the index in a jx query container.
    """
    schema = json2value(convert.value2json(SCHEMA), leaves=True)
    es = Cluster(kwargs).get_or_create_index(
        schema=schema,
        limit_replicas=True,
        typed=False,
        kwargs=kwargs
    )
    es.add_alias(index)

    # batched background inserts, flushed every second
    self.queue = es.threaded_queue(max_size=max_size, batch_size=batch_size, period=1)
    self.es = jx_elasticsearch.new_instance(es.settings)
def open_test_instance(name, settings):
    """
    Return a test container.

    When settings.filename is set, return a FakeES backed by that file;
    otherwise delete any leftover index on the cluster at settings.host
    and return a freshly created one, aliased to settings.index.
    """
    if settings.filename:
        # template placeholder repaired: was a garbled `{(unknown)}`,
        # must be {{filename}} to match the keyword argument
        Log.note("Using {{filename}} as {{type}}", filename=settings.filename, type=name)
        return FakeES(settings)
    else:
        Log.note("Using ES cluster at {{host}} as {{type}}", host=settings.host, type=name)
        cluster = Cluster(settings)
        try:
            # remove any index left over from a previous test run
            old_index = cluster.get_index(kwargs=settings)
            old_index.delete()
        except Exception as e:
            # a missing index is expected on first run; anything else is fatal
            if "Can not find index" not in e:
                Log.error("unexpected", cause=e)

        es = cluster.create_index(limit_replicas=True, limit_replicas_warning=False, kwargs=settings)
        es.delete_all_but_self()
        es.add_alias(settings.index)
        return es
# Map container backend name (the config's "use" value) to its test-utility class.
container_types = Data(elasticsearch=ESUtils, )

try:
    # read_alternate_settings
    # TEST_CONFIG env var may point at an alternate config file; otherwise
    # fall back to DEFAULT_TEST_CONFIG (with a loud alert so the choice is visible).
    filename = os.environ.get("TEST_CONFIG")
    if filename:
        test_jx.global_settings = mo_json_config.get("file://" + filename)
    else:
        Log.alert(
            "No TEST_CONFIG environment variable to point to config file. Using " + DEFAULT_TEST_CONFIG)
        test_jx.global_settings = mo_json_config.get("file://" + DEFAULT_TEST_CONFIG)
    constants.set(test_jx.global_settings.constants)
    Log.start(test_jx.global_settings.debug)

    # the config must name which backend to use
    if not test_jx.global_settings.use:
        Log.error('Must have a {"use": type} set in the config file')

    # probe the cluster once and record its version for the tests
    test_jx.global_settings.elasticsearch.version = Cluster(
        test_jx.global_settings.elasticsearch).version
    # instantiate the utilities for the chosen backend
    test_jx.utils = container_types[test_jx.global_settings.use](
        test_jx.global_settings)
except Exception as e:
    # setup failures are logged but do not stop module import
    Log.warning("problem", cause=e)

Log.alert("Resetting test count")
NEXT = 0