Example #1
 def connection(self):
     ctx = stack.top
     if ctx is not None:
         if not hasattr(ctx, 'esclient_connection'):
             ctx.esclient_connection = esclient.ESClient(
                 current_app.config['ELASTICSEARCH_URL'])
         return ctx.esclient_connection
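For context, a method like the connection one in Example #1 usually lives on a small Flask extension class. The sketch below is an outline under stated assumptions, not the original project's code: the FlaskES class name is hypothetical, and stack is taken to be Flask's application context stack (flask._app_ctx_stack, available before Flask 2.3). Only esclient.ESClient and the ELASTICSEARCH_URL key come from the example itself.

import esclient
from flask import current_app, _app_ctx_stack as stack  # pre-2.3 Flask

class FlaskES(object):  # hypothetical extension wrapper
    """Lazily creates one ESClient per application context."""

    def __init__(self, app=None):
        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        # Default URL used when the application does not configure one.
        app.config.setdefault('ELASTICSEARCH_URL', 'http://localhost:9200/')

    # ... the connection method shown in Example #1 would go here ...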
Example #2
def main():
    config = parse_args(sys.argv[1:])
    print config

    es_client = es.ESClient(hosts=config.hosts.split(","), timeout=30)

    print "\nVerifying counts..."

    tenant_ids = []
    if config.tenantIds:
        tenant_ids = config.tenantIds.split(',')

    tenant_id_infos = count_index(es_client, config.current_index, "metrics",
                                  tenant_ids)

    if not config.dryrun:
        print "\nReindexing..."
        for tenantId in tenant_id_infos:
            print "\n***** Re-indexing tenantId: {0} started at: {1}".format(
                tenantId[0], datetime.datetime.now())
            es_client.reindex(tenantId[0], config.current_index,
                              config.new_index, config.new_index_type,
                              config.scroll_timeout, config.size,
                              config.transform, config.bulk_thread_count,
                              config.bulk_size)
            print "***** Re-indexing tenantId: {0} ended at  : {1}".format(
                tenantId[0], datetime.datetime.now())
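The parse_args helper in Example #2 is not shown. A plausible sketch with argparse follows; every option name is inferred from an attribute the example reads (hosts, tenantIds, current_index, new_index, new_index_type, scroll_timeout, size, transform, bulk_thread_count, bulk_size, dryrun), while the defaults and help text are assumptions.

import argparse

def parse_args(argv):
    # Option names mirror the attributes used in Example #2; defaults are guesses.
    parser = argparse.ArgumentParser(description="Re-index metrics documents")
    parser.add_argument("--hosts", default="localhost:9200",
                        help="Comma-separated Elasticsearch hosts")
    parser.add_argument("--tenantIds", default="",
                        help="Comma-separated tenant ids to re-index")
    parser.add_argument("--current_index", required=True)
    parser.add_argument("--new_index", required=True)
    parser.add_argument("--new_index_type", default="metrics")
    parser.add_argument("--scroll_timeout", default="1m")
    parser.add_argument("--size", type=int, default=1000)
    parser.add_argument("--transform", default=None)
    parser.add_argument("--bulk_thread_count", type=int, default=1)
    parser.add_argument("--bulk_size", type=int, default=500)
    parser.add_argument("--dryrun", action="store_true")
    return parser.parse_args(argv)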
Example #3
 def setUpClass(self):
     """Create an ESClient"""
     self.es = esclient.ESClient()
     """Delete the test schema, if any. This will prevent any errors
     due to the schema already existing """
     print("Deleting test indexes, if any")
     self.es.delete_index("contacts_esclient_test")
     self.es.delete_index("contacts_esclient_test2")
Example #4
def main():
    es_object = esclient.ESClient("http://*****:*****")
    query_string_args = {'q': 'from:*****@gmail.com AND amazon'}
    result = es_object.search(query_string_args=query_string_args,
                              indexes=['sent_counts'])
    #print_dict(result)
    print result['hits']['total']
Example #5
def elasticsearch(es_port, es_host):
    #Create an ESClient
    global clientes
    clientes = esclient.ESClient("http://" + es_host + ":" + es_port + "/")
    #Parse all indices
    for inputs in configSectionValue('input'):
        # create index
        logger.debug("creating index: %s", inputs)
        clientes.create_index(inputs, body=None)
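The configSectionValue helper used in Example #5 is project-specific and not shown. One plausible implementation, assuming an INI-style file whose input section lists one index name per option (the file name and layout are assumptions):

import ConfigParser  # Python 2 module name, matching the examples above

def configSectionValue(section, config_file='indexer.cfg'):
    """Return every value defined in the given section of the config file."""
    parser = ConfigParser.ConfigParser()
    parser.read(config_file)
    return [value for _name, value in parser.items(section)]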
Example #6
def main():
    args = parse_arguments(sys.argv[1:])

    config = cf.Config(args.env.lower())

    db_client = db.DBClient()
    db_client.connect(config.get_cassandra_nodes())

    es_client = es.ESClient(config.get_es_nodes())

    # 'delete' command has the namespace: Namespace(dryrun=True, metricName='metric_name', tenantId='tenant_id')
    if 'dryrun' in args:
        clear_excess_enums(args, db_client, es_client)
    else:
        list_excess_enums(db_client)
Example #7
def main():

    LOGGER.info("Starting application")

    parser = argparse.ArgumentParser()
    parser.add_argument("--elasticsearch",
                        help="Base URL to contact ElasticSearch",
                        type=str,
                        required=True)
    parser.add_argument("--indexprefix",
                        help="ElasticSearch Index Prefix",
                        default=INDEX_PREFIX,
                        type=str)
    parser.add_argument("--hours-ahead",
                        help="How many hours in advance",
                        default=MAX_AGE_IN_HOURS,
                        type=int)
    args = parser.parse_args()

    elasticsearch_url = args.elasticsearch
    index_prefix = args.indexprefix
    hours_ahead = args.hours_ahead

    LOGGER.info("Contacting ElasticSearch for status: [%s]" %
                (elasticsearch_url))
    es_connection = esclient.ESClient(elasticsearch_url)

    now = datetime.utcnow()
    for hour in xrange(hours_ahead):
        hour_delta = timedelta(hours=hour)
        hour_date = now + hour_delta
        index_name = index_prefix + INDEX_SEPARATOR + "%.4d.%.2d.%.2d-%.2d" % (
            hour_date.year, hour_date.month, hour_date.day, hour_date.hour)

        retry = 0
        success = False
        while retry < MAX_CREATE_RETRY and not success:
            try:
                LOGGER.info("Creating index [%s]" % (index_name))
                es_connection.create_index(index_name)
                LOGGER.debug("Done creating index [%s]" % (index_name))
                success = True
            except Exception, e:
                LOGGER.error('Could not create index %s: [%s]' %
                             (index_name, str(e)))
            finally:
                retry = retry + 1
Example #8
def main():

    LOGGER.info("Starting application")

    parser = argparse.ArgumentParser()
    parser.add_argument("--elasticsearch",
                        help="Base URL to contact ElasticSearch",
                        type=str,
                        required=True)
    parser.add_argument("--ttl",
                        help="Maximum age in days",
                        default=MAX_AGE_IN_DAYS,
                        type=int)
    parser.add_argument("--index",
                        help="Name of the index",
                        default=DEFAULT_INDEX_NAME,
                        type=str,
                        required=False)
    args = parser.parse_args()

    max_ttl_seconds = args.ttl * 24 * 3600
    elasticsearch_url = args.elasticsearch
    index_name = args.index

    LOGGER.info("Contacting ElasticSearch for status: [%s]" %
                (elasticsearch_url))
    es_connection = esclient.ESClient(elasticsearch_url)
    status = es_connection.status()

    LOGGER.info("ElasticSearch status retrieved")

    now = datetime.utcnow()

    if INDICES_KEY in status:
        for index in status[INDICES_KEY]:
            try:

                LOGGER.debug("Processing index [%s]" % (index))

                split_index = index.split(INDEX_SEPARATOR)
                if split_index and len(split_index) > 2:
                    logstash = INDEX_SEPARATOR.join(split_index[:-2])
                    date_str = split_index[-2]
                    shard = split_index[-1]

                    if logstash == index_name:
                        date = datetime.strptime(date_str, '%Y.%m.%d')
                        age_date = now - date
                        age_seconds = age_date.days * 24 * 3600 + age_date.seconds

                        if age_seconds > max_ttl_seconds:
                            retry = 0
                            success = False
                            while retry < MAX_DELETE_RETRY and not success:
                                try:
                                    LOGGER.info(
                                        "Deleting index [%s] since it is older than %d days"
                                        % (index, args.ttl))
                                    success = es_connection.delete_index(index)
                                    LOGGER.debug("Done deleting index [%s]" %
                                                 (index))
                                except Exception, e:
                                    LOGGER.error(
                                        'Could not delete index %s: [%s]' %
                                        (index, str(e)))
                                finally:
                                    retry = retry + 1
Example #9
 def setUpClass(cls):
     """Create an ESClient"""
     cls.es = esclient.ESClient("http://localhost:9200/")
     """Delete the test schema, if any. This will prevent any errors
     due to the schema already existing """
     cls.es.delete_index("contacts_esclient_test")