Example #1
def execute_query(self):
    # Guard against an unset query; passing None to the driver would raise.
    if self.DB_QUERY is None:
        ES_LOGGER.warning("Database query is missing")
        return
    try:
        self.DB_CURSOR.execute(self.DB_QUERY, self.DB_ARGS)
    except Exception as e:
        # Roll the transaction back so the connection stays usable.
        self.DB_CONNECT.rollback()
        ES_LOGGER.debug("Database execution error: %s", e)
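For context, a minimal sketch of how this method might be wired up, assuming a DB-API connection such as the standard-library sqlite3; only the DB_* attribute names come from the example above, everything else (class name, logger setup, query) is a hypothetical assumption.

# Hypothetical harness for execute_query; only the DB_* attribute
# names are taken from the original example.
import logging
import sqlite3

ES_LOGGER = logging.getLogger("es_sync")

class QueryRunner(object):
    def __init__(self, connect, query, args):
        self.DB_CONNECT = connect
        self.DB_CURSOR = connect.cursor()
        self.DB_QUERY = query
        self.DB_ARGS = args

    # execute_query from Example #1 would sit here.

runner = QueryRunner(sqlite3.connect(":memory:"), "SELECT ?", ("demo",))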
Example #2
from elasticsearch import Elasticsearch, helpers

# ES_LOGGER and BULK_LENGTH are module-level globals in the original project.
def bulk_elasticsearch(r_queue, w_lock, dbs, db_name):
    ES_LOGGER.info("Bulk Host: %s DB: %s Start" % (dbs['db_host'], db_name))
    es = Elasticsearch(dbs['es_colony'], retry_on_timeout=True, max_retries=3, timeout=3600)
    flag = True
    bulks = []
    data_lines_number = 0
    bulk_length = 0
    while flag:
        while not r_queue.empty():
            # Hold the writer lock while a batch is being assembled, so the
            # producer pauses until the batch is flushed or the queue drains.
            if bulk_length == 0:
                w_lock.acquire()
            data = r_queue.get()
            data_lines_number += 1
            bulk_length += 1
            if bulk_length >= BULK_LENGTH or r_queue.empty():
                w_lock.release()
            # The producer sends the string 'False' as an end-of-stream
            # sentinel: flush whatever is buffered and stop.
            if isinstance(data, str) and data == 'False':
                try:
                    ES_LOGGER.info("Bulk Host: %s DB: %s Data: %s" % (dbs['db_host'], db_name, bulk_length))
                    # Iterate to drain the generator and force the bulk request.
                    for ok, _ in helpers.streaming_bulk(es, bulks, chunk_size=len(bulks)):
                        pass
                    bulks = []
                except Exception as e:
                    ES_LOGGER.warning(e)
                flag = False
                break
            bulks.append({
                "_index": dbs['index'],
                "_type": dbs['doc_type'],
                "_source": data
            })
            # Flush once the batch is full.
            if bulk_length >= BULK_LENGTH:
                try:
                    ES_LOGGER.info("Bulk Host: %s DB: %s Data: %s" % (dbs['db_host'], db_name, data_lines_number))
                    for ok, _ in helpers.streaming_bulk(es, bulks, chunk_size=len(bulks)):
                        pass
                except Exception as e:
                    ES_LOGGER.warning("Bulk Error! %s", e)
                finally:
                    # Reset either way: leaving bulk_length above the limit
                    # would trigger a second w_lock.release() without a
                    # matching acquire on the next pass.
                    bulks = []
                    bulk_length = 0