Example #1
def create_incremental(index):

    # consume deal ids from Kafka and index each deal into Elasticsearch
    global consumer_deal, logger_search
    if int(index) == 1:
        host, port = tsbconfig.get_es_config_1()
        client = DealSearchClient(Elasticsearch([{
            'host': host,
            'port': port
        }]))
    elif int(index) == 2:
        host, port = tsbconfig.get_es_config_2()
        client = DealSearchClient(Elasticsearch([{
            'host': host,
            'port': port
        }]))
    else:
        client = DealSearchClient()
    init_kafka(index)
    db = dbcon.connect_torndb()
    logger_search.info('Search client initialized')
    while True:
        logger_search.info('Incremental create search%s index starts' % index)
        for message in consumer_deal:
            try:
                logger_search.info(
                    "%s:%d:%d: key=%s value=%s" %
                    (message.topic, message.partition, message.offset,
                     message.key, message.value))
                did = json.loads(message.value).get('dealId')
                client.create_single(db, int(did))
                logger_search.info('incremental deal %s created' % did)
                consumer_deal.commit()
            except Exception as e:
                logger_search.exception(e)
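All of these incremental workers rely on an init_kafka helper (not shown in the examples) that fills in the module-level consumer_deal / consumer_search globals, presumably with auto-commit disabled since the loops commit offsets manually after indexing. A minimal sketch of such a helper with kafka-python; the topic name, group id, and the tsbconfig.get_kafka_config accessor are assumptions for illustration:

from kafka import KafkaConsumer

def init_kafka(index):
    # hypothetical reconstruction: populate the consumer global used above
    global consumer_deal
    brokers = tsbconfig.get_kafka_config()  # assumed accessor, e.g. 'host1:9092,host2:9092'
    consumer_deal = KafkaConsumer(
        'deal_incremental',                  # assumed topic name
        group_id='search_index_%s' % index,
        bootstrap_servers=brokers.split(','),
        enable_auto_commit=False)            # offsets committed manually after indexing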
Example #2
def create_indice_makeup(index):

    # re-create index entries for companies selected by modification time
    global consumer_search, producer_search

    # drop any stale handlers before reconfiguring the 'makeup' logger
    logging.getLogger('makeup').handlers = []
    logger_makeup = logging.getLogger('makeup')
    logger_makeup.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '%(name)-12s %(asctime)s %(levelname)-8s %(message)s',
        '%a, %d %b %Y %H:%M:%S',
    )
    stream_handler = logging.StreamHandler(sys.stderr)
    stream_handler.setFormatter(formatter)
    logger_makeup.addHandler(stream_handler)

    if int(index) == 1:
        host, port = tsbconfig.get_es_config_1()
        client = IndexCreator(Elasticsearch([{'host': host, 'port': port}]))
    elif int(index) == 2:
        host, port = tsbconfig.get_es_config_2()
        client = IndexCreator(Elasticsearch([{'host': host, 'port': port}]))
    else:
        logger_makeup.error('Invalid elasticsearch config %s' % index)
        return

    db = dbcon.connect_torndb()
    for cid in dbutil.get_company_ids_by_modify(db):
        client.create_single(db, cid)
        logger_makeup.info('makeup %s index created' % cid)
    db.close()
Example #3

def create_incremental(index=None):

    global logger_universal_index
    if not index:
        client = UniversalIndexCreator()
    elif int(index) == 1:
        host, port = tsbconfig.get_es_config_1()
        client = UniversalIndexCreator(
            Elasticsearch([{
                'host': host,
                'port': port
            }]))
    elif int(index) == 2:
        host, port = tsbconfig.get_es_config_2()
        client = UniversalIndexCreator(
            Elasticsearch([{
                'host': host,
                'port': port
            }]))
    else:
        client = UniversalIndexCreator()
    db = dbcon.connect_torndb()
    consumer_search = init_kafka()
    while True:
        logger_universal_index.info('Incremental create universal index starts')
        try:
            for message in consumer_search:
                try:
                    logger_universal_index.info(
                        "%s:%d:%d: key=%s value=%s" %
                        (message.topic, message.partition, message.offset,
                         message.key, message.value))
                    payload = json.loads(message.value)
                    action = payload.get('action', 'create')
                    cid = payload.get('id') or payload.get('_id')
                    if action == 'create':
                        client.create_single(db, cid)
                        logger_universal_index.info(
                            'incremental %s index created' % cid)
                    elif action == 'delete':
                        # alias/artifact removal only requires rebuilding the document
                        if payload.get('aliasId', False):
                            client.create_single(db, cid)
                            logger_universal_index.info(
                                'incremental %s alias deleted' % cid)
                        elif payload.get('artifactId', False):
                            client.create_single(db, cid)
                            logger_universal_index.info(
                                'incremental %s artifact deleted' % cid)
                        else:
                            client.delete_index(
                                'universal', dbutil.get_company_code(db, cid))
                            logger_universal_index.info(
                                'incremental %s index deleted' % cid)
                    consumer_search.commit()
                except Exception as e:
                    logger_universal_index.exception(
                        'Incr exception# %s \n # %s' % (message, e))
        except Exception as e:
            logger_universal_index.exception('Incr outside exception # %s' % e)
Example #4
def init_es():

    # on macOS (local development) use the local cluster, otherwise cluster 2
    if sys.platform == 'darwin':
        host, port = config.get_es_local()
    else:
        host, port = config.get_es_config_2()
    return SearchClient(Elasticsearch([{'host': host, 'port': port}]))
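init_es switches clusters on sys.platform: macOS ('darwin') is treated as a developer machine and gets the local cluster. The config accessors are expected to return (host, port) pairs; a minimal sketch with placeholder addresses:

def get_es_local():
    # local development cluster (placeholder address)
    return '127.0.0.1', 9200

def get_es_config_2():
    # shared cluster 2 (hypothetical host name)
    return 'es2.internal.example', 9200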
Example #5
    def __init__(self):

        # wire up MySQL (torndb), MongoDB, and the news search client
        self.db = dbcon.connect_torndb()
        self.mongo = dbcon.connect_mongo()
        host, port = tsbconfig.get_es_config_2()
        es = Elasticsearch([{'host': host, 'port': port}])
        self.search_client = NewsSearchClient(es)

        self.news_read_url = '/api-gen-service/api2/service/x_service/system_news/get_news_details_by_id'
Example #6
def incremental_process_investor_index(index):

    # consume investor ids from Kafka and keep the investor search index current
    global logger_searchi, consumer_search, producer_search

    if int(index) == 1:
        host, port = tsbconfig.get_es_config_1()
        client = InvestorSearchClient(
            Elasticsearch([{
                'host': host,
                'port': port
            }]))
    elif int(index) == 2:
        host, port = tsbconfig.get_es_config_2()
        client = InvestorSearchClient(
            Elasticsearch([{
                'host': host,
                'port': port
            }]))
    else:
        host, port = tsbconfig.get_es_config()
        client = InvestorSearchClient(
            Elasticsearch([{
                'host': host,
                'port': port
            }]))
        logger_searchi.error('Invalid elasticsearch config %s, using default' %
                             index)

    init_kafka(index)

    while True:
        logger_searchi.info('Incremental create search%s index starts' % index)
        try:
            for message in consumer_search:
                try:
                    logger_searchi.info(
                        "%s:%d:%d: key=%s value=%s" %
                        (message.topic, message.partition, message.offset,
                         message.key, message.value))
                    payload = json.loads(message.value)
                    iid = payload.get('id') or payload.get('_id')
                    action = payload.get('action', 'create')
                    if action == 'create':
                        client.create_index(iid)
                        logger_searchi.info('incremental %s index created' %
                                            iid)
                    elif action == 'delete':
                        client.delete_index(iid)
                        logger_searchi.info('incremental %s index deleted' %
                                            iid)
                    consumer_search.commit()
                except Exception as e:
                    logger_searchi.exception('Incr exception# %s \n # %s' %
                                             (message, e))
        except Exception as e:
            logger_searchi.exception('Incr outside exception # %s' % e)
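The consumers in these examples all expect a JSON message value carrying an id (or _id) and an optional action of 'create' or 'delete', defaulting to 'create'. A minimal producer-side sketch with kafka-python; the topic name and broker address are placeholders:

import json
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')  # placeholder broker
# ask the incremental worker to (re)index investor 42
producer.send('investor_incremental',  # assumed topic name
              json.dumps({'id': 42, 'action': 'create'}).encode('utf-8'))
producer.flush()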
Example #7
def process_investor_indice(index):

    global logger_searchi
    if int(index) == 1:
        host, port = tsbconfig.get_es_config_1()
        client = InvestorSearchClient(
            Elasticsearch([{
                'host': host,
                'port': port
            }]))
    elif int(index) == 2:
        host, port = tsbconfig.get_es_config_2()
        client = InvestorSearchClient(
            Elasticsearch([{
                'host': host,
                'port': port
            }]))
    elif int(index) == 0:
        host, port = tsbconfig.get_es_config()
        client = InvestorSearchClient(
            Elasticsearch([{
                'host': host,
                'port': port
            }]))
        logger_searchi.info('Using default client, %s, %s' % (host, client))
    else:
        logger_searchi.error('Invalid elasticsearch config %s' % index)
        return

    logger_searchi.info('Start to create index')
    db = dbcon.connect_torndb()
    for iid, _ in dbutil.get_all_investor(db):
        try:
            client.create_index(iid)
            logger_searchi.info('%s created' % iid)
        except Exception:
            logger_searchi.exception('%s failed' % iid)
Example #8

    # (fragment) apparently the tail of UniversalInvestorIndexCreator.create_indice;
    # 'investor', 'i', 'db', and 'period' are defined in the part lost above
        investor['portfolio_number_annual'] = len(
            list(dbutil.get_investor_portfilio(db, i.id, period)))
        self.es.index(index="xiniudata2",
                      doc_type='investor',
                      id=i.code,
                      body=investor)


if __name__ == '__main__':

    if len(sys.argv) > 1:
        if int(sys.argv[1]) == 1:
            host, port = tsbconfig.get_es_config_1()
            uic = UniversalInvestorIndexCreator(
                Elasticsearch([{
                    'host': host,
                    'port': port
                }]))
        elif int(sys.argv[1]) == 2:
            host, port = tsbconfig.get_es_config_2()
            uic = UniversalInvestorIndexCreator(
                Elasticsearch([{
                    'host': host,
                    'port': port
                }]))
        else:
            uic = UniversalInvestorIndexCreator()
    else:
        uic = UniversalInvestorIndexCreator()
    uic.create_indice()
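Run as a script, the first command-line argument selects the target cluster (1 or 2); any other value, or no argument at all, falls back to UniversalInvestorIndexCreator's default connection. A hypothetical invocation (the script name is an assumption) would be: python create_investor_index.py 2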
Example #9
def create_incremental(index):

    # consume company ids from Kafka and maintain the company and completion indices
    global logger_index, consumer_search, producer_search

    if int(index) == 1:
        host, port = tsbconfig.get_es_config_1()
    elif int(index) == 2:
        host, port = tsbconfig.get_es_config_2()
    else:
        host, port = tsbconfig.get_es_config()
        logger_index.error('Invalid elasticsearch config %s, using default' %
                           index)
    client = IndexCreator(Elasticsearch([{'host': host, 'port': port}]))
    i_client = InteriorIndexCreator(
        Elasticsearch([{
            'host': host,
            'port': port
        }]))

    db = dbcon.connect_torndb()
    init_kafka(index)

    while True:
        logger_index.info('Incremental create search%s index starts' % index)
        try:
            for message in consumer_search:
                try:
                    logger_index.info(
                        "%s:%d:%d: key=%s value=%s" %
                        (message.topic, message.partition, message.offset,
                         message.key, message.value))
                    payload = json.loads(message.value)
                    action = payload.get('action', 'create')
                    # somebody created a new tag: refresh the keyword completions
                    if action == 'keyword':
                        client.create_indice_completion_keywords(db,
                                                                 update=True)
                        # consumer_search.commit()
                        logger_index.info('Update keyword')
                        continue
                    cid = payload.get('id') or payload.get('_id')
                    if action == 'create':
                        client.create_single(db, cid)
                        i_client.create_index(db, cid)
                        logger_index.info('incremental %s index created' % cid)
                    elif action == 'delete':
                        if payload.get('aliasId', False):
                            client.create_single(db, cid)
                            i_client.create_index(db, cid)
                            logger_index.info('incremental %s alias deleted' %
                                              cid)
                        elif payload.get('artifactId', False):
                            client.create_single(db, cid)
                            i_client.create_index(db, cid)
                            logger_index.info(
                                'incremental %s artifact deleted' % cid)
                        else:
                            client.delete_index(
                                'company', dbutil.get_company_code(db, cid))
                            client.delete_index('completion', cid)
                            i_client.create_index(db, cid)
                            logger_index.info('incremental %s index deleted' %
                                              cid)
                    consumer_search.commit()
                except Exception as e:
                    logger_index.exception('Incr exception# %s \n # %s' %
                                           (message, e))
        except Exception as e:
            logger_index.exception('Incr outside exception # %s' % e)
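Offsets are committed only after indexing succeeds, so delivery is at-least-once: a crash between create_single and commit replays the message on restart. That is safe because indexing by a fixed document id is idempotent in Elasticsearch; re-sending the same document simply overwrites it. A minimal sketch, with placeholder index, type, and id values:

from elasticsearch import Elasticsearch

es = Elasticsearch([{'host': 'localhost', 'port': 9200}])
doc = {'name': 'Acme', 'modified': '2018-01-01'}
# indexing the same id twice leaves a single document, so replays are harmless
es.index(index='company', doc_type='company', id='acme', body=doc)
es.index(index='company', doc_type='company', id='acme', body=doc)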