def create_indice(self):
    """Build the full 'amac' index by paging through mongo amac.fund in _id order.

    Ensures the xiniudata2 index exists, installs the amac mapping, then
    walks the fund collection in ascending _id batches of 5000, indexing
    each document via create_single. Per-document failures are logged and
    skipped so one bad record does not abort the run.
    """
    if not self.es.indices.exists(["xiniudata2"]):
        self.logger.info('Creating index xiniudata')
        self.es.indices.create("xiniudata2")
        self.logger.info('Created')
    self.logger.info('Start to create indice of amac')
    self.logger.info(str(self.es.info()))
    self.logger.info('ES Config %s' % str(tsbconfig.get_es_config()))
    self.es.indices.put_mapping("amac", mappings.get_amac_mapping(), "xiniudata2")
    mongo_back = dbcon.connect_mongo()
    # Seed the pagination cursor with the smallest _id. The original
    # `limit(1)[0]` raised IndexError on an empty collection; bail out
    # explicitly instead.
    seed = list(mongo_back.amac.fund.find().sort('_id', ASCENDING).limit(1))
    if not seed:
        self.logger.info('amac.fund is empty, nothing to index')
        return
    coordinate = seed[0]
    self.create_single(coordinate)
    self.logger.info('Coordinate prepared')
    coordinate = coordinate.get('_id')
    end_flag = False
    while not end_flag:
        end_flag = True
        # Page strictly forward by _id so each document is visited once;
        # the loop ends when a batch comes back empty.
        for fund in mongo_back.amac.fund.find({'_id': {'$gt': coordinate}}).sort('_id', ASCENDING).limit(5000):
            coordinate = fund['_id']
            end_flag = False
            try:
                self.create_single(fund)
                self.logger.info('%s index created' % fund['_id'])
            except Exception as e:
                # Best-effort: log and continue with the rest of the batch.
                self.logger.exception('Fail to index %s, due to %s' % (coordinate, e))
def __init__(self, es=None):
    """Keep the caller's Elasticsearch client, or build one from the shared config."""
    if es:
        self.es = es
    else:
        es_host, es_port = tsbconfig.get_es_config()
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])
def test():
    """Smoke test: build a client from the default ES config and index investor 122."""
    global logger_searchi
    es_host, es_port = tsbconfig.get_es_config()
    es = Elasticsearch([{'host': es_host, 'port': es_port}])
    InvestorSearchClient(es).create_index(122)
def __init__(self, es=None):
    """Investor search client: ES handle plus a torndb connection.

    :param es: optional prebuilt Elasticsearch client; when omitted one is
        built from the shared config. The None default is new (every sibling
        constructor in this module already has it); passing a client
        explicitly behaves exactly as before.
    """
    global logger_searchi
    if not es:
        host, port = tsbconfig.get_es_config()
        self.es = Elasticsearch([{'host': host, 'port': port}])
    else:
        self.es = es
    self.db = dbcon.connect_torndb()
def __init__(self, es=None):
    """Universal index creator: accept an injected ES client or build one from config."""
    global logger_universali_index
    if es:
        self.es = es
    else:
        es_host, es_port = tsbconfig.get_es_config()
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])
    logger_universali_index.info('Universal Index Creator inited')
def incremental_process_investor_index(index):
    """Consume kafka messages forever and keep the investor index in sync.

    :param index: selects the ES config: 1 -> get_es_config_1,
        2 -> get_es_config_2, 0 -> default; any other value falls back to
        the default config and is logged as an error (previously 0 was
        also — wrongly — logged as illegal).
    Each message payload carries an id ('id' or '_id') and an 'action'
    of 'create' (default) or 'delete'.
    """
    global logger_searchi, consumer_search, producer_search
    if int(index) == 1:
        host, port = tsbconfig.get_es_config_1()
    elif int(index) == 2:
        host, port = tsbconfig.get_es_config_2()
    else:
        host, port = tsbconfig.get_es_config()
        if int(index) != 0:
            # 0 legitimately means "default" (see process_investor_indice);
            # only other values are genuinely illegal.
            logger_searchi.error('Not legal elasticsearch config %s' % index)
    client = InvestorSearchClient(Elasticsearch([{'host': host, 'port': port}]))
    init_kafka(index)
    while True:
        logger_searchi.info('Incremental create search%s index starts' % index)
        try:
            for message in consumer_search:
                try:
                    logger_searchi.info(
                        "%s:%d:%d: key=%s value=%s" %
                        (message.topic, message.partition, message.offset,
                         message.key, message.value))
                    # Parse the payload once instead of three times.
                    payload = json.loads(message.value)
                    iid = payload.get('id') or payload.get('_id')
                    action = payload.get('action', 'create')
                    if action == 'create':
                        client.create_index(iid)
                        logger_searchi.info('incremental %s index created' % iid)
                    elif action == 'delete':
                        client.delete_index(iid)
                        logger_searchi.info('incremental %s index deleted' % iid)
                    consumer_search.commit()
                except Exception as e:
                    # Per-message failures must not kill the consumer loop.
                    logger_searchi.exception('Incr exception# %s \n # %s' % (message, e))
        except Exception as e:
            logger_searchi.exception('Incr outside exception # %s' % e)
def __init__(self, es=None):
    """Bind the shared cs logger and an Elasticsearch client (built from config when absent)."""
    global logger_cs
    self.logger = logger_cs
    if es:
        self.es = es
    else:
        es_host, es_port = tsbconfig.get_es_config()
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])
def __init__(self, es=None, full_feature=True):
    """Search index creator; optionally runs the full IndexCreator setup first.

    Note: when full_feature is true, IndexCreator.__init__ also assigns
    self.es, which is then re-assigned below with the same resolution logic.
    """
    if full_feature:
        IndexCreator.__init__(self, es)
    global logger_search
    self.logger = logger_search
    if es:
        self.es = es
    else:
        es_host, es_port = tsbconfig.get_es_config()
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])
def __init__(self, es=None):
    """Coin client: torndb handle, coin logger, and an ES client."""
    global logger_coin
    self.db = dbcon.connect_torndb()
    self.logger = logger_coin
    if es:
        self.es = es
    else:
        es_host, es_port = tsbconfig.get_es_config()
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])
    self.logger.info('Coin Client inited')
def __init__(self, es=None):
    """Interior client: db handle, logger, domestic location names, ES client."""
    global logger_iid
    self.db = dbcon.connect_torndb()
    self.logger = logger_iid
    # Location ids below 371 are treated as domestic — TODO confirm the cutoff.
    self.domestic_locations = [name for loc_id, name in
                               dbutil.get_all_locations(self.db) if loc_id < 371]
    if es:
        self.es = es
    else:
        es_host, es_port = tsbconfig.get_es_config()
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])
    self.logger.info('Interior Client inited')
def create_indice(self):
    """Index every company id (including NA ones); failures are logged and skipped."""
    db = dbcon.connect_torndb()
    self.logger.info('Start to create indice')
    self.logger.info(str(self.es.info()))
    self.logger.info('ES Config %s' % str(tsbconfig.get_es_config()))
    for company_id in dbutil.get_all_company_id_withna(db):
        try:
            self.create_index(db, company_id)
            self.logger.info('%s index created, %s'
                             % (company_id, dbutil.get_company_name(db, company_id)))
        except Exception as e:
            self.logger.exception('%s failed # %s' % (company_id, e))
def __init__(self, es=None):
    """Index creator: bind the shared index logger and an ES client."""
    global logger_index
    self.logger = logger_index
    if es:
        self.es = es
    else:
        es_host, es_port = tsbconfig.get_es_config()
        # NOTE(review): the join assumes host and port are both strings —
        # confirm against tsbconfig.get_es_config().
        self.logger.info(','.join([es_host, es_port]))
        self.es = Elasticsearch([{'host': es_host, 'port': es_port}])
    self.logger.info(self.es)
    self.logger.info('Index Creator inited')
def create_indice(self):
    """Create the location and keyword completion indices.

    Any failure in either completion build is logged (twice: message, then
    exception object) but not re-raised.
    """
    self.__check()
    db = dbcon.connect_torndb()
    self.logger.info('Start to create indice')
    self.logger.info(str(self.es.info()))
    self.logger.info('ES Config %s' % str(tsbconfig.get_es_config()))
    try:
        self.logger.info('Start to create location & tag indice')
        self.create_indice_completion_locations(db)
        self.create_indice_completion_keywords(db)
    except Exception, e:
        self.logger.exception('location indice & tag failed')
        self.logger.exception(e)
def __init__(self, es=None):
    """Search client: ES handle, stopwords, segmenter, and a mongo connection.

    :param es: optional prebuilt Elasticsearch client; built from config when None.
    """
    # Bug fix: the global statement previously named 'logging_index' (a typo);
    # the module-level logger actually read on the next line is 'logger_index'.
    global logger_index
    self.logger = logger_index
    if es:
        self.es = es
    else:
        host, port = tsbconfig.get_es_config()
        self.es = Elasticsearch([{'host': host, 'port': port}])
    self.logger.info('Search client initiated')
    self.stopwords = stopword.get_stopwords('chinese', 'english')
    self.seg = Segmenter()
    self.mongo = dbcon.connect_mongo()
def create_recent_indice(self):
    """Index every funding returned by get_makeup_funding; failures are logged and skipped."""
    global logger_universale_index
    log = logger_universale_index
    db = dbcon.connect_torndb()
    log.info('Start to create recent funding indice')
    log.info(str(self.es.info()))
    log.info('ES Config %s' % str(tsbconfig.get_es_config()))
    self.__check()
    for funding in dbutil.get_makeup_funding(db):
        try:
            self.create_single(db, funding)
            log.info('%s index created' % funding.id)
        except Exception as e:
            log.exception('%s failed # %s' % (funding.id, e))
def create_indice(self):
    """Install the digital_token mapping, then index every known digital token."""
    db = dbcon.connect_torndb()
    self.logger.info('Start to create indice')
    self.logger.info(str(self.es.info()))
    self.logger.info('ES Config %s' % str(tsbconfig.get_es_config()))
    self.es.indices.put_mapping("digital_token",
                                mappings.get_digital_token_mapping(),
                                "xiniudata")
    self.logger.info('Digital token mapping created')
    for token_id in dbutil.get_all_digital_token(db):
        try:
            self.create_index(token_id)
            self.logger.info('%s index created' % token_id)
        except Exception as e:
            self.logger.exception('%s failed # %s' % (token_id, e))
def before_request():
    """Flask before_request hook: attach db handles and search clients to flask.g."""
    # Local development on macOS targets a local ES node; everywhere else
    # uses the shared deployment config.
    pick_config = config.get_es_local if sys.platform == 'darwin' else config.get_es_config
    host, port = pick_config()
    es_client = Elasticsearch([{'host': host, 'port': port}])
    g.db = dbcon.connect_torndb()
    g.mongo = dbcon.connect_mongo()
    g.amacsc = AMACClient(es_client)
    g.usc = UniversalSearchClient(es_client)
    g.dsc = DealSearchClient(es_client, False)
    g.nsc = NewsSearchClient(es_client)
    g.rsc = ReportSearchClient(es_client)
    g.isc = InteriorSearchClient(es_client)
    g.dtsc = DigitalTokenSearchClient(es_client)
    g.sc = SearchClient(es_client)
    # NOTE(review): opens a second mongo connection just for the log handle —
    # confirm this is intentional rather than reusing g.mongo.
    g.logger = dbcon.connect_mongo().log.search
def create_indice(self):
    """Index every company into the universal index; failures are logged and skipped."""
    global logger_universal_index
    log = logger_universal_index
    self.__check()
    db = dbcon.connect_torndb()
    self.topic_tags = dbutil.get_topic_corresponding_tags(db)
    log.info('Start to create indice')
    log.info(str(self.es.info()))
    log.info('ES Config %s' % str(tsbconfig.get_es_config()))
    for company_id in dbutil.get_all_company_id(db):
        try:
            self.create_single(db, company_id)
            log.info('%s index created, %s'
                     % (company_id, dbutil.get_company_name(db, company_id)))
        except Exception as e:
            log.exception('%s failed # %s' % (company_id, e))
def create_indice(self):
    """Index every investor, with activity scored over (2018-01-01, today).

    Per-investor failures are logged and skipped.
    """
    global logger_universali_index
    self.__check()
    today = datetime.today().date()
    # Bug fix: the format was '%Y-%M-%d' — %M is *minute*, not month — so the
    # boundary parsed to 2018-01-01 00:01 instead of midnight. %m is correct.
    year2018 = datetime.strptime('2018-01-01', '%Y-%m-%d')
    db = dbcon.connect_torndb()
    logger_universali_index.info('Start to create indice')
    logger_universali_index.info(str(self.es.info()))
    logger_universali_index.info('ES Config %s' % str(tsbconfig.get_es_config()))
    for investor in dbutil.get_all_investor_info(db, False):
        try:
            self.create_single(db, investor, (year2018, today))
            logger_universali_index.info('%s index created' % investor.id)
        except Exception as e:
            logger_universali_index.exception('%s failed # %s' % (investor.id, e))
def process_investor_indice(index):
    """Full rebuild of the investor index.

    index picks the ES config: 1 -> get_es_config_1, 2 -> get_es_config_2,
    0 -> default config; any other value is logged as an error and aborts.
    """
    global logger_searchi
    idx = int(index)
    if idx == 1:
        host, port = tsbconfig.get_es_config_1()
    elif idx == 2:
        host, port = tsbconfig.get_es_config_2()
    elif idx == 0:
        host, port = tsbconfig.get_es_config()
    else:
        logger_searchi.error('Not legal elasticsearch config %s' % index)
        return
    client = InvestorSearchClient(Elasticsearch([{'host': host, 'port': port}]))
    if idx == 0:
        logger_searchi.info('Using default client, %s, %s' % (host, client))
    logger_searchi.info('Start to create index')
    db = dbcon.connect_torndb()
    for iid, _ in dbutil.get_all_investor(db):
        try:
            client.create_index(iid)
            logger_searchi.info('%s created' % iid)
        except Exception as e:
            logger_searchi.exception('%s failed' % iid)
def create_incremental(index):
    """Consume kafka messages forever, keeping company and interior indices in sync.

    :param index: selects the ES config: 1 -> get_es_config_1,
        2 -> get_es_config_2, 0 -> default; other values fall back to the
        default config with an error log (previously 0 was also — wrongly —
        logged as illegal).
    Message actions: 'keyword' refreshes keyword completions only;
    'create' reindexes the company; 'delete' either reindexes (when only an
    alias or artifact was removed) or removes the company/completion docs.
    """
    global logger_index, consumer_search, producer_search
    if int(index) == 1:
        host, port = tsbconfig.get_es_config_1()
    elif int(index) == 2:
        host, port = tsbconfig.get_es_config_2()
    else:
        host, port = tsbconfig.get_es_config()
        if int(index) != 0:
            # 0 legitimately means "default"; only other values are illegal.
            logger_index.error('Not legal elasticsearch config %s, using default' % index)
    client = IndexCreator(Elasticsearch([{'host': host, 'port': port}]))
    i_client = InteriorIndexCreator(Elasticsearch([{'host': host, 'port': port}]))
    db = dbcon.connect_torndb()
    init_kafka(index)
    while True:
        logger_index.info('Incremental create search%s index starts' % index)
        try:
            for message in consumer_search:
                try:
                    logger_index.info(
                        "%s:%d:%d: key=%s value=%s" %
                        (message.topic, message.partition, message.offset,
                         message.key, message.value))
                    # Parse the payload once instead of once per field.
                    payload = json.loads(message.value)
                    action = payload.get('action', 'create')
                    # Somebody created a new tag: refresh keyword completions
                    # only. Note: intentionally no commit here (matches the
                    # original's commented-out commit) — TODO confirm.
                    if action == 'keyword':
                        client.create_indice_completion_keywords(db, update=True)
                        logger_index.info('Update keyword')
                        continue
                    cid = payload.get('id') or payload.get('_id')
                    if action == 'create':
                        client.create_single(db, cid)
                        i_client.create_index(db, cid)
                        logger_index.info('incremental %s index created' % cid)
                    elif action == 'delete':
                        if payload.get('aliasId', False):
                            # Only an alias was removed: the company survives,
                            # so rebuild its docs instead of deleting them.
                            client.create_single(db, cid)
                            i_client.create_index(db, cid)
                            logger_index.info('incremental %s alias deleted' % cid)
                        elif payload.get('artifactId', False):
                            # Same for artifact removal.
                            client.create_single(db, cid)
                            i_client.create_index(db, cid)
                            logger_index.info('incremental %s artifact deleted' % cid)
                        else:
                            client.delete_index('company', dbutil.get_company_code(db, cid))
                            client.delete_index('completion', cid)
                            i_client.create_index(db, cid)
                            logger_index.info('incremental %s index deleted' % cid)
                    consumer_search.commit()
                except Exception as e:
                    # Per-message failures must not kill the consumer loop.
                    logger_index.exception('Incr exception# %s \n # %s' % (message, e))
        except Exception as e:
            logger_index.exception('Incr outside exception # %s' % e)