def __init__(self):
    """Initialize the base Bowler and attach a consumer for processed-ad messages."""
    Bowler.__init__(self)
    bootstrap = '%s:%d' % (config.get('app.kafka.host'), config.get('app.kafka.port'))
    self._consumer = KafkaConsumer(
        AD_PROCESSED_TOPIC,
        client_id='ad_statistic_catcher_reporter',
        group_id='ad_statistic_catcher',
        bootstrap_servers=bootstrap)
    logger.info('Connect to kafka[%s] successfully' % bootstrap)
def __init__(self):
    """Open the MySQL and InfluxDB clients from application config."""
    self._db = db.get_mysql_client(config.get('app.db.mysql'))
    influx_cfg = config.get('app.db.influxdb')
    self._influxdb = influxdb.InfluxDBClient(
        host=influx_cfg['host'],
        port=influx_cfg['port'],
        username=influx_cfg['user'],
        password=influx_cfg['password'],
        database=influx_cfg['database'])
def __init__(self):
    """Initialize the base Commander and attach a gzip-JSON Kafka producer."""
    Commander.__init__(self)
    bootstrap = '%s:%d' % (config.get('app.kafka.host'), config.get('app.kafka.port'))
    # Serialize payloads as UTF-8 JSON; retry transient send failures up to 3 times.
    self._producer = KafkaProducer(
        value_serializer=lambda v: json.dumps(v).encode('utf-8'),
        client_id='agency_commander',
        compression_type='gzip',
        bootstrap_servers=bootstrap,
        retries=3)
def __init__(self):
    """Initialize the base Catcher, connect a result consumer, and start consuming."""
    Catcher.__init__(self)
    bootstrap = '%s:%d' % (config.get('app.kafka.host'), config.get('app.kafka.port'))
    logger.info('Try to connect to kafka...')
    self._consumer = KafkaConsumer(
        AGENCY_COMMAND_REPORTER_TOPIC,
        client_id='commander_result_reporter',
        group_id='commander_result_reporter',
        bootstrap_servers=bootstrap)
    logger.info('Connect to kafka[%s] successfully' % AGENCY_COMMAND_REPORTER_TOPIC)
    # Enter the command-result consuming loop right away.
    self._consumer_command_res()
def fetch_order_info(start, end):
    """Fetch order info from the configured order API.

    :param start: range start substituted into the URL template
    :param end: range end substituted into the URL template
    :return: decoded JSON payload on any 2xx response, otherwise None
    """
    resp = requests.get(config.get('app.api.order.url') % (start, end))
    # Bug fix: `status_code / 100 == 2` is true division on Python 3, so
    # 201/100 == 2.01 and every 2xx except exactly 200 was rejected.
    # Floor division accepts the whole 2xx family on both Python 2 and 3.
    if resp.status_code // 100 == 2:
        return json.loads(resp.content)
    return None
def __init__(self):
    """Wire up the Reporter's Kafka pipeline and run both consumer loops.

    Creates a Mongo client, a consumer for raw ad statistics, a producer for
    processed output, and a second consumer for campaign info, then runs the
    two consumer loops on separate threads.  NOTE(review): the trailing
    join() calls mean this constructor blocks until both loops exit.
    """
    logger.info('Init Reporter...')
    Catcher.__init__(self)
    self._mongo = db.get_mongo_client(config.get('app.db.mongo'))
    kafka_server = '%s:%d' % (config.get('app.kafka.host'), config.get('app.kafka.port'))
    logger.info('Try to connect to kafka...')
    # Consumer for raw (origin) ad statistics.
    self._consumer = KafkaConsumer(
        AD_ORIGIN_STATISTIC_TOPIC,
        client_id='ad_statistic_catcher_reporter',
        group_id='ad_statistic_catcher',
        bootstrap_servers=kafka_server)
    logger.info('Connect to kafka[%s] successfully' % AD_ORIGIN_STATISTIC_TOPIC)
    # Producer for processed statistics: gzip-compressed UTF-8 JSON, 3 retries.
    self._producer = KafkaProducer(
        value_serializer=lambda v: json.dumps(v).encode('utf-8'),
        client_id='ad.statistic.worker',
        compression_type='gzip',
        bootstrap_servers=kafka_server,
        retries=3)
    # Separate consumer for campaign metadata.
    self._campaign_info_consumer = KafkaConsumer(
        AD_CAMPAIGN_INFO_TOPIC,
        client_id='ad_campaign_catcher_reporter',
        group_id='ad_campaign_catcher',
        bootstrap_servers=kafka_server)
    logger.info('Connect to kafka[%s] successfully' % AD_CAMPAIGN_INFO_TOPIC)
    # Run the two consumer loops concurrently; __init__ blocks on the joins.
    t1 = threading.Thread(target=Reporter._consumer_statistic, args=(self, ))
    t2 = threading.Thread(target=Reporter._consumer_campaign_info, args=(self, ))
    t1.start()
    t2.start()
    t1.join()
    t2.join()
def __init__(self):
    """Create the MySQL client and reset the iteration cursor."""
    # The two assignments are independent; iterator starts out unset.
    self._iterator = None
    self._db = db.get_mysql_client(config.get('app.db.mysql'))
#!/usr/bin/env python # encoding: utf-8 """ @author: william @contact: [email protected] @site: http://www.xiaolewei.com @file: __init__.py.py @time: 10/05/2018 16:32 """ from .models import * from .campaign import CampaignHelper from .wxcampaign import WXCampaign from dc.core import db, config _conn = db.get_mysql_client(config.get('app.db.mysql')) Model.set_connection_resolver(_conn)
def run(data_q, command_q):
    """Publish the queues via module globals, then start the HTTP bridge server."""
    global _data_q, _command_q
    _data_q, _command_q = data_q, command_q
    # Blocks here serving HTTP until the bridge shuts down.
    _bridge.run(host='0.0.0.0',
                port=config.get('app.schedule.bowler.httpbowler.port'))
def rollback():
    """Roll back migrations against the configured MySQL database."""
    from dc.migration import rollback
    # Renamed local from `db` to `conn`: `db` shadows the db helper module
    # used elsewhere in this file and invites confusion.
    conn = get_mysql_client(config.get('app.db.mysql'))
    schema = Schema(conn)
    rollback(schema)
def migrate():
    """Apply pending migrations against the configured MySQL database."""
    from dc.migration import migrate
    # Renamed local from `db` to `conn`: `db` shadows the db helper module
    # used elsewhere in this file and invites confusion.
    conn = get_mysql_client(config.get('app.db.mysql'))
    schema = Schema(conn)
    migrate(schema)
def __init__(self, bowler: Bowler, commander: Commander):
    """Keep references to the collaborators and open the database handles.

    :param bowler: data-collection worker this component coordinates
    :param commander: command dispatcher this component coordinates
    """
    self._bowler, self._commander = bowler, commander
    self._db = db.get_mysql_client(config.get('app.db.mysql'))
    self._mongo = db.get_mongo_client(config.get('app.db.mongo'))