Example #1
    async def process(self, data):
        LOG.info('company2data [{}] process {}'.format(self.job, data))
        config = self.config
        if config.ENV == 'DEV':
            return True
        async with aiohttp.ClientSession(loop=self._loop) as session:
            if 'corp_category' in data and int(data['corp_category']) != 1:
                LOG.warning(
                    'for job[{}], corp_category is invalid in {}'.format(
                        self.job, data))
                return False
            await self.pre_trans(session, data)
            # Upload company data (required)
            company_increment_output = self.trans(
                data, config.CONFIG['DATA_MAP']['trans_company_increment_map'],
                None)
            await self.upload_company_increment(session,
                                                company_increment_output)

            # Upload product data (optional)
            if not config.CONFIG['DATA_MAP'].get('trans_product_increment_map',
                                                 None):
                LOG.warning(
                    'for job[{}], trans_product_increment_map is invalid'.
                    format(self.job))
                return
            product_increment_output = self.trans(
                data, config.CONFIG['DATA_MAP']['trans_product_increment_map'],
                None)
            await self.upload_product_increment(session,
                                                product_increment_output)
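Example #1 builds its upload payloads with a self.trans helper driven by field maps from the DATA_MAP configuration. Neither the helper nor the maps appear in these examples, so the sketch below is only a minimal, hypothetical illustration of such a field-mapping step; the map format and keys are assumptions, not the project's real configuration.

# Minimal sketch of a field-mapping helper in the spirit of self.trans().
# The map format (source field -> target field) and the keys below are
# assumptions; the project's real DATA_MAP configuration is not shown here.
def trans(data, field_map, default=None):
    return {dst: data.get(src, default) for src, dst in field_map.items()}

# Hypothetical usage with an assumed company map:
company_map = {'name': 'corp_name', 'site': 'corp_url'}
print(trans({'name': 'Acme Ltd', 'site': 'acme.example'}, company_map))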
Example #2
 async def process(self, data):
     LOG.info('{} process {}'.format(self.job, data))
     async with aiohttp.ClientSession(loop=self._loop) as session:
         # Push to Qixinbao (Yi Wenbin)
         if data.get('last_update') and int(
                 data['last_update']) > 1477929600:
             await self.update_biddings(session, data)
Example #3
 async def add_ccinfo_msg_target(self, session, document):
     url = self.config.CONFIG['GLOBAL']['API']['YUNYING_PUSH_API'] + '/push/add_ccinfo_msg_target'
     ret = await self._lcurl.post(session, url, data=json.dumps(document))
     LOG.info('add ccinfo msg target by {}, result: {}'.format(document, ret))
     if ret and str(ret['errno']) == '0':
         return True
     else:
         return False
Example #4
 async def synchronizeWeb(self, session, data):
     post = [{
         'info_id': str(data['_id']),
         'issue_time': data['last_update']
     }]
     ret = await self._lcurl.post(
         session, self.config.CONFIG['GLOBAL']['API']['WEB_API'] +
         "/seo/multiaddbidid", json.dumps(post))
     LOG.info('web seo result: {}'.format(ret))
Example #5
 async def process(self, data):
     LOG.info('{} process {}'.format(self.job, data))
     async with aiohttp.ClientSession(loop=self._loop) as session:
         # Push web SEO to Lin Yusen
         await self.synchronizeWeb(session, data)
         # Push to Qixinbao (Yi Wenbin)
         await self.update_biddings(session, data)
         # Assemble the data and push it to the ccpush feed
         target = await self.load_msg_target(session, data)
         await self.add_ccinfo_msg_target(session, target)
Example #6
File: pusher.py Project: Dkner/mr2p
 async def getSummaryByName(self, session, name):
     if not name:
         return False
     url = self.config.CONFIG['GLOBAL']['API'][
         'CORP_QUERY_API'] + '/CCAppService/enterprise/getSummaryByName'
     url_param = {'name': name}
     ret = await self._lcurl.get(session, url, url_param)
     LOG.info('get corp summary by name[{}]...result: {}'.format(name, ret))
     if not ret:
         return False
     if str(ret['status']) == '1':
         return ret['data']
     else:
         return False
Example #7
 async def update_biddings(self, session, document):
     if not document:
         return False
     channel = 'HR72FESQE5TZPWDLIAY8D8EX71Z3WGNW'
     timestamp = int(time.time())
     body_str = json.dumps(document)
     sign = Tool.md5('{}{}{}'.format(body_str, channel, timestamp))
     url = '{}?timestamp={}&sign={}'.format(
         self.config.CONFIG['GLOBAL']['API']['RM_UPDATE_BIDDINGS_API'],
         timestamp, sign)
     ret = await self._lcurl.post(
         session=session,
         url=url,
         data=body_str,
         headers={"Content-Type": "application/json"})
     LOG.info('update total bidding by {}, result: {}'.format(document, ret))
     if not ret:
         return False
     if ret['status'] == 200:
         return True
     else:
         return False
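The request in Example #7 is signed by hashing the request body, a channel secret, and the timestamp, then passing the digest as the sign query parameter (assuming Tool.md5 returns a plain hex MD5 digest). A minimal sketch of how the receiving side could verify such a signature under that assumption:

import hashlib

# Hypothetical verification counterpart to the signing in update_biddings.
# Assumes Tool.md5 is a hex MD5 digest and that the server shares the
# same channel secret.
def verify_sign(body_str, timestamp, sign,
                channel='HR72FESQE5TZPWDLIAY8D8EX71Z3WGNW'):
    expected = hashlib.md5(
        '{}{}{}'.format(body_str, channel, timestamp).encode('utf-8')
    ).hexdigest()
    return expected == sign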
Example #8
 async def upload_news(self, session, document):
     if not document:
         return False
     url = self.config.CONFIG['GLOBAL']['API']['BUSINESS_TOPNEWS_API']
     data = {"records": [{"value": document}]}
     ret = await self._lcurl.post(
         session=session,
         url=url,
         data=json.dumps(data),
         headers={"Content-Type": "application/vnd.kafka.json.v1+json"},
         do_log=False)
     LOG.info('upload news by {}, result: {}'.format(document, ret))
     if not ret:
         return False
     else:
         return True
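The payload shape and the application/vnd.kafka.json.v1+json content type in Example #8 match the JSON produce format of a Kafka REST proxy; whether BUSINESS_TOPNEWS_API actually points at such a proxy is an assumption. Under that assumption, several documents could be batched into one request body like this:

import json

# Hypothetical helper: batch several documents into one REST-proxy-style
# produce body ({"records": [{"value": ...}, ...]}).
def build_records_payload(documents):
    return json.dumps({"records": [{"value": doc} for doc in documents]})

# Example: two news documents in a single request body.
print(build_records_payload([{'title': 'a'}, {'title': 'b'}]))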
Example #9
File: pusher.py Project: Dkner/mr2p
    async def worker(self, redis_conn):
        LOG.info('Start worker for job {}'.format(self.job))
        push_redis_key = self.config.CONFIG['GLOBAL']['JOB'][
            self.job]['PUSH_REDIS_KEY']

        while True:
            record = redis_conn.lpop(push_redis_key)
            if record is None:
                await asyncio.sleep(1)
                continue
            data = json.loads(record.decode('utf-8'))
            try:
                await self.process(data)
            except Exception as e:
                LOG.error('Error during data processing: %s' % e)
Example #10
    async def worker(self, message_queue):
        LOG.info('Start worker for job {}'.format(self.job))

        while True:
            try:
                data = message_queue.get(block=False)
            except Empty:
                await asyncio.sleep(1)
                continue
            try:
                if data:
                    await self.process(data)
            except Exception as e:
                LOG.error('Error during data processing: %s' % e)
Example #11
    def process_message(self, message_queue):
        LOG.info('Start message consumer for job {}'.format(self.job))
        task_config = self.config.CONFIG['GLOBAL']['JOB'][self.job]
        processor_num = int(task_config.get('PROCESSOR_NUM', 1))

        # Event loop
        self._loop = asyncio.new_event_loop()
        try:
            for i in range(processor_num):
                asyncio.ensure_future(
                    self.worker(message_queue), loop=self._loop)
            self._loop.run_forever()
            # self._loop.run_until_complete(asyncio.gather(self.worker(redis_conn)))
        except Exception as e:
            LOG.error('Error in event loop: %s' % e)
            # Cancel any tasks that are still pending before closing the loop.
            asyncio.gather(*asyncio.Task.all_tasks(), loop=self._loop).cancel()
            # self._loop.run_until_complete(self._loop.shutdown_asyncgens())
        finally:
            self._loop.close()
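Example #11 starts several worker coroutines on a dedicated event loop and cancels whatever is still pending when the loop stops. A self-contained sketch of the same start/cancel pattern, with a placeholder worker instead of the project's real one:

import asyncio

async def worker(n):
    # Placeholder for the real message-processing worker.
    while True:
        await asyncio.sleep(1)

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
tasks = [asyncio.ensure_future(worker(i), loop=loop) for i in range(2)]
try:
    loop.run_forever()
except KeyboardInterrupt:
    # Cancel outstanding tasks and let them unwind before closing the loop.
    for task in tasks:
        task.cancel()
    loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
finally:
    loop.close()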
Example #12
 async def upload_product_increment(self, session, document):
     if not document:
         return False
     url = self.config.CONFIG['GLOBAL']['API']['COMPANY_INCREMENT_API']
     data = {"topic": "ProductsIntsigInc", "document": document}
     ret = await self._lcurl.post(
         session=session,
         url=url,
         data=json.dumps(data),
         headers={"Content-Type": "application/json"})
     LOG.info('upload product increment by {}, result: {}'.format(
         data, ret))
     if not ret:
         return False
     if ret['code'] == 0:
         return True
     else:
         return False
Example #13
    async def process(self, data):
        LOG.info('Kr2data process {}'.format(data))
        config = self.config
        async with aiohttp.ClientSession(loop=self._loop) as session:
            await self.pre_trans(session, data)
            # Upload company data
            company_increment_output = self.trans(
                data, config.CONFIG['DATA_MAP']['trans_company_increment_map'],
                None)
            await self.upload_company_increment(session,
                                                company_increment_output)

            # Upload product data
            if not config.CONFIG['DATA_MAP'].get('trans_product_increment_map',
                                                 None):
                LOG.warning(
                    'for job[{}], trans_product_increment_map is invalid'.
                    format(self.job))
                return
            product_increment_output = self.trans(
                data, config.CONFIG['DATA_MAP']['trans_product_increment_map'],
                None)
            await self.upload_product_increment(session,
                                                product_increment_output)
Example #14
 def get_message(self, message_queue):
     LOG.info('Start message producer for job {}'.format(self.job))
     task_config = self.config.CONFIG['GLOBAL']['JOB'][self.job]
     redis_schema = task_config.get('REDIS_SCHEMA', 'DEFAULT')
     redis_instance = ConnectionFactory.get_redis_connection(**self.config.CONFIG['GLOBAL']['REDIS'][redis_schema])
     with redis_instance as redis_conn:
         while True:
             if message_queue.qsize() < 10:
                 record = redis_conn.blpop(task_config['PUSH_REDIS_KEY'])
                 if record:
                     LOG.info('put message into queue: {}'.format(record))
                     data = json.loads(record[1].decode('utf-8'))
                     message_queue.put(data)
             else:
                 LOG.info('too busy, have a rest...')
                 time.sleep(1)
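Examples #11 and #14 form a producer/consumer pair: get_message pulls records from Redis into a shared queue and process_message drains that queue with asyncio workers. One possible wiring, assuming the two sides can run as threads of a single process sharing a standard queue.Queue (the real project may split them across processes instead), with a hypothetical run_pusher entry point:

import threading
from queue import Queue

def run_pusher(pusher):
    # Shared queue between the Redis producer and the asyncio consumer.
    message_queue = Queue()
    producer = threading.Thread(
        target=pusher.get_message, args=(message_queue,), daemon=True)
    consumer = threading.Thread(
        target=pusher.process_message, args=(message_queue,), daemon=True)
    producer.start()
    consumer.start()
    producer.join()
    consumer.join()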