def preload_router(self):
    """Consume preload_task messages from RabbitMQ and route them.

    URL-style tasks are merged into batches and dispatched to the
    preload worker; timer/scheduled tasks are only written to the
    ``preload_url`` collection for later processing.
    Errors are logged, never raised (this runs inside a worker loop).
    """
    try:
        # messages = queue.get('preload_task', self.batch_size)
        # Prefer the ratio-weighted fetch; fall back to the plain
        # preload_task queue when it returns nothing.
        messages = self.get_preload_messages()
        if not messages:
            messages = queue.get('preload_task', self.batch_size)
        s1_db = database.s1_db_session()
        logger.debug(
            "preload_router.work process messages begin, count: %d " % len(messages))
        url_dict = {}   # merged dispatchable URL batches, keyed by merge_preload_task
        url_other = []  # timer/scheduled tasks, not dispatched immediately
        for message in messages:
            self.merge_preload_task(message, url_dict, url_other)
        for urls in list(url_dict.values()):
            preload_worker_new.dispatch.delay(urls)
        if url_other:
            # Scheduled tasks: for now only insert them into the DB.
            for url_t in url_other:
                url_t['_id'] = ObjectId()
            s1_db.preload_url.insert(url_other)
        logger.info(
            "preload_router.work process messages end, count: %d " % len(messages))
    except Exception:
        logger.warning('preload_router work error:%s' % traceback.format_exc())
def run(batch_size=500):
    """Drain up to ``batch_size`` messages from the 'email' queue and send each.

    Each message body is a JSON object with at least ``to_addrs`` (list),
    ``title``, ``body`` and ``username``. A message whose first recipient
    does not contain '@' is skipped (logged, not sent).

    :param batch_size: maximum number of queued messages to process.

    A failure in one message is logged and the loop continues; a failure
    around the whole batch is logged and swallowed (worker-loop boundary).
    """
    try:
        messages = queue.get('email', batch_size)
        logger.debug("sendEmail.work process messages begin, count: %d " % len(messages))
        for body in messages:
            try:
                email = json.loads(body)
                to_addrs = email.get("to_addrs", None)
                # Minimal sanity check: first recipient must look like an address.
                if to_addrs and '@' in to_addrs[0]:
                    send(email.get("to_addrs"),
                         email.get("title").encode("UTF-8"),
                         email.get("body").encode("UTF-8"))
                    logger.debug(
                        "sendEmail.send %s to_addrs: %s " % (email.get('username'), email.get("to_addrs")))
                else:
                    logger.debug("sendEmail.send %s to_addrs is None " % email.get("username"))
            except Exception:
                # One malformed message must not abort the whole batch.
                logger.warning('sendEmail body error:%s' % traceback.format_exc())
        logger.debug("sendEmail.work process messages end, count: %d " % len(messages))
    except Exception:
        logger.warning('sendEmail work error:%s' % traceback.format_exc())
def refresh_router(self, queue_name='url_queue'):
    """Pull URL refresh messages from ``queue_name`` and route each one.

    Directory refreshes and url-encoded refreshes are dispatched
    immediately; everything else is merged via ``merge_urlMsg`` and the
    merged batches are dispatched after the whole batch is consumed.
    Errors are logged, never raised.
    """
    try:
        messages = queue.get(queue_name, self.batch_size)
        logger.debug(
            "refresh_router %s .work process messages begin, count: %d " % (queue_name, len(messages)))
        for raw in messages:
            url = json.loads(raw)
            logger.debug('router for url: %s' % url.get('id'))
            if url.get('isdir'):
                # TODO: change delay to queue.put
                dir_refresh.work.delay(url)
            elif url.get('url_encoding'):
                url_refresh.work.delay([url])
            else:
                self.merge_urlMsg(url)
        for batch in list(self.merged_urls.values()):
            url_refresh.work.delay(batch)
        logger.info(
            "refresh_router %s .work process messages end, count: %d " % (queue_name, len(messages)))
    except Exception:
        logger.warning('refresh_router %s work error:%s' % (queue_name, traceback.format_exc()))
def preload_router(self):
    """Consume preload_task messages from RabbitMQ and route them.

    URL-style tasks are merged into batches and dispatched to the
    preload worker; timer/interval/schedule tasks go to
    ``self.scheduleTask``. Errors are logged, never raised (worker loop).
    """
    try:
        # messages = queue.get('preload_task', self.batch_size)
        # Prefer the ratio-weighted fetch; fall back to the plain
        # preload_task queue when it returns nothing.
        messages = self.get_preload_messages()
        if not messages:
            messages = queue.get('preload_task', self.batch_size)
        logger.debug(
            "preload_router.work process messages begin, count: %d " % len(messages))
        url_dict = {}
        url_other = []  # timer, interval, schedule tasks
        for message in messages:
            self.merge_preload_task(message, url_dict, url_other)
        if url_dict:
            # Log the merged dict once; the old code logged the whole
            # dict on every loop iteration.
            logger.debug("preload_router url_dict: %s" % url_dict)
        for urls in url_dict.values():
            preload_worker_new.dispatch.delay(urls)
        if url_other:
            logger.debug("preload_router url_other: %s" % url_other)
            self.scheduleTask(url_other)
        logger.info("preload_router count: %d" % len(messages))
    except Exception:
        logger.warning('preload_router [error]: %s' % traceback.format_exc())
def get_preload_messages(self):
    """Fetch up to ``self.batch_size`` preload messages across queues.

    Queue weights come from the module-level ``PRELOAD_RATIO`` dict,
    refreshed from the ``preload_queue_ratio`` collection (status
    'ready'). Each queue is polled for its proportional share of the
    batch (rounded up), results are concatenated highest-ratio first,
    and any shortfall is refilled from the highest-ratio queues.

    :returns: list of raw messages; ``[]`` on any error (logged).
    """
    try:
        # NOTE(review): local session is unused — the query below goes
        # through self.s1_db. Confirm which session is intended.
        s1_db = database.s1_db_session()
        queue_list = [{i['queue_name']: int(i['queue_ratio'])}
                      for i in self.s1_db.preload_queue_ratio.find({'status': 'ready'})]
        for q in queue_list:
            PRELOAD_RATIO.update(q)
        logger.info('get_preload_messages PRELOAD_RATIO: %s' % (PRELOAD_RATIO, ))
        all_p = sum(PRELOAD_RATIO.values())
        all_m_dict = {}
        for pi, pv in PRELOAD_RATIO.items():
            # Proportional share of the batch, rounded up so small
            # ratios still get at least one slot.
            g_num = int(ceil((pv / float(all_p)) * self.batch_size))
            all_m_dict[pi] = queue.get(pi, g_num)
        # Concatenate per-queue results, highest ratio first.
        sorted_s = sorted(PRELOAD_RATIO.items(), key=lambda x: x[1], reverse=True)
        messages = []
        for k in sorted_s:
            messages.extend(all_m_dict[k[0]])
        # Refill from the highest-ratio queues while still short.
        for x in xrange(len(PRELOAD_RATIO)):
            if len(messages) >= self.batch_size:
                break  # batch full; further iterations were no-ops
            left_n = self.batch_size - len(messages)
            messages.extend(queue.get(sorted_s[x][0], left_n))
        logger.info('get_preload_messages messages count %s' % (len(messages)))
        return messages
    except Exception:
        logger.info('get_preload_messages error %s' % (traceback.format_exc()))
        return []
def main():
    """Fan queued error_task messages out to worker processes.

    Pulls up to 5000 bodies, slices them into ~``concurrency`` chunks
    and starts one ``send_error_mail.run`` Process per chunk, each with
    its own DB session.
    """
    logger.debug("error_email start!")
    concurrency = 10
    bodys = queue.get('error_task', 5000)
    logger.debug("bodys count:%s!" % len(bodys))
    if not bodys:
        return  # nothing queued; same outcome as before, made explicit
    # Floor division: plain '/' becomes true division on Python 3 and a
    # float step would crash range().
    step = len(bodys) // concurrency + 1
    steped_bodys = [bodys[i:i + step] for i in range(0, len(bodys), step)]
    for steped_body in steped_bodys:
        Process(target=send_error_mail.run,
                args=(database.query_db_session(), steped_body, )).start()
def cert_router(self, queue_name='cert_task'):
    """Batch certificate tasks from ``queue_name`` and dispatch them.

    Each message is merged via ``merge_cert``; the merged bundles are
    then handed to the cert transfer worker. Errors are logged, never
    raised.
    """
    try:
        messages = queue.get(queue_name, self.batch_size)
        logger.debug(
            "cert_router %s .work process messages begin, count: %d " % (queue_name, len(messages)))
        for raw in messages:
            task = json.loads(raw)
            logger.debug('router for cert: %s' % task.get('_id'))
            self.merge_cert(task)
        for bundle in list(self.merged_cert.values()):
            cert_trans_worker.dispatch.delay(bundle)
        logger.info(
            "cert_router %s .work process messages end, count: %d " % (queue_name, len(messages)))
    except Exception:
        logger.warning('cert_router %s work error:%s' % (queue_name, traceback.format_exc()))
def physicalrefresh_router(self, queue_name='physical_refresh'):
    """Pull physical-refresh URL messages from ``queue_name`` and dispatch.

    Each message is merged via ``physical_urlMsg``; merged batches are
    dispatched to the physical refresh worker after the whole batch is
    consumed. Errors are logged, never raised.
    """
    try:
        messages = queue.get(queue_name, self.batch_size)
        # Label fixed to match the end/error logs below (was
        # 'refresh_router', a copy-paste slip from refresh_router).
        logger.debug(
            "physical refresh_router %s .work process messages begin, count: %d " % (queue_name, len(messages)))
        for body in messages:
            url = json.loads(body)
            logger.debug('router for url: %s' % url.get('id'))
            self.physical_urlMsg(url)
        for urls in list(self.physical_urls.values()):
            physical_refresh.work.delay(urls)
        logger.info(
            "physical refresh_router %s .work process messages end, count: %d " % (queue_name, len(messages)))
    except Exception:
        logger.warning('physical refresh_router %s work error:%s' % (queue_name, traceback.format_exc()))
def transfer_cert_router(self, queue_name='transfer_cert_task'):
    """Batch certificate-transfer tasks from ``queue_name`` and dispatch.

    Each message is merged via ``merge_transfer_cert``; merged groups
    are handed to the transfer cert worker after the whole batch is
    consumed. Errors are logged, never raised.

    Removed: unused local ``task_set`` and dead commented-out
    package-size dispatch code.
    """
    try:
        messages = queue.get(queue_name, self.batch_size)
        logger.debug(
            "transfer_cert_router %s .work process messages begin, count: %d " % (queue_name, len(messages)))
        for body in messages:
            task = json.loads(body)
            logger.debug('router for transfer_cert: %s' % task.get('_id'))
            self.merge_transfer_cert(task)
        for tasks in list(self.merged_transfer_cert.values()):
            transfer_cert_worker.dispatch.delay(tasks)
    except Exception:
        logger.warning('transfer_cert_router %s work error:%s' % (queue_name, traceback.format_exc()))