def onConsumerCallback(self, params): if not self.running: return False if self.reloading: log.info("Replace task to updated.") self.task = self.updatedTask self.updatedTask = None self.reloading = False if params == None or len(params) == 0: log.error("No task params for '%s' (%s)" % (self.taskName, self.threadId)) try: start = time.time() self.task.handler(params) runTime = time.time() - start self.statSumProcessCount += 1 self.statSumProcessTime = runTime if self.statNextPeriod <= time.time(): log.debug("%s - %d messages. Avg. %f sec" % (self.taskName, self.statSumProcessCount, 1.0 * self.statSumProcessTime / self.statSumProcessCount)) self.statSumProcessCount = 0 self.statSumProcessTime = 0 self.statNextPeriod = time.time() + 30 except Exception, e: log.error("Task Exception - %s (%s) - %s" % (self.taskName, e, params)) worker.sentry.client.captureException(data=params) exc_type, exc_value, exc_traceback = sys.exc_info() log.error(traceback.format_exception(exc_type, exc_value, exc_traceback)) return False
def __checkMongo(self): try: mongoqueue = self.db['APATLogQueue'].count() log.debug("QUEUE - %d" % mongoqueue) except pymongo.errors.AutoReconnect: pass except Exception, e: log.error(e)
def releaseSlot(self, slot):
    """Release a previously assigned slot by stopping it.

    :param slot: the slot instance to release
    :return: None
    """
    log.debug("RELEASE SLOT - %s" % slot.taskName)
    slot.stop()
def __initTaskMapTable(self, excludeTasks=None):
    """Create the currently-allocated task map for production workers.

    Builds self.mapWorkerTask = {worker name: [task names kept]}.
    Tasks named in 'excludeTasks' are dropped from the map, except that
    consistent-hashing tasks are always kept; passing 'all' drops every
    non-hashing task.

    :param excludeTasks: task names to exclude (default: no exclusions)
    :return: None
    """
    # BUG-PRONE DEFAULT FIX: a mutable default ([]) is shared across
    # calls; bind a fresh list per call instead.
    if excludeTasks is None:
        excludeTasks = []
    # only for production worker
    workers = dict((name, info) for name, info in self.metaClient.getNodes().iteritems() if info['environment'] == 'production')
    tasks = self.metaClient.getTasks()
    # get task list of worker
    if 'all' in excludeTasks:
        # Reset all task except consistent-hashing
        funcFilter = lambda x: tasks[x]['useHashing']
    else:
        # Reset given or consistent-hashing task
        funcFilter = lambda x: x not in excludeTasks or tasks[x]['useHashing']
    self.mapWorkerTask = dict([(name, filter(funcFilter, info['tasks'])) for name, info in workers.iteritems()])
    log.debug('CurrentTaskMap - %s' % self.mapWorkerTask)
if totalTaskSlot > totalWorkerSlot: log.warn("needs Scale-out!!!!!") # TODO : add fork worker routine. return log.info("Total Required slot : %d / Total Slot : %d" % (totalTaskSlot, totalWorkerSlot)) # Get allocated slots per task for task in tasks: cnt = taskInfos[task]['maxWorker'] try: self.__assignTask(task, cnt) except Exception, e: log.error(e) log.debug("Rebalance result : %s" % self.mapWorkerTask) # Update evtId = str(uuid.uuid4()) try: for worker, newTasks in self.mapWorkerTask.iteritems(): event = {} event['event'] = 'evtWorkerUpdateTask' event['tasks'] = newTasks event['evtId'] = evtId self.metaClient.sendNodeEvent(worker, event) except Exception, e: log.error(e)
def stop(self):
    """Stop this slot: clear the running flag and shut down the consumer.

    :return: True always
    """
    log.debug("Get SLOT STOP! - %s" % self.taskName)
    self.running = False
    consumer = self.amqpConsumer
    if consumer:
        consumer.stop()
    return True
def __metaEventHandler(self, event, evtObj=None):
    """Queue an incoming meta event for asynchronous processing.

    Only production workers act on meta events; others just log and drop.

    :param event: event name
    :param evtObj: optional event payload
    """
    log.debug("Receive Meta Event - %s, %s" % (event, evtObj))
    if self.environment == 'production':
        self.msgQueue.put((event, evtObj))
# NOTE(review): fragment of worker initialization — the construction of the
# 'worker' dict and self.environment happens before this view.
worker['tasks'] = []
try:
    worker['ip'] = socket.gethostbyname(socket.gethostname())
except Exception, e:
    # Hostname not resolvable; fall back to a placeholder address.
    worker['ip'] = '0.0.0.0'
worker['lastEvtId'] = None
self.metaClient = meta.MetaHandler(hosts=ZK_URL, nodeInfo=worker, eventCallback=self.__metaEventHandler)
# update global (server-side) configuration
try:
    workerConfig.update(self.metaClient.getConfig(), keepOrgValue=True)
except Exception, e:
    log.error("Meta Configuration error - %s" % e)
log.debug(dict(workerConfig.items('db')))
if workerConfig.has_section('db'):
    dbmanager.initDbParams(dict(workerConfig.items('db')))
if workerConfig.has_section('server') and workerConfig.has_option('server', 'task_module'):
    self.moduleClient = module.TaskModule(workerConfig.get('server', 'task_module'))
    # NOTE(review): nesting reconstructed — downloadModules() appears to
    # depend on self.moduleClient being set; confirm against original layout.
    if self.environment == 'production':
        self.downloadModules()
self.taskClient = task.TaskManager(self.metaClient)
self.slotPool = slot.SlotPool()
self.slotPool.open()