def addBroberServerHandler(event):
    """Register the broker server carried by *event* into the server-cabbage map.

    Builds a Cabbage client for the server's connect URI and stores it under
    the server's host name so later lookups can route to it.
    """
    if not (event and event.brokerServer):
        return
    server = event.brokerServer
    Logger.info(log, "添加队列服务器【%s】,URI:【%s】" % (server.hostName, server.connectUri))
    client = Cabbage(hostName=server.hostName, broker=server.connectUri)
    CabbageHolder.getServerCabbages()[server.hostName] = client
    Logger.debug(log, "添加队列服务器【%s】" % CabbageHolder.getServerCabbagesStr())
def workBrokerQueueChangeHandler(event):
    """Refresh this host's cached work record and restart celery when needed.

    Re-reads the work entry for HOST_NAME from the store, re-caches it, and —
    if the event is a real event and the work is online — restarts celery so
    the new broker-queue configuration takes effect.
    """
    with storeFactory.store() as store:
        work = store.getWork(HOST_NAME)
        CacheHolder.getCache().put(HOST_NAME, work, WORKS)
        if not (event.isEvent and work.status == ON_LINE):
            return
        Logger.info(log, "restart")
        CabbageControlHolder.getCabbageControl().restartCelery()
def workOnlineWatch(data, stat=None, event=None):
    """Zookeeper watch: mark a work node LOST when its online z-node vanishes.

    The watched path has the shape ``/cabbage/works/<hostName>/<ON_LINE>``, so
    the host name is the fourth path segment.
    """
    if event is None:
        return
    if event.type != "DELETED":
        return
    # Node is gone: recover the host name from the deleted path.
    hostName = event.path.split("/")[3]
    work = StoreHolder.getServerStore().getWork(hostName)
    work.status = LOST
    Logger.info(log, "节点:【%s】IP:【%s】已经死亡!" % (hostName, work.ip))
    StoreHolder.getServerStore().updateWorkStatus(work)
def save(self):
    """Move this job's finished hourly result files onto the shared NFS mount.

    Files live under ``<local>/<jobId>/result/<yyyymmdd>/<hour>`` and are
    renamed to ``<host>_<ip>_<role>_<hour>`` on the NFS side so multiple
    nodes cannot collide.  At hour 0 the previous day's date directory is
    submitted instead.  All errors are logged and swallowed (best-effort).
    """
    try:
        if not self.jobId:
            return
        nfsPath = ConfigHolder.getConfig().getProperty(BASE, NFS_DIRECTORY)
        localPath = getLocalFilesPath()
        dateStr = getNowDateStr()
        hour = getNowHour()
        if hour == 0:
            # At hour 0 we are submitting the previous day's data.
            dateStr = formatDate(subDay(getNow(), 1), f="%Y%m%d")
        localPath = localPath + "/" + self.jobId + "/result/" + dateStr
        Logger.info(log, "upload file to nfs. jobId【%s】 date【%s】" % (self.jobId, dateStr))
        if not os.path.isdir(localPath):
            return
        fileNames = os.listdir(localPath)
        if not fileNames:
            return
        remoteDire = nfsPath + "/" + self.jobId + "/" + dateStr
        if not os.path.isdir(remoteDire):
            os.makedirs(remoteDire)
        Logger.info(log, "hour:%s files:%s" % (hour, ",".join(fileNames)))
        for fileName in fileNames:
            # File names are hour numbers; skip the current (still-open) hour.
            if hour != 0 and int(fileName) >= hour:
                continue
            role = MASTER if os.environ[CABBAGE] == MASTER else NODE
            newFileName = HOST_NAME + "_" + LOCAL_IP + "_" + role + "_" + fileName
            if os.path.isfile(localPath + "/" + fileName):
                shutil.move(localPath + "/" + fileName, remoteDire + "/" + newFileName)
    except Exception:
        # Best-effort upload: never let a bad file name or FS error escape.
        Logger.exception(log)
def jobWebWatch(children):
    """Watch callback over the job z-node children: cache each job and rebuild
    per-broker celery routes for every job that is not deleted.

    For each job id in *children* the job is loaded from the store, cached
    (first time only), and — unless its status is JOB_DELETE — its tasks are
    mapped to the queue/routing-key of the job's broker queue.  The collected
    routes are then pushed into a fresh Cabbage client per broker server.
    Per-job failures are logged and skipped; the store is always returned.
    """
    store = storeFactory.getStore()
    try:
        # broker host name -> {taskName: {'queue': ..., 'routing_key': ...}}
        brokers={}
        for jobId in children:
            try:
                job=store.getJob(jobId)#toreHolder.getStore().getJob(jobId)
                if CacheHolder.getCache().hasKey(jobId, JOBS) is False:
                    CacheHolder.getCache().put(jobId, job,JOBS)
                    # kazooClient.addDataListener(parent+"/"+STATUS, jobRunStatusWatch)
                # Shortcut: every job that is not deleted goes into the router.
                if job.status != JOB_DELETE:
                    brokerServer=job.brokerServer
                    routes={}
                    for taskName in job.tasks:
                        que=store.getQueue(job.brokerQueue)
                        routes[taskName]={'queue': que.queueName, 'routing_key': que.routingKey}
                        TaskCacheHolder.getJobCache().put(taskName,job.jobId)
                    if brokerServer in brokers:
                        brokers[brokerServer].update(routes)
                    else:
                        brokers[brokerServer] = routes
            except Exception:
                # One broken job must not stop the rest from being routed.
                Logger.exception( log)
        # Apply the accumulated routes, one Cabbage client per broker server.
        for broker,routes in brokers.items():
            brokerServer = store.getBrokerServer(broker)
            # Bug fix note (original): tasks were being submitted to the wrong
            # celery queue, hence a fresh Cabbage is built instead of reusing
            # the cached one.
            cabbage = Cabbage(hostName=brokerServer.hostName,broker=brokerServer.connectUri)
            cabbage.app.conf.update(CELERY_ROUTES = routes)
            CabbageHolder.getServerCabbages()[brokerServer.hostName] = cabbage
            # CabbageHolder.getServerCabbages()[brokerServer.hostName].getApp().conf.update(CELERY_ROUTES = routes)
            Logger.info(log,"更新队列服务器【%s】ROUTES【%s】"% (brokerServer.hostName,str(routes)))
    except Exception:
        Logger.exception( log)
    finally:
        # Always hand the store back to the factory (sic: project API is
        # spelled "returnStroe").
        storeFactory.returnStroe(store)
def save(self):
    """Upload this job's finished hourly result files to HDFS, then delete
    the local copies.

    Mirrors the NFS variant of ``save``: files live under
    ``<local>/<jobId>/result/<yyyymmdd>/<hour>``; at hour 0 the previous
    day's directory is submitted.  Remote directories are created on demand.

    Consistency fix: like the NFS ``save``, errors (e.g. a non-numeric file
    name hitting ``int(fileName)``, or an HDFS failure) are now logged via
    ``Logger.exception`` instead of propagating to the caller.
    """
    try:
        if not self.jobId:
            return
        hdfsPath = ConfigHolder.getConfig().getProperty(BASE, HDFS_ROOT_PATH)
        localPath = getLocalFilesPath()
        dateStr = getNowDateStr()
        hour = getNowHour()
        if hour == 0:
            # At hour 0 we are submitting the previous day's data.
            dateStr = formatDate(subDay(getNow(), 1), f="%Y%m%d")
        p = localPath + "/" + self.jobId + "/result/" + dateStr
        Logger.debug(log, "upload file to hdfs. jobId【%s】 date【%s】" % (self.jobId, dateStr))
        if not os.path.isdir(p):
            return
        fileNames = os.listdir(p)
        if not fileNames:
            return
        client = HdfsClientHolder.getHdfsClient()
        remoteDire = hdfsPath + "/" + self.jobId
        if not client.isDirectory(remoteDire):
            client.mkdir(remoteDire)
        remoteDire = remoteDire + "/" + dateStr
        if not client.isDirectory(remoteDire):
            client.mkdir(remoteDire)
        Logger.info(log, "hour:%s files:%s" % (hour, ",".join(fileNames)))
        for fileName in fileNames:
            # File names are hour numbers; skip the current (still-open) hour.
            if hour != 0 and int(fileName) >= hour:
                continue
            self.uploadToHdfs(client, localPath, self.jobId, hdfsPath, fileName, dateStr)
            # Local copy is only removed after a successful upload.
            os.remove(p + "/" + fileName)
    except Exception:
        Logger.exception(log)
def monitor(cabbage):
    """Attach celery event monitoring to the given cabbage client's app.

    Bug fix: the original log line referenced ``brokerServer``, a name that
    does not exist in this scope, so every call raised ``NameError`` before
    monitoring started.  Log the cabbage's own identity instead.
    NOTE(review): assumes Cabbage exposes the ``hostName``/``broker`` values
    it is constructed with — confirm against the Cabbage class.
    """
    Logger.info(log, "添加监控【%s】,URI:【%s】" % (cabbage.hostName, cabbage.broker))
    cabbage_monitor(cabbage.getApp())
    Logger.info(log, "添加监控结束")