def addBrokerServerHandler(event):
    # Register a broker server announced by the event: open a Cabbage
    # connection to it and cache the connection keyed by host name.
    if event and event.brokerServer:
        brokerServer = event.brokerServer
        Logger.info(log, "Adding broker server [%s], URI: [%s]" % (brokerServer.hostName, brokerServer.connectUri))
        cabbage = Cabbage(hostName=brokerServer.hostName, broker=brokerServer.connectUri)

        CabbageHolder.getServerCabbages()[brokerServer.hostName] = cabbage
        Logger.debug(log, "Registered broker servers: [%s]" % CabbageHolder.getServerCabbagesStr())
Example #2
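    # Persist a job's metadata as a znode tree rooted at
    # /<CABBAGE>/<JOBS>/<jobId>: scalar fields first (name, file info, status,
    # run strategy, result backend), then attached files, workers, optional
    # task/result nodes, and finally the broker server and queue.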
    def saveJob(self, job):
        parent = "/" + CABBAGE + "/" + JOBS + "/" + job.jobId
        self.client.create(parent, makepath=True)
        Logger.debug(log, parent)
        self.client.create(parent + "/" + JOB_NAME, value=job.jobName)
        self.client.create(parent + "/" + FILE_PATH, value=job.filePath)
        self.client.create(parent + "/" + FILE_NAME, value=job.fileName)
        self.client.create(parent + "/" + FILE_TYPE, value=job.fileType)
        self.client.create(parent + "/" + STATUS, value=job.status)
        self.client.create(parent + "/" + AUDIT_STATUS, value=job.auditStatus)
        self.client.create(parent + "/" + RUN_STRATEGY, value=job.runStrategy)
        self.client.create(parent + "/" + STRATEGY_VALUE,
                           value=job.strategyValue)
        self.client.create(parent + "/" + ATTACH_FILES)
        self.client.create(parent + "/" + REULST_BACKEND,
                           value=job.resultBackend)
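        # Store each attached file under .../ATTACH_FILES/<fileName>, with the
        # file path as the node value and the file type as a child node.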
        for f in job.attachFiles:
            self.client.create(parent + "/" + ATTACH_FILES + "/" + f.fileName,
                               value=f.filePath,
                               makepath=True)
            self.client.create(parent + "/" + ATTACH_FILES + "/" + f.fileName +
                               "/" + FILE_TYPE,
                               value=f.fileType,
                               makepath=True)
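        # Register each worker under .../WORKS/LIST/<hostName> (value = port)
        # and make sure the WORKS/READIES node exists for readiness tracking.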
        for w in job.works:
            self.client.create(parent + "/" + WORKS + "/" + LIST + "/" +
                               w.hostName,
                               value=w.port,
                               makepath=True)
            if not self.client.isExistPath(parent + "/" + WORKS + "/" +
                                           READIES):
                self.client.create(parent + "/" + WORKS + "/" + READIES)

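        # Pre-create a znode per task, plus a matching slot under the global
        # JOBS/RESULTS tree for this job's task results.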
        if job.tasks:
            for task in job.tasks:
                self.client.create(parent + "/" + TASKS + "/" + task,
                                   makepath=True)
                self.client.create("/" + CABBAGE + "/" + JOBS + "/" + RESULTS +
                                   "/" + job.jobId + "/" + task,
                                   makepath=True)

        self.client.create(parent + "/" + BROKER_SERVER,
                           value=job.brokerServer)

        self.client.create(parent + "/" + QUEUE, value=job.brokerQueue)

        #         for q in job.queue:
        #             self.client.create(parent+"/"+BROKER_SERVER+"/"+QUEUES+"/"+q)

        # Create this node last: if cluster nodes begin watching before the
        # znodes above are fully written, they would see incomplete data, so a
        # separate READIES directory is used as the trigger for node monitoring.
        self.client.create("/" + CABBAGE + "/" + JOBS + "/" + READIES + "/" +
                           job.jobId)
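# The READIES znode written above acts as a publish gate: a watcher that only
# reacts to children of /<CABBAGE>/<JOBS>/<READIES> never observes a
# half-written job tree. A minimal sketch of such a watcher, assuming a plain
# kazoo client rather than this project's wrapper, with an illustrative host
# and lowercase path (the real path constants may differ):
from kazoo.client import KazooClient

zk = KazooClient(hosts="127.0.0.1:2181")  # illustrative ZooKeeper address
zk.start()

@zk.ChildrenWatch("/cabbage/jobs/readies")  # assumed rendering of CABBAGE/JOBS/READIES
def onJobReady(children):
    # Each child is a jobId whose full metadata tree is guaranteed complete.
    for jobId in children:
        print("job ready:", jobId)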
def doAction(jobId):
    # Load the job into the cache on first sight, then read it back.
    if not CacheHolder.getCache().hasKey(jobId, JOBS):
        with storeFactory.store() as store:
            job = store.getJob(jobId)
            CacheHolder.getCache().put(jobId, job, JOBS)

    job = CacheHolder.getCache().get(jobId, JOBS)

    Logger.debug(log, "upload files. job [%s]" % str(job.asDict()))

    # Dispatch to the job's configured result backend; nothing to do if none.
    if job.resultBackend is None:
        return
    elif job.resultBackend == NFS:
        CabbageNfsBackend(jobId).save()
    elif job.resultBackend == HDFS:
        CabbageHdfsBackend(jobId).save()
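    # CabbageHdfsBackend.save (below) mirrors the local result layout
    # <localFilesPath>/<jobId>/result/<YYYYMMDD>/<hourFile> into
    # <HDFS_ROOT_PATH>/<jobId>/<YYYYMMDD>/ on HDFS, removing each local file
    # once it has been uploaded.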
    def save(self):
        hdfsPath = ConfigHolder.getConfig().getProperty(BASE, HDFS_ROOT_PATH)

        if self.jobId:
            localPath = getLocalFilesPath()
            dateStr = getNowDateStr()
            hour = getNowHour()

            if hour == 0:  # at hour 0, upload the previous day's data
                dateStr = formatDate(subDay(getNow(), 1), f="%Y%m%d")

            p = localPath + "/" + self.jobId + "/result/" + dateStr
            Logger.debug(log, "upload file to hdfs. jobId [%s] date [%s]" % (self.jobId, dateStr))
            if not os.path.isdir(p):
                return

            fileNames = os.listdir(p)
            if len(fileNames) == 0:
                return

            client = HdfsClientHolder.getHdfsClient()
            remoteDire = hdfsPath + "/" + self.jobId

            # Ensure <hdfsPath>/<jobId>/<dateStr> exists on HDFS before uploading.
            if not client.isDirectory(remoteDire):
                client.mkdir(remoteDire)
            remoteDire = remoteDire + "/" + dateStr

            if not client.isDirectory(remoteDire):
                client.mkdir(remoteDire)
            Logger.info(log, "hour:%s  files:%s" % (hour, ",".join(fileNames)))
            for fileName in fileNames:
                # Result files are named by hour; skip hours that have not
                # fully elapsed yet (only completed hours are uploaded).
                if hour != 0:
                    if int(fileName) >= hour:
                        continue

                self.uploadToHdfs(client, localPath, self.jobId, hdfsPath, fileName, dateStr)
                os.remove(p + "/" + fileName)
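        # Inferred layout example (names are illustrative): results written
        # during hour 7 of 2024-01-01 live in
        # <localFilesPath>/<jobId>/result/20240101/7 and are uploaded once the
        # node's clock passes 08:00, or at hour 0 as part of the whole
        # previous day's upload.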