def _initJobs(self):
    """Register this host's runnable jobs and install Celery task routing.

    For every non-deleted job bound to one of this host's broker queues:
    cache the job, sync its client files to disk if missing, record its
    script id, and map each of its tasks to the broker queue/routing key.
    Finally publishes the accumulated map as ``celeryconfig.CELERY_ROUTES``.
    """
    store = StoreHolder.getRetryStore()
    jobs = store.getJobs()
    work = store.getWork(HOST_NAME)
    host_queues = work.queues
    routes = {}
    for job in jobs:
        # Skip deleted jobs and jobs whose queue is not served by this host.
        if job.status == JOB_DELETE or job.brokerQueue not in host_queues:
            continue
        # Bug fix: cache the job so dynamically scaled-out workers see it.
        if not CacheHolder.getCache().hasKey(job.jobId, JOBS):
            CacheHolder.getCache().put(job.jobId, job, JOBS)
        client_dir = ConfigHolder.getConfig().getProperty(BASE, CLIENT_FILE_DIRECTORY)
        job_dir = client_dir + "/" + job.jobId
        # Bug fix (file sync): pull the job's files down when absent locally.
        if not os.path.isdir(job_dir):
            syncJob(job.jobId, store)
        self.addScriptJobId(job.jobId)
        for task_name in job.tasks:
            broker_queue = store.getQueue(job.brokerQueue)
            routes[task_name] = {
                'queue': broker_queue.queueName,
                'routing_key': broker_queue.routingKey,
            }
    celeryconfig.CELERY_ROUTES = routes
def _initJobs(self, cabbage):
    """Register jobs, declare priority queues, and install Celery routing.

    Builds one priority-capable kombu ``Queue`` per broker queue assigned
    to this host, then — for every non-deleted job bound to one of those
    queues — caches the job, syncs its client files if missing, records
    its script id (passing *cabbage* through), and routes each of its
    tasks to the job's broker queue. Publishes the results as
    ``celeryconfig.CELERY_QUEUES`` and ``celeryconfig.CELERY_ROUTES``.
    """
    store = StoreHolder.getRetryStore()
    jobs = store.getJobs()
    work = store.getWork(HOST_NAME)
    host_queues = work.queues
    routes = {}
    # Declare one Celery queue per host queue, honoring its priority cap.
    celery_queues = []
    for queue_name in host_queues:
        queue = store.getQueue(queue_name)
        celery_queues.append(
            Queue(queue.queueName,
                  Exchange(queue.queueName),
                  routing_key=queue.queueName,
                  queue_arguments={'x-max-priority': int(queue.priority)}))
    for job in jobs:
        # Skip deleted jobs and jobs whose queue is not served by this host.
        if job.status == JOB_DELETE or job.brokerQueue not in host_queues:
            continue
        # Bug fix: cache the job so dynamically scaled-out workers see it.
        if not CacheHolder.getCache().hasKey(job.jobId, JOBS):
            CacheHolder.getCache().put(job.jobId, job, JOBS)
        client_dir = ConfigHolder.getConfig().getProperty(BASE, CLIENT_FILE_DIRECTORY)
        job_dir = client_dir + "/" + job.jobId
        # Bug fix (file sync): pull the job's files down when absent locally.
        if not os.path.isdir(job_dir):
            syncJob(job.jobId, store)
        self.addScriptJobId(job.jobId, cabbage)
        for task_name in job.tasks:
            broker_queue = store.getQueue(job.brokerQueue)
            routes[task_name] = {
                'queue': broker_queue.queueName,
                'routing_key': broker_queue.routingKey,
            }
    log.info(routes)
    celeryconfig.CELERY_QUEUES = tuple(celery_queues)
    celeryconfig.CELERY_ROUTES = routes
def _create(self):
    """Factory hook: return the shared retry-capable store instance."""
    return StoreHolder.getRetryStore()