def __init__(self, bus, port, options):
    """Initialise the worker: wire the master proxy, block store, execution
    machinery, process pool and HTTP configuration to the bus, then subscribe.

    Args:
        bus: cherrypy-style message bus that all plugins attach to.
        port: TCP port this worker serves on.
        options: parsed command-line options (master URL, blockstore path,
            hostname override, scheduling classes, static web base, ...).
    """
    plugins.SimplePlugin.__init__(self, bus)
    self.id = None
    self.port = port

    self.master_url = options.master
    self.master_proxy = MasterProxy(self, bus, self.master_url)
    self.master_proxy.subscribe()

    # Prefer an explicitly configured hostname; otherwise ask the master
    # what address it sees us as.
    if options.hostname is None:
        self.hostname = self.master_proxy.get_public_hostname()
    else:
        self.hostname = options.hostname

    self.lighty_conf_template = options.lighty_conf

    if options.blockstore is None:
        # NOTE(review): the TEMP env var (or the fallback string) is passed
        # to mkdtemp as a *prefix*, not a parent directory — confirm intent.
        self.static_content_root = tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-'))
    else:
        self.static_content_root = options.blockstore

    block_store_dir = os.path.join(self.static_content_root, "data")
    try:
        os.mkdir(block_store_dir)
    except OSError:
        # Directory already exists (or is otherwise uncreatable); BlockStore
        # will surface any real failure when it first uses the path.
        pass

    self.block_store = BlockStore(ciel.engine, self.hostname, self.port,
                                  block_store_dir,
                                  ignore_blocks=options.ignore_blocks,
                                  aux_listen_port=options.aux_port)
    self.block_store.subscribe()
    self.block_store.build_pin_set()
    self.block_store.check_local_blocks()
    create_watcher_thread(bus, self.block_store)

    # Deferred-work queue used by the upload manager.
    self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
    self.upload_deferred_work.subscribe()
    self.upload_manager = UploadManager(self.block_store, self.upload_deferred_work)

    self.execution_features = ExecutionFeatures()

    self.scheduling_classes = parse_scheduling_class_option(options.scheduling_classes, options.num_threads)

    self.multiworker = MultiWorker(ciel.engine, self)
    self.multiworker.subscribe()
    self.process_pool = ProcessPool(bus, self)
    self.process_pool.subscribe()
    self.runnable_executors = self.execution_features.runnable_executors.keys()
    self.server_root = WorkerRoot(self)

    # Heartbeat to the master every 30 seconds.
    self.pinger = Pinger(bus, self.master_proxy, None, 30)
    self.pinger.subscribe()

    self.stopping = False
    self.event_log = []
    self.log_lock = Lock()
    self.log_condition = Condition(self.log_lock)

    self.cherrypy_conf = {}
    cherrypy.config.update({"server.thread_pool": 20})
    if options.staticbase is not None:
        self.cherrypy_conf["/skyweb"] = {"tools.staticdir.on": True, "tools.staticdir.dir": options.staticbase}

    self.subscribe()
def __init__(self, bus, hostname, port, options):
    """Set up this worker's plugins, block store, task executor and web
    server configuration, then subscribe everything to the bus.

    Args:
        bus: cherrypy-style message bus that all plugins attach to.
        hostname: externally visible hostname of this worker.
        port: TCP port this worker serves on.
        options: parsed command-line options (master URL, blockstore path,
            static web base, stdlib path, ...).
    """
    plugins.SimplePlugin.__init__(self, bus)
    self.id = None
    self.hostname = hostname
    self.port = port

    self.master_url = options.master
    self.master_proxy = MasterProxy(self, bus, self.master_url)
    self.master_proxy.subscribe()

    # Use the configured block store path, or fall back to a fresh temp dir.
    store_dir = (options.blockstore
                 if options.blockstore is not None
                 else tempfile.mkdtemp(prefix=os.getenv('TEMP', default='/tmp/sw-files-')))
    self.block_store = BlockStore(cherrypy.engine, self.hostname, self.port,
                                  store_dir, ignore_blocks=options.ignore_blocks)
    self.block_store.build_pin_set()

    # Deferred-work queue used by the upload manager.
    self.upload_deferred_work = DeferredWorkPlugin(bus, 'upload_work')
    self.upload_deferred_work.subscribe()
    self.upload_manager = UploadManager(self.block_store, self.upload_deferred_work)

    self.execution_features = ExecutionFeatures()
    self.task_executor = TaskExecutorPlugin(bus, self.block_store, self.master_proxy, self.execution_features, 1)
    self.task_executor.subscribe()

    self.server_root = WorkerRoot(self)

    # Heartbeat to the master every 30 seconds.
    self.pinger = Pinger(bus, self.master_proxy, None, 30)
    self.pinger.subscribe()

    self.stopping = False
    self.event_log = []
    self.log_lock = Lock()
    self.log_condition = Condition(self.log_lock)

    cherrypy.config.update({"server.thread_pool": 20})

    # Mount optional static directories; each entry is (mount point, path).
    self.cherrypy_conf = {}
    for mount, path in (("/skyweb", options.staticbase), ("/stdlib", options.lib)):
        if path is not None:
            self.cherrypy_conf[mount] = {"tools.staticdir.on": True, "tools.staticdir.dir": path}

    self.subscribe()
def worker_process_main(base_dir, task_queue, response_queue):
    """Worker-process entry point: pull tasks from task_queue and execute
    each with a local TaskExecutorPlugin until a ThreadTerminator arrives.

    Args:
        base_dir: directory backing this process's private BlockStore.
        task_queue: queue of incoming task objects (or a ThreadTerminator).
        response_queue: queue over which results are reported to the master.
    """
    proxy = QueueMasterProxy(response_queue)
    features = ExecutionFeatures()
    store = BlockStore(ciel.engine, 'localhost', 8000, base_dir, True)
    # XXX: Broken because we now need a pseudoworker in place of a block_store.
    executor = TaskExecutorPlugin(ciel.engine, PseudoWorker(store), proxy, features, 1)
    while True:
        task = task_queue.get()
        if isinstance(task, ThreadTerminator):
            return
        executor.handle_input(task.as_descriptor(False))
def __init__(self, initial_task, initial_cont_ref, block_store, options):
    """Prepare a single-process ("all-in-one") job runner: task graph,
    in-process master proxy, job output sink and worker bookkeeping.

    Args:
        initial_task: root task of the job; its first expected output is
            treated as the job result.
        initial_cont_ref: reference to the initial continuation.
        block_store: block store shared by all worker threads/processes.
        options: parsed options; num_threads sets the worker count.
    """
    self.block_store = block_store
    self.options = options
    self.initial_task = initial_task
    self.initial_cont_ref = initial_cont_ref

    # Queues linking the driver with its worker processes.
    self.task_queue = multiprocessing.Queue()
    self.response_queue = multiprocessing.Queue()

    self.task_graph = AllInOneDynamicTaskGraph(self.task_queue)
    self.master_proxy = AllInOneMasterProxy(self.task_graph, self)
    self.execution_features = ExecutionFeatures()

    # Watch the root task's first expected output: that is the job result.
    self.job_output = AllInOneJobOutput()
    self.task_graph.subscribe(self.initial_task.expected_outputs[0], self.job_output)

    self.is_running = False
    self.num_workers = options.num_threads
    self.workers = None