Example no. 1
0
    def __init__(self, root_task_descriptor, block_store, master_proxy,
                 execution_features, worker, job, job_manager):
        """Initialise per-job execution state rooted at the given task.

        Note: the ``block_store``, ``master_proxy`` and
        ``execution_features`` parameters are not used directly; the
        equivalents held by ``worker`` are used instead.
        """
        self.initial_td = root_task_descriptor
        self.id = root_task_descriptor['task_id']

        # Infrastructure handles are all taken from the worker.
        self.worker = worker
        self.block_store = worker.block_store
        self.master_proxy = worker.master_proxy
        self.execution_features = worker.execution_features

        self.job = job
        self.job_manager = job_manager
        self.task_graph = job.task_graph

        # Seed the job-wide reference cache with the root task's inputs.
        # XXX: Should possibly combine_with()?
        self.reference_cache = job.reference_cache
        for input_ref in root_task_descriptor['inputs']:
            self.reference_cache[input_ref.id] = input_ref

        self._record_list_lock = threading.Lock()
        self.task_records = []
        self._refcount = 0
        self.aborted = False

        # LocalJobOutput gets self so that it can notify us when done.
        self.job_output = LocalJobOutput(self.initial_td["expected_outputs"],
                                         self)
Example no. 2
0
 def __init__(self, root_task_descriptor, block_store, master_proxy,
              execution_features, worker):
     """Set up local execution state for a job rooted at one task.

     Builds a LocalTaskGraph rooted at the descriptor's task id,
     subscribes ``self.job_output`` to every expected output reference,
     and then spawns the root task, publishing its input references.
     """
     self._lock = Lock()
     self.task_records = []
     # No task is running yet.
     self.current_task = None
     self.current_td = None
     self.block_store = block_store
     self.master_proxy = master_proxy
     self.execution_features = execution_features
     self.worker = worker
     # Seed the reference cache with the root task's input references.
     # (Was dict([...]) over a list of tuples; a dict comprehension is
     # the idiomatic equivalent.)
     self.reference_cache = {
         ref.id: ref for ref in root_task_descriptor["inputs"]
     }
     self.initial_td = root_task_descriptor
     self.task_graph = LocalTaskGraph(execution_features,
                                      [self.initial_td["task_id"]])
     # job_output is notified as each expected output becomes available.
     self.job_output = LocalJobOutput(self.initial_td["expected_outputs"])
     for ref in self.initial_td["expected_outputs"]:
         self.task_graph.subscribe(ref, self.job_output)
     # Kick off execution: spawn the root task and publish its inputs.
     self.task_graph.spawn_and_publish([self.initial_td],
                                       self.initial_td["inputs"])