def _inform_subsystems(client, task, packager_result):
    """Register a freshly packaged task with the local subsystems.

    Records the package size and network mask on the task header, adds the
    task to the task manager with an estimated batch-payment fee, and hands
    the package over to the resource server. Yields on the resource-server
    call, so this is meant to run as an asynchronous (inlineCallbacks-style)
    generator.

    :param client: client facade exposing config, managers and servers
    :param task: task whose header is updated in place
    :param packager_result: (package_path, package_sha1) pair
    :return: the resource server's add_task result
    """
    package_path, package_sha1 = packager_result
    task_id = task.header.task_id

    # Record the on-disk size of the resource package in the header.
    task.header.resource_size = os.path.getsize(package_path)

    # Apply a computed network mask only when masking is enabled; otherwise
    # fall back to an empty (match-all) mask.
    if client.config_desc.net_masking_enabled:
        mask = _get_mask_for_task(client=client, task=task)
    else:
        mask = masking.Mask()
    task.header.mask = mask

    # Estimate the ETH fee needed to pay for all subtasks in one batch.
    fee = client.transaction_system.eth_for_batch_payment(
        task.get_total_tasks())
    client.task_manager.add_new_task(task, estimated_fee=fee)

    options = client.task_server.get_share_options(task_id, None)
    options.timeout = common.deadline_to_timeout(task.header.deadline)

    # Hand the package over to the resource server (asynchronous step).
    result = yield client.resource_server.add_task(
        package_path,
        package_sha1,
        task_id,
        task.header.resource_size,
        client_options=options,
    )
    return result
def restore_resources(self) -> None:
    """Re-register the resource package of every persisted task.

    No-op unless task persistence is enabled on the task manager. For each
    persisted task state, rebuilds the file list from its stored package
    path (if any) and re-adds the resources with the task's remaining
    timeout.
    """
    task_manager = getattr(self, 'task_manager')
    if not task_manager.task_persistence:
        return

    # Snapshot both dicts so concurrent mutation cannot break iteration.
    states = dict(task_manager.tasks_states)
    tasks = dict(task_manager.tasks)

    for task_id, task_state in states.items():
        # 'package_path' does not exist in states saved before 0.15.1.
        package_path = getattr(task_state, 'package_path', None)
        # Resources are stored as a single zip package, when present.
        files = [package_path] if package_path else None

        timeout = deadline_to_timeout(tasks[task_id].header.deadline)

        logger.info("Restoring task '%s' resources (timeout: %r s)",
                    task_id, timeout)
        logger.debug("%r", files)

        self._restore_resources(files, task_id,
                                resource_hash=task_state.resource_hash,
                                timeout=timeout)
def start(self, verifier_class) -> Deferred:
    """Instantiate the verifier and begin (or short-circuit) verification.

    Returns a Deferred: a timeout result when the deadline has passed, the
    full verification Deferred when the cheap pre-check succeeds, or an
    already-fired Deferred with the completed result otherwise.
    """
    verifier = verifier_class(self.kwargs)
    self.verifier = verifier

    # Past the deadline: report a timeout immediately.
    if deadline_to_timeout(self.deadline) <= 0:
        return succeed(verifier.task_timeout(self.subtask_id))

    # Cheap pre-check first; only run full verification when it passes.
    if verifier.simple_verification(self.kwargs):
        return verifier.start_verification(self.kwargs)
    return succeed(verifier.verification_completed())
def task_given(self, ctd):
    """Accept a compute-task definition unless one is already assigned.

    :param ctd: dict-like definition with 'deadline', 'task_id' and
        'subtask_id' keys
    :return: True when the subtask was accepted, False when another
        subtask is still assigned
    """
    # Refuse double assignment; only one subtask may be active at a time.
    if self.assigned_subtask is not None:
        logger.error("Trying to assign a task, when it's already assigned")
        return False

    ttl = deadline_to_timeout(ctd['deadline'])
    self.wait(ttl=ttl)
    self.assigned_subtask = ctd
    self.__request_resource(ctd['task_id'], ctd['subtask_id'])
    return True
def resource_given(self, task_id):
    """Start computing the subtask mapped to *task_id*, if one is assigned.

    :return: True when computation was started, False when the mapped
        subtask is not among the assigned ones, and None (implicitly, as in
        the original contract) when the task id is unknown.
    """
    if task_id not in self.task_to_subtask_mapping:
        # Unknown task: preserve the original implicit None result.
        return None

    subtask_id = self.task_to_subtask_mapping[task_id]
    if subtask_id not in self.assigned_subtasks:
        return False

    subtask = self.assigned_subtasks[subtask_id]
    self.__compute_task(subtask_id, subtask.docker_images,
                        subtask.src_code, subtask.extra_data,
                        subtask.short_description,
                        deadline_to_timeout(subtask.deadline))
    self.waiting_for_task = None
    return True
def task_resource_collected(self, task_id, unpack_delta=True):
    """Unpack collected resources and start computing the mapped subtask.

    :param task_id: id of the task whose resources were collected
    :param unpack_delta: when True, apply the pending resource delta into
        the task's resource directory before computing
    :return: True when computation was started, False when the task or its
        subtask is not assigned
    """
    if task_id not in self.task_to_subtask_mapping:
        return False
    subtask_id = self.task_to_subtask_mapping[task_id]
    if subtask_id not in self.assigned_subtasks:
        return False

    subtask = self.assigned_subtasks[subtask_id]
    if unpack_delta:
        resource_dir = self.dir_manager.get_task_resource_dir(task_id)
        self.task_server.unpack_delta(resource_dir, self.delta, task_id)
    # The delta is consumed exactly once.
    self.delta = None
    self.last_task_timeout_checking = time.time()
    self.__compute_task(subtask_id, subtask.docker_images,
                        subtask.src_code, subtask.extra_data,
                        subtask.short_description,
                        deadline_to_timeout(subtask.deadline))
    return True
def test_to_dictionary():
    """Populate a SubtaskState fully and check its dictionary view."""
    started = get_timestamp_utc()

    ss = SubtaskState()
    ss.subtask_definition = "My long task definition"
    ss.subtask_id = "ABCDEF"
    ss.subtask_progress = 0.92
    ss.time_started = started
    ss.deadline = timeout_to_deadline(started + 5)
    ss.extra_data = {"param1": 1323, "param2": "myparam"}
    ss.subtask_rem_time = deadline_to_timeout(ss.deadline) - started
    ss.subtask_status = SubtaskStatus.starting
    ss.value = 138
    ss.stdout = "path/to/file"
    ss.stderr = "path/to/file2"
    ss.results = ["path/to/file3", "path/to/file4"]
    ss.computation_time = 130
    ss.node_id = "NODE1"

    ss_dict = ss.to_dictionary()

    # Fields that are exposed (some under renamed keys).
    assert ss_dict['description'] == "My long task definition"
    assert ss_dict['subtask_id'] == "ABCDEF"
    assert ss_dict['progress'] == 0.92
    assert ss_dict['time_started'] == get_timestamp_utc()
    assert ss_dict['time_remaining'] == 5
    assert ss_dict['status'] == SubtaskStatus.starting.value
    assert ss_dict['stdout'] == "path/to/file"
    assert ss_dict['stderr'] == "path/to/file2"
    assert ss_dict['results'] == ["path/to/file3", "path/to/file4"]
    assert ss_dict['node_id'] == "NODE1"

    # Fields deliberately absent from the dictionary view.
    assert ss_dict.get('deadline') is None
    assert ss_dict.get('extra_data') is None
    assert ss_dict.get('value') is None
    assert ss_dict.get('computation_time') is None
def test_deadline_to_timeout(self):
    """deadline_to_timeout should roughly invert timeout_to_deadline."""
    original_timeout = 10 ** 10
    deadline = timeout_to_deadline(original_timeout)
    recovered = deadline_to_timeout(deadline)
    # Time elapses between the two calls, so the recovered timeout may
    # shrink slightly — but it must remain positive and bounded above.
    assert 0 < recovered <= original_timeout
def __compute_task(self, subtask_id, docker_images, src_code, extra_data,
                   subtask_deadline):
    """Launch the computation thread for the currently assigned subtask.

    Resolves the task header, computes the effective timeout, prepares the
    resource/temp directories and starts either a Docker-based or a direct
    Python task thread. When neither is possible, reports failure to the
    task server and resets the assignment state.

    :param subtask_id: id of the subtask to compute
    :param docker_images: list of docker image dicts, or a falsy value for
        direct (non-docker) computation
    :param src_code: source code to execute
    :param extra_data: extra parameters passed to the task thread
    :param subtask_deadline: subtask's own deadline; the effective deadline
        is capped by the task header's deadline
    """
    task_id = self.assigned_subtask['task_id']
    task_header = self.task_server.task_keeper.task_headers.get(task_id)

    # The header may have been dropped (e.g. task cancelled) between
    # assignment and computation; bail out and close the session.
    if not task_header:
        logger.warning("Subtask '%s' of task '%s' cannot be computed: "
                       "task header has been unexpectedly removed",
                       subtask_id, task_id)
        return self.session_closed()

    # Use the earlier of the task-wide and subtask deadlines.
    deadline = min(task_header.deadline, subtask_deadline)
    task_timeout = deadline_to_timeout(deadline)

    # Unique suffix so each computation gets its own temp directory.
    unique_str = str(uuid.uuid4())

    logger.info("Starting computation of subtask %r (task: %r, deadline: "
                "%r, docker images: %r)", subtask_id, task_id, deadline,
                docker_images)

    self.reset(counting_task=task_id)

    # Directory creation is serialized under dir_lock.
    with self.dir_lock:
        resource_dir = self.resource_manager.get_resource_dir(task_id)
        temp_dir = os.path.join(
            self.resource_manager.get_temporary_dir(task_id), unique_str)

        if not os.path.exists(temp_dir):
            os.makedirs(temp_dir)

    if docker_images:
        # Run inside Docker with a generated resource/temp dir mapping.
        docker_images = [DockerImage(**did) for did in docker_images]
        dir_mapping = DockerTaskThread.generate_dir_mapping(resource_dir,
                                                            temp_dir)
        tt = DockerTaskThread(subtask_id, docker_images, src_code,
                              extra_data, dir_mapping, task_timeout)
    elif self.support_direct_computation:
        tt = PyTaskThread(subtask_id, src_code, extra_data,
                          resource_dir, temp_dir, task_timeout)
    else:
        # Neither Docker nor direct computation available: report failure,
        # clear the assignment and notify via the finished callback.
        logger.error("Cannot run PyTaskThread in this version")
        subtask = self.assigned_subtask
        self.assigned_subtask = None
        self.task_server.send_task_failed(
            subtask_id, subtask['task_id'],
            "Host direct task not supported",
        )
        self.counting_task = None
        if self.finished_cb:
            self.finished_cb()
        return

    with self.lock:
        self.counting_thread = tt

    # task_computed runs on both success and failure of the thread.
    tt.start().addBoth(lambda _: self.task_computed(tt))