def load(self, file_name, skip_test):
    """Load a task definition from *file_name*, optionally test-run it
    locally, and submit it to the client.

    :param file_name: path to the serialized task-definition file
    :param skip_test: when True, skip the local test computation
    :return: a CommandResult carrying an error message on failure,
        otherwise the (sync-awaited) result of the client's create_task
        deferred
    """
    try:
        definition = self.__read_from_file(file_name)
    except Exception as exc:
        return CommandResult(
            error="Error reading task from file '{}': {}".format(
                file_name, exc))

    if hasattr(definition, 'resources'):
        # Normalize paths so duplicates that differ only in separators
        # or redundant components collapse to a single entry.
        definition.resources = {os.path.normpath(res)
                                for res in definition.resources}
    datadir = sync_wait(Tasks.client.get_datadir())

    # TODO: unify GUI and CLI logic

    rendering_task_state = TaskDesc()
    rendering_task_state.definition = definition
    rendering_task_state.task_state.status = TaskStatus.starting

    if not Tasks.application_logic:
        Tasks.application_logic = CommandAppLogic.instantiate(
            Tasks.client, datadir)

    task_builder = Tasks.application_logic.get_builder(rendering_task_state)
    task = Task.build_task(task_builder)
    rendering_task_state.task_state.outputs = task.get_output_names()
    rendering_task_state.task_state.total_subtasks = task.get_total_tasks()
    task.header.task_id = str(uuid.uuid4())

    if not skip_test:
        # Run a local test computation first; any failure aborts
        # submission so a broken task never reaches the network.
        test_task = Task.build_task(task_builder)
        test_task.header.task_id = str(uuid.uuid4())
        queue = Queue()

        TaskTester(test_task, datadir,
                   success_callback=lambda *a, **kw: queue.put(True),
                   error_callback=lambda *a, **kw: queue.put(a)).run()

        test_result = queue.get()
        if test_result is not True:
            return CommandResult(
                error="Test failed: {}".format(test_result))

    task_dict = DictSerializer.dump(task)
    task_def = task_dict['task_definition']
    # BUG FIX: read the 'resources' key. The previous code read
    # 'task_definition' here, which never exists at this level, so the
    # default [] was always used and the task's resources were silently
    # dropped before submission.
    task_def['resources'] = list(task_def.get('resources', []))
    deferred = Tasks.client.create_task(task_dict)
    return sync_wait(deferred, timeout=1800)
def run_benchmark(self, benchmark, label, cfg_param_name):
    """Build a task from *benchmark* and run it via BenchmarkRunner.

    Success is reported back on the reactor thread (with the given
    *label* and *cfg_param_name*); a progress dialog is shown and the
    'ok'/'recount' controls are disabled while the run is in flight.
    """
    state = TaskDesc()
    state.status = TaskStatus.notStarted
    state.definition = benchmark.task_definition
    self._validate_task_state(state)

    builder = self.get_builder(state)
    task = Task.build_task(builder)
    reactor = self.__get_reactor()

    def on_success(performance):
        # Hop back onto the reactor thread before touching shared state.
        reactor.callFromThread(self._benchmark_computation_success,
                               performance=performance,
                               label=label,
                               cfg_param=cfg_param_name)

    self.br = BenchmarkRunner(task, self.datadir, on_success,
                              self._benchmark_computation_error, benchmark)

    self.progress_dialog = TestingTaskProgressDialog(
        self.customizer.gui.window)
    self.progress_dialog_customizer = TestingTaskProgressDialogCustomizer(
        self.progress_dialog, self)
    # Keep the user from dismissing the dialog or re-triggering a
    # benchmark while this one is running.
    self.progress_dialog_customizer.enable_ok_button(False)
    self.customizer.gui.setEnabled('recount', False)
    self.progress_dialog.show()

    self.br.run()
def add_tasks(self, tasks):
    """Build a Golem task from each definition in *tasks* and enqueue
    it on the client."""
    for definition in tasks:
        builder_class = self._get_task_builder(definition)
        builder = builder_class(self.client.get_node_name(), definition,
                                self.client.datadir)
        self.client.enqueue_new_task(Task.build_task(builder))
def test_run(self):
    """Run the Blender benchmark end to end and expect success."""
    benchmark = BlenderBenchmark()
    definition = benchmark.task_definition

    state = TaskDesc()
    state.status = TaskStatus.notStarted
    state.definition = definition

    builder = BlenderRenderTaskBuilder("node name", definition, self.path,
                                       DirManager(self.path))
    task = Task.build_task(builder)

    # Mutable cell so the callbacks can record the outcome.
    outcome = [None]

    def on_success(*_):
        outcome[0] = True

    def on_error(*_):
        outcome[0] = False

    self.br = BenchmarkRunner(task, self.path, on_success, on_error,
                              benchmark)
    self.br.run()
    if self.br.tt:
        # Wait for the computation thread before inspecting the result.
        self.br.tt.join()
    assert outcome[0]
def run_benchmark(self, benchmark, task_builder, datadir, node_name,
                  success_callback, error_callback):
    """Validate the benchmark's task state, build the task and run it
    through a BenchmarkRunner with the supplied callbacks."""
    state = TaskDesc()
    state.status = TaskStatus.notStarted
    state.definition = benchmark.task_definition
    self._validate_task_state(state)

    builder = task_builder(node_name, state.definition, datadir,
                           self.dir_manager)
    runner = BenchmarkRunner(Task.build_task(builder), datadir,
                             success_callback, error_callback, benchmark)
    runner.run()
def create_task(self, dictionary):
    # FIXME: remove after the new interface has been integrated with
    """Build a Task from a plain dict description.

    Non-dict input is returned unchanged (it is assumed to already be
    a built task or equivalent).
    """
    if not isinstance(dictionary, dict):
        return dictionary

    task_type = self.task_types[dictionary['type'].lower()]
    builder_class = task_type.task_builder_type
    definition = builder_class.build_definition(task_type, dictionary)
    builder = builder_class(self.node_name, definition, self.root_path,
                            self.dir_manager)
    return Task.build_task(builder)
def test_task_simple_serializer(self):
    """Round-trip a Task through SimpleSerializer and verify accessors
    survive while listeners (transient state) do not."""
    # Non-builder / non-listener arguments must be rejected.
    with self.assertRaises(TypeError):
        Task.build_task("Not Task Builder")
    with self.assertRaises(TypeError):
        Task.register_listener("Not Listener")

    task = Task(Mock(), "")
    self.assertIsInstance(task, Task)
    self.assertEqual(task.get_stdout("abc"), "")
    self.assertEqual(task.get_stderr("abc"), "")
    self.assertEqual(task.get_results("abc"), [])

    header = TaskHeader("ABC", "xyz", "10.10.10.10", 1023, "key",
                        "DEFAULT", Node())
    task = Task(header, "print 'Hello world'")

    listener_a = TaskEventListener()
    listener_b = TaskEventListener()
    task.register_listener(listener_a)
    task.register_listener(listener_b)
    assert len(task.listeners) == 2

    blob = SimpleSerializer.dumps(task)
    restored = SimpleSerializer.loads(blob)
    assert task.src_code == restored.src_code
    assert task.header.task_id == restored.header.task_id
    assert (task.header.task_owner.node_name ==
            restored.header.task_owner.node_name)
    assert restored.get_results("abc") == []
    # Listeners are not serialized: original keeps both, the copy none.
    assert len(task.listeners) == 2
    assert len(restored.listeners) == 0

    task.unregister_listener(listener_b)
    assert len(task.listeners) == 1
    assert task.listeners[0] == listener_a
    task.listeners[0].notify_update_task("abc")
    task.unregister_listener(listener_a)
    assert len(task.listeners) == 0
    # Unregistering an already-removed listener only logs a warning.
    with self.assertLogs(logger, level="WARNING"):
        task.unregister_listener(listener_a)
def build_and_serialize_task(self, task_state, cbk=None):
    """Build a Task from *task_state* and return its dict serialization.

    max_price is stringified (on the header and in the serialized
    definition) and resources are converted to a list so the result is
    transport-friendly. The optional *cbk* receives the built Task.
    """
    task = Task.build_task(self.get_builder(task_state))
    task.header.max_price = str(task.header.max_price)

    serialized = DictSerializer.dump(task)
    if 'task_definition' in serialized:
        definition = serialized['task_definition']
        definition['resources'] = list(definition['resources'])
        if 'max_price' in definition:
            definition['max_price'] = str(definition['max_price'])

    from pprint import pformat
    logger.debug('task serialized: %s', pformat(serialized))

    if cbk:
        cbk(task)
    return serialized