def doit_auto(dependency_file, task_list, filter_tasks, loop_callback=None):
    """Re-execute tasks automatically when a dependency changes.

    @param dependency_file: (str) path to the doit dependency DB file
    @param task_list: (list - Task) all candidate tasks
    @param filter_tasks: (list - str) execute only tasks from this list
    @param loop_callback: (callable) used to stop loop on unittests
    """
    # figure out which tasks would run and which files they depend on
    task_control = TaskControl(task_list)
    task_control.process(filter_tasks)
    tasks_to_run = list({t for t in task_control.task_dispatcher(True)})
    watch_tasks = [t.name for t in tasks_to_run]
    watch_files = list(set(itertools.chain.from_iterable(
        t.file_dep for t in tasks_to_run)))

    class DoitAutoRun(FileModifyWatcher):
        """Execute doit on event handler of file changes."""
        def handle_event(self, event):
            doit_run(dependency_file, task_list, sys.stdout,
                     watch_tasks, reporter='executed-only')
            # reset run_status so tasks can be executed again on the
            # next file-change event
            for task in task_list:
                task.run_status = None

    file_watcher = DoitAutoRun(watch_files)
    # always run once when started
    file_watcher.handle_event(None)
    file_watcher.loop(loop_callback)
def test_delayed_creation(self):
    """Delayed task creation keeps pre-existing dependency info intact."""
    def creator():
        yield {'name': 'foo1', 'actions': None, 'file_dep': ['bar']}
        yield {'name': 'foo2', 'actions': None, 'targets': ['bar']}
    loader = DelayedLoader(creator, executed='t2')
    tasks = [
        Task('t0', None, task_dep=['t1']),
        Task('t1', None, loader=loader),
        Task('t2', None),
    ]
    tc = TaskControl(tasks)
    tc.process(['t0'])
    disp = tc.task_dispatcher()
    gen = disp.generator
    node_t2 = next(gen)
    assert node_t2.task.name == "t2"
    # dispatcher holds until t2 is done executing
    assert "hold on" == next(gen)
    assert "hold on" == next(gen)
    # delayed creation of tasks for t1 does not mess existing info
    assert disp.nodes['t1'].waiting_me == set([disp.nodes['t0']])
    node_foo2 = gen.send(node_t2)
    assert disp.nodes['t1'].waiting_me == set([disp.nodes['t0']])
    assert node_foo2.task.name == "t1:foo2"
    node_foo1 = gen.send(node_foo2)
    assert node_foo1.task.name == "t1:foo1"
    assert node_foo1.task.task_dep == ['t1:foo2']  # implicit dep added
    node_t1 = gen.send(node_foo1)
    assert node_t1.task.name == "t1"
    node_t0 = gen.send(node_t1)
    assert node_t0.task.name == "t0"
    pytest.raises(StopIteration, next, gen)
def test_include_setup(self):
    """With include_setup the dispatcher never waits for dependencies."""
    tasks = [Task("t1", None, task_dep=["t2"]), Task("t2", None)]
    tc = TaskControl(tasks)
    tc.process(['t1'])
    gen = tc.task_dispatcher(include_setup=True).generator
    assert gen.send(None).task == tasks[0]
    assert gen.send(None).task == tasks[1]
    pytest.raises(StopIteration, gen.send, None)
def test_normal(self):
    """Dependent task is dispatched only after its dependency completes."""
    tasks = [Task("t1", None, task_dep=["t2"]), Task("t2", None)]
    tc = TaskControl(tasks)
    tc.process(['t1'])
    gen = tc.task_dispatcher().generator
    node2 = next(gen)
    assert node2.task == tasks[1]
    # dispatcher keeps holding until t2 is reported done
    assert next(gen) == "hold on"
    assert next(gen) == "hold on"
    assert gen.send(node2).task == tasks[0]
    pytest.raises(StopIteration, next, gen)
def testAllTasksWaiting(self):
    """Task with a setup-task is yielded twice: before and after its setup.

    Modernized from Python 2 idioms (``gen.next()`` / ``py.test.raises``)
    to ``next(gen)`` / ``pytest.raises`` for consistency with the other
    tests in this file.
    """
    tasks = [Task("taskX", None, setup=["taskY"]),
             Task("taskY", None)]
    tc = TaskControl(tasks)
    tc.process(['taskX'])
    gen = tc.task_dispatcher()
    assert tasks[0] == next(gen)  # tasks with setup are yielded twice
    assert "hold on" == next(gen)  # nothing else really available
    tasks[0].run_status = 'run'  # should be executed
    assert tasks[1] == next(gen)  # execute setup before
    assert tasks[0] == next(gen)  # second time, ok
    pytest.raises(StopIteration, next, gen)  # nothing left
def test_include_setup(self):
    # with include_setup the dispatcher does not wait for task_dep
    tasks = [Task("t1", None, task_dep=["t2"]), Task("t2", None)]
    control = TaskControl(tasks)
    control.process(['t1'])
    dispatcher = control.task_dispatcher(include_setup=True)
    gen = dispatcher.generator
    assert tasks[0] == gen.send(None).task
    assert tasks[1] == gen.send(None).task
    pytest.raises(StopIteration, gen.send, None)
class Run(AnadamaCmdBase, DoitRun):
    """'run' command: execute the selected tasks with a configurable runner.

    Extends the doit Run command with grid-execution options (partition,
    performance-db URL, temp-file dir, extra grid args) and remote
    reporting options (reporter URL, auth info).
    """
    my_opts = (opt_runner, opt_pipeline_name, opt_grid_part, opt_perf_url,
               opt_tmpfiles, opt_grid_args, opt_reporter_url, opt_auth_info)

    def _execute(self, outfile=sys.stdout, verbosity=None, always=False,
                 continue_=False, reporter='default', num_process=0,
                 par_type='process', single=False,
                 pipeline_name="Custom Pipeline"):
        """Build the reporter, output stream and runner, then run all tasks.

        @param outfile: file path (str) or file-like object for report output
        @param verbosity: (int or None) None uses Task.DEFAULT_VERBOSITY
        @param always: (bool) passed through to the runner (always execute)
        @param continue_: (bool) passed through to the runner
        @param reporter: (str) one of provided reporters or ...
                         (class) user defined reporter class
                         (can only be specified from DOIT_CONFIG
                         - never from command line)
                         (reporter instance) - only used in unittests
        @param num_process: (int) 0 means serial Runner
        @param par_type: (str) 'process' or 'thread'
        @param single: (bool) strip task_dep from selected tasks (and their
                       subtasks) so only the named tasks run
        @param pipeline_name: (str) label attached to the runner
        @return: result of runner.run_all()
        @raise InvalidCommand: unknown reporter name, bad par_type, or a
               grid runner used without --partition
        """
        # get tasks to be executed
        # self.control is saved on instance to be used by 'auto' command
        self.control = TaskControl(self.task_list)
        self.control.process(self.sel_tasks)
        if single:
            # drop dependencies so only the explicitly selected tasks run;
            # for group tasks, strip deps from each subtask instead
            for task_name in self.sel_tasks:
                task = self.control.tasks[task_name]
                if task.has_subtask:
                    for task_name in task.task_dep:
                        sub_task = self.control.tasks[task_name]
                        sub_task.task_dep = []
                else:
                    task.task_dep = []
        # reporter: resolve a name to a class, or accept a class/instance
        if isinstance(reporter, six.string_types):
            if reporter not in REPORTERS:
                msg = ("No reporter named '%s'."
                       " Type 'doit help run' to see a list "
                       "of available reporters.")
                raise InvalidCommand(msg % reporter)
            reporter_cls = REPORTERS[reporter]
        else:
            # user defined class
            reporter_cls = reporter
        # verbosity
        if verbosity is None:
            use_verbosity = Task.DEFAULT_VERBOSITY
        else:
            use_verbosity = verbosity
        show_out = use_verbosity < 2  # show on error report
        # outstream: open a file when given a path, else use object as-is
        if isinstance(outfile, six.string_types):
            outstream = codecs.open(outfile, 'w', encoding='utf-8')
        else:
            # outfile is a file-like object (like StringIO or sys.stdout)
            outstream = outfile
        # run
        try:
            # FIXME stderr will be shown twice in case of task error/failure
            if isinstance(reporter_cls, type):
                # instantiate a reporter class with stream + options
                reporter_obj = reporter_cls(
                    outstream, {
                        'show_out': show_out,
                        'show_err': True,
                        'reporter_url': self.opt_values['reporter_url'],
                        'auth_info': self.opt_values['auth_info']
                    })
            else:
                # also accepts reporter instances
                reporter_obj = reporter_cls
            run_args = [
                self.dep_class, self.dep_file, reporter_obj,
                continue_, always, verbosity
            ]
            run_kwargs = {}
            RunnerClass = RUNNER_MAP.get(self.opt_values["runner"])
            if not RunnerClass:
                # no explicit runner option: pick serial/parallel by flags
                RunnerClass = self._discover_runner_class(
                    num_process, par_type)
            elif self.opt_values['runner'] in GRID_RUNNER_MAP:
                # grid runners take extra positional args before the
                # standard runner args
                if not self.opt_values.get('partition', None):
                    raise InvalidCommand("--partition option is required "
                                         "when using a grid runner")
                run_args = [
                    self.opt_values['partition'],
                    self.opt_values['perf_url'],
                    self.opt_values['tmpfiledir'],
                    self.opt_values['grid_args']
                ] + run_args
                run_kwargs['num_process'] = num_process if num_process else 1
            runner = RunnerClass(*run_args, **run_kwargs)
            runner.pipeline_name = pipeline_name
            return runner.run_all(self.control.task_dispatcher())
        finally:
            # NOTE(review): this checks `str` while the open above checks
            # `six.string_types` -- on Python 2 a unicode path would be
            # opened but never closed; confirm intended.
            if isinstance(outfile, str):
                outstream.close()

    def _discover_runner_class(self, num_process, par_type):
        """Return the runner class matching the parallelism settings.

        @param num_process: (int) 0 selects the serial Runner
        @param par_type: (str) 'process' or 'thread'
        @raise InvalidCommand: for any other par_type value
        """
        if num_process == 0:
            return Runner
        else:
            if par_type == 'process':
                if MRunner.available():
                    return partial(MRunner, num_process=num_process)
                else:
                    # NOTE(review): this branch falls through and implicitly
                    # returns None despite the warning text promising a
                    # thread fallback -- confirm whether it should return
                    # partial(MThreadRunner, num_process=num_process).
                    sys.stderr.write(
                        "WARNING: multiprocessing module not available, " +
                        "running in parallel using threads.")
            elif par_type == 'thread':
                return partial(MThreadRunner, num_process=num_process)
            else:
                msg = "Invalid parallel type %s"
                raise InvalidCommand(msg % par_type)
def testChangeOrder_AddJustOnce(self):
    # taskY is a dependency of taskX: it is dispatched first, and only once
    tasks = [Task("taskX", None, task_dep=["taskY"]),
             Task("taskY", None)]
    tc = TaskControl(tasks)
    tc.process(None)
    assert list(tc.task_dispatcher()) == [tasks[1], tasks[0]]