def testDontIgnoreTaskDependency(self, tasks, depfile):
    """Ignoring t3 must not mark its task_dep t1 as ignored."""
    output = StringIO.StringIO()
    cmd = Ignore(outstream=output, dep_file=depfile.name, task_list=tasks)
    cmd._execute(["t3"])
    dep = Dependency(depfile.name)
    assert '1' == dep._get("t3", "ignore:")   # t3 itself is flagged
    assert None == dep._get("t1", "ignore:")  # its dependency is untouched
def check(self):
    """Log per-task database-prep status.

    Returns:
        bool: True when at least one task is NOT up-to-date
        (i.e. prep is incomplete).
    """
    common.print_header('Checking for database prep (dir: {0})'.format(
        self.directory), level=2)
    dep_manager = Dependency(SqliteDB, self.doit_config['dep_file'])
    missing = False
    for task in self.tasks:
        status = dep_manager.get_status(task, self.tasks)
        self.logger.debug('{0}:{1}'.format(task.name, status.status))
        if status.status != 'up-to-date':
            missing = True
            self.logger.warning('[ ] {0}'.format(task.name))
        else:
            self.logger.info('[x] {0}'.format(task.name))
    common.print_header('Database results', level=2)
    if missing:
        self.logger.warning('Database prep incomplete...')
    else:
        self.logger.info('All databases prepared!')
    # NOTE(review): returns True on *failure* (missing prep) — confirm callers
    # expect this polarity.
    return missing
def check(self):
    """Log per-task database-prep status, hinting how to install if incomplete.

    Returns:
        bool: True when at least one task is NOT up-to-date
        (i.e. prep is incomplete).
    """
    common.print_header('Checking for database prep (dir: {0})'.format(
        self.directory), level=2)
    dep_manager = Dependency(SqliteDB, self.doit_config['dep_file'])
    missing = False
    for task in self.tasks:
        status = dep_manager.get_status(task, self.tasks)
        self.logger.debug('{0}:{1}'.format(task.name, status.status))
        if status.status != 'up-to-date':
            missing = True
            self.logger.warning('[ ] {0}'.format(task.name))
        else:
            self.logger.info('[x] {0}'.format(task.name))
    common.print_header('Database results', level=2)
    if missing:
        self.logger.warning('Database prep incomplete')
        common.print_header('to prepare databases, run: dammit databases'\
                            ' --install', level=2)
    else:
        self.logger.info('All databases prepared!')
    # NOTE(review): returns True on *failure* (missing prep) — confirm callers
    # expect this polarity.
    return missing
def testDontForgetTaskDependency(self, tasks, depfile):
    """Forgetting t3 must not clear the record of its task_dep t1."""
    self._add_task_deps(tasks, depfile.name)
    output = StringIO.StringIO()
    cmds.doit_forget(depfile.name, tasks, output, ["t3"])
    dep = Dependency(depfile.name)
    assert None == dep._get("t3", "dep")  # t3 forgotten
    assert "1" == dep._get("t1", "dep")   # dependency kept
def testIgnoreAll(self, tasks, depfile):
    """Ignoring with no task selected is rejected and marks nothing."""
    output = StringIO.StringIO()
    cmds.doit_ignore(depfile.name, tasks, output, [])
    got = output.getvalue().split("\n")[:-1]
    # the exact message (including the missing apostrophe) is what doit emits
    assert ["You cant ignore all tasks! Please select a task."] == got, got
    dep = Dependency(depfile.name)
    for task in tasks:
        assert None == dep._get(task.name, "ignore:")
def testForgetAll(self, tasks):
    """Forgetting with no selection clears the dep record of every task."""
    output = StringIO.StringIO()
    cmds.doit_forget(TESTDB, tasks, output, [])
    got = output.getvalue().split("\n")[:-1]
    # NOTE: "forgeting" is the exact (misspelled) message doit emits
    assert ["forgeting all tasks"] == got, repr(output.getvalue())
    dep = Dependency(TESTDB)
    for task in tasks:
        assert None == dep._get(task.name, "dep")
def testForgetOne(self, tasks):
    """Forgetting selected tasks clears only those tasks' records."""
    output = StringIO.StringIO()
    cmds.doit_forget(TESTDB, tasks, output, ["t2", "t1"])
    got = output.getvalue().split("\n")[:-1]
    # NOTE: "forgeting" is the exact (misspelled) message doit emits
    assert ["forgeting t2", "forgeting t1"] == got
    dep = Dependency(TESTDB)
    assert None == dep._get("t1", "dep")
    assert None == dep._get("t2", "dep")
def testDontIgnoreTaskDependency(self, tasks, depfile_name):
    """Ignoring t3 must not mark its task_dep t1 as ignored (dbm backend)."""
    output = StringIO()
    cmd = CmdFactory(Ignore, outstream=output, dep_file=depfile_name,
                     backend='dbm', task_list=tasks)
    cmd._execute(["t3"])
    dep = Dependency(DbmDB, depfile_name)
    assert '1' == dep._get("t3", "ignore:")   # t3 itself is flagged
    assert None == dep._get("t1", "ignore:")  # its dependency is untouched
def __init__(self, directory, logger, files=None, profile=False,
             db=None, n_threads=1, **doit_config_kwds):
    '''Stores tasks and the files they operate on, along with doit config
    and other metadata. This is the core of the pipelines: it passes its
    tasks along to doit for execution, and can check task and pipeline
    completion status.

    Args:
        directory (str): The directory in which to run the tasks. Will be
            created if it doesn't exist.
        logger (logging.Logger): Logger to record to.
        files (dict): Files used by the handler. Starts empty if omitted.
        profile (bool): If True, profile task execution.
        db (str): Name of the doit database.
        **doit_config_kwds: Keyword arguments passed to doit.

    Attributes:
        files (dict): Files used by the tasks.
        directory (str): Working directory for execution.
        tasks (OrderedDict): The tasks to execute.
        dep_file (str): Path of the doit database.
        doit_config (dict): The doit configuration given to the task runner.
        doit_dep_mgr (doit.dependency.Dependency): Doit object to track
            task status.
        profile (bool): Whether to run the profiler on tasks.
        logger (logging.Logger): Logger to use.
    '''
    super(TaskHandler, self).__init__()

    if files is None:
        self.files = {}
    elif type(files) is not dict:
        raise TypeError('files must be of type dict')
    else:
        self.files = files

    self.tasks = OrderedDict()

    self.directory = directory
    try:
        mkdir(directory)
    except OSError:
        # directory already exists -- that's fine
        pass

    if db is None:
        dep_file = path.join(self.directory, 'doit.db')
    else:
        dep_file = path.join(self.directory, '{0}.doit.db'.format(db))
    self.dep_file = dep_file
    logger.debug('Dependency Database File: {0}'.format(dep_file))
    self.doit_config = dict(dep_file=self.dep_file,
                            reporter=ui.GithubMarkdownReporter,
                            **doit_config_kwds)
    self.doit_dep_mgr = Dependency(SqliteDB, dep_file)
    self.n_threads = n_threads
    self.profile = profile
    self.logger = logger
def testIgnoreAll(self, tasks, depfile):
    """Ignore command with empty selection is rejected and marks nothing."""
    output = StringIO.StringIO()
    cmd = Ignore(outstream=output, dep_file=depfile.name, task_list=tasks)
    cmd._execute([])
    got = output.getvalue().split("\n")[:-1]
    # the exact message (including the missing apostrophe) is what doit emits
    assert ["You cant ignore all tasks! Please select a task."] == got, got
    dep = Dependency(depfile.name)
    for task in tasks:
        assert None == dep._get(task.name, "ignore:")
def test_successRunOnce(self, reporter, RunnerClass):
    """A run_once task succeeds and records its run-once marker in the DB."""
    tasks = [Task("taskX", [my_print], run_once=True)]
    my_runner = RunnerClass(TESTDB, reporter)
    tc = TaskControl(tasks)
    tc.process(None)
    my_runner.run_tasks(tc)
    assert runner.SUCCESS == my_runner.finish()
    d = Dependency(TESTDB)
    assert '1' == d._get('taskX', 'run-once:')
def testIgnoreOne(self, tasks, depfile):
    """Ignoring selected tasks flags exactly those tasks."""
    output = StringIO.StringIO()
    cmds.doit_ignore(depfile.name, tasks, output, ["t2", "t1"])
    got = output.getvalue().split("\n")[:-1]
    assert ["ignoring t2", "ignoring t1"] == got
    dep = Dependency(depfile.name)
    assert '1' == dep._get("t1", "ignore:")
    assert '1' == dep._get("t2", "ignore:")
    assert None == dep._get("t3", "ignore:")  # unselected task untouched
def testDontForgetTaskDependency(self, tasks, depfile):
    """Forget on t3 must not clear the record of its task_dep t1."""
    self._add_task_deps(tasks, depfile.name)
    output = StringIO()
    cmd_forget = Forget(outstream=output, dep_file=depfile.name,
                        task_list=tasks, sel_tasks=["t3"])
    cmd_forget._execute()
    dep = Dependency(depfile.name)
    assert None == dep._get("t3", "dep")  # t3 forgotten
    assert "1" == dep._get("t1", "dep")   # dependency kept
def testIgnoreOne(self, tasks, depfile):
    """Ignore command flags exactly the selected tasks."""
    output = StringIO.StringIO()
    cmd = Ignore(outstream=output, dep_file=depfile.name, task_list=tasks)
    cmd._execute(["t2", "t1"])
    got = output.getvalue().split("\n")[:-1]
    assert ["ignoring t2", "ignoring t1"] == got
    dep = Dependency(depfile.name)
    assert '1' == dep._get("t1", "ignore:")
    assert '1' == dep._get("t2", "ignore:")
    assert None == dep._get("t3", "ignore:")  # unselected task untouched
def test_dump(self, pdep_manager):
    """A value written through one manager survives a close/re-open cycle."""
    # write one entry, then flush everything to disk
    pdep_manager._set("taskId_X", "dependency_A", "da_md5")
    pdep_manager.close()
    # a fresh manager over the same file must see the persisted value
    reopened = Dependency(pdep_manager.db_class, pdep_manager.name)
    stored = reopened._get("taskId_X", "dependency_A")
    assert stored == "da_md5", stored
def testForgetAll(self, tasks, depfile):
    """Forgetting with no selection clears every task's dep record."""
    self._add_task_deps(tasks, depfile.name)
    output = StringIO.StringIO()
    cmds.doit_forget(depfile.name, tasks, output, [])
    got = output.getvalue().split("\n")[:-1]
    # NOTE: "forgeting" is the exact (misspelled) message doit emits
    assert ["forgeting all tasks"] == got, repr(output.getvalue())
    dep = Dependency(depfile.name)
    for task in tasks:
        assert None == dep._get(task.name, "dep")
def testDontForgetTaskDependency(self, tasks, depfile_name):
    """Forget on t3 must not clear the record of its task_dep t1 (dbm)."""
    self._add_task_deps(tasks, depfile_name)
    output = StringIO()
    cmd_forget = CmdFactory(
        Forget, outstream=output, dep_file=depfile_name, backend='dbm',
        task_list=tasks, sel_tasks=["t3"])
    cmd_forget._execute(False)
    dep = Dependency(DbmDB, depfile_name)
    assert None == dep._get("t3", "dep")  # t3 forgotten
    assert "1" == dep._get("t1", "dep")   # dependency kept
def testDontIgnoreTaskDependency(self, tasks, dep_manager):
    """Ignoring t3 must not mark its task_dep t1 (dep_manager fixture)."""
    output = StringIO()
    cmd = CmdFactory(Ignore, outstream=output, dep_manager=dep_manager,
                     task_list=tasks)
    cmd._execute(["t3"])
    dep = Dependency(DbmDB, dep_manager.name)
    assert '1' == dep._get("t3", "ignore:")   # t3 itself is flagged
    assert None == dep._get("t1", "ignore:")  # its dependency is untouched
def testForgetOne(self, tasks, depfile):
    """Forgetting selected tasks clears only those tasks' records."""
    self._add_task_deps(tasks, depfile.name)
    output = StringIO.StringIO()
    cmds.doit_forget(depfile.name, tasks, output, ["t2", "t1"])
    got = output.getvalue().split("\n")[:-1]
    # NOTE: "forgeting" is the exact (misspelled) message doit emits
    assert ["forgeting t2", "forgeting t1"] == got
    dep = Dependency(depfile.name)
    assert None == dep._get("t1", "dep")
    assert None == dep._get("t2", "dep")
    assert "1" == dep._get("g1.a", "dep")  # unselected task untouched
def test_dump(self, pdepfile):
    """A value written through one manager survives a close/re-open cycle."""
    # write one entry, then flush everything to disk
    pdepfile._set("taskId_X", "dependency_A", "da_md5")
    pdepfile.close()
    # a fresh manager over the same file must see the persisted value
    reopened = Dependency(pdepfile.db_class, pdepfile.name)
    stored = reopened._get("taskId_X", "dependency_A")
    assert stored == "da_md5", stored
def testIgnoreOne(self, tasks, depfile_name):
    """Ignore command flags exactly the selected tasks (dbm backend)."""
    output = StringIO()
    cmd = CmdFactory(Ignore, outstream=output, dep_file=depfile_name,
                     backend='dbm', task_list=tasks)
    cmd._execute(["t2", "t1"])
    got = output.getvalue().split("\n")[:-1]
    assert ["ignoring t2", "ignoring t1"] == got
    dep = Dependency(DbmDB, depfile_name)
    assert '1' == dep._get("t1", "ignore:")
    assert '1' == dep._get("t2", "ignore:")
    assert None == dep._get("t3", "ignore:")  # unselected task untouched
def testForgetAll(self, tasks, depfile):
    """Forget with empty selection clears every task's dep record."""
    self._add_task_deps(tasks, depfile.name)
    output = StringIO()
    cmd_forget = Forget(outstream=output, dep_file=depfile.name,
                        task_list=tasks, sel_tasks=[])
    cmd_forget._execute()
    got = output.getvalue().split("\n")[:-1]
    # NOTE: "forgeting" is the exact (misspelled) message this doit emits
    assert ["forgeting all tasks"] == got, repr(output.getvalue())
    dep = Dependency(depfile.name)
    for task in tasks:
        assert None == dep._get(task.name, "dep")
def testForgetAll(self, tasks, depfile_name):
    """Forget with empty selection clears every task's dep record (dbm)."""
    self._add_task_deps(tasks, depfile_name)
    output = StringIO()
    cmd_forget = CmdFactory(Forget, outstream=output, dep_file=depfile_name,
                            backend='dbm', task_list=tasks, sel_tasks=[])
    cmd_forget._execute(False)
    got = output.getvalue().split("\n")[:-1]
    assert ["forgetting all tasks"] == got, repr(output.getvalue())
    dep = Dependency(DbmDB, depfile_name)
    for task in tasks:
        assert None == dep._get(task.name, "dep")
def testForgetOne(self, tasks, depfile):
    """Forget clears only the selected tasks' records."""
    self._add_task_deps(tasks, depfile.name)
    output = StringIO()
    cmd_forget = Forget(outstream=output, dep_file=depfile.name,
                        task_list=tasks, sel_tasks=["t2", "t1"])
    cmd_forget._execute()
    got = output.getvalue().split("\n")[:-1]
    # NOTE: "forgeting" is the exact (misspelled) message this doit emits
    assert ["forgeting t2", "forgeting t1"] == got
    dep = Dependency(depfile.name)
    assert None == dep._get("t1", "dep")
    assert None == dep._get("t2", "dep")
    assert "1" == dep._get("g1.a", "dep")  # unselected task untouched
def testForgetOne(self, tasks, depfile_name):
    """Forget clears only the selected tasks' records (dbm backend)."""
    self._add_task_deps(tasks, depfile_name)
    output = StringIO()
    cmd_forget = CmdFactory(Forget, outstream=output, dep_file=depfile_name,
                            backend='dbm', task_list=tasks,
                            sel_tasks=["t2", "t1"])
    cmd_forget._execute(False)
    got = output.getvalue().split("\n")[:-1]
    assert ["forgetting t2", "forgetting t1"] == got
    dep = Dependency(DbmDB, depfile_name)
    assert None == dep._get("t1", "dep")
    assert None == dep._get("t2", "dep")
    assert "1" == dep._get("g1.a", "dep")  # unselected task untouched
def dep_manager_fixture(request, dep_class, tmp_path_factory):
    """Create a Dependency manager over a temp DB file; remove it on teardown."""
    filename = str(tmp_path_factory.mktemp('x', True) / 'testdb')
    dep_file = Dependency(dep_class, filename)
    # record which dbm variant backs the file so tests know what extensions
    # exist on disk; non-DbmDB backends get a placeholder value
    dep_file.whichdb = whichdb(dep_file.name) if dep_class is DbmDB else 'XXX'
    dep_file.name_ext = db_ext.get(dep_file.whichdb, [''])

    def remove_depfile():
        # close first if still open, then delete the on-disk file(s)
        if not dep_file._closed:
            dep_file.close()
        remove_db(dep_file.name)
    request.addfinalizer(remove_depfile)

    return dep_file
def testIgnoreGroup(self, tasks, depfile_name):
    """Ignoring a group flags the group and its subtasks, not its task_deps."""
    output = StringIO()
    cmd = Ignore(outstream=output, dep_file=depfile_name, backend='dbm',
                 task_list=tasks)
    cmd._execute(["g1"])
    got = output.getvalue().split("\n")[:-1]
    dep = Dependency(depfile_name)
    assert None == dep._get("t1", "ignore:"), got  # unrelated tasks untouched
    assert None == dep._get("t2", "ignore:")
    assert '1' == dep._get("g1", "ignore:")
    assert '1' == dep._get("g1.a", "ignore:")  # subtasks inherit the ignore
    assert '1' == dep._get("g1.b", "ignore:")
def testIgnoreGroup(self, tasks, dep_manager):
    """Ignoring a group flags the group and its subtasks, not its task_deps."""
    output = StringIO()
    cmd = CmdFactory(Ignore, outstream=output, dep_manager=dep_manager,
                     task_list=tasks)
    cmd._execute(["g1"])
    got = output.getvalue().split("\n")[:-1]
    dep = Dependency(DbmDB, dep_manager.name)
    assert None == dep._get("t1", "ignore:"), got  # unrelated tasks untouched
    assert None == dep._get("t2", "ignore:")
    assert '1' == dep._get("g1", "ignore:")
    assert '1' == dep._get("g1.a", "ignore:")  # subtasks inherit the ignore
    assert '1' == dep._get("g1.b", "ignore:")
def test_remove_from_non_empty_file(self, pdep_manager):
    """remove() deletes only the named task, and the deletion is persisted."""
    # step 1: store two tasks and flush to disk
    pdep_manager._set("taskId_XXX", "dep_1", "x")
    pdep_manager._set("taskId_YYY", "dep_1", "x")
    pdep_manager.close()
    # step 2: re-open and drop one of them
    session = Dependency(pdep_manager.db_class, pdep_manager.name)
    session.remove("taskId_YYY")
    session.close()
    # step 3: re-open once more and confirm the removal stuck
    final = Dependency(pdep_manager.db_class, pdep_manager.name)
    assert final._in("taskId_XXX")
    assert not final._in("taskId_YYY")
def testIgnoreOne(self, tasks, dep_manager):
    """Ignore command flags exactly the selected tasks (dep_manager fixture)."""
    output = StringIO()
    cmd = CmdFactory(Ignore, outstream=output, dep_manager=dep_manager,
                     task_list=tasks)
    cmd._execute(["t2", "t1"])
    got = output.getvalue().split("\n")[:-1]
    assert ["ignoring t2", "ignoring t1"] == got
    dep = Dependency(DbmDB, dep_manager.name)
    assert '1' == dep._get("t1", "ignore:")
    assert '1' == dep._get("t2", "ignore:")
    assert None == dep._get("t3", "ignore:")  # unselected task untouched
def _add_task_deps(tasks, testdb):
    """put some data on testdb"""
    # mark every task as having a "dep" record of "1"
    writer = Dependency(testdb)
    for each in tasks:
        writer._set(each.name, "dep", "1")
    writer.close()
    # re-open to confirm the data really hit the disk
    checker = Dependency(testdb)
    assert "1" == checker._get("g1.a", "dep")
    checker.close()
def create_tasks():
    """Build the standard task fixture and record a dep entry for each in TESTDB."""
    remove_testdb()
    tasks = [
        Task("t1", [""]),
        Task("t2", [""]),
        Task("g1", None, task_dep=['g1.a', 'g1.b']),
        Task("g1.a", [""]),
        Task("g1.b", [""]),
        Task("t3", [""], task_dep=['t1']),
        Task("g2", None, task_dep=['t1', 'g1']),
    ]
    # seed the DB so every task starts with a "dep" record of "1"
    db = Dependency(TESTDB)
    for each in tasks:
        db._set(each.name, "dep", "1")
    db.close()
    return tasks
def testForgetGroup(self, tasks, depfile_name):
    """Forgetting a group clears the group and its subtasks only."""
    self._add_task_deps(tasks, depfile_name)
    output = StringIO()
    cmd_forget = Forget(outstream=output, dep_file=depfile_name,
                        backend='dbm', task_list=tasks, sel_tasks=["g1"])
    cmd_forget._execute(False)
    got = output.getvalue().split("\n")[:-1]
    assert "forgetting g1" == got[0]
    dep = Dependency(depfile_name)
    assert "1" == dep._get("t1", "dep")  # unrelated tasks keep their records
    assert "1" == dep._get("t2", "dep")
    assert None == dep._get("g1", "dep")
    assert None == dep._get("g1.a", "dep")  # subtasks are forgotten too
    assert None == dep._get("g1.b", "dep")
def CmdFactory(cls, outstream=None, task_loader=None, dep_file=None,
               backend=None, task_list=None, sel_tasks=None,
               dep_manager=None, config=None, cmds=None):
    """helper for test code, so test can call _execute() directly

    Builds a doit command instance wired with just enough state
    (dep manager, task list, selection) to run without the CLI.
    """
    loader = get_loader(config, task_loader, cmds)
    cmd = cls(task_loader=loader, config=config, cmds=cmds)
    if outstream:
        cmd.outstream = outstream
    # `backend` takes precedence over an explicitly supplied dep_manager
    if backend:
        assert backend == "dbm"  # the only one used on tests
        cmd.dep_manager = Dependency(DbmDB, dep_file, MD5Checker)
    elif dep_manager:
        cmd.dep_manager = dep_manager
    cmd.dep_file = dep_file  # (str) filename usually '.doit.db'
    cmd.task_list = task_list  # list of tasks
    cmd.sel_tasks = sel_tasks  # from command line or default_tasks
    return cmd
def dep_manager_fixture(request, dep_class):
    """Build a Dependency manager in a per-test temp dir; remove on teardown."""
    # copied from tempdir plugin
    name = request._pyfuncitem.name
    # NOTE(review): py.std is deprecated in modern py/pytest; plain `re`
    # would do the same job here
    name = py.std.re.sub("[\W]", "_", name)
    my_tmpdir = request.config._tmpdirhandler.mktemp(name, numbered=True)
    dep_file = Dependency(dep_class,
                          os.path.join(my_tmpdir.strpath, "testdb"))
    # record which dbm variant backs the file; non-DbmDB gets a placeholder
    dep_file.whichdb = whichdb(dep_file.name) if dep_class is DbmDB else 'XXX'
    dep_file.name_ext = db_ext.get(dep_file.whichdb, [''])

    def remove_depfile():
        if not dep_file._closed:
            dep_file.close()
        remove_db(dep_file.name)
    request.addfinalizer(remove_depfile)

    return dep_file
def test_updateDependencies(self, reporter, RunnerClass, depfile_name):
    """Running a task records checksums of its (just-modified) file deps."""
    # append to the dependency file so its checksum is guaranteed stale
    depPath = os.path.join(os.path.dirname(__file__), "data/dependency1")
    ff = open(depPath, "a")
    ff.write("xxx")
    ff.close()
    dependencies = [depPath]

    # touch the target file as well
    filePath = os.path.join(os.path.dirname(__file__), "data/target")
    ff = open(filePath, "a")
    ff.write("xxx")
    ff.close()
    targets = [filePath]

    t1 = Task("t1", [my_print], dependencies, targets)
    my_runner = RunnerClass(Dependency, depfile_name, reporter)
    my_runner.run_tasks(TaskDispatcher({'t1': t1}, [], ['t1']))
    assert runner.SUCCESS == my_runner.finish()
    d = Dependency(depfile_name)
    # a (truthy) checksum must have been stored for the dependency
    assert d._get("t1", os.path.abspath(depPath))
def test_updateDependencies(self, reporter, RunnerClass, depfile_name):
    """Running a task records checksums of its (just-modified) file deps."""
    # append to the dependency file so its checksum is guaranteed stale
    depPath = os.path.join(os.path.dirname(__file__), "data/dependency1")
    ff = open(depPath, "a")
    ff.write("xxx")
    ff.close()
    dependencies = [depPath]

    # touch the target file as well
    filePath = os.path.join(os.path.dirname(__file__), "data/target")
    ff = open(filePath, "a")
    ff.write("xxx")
    ff.close()
    targets = [filePath]

    t1 = Task("t1", [my_print], dependencies, targets)
    my_runner = RunnerClass(Dependency, depfile_name, reporter)
    my_runner.run_tasks(TaskDispatcher({'t1': t1}, [], ['t1']))
    assert runner.SUCCESS == my_runner.finish()
    d = Dependency(depfile_name)
    # a (truthy) checksum must have been stored for the dependency
    assert d._get("t1", os.path.abspath(depPath))
def test_remove_from_non_empty_file(self, pdepfile):
    """remove() deletes only the named task, and the deletion is persisted."""
    # 1 - put 2 tasks of file
    pdepfile._set("taskId_XXX", "dep_1", "x")
    pdepfile._set("taskId_YYY", "dep_1", "x")
    pdepfile.close()
    # 2 - re-open and remove one task
    reopened = Dependency(pdepfile.db_class, pdepfile.name)
    reopened.remove("taskId_YYY")
    reopened.close()
    # 3 - re-open again and check task was really removed
    reopened2 = Dependency(pdepfile.db_class, pdepfile.name)
    assert reopened2._in("taskId_XXX")
    assert not reopened2._in("taskId_YYY")
def test_updateDependencies(self, reporter, RunnerClass):
    """Running a task records checksums of its (just-modified) file deps."""
    # append to the dependency file so its checksum is guaranteed stale
    depPath = os.path.join(os.path.dirname(__file__), "data/dependency1")
    ff = open(depPath, "a")
    ff.write("xxx")
    ff.close()
    dependencies = [depPath]

    # touch the target file as well
    filePath = os.path.join(os.path.dirname(__file__), "data/target")
    ff = open(filePath, "a")
    ff.write("xxx")
    ff.close()
    targets = [filePath]

    tasks = [Task("taskX", [my_print], dependencies, targets)]
    my_runner = RunnerClass(TESTDB, reporter)
    tc = TaskControl(tasks)
    tc.process(None)
    my_runner.run_tasks(tc)
    assert runner.SUCCESS == my_runner.finish()
    d = Dependency(TESTDB)
    # a (truthy) checksum must have been stored for the dependency
    assert d._get("taskX", os.path.abspath(depPath))
def check_status(task, tasks=None, dep_file='.doit.db'):
    """Return the doit status of *task*, consulting the dbm-backed DB.

    If *tasks* is omitted, the task is checked against itself only.
    """
    if tasks is None:
        tasks = [task]
    manager = Dependency(DbmDB, os.path.abspath(dep_file))
    return manager.get_status(task, tasks)
class TaskHandler(TaskLoader):

    def __init__(self, directory, logger, files=None, profile=False,
                 db=None, n_threads=1, **doit_config_kwds):
        '''Stores tasks and the files they operate on, along with doit config
        and other metadata. This is the core of the pipelines: it passes its
        tasks along to doit for execution, and can check task and pipeline
        completion status.

        Args:
            directory (str): The directory in which to run the tasks. Will be
                created if it doesn't exist.
            logger (logging.Logger): Logger to record to.
            files (dict): Files used by the handler. Starts empty if omitted.
            profile (bool): If True, profile task execution.
            db (str): Name of the doit database.
            **doit_config_kwds: Keyword arguments passed to doit.

        Attributes:
            files (dict): Files used by the tasks.
            directory (str): Working directory for execution.
            tasks (OrderedDict): The tasks to execute.
            dep_file (str): Path of the doit database.
            doit_config (dict): The doit configuration given to the task
                runner.
            doit_dep_mgr (doit.dependency.Dependency): Doit object to track
                task status.
            profile (bool): Whether to run the profiler on tasks.
            logger (logging.Logger): Logger to use.
        '''
        super(TaskHandler, self).__init__()

        if files is None:
            self.files = {}
        elif type(files) is not dict:
            raise TypeError('files must be of type dict')
        else:
            self.files = files

        self.tasks = OrderedDict()

        self.directory = directory
        try:
            mkdir(directory)
        except OSError:
            # directory already exists -- that's fine
            pass

        if db is None:
            dep_file = path.join(self.directory, 'doit.db')
        else:
            dep_file = path.join(self.directory, '{0}.doit.db'.format(db))
        self.dep_file = dep_file
        logger.debug('Dependency Database File: {0}'.format(dep_file))
        self.doit_config = dict(dep_file=self.dep_file,
                                reporter=ui.GithubMarkdownReporter,
                                **doit_config_kwds)
        self.doit_dep_mgr = Dependency(SqliteDB, dep_file)
        self.n_threads = n_threads
        self.profile = profile
        self.logger = logger

    def register_task(self, name, task, files=None):
        '''Register a new task and its files with the handler.

        It may seem redundant or confusing to give the tasks a name different
        than their internal doit name. I do this because doit tasks need to
        have names as unique as possible, so that they can be reused in
        different projects. A particular TaskHandler instance is only used
        for one pipeline run, and allowing different names makes it easier
        to reference tasks from elsewhere.

        Args:
            name (str): Name of the task. Does not have to correspond to
                doit's internal task name.
            task (:obj:): Either a dictionary or Task object.
            files (dict): Dictionary of files used.
        '''
        if files is None:
            files = {}
        if type(files) is not dict:
            raise TypeError('files must be of type dict')

        self.tasks[name] = task
        self.files.update(files)
        self.logger.debug('registered task {0}: {1}\n'
                          ' with files {2}'.format(name, task, files))

    def clear_tasks(self):
        '''Empty the task dictionary.'''
        self.logger.debug('Clearing {0} tasks'.format(len(self.tasks)))
        self.tasks = {}

    def get_status(self, task, move=False):
        '''Get the up-to-date status of a single task.

        Args:
            task (str): The task name to look up.
            move (bool): If True, move to the handler's directory before
                checking. Whether this is necessary depends mostly on whether
                the task uses relative or absolute paths.

        Returns:
            str: The string representation of the status. Either "run" or
            "uptodate".
        '''
        if type(task) is str:
            try:
                task = self.tasks[task]
            except KeyError:
                self.logger.error('Task not found:{0}'.format(task))
                raise
        self.logger.debug('Getting status for task {0}'.format(task.name))
        if move:
            with Move(self.directory):
                status = self.doit_dep_mgr.get_status(task,
                                                      self.tasks.values(),
                                                      get_log=True)
        else:
            status = self.doit_dep_mgr.get_status(task,
                                                  self.tasks.values(),
                                                  get_log=True)
        self.logger.debug('Task {0} had status {1}'.format(task,
                                                           status.status))
        try:
            self.logger.debug('Task {0} had reasons {1}'.format(
                task, status.reasons))
        except AttributeError:
            # some doit versions don't attach `reasons` to the result
            pass
        return status.status

    def print_statuses(self, uptodate_msg='All tasks up-to-date!',
                       outofdate_msg='Some tasks out of date!'):
        '''Print the up-to-date status of all tasks.

        Args:
            uptodate_msg (str): The message to print if all tasks are up to
                date.
            outofdate_msg (str): The message to print otherwise.
        Returns:
            tuple: A bool (True if all up to date) and a dictionary of
            statuses.
        '''
        uptodate, statuses = self.check_uptodate()
        if uptodate:
            print(ui.paragraph(uptodate_msg))
        else:
            print(ui.paragraph(outofdate_msg))
            uptodate_list = [t for t, s in statuses.items() if s is True]
            outofdate_list = [t for t, s in statuses.items() if s is False]
            if uptodate_list:
                print('\nUp-to-date tasks:')
                print(ui.listing(uptodate_list))
            if outofdate_list:
                print('\nOut-of-date tasks:')
                print(ui.listing(outofdate_list))
        return uptodate, statuses

    def check_uptodate(self):
        '''Check if all tasks are up-to-date, ie if the pipeline is complete.
        Note that this moves to the handler's directory to lessen issues with
        relative versus absolute paths.

        Returns:
            tuple: A bool (True if all are up to date) and a dict mapping
            task name to its boolean up-to-date status.
        '''
        with Move(self.directory):
            statuses = {}
            for task_name, task in self.tasks.items():
                status = self.get_status(task)
                statuses[task_name] = status == 'up-to-date'
            return all(statuses.values()), statuses

    def load_tasks(self, cmd, opt_values, pos_args):
        '''Internal to doit -- triggered by the TaskLoader.'''
        self.logger.debug('loading {0} tasks'.format(len(self.tasks)))
        return self.tasks.values(), self.doit_config

    def run(self, doit_args=None, verbose=True):
        '''Run the pipeline. Moves to the directory, loads the tasks into
        doit, and executes the tasks that are not up-to-date.

        Args:
            doit_args (list): Args that would be passed to the doit shell
                command. By default, just run.
            verbose (bool): If True, print UI stuff.
        Returns:
            int: Exit status of the doit command.
        '''
        if verbose:
            print(ui.header('Run Tasks', level=4))
        if doit_args is None:
            doit_args = ['run']
            if self.n_threads > 1:
                doit_args.extend(['-n', str(self.n_threads)])

        runner = DoitMain(self)
        with Move(self.directory):
            if self.profile is True:
                profile_fn = path.join(self.directory, 'profile.csv')
                with StartProfiler(filename=profile_fn):
                    return runner.run(doit_args)
            else:
                return runner.run(doit_args)
def _add_task_deps(tasks, testdb):
    """put some data on testdb"""
    # mark every task as having a "dep" record of "1" (dbm backend)
    writer = Dependency(DbmDB, testdb)
    for each in tasks:
        writer._set(each.name, "dep", "1")
    writer.close()
    # re-open to confirm the data really hit the disk
    checker = Dependency(DbmDB, testdb)
    assert "1" == checker._get("g1.a", "dep")
    checker.close()
def dep_manager(request, depfile_name):
    """Fixture: a dbm-backed Dependency manager over *depfile_name*."""
    manager = Dependency(DbmDB, depfile_name)
    return manager
def check_status(task, dep_file='.doit.db'):
    """Return the doit status of *task*, checked against itself only."""
    db_path = os.path.abspath(dep_file)
    manager = Dependency(DbmDB, db_path)
    return manager.get_status(task, [task])