def task_projection():
    departements = "departements-version-simplifiee.geojson"
    projected_departements = BUILD_DIR / "projected-departements.geojson"
    yield {
        "name": "departements",
        "file_dep": [departements],
        "targets": [projected_departements],
        "actions": [
            f"geoproject '{PROJECTION}' < '{departements}' > '{projected_departements}'"
        ],
        "uptodate": [config_changed(PROJECTION)],
    }

    villes = BUILD_DIR / "villes.geojson"
    projected_villes = BUILD_DIR / "projected-villes.geojson"
    yield {
        "name": "villes",
        "file_dep": [villes],
        "targets": [projected_villes],
        "actions": [f"geoproject '{PROJECTION}' < '{villes}' > '{projected_villes}'"],
        "uptodate": [config_changed(PROJECTION)],
    }
def test_unicode(self):
    ua = tools.config_changed({'x': u"中文"})
    ub = tools.config_changed('b')
    t1 = task.Task("TaskX", None, uptodate=[ua])
    assert False == ua(t1, t1.values)
    assert False == ub(t1, t1.values)
    t1.save_extra_values()
    assert True == ua(t1, t1.values)
    assert False == ub(t1, t1.values)
def test_string(self):
    ua = tools.config_changed('a')
    ub = tools.config_changed('b')
    t1 = task.Task("TaskX", None, uptodate=[ua])
    assert False == ua(t1, t1.values)
    assert False == ub(t1, t1.values)
    t1.save_extra_values()
    assert True == ua(t1, t1.values)
    assert False == ub(t1, t1.values)
def test_dict(self):
    ua = tools.config_changed({'x': 'a', 'y': 1})
    ub = tools.config_changed({'x': 'b', 'y': 1})
    t1 = task.Task("TaskX", None, uptodate=[ua])
    assert False == ua(t1, t1.values)
    assert False == ub(t1, t1.values)
    t1.save_extra_values()
    assert True == ua(t1, t1.values)
    assert False == ub(t1, t1.values)
def _make_ext_data_files(ext):
    """ensure a single extension's data_files are set up properly"""
    wxyz_name = ext.parent.name
    py_pkg = ext.parent.parent.parent.parent
    package_json = ext / "package.json"
    package_data = P.TS_PACKAGE_CONTENT[package_json]
    setup_py = py_pkg / "setup.py"
    manifest_in = py_pkg / "MANIFEST.in"
    install_json = ext.parent / "install.json"

    yield dict(
        name=f"{wxyz_name}:setup.py",
        uptodate=[config_changed(P.PY_SETUP_TEXT)],
        file_dep=[package_json],
        targets=[setup_py],
        actions=[
            # write the file, then return None via [-1]: doit would reject
            # the int that write_text returns as a python-action result
            lambda: [
                setup_py.write_text(
                    P.PY_SETUP_TEMPLATE.render(wxyz_name=wxyz_name, **package_data)
                ),
                None,
            ][-1],
            ["isort", setup_py],
            ["black", setup_py],
        ],
    )

    yield dict(
        name=f"{wxyz_name}:manifest.in",
        uptodate=[config_changed(P.MANIFEST_TEXT)],
        file_dep=[package_json],
        targets=[manifest_in],
        actions=[
            lambda: [
                manifest_in.write_text(
                    P.MANIFEST_TEMPLATE.render(wxyz_name=wxyz_name, **package_data)
                ),
                None,
            ][-1]
        ],
    )

    yield dict(
        name=f"{wxyz_name}:install.json",
        uptodate=[config_changed(P.INSTALL_JSON_TEXT)],
        file_dep=[package_json],
        targets=[install_json],
        actions=[
            lambda: [
                install_json.write_text(
                    P.INSTALL_JSON_TEMPLATE.render(wxyz_name=wxyz_name, **package_data)
                ),
                None,
            ][-1]
        ],
    )
def test_nested_dict(self):
    # both dictionaries contain the same values, but the nested
    # dictionary keys are given in a different order
    c1a = tools.config_changed({'x': 'a', 'y': {'one': 1, 'two': 2}})
    c1b = tools.config_changed({'y': {'two': 2, 'one': 1}, 'x': 'a'})
    t1 = task.Task("TaskX", None, uptodate=[c1a])
    assert False == c1a(t1, t1.values)
    t1.save_extra_values()
    assert True == c1a(t1, t1.values)
    assert True == c1b(t1, t1.values)
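The order-insensitivity exercised by test_nested_dict comes from config_changed serializing dict configs deterministically before hashing; a minimal sketch of that idea (simplified, not doit's exact code):

import hashlib
import json

def _digest(config):
    # a str config is compared verbatim; a dict is dumped with sorted keys,
    # so {'one': 1, 'two': 2} and {'two': 2, 'one': 1} hash identically
    if isinstance(config, str):
        return config
    return hashlib.md5(
        json.dumps(config, sort_keys=True).encode("utf-8")
    ).hexdigest()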
def task_test():
    """(dry)run tests"""
    env = "test"
    pym = [*P.RUN_IN[env], *P.PYM]
    dry_run_stem = P.get_atest_stem(
        extra_args=["--dryrun"], lockfile=P.get_lockfile(env), browser=P.BROWSER
    )
    real_stem = P.get_atest_stem(lockfile=P.get_lockfile(env), browser=P.BROWSER)
    dry_target = P.ATEST_OUT / dry_run_stem / P.ATEST_OUT_XML
    real_target = P.ATEST_OUT / real_stem / P.ATEST_OUT_XML

    clean, touch = P.get_ok_actions(P.OK.robot_dry_run)

    robot_deps = [*P.PY_SRC, *P.ALL_ROBOT, P.PIP_LISTS[env], P.SCRIPTS / "atest.py"]

    yield dict(
        name="dryrun",
        doc="pass the tests through the robot machinery, but don't actually _run_ anything",
        uptodate=[config_changed(os.environ.get("ATEST_ARGS", ""))],
        actions=[clean, [*pym, "_scripts.atest", "--dryrun"], touch],
        file_dep=robot_deps,
        targets=[dry_target, P.OK.robot_dry_run],
    )

    clean, touch = P.get_ok_actions(P.OK.robot)

    yield dict(
        name="atest",
        doc="run acceptance tests with robot",
        uptodate=[config_changed(os.environ.get("ATEST_ARGS", ""))],
        actions=[clean, [*pym, "_scripts.atest"], touch],
        file_dep=[P.OK.robot_dry_run, *robot_deps],
        targets=[real_target, P.OK.robot],
    )

    # presently not running this on CI
    yield dict(
        name="combine",
        doc="combine all robot outputs into a single HTML report",
        actions=[[*pym, "_scripts.combine"]],
        file_dep=[
            real_target,
            *P.ATEST_OUT.rglob(P.ATEST_OUT_XML),
            P.SCRIPTS / "combine.py",
        ],
    )
def test_using_custom_encoder(self):
    class DatetimeJSONEncoder(json.JSONEncoder):
        def default(self, o):
            if isinstance(o, datetime.datetime):
                return o.isoformat()
            # delegate anything else, so unsupported types still raise TypeError
            return super().default(o)

    ua = tools.config_changed(
        {'a': datetime.datetime(2018, 12, 10, 10, 33, 55, 478421), 'b': 'bb'},
        encoder=DatetimeJSONEncoder)
    ub = tools.config_changed(
        {'a': datetime.datetime.now(), 'b': 'bb'},
        encoder=DatetimeJSONEncoder)
    t1 = task.Task("TaskX", None, uptodate=[ua])
    assert ua(t1, t1.values) is False
    assert ub(t1, t1.values) is False
    t1.save_extra_values()
    assert ua(t1, t1.values) is True
    assert ub(t1, t1.values) is False
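The encoder hook shown above works for any value json.dumps cannot handle natively; a hedged sketch of the same pattern for pathlib.Path values (names illustrative):

import json
from pathlib import Path

from doit import tools

class PathJSONEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, Path):
            return str(o)  # serialize paths as plain strings
        return super().default(o)

# without the encoder, json.dumps would raise TypeError on the Path value
check = tools.config_changed({'out': Path('build/report.html')},
                             encoder=PathJSONEncoder)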
def task_checklinks():
    """check whether links in built docs are valid"""
    key = "check_links"
    args = [
        "pytest-check-links",
        "-o",
        "junit_suite_name=checklinks",
        "--check-anchors",
        "--check-links-cache",
        "--check-links-cache-name=build/check_links/cache",
        # a few days seems reasonable
        f"--check-links-cache-expire-after={60 * 60 * 24 * 3}",
        # might be able to relax this, eventually
        "-k",
        "not (master or carousel)",
    ]
    return dict(
        uptodate=[config_changed(dict(args=args))],
        actions=[
            U.okit(key, remove=True),
            lambda: (P.BUILD / "check_links/cache").mkdir(parents=True, exist_ok=True),
            [*args, P.DOCS_OUT],
            U.okit(key),
        ],
        file_dep=[*P.ALL_SPELL_DOCS()],
        targets=[P.OK / key],
    )
def _make_py_rst(setup_py):
    pkg = setup_py.parent.name
    name = pkg.replace("wxyz_", "")
    out = P.DOCS / "widgets"
    target = out / f"{name}.rst"
    module = pkg.replace("_", ".", 1)

    def _write():
        if not out.exists():
            out.mkdir()
        target.write_text(
            P.PY_RST_TEMPLATE.render(
                name=name,
                module=module,
                stars="*" * len(module),
                exclude_members=", ".join(dir(ipywidgets.DOMWidget)),
            )
        )

    return dict(
        name=f"rst:{setup_py.parent.name}",
        actions=[_write],
        targets=[target],
        uptodate=[config_changed(P.PY_RST_TEMPLATE_TXT)],
        file_dep=[*(setup_py.parent / "src").rglob("*.py"), P.OK / "setup_py"],
    )
def gen_deps(self):
    """generate doit tasks to find imports

    generated tasks:
      * get_dep:<path> => find imported modules
      * dep-json => save import info in a JSON file
    """
    watched_modules = str(list(sorted(self.py_files)))
    for mod in self.py_files:
        # direct dependencies
        yield {
            'basename': 'get_dep',
            'name': mod,
            'actions': [(self.action_get_dep, [mod])],
            'file_dep': [mod],
            'uptodate': [config_changed(watched_modules)],
        }

    # Create an intermediate json file with import information.
    # An intermediate file is required because DelayedTasks cannot
    # use getargs to take values from other tasks.
    yield {
        'basename': 'dep-json',
        'actions': [self.action_write_json_deps],
        'task_dep': ['get_dep'],
        'getargs': {'imports': ('get_dep', None)},
        'targets': [self.json_file],
        'doc': 'save dep info in {}'.format(self.json_file),
    }
def task_pytest():
    """run python unit tests"""
    utest_args = [
        *P.APR_DEFAULT,
        "pytest",
        "--cov-fail-under",
        str(P.PYTEST_COV_THRESHOLD),
    ]

    if P.UTEST_PROCESSES:
        utest_args += ["-n", P.UTEST_PROCESSES]

    pytest_args = os.environ.get("PYTEST_ARGS", "").strip()

    if pytest_args:
        try:
            utest_args += json.loads(pytest_args)
        except Exception as err:
            print(err)

    yield dict(
        name="utest",
        doc="run unit tests with pytest",
        uptodate=[config_changed(COMMIT)],
        file_dep=[*P.ALL_PY_SRC, P.SETUP_CFG, P.OK_PIP_INSTALL],
        targets=[P.HTMLCOV_INDEX, P.PYTEST_HTML, P.PYTEST_XUNIT],
        actions=[
            utest_args,
            lambda: U.strip_timestamps(
                *P.HTMLCOV.rglob("*.html"), P.PYTEST_HTML, slug=COMMIT
            ),
        ],
    )
def task_haskell():
    yield {
        'name': 'cabal',
        'actions': [
            (copy_file_replace,
             ['haskell/fresco-binding.cabal.tmpl',
              {'{version}': version_haskell_fresco}]),
        ],
        'targets': ['haskell/fresco-binding.cabal'],
        'uptodate': [config_changed(version_haskell_fresco)],
        'file_dep': ['haskell/fresco-binding.cabal.tmpl'],
    }

    yield {
        'name': 'library',
        'actions': [
            'cd haskell && stack build',
            'cd haskell && stack sdist',
            (make_dir, ['build-haskell']),
            'cd haskell && bash -c "cp `find .stack-work | grep .tar.gz` ../build-haskell"',
        ],
        'targets': ['build-haskell/fresco-binding-' + version_haskell_fresco + '.tar.gz'],
        'file_dep': [
            'haskell/Fresco/Component.hs',
            'haskell/Fresco/Entity.hs',
            'haskell/Fresco/System.hs',
            'haskell/fresco-binding.cabal',
            'haskell/Fresco.hs',
            'haskell/LICENSE',
            'haskell/stack.yaml',
        ],
    }
def task_setup():
    """perform all setup activities"""
    _install = ["--no-deps", "--ignore-installed", "-vv"]

    if P.INSTALL_ARTIFACT == "wheel":
        _install += [P.WHEEL]
    elif P.INSTALL_ARTIFACT == "sdist":
        _install += [P.SDIST]
    else:
        _install += ["-e", "."]

    yield _ok(
        dict(
            name="py",
            file_dep=[P.SETUP_PY, P.SETUP_CFG, P.OK_ENV["dev"], P.WHEEL, P.SDIST],
            uptodate=[config_changed({"artifact": P.INSTALL_ARTIFACT})],
            actions=[
                [*P.APR_DEV, *P.PIP, "install", *_install],
                [*P.APR_DEV, *P.PIP, "check"],
            ],
        ),
        P.OK_PIP_INSTALL,
    )

    yield dict(
        name="js",
        file_dep=[P.YARN_LOCK, P.PACKAGE, P.OK_ENV["dev"]],
        actions=[[*P.APR_DEV, *P.JLPM_INSTALL]],
        targets=[P.YARN_INTEGRITY],
    )
def task_lab():
    """start a jupyter lab server (with all other extensions)"""
    env = "test"
    lockfile = P.get_lockfile(env)
    str_lock = str(lockfile)
    needs_build = "lab1" in str_lock or "lab2" in str_lock
    frozen = P.PIP_LISTS[env]
    run_in = P.RUN_IN[env]
    pym = [*run_in, *P.PYM]

    app_dir = []
    if needs_build and not P.IN_BINDER:
        app_dir = ["--app-dir", P.APP_DIR]

    lab = [*pym, "jupyter", "lab"]
    lab_ext = [*pym, "jupyter", "labextension"]

    serve_deps = [frozen]

    if needs_build:
        yield dict(
            name="ext",
            uptodate=[config_changed({"labextensions": P.LAB_EXTENSIONS})],
            actions=[
                [*lab_ext, "install", *app_dir, *P.LAB_EXTENSIONS, "--no-build"],
                [*lab, "build", *app_dir, "--debug"],
            ],
            file_dep=[frozen],
            targets=[P.APP_INDEX],
        )
        serve_deps += [P.APP_INDEX]

    def _lab():
        p = subprocess.Popen(
            [*lab, *app_dir, "--no-browser", "--debug"], stdin=subprocess.PIPE
        )
        try:
            p.wait()
        except KeyboardInterrupt:
            p.terminate()
            p.communicate(b"y\n")
            p.terminate()
        finally:
            p.wait()
        print("maybe check your process log")

    yield dict(
        name="serve",
        doc="runs lab (never stops)",
        uptodate=[lambda: False],
        actions=[PythonInteractiveAction(_lab)],
        file_dep=serve_deps,
    )
def task_k8s():
    return {
        'actions': [make_k8s_def],
        'file_dep': ['k8s.tpl.yml'],
        'uptodate': [config_changed(get_k8s_config())],
        'targets': ['k8s.yml'],
    }
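task_k8s shows the most common shape: regenerate a file when either its template (a file_dep) or the configuration it is rendered from changes. A minimal, self-contained dodo.py in the same spirit (file names and values are hypothetical):

from doit.tools import config_changed

CONFIG = {"replicas": 3, "image": "nginx:1.25"}  # hypothetical values

def task_render():
    def render():
        # naive placeholder substitution, just to have a concrete action
        text = open("app.tpl.yml").read()
        for key, value in CONFIG.items():
            text = text.replace("{%s}" % key, str(value))
        open("app.yml", "w").write(text)

    return {
        "actions": [render],
        "file_dep": ["app.tpl.yml"],
        "targets": ["app.yml"],
        # re-run even when app.tpl.yml is unchanged, if CONFIG changed
        "uptodate": [config_changed(CONFIG)],
    }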
def dodict(action, alias=None, name=None, always=False, clean=False):
    to_clean = []
    cstrings = {}

    # long actions can be supplied as a list
    if isinstance(action, list):
        action = " ".join(action)
    cstrings["action_original"] = action

    # replace aliased items via python formatting
    if alias is None:
        alias = {}
    for old, new in c_default_alias.items():
        if old in alias and alias[old] != new:
            print("warning, default alias overwrite:", old, alias[old], "<--", new,
                  file=sys.stderr)
        alias[old] = new
    action = action.format(**alias)
    cstrings["action_formatted"] = action

    # process file deps
    file_dep = []
    for match in re.finditer(r"([Dd]):(.*?)(\s|$)", action):
        flag, item, other = match.groups()
        if flag == "d":
            file_dep.append(item)
        elif flag == "D":
            cstrings[item] = status(item)
    action = re.sub(r"[Dd]:", "", action)

    # process targets
    targets = []
    for match in re.finditer(r"([Tt]):(.*?)(\s|$)", action):
        flag, item, other = match.groups()
        if flag == "t":
            targets.append(item)
        elif flag == "T":
            if not clean:
                sys.exit(
                    "Lethal Error: Folder target 'T:' used without invoking clean=True"
                )
            targets.append(item)
            to_clean.append(item)
    action = re.sub(r"[Tt]:", "", action)

    if len(targets) == 0:
        say("Action has no targets?\n\t{}".format(cstrings["action_original"]))

    # remove commented items
    action = re.sub(" +#.*", "", action)
    cstrings["action_uncommented"] = action

    # expected task dictionary (augmented below)
    doitdict = {
        "targets": targets,
        "file_dep": file_dep,
        "actions": [(clean_targets, to_clean), (mkdirs, targets), action],
        "uptodate": [not always, config_changed(cstrings)],
        "verbosity": 2,
    }

    if name is not None:
        if not isinstance(name, str):
            name = ":".join([str(k) for k in name])
        doitdict["name"] = name

    # return task dictionary
    return doitdict
def task_setup():
    """ensure a working setup"""
    yield dict(
        name="js",
        doc="ensure local npm dependencies",
        uptodate=[tools.config_changed(U.pkg_deps(P.PKG_JSONS))],
        actions=[[*C.JLPM, "--prefer-offline"], [*C.LERNA, "bootstrap"]],
        targets=[P.YARN_INTEGRITY],
    )
def create_render_task(name, build_dir, conf):
    template = f"{name}.xmds"
    script = build_dir / template
    return {
        'name': 'render',
        'actions': [(render_template, (template, script, conf))],
        'uptodate': [config_changed(conf)],
        'file_dep': template_files,
        'targets': [script],
    }
def task_lint():
    """detect and (hopefully) correct code style/formatting"""
    for label, files in P.LINT_GROUPS.items():
        for linter in _make_linters(label, files):
            yield linter

    yield dict(
        name="prettier:core",
        uptodate=[config_changed(P.README.read_text(encoding="utf-8"))],
        file_dep=[P.YARN_INTEGRITY, P.YARN_LOCK],
        actions=[["jlpm", "prettier", "--write", "--list-different", P.README]],
        targets=[P.README],
    )

    yield dict(
        name="prettier:rest",
        file_dep=[P.YARN_INTEGRITY, P.YARN_LOCK, *P.ALL_PRETTIER],
        targets=[P.OK / "prettier"],
        actions=[
            U.okit("prettier", remove=True),
            ["jlpm", "lint:prettier"],
            U.okit("prettier"),
        ],
    )

    yield dict(
        name="eslint",
        file_dep=[
            P.YARN_INTEGRITY,
            P.YARN_LOCK,
            P.OK / "prettier",
            *sum([[*p.rglob("*.ts")] for p in P.TS_SRC], []),
        ],
        targets=[P.OK / "eslint"],
        actions=[
            U.okit("eslint", remove=True),
            ["jlpm", "lint:eslint"],
            U.okit("eslint"),
        ],
    )

    yield dict(
        name="robot",
        file_dep=[*P.ALL_ROBOT, *P.ATEST_PY],
        targets=[P.OK / "robot_lint"],
        actions=[
            U.okit("robot_dry_run", remove=True),
            [*P.PYM, "robot.tidy", "--inplace", *P.ALL_ROBOT],
            [*ATEST, "--dryrun"],
            U.okit("robot_lint"),
        ],
    )
def create_catalog_tasks(packages: List[ElmPackage], output_path: Path,
                         mount_point: str = ''):
    page_flags = {'mount_point': mount_point}

    # index
    index_path = output_path / 'index.html'
    yield {
        'basename': 'index',
        'actions': [(page_tasks.write_page, (index_path,), page_flags)],
        'targets': [index_path],
        'uptodate': [config_changed(page_flags)],
    }

    # search.json
    search_json_path = output_path / 'search.json'
    search_entries = list(map(SearchEntry.from_package, packages))
    yield {
        'basename': 'search_json',
        'actions': [(write_search_json, (search_entries, search_json_path))],
        'targets': [search_json_path],
        'uptodate': [
            config_changed(
                {'entries': [attr.asdict(entry) for entry in search_entries]}
            )
        ],
    }

    # help pages
    for help_file in asset_tasks.bundled_helps:
        url_path = Path(help_file).relative_to('assets').with_suffix('')
        help_output_path = output_path / url_path
        yield {
            'basename': 'help',
            'name': url_path,
            'actions': [(page_tasks.write_page, (help_output_path,), page_flags)],
            'targets': [help_output_path],
            'file_dep': [output_path / help_file],
            'uptodate': [config_changed(page_flags)],
        }
def task__download_deb_packages() -> types.TaskDict:
    """Download Debian packages locally."""
    witness = constants.PKG_DEB_ROOT / '.witness'

    def clean() -> None:
        """Delete downloaded Debian packages."""
        for repository in DEB_REPOSITORIES:
            # Repositories with an explicit list of packages are created by a
            # dedicated task that will also handle their cleaning, so we skip
            # them here.
            if repository.packages:
                continue
            coreutils.rm_rf(repository.pkgdir)
        utils.unlink_if_exist(witness)
        constants.REPO_DEB_ROOT.rmdir()

    def mkdirs() -> None:
        """Create directories for the repositories."""
        for repository in DEB_REPOSITORIES:
            repository.pkgdir.mkdir(exist_ok=True)

    mounts = [
        utils.bind_ro_mount(
            source=constants.ROOT / 'packages' / 'debian' / 'download_packages.py',
            target=Path('/download_packages.py'),
        ),
        utils.bind_mount(source=constants.PKG_DEB_ROOT, target=Path('/repositories')),
    ]
    dl_packages_callable = docker_command.DockerRun(
        command=['/download_packages.py', *DEB_TO_DOWNLOAD],
        builder=DEB_BUILDER,
        mounts=mounts,
        environment={'SALT_VERSION': versions.SALT_VERSION},
        run_config=docker_command.DEB_BASE_CONFIG,
    )
    return {
        'title': utils.title_with_target1('GET DEB PKGS'),
        'actions': [mkdirs, dl_packages_callable],
        'targets': [witness],
        'task_dep': [
            '_package_mkdir_deb_root',
            '_package_mkdir_deb_iso_root',
            '_build_deb_container',
        ],
        'clean': [clean],
        'uptodate': [config_changed(_TO_DOWNLOAD_DEB_CONFIG)],
        # Prevent Docker from polluting our output.
        'verbosity': 0,
    }
def make_lock_task(kind_, env_files, config, platform_, python_, lab_=None):
    """generate a single dodo excursion for conda-lock"""
    lockfile = (
        P.LOCKS / f"conda.{kind_}.{platform_}-{python_}-{lab_ if lab_ else ''}.lock"
    )

    all_envs = [*env_files, P.REQS / f"py_{python_}.yml"]

    if lab_:
        all_envs += [P.REQS / f"lab_{lab_}.yml"]

    file_dep = [*all_envs]

    def _lock():
        with tempfile.TemporaryDirectory() as td:
            tdp = Path(td)
            rc = 1
            for extra_args in [[], ["--no-mamba"]]:
                args = [
                    "conda-lock",
                    "-p",
                    platform_,
                    *sum([["-f", str(p)] for p in all_envs], []),
                ] + extra_args
                print(">>>", " ".join(args), flush=True)
                rc = subprocess.call(args, cwd=str(tdp))
                if rc == 0:
                    break

            if rc != 0:
                raise Exception("couldn't solve at all", all_envs)

            tmp_lock = tdp / f"conda-{platform_}.lock"
            tmp_lock_txt = tmp_lock.read_text(encoding="utf-8")
            tmp_lock_lines = tmp_lock_txt.splitlines()
            urls = [line for line in tmp_lock_lines if line.startswith("https://")]
            print(len(urls), "urls")
            if not lockfile.parent.exists():
                lockfile.parent.mkdir()
            lockfile.write_text(tmp_lock_txt)

    return dict(
        name=lockfile.name,
        uptodate=[config_changed(config)],
        file_dep=file_dep,
        actions=[_lock],
        targets=[lockfile],
    )
def task_compute_ica():
    """Compute ICA solution for filtered and resampled data. Skip emptyroom."""
    script = "preproc/05-compute_ica.py"
    for subj, task, _ in iter_files(cfg.subjects, None):
        filt = bp.filt.fpath(subject=subj, task=task, session=None)
        ica_sol = bp.ica_sol.fpath(subject=subj, task=task)
        yield dict(
            name=filt.name,
            uptodate=[config_changed(cfg.ica_config)],
            file_dep=[filt],
            actions=[f"python {script} {subj} {task}"],
            targets=[ica_sol],
        )
def task_creer_carte():
    topology = BUILD_DIR / "topology.json"
    return {
        "file_dep": [topology],
        "targets": [SVG_FILE],
        "actions": [
            f"node createSVG.mjs --width {WIDTH} --height {HEIGHT} '{topology}' > '{SVG_FILE}'"
        ],
        "uptodate": [config_changed({"width": WIDTH, "height": HEIGHT})],
    }
def task_compute_tfr_epochs():
    """Compute time-frequency for epochs"""
    script = "preproc/15-compute_tfr_epochs.py"
    for subj in cfg.subjects:
        subj_bids = f"sub-{subj}"
        epochs_path = bp.epochs.fpath(subject=subj)
        tfr_path = bp.tfr.fpath(subject=subj)
        yield dict(
            name=subj_bids,
            uptodate=[config_changed(cfg.tfr_config)],
            file_dep=[epochs_path],
            targets=[tfr_path],
            actions=[f"python {script} {subj}"],
            clean=True,
        )
def task_start():
    for name, dct in services:
        container = PREFIX + name
        yield {
            'name': name,
            'actions': [(run, [name, dct])],
            'uptodate': [
                container_uptodate(container, dct['image']),
                config_changed({'prefix': PREFIX, 'tag': TAG}),
            ],
            'task_dep': ['network'] + dct.get('deps', []),
            'clean': ['docker rm -f -v {0} || true'.format(container)],
        }
def task_setup_ts():
    """set up typescript environment"""
    dep_types = ["devDependencies", "dependencies", "peerDependencies"]
    return dict(
        uptodate=[
            config_changed({
                pkg["name"]: {dep: pkg.get(dep) for dep in dep_types}
                for pkg in P.TS_PACKAGE_CONTENT.values()
            })
        ],
        file_dep=[P.ROOT_PACKAGE],
        targets=[P.YARN_INTEGRITY, P.YARN_LOCK],
        actions=[
            ["jlpm", "--prefer-offline", "--ignore-optional"],
            ["jlpm", "lerna", "bootstrap"],
        ],
    )
def task_js():
    """javascript cruft"""
    env = "lint"
    run_in = P.RUN_IN[env]
    env_lock = P.CONDA_LISTS[env]

    yield dict(
        name="yarn",
        uptodate=[
            config_changed({k: P.PACKAGE[k] for k in ["devDependencies", "prettier"]})
        ],
        file_dep=[P.YARN_LOCK, env_lock],
        actions=[
            [*run_in, "yarn", "--silent", "--prefer-offline", "--ignore-optional"],
        ],
        targets=[P.YARN_INTEGRITY],
    )
def task_make_epochs():
    """Create epochs ignoring bad segments"""
    script = "preproc/09-make_epochs.py"
    for subj, task, ses in iter_files(cfg.subjects, None):
        if task in cfg.subj_tasks[subj][1:]:
            continue
        cleaned_fif = bp.ica.fpath(subject=subj, task=task)
        annot = bp.annot_final.fpath(subject=subj, task=task)
        beh = bp.beh.fpath(subject=subj)
        epochs = bp.epochs.fpath(subject=subj)
        yield dict(
            name=cleaned_fif.name,
            uptodate=[config_changed(cfg.epochs_config)],
            file_dep=[cleaned_fif, annot, beh],
            actions=[f"python {script} {subj}"],
            targets=[epochs],
            clean=True,
        )
def task_preflight():
    """ensure a sane development environment"""
    file_dep = [P.PROJ_LOCK, P.SCRIPTS / "preflight.py"]

    yield _ok(
        dict(
            name="conda",
            uptodate=[config_changed({"commit": COMMIT})],
            file_dep=file_dep,
            actions=(
                [_echo_ok("skipping preflight, hope you know what you're doing!")]
                if P.SKIP_CONDA_PREFLIGHT
                else [[*P.PREFLIGHT, "conda"]]
            ),
        ),
        P.OK_PREFLIGHT_CONDA,
    )

    yield _ok(
        dict(
            name="kernel",
            file_dep=[*file_dep, P.OK_ENV["dev"]],
            actions=[[*P.APR_DEV, *P.PREFLIGHT, "kernel"]],
        ),
        P.OK_PREFLIGHT_KERNEL,
    )

    yield _ok(
        dict(
            name="lab",
            file_dep=[*file_dep, P.LAB_INDEX, P.OK_ENV["dev"]],
            actions=[[*P.APR_DEV, *P.PREFLIGHT, "lab"]],
        ),
        P.OK_PREFLIGHT_LAB,
    )

    yield _ok(
        dict(
            name="release",
            file_dep=[P.CHANGELOG, P.VERSION_PY, P.SDIST, P.WHEEL, *P.ALL_PY],
            actions=[[*P.APR_DEV, *P.PREFLIGHT, "release"]],
        ),
        P.OK_PREFLIGHT_RELEASE,
    )
def task_intonaco():
    if get_os() == "windows":
        intonaco_lib = "intonaco.dll"
    elif get_os() == "darwin":
        intonaco_lib = "libintonaco.dylib"
    else:
        intonaco_lib = "libintonaco.so"

    yield {
        'name': 'compile',
        'actions': ['cd intonaco && cargo build'],
        'file_dep': [
            'intonaco/src/lib.rs',
            'intonaco/src/lockfree_value.rs',
            'intonaco/src/thread_guard.rs',
        ],
        # use the platform-specific library name, matching the file_dep of
        # the build-dir task below (the original hardcoded libintonaco.so)
        'targets': ['intonaco/target/debug/' + intonaco_lib],
    }

    yield {
        'name': 'build-dir',
        'actions': [
            (make_dir, ['build-intonaco']),
            (make_dir, ['build-intonaco/intonaco-' + arch_os + '-' + version_intonaco]),
            'cp intonaco/target/debug/' + intonaco_lib +
            ' build-intonaco/intonaco-' + arch_os + '-' + version_intonaco + '/intonaco.gio',
        ],
        'file_dep': ['intonaco/target/debug/' + intonaco_lib],
        # target matches the destination of the cp action above
        'targets': [
            'build-intonaco/intonaco-' + arch_os + '-' + version_intonaco + '/intonaco.gio',
        ],
    }

    yield {
        'name': 'arriccio',
        'actions': [
            (copy_file_replace, ['component/Intonaco', {'{version}': version_intonaco}]),
            'aio local http://www.hgamer3d.org/component/Intonaco build-intonaco || true',
        ],
        'targets': ['build-intonaco/arriccio.toml'],
        'uptodate': [config_changed(version_intonaco)],
        'file_dep': ['component/Intonaco'],
    }
def tasks(self, patterns, group='all', exclude=(), options=None):
    """yield tasks as given by patterns

    @param patterns: (list - str) list of path patterns of files to be linted
    @param group: (str) name of a group
    @param exclude: (list - str) list of paths of files to be removed from selection
    @param options: (dict) extra options for group
    """
    # It seems jshint won't ever accept options from the command line
    # https://github.com/jshint/jshint/issues/807
    # so we create a jshint config file for each "group".
    cfg = ConfigDict(copy.deepcopy(self._config))
    if options:
        cfg.merge(options)
        config_file = '_hint_{}.json'.format(group)

        def write_config():
            with open(config_file, 'w') as fp:
                json.dump(cfg, fp, indent=4, sort_keys=True)

        yield {
            'name': config_file,
            'actions': [write_config],
            'targets': [config_file],
            'uptodate': [config_changed(cfg)],
        }
    else:
        config_file = self.config_file

    # yield a task for every js file in selection
    base = Path('.')
    excluded = set([base.joinpath(e) for e in exclude])
    for pattern in patterns:
        for src in base.glob(pattern):
            if src not in excluded:
                yield self(config_file, str(src))
def task_with_params():
    return {
        'actions': ['echo %s' % option],
        'uptodate': [config_changed(option)],
        'verbosity': 2,
    }
def test_invalid_type(self):
    class NotValid(object):
        pass

    uptodate = tools.config_changed(NotValid())
    pytest.raises(Exception, uptodate, None, None)
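As test_invalid_type shows, config_changed only accepts str or dict configs and raises when the check runs. Other values can be pre-serialized, as gen_deps does above with str(sorted(...)); a short hedged illustration:

from doit import tools

# a bare list is not a valid config:
# tools.config_changed(["a.py", "b.py"])  # would raise when checked

# ...but it can be stringified first, as gen_deps does:
watched = tools.config_changed(str(sorted(["b.py", "a.py"])))

# or carried inside a dict, whose values only need to be JSON-serializable:
watched = tools.config_changed({"modules": ["a.py", "b.py"]})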