def test_repro_multiple_params(tmp_dir, dvc):
    """A stage tracking params from two files reproduces only on param change.

    Uses an explicit params file ("params2.yaml:...") alongside the default
    one (bare keys resolve against "params.yaml").
    """
    from tests.func.test_run_multistage import supported_params
    from dvc.serialize import get_params_deps

    # Both params files start with identical supported values.
    with (tmp_dir / "params2.yaml").open("w+") as f:
        yaml.dump(supported_params, f)
    with (tmp_dir / "params.yaml").open("w+") as f:
        yaml.dump(supported_params, f)

    (tmp_dir / "foo").write_text("foo")
    stage = dvc.run(
        name="read_params",
        deps=["foo"],
        outs=["bar"],
        params=[
            "params2.yaml:lists,floats,name",
            "answer,floats,nested.nested1",
        ],
        cmd="cat params2.yaml params.yaml > bar",
    )

    # One ParamsDependency per params file, plus the single "foo" dep.
    params, deps = get_params_deps(stage)
    assert len(params) == 2
    assert len(deps) == 1
    assert len(stage.outs) == 1

    # Lockfile records the resolved param values grouped by file.
    lockfile = stage.dvcfile._lockfile
    assert lockfile.load()["read_params"]["params"] == {
        "params2.yaml": {
            "lists": [42, 42.0, "42"],
            "floats": 42.0,
            "name": "Answer",
        },
        "params.yaml": {
            "answer": 42,
            "floats": 42.0,
            "nested.nested1": {"nested2": "42", "nested2-2": 41.99999},
        },
    }

    # In dvc.yaml, non-default files serialize as dicts while default-file
    # keys stay as bare strings.
    data, _ = stage.dvcfile._load()
    params = data["stages"]["read_params"]["params"]
    custom, defaults = lsplit(lambda v: isinstance(v, dict), params)
    assert set(custom[0]["params2.yaml"]) == {"name", "lists", "floats"}
    assert set(defaults) == {"answer", "floats", "nested.nested1"}

    # Nothing changed yet, so reproduce is a no-op.
    assert not dvc.reproduce(stage.addressing)

    # Changing a tracked param in the default file invalidates the stage.
    with (tmp_dir / "params.yaml").open("w+") as f:
        params = deepcopy(supported_params)
        params["answer"] = 43
        yaml.dump(params, f)
    assert dvc.reproduce(stage.addressing) == [stage]
def feedback(self, inputs, outputs, initials=None, latches=None,
             keep_outputs=False, signed=False):
    """Feed the named word-level outputs back into the named inputs as latches.

    `inputs`, `outputs`, `latches` and `initials` are parallel lists; when
    `latches` is omitted the fed-back inputs keep their own names, and
    `initials` defaults to all-zero words.  Returns a new AIGBV.
    """
    if latches is None:
        latches = inputs

    # Partition the word maps: idrop/odrop are the (name, bit-names) pairs
    # being fed back; imap/omap are the pairs left exposed.
    idrop, imap = fn.lsplit(lambda x: x[0] in inputs, self.input_map)
    odrop, omap = fn.lsplit(lambda x: x[0] in outputs, self.output_map)

    # Word length of each fed-back input, used to size latches and initials.
    wordlens = [len(vals) for i, vals in idrop]
    new_latches = [(n, common.named_indexes(k, n))
                   for k, n in zip(wordlens, latches)]

    if initials is None:
        initials = [0 for _ in inputs]

    assert len(inputs) == len(outputs) == len(initials) == len(latches)

    # Expand each word-level initial value into one bit per latch bit.
    initials = fn.lcat(
        common.encode_int(k, i, signed) for k, i in zip(wordlens, initials))

    def get_names(key_vals):
        # Flatten [(word, bit-names), ...] into a flat list of bit names.
        return fn.lcat(fn.pluck(1, key_vals))

    # Delegate the actual rewiring to the bit-level AIG.
    aig = self.aig.feedback(
        inputs=get_names(idrop),
        outputs=get_names(odrop),
        latches=get_names(new_latches),
        initials=initials,
        keep_outputs=keep_outputs,
    )
    imap, odrop, omap = map(frozenset, [imap, odrop, omap])
    return AIGBV(
        aig=aig,
        input_map=imap,
        # Fed-back outputs stay visible only when keep_outputs is set.
        output_map=omap | (odrop if keep_outputs else frozenset()),
        latch_map=self.latch_map | set(new_latches),
    )
async def main(codes: List[str]):
    """Fetch data for every fund code concurrently and pretty-print the results.

    Failed fetches are reported on stdout instead of aborting the run.
    """
    connector = aiohttp.TCPConnector(ssl=False)
    async with aiohttp.ClientSession(connector=connector) as session:
        tasks = [fetch_data(session, code) for code in codes]
        # return_exceptions=True keeps one bad code from sinking the batch.
        results = await asyncio.gather(*tasks, return_exceptions=True)
        fund_data, exceptions = lsplit(
            lambda item: not isinstance(item, Exception), results)
        if exceptions:
            print('Failed to get data for', ','.join(map(str, exceptions)))
        ordered = sorted(fund_data, key=cmp_to_key(cmp_by_gszzl))
        pretty_print_fund_data(fund_data=ordered)
def init_deps(stage: PipelineStage) -> List["Dependency"]:
    """Create placeholder files/directories for the stage's missing deps.

    Returns the list of dependencies that did not yet exist on disk.
    """
    from funcy import rpartial
    from dvc.dependency import ParamsDependency
    from dvc.fs import localfs

    missing = [dep for dep in stage.deps if not dep.exists]
    params, others = lsplit(rpartial(isinstance, ParamsDependency), missing)

    # always create a file for params, detect file/folder based on extension
    # for other dependencies
    dir_paths = [dep.fs_path for dep in others if not is_file(dep.fs_path)]
    file_paths = [
        dep.fs_path for dep in others + params if is_file(dep.fs_path)
    ]

    for dir_path in dir_paths:
        localfs.makedirs(dir_path)
    for file_path in file_paths:
        localfs.makedirs(localfs.path.parent(file_path), exist_ok=True)
        localfs.touch(file_path)

    return missing
def split_params_deps(stage):
    """Partition stage.deps into (params dependencies, all other deps)."""
    from ..dependency import ParamsDependency

    return lsplit(lambda dep: isinstance(dep, ParamsDependency), stage.deps)
def split_params_deps(stage):
    """Return stage.deps split into (params deps, remaining deps)."""
    return lsplit(lambda dep: isinstance(dep, ParamsDependency), stage.deps)
def get_params_deps(stage: "PipelineStage"):
    """Split the stage's dependencies into (params deps, other deps)."""
    is_params = rpartial(isinstance, ParamsDependency)
    return lsplit(is_params, stage.deps)
def _parse_query(q): tags, words = lsplit(r'^tag:', q.split()) tags = lmap(r'^tag:(.*)', tags) return ' '.join(words), tags
def search(request):
    """Series search view.

    Context contract: ``series`` is ``None`` when there is nothing to search
    for, ``[]`` when the tag filter matches no series, otherwise a page of
    results plus tag/validation metadata.
    """
    # Save last specie in session
    specie = request.GET.get('specie')
    if specie != request.session.get('specie'):
        request.session['specie'] = specie

    q = request.GET.get('q')
    if not q:
        return {'series': None}

    # Silently drop non-integer exclude_tags values.
    exclude_tags = lkeep(silent(int), request.GET.getlist('exclude_tags'))
    series_tags, tag_series, tag_ids = series_tags_data()

    # Parse query
    q_string, q_tags = _parse_query(q)
    # Tag matching is case-insensitive; unknown tags only warn, not fail.
    q_tags, wrong_tags = lsplit(lambda t: t.lower() in tag_ids, q_tags)
    if wrong_tags:
        message = 'Unknown tag%s %s.' % ('s' if len(wrong_tags) > 1 else '',
                                         ', '.join(wrong_tags))
        messages.warning(request, message)

    if not q_string and not q_tags:
        return {'series': None}

    # Build qs
    qs = search_series_qs(q_string)
    if specie:
        qs = qs.filter(specie=specie)

    if q_tags:
        q_tag_ids = lkeep(tag_ids.get(t.lower()) for t in q_tags)
        # Series must carry ALL requested tags (set intersection).
        include_series = reduce(set.intersection,
                                (tag_series[t] for t in q_tag_ids))
        if include_series:
            qs = qs.filter(id__in=include_series)
        else:
            message = 'No series annotated with %s.' \
                % (q_tags[0] if len(q_tags) == 1
                   else 'all these tags simultaneously')
            messages.warning(request, message)
            return {'series': []}

    # Tag cloud is computed BEFORE applying exclude_tags (presumably so the
    # excluded tags remain visible in the UI) — NOTE(review): confirm intended.
    series_ids = qs.values_list('id', flat=True).order_by()
    tags = ldistinct(mapcat(series_tags, series_ids), key=itemgetter('id'))

    if exclude_tags:
        exclude_series = join(tag_series[t] for t in exclude_tags)
        qs = qs.exclude(id__in=exclude_series)

    series = paginate(request, qs, 10)

    # Get annotations statuses
    annos_qs = SeriesAnnotation.objects.filter(series__in=series) \
        .values_list('series_id', 'tag_id', 'best_cohens_kappa')
    # (series_id, tag_id) -> fully validated (perfect Cohen's kappa).
    tags_validated = {(s, t): k == 1 for s, t, k in annos_qs}

    return dict(
        {
            'series': series,
            'page': series,
            'tags_validated': tags_validated,
            'tags': tags,
            'series_tags': series_tags,
        },
        **_search_stats(qs))