def test_returns_dict_keyword_renames():
    """Renaming a dict-result key via ``keyword`` must not mutate the fn's own dict."""
    results = {"1": 1, "2": 2}
    op = operation(lambda: results, provides=keyword("11", "1"), returns_dict=True)
    assert op.compute({}) == {"11": 1}
    # the function's result dict was copied, not modified in place
    assert len(results) == 2

    results = {"1": 1, "11": 11}
    op = operation(lambda: results, provides=keyword("11", "1"), returns_dict=True)
    # a pre-existing '11' entry is discarded in favor of the renamed '1'
    assert op.compute({}) == {"11": 1}
    assert len(results) == 2
def test_keyword_jsonp():
    """A jsonp need wrapped in ``keyword`` binds its last path-step as the kw-arg."""
    doubler = operation(
        lambda *, bar: bar * 2, "kw", needs=keyword("foo/bar"), provides="a"
    )
    solution = doubler.compute({"foo/bar": "ciao"})
    assert solution == {"a": "ciaociao"}
def param_to_modifier(name: str, param: inspect.Parameter) -> str:
    """
    Convert an inspected function-parameter into a needs-modifier (or plain name).

    :param name:
        the parameter's name, used as the dependency name
    :param param:
        the :class:`inspect.Parameter` describing the parameter's kind & default

    :return:
        - ``optional(name)`` if the parameter carries a default value;
        - ``keyword(name)`` if it is keyword-only (without a default);
        - the plain ``name`` string otherwise.
    """
    # Use the public `Parameter.empty` sentinel instead of the private
    # `inspect._empty` (they are the same object, but only the former is API).
    if param.default is not Parameter.empty:  # has a default => optional
        return optional(name)
    if param.kind == Parameter.KEYWORD_ONLY:
        return keyword(name)
    return name
def test_cwd_fnop():
    """``cwd`` must root all relative jsonp deps, leaving absolute ('/...') ones alone."""
    fnop = operation(
        str,
        None,
        needs=[
            "a",
            "a/b",
            "/r/b",
            optional("o"),
            keyword("k"),
            implicit("i"),
            vararg("v1"),
            varargs("v2"),
            sfx("s1"),
            sfxed("s2", "s22"),
            vcat("vc"),
        ],
        provides=["A/B", "C", "/R"],
        aliases=[("A/B", "aa"), ("C", "CC"), ("/R", "RR")],
        cwd="root",
    )
    expected = """
    FnOp(name='str', needs=['root/a'($), 'root/a/b'($), '/r/b'($), 'root/o'($?'o'), 'root/k'($>'k'), 'root/i'($), 'root/v1'($*), 'root/v2'($+), sfx('s1'), sfxed('root/s2'($), 's22'), 'root/vc'($)], provides=['root/A/B'($), 'root/C'($), '/R'($), 'root/aa'($), 'root/CC'($), 'root/RR'($)], aliases=[('root/A/B'($), 'root/aa'($)), ('root/C'($), 'root/CC'($)), ('/R'($), 'root/RR'($))], fn='str')
    """
    # compare whitespace-normalized reprs
    assert oneliner(fnop) == oneliner(expected)
def test_conveyor_identity_fn():
    """The default "conveyor" fn is installed only when the op has a name & provides."""
    # name alone, no provides => no conveyor fn
    unnamed_op = operation(name="copy values", needs="a")()
    assert not unnamed_op.fn

    # provides but no name => still no conveyor fn
    nameless_op = operation(None, needs="a", provides="A")
    assert not nameless_op.fn

    # both name & provides => identity fn conveys values through
    conveyor_op = operation(None, name="a", needs="a", provides="A")
    assert conveyor_op.fn
    assert conveyor_op(a=5) == {"A": 5}

    multi_op = operation(name="a", needs=["a", "b"], provides=["A", "B"])()
    assert multi_op.compute({"a": 5, "b": 6}) == {"A": 5, "B": 6}

    kw_op = operation(name="a", needs=["a", keyword("b")], provides=["A", "B"])()
    assert kw_op(a=55, b=66) == {"A": 55, "B": 66}

    mixed_op = operation(
        fn=None,
        name="a",
        # positional, vararg, keyword, optional
        needs=[optional("a"), vararg("b"), "c"],
        provides=["C", "B", "A"],
    )
    assert mixed_op(c=7, a=5, b=6) == {"A": 5, "B": 6, "C": 7}
"n_min_drive2_stopdecel", "n_min_drive2", "n_min_drive_set", "n_min_drive_up", "n_min_drive_up_start", "n_min_drive_down", "n_min_drive_down_start", "t_cold_end", ), ) #: Consume (R)ounded values to construct a :class:`_NMinDrives` instance. NMinDrives = autographed( _NMinDrives, needs=[ keyword(n if n == "t_cold_end" else f"{n}_R", n) for n in _NMinDrives._fields ], inp_sideffects="valid: n_min_drives", provides="n_min_drives", ) def _compose_mdl_2_n_min_drives(aug: autog.Autograph = None, **pipeline_kw) -> "Pipeline": # type: ignore aug = Autograph(["calc_", "upd_"]) funcs = FnHarvester(base_modules=[__name__]).harvest() ops = aug.wrap_funcs(funcs) return compose("mdl_2_n_min_drives", *ops, **pipeline_kw)
roots_head[0], v_max, wot.loc[v_max - 5 * v_step : v_max + 5 * v_step, w.p_remain_stable], ) rec = VMaxRec(v_max, n_v_max, gid, False, wot) else: rec = VMaxRec(np.NAN, np.NAN, gid, False, wot) return rec @autog.autographed( needs=(), provides=[ *VMaxRec._fields[:-2], keyword("is_n_lim_vmax", "is_n_lim"), keyword("vmax_wots", "wot"), # `wot` causes cycle! ], inp_sideffects=[("gwots", "p_resist"), ("gwots", "p_avail")], returns_dict=True, ) def calc_v_max(gwots: Union[pd.Series, pd.DataFrame]) -> VMaxRec: """ Finds maximum velocity by scanning gears from the top. TODO: accept `n_lim` :param gwots: a dataframe indexed by a grid of rounded velocities, containing (at least) `p_resist` and `p_avail_stable` columns for all gears, as generated by :func:`~.engine.interpolate_wot_on_v_grid()`, and
else: raise AssertionError("Missing `n_min` ok-flags from:", gflags) n_ok = inv.AND_columns_with_NANFLAGs(pd.concat(flags_to_AND, axis=1)) assert isinstance(n_ok, pd.Series), ("groupby won't work otherwise", n_ok) g = flagcols[0][1] n_ok.name = g return n_ok @autog.autographed( needs=[ # .. AND ... keyword("cycle/ok_min_n_g3plus_ups"), keyword("cycle/ok_min_n_g3plus_dns"), # .. AND ... keyword("cycle/ok_min_n_g2"), keyword("cycle/ok_min_n_g2_stopdecel"), # .. AND ... keyword("cycle/ok_min_n_g1"), keyword("cycle/ok_min_n_g1_initaccel"), # .. ALONE ... keyword("cycle/OK_max_n"), ], provides=hcat("cycle/OK_n"), ) def derrive_ok_n_flags(**ok_n_flags: Mapping[str, pd.DataFrame]): """ Merge together all N-allowed flags using AND+OR boolean logic.