Example #1
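These excerpts are drawn from graphtik's test-suite and are shown without their surrounding imports and fixtures. A minimal setup sketch, assuming a recent graphtik release (not part of the original listing):

import pytest
from graphtik import compose, operation

# The dependency modifiers used in the examples (optional, keyword, vararg,
# varargs, sfx, sfxed, implicit, vcat, ...) and helpers such as NO_RESULT,
# debug_enabled, task_context or oneliner are also provided by graphtik or by
# its test utilities; their exact import locations vary between releases, so
# they are not spelled out here.
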
def test_varargs():
    def sumall(a, *args, b=0, **kwargs):
        return a + sum(args) + b + sum(kwargs.values())

    op = operation(
        sumall,
        name="t",
        needs=[
            "a",
            vararg("arg1"),
            vararg("arg2"),
            varargs("args"),
            optional("b"),
            optional("c"),
        ],
        provides="sum",
    )

    exp = sum(range(8))
    assert op.compute(dict(a=1, arg1=2, arg2=3, args=[4, 5], b=6,
                           c=7))["sum"] == exp
    assert op.compute(dict(a=1, arg1=2, arg2=3, args=[4, 5],
                           c=7))["sum"] == exp - 6
    assert op.compute(dict(a=1, arg1=2, arg2=3, args=[4, 5],
                           b=6))["sum"] == exp - 7
    assert op.compute(dict(a=1, arg2=3, args=[4, 5], b=6,
                           c=7))["sum"] == exp - 2
    assert op.compute(dict(a=1, arg1=2, arg2=3, b=6,
                           c=7))["sum"] == exp - 4 - 5
    with pytest.raises(ValueError, match="Missing compulsory needs.+'a'"):
        assert op.compute(dict(arg1=2, arg2=3, b=6, c=7))
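
For orientation, the first assertion above corresponds to the plain call sketched below: the vararg()/varargs() values extend *args, while the optional() needs are passed as keywords (so 'c' lands in **kwargs). This is an illustrative sketch, not part of the test:

# 'a' -> positional a; arg1, arg2 and the 'args' list all extend *args;
# b and c are passed as keywords (c ends up in **kwargs).
assert sumall(1, 2, 3, 4, 5, b=6, c=7) == sum(range(8))  # == 28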
Example #2
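The test below relies on two helper functions that are not shown in the excerpt; judging from its assertions they are plain additions, roughly as in these hypothetical stand-ins:

def addall(*args):
    """Sum any number of positional arguments (behaviour inferred from the test)."""
    return sum(args)

def add(a, b):
    """Add two values (behaviour inferred from the test)."""
    return a + b
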
def test_network_combine():
    sum_op1 = operation(name="sum_op1",
                        needs=[vararg("a"), vararg("b")],
                        provides="sum1")(addall)
    sum_op2 = operation(name="sum_op2",
                        needs=[vararg("a"), "b"],
                        provides="sum2")(addall)
    sum_op3 = operation(name="sum_op3", needs=["sum1", "c"],
                        provides="sum3")(add)
    net1 = compose("my network 1", sum_op1, sum_op2, sum_op3)
    exp = {"a": 1, "b": 2, "c": 4, "sum1": 3, "sum2": 3, "sum3": 7}
    assert net1(a=1, b=2, c=4) == exp
    assert repr(net1).startswith(
        "Pipeline('my network 1', needs=['a'(?), 'b', 'sum1', 'c'], "
        "provides=['sum1', 'sum2', 'sum3'], x3 ops")

    sum_op4 = operation(name="sum_op1",
                        needs=[vararg("a"), "b"],
                        provides="sum1")(addall)
    sum_op5 = operation(name="sum_op4", needs=["sum1", "b"],
                        provides="sum2")(add)
    net2 = compose("my network 2", sum_op4, sum_op5)
    exp = {"a": 1, "b": 2, "sum1": 3, "sum2": 5}
    assert net2(**{"a": 1, "b": 2}) == exp
    assert repr(net2).startswith(
        "Pipeline('my network 2', needs=['a'(?), 'b', 'sum1'], provides=['sum1', 'sum2'], x2 ops"
    )

    net3 = compose("merged", net1, net2)
    exp = {"a": 1, "b": 2, "c": 4, "sum1": 3, "sum2": 5, "sum3": 7}
    assert net3(a=1, b=2, c=4) == exp

    assert repr(net3).startswith(
        "Pipeline('merged', needs=['a'(?), 'b', 'sum1', 'c'], provides=['sum1', 'sum2', 'sum3'], x4 ops"
    )

    ## Reverse ops, change results and `needs` optionality.
    #
    net3 = compose("merged", net2, net1)
    exp = {"a": 1, "b": 2, "c": 4, "sum1": 3, "sum2": 3, "sum3": 7}
    assert net3(**{"a": 1, "b": 2, "c": 4}) == exp

    assert repr(net3).startswith(
        "Pipeline('merged', needs=['a'(?), 'b', 'sum1', 'c'], provides=['sum1', 'sum2', 'sum3'], x4 ops"
    )
Example #3
def test_rescheduling(exemethod, resched, rescheduled):
    canc = operation(lambda: None, name="canc", needs=["b"], provides="cc")
    op = compose(
        "pipeline",
        operation(lambda: [1], name="op1", provides=["a", "b"], rescheduled=1),
        canc,
        operation(
            lambda C=1: C and NO_RESULT,
            name="op2",
            needs=optional("C"),
            provides=["c"],
            rescheduled=1,
        ),
        operation(
            lambda *args: sum(args),
            name="op3",
            needs=["a", vararg("b"), vararg("c")],
            provides=["d"],
        ),
        parallel=exemethod,
    )
    sol = op.compute({})
    assert sol == {"a": 1, "d": 1}
    assert list(sol.canceled) == [canc]
    dot = str(sol.plot())
    assert "#a9a9a9" in dot  # Canceled
    assert 'BORDER="4"' in dot  # Rescheduled
    assert "x2 partial-ops" in str(sol.check_if_incomplete())
    with pytest.raises(IncompleteExecutionError, match="x2 partial-ops"):
        assert sol.scream_if_incomplete()

    ## Check that state modified by the 1st run does not make the 2nd run fail.
    assert op.compute({}) == {"a": 1, "d": 1}

    ## Tell op to cancel just 1 of 2 provides
    #  (the 2nd one, 'b').
    #
    sol = op.compute({"C": False})
    assert sol == {"C": False, "a": 1, "c": False, "d": 1}
Example #4
def test_cwd_fnop():
    op = operation(
        str,
        None,
        needs=[
            "a",
            "a/b",
            "/r/b",
            optional("o"),
            keyword("k"),
            implicit("i"),
            vararg("v1"),
            varargs("v2"),
            sfx("s1"),
            sfxed("s2", "s22"),
            vcat("vc"),
        ],
        provides=["A/B", "C", "/R"],
        aliases=[("A/B", "aa"), ("C", "CC"), ("/R", "RR")],
        cwd="root",
    )
    exp = """
    FnOp(name='str',
        needs=['root/a'($),
            'root/a/b'($),
            '/r/b'($),
            'root/o'($?'o'),
            'root/k'($>'k'),
            'root/i'($),
            'root/v1'($*),
            'root/v2'($+),
            sfx('s1'),
            sfxed('root/s2'($), 's22'),
            'root/vc'($)],
        provides=['root/A/B'($),
            'root/C'($),
            '/R'($),
            'root/aa'($),
            'root/CC'($),
            'root/RR'($)],
         aliases=[('root/A/B'($), 'root/aa'($)),
            ('root/C'($), 'root/CC'($)),
            ('/R'($), 'root/RR'($))],
        fn='str')
    """
    assert oneliner(op) == oneliner(exp)
Example #5
def test_jetsam_n_plot_with_DEBUG():
    pipe = compose(
        "mix",
        operation(
            str,
            "FUNC",
            needs=[
                "a",
                sfxed("b", "foo", keyword="bb"),
                implicit("c"),
                sfxed("d", "bar"),
                vararg("e"),
                varargs("f"),
            ],
            provides=[
                "A",
                sfxed("b", "FOO", keyword="bb"),
                implicit("C"),
                sfxed("d", "BAR", optional=True),
                sfx("FOOBAR"),
            ],
            aliases={
                "A": "aaa",
                "b": "bbb",
                "d": "ddd"
            },  # FIXME: "D" is implicit!
        ),
    )

    with debug_enabled(True), pytest.raises(ValueError, match="^Unsolvable"):
        pipe.compute()
    with debug_enabled(True), pytest.raises(
            ValueError, match="^Failed matching inputs <=> needs") as exc:
        pipe.compute({
            "a": 1,
            sfxed("b", "foo"): 2,
            "c": 3,
            sfxed("d", "bar"): 4,
            "e": 5,
            "f": [6, 7],
        })

    exc.value.jetsam.plot_fpath.unlink()
Example #6
def test_conveyor_identity_fn():
    op = operation(name="copy values", needs="a")()
    assert not op.fn
    op = operation(None, needs="a", provides="A")
    assert not op.fn

    op = operation(None, name="a", needs="a", provides="A")
    assert op.fn
    assert op(a=5) == {"A": 5}

    op = operation(name="a", needs=["a", "b"], provides=["A", "B"])()
    assert op.compute({"a": 5, "b": 6}) == {"A": 5, "B": 6}

    op = operation(name="a", needs=["a", keyword("b")], provides=["A", "B"])()
    assert op(a=55, b=66) == {"A": 55, "B": 66}

    op = operation(
        fn=None,
        name="a",
        needs=[optional("a"), vararg("b"), "c"],
        # positional, vararg, keyword, optional
        provides=["C", "B", "A"],
    )
    assert op(c=7, a=5, b=6) == {"A": 5, "B": 6, "C": 7}
Example #7
def test_network_nest_subdocs_NOT_LAYERED(solution_layered_false):
    days = ["Monday", "Tuesday", "Wednesday"]
    todos = sfxed("backlog", "todos")

    @operation(name="wake up",
               needs="backlog",
               provides=["tasks", todos],
               rescheduled=True)
    def pick_tasks(backlog):
        if not backlog:
            return NO_RESULT
        # Pick from the backlog a number of tasks equal to 1/3 of the
        # character-length of this operation's (day) name.
        n_tasks = int(len(task_context.get().op.name) / 3)
        my_tasks, todos = backlog[:n_tasks], backlog[n_tasks:]
        return my_tasks, todos

    do_tasks = operation(None,
                         name="work!",
                         needs="tasks",
                         provides="daily_tasks")

    weekday = compose("weekday", pick_tasks, do_tasks)
    weekdays = [weekday.withset(name=d) for d in days]

    def nester(ra: RenArgs):
        dep = ra.name
        if ra.typ == "op":
            return True
        if ra.typ.endswith(".jsonpart"):
            return False
        if dep == "tasks":
            return True
        # if is_sfxed(dep):
        #     return modifier_withset(
        #         dep, sfx_list=[f"{ra.parent.name}.{s}" for s in dep._sfx_list]
        #     )
        if dep == "daily_tasks":
            return dep_renamed(dep, lambda n: f"{n}/{ra.parent.name}")
        return False

    week = compose("week", *weekdays, nest=nester)
    assert str(week) == re.sub(
        r"[\n ]{2,}",  # collapse all space-chars into a single space
        " ",
        """
        Pipeline('week', needs=['backlog', 'Monday.tasks', 'Tuesday.tasks', 'Wednesday.tasks'],
        provides=['Monday.tasks', sfxed('backlog', 'todos'),
                  'daily_tasks/Monday'($), 'Tuesday.tasks', 'daily_tasks/Tuesday'($),
                  'Wednesday.tasks', 'daily_tasks/Wednesday'($)],
        x6 ops: Monday.wake up, Monday.work!, Tuesday.wake up, Tuesday.work!,
        Wednesday.wake up, Wednesday.work!)
        """.strip(),
    )

    ## Add collector after nesting

    @operation(
        name="collect tasks",
        needs=[todos, *(vararg(f"daily_tasks/{d}") for d in days)],
        provides=["weekly_tasks", "todos"],
    )
    def collector(backlog, *daily_tasks):
        return daily_tasks or (), backlog or ()

    week = compose("week", week, collector)
    assert str(week) == re.sub(
        r"[\n ]{2,}",  # collapse all space-chars into a single space
        " ",
        """
        Pipeline('week',
            needs=['backlog',
                'Monday.tasks', 'Tuesday.tasks', 'Wednesday.tasks',
                sfxed('backlog', 'todos'),
                'daily_tasks/Monday'($?), 'daily_tasks/Tuesday'($?), 'daily_tasks/Wednesday'($?)],
            provides=['Monday.tasks',
                sfxed('backlog', 'todos'), 'daily_tasks/Monday'($),
                'Tuesday.tasks', 'daily_tasks/Tuesday'($),
                'Wednesday.tasks', 'daily_tasks/Wednesday'($),
                'weekly_tasks', 'todos'],
            x7 ops: Monday.wake up, Monday.work!, Tuesday.wake up, Tuesday.work!,
                    Wednesday.wake up, Wednesday.work!, collect tasks)
        """.strip(),
    )

    # Backlog of 17 exceeds the week's capacity (4 + 5 + 5 = 14) by 3.

    sol = week.compute({"backlog": range(17)},
                       layered_solution=solution_layered_false)
    assert sol == {
        "backlog": range(14, 17),
        "Monday.tasks": range(0, 4),
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
            "Wednesday": range(9, 14),
        },
        "Tuesday.tasks": range(4, 9),
        "Wednesday.tasks": range(9, 14),
        "weekly_tasks": (range(0, 4), range(4, 9), range(9, 14)),
        "todos": range(14, 17),
    }

    assert sol.overwrites == {
        "backlog": [range(14, 17),
                    range(9, 17),
                    range(4, 17),
                    range(0, 17)]
    }

    ## Backlog one task short of leaving Wednesday anything to enact.

    sol = week.compute({"backlog": range(9)},
                       layered_solution=solution_layered_false)
    assert sol == {
        "backlog": range(9, 9),
        "Monday.tasks": range(0, 4),
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
        },
        "Tuesday.tasks": range(4, 9),
        sfxed("backlog", "todos"): False,
        "weekly_tasks": (range(0, 4), range(4, 9)),
        "todos": (),
    }
    assert sol.overwrites == {
        "backlog": [range(9, 9), range(4, 9),
                    range(0, 9)]
    }
    sol = week.compute(
        {"backlog": range(9)},
        outputs=["backlog", "daily_tasks", "weekly_tasks", "todos"],
        layered_solution=solution_layered_false,
    )
    assert sol == {
        "backlog": range(9, 9),
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
        },
        "weekly_tasks": (range(0, 4), range(4, 9)),
        "todos": (),
    }

    ## These used to fail due to eager eviction of "backlog".
    #
    sol = week.compute(
        {"backlog": range(9)},
        outputs=["daily_tasks", "weekly_tasks", "todos"],
        layered_solution=solution_layered_false,
    )
    assert sol == {
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
        },
        "weekly_tasks": (range(0, 4), range(4, 9)),
        "todos": (),
    }

    sol = week.compute(
        {"backlog": range(9)},
        outputs="daily_tasks/Monday",
        layered_solution=solution_layered_false,
    )
    assert sol == {"daily_tasks": {"Monday": range(0, 4)}}
    assert sol.overwrites == {}
    sol = week.compute(
        {"backlog": range(9)},
        outputs="daily_tasks",
        layered_solution=solution_layered_false,
    )
    assert sol == {
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
        }
    }
    assert sol.overwrites == {}
Example #8
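This excerpt is from the wltp project rather than graphtik's test-suite and is shown without its module-level imports. The type hints in the last function assume at least the following; the project's own helpers (wio, autog) are imported elsewhere in that module and are not reproduced here:

from typing import Union
import pandas as pd
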
def fill_insufficient_power(cycle):
    """TODOFIXME: fill_insufficient_power() not calced yet! """
    c = wio.pstep_factory.get().cycle

    idx_miss_gear = cycle[c.g_max0] < 0
    ok_n = cycle.loc[:, c.ok_n]
    p_remain = cycle.loc[:, c.p_remain]
    cycle.loc[idx_miss_gear]


@autog.autographed(
    needs=[
        vararg("wltc_class_data/V_cycle"),
        vararg("V_dsc"),
        vararg("V_capped"),
        vararg("V_compensated"),
        vararg("forced_cycle"),
    ],
    provides=[
        sfxed("cycle", "init"),
        implicit("cycle/t"),
        implicit("cycle/V"),
    ],
)
def init_cycle_velocity(*velocities: Union[pd.Series, pd.DataFrame]) -> pd.DataFrame:
    """
    Concatenate velocities(series)/cycle(dataframe), cloning the last column as `V`.