Example #1
def test_sideffecteds_endured(calc_prices_pipeline):
    ## Break `fill_in_vat_ratios()`.
    #
    @operation(
        needs=[sfxed("ORDER", "Items"), "vat rate"],
        provides=sfxed("ORDER", "VAT rates"),
        endured=True,
    )
    def fill_in_vat_ratios(order: DataFrame, base_vat: float) -> DataFrame:
        raise ValueError("EC transactions have no VAT!")

    calc_prices_pipeline = compose(calc_prices_pipeline.name,
                                   fill_in_vat_ratios,
                                   calc_prices_pipeline,
                                   nest=False)

    sol = calc_prices_pipeline.compute(
        {"order_items": "milk babylino toilet-paper".split(), "vat rate": 0.18}
    )

    print(sol)
    assert sol == {
        "order_items": ["milk", "babylino", "toilet-paper"],
        "vat rate": 0.18,
        "ORDER": {
            "items": ["milk", "babylino", "toilet-paper"],
            "prices": [1, 2, 3],
            "totals": [1, 2, 3],
        },
        "vat owed": None,
    }
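
Because `fill_in_vat_ratios` above is declared with `endured=True`, its `ValueError` does not abort the run: the pipeline still computes everything that does not strictly depend on the failed operation (here `finalize_prices` only needs the VAT rates optionally), which is why the assertion sees plain totals and a `None` "vat owed". Below is a minimal sketch of the same mechanism, not part of the test-suite, assuming only the public `operation`/`compose`/`sfxed` API used above:

from graphtik import compose, operation, sfxed

@operation(needs="x", provides=sfxed("DATA", "filled"), endured=True)
def may_fail(x):
    raise ValueError("boom")          # endured: the failure is recorded, not re-raised

@operation(needs="x", provides="y")
def doubler(x):
    return 2 * x

pipe = compose("endured-demo", may_fail, doubler)
sol = pipe.compute({"x": 3})
assert sol["y"] == 6                  # the healthy operation still ran
print(sol)                            # expected: {'x': 3, 'y': 6} -- no "DATA", the endured failure provided nothing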
Example #2
def test_aliases_pipeline(exemethod):
    provides = ("a", sfxed("s", "foo"))
    aliased = operation(
        lambda: ("A", "B"),
        name="op1",
        provides=provides,
        aliases={
            "a": "b",
            "s": "S1"
        },
    )
    assert aliased._user_provides == provides
    assert tuple(aliased.provides) == (
        "a",
        sfxed("s", "foo"),
        "b",
        "S1",
    )

    pipe = compose(
        "test_net",
        aliased,
        operation(lambda x: x * 2, name="op2", needs="b", provides="c"),
        parallel=exemethod,
    )
    assert pipe() == {"a": "A", "s": "B", "b": "A", "S1": "B", "c": "AA"}
    assert list(pipe.provides) == [*aliased.provides, "c"]
Example #3
def test_sfxed_provides_in_pipeline():
    deps = (sfxed("a", "A", "B"), sfxed("a", "A", "C"), "c", "c")
    op = operation(str, "hh", provides=deps)

    singularized = (sfxed("a", "A"), sfxed("a", "B"), sfxed("a", "C"), "c")
    assert op.provides == singularized
    assert op._user_provides == deps
    assert op._fn_provides == ("a", "c", "c")

    pipe = compose(..., op)
    assert pipe.provides == singularized
Example #4
def test_compose_nest_dict(caplog):
    pipe = compose(
        "t",
        compose(
            "p1",
            operation(
                str,
                name="op1",
                needs=[sfx("a"), "aa"],
                provides=[sfxed("S1", "g"), sfxed("S2", "h")],
            ),
        ),
        compose(
            "p2",
            operation(
                str,
                name="op2",
                needs=sfx("a"),
                provides=["a", sfx("b")],
                aliases=[("a", "b")],
            ),
        ),
        nest={
            "op1": True,
            "op2": lambda n: "p2.op2",
            "aa": False,
            sfx("a"): True,
            "b": lambda n: f"PP.{n}",
            sfxed("S1", "g"): True,
            sfxed("S2", "h"): lambda n: dep_renamed(n, "ss2"),
            sfx("b"): True,
        },
    )
    got = str(pipe.ops)
    print(got)
    assert got == re.sub(
        r"[\n ]{2,}",  # collapse all space-chars into a single space
        " ",
        """
        [FnOp(name='p1.op1', needs=[sfx('p1.a'), 'aa'],
         provides=[sfxed('p1.S1', 'g'), sfxed('ss2', 'h')], fn='str'),
        FnOp(name='p2.op2', needs=[sfx('p2.a')],
         provides=['a', sfx('p2.b'), 'PP.b'], aliases=[('a', 'PP.b')], fn='str')]

        """.strip(),
    )
    for record in caplog.records:
        assert record.levelname != "WARNING"
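
In the `nest` mapping above, `sfxed('S2', 'h')` is renamed through `dep_renamed`, which swaps the sideffected's stem while keeping its sfx list (hence `sfxed('ss2', 'h')` in the expected op-string). A tiny sketch of that helper, assuming it is imported from `graphtik.modifiers` as in these tests:

from graphtik.modifiers import dep_renamed, sfxed

renamed = dep_renamed(sfxed("S2", "h"), "ss2")
print(repr(renamed))   # expected: sfxed('ss2', 'h') -- only the stem changed, the 'h' sfx is kept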
Example #5
def test_sideffecteds_ok(calc_prices_pipeline):
    inp = {
        "order_items": "milk babylino toilet-paper".split(),
        "vat rate": 0.18
    }
    sol = calc_prices_pipeline.compute(inp)
    print(sol)
    assert sol == {
        "order_items": ["milk", "babylino", "toilet-paper"],
        "vat rate": 0.18,
        "ORDER": {
            "items": ["milk", "babylino", "toilet-paper"],
            "prices": [1, 2, 3],
            "VAT_rates": [0.18, 0.36, 0.18],
            "VAT": [0.18, 0.72, 0.54],
            "totals": [1.18, 2.7199999999999998, 3.54],
        },
        "vat owed": 1.44,
    }
    sol = calc_prices_pipeline.compute(
        inp, [sfxed("ORDER", "VAT"),
              sfxed("ORDER", "Totals")])
    print(sol)
    assert sol == {
        "ORDER": {
            "items": ["milk", "babylino", "toilet-paper"],
            "prices": [1, 2, 3],
            "VAT_rates": [0.18, 0.36, 0.18],
            "VAT": [0.18, 0.72, 0.54],
            "totals": [1.18, 2.7199999999999998, 3.54],
        },
        # "vat owed": 1.44,
    }

    ## `vat owed` both pruned & evicted
    #
    assert "vat owed" not in sol.dag.nodes
    assert "vat owed" in sol.plan.steps
    # Check Pruned+Evicted data plot as expected.
    #
    dot = str(sol.plot())
    print(dot)
    assert re.search(r"(?s)>vat owed<.+style=dashed", dot)
    assert re.search(r'(?s)>vat owed<.+tooltip=".*\(evicted\)"', dot)
Example #6
def sideffected_resched(request, exemethod):
    @operation(provides=sfxed("DEP", "yes", "no"),
               rescheduled=1,
               returns_dict=1)
    def half_sfx():
        return {"DEP": 1, sfxed("DEP", "no"): False}

    yes = operation(lambda dep: "yes!",
                    name="YES",
                    needs=sfxed("DEP", "yes"),
                    provides="yes")
    no = operation(lambda dep: "no!",
                   name="NO",
                   needs=sfxed("DEP", "no"),
                   provides="no")
    ops = [half_sfx, yes, no]
    if request.param:
        ops = reversed(ops)
    return compose("sfxed_resched", *ops, parallel=exemethod)
Example #7
def test_cwd_fnop():
    op = operation(
        str,
        None,
        needs=[
            "a",
            "a/b",
            "/r/b",
            optional("o"),
            keyword("k"),
            implicit("i"),
            vararg("v1"),
            varargs("v2"),
            sfx("s1"),
            sfxed("s2", "s22"),
            vcat("vc"),
        ],
        provides=["A/B", "C", "/R"],
        aliases=[("A/B", "aa"), ("C", "CC"), ("/R", "RR")],
        cwd="root",
    )
    exp = """
    FnOp(name='str',
        needs=['root/a'($),
            'root/a/b'($),
            '/r/b'($),
            'root/o'($?'o'),
            'root/k'($>'k'),
            'root/i'($),
            'root/v1'($*),
            'root/v2'($+),
            sfx('s1'),
            sfxed('root/s2'($),
            's22'),
            'root/vc'($)],
        provides=['root/A/B'($),
            'root/C'($),
            '/R'($),
            'root/aa'($),
            'root/CC'($),
            'root/RR'($)],
         aliases=[('root/A/B'($), 'root/aa'($)),
            ('root/C'($), 'root/CC'($)),
            ('/R'($), 'root/RR'($))],
        fn='str')
    """
    assert oneliner(op) == oneliner(exp)
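
The expected string shows the `cwd` rule: relative dependencies and aliases are rooted under 'root/' (becoming jsonp paths, hence the ($) marks), while absolute ones such as '/r/b' and '/R' are left untouched. A stripped-down sketch of the same behaviour, with a hypothetical op name, using only the same public API:

from graphtik import operation

op = operation(str, name="cwd-demo", needs=["a", "/abs"], provides=["out"], cwd="root")
print(op)   # expected to render needs as 'root/a'($) and '/abs'($), provides as 'root/out'($)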
Example #8
 def _match_fn_name_pattern(self, fn_name,
                            pattern) -> Union[str, Tuple[str, str], None]:
     """return matched group or groups, callable results or after matched prefix string"""
     if isinstance(pattern, RegexPattern):
         m = pattern.search(fn_name)
         groups = m and m.groups()
         if groups:
             if len(groups) == 1:
                 return groups[0]
             if len(groups) > 2:
                 raise ValueError(
                     f"The `out_pattern` {pattern} matched on '{fn_name}' >2 groups: {groups}"
                 )
             return sfxed(*reversed(groups))
     elif callable(pattern):
         return pattern(fn_name)
     elif fn_name.startswith(pattern):
         return fn_name[len(pattern):]
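
In the two-group case above the groups are reversed before building the sideffected, so the first capture becomes the sfx and the second the stem. For instance, with a hypothetical `out_pattern` (illustration only):

import re

from graphtik import sfxed

pattern = re.compile(r"^calc_(\w+)_of_(\w+)$")                  # hypothetical out_pattern
groups = pattern.search("calc_p_avail_of_gwots").groups()       # ('p_avail', 'gwots')
assert sfxed(*reversed(groups)) == sfxed("gwots", "p_avail")    # stem from the 2nd capture, sfx from the 1st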
Example #9
def test_op_rename_parts():
    op = operation(
        str,
        name="op1",
        needs=[sfx("a/b"), "/a/b"],
        provides=["b/c", sfxed("d/e/f", "k/l")],
        aliases=[("b/c", "/b/t")],
    )

    def renamer(na):
        if na.name and na.typ.endswith(".jsonpart"):
            return f"PP.{na.name}"

    ren = op.withset(renamer=renamer)
    got = str(ren)
    print(got)
    assert got == oneliner(
        """
        FnOp(name='op1',
            needs=[sfx('a/b'), '/PP.a/PP.b'($)],
            provides=['PP.b/PP.c'($), sfxed('PP.d/PP.e/PP.f'($), 'k/l'), '/PP.b/PP.t'($)],
            aliases=[('PP.b/PP.c'($), '/PP.b/PP.t'($))], fn='str')
        """, )
Example #10
def calc_prices_pipeline(request, exemethod):
    """A pipeline that may work even without VAT-rates."""
    @operation(needs="order_items", provides=sfxed("ORDER", "Items", "Prices"))
    def new_order(items: list) -> DataFrame:
        order = {"items": items}
        # Pretend we get the prices from sales.
        order["prices"] = list(range(1, len(order["items"]) + 1))
        return order

    @operation(
        needs=[sfxed("ORDER", "Items"), "vat rate"],
        provides=sfxed("ORDER", "VAT rates"),
    )
    def fill_in_vat_ratios(order: DataFrame, base_vat: float) -> DataFrame:
        order["VAT_rates"] = [
            v for _, v in zip(order["prices"], cycle((base_vat, 2 * base_vat)))
        ]
        return order

    @operation(
        needs=[
            sfxed("ORDER", "Prices"),
            sfxed("ORDER", "VAT rates", optional=True),
        ],
        provides=[sfxed("ORDER", "VAT", "Totals"), "vat owed"],
    )
    def finalize_prices(order: DataFrame) -> Tuple[DataFrame, float]:
        if "VAT_rates" in order:
            order["VAT"] = [
                p * v for p, v in zip(order["prices"], order["VAT_rates"])
            ]
            order["totals"] = [
                p + v for p, v in zip(order["prices"], order["VAT"])
            ]
            vat_to_pay = sum(order["VAT"])
        else:
            order["totals"] = order["prices"][::]
            vat_to_pay = None
        return order, vat_to_pay

    ops = [new_order, fill_in_vat_ratios, finalize_prices]
    if request.param:
        ops = reversed(ops)
    return compose("process order", *ops, parallel=exemethod)
Example #11
def test_jetsam_n_plot_with_DEBUG():
    pipe = compose(
        "mix",
        operation(
            str,
            "FUNC",
            needs=[
                "a",
                sfxed("b", "foo", keyword="bb"),
                implicit("c"),
                sfxed("d", "bar"),
                vararg("e"),
                varargs("f"),
            ],
            provides=[
                "A",
                sfxed("b", "FOO", keyword="bb"),
                implicit("C"),
                sfxed("d", "BAR", optional=True),
                sfx("FOOBAR"),
            ],
            aliases={
                "A": "aaa",
                "b": "bbb",
                "d": "ddd"
            },  # FIXME: "D" is implicit!
        ),
    )

    with debug_enabled(True), pytest.raises(ValueError, match="^Unsolvable"):
        pipe.compute()
    with debug_enabled(True), pytest.raises(
            ValueError, match="^Failed matching inputs <=> needs") as exc:
        pipe.compute({
            "a": 1,
            sfxed("b", "foo"): 2,
            "c": 3,
            sfxed("d", "bar"): 4,
            "e": 5,
            "f": [6, 7],
        })

    exc.value.jetsam.plot_fpath.unlink()
Example #12
    idx_miss_gear = cycle[c.g_max0] < 0
    ok_n = cycle.loc[:, c.ok_n]
    p_remain = cycle.loc[:, c.p_remain]
    cycle.loc[idx_miss_gear]


@autog.autographed(
    needs=[
        vararg("wltc_class_data/V_cycle"),
        vararg("V_dsc"),
        vararg("V_capped"),
        vararg("V_compensated"),
        vararg("forced_cycle"),
    ],
    provides=[
        sfxed("cycle", "init"),
        implicit("cycle/t"),
        implicit("cycle/V"),
    ],
)
def init_cycle_velocity(*velocities: Union[pd.Series, pd.DataFrame]) -> pd.DataFrame:
    """
    Concatenate velocities(series)/cycle(dataframe), cloning the last column as `V`.

    :param velocities:
        one or more velocity series and/or cycle dataframes sharing the same
        time-index (properly named), with the last one becoming `V`,
        unless a `V` column already exists

    :return:
        the concatenated cycle with 2-level columns (item, gear)
Example #13
    def yield_wrapped_ops(
        self,
        fn: Union[Callable, Tuple[Union[str, Collection[str]],
                                  Union[Callable, Collection[Callable]]], ],
        exclude=(),
        domain: Union[str, int, Collection] = None,
    ) -> Iterable[FnOp]:
        """
        Convert a (possibly **@autographed**) function into graphtik **FnOp** operations,
        respecting any configured overrides.

        :param fn:
            either a callable, or a 2-tuple(`name-path`, `fn-path`) for::

                [module[, class, ...]] callable

            - If `fn` is an operation, yielded as is (found also in 2-tuple).
            - Both tuple elements may be singulars, and are auto-tuple-zed.
            - The `name-path` may (or may not) correspond to the given `fn-path`,
              and is used to derive the operation-name; if not given, the function
              name is inspected.
            - The last elements of the `name-path` are overridden by names in decorations;
              if the decor-name is the "default" (`None`), the `name-path` becomes
              the op-name.
            - The `name-path` is not used when matching overrides.

        :param exclude:
            a list of decor-names to exclude, as stored in decors.
            Ignored if `fn` already an operation.
        :param domain:
            if given, overrides :attr:`domain` for :func:`.autographed` decorators
            to search.
            A single string is list-ified; the :func:`autographed` decors of the
            first matching domain are used.

        :return:
            one or more :class:`FnOp` instances (if more than one name is defined
            when the given function was :func:`autographed`).

        Overrides order: my-args, self.overrides, autograph-decorator, inspection

        See also: David Brubeck Quartet, "40 days"
        """
        if isinstance(fn, tuple):
            name_path, fn_path = fn
        else:
            name_path, fn_path = (), fn
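
        # For illustration (hypothetical names): a 2-tuple argument such as
        #     fn = (("mymod", "MyClass", "calc"), (MyClass, MyClass.calc))
        # takes the op-name from the name-path, wraps the last callable of the
        # fn-path, and (since the element before it is a class) prepends the
        # snake-cased object name `my_class` to the inspected needs.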

        fun_path = cast(Tuple[Callable, ...], astuple(fn_path, None))
        fun = fun_path[-1]

        if isinstance(fun, Operation):
            ## pass-through operations
            yield fun
            return

        def param_to_modifier(name: str, param: inspect.Parameter) -> str:
            return (optional(name)
                    # is optional?
                    if param.default is not inspect._empty  # type: ignore
                    else keyword(name)
                    if param.kind == Parameter.KEYWORD_ONLY else name)

        given_name_path = astuple(name_path, None)

        decors_by_name = get_autograph_decors(fun, {}, domain or self.domain)

        for decor_name, decors in decors_by_name.items() or ((None, {}), ):
            if given_name_path and not decor_name:
                name_path = decor_path = given_name_path
            else:  # Name in decors was "default"(None).
                name_path = decor_path = astuple(
                    (decor_name
                     if decor_name else func_name(fun, fqdn=1)).split("."),
                    None,
                )
                assert decor_path, locals()

                if given_name_path:
                    # Overlay `decor_path` over `name_path`, right-aligned.
                    name_path = (*name_path[:-len(decor_path)], *decor_path)

            fn_name = str(name_path[-1])
            if fn_name in exclude:
                continue
            overrides = self._from_overrides(decor_path)

            op_data = (ChainMap(overrides, decors) if (overrides and decors)
                       else overrides if overrides else decors)
            if op_data:
                log.debug("Autograph overrides for %r: %s", name_path, op_data)

            op_props = "needs provides renames, inp_sideffects out_sideffects".split(
            )
            needs, provides, override_renames, inp_sideffects, out_sideffects = (
                op_data.get(a, _unset) for a in op_props)

            sig = None
            if needs is _unset:
                sig = inspect.signature(fun)
                needs = [
                    param_to_modifier(name, param)
                    for name, param in sig.parameters.items() if name != "self"
                    and param.kind is not Parameter.VAR_KEYWORD
                ]
                ## Insert object as 1st need for object-methods.
                #
                if len(fun_path) > 1:
                    clazz = fun_path[-2]
                    # TODO: respect autograph decorator for object-names.
                    class_name = name_path[-2] if len(
                        name_path) > 1 else clazz.__name__
                    if is_regular_class(class_name, clazz):
                        log.debug("Object-method %s.%s", class_name, fn_name)
                        needs.insert(0, camel_2_snake_case(class_name))

            needs = aslist(needs, "needs")
            if ... in needs:
                if sig is None:
                    sig = inspect.signature(fun)
                needs = [
                    arg_name if n is ... else n
                    for n, arg_name in zip(needs, sig.parameters)
                ]

            if provides is _unset:
                if is_regular_class(fn_name, fun):
                    ## Convert class-name into object variable.
                    provides = camel_2_snake_case(fn_name)
                elif self.out_patterns:
                    provides = self._deduce_provides_from_fn_name(
                        fn_name) or _unset
                if provides is _unset:
                    provides = ()
            provides = aslist(provides, "provides")

            needs, provides = self._apply_renames(
                (override_renames, self.renames), (needs, provides))

            if inp_sideffects is not _unset:
                needs.extend((i if is_sfx(i) else sfxed(
                    *i) if isinstance(i, tuple) else sfx(i))
                             for i in aslist(inp_sideffects, "inp_sideffects"))

            if out_sideffects is not _unset:
                provides.extend(
                    (i if is_sfx(i) else sfxed(
                        *i) if isinstance(i, tuple) else sfx(i))
                    for i in aslist(out_sideffects, "out_sideffects"))

            if self.full_path_names:
                fn_name = self._join_path_names(*name_path)

            op_kws = self._collect_rest_op_args(decors)

            yield FnOp(fn=fun,
                       name=fn_name,
                       needs=needs,
                       provides=provides,
                       **op_kws)
Example #14
 def half_sfx():
     return {"DEP": 1, sfxed("DEP", "no"): False}
Example #15
def test_sideffected_canceled(sideffected_resched):
    """Check if a `returns-dict` op can cancel sideffecteds. """
    sol = sideffected_resched.compute({})
    print(sol)
    assert sol == {"DEP": 1, sfxed("DEP", "no"): False, "yes": "yes!"}
Example #16
        cycle.index.name = c.t
        cycle.reset_index()  ## Ensure Time-steps start from 0 (not 1!).

    return cycle


@autog.autographed(
    needs=[
        vararg("wltc_class_data/V_cycle"),
        vararg("V_dsc"),
        vararg("V_capped"),
        vararg("V_compensated"),
        optional("forced_cycle"),
    ],
    provides=[
        sfxed("cycle", "init"),
        modify("cycle/V", implicit=1),
        modify("cycle/index", implicit=1),
    ],
)
def init_cycle_velocity(*velocities: pd.Series,
                        forced_cycle=None) -> pd.DataFrame:
    """
    Concatenate velocities(series)/cycle(dataframe), cloning the last column as `V`.

    :param forced_cycle:
        any previous cycle data in the model
    :param velocities:
        one or more velocity series (properly named),
        with the last one becoming the `V`
Example #17
        x1 ops: str)
    """
    assert oneliner(op) == oneliner(exp)


@pytest.mark.parametrize(
    "provide, aliases, exp",
    [
        ("a", {
            "a": "aa"
        }, {
            "a": "k",
            "aa": "k"
        }),
        (
            sfxed("a", "1"),
            {
                sfxed("a", "1"): "aa"
            },
            {
                "a": "k",
                "aa": "k"
            },
        ),
        (
            "a",
            {
                "a": sfxed("a", "1")
            },
            {
                "a": "k"
Example #18
def test_network_nest_subdocs_NOT_LAYERED(solution_layered_false):
    days = ["Monday", "Tuesday", "Wednesday"]
    todos = sfxed("backlog", "todos")

    @operation(name="wake up",
               needs="backlog",
               provides=["tasks", todos],
               rescheduled=True)
    def pick_tasks(backlog):
        if not backlog:
            return NO_RESULT
        # Pick from backlog 1/3 of len-of-chars of my operation's (day) name.
        n_tasks = int(len(task_context.get().op.name) / 3)
        my_tasks, todos = backlog[:n_tasks], backlog[n_tasks:]
        return my_tasks, todos

    do_tasks = operation(None,
                         name="work!",
                         needs="tasks",
                         provides="daily_tasks")

    weekday = compose("weekday", pick_tasks, do_tasks)
    weekdays = [weekday.withset(name=d) for d in days]

    def nester(ra: RenArgs):
        dep = ra.name
        if ra.typ == "op":
            return True
        if ra.typ.endswith(".jsonpart"):
            return False
        if dep == "tasks":
            return True
        # if is_sfxed(dep):
        #     return modifier_withset(
        #         dep, sfx_list=[f"{ra.parent.name}.{s}" for s in dep._sfx_list]
        #     )
        if dep == "daily_tasks":
            return dep_renamed(dep, lambda n: f"{n}/{ra.parent.name}")
        return False

    week = compose("week", *weekdays, nest=nester)
    assert str(week) == re.sub(
        r"[\n ]{2,}",  # collapse all space-chars into a single space
        " ",
        """
        Pipeline('week', needs=['backlog', 'Monday.tasks', 'Tuesday.tasks', 'Wednesday.tasks'],
        provides=['Monday.tasks', sfxed('backlog', 'todos'),
                  'daily_tasks/Monday'($), 'Tuesday.tasks', 'daily_tasks/Tuesday'($),
                  'Wednesday.tasks', 'daily_tasks/Wednesday'($)],
        x6 ops: Monday.wake up, Monday.work!, Tuesday.wake up, Tuesday.work!,
        Wednesday.wake up, Wednesday.work!)
        """.strip(),
    )

    ## Add collector after nesting

    @operation(
        name="collect tasks",
        needs=[todos, *(vararg(f"daily_tasks/{d}") for d in days)],
        provides=["weekly_tasks", "todos"],
    )
    def collector(backlog, *daily_tasks):
        return daily_tasks or (), backlog or ()

    week = compose("week", week, collector)
    assert str(week) == re.sub(
        r"[\n ]{2,}",  # collapse all space-chars into a single space
        " ",
        """
        Pipeline('week',
            needs=['backlog',
                'Monday.tasks', 'Tuesday.tasks', 'Wednesday.tasks',
                sfxed('backlog', 'todos'),
                'daily_tasks/Monday'($?), 'daily_tasks/Tuesday'($?), 'daily_tasks/Wednesday'($?)],
            provides=['Monday.tasks',
                sfxed('backlog', 'todos'), 'daily_tasks/Monday'($),
                'Tuesday.tasks', 'daily_tasks/Tuesday'($),
                'Wednesday.tasks', 'daily_tasks/Wednesday'($),
                'weekly_tasks', 'todos'],
            x7 ops: Monday.wake up, Monday.work!, Tuesday.wake up, Tuesday.work!,
                    Wednesday.wake up, Wednesday.work!, collect tasks)
        """.strip(),
    )

    # Backlog of 17 exceeds the week's capacity (4 + 5 + 5) by 3 items.

    sol = week.compute({"backlog": range(17)},
                       layered_solution=solution_layered_false)
    assert sol == {
        "backlog": range(14, 17),
        "Monday.tasks": range(0, 4),
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
            "Wednesday": range(9, 14),
        },
        "Tuesday.tasks": range(4, 9),
        "Wednesday.tasks": range(9, 14),
        "weekly_tasks": (range(0, 4), range(4, 9), range(9, 14)),
        "todos": range(14, 17),
    }

    assert sol.overwrites == {
        "backlog": [range(14, 17),
                    range(9, 17),
                    range(4, 17),
                    range(0, 17)]
    }

    ## Backlog of 9 is exhausted after Tuesday, leaving Wednesday no tasks to enact.

    sol = week.compute({"backlog": range(9)},
                       layered_solution=solution_layered_false)
    assert sol == {
        "backlog": range(9, 9),
        "Monday.tasks": range(0, 4),
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
        },
        "Tuesday.tasks": range(4, 9),
        sfxed("backlog", "todos"): False,
        "weekly_tasks": (range(0, 4), range(4, 9)),
        "todos": (),
    }
    assert sol.overwrites == {
        "backlog": [range(9, 9), range(4, 9),
                    range(0, 9)]
    }
    sol = week.compute(
        {"backlog": range(9)},
        outputs=["backlog", "daily_tasks", "weekly_tasks", "todos"],
        layered_solution=solution_layered_false,
    )
    assert sol == {
        "backlog": range(9, 9),
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
        },
        "weekly_tasks": (range(0, 4), range(4, 9)),
        "todos": (),
    }

    ## Were failing due to eager eviction of "backlog".
    #
    sol = week.compute(
        {"backlog": range(9)},
        outputs=["daily_tasks", "weekly_tasks", "todos"],
        layered_solution=solution_layered_false,
    )
    assert sol == {
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
        },
        "weekly_tasks": (range(0, 4), range(4, 9)),
        "todos": (),
    }

    sol = week.compute(
        {"backlog": range(9)},
        outputs="daily_tasks/Monday",
        layered_solution=solution_layered_false,
    )
    assert sol == {"daily_tasks": {"Monday": range(0, 4)}}
    assert sol.overwrites == {}
    sol = week.compute(
        {"backlog": range(9)},
        outputs="daily_tasks",
        layered_solution=solution_layered_false,
    )
    assert sol == {
        "daily_tasks": {
            "Monday": range(0, 4),
            "Tuesday": range(4, 9),
        }
    }
    assert sol.overwrites == {}
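
The `rescheduled=True` flag on the "wake up" operations is what makes the short-backlog runs above work: returning `NO_RESULT` cancels the op's provides (note `sfxed('backlog', 'todos')` showing up as `False`) instead of failing the pipeline. A minimal sketch, assuming `NO_RESULT` is importable from the top-level `graphtik` package as used here:

from graphtik import NO_RESULT, compose, operation

@operation(needs="backlog", provides="tasks", rescheduled=True)
def pick(backlog):
    return list(backlog[:2]) if backlog else NO_RESULT   # cancel "tasks" when idle

pipe = compose("resched-demo", pick)
assert pipe.compute({"backlog": [1, 2, 3]})["tasks"] == [1, 2]
sol = pipe.compute({"backlog": []})
print(sol)   # expected: just {'backlog': []} -- the canceled "tasks" is absent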
Example #19
        for gnum, n2v in enumerate(n2v_ratios, 1)
    }

    wot_grid = pd.concat(
        wot_grids.values(),
        axis=1,
        keys=wot_grids.keys(),
        names=["gear", "item"],
        verify_integrity=True,
    ).swaplevel(axis=1)

    return wot_grid


@autog.autographed(
    provides=[sfxed("gwots", "p_avail"), implicit("gwots/p_avail")],
)
def attach_p_avail_in_gwots(gwots: pd.DataFrame, *, f_safety_margin) -> pd.DataFrame:
    """
    Attaches both `p_avail` and `p_avail_stable` for all gears.

    .. attention::
        Must NOT be interpolated along with the wot on the grid, or great INACCURACIES ensue.

    :param gwots:
        a df with 2-level multiindex columns, having at least (`g1`, 'p'), and
        optionally ('g1', 'ASM'), for each gear
        (as returned by :func:`interpolate_wot_on_v_grid()`).
    """
    w = wio.pstep_factory.get().wot
Example #20
                "Solution is not the last positive p_remain:",
                roots_head[0],
                v_max,
                wot.loc[v_max - 5 * v_step:v_max + 5 * v_step,
                        w.p_remain_stable],
            )
            rec = VMaxRec(v_max, n_v_max, gid, False, wot)
        else:
            rec = VMaxRec(np.NAN, np.NAN, gid, False, wot)

    return rec


@autog.autographed(
    needs=[
        sfxed("gwots", "p_avail"),
        implicit("gwots/p_resist"),
    ],
    provides=[
        *VMaxRec._fields[:-2],
        keyword("is_n_lim_vmax", "is_n_lim"),
        keyword("vmax_gwot", "wot"),  # `wot` causes cycle!
    ],
    inp_sideffects=[("gwots", "p_avail")],
    returns_dict=True,
)
def calc_v_max(gwots: Union[pd.Series, pd.DataFrame]) -> VMaxRec:
    """
    Finds maximum velocity by scanning gears from the top.

    TODO: accept `n_lim`