Example 1: test_Pipeline_configure, configuring pipeline steps with Merge and Reset and removing them with False

def test_Pipeline_configure(op_pipeline):
    pdef = op_pipeline

    pip = pdef.configure(addp=Merge(param=2)).make()
    assert pip(value=3) == {'value': 100}

    pip = pdef.configure({'addp.param': 2}).make()
    assert pip(value=3) == {'value': 100}

    pip = pdef.configure({
        'addp.param': 2
    }, addp=Merge(op=lambda p, x: p - x)).make()
    assert pip(value=3) == {'value': 4}

    pip = pdef.configure(addp=Reset(op=lambda p, x: p - x, param=2)).make()
    assert pip(value=3) == {'value': 4}

    with pytest.raises(TypeError):
        pdef.configure(addp=Reset(param=2)).make()

    pip = pdef.configure(mulp=False).make()
    assert pip(value=3) == {'value': 16}

    with pytest.raises(KeyError):
        pip = pdef.configure(mulp=False).configure(mulp=True).make()

    pip = pdef.configure(mulp=False).configure(addp=False).make()
    assert pip(value=3) == {'value': 9}

    with pytest.raises(KeyError):
        pdef.configure(quack=[1, 2])
Example 2: building a PipelineDefinition from command-line options, with Not-wrapped step exclusions and optional extra configuration

    def pipeline(self, default=steps.standard, config=None):
        if self.options['--scalar']:
            resources = scalar_pipeline.resources
        else:
            resources = standard_pipeline.resources
        all_steps = self.options['pipeline']
        pos = [p for p in all_steps if not isinstance(p, Not)]
        neg = {p.value for p in all_steps if isinstance(p, Not)}
        if not pos:
            pos = default
        final = [p for p in pos if p not in neg]
        pdef = PipelineDefinition(resources=resources,
                                  steps={p._name: p
                                         for p in final})
        opts = self.options['opts']
        if opts and steps.opt not in final:
            raise Exception('Optimizations can only be applied if the'
                            ' opt step is in the pipeline')
        elif opts:
            pdef = pdef.configure({'opt.opts': Merge(opts)})

        if callable(config):
            pdef = config(pdef)
        elif config:
            pdef = pdef.configure(config)

        return pdef.make()
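
The step-selection logic above (explicit names plus Not-wrapped exclusions, falling back to the default list when nothing is requested explicitly) can be exercised on its own. The sketch below is a standalone illustration, not the project's code: Not is re-declared as a trivial stand-in and the step names are invented.

class Not:
    """Stand-in for the wrapper used to mark a step for exclusion."""

    def __init__(self, value):
        self.value = value


def select_steps(requested, default):
    # Positive entries are step names; Not(...) entries are exclusions.
    pos = [s for s in requested if not isinstance(s, Not)]
    neg = {s.value for s in requested if isinstance(s, Not)}
    if not pos:
        # Nothing requested explicitly: start from the default step list.
        pos = default
    return [s for s in pos if s not in neg]


# Only exclusions given: the default list is used, minus the excluded step.
assert select_steps([Not('opt')], ['parse', 'opt', 'validate']) == ['parse', 'validate']
# Explicit steps given: exclusions still apply to them.
assert select_steps(['parse', 'opt', Not('opt')], ['parse', 'opt']) == ['parse']
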
Example 3: test_merge_modes, the reset, override and merge modes of merge() on sets, lists, tuples and dicts

def test_merge_modes():

    for x, y in [({1, 2}, {3, 4}), ([1, 2], [3, 4]), ((1, 2), (3, 4))]:

        assert merge(x, y, mode="reset") == y
        assert merge(x, Reset(y)) == y
        assert merge(x, Reset(y), mode="merge") == y

        assert merge(x, y, mode="override") == y
        assert merge(x, Override(y)) == y
        assert merge(x, Override(y), mode="merge") == y

    a = {"a": 1}
    b = {"b": 2}
    c = {"a": 1, "b": 2}

    assert merge(a, b, mode="reset") == b
    assert merge(a, b, mode="override") == c

    a = {"a": [1, 2], "b": [3, 4]}
    b = {"a": [5, 6], "b": Override([7, 8])}
    c = {"a": [1, 2, 5, 6], "b": [7, 8]}
    d = {"a": [5, 6], "b": [7, 8]}

    assert merge(a, b) == c
    assert merge(a, b, mode="override") == d

    a = {"a": [1, 2]}
    assert merge(a, Merge(a=[3, 4])) == {"a": [1, 2, 3, 4]}
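
The assertions above describe the merge semantics for the three modes in full. The following is a minimal, self-contained sketch of behaviour consistent with them, not myia's implementation; Merge, Override and Reset are re-declared here as toy wrappers.

class _Wrap:
    mode = None

    def __init__(self, value=None, **kwargs):
        # Allow both Merge([3, 4]) and Merge(a=[3, 4]) style construction.
        self.value = kwargs if value is None and kwargs else value


class Merge(_Wrap):
    mode = "merge"


class Override(_Wrap):
    mode = "override"


class Reset(_Wrap):
    mode = "reset"


def merge(a, b, mode="merge"):
    # A wrapper on the right-hand side takes precedence over the mode argument.
    if isinstance(b, _Wrap):
        mode, b = b.mode, b.value
    if mode == "reset":
        return b  # discard the left value entirely
    if isinstance(a, dict) and isinstance(b, dict):
        # Dicts are combined key by key in both "merge" and "override" mode.
        out = dict(a)
        for k, v in b.items():
            if k in a:
                out[k] = merge(a[k], v, mode)
            else:
                out[k] = v.value if isinstance(v, _Wrap) else v
        return out
    if mode == "override":
        return b  # non-dict values are simply replaced
    if isinstance(a, (list, tuple)):
        return type(a)(list(a) + list(b))  # "merge" concatenates sequences
    if isinstance(a, set):
        return a | b  # plausible for sets; not asserted by the test above
    return b
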
Example 4: an extended variant of test_Pipeline_configure that also covers re-enabling a removed step and configure_resources

def test_Pipeline_configure(op_pipeline):
    pdef = op_pipeline

    pip = pdef.configure(addp=Merge(param=2)).make()
    assert pip(value=3) == {'value': 100}

    pip = pdef.configure({'addp.param': 2}).make()
    assert pip(value=3) == {'value': 100}

    pip = pdef.configure(
        {'addp.param': 2},
        addp=Merge(op=lambda p, x: p - x)
    ).make()
    assert pip(value=3) == {'value': 4}

    pip = pdef.configure(addp=Reset(op=lambda p, x: p - x, param=2)).make()
    assert pip(value=3) == {'value': 4}

    with pytest.raises(TypeError):
        pdef.configure(addp=Reset(param=2)).make()

    pip = pdef.configure(mulp=False).make()
    assert pip(value=3) == {'value': 16}

    pip = pdef.configure(mulp=False).configure(mulp=True).make()
    assert pip(value=3) == {'value': 64}

    pip = pdef.configure(mulp=False).configure(addp=False).make()
    assert pip(value=3) == {'value': 9}

    pip = pdef.configure(param=3).make()
    assert pip(value=3) == {'value': 144}

    with pytest.raises(KeyError):
        pdef.configure(quack=[1, 2])

    pdef2 = pdef.configure_resources(quack=[1, 2])
    assert pdef2.make().resources.quack == [1, 2]
    assert pdef2.configure(quack=Merge([3])).make().resources.quack \
        == [1, 2, 3]
    assert pdef2.configure(quack=[3]).make().resources.quack == [3]
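
A toy sketch of the configure() behaviour these tests exercise: keyword arguments address whole steps (Merge updates a step's arguments, Reset replaces them, False removes the step), dotted keys address a single argument, and unknown names raise KeyError, as in the first variant of the test. It reuses the toy Merge and Reset wrappers sketched under Example 3; make(), resources, re-enabling a removed step and the bare param=3 case are left out, and this is not myia's PipelineDefinition.

class ToyPipelineDefinition:
    def __init__(self, steps):
        # steps maps a step name to that step's keyword arguments, e.g.
        # {'addp': {'op': <callable>, 'param': 1}, 'mulp': {...}}
        self.steps = {name: dict(args) for name, args in steps.items()}

    def configure(self, changes=None, **kwargs):
        new = {name: dict(args) for name, args in self.steps.items()}
        # Keyword arguments address whole steps.
        for name, value in kwargs.items():
            if name not in new:
                raise KeyError(name)
            if value is False:
                del new[name]                  # drop the step
            elif isinstance(value, Reset):
                new[name] = dict(value.value)  # replace all of its arguments
            elif isinstance(value, Merge):
                new[name].update(value.value)  # update some of its arguments
        # Dotted keys ('step.argument') address a single argument.
        for key, value in (changes or {}).items():
            step, _, arg = key.partition('.')
            if step not in new:
                raise KeyError(step)
            new[step][arg] = value
        return ToyPipelineDefinition(new)


pdef_toy = ToyPipelineDefinition({'addp': {'param': 1}, 'mulp': {'param': 2}})
assert pdef_toy.configure(addp=Merge(param=2)).steps['addp'] == {'param': 2}
assert pdef_toy.configure({'addp.param': 2}).steps['addp'] == {'param': 2}
assert 'mulp' not in pdef_toy.configure(mulp=False).steps
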
Example 5: configuring scalar_pipeline by merging entries into resources.convert.object_map before selecting steps

from myia.pipeline import scalar_pipeline
from myia.prim import Primitive, ops as prim
from myia.utils import InferenceError, Merge
from myia.utils.unify import Var, var

from ..common import f64, i64, to_abstract_test

X = Var('X')
Y = Var('Y')
V = var(lambda n: n.is_constant())


parse = scalar_pipeline \
    .configure({
        'resources.convert.object_map': Merge({
            operations.getitem: prim.tuple_getitem,
            operations.user_switch: prim.switch
        })
    }) \
    .select('resources', 'parse', 'resolve') \
    .make_transformer('input', 'graph')


specialize = scalar_pipeline \
    .configure({
        'resources.convert.object_map': Merge({
            operations.getitem: prim.tuple_getitem
        })
    }) \
    .select('resources', 'parse', 'resolve', 'infer', 'specialize')

# We will optimize patterns of these fake primitives
Example 6: test_cleanup_subclass, cleanup() preserves the TypeMap subclass while unwrapping Merge

def test_cleanup_subclass():
    a = TypeMap({int: Merge("int")})
    ca = cleanup(a)
    assert isinstance(ca, TypeMap)
    assert ca == TypeMap({int: "int"})
Example 7: test_cleanup, cleanup() unwraps Merge and Override and drops DELETE entries

def test_cleanup():
    a = dict(a=1, b=[2, Merge(3)], c=Override(4), d=DELETE)
    assert cleanup(a) == dict(a=1, b=[2, 3], c=4)
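
Together with Example 6, this pins down cleanup(): wrappers are unwrapped recursively, including inside lists, keys mapped to DELETE are dropped, and the container type (such as a TypeMap mapping) is preserved. Below is a minimal sketch consistent with both tests, reusing the toy wrappers from Example 3 and a stand-in DELETE sentinel; it is not myia's implementation.

DELETE = object()  # stand-in for the DELETE marker used in the test above


def cleanup(value):
    if isinstance(value, _Wrap):
        # Unwrap Merge/Override/Reset and clean the wrapped value.
        return cleanup(value.value)
    if isinstance(value, dict):
        # Rebuilding with type(value)(...) keeps dict subclasses
        # (a TypeMap-like mapping, for instance) intact.
        return type(value)(
            {k: cleanup(v) for k, v in value.items() if v is not DELETE}
        )
    if isinstance(value, (list, tuple, set)):
        return type(value)(cleanup(v) for v in value)
    return value


a = {"a": 1, "b": [2, Merge(3)], "c": Override(4), "d": DELETE}
assert cleanup(a) == {"a": 1, "b": [2, 3], "c": 4}
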
Example 8: a variant of the scalar_pipeline setup with object_map configured directly under convert.object_map

    cse, NodeMap
from myia.prim import Primitive, ops as prim
from myia.utils import Merge
from myia.utils.unify import Var, var

from ..common import i64, f64, to_abstract_test


X = Var('X')
Y = Var('Y')
V = var(lambda n: n.is_constant())


parse = scalar_pipeline \
    .configure({
        'convert.object_map': Merge({operations.getitem: prim.tuple_getitem})
    }) \
    .select('parse', 'resolve') \
    .make_transformer('input', 'graph')


specialize = scalar_pipeline \
    .configure({
        'convert.object_map': Merge({operations.getitem: prim.tuple_getitem})
    }) \
    .select('parse', 'resolve', 'infer', 'specialize')


# We will optimize patterns of these fake primitives

Example 9: a variant of the same setup using with_steps instead of select

from myia.testing.common import i64, to_abstract_test
from myia.utils import InferenceError, Merge
from myia.utils.unify import Var, var
from myia.validate import ValidationError

X = Var("X")
Y = Var("Y")
V = var(lambda n: n.is_constant())


parse = (
    scalar_pipeline.configure(
        {
            "convert.object_map": Merge(
                {
                    operations.getitem: prim.tuple_getitem,
                    operations.user_switch: prim.switch,
                }
            ),
        }
    )
    .with_steps(steps.step_parse, steps.step_copy)
    .make_transformer("input", "graph")
)


specialize = scalar_pipeline.configure(
    {"convert.object_map": Merge({operations.getitem: prim.tuple_getitem})}
).with_steps(steps.step_parse, steps.step_infer, steps.step_specialize)


# We will optimize patterns of these fake primitives
Example 10: a variant using select() with an explicit mapping for the resolve step

)
from myia.pipeline import scalar_pipeline, steps
from myia.utils import InferenceError, Merge
from myia.utils.unify import Var, var
from myia.validate import ValidationError

from ..common import i64, to_abstract_test

X = Var("X")
Y = Var("Y")
V = var(lambda n: n.is_constant())

parse = (scalar_pipeline.configure({
    "resources.convert.object_map":
    Merge({
        operations.getitem: prim.tuple_getitem,
        operations.user_switch: prim.switch,
    })
}).select("resources", "parse", {
    "resolve": steps.step_resolve
}).make_transformer("input", "graph"))

specialize = scalar_pipeline.configure({
    "resources.convert.object_map":
    Merge({operations.getitem: prim.tuple_getitem})
}).select("resources", "parse", {"resolve": steps.step_resolve}, "infer",
          "specialize")

# We will optimize patterns of these fake primitives

P = Primitive("P")
Q = Primitive("Q")