Example #1
def _grad_test(fn,
               obj,
               args,
               sens_type=f64,
               pipeline=grad_pipeline,
               rel_error=1e-3,
               argspec=None):
    pipeline = pipeline.insert_after('parse', grad_wrap=grad_wrap)
    if argspec is None:
        argspec = tuple(
            from_value(arg, broaden=True) for arg in clean_args(args))
    else:
        argspec = tuple(to_abstract_test(x) for x in argspec)
    sens_type = to_abstract_test(sens_type)
    if isinstance(obj, FunctionType):
        res = pipeline.run(input=obj, argspec=[*argspec, sens_type])
    else:
        pip = pipeline.configure(parse=False)
        res = pip.run(graph=obj, argspec=[*argspec, sens_type])
    gtest = GradTester(fn=fn,
                       gfn=res['output'],
                       args=args,
                       argnames=[f'in{i}' for i in range(len(args))],
                       outnames=None,
                       rel_error=rel_error)
    gtest.assert_match()
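
The GradTester helper used above is not shown in these excerpts; as a rough, standalone illustration of the relative-error comparison such a gradient test performs, here is a sketch based on central finite differences (finite_difference_check, fn, grad_fn and the sample call below are illustrative only, not Myia's API):

def finite_difference_check(fn, grad_fn, args, rel_error=1e-3, eps=1e-6):
    # Compare an analytic gradient against central finite differences,
    # one scalar argument at a time, using a relative-error tolerance.
    args = [float(a) for a in args]
    grads = grad_fn(*args)
    for i in range(len(args)):
        up = list(args)
        down = list(args)
        up[i] += eps
        down[i] -= eps
        estimate = (fn(*up) - fn(*down)) / (2 * eps)
        denom = max(abs(estimate), abs(grads[i]), 1e-12)
        assert abs(estimate - grads[i]) / denom < rel_error


# d/dx (x * y) = y and d/dy (x * y) = x, so this check passes.
finite_difference_check(lambda x, y: x * y, lambda x, y: (y, x), (3.0, 4.0))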
Example #2
def _grad_test(
    fn,
    obj,
    args,
    sens_type=f64,
    pipeline=grad_pipeline,
    rel_error=1e-3,
    argspec=None,
):
    pipeline = pipeline.insert_after(steps.step_parse, grad_wrap)
    if argspec is None:
        argspec = tuple(
            from_value(arg, broaden=True) for arg in clean_args(args))
    else:
        argspec = tuple(to_abstract_test(x) for x in argspec)
    sens_type = to_abstract_test(sens_type)
    if isinstance(obj, FunctionType):
        res = pipeline(input=obj, argspec=[*argspec, sens_type])
    else:
        pip = pipeline.without_step(steps.step_parse)
        res = pip(graph=obj, argspec=[*argspec, sens_type])
    gtest = GradTester(
        fn=fn,
        gfn=res["output"],
        args=args,
        argnames=[f"in{i}" for i in range(len(args))],
        outnames=None,
        rel_error=rel_error,
    )
    gtest.assert_match()
Example #3
def make_argspec(args, broad_specs):
    if broad_specs is None:
        broad_specs = (True,) * len(args)
    return tuple(
        from_value(arg, broaden=bs)
        for bs, arg in zip(broad_specs, clean_args(args))
    )
Example #4
 def test(args):
     if not isinstance(args, tuple):
         args = (args,)
     py_result = fn(*map(copy, args))
     argspec = tuple(from_value(arg, broaden=True) for arg in args)
     myia_fn = pipeline.run(input=fn, argspec=argspec)['output']
     myia_result = myia_fn(*map(copy, args))
     assert py_result == myia_result
Example #5
 def test(backend_opt, args):
     if not isinstance(args, tuple):
         args = (args, )
     ref_result = fn(*map(copy, args))
     argspec = tuple(from_value(arg, broaden=True) for arg in args)
     res = backend_opt.pip(input=fn, argspec=argspec)
     myia_fn = res['output']
     myia_args = backend_opt.convert_args(args)
     myia_result = myia_fn(*myia_args)
     np.testing.assert_allclose(ref_result, myia_result)
Example #6
 def test(args):
     if not isinstance(args, tuple):
         args = (args, )
     if python:
         ref_result = fn(*map(copy, args))
     argspec = tuple(from_value(arg, broaden=True) for arg in args)
     res = pipeline.run(input=fn, argspec=argspec)
     myia_fn = res['output']
     myia_result = myia_fn(*map(copy, args))
     if python:
         if justeq:
             assert ref_result == myia_result
         else:
             np.testing.assert_allclose(ref_result, myia_result)
Example #7
def _grad_test(fn,
               obj,
               args,
               sens_type,
               pipeline=grad_pipeline,
               rel_error=1e-3):

    pytorch_grads = pt_fn_grads(fn, *args)

    sens_type_shape = sens_type
    if sens_type == ():
        sens_type = APT_0d_loss
    elif sens_type == (1, ):
        sens_type = APT_loss
    else:
        sens_type = AbstractArray(AbstractScalar({
            TYPE: f32,
            VALUE: ANYTHING
        }), {
            SHAPE: sens_type,
            TYPE: PyTorchTensor
        })

    pipeline = standard_pipeline
    pipeline = pipeline.insert_after('parse', grad_wrap=grad_wrap)
    argspec = tuple(from_value(arg, broaden=True) for arg in clean_args(args))
    sens_type = to_abstract_test(sens_type)
    if isinstance(obj, FunctionType):
        res = pipeline.run(input=obj, argspec=[*argspec, sens_type])
    else:
        pip = pipeline.configure(parse=False)
        res = pip.run(graph=obj, argspec=[*argspec, sens_type])

    if sens_type == APT_loss:
        sens = torch.Tensor([1.0])
    elif sens_type == APT_0d_loss:
        sens = torch.Tensor([1.0]).reshape(())
    else:
        sens = torch.ones(sens_type_shape)

    myia_grads = res['output'](*args, sens)

    for pt_g, my_g in zip(pytorch_grads, myia_grads):
        # print("pytorch_grad", pt_g)
        # print("myia_grad", my_g)
        assert torch.allclose(pt_g,
                              my_g,
                              rtol=1e-05,
                              atol=1e-06,
                              equal_nan=True)
Example #8
 def test(args):
     nonlocal profile
     if not isinstance(args, tuple):
         args = (args, )
     if python:
         ref_result = fn(*map(copy, args))
     argspec = tuple(from_value(arg, broaden=True) for arg in args)
     if profile is True:
         profile = Profile()
     res = pipeline.run(input=fn, argspec=argspec, profile=profile)
     profile.print()
     myia_fn = res['output']
     myia_result = myia_fn(*map(copy, args))
     if python:
         np.testing.assert_allclose(ref_result, myia_result)
Example #9
def _fwd_test(fn, args, pipeline=standard_pipeline,
              optimize=True, python=True):
    if python:
        ref_result = fn(*map(copy, args))
    argspec = tuple(from_value(arg, broaden=True) for arg in args)
    res = pipeline.run(input=fn, argspec=argspec)
    myia_fn = res['output']
    myia_result = myia_fn(*map(copy, args))

    if type(ref_result) == torch.Tensor and type(myia_result) == torch.Tensor:
        assert torch.allclose(ref_result, myia_result, equal_nan=True)
        assert ref_result.shape == myia_result.shape
        return tuple(myia_result.shape)
    else:
        assert np.isclose(ref_result, myia_result)
        return tuple()
Example #10
def test_switch_nontail():
    def fn(x, y):
        def f1():
            return x

        def f2():
            return y

        a = P.switch(x > y, f1, f2)()
        return a * a

    i64 = from_value(1, broaden=True)
    argspec = (i64, i64)
    myia_fn = compile_pipeline.run(input=fn, argspec=argspec)['output']

    for test in [(6, 23, 23**2), (67, 23, 67**2)]:
        *args, expected = test
        assert myia_fn(*args) == expected
Example #11
            def run_test(args):
                if isinstance(args, Exception):
                    exc = type(args)
                    args = args.args
                else:
                    exc = None
                pdef = pipeline
                if not validate:
                    pdef = pdef.configure(validate=False)
                pip = pdef.make()
                if abstract is None:
                    argspec = tuple(
                        from_value(arg, broaden=True) for arg in args)
                else:
                    argspec = tuple(to_abstract_test(a) for a in abstract)

                if exc is not None:
                    try:
                        mfn = pip(input=fn, argspec=argspec)
                        mfn['output'](*args)
                    except exc:
                        pass
                    return

                result_py = fn(*args)

                try:
                    res = pip(input=fn, argspec=argspec)
                except InferenceError as ierr:
                    print_inference_error(ierr)
                    raise ierr
                except ValidationError as verr:
                    print('Collected the following errors:')
                    for err in verr.errors:
                        n = err.node
                        nlbl = lbl.label(n)
                        tname = type(n).__name__
                        print(f'   {nlbl} ({tname}) :: {n.abstract}')
                        print(f'      {err.args[0]}')
                    raise verr

                result_final = res['output'](*args)
                assert _eq(result_py, result_final)
Example #12
            def run_test(args):
                pip = pipeline.make()
                argspec = tuple(from_value(arg, broaden=True) for arg in args)

                result_py = fn(*args)

                try:
                    res = pip(input=fn, argspec=argspec)
                except InferenceError as ierr:
                    print_inference_error(ierr)
                    raise ierr
                except ValidationError as verr:
                    print('Collected the following errors:')
                    for err in verr.errors:
                        n = err.node
                        nlbl = lbl.label(n)
                        print(f'   {nlbl} ({type(n).__name__}) :: {n.type}')
                        print(f'      {err.args[0]}')
                    raise verr

                result_final = res['output'](*args)
                assert _eq(result_py, result_final)
Example #13
def _runwith(f, *args):
    argspec = tuple(from_value(arg, broaden=True) for arg in args)
    res = grad_pipeline.run(input=f, argspec=argspec)
    return res['output'](*args)
Example #14
def _run(
    self,
    fn,
    args,
    result=None,
    abstract=None,
    broad_specs=None,
    validate=True,
    pipeline=standard_pipeline,
    backend=None,
    numpy_compat=True,
    **kwargs,
):
    """Test a Myia function.

    Arguments:
        fn: The Myia function to test.
        args: The args for the function.
        result: The expected result, or an exception subclass. If result is
            None, we will call the Python version of the function to compare
            with.
        abstract: The argspec. If None, it will be derived automatically from
            the args.
        broad_specs: For each argument, whether to broaden the type. By
            default, broaden all arguments.
        validate: Whether to run the validation step.
        pipeline: The pipeline to use.
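        backend: A (name, options) pair used to configure the pipeline's
            backend resources.
        numpy_compat: Whether to run the compiled function again with every
            torch.Tensor argument converted to a NumPy array.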
    """

    if backend:
        backend_name = backend[0]
        backend_options = backend[1]

        pipeline = pipeline.configure({
            "resources.backend.name": backend_name,
            "resources.backend.options": backend_options,
        })

    if abstract is None:
        if broad_specs is None:
            broad_specs = (True, ) * len(args)
        argspec = tuple(
            from_value(arg, broaden=bs) for bs, arg in zip(broad_specs, args))
    else:
        argspec = tuple(to_abstract_test(a) for a in abstract)

    if not validate:
        pipeline = pipeline.configure(validate=False)

    def out(args):
        pip = pipeline.make()
        mfn = pip(input=fn, argspec=argspec)
        rval = mfn["output"](*args)
        return rval

    if result is None:
        result = fn(*args)

    self.check(out, args, result, **kwargs)

    if numpy_compat:
        args_torch = args
        args = ()
        for _ in args_torch:
            if isinstance(_, torch.Tensor):
                args += (_.detach().numpy(), )
            else:
                args += (_, )

        if abstract is None:
            if broad_specs is None:
                broad_specs = (True, ) * len(args)
            argspec = tuple(
                from_value(arg, broaden=bs)
                for bs, arg in zip(broad_specs, args))
        else:
            argspec = tuple(to_abstract_test(a) for a in abstract)

        out(args)
Example #15
    @property
    def absprop(self):
        return self.abs()


@dataclass(frozen=True)
class Point3D(ArithmeticData):
    x: object
    y: object
    z: object

    def abs(self):
        return (self.x ** 2 + self.y ** 2 + self.z ** 2) ** 0.5


Thing_f = from_value(Thing(1.0), broaden=True)
Thing_ftup = from_value(Thing((1.0, 2.0)), broaden=True)


########
# ADTs #
########


@dataclass(frozen=True)
class Pair(ADT):
    left: object
    right: object


def make_tree(depth, x):
Example #16

#########
# Tests #
#########


def make_model(dtype='float64'):
    return Model(layers=(
        TanhLayer(MA(6, 9, dtype=dtype), zeros(1, 9, dtype=dtype)),
        TanhLayer(MB(9, 10, dtype=dtype), zeros(1, 10, dtype=dtype)),
        TanhLayer(MC(10, 8, dtype=dtype), zeros(1, 8, dtype=dtype)),
    ))


Model_t = from_value(make_model(), broaden=True)
Model_t_f32 = from_value(make_model('float32'), broaden=True)


def cost(model, x, y):
    yy = model.apply(x)
    diff = (yy - y)
    return (array_reduce(scalar_add, diff**2, ())).item()


@infer_std(
    (make_model(), MC(3, 6), af64_of(3, 8)),
    (make_model('float32'), MC(3, 6), InferenceError),
    (make_model('float32'), MC(3, 6, dtype='float32'), af32_of(3, 8)),
    (make_model(), MC(3, 9), InferenceError),
)
Example #17
i16 = AbstractScalar({VALUE: ANY, TYPE: Int[16]})
i32 = AbstractScalar({VALUE: ANY, TYPE: Int[32]})
i64 = AbstractScalar({VALUE: ANY, TYPE: Int[64]})

f16 = AbstractScalar({VALUE: ANY, TYPE: Float[16]})
f32 = AbstractScalar({VALUE: ANY, TYPE: Float[32]})
f64 = AbstractScalar({VALUE: ANY, TYPE: Float[64]})

li16 = AbstractList(i16)
li32 = AbstractList(i32)
li64 = AbstractList(i64)

lf16 = AbstractList(f16)
lf32 = AbstractList(f32)
lf64 = AbstractList(f64)


@dataclass(frozen=True)
class Point:
    x: i64
    y: i64

    def abs(self):
        return (self.x**2 + self.y**2)**0.5

    def __add__(self, other):
        return Point(self.x * other.x, self.y * other.y)


pt = from_value(Point(1, 2), broaden=True)
lpt = AbstractList(pt)
Example #18
 def argspec(self):
     return [
         v if isinstance(v, AbstractValue) else from_value(v, broaden=True)
         for v in self['args']
     ]
Example #19
@infer((i64, i16, i32, i64, U(i16, i32, i64)))
def test_tagged_more(c, x, y, z):
    if c == 0:
        return tagged(x)
    elif c > 0:
        return tagged(y)
    else:
        return tagged(z)


@infer((i64, InferenceError))
def test_tagged_too_many_arguments(x):
    return tagged(x, 1, 2)


pair_t1 = from_value(Pair(Pair(1, 2), Pair(2, 3)))
pair_t1_u = pair_t1.attributes['left']


@infer((i64, pair_t1_u))
def test_tagged_adt(depth):
    return make_tree(depth, 1)


pair_t2 = from_value(Pair(1, Pair(2, Pair(3, None))))
pair_t2_u = pair_t2.attributes['right']


@infer((i64, pair_t2_u))
def test_tagged_adt_2(depth):
    return countdown(depth)