    def _get_autodiff_grads_and_val(self, f_str, tensor_vals, get_op_tree=False,
                                    next_error=None):
        '''
        get autodiff grads and value from an op-tree string expression
        f_str: the expression string to be evaluated; it may reference the
               backend as `be` and the input tensors as x0, x1, ...
        tensor_vals: numpy values used to initialize the backend tensors
        '''
        # backend, referenced by name inside f_str
        be = self.be
        # init backend tensors x0, x1, ... in an explicit namespace so that
        # eval() below can resolve them by name
        tensors = [be.array(val, name='x%d' % i, dtype=self.dtype)
                   for i, val in enumerate(tensor_vals)]
        namespace = {'be': be}
        namespace.update({'x%d' % i: t for i, t in enumerate(tensors)})
        # build the op-tree
        f = eval(f_str, namespace)
        # evaluate op tree
        f_val = be.empty(f.shape)
        f_val[:] = f
        # init next error
        if next_error is not None:
            next_error = self.be.array(next_error)
        # get gradient
        ad = Autodiff(f, be, next_error=next_error)
        # return gradients either as op-trees or as numpy arrays
        if get_op_tree:
            gradients = list(ad.get_grad_op_tree(tensors))
        else:
            gradients = list(ad.get_grad_asnumpyarray(tensors))

        return [gradients, f_val.get()]
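
Gradient helpers like the one above are commonly cross-checked against numerical differentiation. Below is a self-contained numpy sketch of such a check; `numeric_grad` is a hypothetical helper written for illustration, not part of neon or of the snippets here.

import numpy as np

def numeric_grad(f, x, eps=1e-6):
    """Central-difference estimate of df/dx for a scalar-valued f."""
    grad = np.empty_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        orig = x[idx]
        x[idx] = orig + eps          # f(x + eps * e_idx)
        f_plus = f(x)
        x[idx] = orig - eps          # f(x - eps * e_idx)
        f_minus = f(x)
        x[idx] = orig                # restore the perturbed entry
        grad[idx] = (f_plus - f_minus) / (2.0 * eps)
        it.iternext()
    return grad

# d/dx sum(x * x) == 2 * x, elementwise
x = np.random.rand(3, 3)
assert np.allclose(numeric_grad(lambda t: np.sum(t * t), x), 2 * x, atol=1e-4)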
Example 2
def get_audiff_gradient(f, be, tensors):
    """
    get autodiff gradient w.r.t the tensors
    """
    op_tree = f(be, *tensors)
    ad = Autodiff(op_tree, be)
    return ad.get_grad_asnumpyarray(tensors)
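
A minimal usage sketch for the helper above, assuming a CPU backend created via neon's `gen_backend` (import path and defaults may vary across neon versions):

import numpy as np
from neon.backends import gen_backend

be = gen_backend('cpu')
x0 = be.array(np.ones((3, 3)) * 2.0, name='x0')
x1 = be.array(np.ones((3, 3)) * 3.0, name='x1')

# f receives the backend and the unpacked tensors and returns an op-tree
grads = get_audiff_gradient(lambda be, x0, x1: x0 * x1 + x0, be, [x0, x1])
# analytically: d/dx0 = x1 + 1 = 4, d/dx1 = x0 = 2 (elementwise)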
Example 3
    def test_hard_coded(self):
        """
        The most basic test case
        """
        be = self.be
        x0 = be.array(np.ones((3, 3)) * 1, name='x0', dtype=self.dtype)
        x1 = be.array(np.ones((3, 3)) * 2, name='x1', dtype=self.dtype)
        x2 = be.array(np.ones((3, 3)) * 3, name='x2', dtype=self.dtype)
        x3 = be.array(np.ones((3, 3)) * 5, name='x3', dtype=self.dtype)

        f = x0 * x0 - x1 * x0 + x0 * x2 - x2 * x1 * x0 + x3 * x3 * x3
        ad = Autodiff(f, be)

        # expected analytic gradients at x0=1, x1=2, x2=3, x3=5
        x0_grad = be.array(np.ones((3, 3)) * -3, dtype=self.dtype)
        x1_grad = be.array(np.ones((3, 3)) * -4, dtype=self.dtype)
        x2_grad = be.array(np.ones((3, 3)) * -1, dtype=self.dtype)
        x3_grad = be.array(np.ones((3, 3)) * 75, dtype=self.dtype)

        np.testing.assert_allclose(
            ad.get_grad_asnumpyarray([x0])[0], x0_grad.get(), atol=1e-5)
        np.testing.assert_allclose(
            ad.get_grad_asnumpyarray([x1])[0], x1_grad.get(), atol=1e-5)
        np.testing.assert_allclose(
            ad.get_grad_asnumpyarray([x2])[0], x2_grad.get(), atol=1e-5)
        np.testing.assert_allclose(
            ad.get_grad_asnumpyarray([x3])[0], x3_grad.get(), atol=1e-5)
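
The hard-coded expected gradients in the test above can be re-derived symbolically; the following sympy sketch (sympy is not used by the original tests) reproduces them:

import sympy as sp

x0, x1, x2, x3 = sp.symbols('x0 x1 x2 x3')
f = x0 * x0 - x1 * x0 + x0 * x2 - x2 * x1 * x0 + x3 * x3 * x3
at_point = {x0: 1, x1: 2, x2: 3, x3: 5}
grads = [sp.diff(f, v).subs(at_point) for v in (x0, x1, x2, x3)]
print(grads)  # [-3, -4, -1, 75], matching the test's expected values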
Example 4
    def _get_autodiff_grads_and_val(self, f_str, tensor_vals, get_op_tree=False):
        '''
        get autodiff grads and value from an op-tree string expression
        f_str: the expression string to be evaluated; it may reference the
               backend as `be` and the input tensors as x0, x1, ...
        tensor_vals: numpy values used to initialize the backend tensors
        '''
        # backend, referenced by name inside f_str
        be = self.be
        # init backend tensors x0, x1, ... in an explicit namespace so that
        # eval() below can resolve them by name
        tensors = [be.array(val, name='x%d' % i, dtype=self.dtype)
                   for i, val in enumerate(tensor_vals)]
        namespace = {'be': be}
        namespace.update({'x%d' % i: t for i, t in enumerate(tensors)})
        # build the op-tree
        f = eval(f_str, namespace)
        # evaluate op tree
        f_val = be.empty(f.shape)
        f_val[:] = f
        # get gradient
        ad = Autodiff(f, be)
        # return gradients either as op-trees or as numpy arrays
        if get_op_tree:
            gradients = list(ad.get_grad_op_tree(tensors))
        else:
            gradients = list(ad.get_grad_asnumpyarray(tensors))

        return [gradients, f_val.get()]
Example 6
    def test_hard_coded(self):
        """
        The most basic test case
        """
        be = self.be
        x0 = be.array(np.ones((3, 3)) * 1, name='x0', dtype=self.dtype)
        x1 = be.array(np.ones((3, 3)) * 2, name='x1', dtype=self.dtype)
        x2 = be.array(np.ones((3, 3)) * 3, name='x2', dtype=self.dtype)
        x3 = be.array(np.ones((3, 3)) * 5, name='x3', dtype=self.dtype)

        f = x0 * x0 - x1 * x0 + x0 * x2 - x2 * x1 * x0 + x3 * x3 * x3
        ad = Autodiff(f, be)

        # expected analytic gradients at x0=1, x1=2, x2=3, x3=5
        x0_grad = be.array(np.ones((3, 3)) * -3, dtype=self.dtype)
        x1_grad = be.array(np.ones((3, 3)) * -4, dtype=self.dtype)
        x2_grad = be.array(np.ones((3, 3)) * -1, dtype=self.dtype)
        x3_grad = be.array(np.ones((3, 3)) * 75, dtype=self.dtype)

        assert np.allclose(ad.get_grad_asnumpyarray([x0])[0],
                           x0_grad.get(),
                           atol=1e-5)
        assert np.allclose(ad.get_grad_asnumpyarray([x1])[0],
                           x1_grad.get(),
                           atol=1e-5)
        assert np.allclose(ad.get_grad_asnumpyarray([x2])[0],
                           x2_grad.get(),
                           atol=1e-5)
        assert np.allclose(ad.get_grad_asnumpyarray([x3])[0],
                           x3_grad.get(),
                           atol=1e-5)