def __call__(self, *args, **kwargs):
    """
    Apply the forward pass to the data and the gradient.
    Return a new Variable with the updated data and gradient.
    """
    # deferred import to avoid a circular import between block and variable
    if 'Variable' not in dir():
        from autograd.variable import Variable

    new_data = self.data_fn(*args, **kwargs)

    if ad.mode == 'forward':
        # in forward mode, the gradients flow along with the data,
        # so we return a full Variable carrying both
        new_grad = self.gradient_forward(*args, **kwargs)
        return Variable(new_data, new_grad, input_node=False)

    elif ad.mode == 'reverse':
        # in reverse mode, we make a forward pass on the data and store
        # the local jacobians; Constants are deliberately kept out of
        # the computational graph
        input_variables = []
        variables_indexes = []
        for index, arg in enumerate(args):
            if isinstance(arg, Variable):
                input_variables.append(arg)
                variables_indexes.append(index)

        children_nodes = [var.node for var in input_variables]
        children_jacs = self.get_jacobians(*args, **kwargs)

        # in reverse mode, the Variable does not store the gradients
        output_variable = Variable(new_data, input_node=False)

        # link each input node to the output together with the jacobian
        # of the output with respect to that input
        for list_index, arg_index in enumerate(variables_indexes):
            output_variable.node.childrens.append({
                'node': children_nodes[list_index],
                'jacobian': children_jacs[arg_index],
            })

        return output_variable

    else:
        raise ValueError('unknown mode: {}'.format(ad.mode))
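

# A minimal usage sketch of the two modes (illustrative only; it assumes
# the ad module, the Variable class and the sin block exercised by the
# tests below). Calling a block dispatches through __call__ above.
def demo_modes():
    data = np.random.random(3)

    ad.set_mode('forward')
    y = sin()(Variable(data))  # gradient flows alongside the data
    print(y.data, y.gradient)

    ad.set_mode('reverse')
    y = sin()(Variable(data))  # only local jacobians are stored
    y.compute_gradients()      # backward pass fills in y.gradient
    print(y.data, y.gradient)
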
def test_gt_forward():
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data + 1)
    y = Variable(data)
    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert (x > y).all(), 'greater-than forward failed'
def test_ge_reverse():
    ad.set_mode('reverse')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data + 1)
    y = Variable(data)
    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert (x >= y).all(), 'greater-or-equal reverse failed'
def test_neq_forward():
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)
    y = Variable(data + 1)

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert (x != y).all(), 'inequality forward failed'
def loss(params):
    var = Variable(params)
    x, y = var[0], var[1]
    l = (x + 5)**2 + (y + 3)**2

    l.compute_gradients()

    return (l.data, l.gradient)
def loss(params):
    ad.reset_graph()
    var = Variable(params)
    x = var[0]
    y = var[1]
    l = (x + 5)**2 + (y + 3)**2

    l.compute_gradients()
    return (l.data, l.gradient)
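

# The loss helpers above return a (value, gradient) pair, which is exactly
# the shape scipy.optimize.minimize accepts when jac=True. A hedged usage
# sketch (scipy is an assumption here; it is not used elsewhere in this file):
def fit_loss_with_scipy():
    from scipy.optimize import minimize
    result = minimize(loss, x0=np.zeros(2), jac=True)
    return result.x  # converges towards the minimum at [-5, -3]
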
def test_neg_forward():
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    y_true = Variable(-x.data, -x.gradient)

    y_block = -x

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(y_true.data, y_block.data).all(), 'negation data failed'

    # =============================================================================
    #   assert gradient forward pass
    # =============================================================================
    assert np.equal(y_true.gradient, y_block.gradient).all(), 'negation gradient failed'
def test_eq_reverse():
    ad.set_mode('reverse')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)
    y = x

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert (x == y).all(), 'equality reverse failed'
def test_eq_forward():
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)
    y = x

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert (x == y).all(), 'equality forward failed'
def test_sin_reverse():
    ad.set_mode('reverse')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    sin_block = sin()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = sin_block(x)
    # =============================================================================
    #   Compute gradient backwards
    # =============================================================================
    y_block.compute_gradients()

    # =============================================================================
    #   define expected output
    # =============================================================================
    data_true = np.sin(data)
    gradient_true = np.diag(np.cos(data))

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(), \
        'wrong sin data pass. expected {}, given {}'.format(
            data_true, y_block.data)

    # =============================================================================
    #   assert gradient reverse pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(), \
        'wrong sin gradient reverse pass. expected {}, given {}'.format(
            gradient_true, y_block.gradient)
def test_log_diffBase_reverse():
    ad.set_mode('reverse')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    base = np.random.randint(2, 5) + np.random.random()
    log_block = log(base=base)

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = log_block(x)
    y_block.compute_gradients()

    # =============================================================================
    #   define expected output
    # =============================================================================
    data_true = np.log(data) / np.log(base)
    gradient_true = np.diag(1 / (data * np.log(base)))

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(), \
        'wrong log data pass. expected {}, given {}'.format(
            data_true, y_block.data)

    # =============================================================================
    #   assert gradient reverse pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(), \
        'wrong log gradient reverse pass. expected {}, given {}'.format(
            gradient_true, y_block.gradient)
    ad.set_mode('forward')
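

# data_true and gradient_true above follow from the change-of-base rule
# log_b(x) = ln(x) / ln(b), hence d/dx log_b(x) = 1 / (x * ln(b)). A
# standalone finite-difference check of that derivative (illustrative only):
def check_log_base_derivative(eps=1e-7):
    x = np.random.random(5) + 0.5  # keep safely away from 0
    base = 3.0
    analytic = 1 / (x * np.log(base))
    numeric = (np.log(x + eps) - np.log(x - eps)) / (2 * eps * np.log(base))
    assert np.allclose(analytic, numeric, atol=1e-5)
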
def test_logistic_reverse():
    ad.set_mode('reverse')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    logistic_block = logistic()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = logistic_block(x)
    y_block.compute_gradients()

    # =============================================================================
    #   define expected output
    # =============================================================================
    data_true = 1 / (1 + np.exp(-data))
    gradient_true = np.diag(np.exp(data) / (1 + np.exp(data))**2)

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(), \
        'wrong logistic data pass. expected {}, given {}'.format(
            data_true, y_block.data)

    # =============================================================================
    #   assert gradient reverse pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(), \
        'wrong logistic gradient reverse pass. expected {}, given {}'.format(
            gradient_true, y_block.gradient)
    ad.set_mode('forward')
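

# Why gradient_true above is exp(x) / (1 + exp(x))**2: it is the familiar
# sigmoid identity s'(x) = s(x) * (1 - s(x)) written in another form. A
# standalone numpy check (illustrative only):
def check_logistic_derivative():
    x = np.random.random(5)
    s = 1 / (1 + np.exp(-x))
    assert np.allclose(np.exp(x) / (1 + np.exp(x))**2, s * (1 - s))
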
def test_tanh_reverse():
    ad.set_mode('reverse')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    tanh_block = tanh()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = tanh_block(x)
    y_block.compute_gradients()

    # =============================================================================
    #   define expected output
    # =============================================================================
    data_true = np.tanh(data)
    gradient_true = np.diag(1 - np.tanh(data)**2)

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(), \
        'wrong tanh data pass. expected {}, given {}'.format(
            data_true, y_block.data)

    # =============================================================================
    #   assert gradient reverse pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(), \
        'wrong tanh gradient reverse pass. expected {}, given {}'.format(
            gradient_true, y_block.gradient)
    ad.set_mode('forward')
def test_comp_forward():
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    sin_block = sin()
    exp_block = exp()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = sin_block(exp_block(x))
    y_block.compute_gradients()
    # =============================================================================
    #   define expected output
    # =============================================================================
    data_true = np.sin(np.exp(data))
    gradient_true = np.exp(data) * np.cos(np.exp(data)) * np.identity(5)

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(), \
        'wrong composition data pass. expected {}, given {}'.format(
            data_true, y_block.data)

    # =============================================================================
    #   assert gradient forward pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(), \
        'wrong composition gradient forward pass. expected {}, given {}'.format(
            gradient_true, y_block.gradient)
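

# The expected jacobian above is the chain rule: d/dx sin(exp(x)) =
# cos(exp(x)) * exp(x). A standalone finite-difference sanity check of
# that formula (illustrative only, plain numpy):
def check_composition_jacobian(eps=1e-6):
    x = np.random.random(5)
    analytic = np.exp(x) * np.cos(np.exp(x))
    numeric = (np.sin(np.exp(x + eps)) - np.sin(np.exp(x - eps))) / (2 * eps)
    assert np.allclose(analytic, numeric, atol=1e-5)
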
def test_cosh_forward():
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    cosh_block = cosh()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = cosh_block(x)
    y_block.compute_gradients()
    # =============================================================================
    #   define expected output
    # =============================================================================
    data_true = np.cosh(data)
    gradient_true = np.diag(np.sinh(data))

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(), \
        'wrong cosh data pass. expected {}, given {}'.format(
            data_true, y_block.data)

    # =============================================================================
    #   assert gradient forward pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(), \
        'wrong cosh gradient forward pass. expected {}, given {}'.format(
            gradient_true, y_block.gradient)
def test_arctan_forward():
    ad.set_mode('forward')
    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    arctan_block = arctan()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = arctan_block(x)
    y_block.compute_gradients()
    # =============================================================================
    #   define expected output
    # =============================================================================
    data_true = np.arctan(data)
    gradient_true = np.diag(1 / (1 + data**2))

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(), \
        'wrong arctan data pass. expected {}, given {}'.format(
            data_true, y_block.data)

    # =============================================================================
    #   assert gradient forward pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(), \
        'wrong arctan gradient forward pass. expected {}, given {}'.format(
            gradient_true, y_block.gradient)
def test_multiple_forward():
    """
    assert that the package works well when we use it repetively
    """

    ad.set_mode('forward')

    # =============================================================================
    #   define the input variable
    # =============================================================================
    data = np.random.random(5)
    x = Variable(data)

    # =============================================================================
    #   define custom block
    # =============================================================================
    sin_block = sin()
    exp_block = exp()

    # =============================================================================
    #   compute output of custom block
    # =============================================================================
    y_block = sin_block(exp_block(x))
    y_block.compute_gradients()
    # =============================================================================
    #   define expected output
    # =============================================================================
    data_true = np.sin(np.exp(data))
    gradient_true = np.exp(data) * np.cos(np.exp(data)) * np.identity(5)

    # =============================================================================
    #   assert data pass
    # =============================================================================
    assert np.equal(data_true, y_block.data).all(), \
        'wrong composition data pass. expected {}, given {}'.format(
            data_true, y_block.data)

    # =============================================================================
    #   assert gradient forward pass
    # =============================================================================
    assert np.equal(gradient_true, y_block.gradient).all(), \
        'wrong composition gradient forward pass. expected {}, given {}'.format(
            gradient_true, y_block.gradient)

    # =============================================================================
    #     assert multiple gradient computes work
    # =============================================================================
    for _ in range(5):
        y_block.compute_gradients()
        assert np.equal(data_true, y_block.data).all(), \
            'wrong composition data pass. expected {}, given {}'.format(
                data_true, y_block.data)
        assert np.equal(gradient_true, y_block.gradient).all(), \
            'wrong composition gradient pass. expected {}, given {}'.format(
                gradient_true, y_block.gradient)

    # =============================================================================
    #     assert multiple passes work
    # =============================================================================
    for _ in range(5):
        y_block = sin_block(exp_block(x))
        y_block.compute_gradients()
        assert np.equal(data_true, y_block.data).all(), \
            'wrong composition data pass. expected {}, given {}'.format(
                data_true, y_block.data)
        assert np.equal(gradient_true, y_block.gradient).all(), \
            'wrong composition gradient pass. expected {}, given {}'.format(
                gradient_true, y_block.gradient)


    # =============================================================================
    #   assert multiple definitions work
    # =============================================================================
    for _ in range(5):

        data = np.random.random(5)
        x = Variable(data)

        sin_block = sin()
        exp_block = exp()

        y_block = sin_block(exp_block(x))
        y_block.compute_gradients()

        data_true = np.sin(np.exp(data))
        gradient_true = np.exp(data) * np.cos(np.exp(data)) * np.identity(5)

        assert np.equal(data_true, y_block.data).all(), \
            'wrong composition data pass. expected {}, given {}'.format(
                data_true, y_block.data)
        assert np.equal(gradient_true, y_block.gradient).all(), \
            'wrong composition gradient pass. expected {}, given {}'.format(
                gradient_true, y_block.gradient)
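

# Note: the test_* functions in this file follow pytest conventions (plain
# asserts, no test classes), so, assuming pytest is installed, they can be
# collected and run with e.g. `python -m pytest <this_file>.py -v`
# (<this_file> is a placeholder for the actual module name).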