def test_sin_reverse():
    """Check the data and gradient of the sin block in reverse mode."""
    ad.set_mode('reverse')

    # define the input variable
    data = np.random.random(5)
    x = Variable(data)

    # define custom block
    sin_block = sin()

    # compute output of custom block
    y_block = sin_block(x)

    # compute gradient backwards
    y_block.compute_gradients()

    # expected output: elementwise sin, Jacobian is diag(cos(data))
    data_true = np.sin(data)
    gradient_true = np.diag(np.cos(data))

    # assert data pass
    assert np.equal(data_true, y_block.data).all(
    ), 'wrong sin data pass. expected {}, given{}'.format(
        data_true, y_block.data)

    # assert gradient reverse pass
    # (fixed: message previously said "forward pass" in this reverse-mode test)
    assert np.equal(gradient_true, y_block.gradient).all(
    ), 'wrong sin gradient reverse pass. expected {}, given{}'.format(
        gradient_true, y_block.gradient)
def test_comp_forward():
    """Check data and gradient of the composition sin(exp(x)) in forward mode."""
    ad.set_mode('forward')

    # input variable
    values = np.random.random(5)
    var = Variable(values)

    # custom blocks, composed as y = sin(exp(x))
    sin_op = sin()
    exp_op = exp()
    out = sin_op(exp_op(var))
    out.compute_gradients()

    # expected output: chain rule gives exp(x) * cos(exp(x)) on the diagonal
    expected_data = np.sin(np.exp(values))
    expected_grad = np.exp(values) * np.cos(np.exp(values)) * np.identity(5)

    # data pass
    assert np.array_equal(expected_data, out.data), \
        'wrong exp data pass. expected {}, given{}'.format(
            expected_data, out.data)

    # gradient forward pass
    assert np.array_equal(expected_grad, out.gradient), \
        'wrong exp gradient forward pass. expected {}, given{}'.format(
            expected_grad, out.gradient)
# Start from a clean computational graph at import time.
# NOTE(review): assumes c_graph is the module-level graph object defined
# elsewhere in this file — confirm.
c_graph.reset_graph()


def set_mode(new_mode):
    """Set the global automatic-differentiation mode.

    Parameters
    ----------
    new_mode : str
        Either 'forward' or 'reverse'. Switching to 'reverse' also resets
        the computational graph so previously recorded operations do not
        leak into the new session.
    """
    global mode, c_graph
    mode = new_mode
    if new_mode == 'reverse':
        reset_graph()


# =============================================================================
# shortcuts for better user interface
# =============================================================================
# Pre-built singleton instances of each operation block so users can call
# e.g. sin_(x) directly instead of instantiating sin() themselves.
sin_ = sin()
cos_ = cos()
tan_ = tan()
exp_ = exp()
log_ = log()
sqrt_ = sqrt()
sinh_ = sinh()
cosh_ = cosh()
tanh_ = tanh()
arcsin_ = arcsin()
arccos_ = arccos()
arctan_ = arctan()
add_ = add()
subtract_ = subtract()
multiply_ = multiply()
divide_ = divide()
def test_multiple_forward():
    """Check that the package stays correct under repeated use in forward
    mode: repeated gradient computations, repeated passes through the same
    blocks, and repeated fresh definitions of variables and blocks."""
    ad.set_mode('forward')

    def _check(expected_data, expected_grad, out):
        # shared assertion pair used by every scenario below
        assert np.equal(expected_data, out.data).all(
        ), 'wrong exp data pass. expected {}, given{}'.format(
            expected_data, out.data)
        assert np.equal(expected_grad, out.gradient).all(
        ), 'wrong exp gradient forward pass. expected {}, given{}'.format(
            expected_grad, out.gradient)

    # initial definition: y = sin(exp(x))
    values = np.random.random(5)
    var = Variable(values)
    sin_op = sin()
    exp_op = exp()
    out = sin_op(exp_op(var))
    out.compute_gradients()

    expected_data = np.sin(np.exp(values))
    expected_grad = np.exp(values) * np.cos(np.exp(values)) * np.identity(5)
    _check(expected_data, expected_grad, out)

    # assert multiple gradient computes work
    for _ in range(5):
        out.compute_gradients()
        _check(expected_data, expected_grad, out)

    # assert multiple passes work
    for _ in range(5):
        out = sin_op(exp_op(var))
        out.compute_gradients()
        _check(expected_data, expected_grad, out)

    # assert multiple definitions work
    for _ in range(5):
        values = np.random.random(5)
        var = Variable(values)
        sin_op = sin()
        exp_op = exp()
        out = sin_op(exp_op(var))
        out.compute_gradients()
        expected_data = np.sin(np.exp(values))
        expected_grad = np.exp(values) * np.cos(np.exp(values)) * np.identity(5)
        _check(expected_data, expected_grad, out)