Example 1
def test_forward(self):
    x_val = np.random.random((3, 4))
    x = ad.variable(x_val)
    y = ad.square(x)
    actual = y.forward()
    # the forward pass of ad.square should match the element-wise square
    expect = x_val * x_val
    self.assertEqual(expect.shape, y.shape)
    self.assertTrue(np.allclose(expect, actual), (expect, actual))
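The contract being tested is simply that ad.square matches NumPy's element-wise square. A minimal NumPy-only cross-check of the same expectation (independent of the ad library; the array shape is just the one used in the test):

import numpy as np

x_val = np.random.random((3, 4))
expect = x_val * x_val                     # element-wise square
assert expect.shape == (3, 4)
assert np.allclose(expect, np.square(x_val))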
Example 2
def gen_linear_model(config: dict, verbose=False):
    """Generate a linear model.

    :param config: Configuration.
    :param verbose: Print loss and gradients if it is True.
    :return: Model, loss, placeholders and variables.
    """
    x = ad.placeholder(shape=(None, config['input_len']), name='X')
    y = ad.placeholder(shape=(None, ), name='Y')

    w1 = ad.variable(
        initializer=ad.inits.random_normal(),
        shape=(config['input_len'], config['hidden_dim']),
        name='W1',
    )
    b1 = ad.variable(
        initializer=ad.inits.zeros,
        shape=config['hidden_dim'],
        name='b1',
    )

    v = ad.acts.leaky_relu(ad.dot(x, w1) + b1)

    w2 = ad.variable(
        initializer=ad.inits.random_normal(),
        shape=(config['hidden_dim'], 2),
        name='W2',
    )
    b2 = ad.variable(
        initializer=ad.inits.zeros,
        shape=2,
        name='b2',
    )

    y_pred = ad.acts.softmax(ad.dot(v, w2) + b2)

    loss = ad.square(y - y_pred).mean()

    if verbose:
        print('Loss:', loss)

    return y_pred, loss, [x, y], [w1, b1, w2, b2]
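Despite its name, the function above wires a two-layer network: a leaky-ReLU hidden layer followed by a two-class softmax output, trained with a mean-squared-error loss. A minimal NumPy sketch of the same forward pass, assuming a hypothetical config with input_len=4 and hidden_dim=8 and a 0.01 leaky-ReLU slope (the ad placeholders, initializers and graph machinery are left out):

import numpy as np

config = {'input_len': 4, 'hidden_dim': 8}          # hypothetical configuration
x = np.random.random((3, config['input_len']))      # a small batch of inputs

w1 = np.random.normal(size=(config['input_len'], config['hidden_dim']))
b1 = np.zeros(config['hidden_dim'])
z = x @ w1 + b1
v = np.maximum(z, 0.01 * z)                         # leaky ReLU (slope assumed)

w2 = np.random.normal(size=(config['hidden_dim'], 2))
b2 = np.zeros(2)
logits = v @ w2 + b2
y_pred = np.exp(logits) / np.exp(logits).sum(axis=-1, keepdims=True)  # softmax

assert y_pred.shape == (3, 2)
assert np.allclose(y_pred.sum(axis=-1), 1.0)        # each row is a distribution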
Example 3
def gen_linear_model(config: dict, verbose=False):
    """Generate a linear model.

    :param config: Configuration.
    :param verbose: Print the loss if it is True.
    :return: Model, loss, placeholders and variables.
    """
    x = ad.placeholder(shape=(None, config['input_len']), name='X')
    y = ad.placeholder(shape=(None, ), name='Y')

    w = ad.variable(
        initializer=ad.inits.random_normal(),
        shape=config['input_len'],
        name='W',
    )
    b = ad.variable(0.0, name='b')

    y_pred = ad.dot(x, w) + b
    loss = ad.square(y - y_pred).mean()

    if verbose:
        print('Loss:', loss)

    return y_pred, loss, [x, y], [w, b]
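This variant is actually linear: a single weight vector, a scalar bias and a dot product, again with a mean-squared-error loss. A minimal NumPy sketch of the forward pass it builds, with a hypothetical input_len of 4:

import numpy as np

input_len = 4                               # hypothetical config['input_len']
x = np.random.random((3, input_len))        # batch of 3 samples
y = np.random.random(3)                     # targets

w = np.random.normal(size=input_len)
b = 0.0

y_pred = x @ w + b                          # shape (3,)
loss = np.mean(np.square(y - y_pred))       # scalar MSE
print('Loss:', loss)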
Example 4
def test_backward(self):
    x_val = np.random.random((3, 4))
    x = ad.variable(x_val)
    y = ad.square(x)
    # compare the analytic gradient of ad.square against finite differences
    self.numeric_gradient_check(y, {}, [x])
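numeric_gradient_check is a helper defined elsewhere in the test suite; the underlying idea is a finite-difference comparison. A minimal sketch of such a check for the square function, whose analytic gradient is 2x (the step size and tolerance here are assumptions):

import numpy as np

x_val = np.random.random((3, 4))
eps = 1e-6

analytic = 2.0 * x_val                      # d/dx x**2 = 2x
numeric = (np.square(x_val + eps) - np.square(x_val - eps)) / (2.0 * eps)

assert np.allclose(analytic, numeric, atol=1e-4)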
Example 5
def mean_square_error(y_true: ad.Operation,
                      y_pred: ad.Operation) -> ad.Operation:
    # note: sums the squared errors over the last axis rather than averaging them
    return ad.sum(ad.square(y_true - y_pred), axis=-1)
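Because the helper sums the squared errors over the last axis, it returns one value per sample rather than a scalar mean. A plain NumPy equivalent with hypothetical inputs:

import numpy as np

y_true = np.random.random((3, 2))
y_pred = np.random.random((3, 2))

per_sample = np.sum(np.square(y_true - y_pred), axis=-1)   # shape (3,)
print(per_sample)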