# Example 1
def test_quadratic():
    """Minimise x^T A x + (sum(B x - b))^2 + 7 by gradient descent.

    The non-constant part of the cost can reach 0, so after convergence the
    cost should equal the constant offset 7 and the variable should land on
    the known minimiser [1, 1, 0.5]^T.
    """
    point = nf.initialized_var('x', np.array([[0.07], [0.4], [0.1]]))
    quad_matrix = nf.constant(np.array([[2, -1, -2], [-1, 1, 0], [-2, 0, 4]]))
    selector = nf.constant(np.array([[1, 0, 0], [0, 0, 0], [0, 0, 0]]))
    target = nf.placeholder(shape=(3, 1))

    # Quadratic form plus a squared linear penalty tying x[0] to the target.
    quad_term = point.T() @ quad_matrix @ point
    residual = selector @ point - target
    penalty = nf.mat_sum(residual) ** 2

    cost_function = nf.as_scalar(quad_term + penalty) + 7

    graph = Graph(cost_function)
    graph.placeholders = {
        target: np.array([1, 0, 0]).reshape(3, 1).astype('float32')
    }
    graph.initialize_variables()

    descent = gd.GradientDescentOptimizer(0.1)
    descent.prepare_and_check(graph)
    for _ in range(500):
        descent.run()

    np.testing.assert_allclose(graph.evaluate(), 7.)
    np.testing.assert_allclose(point.value, np.array([[1.], [1.], [.5]]))
# Example 2
    def train(self,
              x_train,
              y_train,
              num_iterations=2000,
              learning_rate=0.5,
              print_cost=True):
        """Fit the model on (x_train, y_train) with plain gradient descent.

        Runs ``num_iterations`` optimizer steps, sampling the cost once every
        ``ITERATIONS_UNIT`` iterations (and optionally printing it).

        Returns:
            list: the sampled cost values, in iteration order.
        """
        self.cost_graph.placeholders = {self.X: x_train, self.Y: y_train}
        self.cost_graph.initialize_variables()

        optimizer = gd.GradientDescentOptimizer(learning_rate)
        optimizer.prepare_and_check(self.cost_graph)

        sampled_costs = []
        for iteration in range(num_iterations):
            optimizer.run()

            # Only every ITERATIONS_UNIT-th step is recorded / reported.
            if iteration % ITERATIONS_UNIT != 0:
                continue
            sampled_costs.append(optimizer.cost)
            if print_cost:
                print(f"Cost after iteration {iteration}: {optimizer.cost}")

        return sampled_costs
# Example 3
        weights = f.var("W" + str(layer_num + 1), self.weight_initializer,
                        shape=(cnt_neurons, cnt_features))
        biases = f.var("b" + str(layer_num + 1), init.ZeroInitializer(),
                       shape=(cnt_neurons, 1))
        return f.fully_connected(features, weights, biases,
                                 first_layer=(layer_num == 0))

    def train(self, x_train, y_train, *,
              num_iterations=15_000,
              learning_rate=0.01,
              print_cost=True):
        """Train on (x_train, y_train) with gradient descent.

        Seeds the environment RNG for reproducibility, then runs the
        optimizer, sampling (and optionally printing) the cost every 1000
        iterations.

        Returns:
            list: the sampled cost values, in iteration order.
        """
        env.seed(3)
        self.cost_graph.placeholders = {self.X: x_train, self.Y: y_train}
        self.cost_graph.initialize_variables()

        optimizer = gd.GradientDescentOptimizer(learning_rate)
        optimizer.prepare_and_check(self.cost_graph)

        history = []
        # range(num_iterations + 1) so the very last step is also sampled.
        for step in range(num_iterations + 1):
            optimizer.run()

            if step % 1000 == 0:
                history.append(optimizer.cost)
                if print_cost:
                    print(f"Cost after iteration {step}: {optimizer.cost}")

        return history

    def predict(self, x_test):
        self.prediction_graph.placeholders = {self.X: x_test}