Example #1
0
 def graph(self):
     """
     Graph the fitted model against the stored training data.

     Reads self.theta, self.x and self.y, so fit() must have been run
     first (presumably fit() is expected to store these — TODO confirm).
     :return: None
     """
     # BUG FIX: `func` was a free (undefined) name in this method; the
     # model function lives on the instance as self.func.
     model = self.func
     graph_function_and_data(lambda x: model(*self.theta, x),
                             x_data=self.x,
                             y_data=self.y)
Example #2
0
    def fit(self,
            x,
            y,
            init_theta=None,
            lr=0.001,
            steps=1000,
            graph=False,
            dx=0.0001):
        """
        Fit the model parameters by gradient descent on mean squared error.

        :param x: X data (iterable of numbers)
        :param y: Y data, same length as x
        :param init_theta: Initial theta values; None means all zeros
        :param lr: Learning rate
        :param steps: Number of gradient-descent steps
        :param graph: Set True to graph the fitted function and the data
        :param dx: Finite-difference step size (not used in this method;
                   presumably consumed by the optimizer — TODO confirm)
        :return: The learnt theta values
        """
        self._check_length(x, y)
        self.x = list(x)
        # BUG FIX: was `self.y = list(x)`, which silently replaced the
        # Y data with a copy of the X data.
        self.y = list(y)

        func = self.func

        if init_theta is None:
            init_theta = [0 for _ in range(self.k)]

        # Fail fast if the supplied theta arity does not match the model.
        try:
            _ = self.func(*init_theta, 0)
        except TypeError:
            raise Error("Initial Theta does not match with function")

        def _loss(*args):
            # Mean squared error of the model over the training data.
            loss = 0
            for xi, yi in zip(self.x, self.y):
                loss += (func(*args, xi) - yi) ** 2
            return loss * (1 / len(self.x))

        optim = GradientDescentOptimizer(_loss, num_theta=self.k)
        theta = optim.optimize(learning_rate=lr,
                               steps=steps,
                               init_theta=init_theta)

        # Store the result so graph() (which reads self.theta) works
        # after fitting.
        self.theta = theta

        if graph:
            graph_function_and_data(lambda x: func(*theta, x),
                                    x_data=self.x,
                                    y_data=self.y)
        return theta
Example #3
0
# Data
# 11 sample points; y rises and falls symmetrically about the midpoint
# (a parabola-like arc), so the quadratic model func() below is a
# natural fit.
x1 = [0.00, 4.48, 8.96, 13.44, 17.92, 22.41, 26.89, 31.37, 35.85, 40.33, 44.81]
y1 = [0.00, 2.89, 5.14, 6.74, 7.71, 8.03, 7.71, 6.74, 5.14, 2.89, 0.00]


# Cost to minimize: mean squared error of the model over the 11 data points
def cost_function(a, b, c):
    """Return the mean squared error of func(a, b, c, .) over (x1, y1)."""
    squared_error_total = sum((yi - func(a, b, c, xi)) ** 2
                              for xi, yi in zip(x1, y1))
    return (1 / 11) * squared_error_total


# Model to fit to the points: a quadratic in x
def func(a, b, c, x):
    """Evaluate the quadratic model a*x**2 + b*x + c at x."""
    quadratic_term = a * (x ** 2)
    linear_term = b * x
    return quadratic_term + linear_term + c


# Run the optimizer on the cost function and time the whole run.

start_time = time.time()
# Positional args presumably mean: (cost function, learning rate 7e-7,
# 50000 steps, initial theta [0, 0, 0], finite-difference dx 0.0001,
# 3 parameters) — TODO confirm against gradient_descent.optimize's
# signature.
theta = gradient_descent.optimize(cost_function, 7e-7, 50000, [0, 0, 0],
                                  0.0001, 3)
print("--- %s seconds ---" % (time.time() - start_time))

# Graph the function by using the theta learnt
graph_function_and_data(lambda x: func(*theta, x), x_data=x1, y_data=y1)