Code example #1
def plt_logistic_squared_error(X, y):
    """ plots logistic squared error for demonstration """
    wx, by = np.meshgrid(np.linspace(-6, 12, 50), np.linspace(10, -20, 40))
    points = np.c_[wx.ravel(), by.ravel()]
    cost = np.zeros(points.shape[0])

    for i in range(points.shape[0]):
        w, b = points[i]
        cost[i] = compute_cost_logistic_sq_err(X.reshape(-1, 1), y, w, b)
    cost = cost.reshape(wx.shape)

    fig = plt.figure()
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False
    ax = fig.add_subplot(1, 1, 1, projection='3d')
    ax.plot_surface(
        wx,
        by,
        cost,
        alpha=0.6,
        cmap=cm.jet,
    )

    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel("Cost", rotation=90, fontsize=16)
    ax.set_title('"Logistic" Squared Error Cost vs (w, b)')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
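
A minimal usage sketch for the function above, assuming it lives in the lab's utility module alongside compute_cost_logistic_sq_err and the usual imports (numpy as np, matplotlib.pyplot as plt, matplotlib.cm as cm); the training set here is illustrative only.

import numpy as np
import matplotlib.pyplot as plt

x_train = np.array([0., 1, 2, 3, 4, 5])   # 1-D feature; reshaped inside the function
y_train = np.array([0, 0, 0, 1, 1, 1])    # binary targets
plt_logistic_squared_error(x_train, y_train)
plt.show()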
Code example #2
def plt_logistic_cost(X, y):
    """ plots logistic cost """
    wx, by = np.meshgrid(np.linspace(-6, 12, 50), np.linspace(0, -20, 40))
    points = np.c_[wx.ravel(), by.ravel()]
    cost = np.zeros(points.shape[0], dtype=np.longdouble)

    for i in range(points.shape[0]):
        w, b = points[i]
        cost[i] = compute_cost_matrix(X.reshape(-1, 1),
                                      y,
                                      w,
                                      b,
                                      logistic=True,
                                      safe=True)
    cost = cost.reshape(wx.shape)

    fig = plt.figure(figsize=(9, 5))
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False
    ax = fig.add_subplot(1, 2, 1, projection='3d')
    ax.plot_surface(
        wx,
        by,
        cost,
        alpha=0.6,
        cmap=cm.jet,
    )

    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel("Cost", rotation=90, fontsize=16)
    ax.set_title('Logistic Cost vs (w, b)')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))

    ax = fig.add_subplot(1, 2, 2, projection='3d')

    ax.plot_surface(
        wx,
        by,
        np.log(cost),
        alpha=0.6,
        cmap=cm.jet,
    )

    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel('\nlog(Cost)', fontsize=16)
    ax.set_title('log(Logistic Cost) vs (w, b)')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))

    plt.show()
    return cost
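
Usage follows the same pattern as the sketch after code example #1; this function additionally returns the cost grid, which could be reused for contour plots. compute_cost_matrix is assumed to come from the same utility module.

cost_grid = plt_logistic_cost(x_train, y_train)   # draws both panels, returns the cost grid
print(cost_grid.shape)                            # (40, 50) for the meshgrid defined above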
Code example #3
    def linear_regression(self):
        self.ax[0].clear()
        self.fig.canvas.draw()

        # create and fit the model using our X_mapped feature set.
        self.X_mapped, _ = map_one_feature(self.X, self.degree)
        self.X_mapped_scaled, self.X_mu, self.X_sigma = zscore_normalize_features(
            self.X_mapped)

        #linear_model = LinearRegression()
        # note: Ridge's normalize= parameter was deprecated in scikit-learn 1.0
        # and removed in 1.2 (see the pipeline sketch after this example)
        linear_model = Ridge(alpha=self.lambda_,
                             normalize=True,
                             max_iter=10000)
        linear_model.fit(self.X_mapped_scaled, self.y)
        self.w = linear_model.coef_.reshape(-1, )
        self.b = linear_model.intercept_
        x = np.linspace(
            *self.xlim,
            30)  # plot the line independent of the data, which gets disordered
        xm, _ = map_one_feature(x, self.degree)
        xms = (xm - self.X_mu) / self.X_sigma
        y_pred = linear_model.predict(xms)

        #self.fig.canvas.draw()
        self.linear_data(redraw=True)
        self.ax0yfit = self.ax[0].plot(x, y_pred, color="blue", label="y_fit")
        self.ax0ledgend = self.ax[0].legend(loc='lower right')
        self.fig.canvas.draw()
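
As noted above, Ridge(normalize=True) was removed in scikit-learn 1.2, so this method only runs as-is on older releases. A rough replacement sketch for newer versions wraps a scaler and the regressor in a pipeline; this is an assumption about intent rather than an exact reproduction, since normalize=True divided features by their l2 norm rather than their standard deviation. X_mapped and y are placeholder names for the mapped features and targets.

# Hedged alternative for scikit-learn >= 1.2 (normalize= removed).
from sklearn.linear_model import Ridge
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

linear_model = make_pipeline(StandardScaler(), Ridge(alpha=1.0, max_iter=10000))
linear_model.fit(X_mapped, y)            # placeholder mapped features and targets
y_pred = linear_model.predict(X_mapped)  # scaling is applied inside the pipeline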
Code example #4
def soup_bowl():
    """ creates 3D quadratic error surface """
    #Create figure and plot with a 3D projection
    fig = plt.figure(figsize=(4, 4))
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    #Plot configuration
    ax = fig.add_subplot(111, projection='3d')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_rotate_label(False)
    ax.view_init(15, -120)

    # Useful linspaces to give values to the parameters w and b
    w = np.linspace(-20, 20, 100)
    b = np.linspace(-20, 20, 100)

    #Get the z value for a bowl-shaped cost function
    z = np.zeros((len(w), len(b)))
    j = 0
    for x in w:
        i = 0
        for y in b:
            z[i, j] = x**2 + y**2
            i += 1
        j += 1

    #Meshgrid used for plotting 3D functions
    W, B = np.meshgrid(w, b)

    #Create the 3D surface plot of the bowl-shaped cost function
    ax.plot_surface(W, B, z, cmap="Spectral_r", alpha=0.7, antialiased=False)
    ax.plot_wireframe(W, B, z, color='k', alpha=0.1)
    ax.set_xlabel("$w$")
    ax.set_ylabel("$b$")
    ax.set_zlabel("Cost", rotation=90)
    ax.set_title("Squared Error Cost used in Linear Regression")

    plt.show()
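
The nested loops that fill z can also be written in vectorized form; a sketch that could replace them inside soup_bowl, using the same w and b linspaces:

# Vectorized construction of the bowl surface.
W, B = np.meshgrid(w, b)   # W[i, j] = w[j], B[i, j] = b[i]
z = W**2 + B**2            # matches z[i, j] = w[j]**2 + b[i]**2 from the loops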
Code example #5
def plt_prob(ax, w_out, b_out):
    """ plots a decision boundary but include shading to indicate the probability """
    #setup useful ranges and common linspaces
    x0_space = np.linspace(0, 4, 100)
    x1_space = np.linspace(0, 4, 100)

    # get probability for x0,x1 ranges
    tmp_x0, tmp_x1 = np.meshgrid(x0_space, x1_space)
    z = np.zeros_like(tmp_x0)
    for i in range(tmp_x0.shape[0]):
        for j in range(tmp_x1.shape[1]):
            z[i, j] = sigmoid(
                np.dot(w_out, np.array([tmp_x0[i, j], tmp_x1[i, j]])) + b_out)

    cmap = plt.get_cmap('Blues')
    new_cmap = truncate_colormap(cmap, 0.0, 0.5)
    pcm = ax.pcolormesh(tmp_x0,
                        tmp_x1,
                        z,
                        norm=cm.colors.Normalize(vmin=0, vmax=1),
                        cmap=new_cmap,
                        shading='nearest',
                        alpha=0.9)
    ax.figure.colorbar(pcm, ax=ax)
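
A minimal usage sketch, assuming sigmoid and truncate_colormap (code example #9 below) are available from the same utility module; the weights here are illustrative, not fitted values.

fig, ax = plt.subplots(figsize=(5, 4))
w_out = np.array([2.0, 2.0])    # example weights for the two features
b_out = -6.0                    # example bias
plt_prob(ax, w_out, b_out)      # shades P(y=1) over the x0, x1 grid
ax.set_xlabel('$x_0$')
ax.set_ylabel('$x_1$')
plt.show()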
Code example #6
    def draw_logistic_lines(self, firsttime=False):
        if not firsttime:
            self.aline[0].remove()
            self.bline[0].remove()
            self.alegend.remove()

        xlim = self.ax.get_xlim()
        x_hat = np.linspace(*xlim, 30)
        y_hat = sigmoid(np.dot(x_hat.reshape(-1, 1), self.w) + self.b)
        self.aline = self.ax.plot(x_hat,
                                  y_hat,
                                  color=dlc["dlblue"],
                                  label="y = sigmoid(z)")
        f_wb = np.dot(x_hat.reshape(-1, 1), self.w) + self.b
        self.bline = self.ax.plot(
            x_hat,
            f_wb,
            color=dlc["dlorange"],
            lw=1,
            label=f"z = {np.squeeze(self.w):0.2f}x+({self.b:0.2f})")
        self.alegend = self.ax.legend(loc='upper left')
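
Outside the widget class, the two curves this method draws can be reproduced with a short standalone sketch (scalar w and b chosen only for illustration, sigmoid assumed from the lab utilities):

# Standalone version of what draw_logistic_lines plots: the linear model
# z = w*x + b and its sigmoid y = sigmoid(z) over a fixed x range.
w, b = 2.0, -4.0
x_hat = np.linspace(0, 4, 30)
z = w * x_hat + b
fig, ax = plt.subplots()
ax.plot(x_hat, sigmoid(z), label="y = sigmoid(z)")
ax.plot(x_hat, z, lw=1, label=f"z = {w:0.2f}x+({b:0.2f})")
ax.legend(loc='upper left')
plt.show()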
Code example #7
    def calc_logistic(self, event):
        if self.bthresh.get_status()[0]:
            self.remove_thresh()
        for it in [1, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096]:
            self.w, self.b, _ = gradient_descent(self.x.reshape(-1, 1),
                                                 self.y.reshape(-1, 1),
                                                 self.w.reshape(-1, 1),
                                                 self.b,
                                                 0.1,
                                                 it,
                                                 logistic=True,
                                                 lambda_=0,
                                                 verbose=False)
            self.aline[0].remove()
            self.bline[0].remove()
            self.alegend.remove()
            xlim = self.ax[0].get_xlim()
            x_hat = np.linspace(*xlim, 30)
            y_hat = sigmoid(np.matmul(x_hat.reshape(-1, 1), self.w) + self.b)
            self.aline = self.ax[0].plot(x_hat,
                                         y_hat,
                                         color=dlblue,
                                         label="y = sigmoid(z)")
            f_wb = np.matmul(x_hat.reshape(-1, 1), self.w) + self.b
            self.bline = self.ax[0].plot(
                x_hat,
                f_wb,
                color=dlorange,
                lw=1,
                label=f"z = {np.squeeze(self.w):0.2f}x+({self.b:0.2f})")
            self.alegend = self.ax[0].legend(loc='lower right')
            time.sleep(0.3)
            self.fig.canvas.draw()
        if self.bthresh.get_status()[0]:
            self.draw_thresh()
            self.fig.canvas.draw()
Code example #8
def plt_two_logistic_loss_curves():
    """ plots the logistic loss """
    fig, ax = plt.subplots(1, 2, figsize=(6, 3), sharey=True)
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False
    x = np.linspace(0.01, 1 - 0.01, 20)
    ax[0].plot(x, -np.log(x))
    #ax[0].set_title("y = 1")
    ax[0].text(0.5, 4.0, "y = 1", fontsize=12)
    ax[0].set_ylabel("loss")
    ax[0].set_xlabel(r"$f_{w,b}(x)$")
    ax[1].plot(x, -np.log(1 - x))
    #ax[1].set_title("y = 0")
    ax[1].text(0.5, 4.0, "y = 0", fontsize=12)
    ax[1].set_xlabel(r"$f_{w,b}(x)$")
    ax[0].annotate(
        "prediction \nmatches \ntarget ",
        xy=[1, 0],
        xycoords='data',
        xytext=[-10, 30],
        textcoords='offset points',
        ha="right",
        va="center",
        arrowprops={
            'arrowstyle': '->',
            'color': dlorange,
            'lw': 3
        },
    )
    ax[0].annotate(
        "loss increases as prediction\n differs from target",
        xy=[0.1, -np.log(0.1)],
        xycoords='data',
        xytext=[10, 30],
        textcoords='offset points',
        ha="left",
        va="center",
        arrowprops={
            'arrowstyle': '->',
            'color': dlorange,
            'lw': 3
        },
    )
    ax[1].annotate(
        "prediction \nmatches \ntarget ",
        xy=[0, 0],
        xycoords='data',
        xytext=[10, 30],
        textcoords='offset points',
        ha="left",
        va="center",
        arrowprops={
            'arrowstyle': '->',
            'color': dlorange,
            'lw': 3
        },
    )
    ax[1].annotate(
        "loss increases as prediction\n differs from target",
        xy=[0.9, -np.log(1 - 0.9)],
        xycoords='data',
        xytext=[-10, 30],
        textcoords='offset points',
        ha="right",
        va="center",
        arrowprops={
            'arrowstyle': '->',
            'color': dlorange,
            'lw': 3
        },
    )
    plt.suptitle("Loss Curves for Two Categorical Target Values", fontsize=12)
    plt.tight_layout()
    plt.show()
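
The two panels are the two branches of the per-example logistic (cross-entropy) loss; a minimal sketch of that loss, matching the -log curves plotted above:

def logistic_loss(f, y):
    """ per-example logistic loss: -log(f) when y = 1, -log(1 - f) when y = 0 """
    return -y * np.log(f) - (1 - y) * np.log(1 - f)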
Code example #9
def truncate_colormap(cmap, minval=0.0, maxval=1.0, n=100):
    """ truncates color map """
    new_cmap = colors.LinearSegmentedColormap.from_list(
        'trunc({n},{a:.2f},{b:.2f})'.format(n=cmap.name, a=minval, b=maxval),
        cmap(np.linspace(minval, maxval, n)))
    return new_cmap
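
As used in plt_prob (code example #5), a truncated map keeps only the light half of 'Blues' so the probability shading does not overwhelm plotted data points:

light_blues = truncate_colormap(plt.get_cmap('Blues'), 0.0, 0.5)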
Code example #10
    def __init__(self, axc, axs, x_train, y_train, w_range, b_range, w, b):

        self.x_train = x_train
        self.y_train = y_train
        self.axc = axc
        self.axs = axs

        #setup useful ranges and common linspaces
        b_space = np.linspace(*b_range, 100)
        w_space = np.linspace(*w_range, 100)

        # get cost for w,b ranges for contour and 3D
        tmp_b, tmp_w = np.meshgrid(b_space, w_space)
        z = np.zeros_like(tmp_b)
        for i in range(tmp_w.shape[0]):
            for j in range(tmp_w.shape[1]):
                z[i, j] = compute_cost_matrix(x_train.reshape(-1, 1),
                                              y_train,
                                              tmp_w[i, j],
                                              tmp_b[i, j],
                                              logistic=True,
                                              lambda_=0,
                                              safe=True)
                if z[i, j] == 0:
                    z[i, j] = 1e-9

        ### plot contour ###
        CS = axc.contour(tmp_w,
                         tmp_b,
                         np.log(z),
                         levels=12,
                         linewidths=2,
                         alpha=0.7,
                         colors=dlcolors)
        axc.set_title('log(Cost(w,b))')
        axc.set_xlabel('w', fontsize=10)
        axc.set_ylabel('b', fontsize=10)
        axc.set_xlim(w_range)
        axc.set_ylim(b_range)
        self.update_contour_wb_lines(w, b, firsttime=True)
        axc.text(0.7,
                 0.05,
                 "Click to choose w,b",
                 bbox=dict(facecolor='white', ec='black'),
                 fontsize=10,
                 transform=axc.transAxes,
                 verticalalignment='center',
                 horizontalalignment='center')

        #Surface plot of the cost function J(w,b)
        axs.plot_surface(tmp_w,
                         tmp_b,
                         z,
                         cmap=cm.jet,
                         alpha=0.3,
                         antialiased=True)
        axs.plot_wireframe(tmp_w, tmp_b, z, color='k', alpha=0.1)
        axs.set_xlabel("$w$")
        axs.set_ylabel("$b$")
        axs.zaxis.set_rotate_label(False)
        axs.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
        axs.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
        axs.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
        axs.set_zlabel("J(w, b)", rotation=90)
        axs.view_init(30, -120)

        axs.autoscale(enable=False)
        axc.autoscale(enable=False)

        self.path = path(
            self.w, self.b,
            self.axc)  # initialize an empty path, avoids an existence check