Example #1
0
def removed_rss(Model, index, Y):
    """RSS of the least-squares refit after dropping basis function `index`.

    Returns (rss, beta); a sentinel (999999999., None) signals that too
    few basis functions remain to attempt a fit.
    """
    columns = model2matix(Model)
    del columns[index]
    design = np.transpose(np.array(columns))
    # Degenerate design after the deletion -- return the sentinel error.
    if len(design) <= 1:
        return 999999999., None
    coeffs = leastsquare.ls(design, Y)
    fitted = leastsquare.predict(design, coeffs)
    return np.sum((Y - fitted) ** 2), coeffs
Example #2
0
def forward(CXT, CTX, Y):
    """Greedy forward selection of product basis functions.

    Starting from the constant basis h0 = 1, repeatedly tries to extend
    every basis function in the model with every (direction p, knot i)
    reflected pair drawn from CXT/CTX, keeping the pair that minimizes
    the training RSS. Stops when the model reaches 100 terms, the error
    drops below 100., or no candidate yields a valid fit.
    Returns the list of H basis objects.
    """
    N = len(Y)
    P = len(CXT)
    # Constant basis function: h0(x) = 1 for every sample.
    h0 = H(np.ones(N).tolist())
    Model = [h0]
    while True:
        min_error = 999999999.
        best_h_1 = None
        best_h_2 = None
        best_i = None
        best_p = None
        best_model = None
        # Scan every (existing basis, direction, knot) candidate pair.
        for m in Model:
            model = np.array(m.h)
            for p in range(P):
                for i in range(N):
                    X = model2matix(Model)
                    x_t = CXT[p, i]
                    t_x = CTX[p, i]
                    new_h_1 = x_t * model
                    new_h_2 = t_x * model
                    X.append(new_h_1.tolist())
                    X.append(new_h_2.tolist())
                    X = np.transpose(np.array(X))
                    beta = leastsquare.ls(X, Y)
                    # BUG FIX: 'beta == None' broadcasts element-wise on a
                    # numpy array; use an identity test instead.
                    if beta is None:
                        continue
                    error = leastsquare.RSS(X, Y, beta)
                    if error < min_error:
                        print "error:", error
                        min_error = error
                        best_h_1 = new_h_1
                        best_h_2 = new_h_2
                        best_i = i
                        best_p = p
                        best_model = m
        # BUG FIX: the original 'continue' here re-ran an identical scan
        # with no state change, looping forever when every candidate fit
        # failed. Stop the search instead.
        if best_model is None:
            break
        directions = copy.deepcopy(best_model.directions)
        indexes = copy.deepcopy(best_model.t_idxes)
        directions.append(best_p)
        indexes.append(best_i)
        print "d:", directions
        print "ti:", indexes
        # Add both members of the reflected pair to the model.
        Model.append(H(best_h_1.tolist(), directions, indexes, True))
        Model.append(H(best_h_2.tolist(), directions, indexes, False))
        # Stop once the model is large enough or the fit is good enough.
        if len(Model) >= 100 or min_error < 100.:
            break
    return Model
Example #3
0
def draw_polynomial_pwv(train_in, train_out):
    p = 3
    X = polynomial(train_in)
    XT = np.transpose(X)
    
    beta = ls.ls(X, train_out)
    
    Yhat = ls.predict(X, beta)
    thegsq = ls.thegama2(Yhat, train_out, p)
    beta_var = np.linalg.inv(XT.dot(X))*thegsq
    
    print beta_var
    
    PWV = []
    for i in range(len(X)):
        PWV.append(np.transpose(X[i]).dot(beta_var).dot(X[i]))
        
    #print PWV
    
    plt.plot(train_in, PWV)
Example #4
0
def draw_cubic_spline_pwv(train_in, train_out):
    p = 5
    X = cubic(train_in)
    #print X.shape
    XT = np.transpose(X)
    #print X
    
    beta = ls.ls(X, train_out)
    
    Yhat = ls.predict(X, beta)
    thegsq = ls.thegama2(Yhat, train_out, p)
    beta_var = np.linalg.inv(XT.dot(X))*thegsq
    
    print beta_var
    
    PWV = []
    for i in range(len(X)):
        PWV.append(np.transpose(X[i]).dot(beta_var).dot(X[i]))
        
    #print PWV
    
    plt.plot(train_in, PWV)
Example #5
0
def draw_natural_spline_pwv(train_in, train_out):
    p = 5
    knots = np.arange(0.1, 0.9, 0.16).tolist()
    knots.append(0.9)
    X = nscookdata(train_in, 6, knots)
    #print H
    #print H.shape
    XT = np.transpose(X)
    
    beta = ls.ls(X, train_out)
    
    Yhat = ls.predict(X, beta)
    thegsq = ls.thegama2(Yhat, train_out, p)
    beta_var = np.linalg.inv(XT.dot(X))*thegsq
    
    print beta_var
    
    PWV = []
    for i in range(len(X)):
        PWV.append(np.transpose(X[i]).dot(beta_var).dot(X[i]))
        
    #print PWV
    
    plt.plot(train_in, PWV)
Example #6
0
                    product *= x_t
                else:
                    product *= t_x
                h.append(product)

if __name__ == '__main__':
    # Load the ozone dataset and split it into train/test partitions.
    data = ozone_data.load()
    data_train, data_test = ozone_data.traintest(data)
    X_train, Y_train = ozone_data.cook(data_train)
    X_test, Y_test = ozone_data.cook(data_test)
    # Candidate basis matrices for the forward pass -- presumably the
    # reflected hinge pairs (x - t) and (t - x); confirm in create_basis_matrix.
    CXT, CTX = create_basis_matrix(X_train)
    # Greedy forward selection of basis functions on the training data.
    Model = forward(CXT, CTX, np.array(Y_train))
    #Model, beta = backward(Model, Y_train)
    # Refit least squares on the selected model and report the training RSS.
    X = model2matix(Model)
    X = np.transpose(np.array(X))
    beta = leastsquare.ls(X,Y_train)
    print "last error:", leastsquare.RSS(X, Y_train, beta)
    print beta
    fortest(X_train, Model)
    # NOTE(review): the test-set evaluation below is disabled; re-enable
    # once predict() for the selected model is trusted.
    #err = 0.
    #for i in range(len(Y_test)):
    #    res = predict(beta, Model, X_test[i], X_train)
    #    print res, Y_test[i]
    #    err += (res-Y_test[i])**2
    #print "test error:", err
    
    #print "**********************************"
    #ls_beta = leastsquare.ls(X_train, Y_train)
    #print ls_beta
    #res = leastsquare.predict(X_test, ls_beta)
    #for l in range(len(Y_test)):