Code example #1
0
# Open and load the CSV data sets, timing the load.
# NOTE(review): time.clock() was deprecated since Python 3.3 and removed
# in 3.8; time.perf_counter() is the recommended replacement for
# measuring elapsed intervals.
time_load_start = time.perf_counter()
X_train, y_train = fipr.load_csv("train_file.csv", True)
X_test, y_test = fipr.load_csv("test_file.csv", True)
# Labels come back as column vectors; flatten them to 1-D arrays.
y_train = y_train.flatten()
y_test = y_test.flatten()
time_load_end = time.perf_counter()
print("Loading finished, loading time: %g seconds" %
      (time_load_end - time_load_start))

# Balanced ("even") test set, loaded and flattened the same way.
X_test_even, y_test_even = fipr.load_csv("test_file_even.csv", True)
y_test_even = y_test_even.flatten()

# scale features to encourage gradient descent convergence
X_train = fipr.scale_features(X_train, 0.0, 1.0)
X_test = fipr.scale_features(X_test, 0.0, 1.0)

X_test_even = fipr.scale_features(X_test_even, 0.0, 1.0)

# create the logistic regression classifier using the training data
# NOTE(review): alpha, lmbda and maxiter are presumably defined earlier
# in the file — confirm.
LRC = LogisticRegressionClassifier(alpha, lmbda, maxiter)
print("\nCreated a logistic regression classifier =", LRC)

# start counting time for training
time_train_start = time.perf_counter()

# fit the model to the loaded training data
print("Fitting the training data...\n")
LRC.fit(X_train, y_train)
Code example #2
0
def main():
    """Load the CSV data sets, train a small neural network on the
    training patterns, and evaluate it, reporting load/train/test times.

    NOTE(review): time.clock() was removed in Python 3.8;
    time.perf_counter() is used here for interval timing instead.
    """
    # open and load csv files, timing the load
    time_load_start = time.perf_counter()
    X_train, y_train = fipr.load_csv("train_file.csv", True)
    X_test, y_test = fipr.load_csv("test_file.csv", True)
    # labels come back as column vectors; flatten them to 1-D
    y_train = y_train.flatten()
    y_test = y_test.flatten()
    time_load_end = time.perf_counter()
    print("Loading finished, loading time: %g seconds" %
          (time_load_end - time_load_start))

    # balanced ("even") test set, loaded and flattened the same way
    X_test_even, y_test_even = fipr.load_csv("test_file_even.csv", True)
    y_test_even = y_test_even.flatten()

    # scale features to encourage gradient descent convergence
    X_train = fipr.scale_features(X_train, 0.0, 1.0)
    X_test = fipr.scale_features(X_test, 0.0, 1.0)
    X_test_even = fipr.scale_features(X_test_even, 0.0, 1.0)

    # pair each sample with its label: [[features, label], ...]
    # (zip comprehensions replace the manual enumerate/append loops)
    Pattern_train = [[sample, label] for sample, label in zip(X_train, y_train)]
    Pattern_test = [[sample, label] for sample, label in zip(X_test, y_test)]
    Pattern_test_even = [[sample, label]
                         for sample, label in zip(X_test_even, y_test_even)]

    # create a network with two hundred inputs, four hidden, and one
    # output node (the original comment said "two hidden", but the code
    # builds four)
    n = NN(200, 4, 1)

    # start counting time for training
    time_train_start = time.perf_counter()

    # train it with the training patterns
    n.train(Pattern_train)

    # print training time
    time_train_end = time.perf_counter()
    print("Training finished, training time: %g seconds \n" %
          (time_train_end - time_train_start))

    # start counting time for testing
    time_test_start = time.perf_counter()

    # test it on the held-out set
    n.test(Pattern_test)

    # print testing time
    time_test_end = time.perf_counter()
    print("Testing finished, testing time: %g seconds  \n" %
          (time_test_end - time_test_start))

    # test on EVEN data set
    n.test(Pattern_test_even)