Code example #1
import math
import time

import numpy as np

# Imports shared by the code examples below; loadmnist / loadsvhn,
# NeuralNetworkClassifier and StochasticGradientDescent are assumed to come
# from the project's own modules.


def offline_online():
    # Train offline on the static split, then switch to online (incremental)
    # updates on the streaming split, recording the test error along the way.
    start_time = time.time()
    # x, y, x_s, y_s, x_test, y_test = load_datasets(path='datasets/svhn/', static_data_ratio=0.1)
    x, y, x_s, y_s, x_test, y_test = loadmnist(static_data_ratio=0.1)

    # Offline training on the static portion of the data.
    model = NeuralNetworkClassifier()
    sgd = StochasticGradientDescent(model, x, y)
    sgd.train(num_iterations=50, learning_rate=0.001)

    # Online phase: stream the held-out data in mini-batches and evaluate on a
    # growing prefix of the test set after each batch.
    error = []
    test_increment = math.floor(len(y_s) / len(y_test))
    i = 0
    idx = 2  # size of the test prefix used for the first evaluation
    while i < len(y_s):
        end_of_batch = min(i + test_increment, len(y_s))
        sgd.incremental(x_s[i:end_of_batch], y_s[i:end_of_batch], 0.001)
        if idx < len(y_test):
            error_rate = model.calculate_loss(x_test[0:idx], y_test[0:idx])
        else:
            error_rate = model.calculate_loss(x_test, y_test)
        error.append(error_rate)
        print("{} items processed".format(i))
        i += test_increment
        idx += 1

    # Final evaluation on the full test set.
    error_rate = model.calculate_loss(x_test, y_test)
    error.append(error_rate)

    running_time = time.time() - start_time
    return error, running_time
Code example #2
def velox():
    # Velox-style serving: per-example incremental updates on the stream, plus
    # a periodic full retraining pass whenever the buffer fills up.
    start_time = time.time()
    # x, y, x_s, y_s, x_test, y_test = load_datasets(path='datasets/svhn/', static_data_ratio=0.1)
    x, y, x_s, y_s, x_test, y_test = loadmnist(static_data_ratio=0.1)

    # Offline training on the static portion of the data.
    model = NeuralNetworkClassifier()
    sgd = StochasticGradientDescent(model, x, y)
    sgd.train(num_iterations=50, learning_rate=0.001)
    buffer_size = 10000
    error = []
    test_increment = math.floor(len(y_s) / len(y_test))
    i = 0
    idx = 2  # size of the test prefix used for the first evaluation
    while i < len(y_s):
        # Single-example incremental update; the example is also buffered for
        # the next full retraining pass.
        sgd.incremental(np.array([x_s[i]]), np.array([y_s[i]]), 0.001)
        sgd.update_buffer(x_s[i], y_s[i])
        # print("{} items processed".format(i))
        i += 1
        if i % test_increment == 0:
            # Evaluate on a growing prefix of the test set.
            if idx < len(y_test):
                error_rate = model.calculate_loss(x_test[0:idx], y_test[0:idx])
            else:
                error_rate = model.calculate_loss(x_test, y_test)
            error.append(error_rate)
            # Advance the evaluation window once per evaluation (as in
            # offline_online()) so the error curves stay comparable.
            idx += 1
        if i % buffer_size == 0:
            print("{} items processed ... scheduling new iteration".format(i))
            sgd.retrain()

    # Final evaluation on the full test set.
    error_rate = model.calculate_loss(x_test, y_test)
    error.append(error_rate)

    running_time = time.time() - start_time
    return error, running_time
Code example #3
def simple():
    # Baseline: train once on the full static SVHN split and print the test loss.
    x, y, x_s, y_s, x_test, y_test = loadsvhn(path='../datasets/svhn/',
                                              static_data_ratio=1.0)
    model = NeuralNetworkClassifier()
    sgd = StochasticGradientDescent(model, x, y)
    sgd.train(num_iterations=50, learning_rate=0.001)
    print(model.calculate_loss(x_test, y_test))
Code example #4
def full():
    # Reference point: train once on the full dataset (no streaming split) and
    # report the final test error and total runtime.
    start_time = time.time()
    # x, y, x_s, y_s, x_test, y_test = load_datasets(path='datasets/svhn/', static_data_ratio=0.1)
    x, y, x_s, y_s, x_test, y_test = loadmnist(static_data_ratio=1.0)

    model = NeuralNetworkClassifier()
    sgd = StochasticGradientDescent(model, x, y)
    sgd.train(num_iterations=50, learning_rate=0.001)
    error = []
    error_rate = model.calculate_loss(x_test, y_test)
    error.append(error_rate)
    running_time = time.time() - start_time
    return error, running_time
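Each of the variants above (except simple(), which only prints the test loss) returns its recorded error curve together with its wall-clock running time. As a minimal usage sketch, assuming matplotlib is available, a driver like the following could run the three returning variants and plot their error curves side by side; the driver and the plotting choices are illustrative additions, not part of the original examples.

import matplotlib.pyplot as plt

if __name__ == "__main__":
    results = {}
    # Run the three variants that return (error_curve, running_time).
    for name, fn in [("offline+online", offline_online),
                     ("velox", velox),
                     ("full", full)]:
        error_curve, running_time = fn()
        results[name] = error_curve
        print("{}: {:.1f}s, final error {:.4f}".format(
            name, running_time, error_curve[-1]))

    # Plot each error curve; full() contributes a single final-error point.
    for name, error_curve in results.items():
        plt.plot(error_curve, marker="o", markersize=2, label=name)
    plt.xlabel("evaluation step")
    plt.ylabel("test loss")
    plt.legend()
    plt.show()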