Пример #1
0
def main2():
    """Train a tiny 2 -> 5 -> 3 sigmoid particle network on one sample."""
    # Momentum-based SGD (beta=0.5), one epoch over single-example batches.
    optimizer = ParticleSGD(alpha=0.2, n_epochs=1, mini_batch_size=1, verbosity=2, weight_update="momentum", beta=0.5)

    # One training example: two input features, one-hot target over three classes.
    x_data = np.asarray([[0.2, -0.3]])
    y_data = np.asarray([[0.0, 1.0, 0.0]])

    # Mean-squared-error cost; two sigmoid layers stacked on a 2-wide input.
    network = ParticleNetwork(cost="mse", particle_input=ParticleInput(2))
    network.append(Particle(2, 5, activation="sigmoid"))
    network.append(Particle(5, 3, activation="sigmoid"))

    optimizer.optimize(network, x_data, y_data)
Пример #2
0
def main2():
    """Run a minimal momentum-SGD training pass on a 2 -> 5 -> 3 sigmoid network.

    Uses a single training example, so one epoch performs one update cycle.
    """
    # Momentum update rule (beta=0.5); plain-SGD alternative kept commented below.
    sgd = ParticleSGD(alpha=0.2, n_epochs=1, mini_batch_size=1, verbosity=2, weight_update="momentum", beta=0.5)
    # sgd = ParticleSGD(alpha=0.2, n_epochs=1, mini_batch_size=1, verbosity=2)

    # One sample: two input features and a one-hot target over three classes.
    train_X = np.asarray([[0.2, -0.3]])
    train_Y = np.asarray([[0.0, 1.0, 0.0]])

    # Mean-squared-error cost; two sigmoid layers (2 -> 5 -> 3).
    net = ParticleNetwork(cost="mse", particle_input=ParticleInput(2))
    net.append(Particle(2, 5, activation="sigmoid"))
    net.append(Particle(5, 3, activation="sigmoid"))

    sgd.optimize(net, train_X, train_Y)
Пример #3
0
               255.0)  # scaled values in range [0-1]
# Build length-ten one-hot target vectors from the label column.
# FIX: .iloc replaces the pandas .ix indexer, which was removed in pandas 1.0;
# columns are integer positions here (header=None), so .iloc is equivalent.
Y = []
for val in raw_data_train.iloc[:, 0]:
    y = np.zeros(10)
    y[val] = 1.0
    Y.append(y)
Y = np.asarray(Y)

# Data subset (currently the full set; lower n_sub to train on fewer rows)
n_sub = len(X)
X_sub = X[:n_sub, :]
Y_sub = Y[:n_sub, :]

# 784 -> 10 -> 10 network: sigmoid hidden layer, softmax output,
# categorical cross-entropy cost for the ten MNIST classes.
net = ParticleNetwork(cost="categorical_cross_entropy",
                      particle_input=ParticleInput(784))
net.append(Particle(784, 10, activation="sigmoid"))
net.append(Particle(10, 10, activation="softmax"))

start = time.time()

# Single-epoch, full-batch run on 4 threads.
# NOTE(review): cost_freq/chunk_size semantics come from ParticleSGD — confirm there.
rprop = ParticleSGD(n_epochs=1,
                    verbosity=2,
                    cost_freq=25,
                    n_threads=4,
                    chunk_size=500,
                    mini_batch_size=n_sub)
rprop.optimize(net, X_sub, Y_sub)

# Wall-clock training time in seconds.
print(time.time() - start)
Пример #4
0
# MNIST data: each CSV row is a label followed by 784 pixel values.
# NOTE(review): hard-coded user path — parameterize before reuse.
raw_data_train = pd.read_csv("/Users/alange/programming/MNIST/data/mnist_train.csv", header=None)
print("data loaded")

# Prepare data.
# FIX: .iloc replaces the pandas .ix indexer, which was removed in pandas 1.0;
# columns are integer positions here (header=None), so .iloc is equivalent.
X = np.asarray(raw_data_train.iloc[:, 1:] / 255.0)  # scaled values in range [0-1]
# length ten categorical vector (one-hot) built from the label column
Y = []
for val in raw_data_train.iloc[:, 0]:
    y = np.zeros(10)
    y[val] = 1.0
    Y.append(y)
Y = np.asarray(Y)

# Data subset (currently the full set; lower n_sub to train on fewer rows)
n_sub = len(X)
X_sub = X[:n_sub, :]
Y_sub = Y[:n_sub, :]

# 784 -> 10 -> 10 network: sigmoid hidden layer, softmax output,
# categorical cross-entropy cost for the ten MNIST classes.
net = ParticleNetwork(cost="categorical_cross_entropy", particle_input=ParticleInput(784))
net.append(Particle(784, 10, activation="sigmoid"))
net.append(Particle(10, 10, activation="softmax"))

start = time.time()

# Single-epoch, full-batch run on 4 threads.
# NOTE(review): cost_freq/chunk_size semantics come from ParticleSGD — confirm there.
rprop = ParticleSGD(n_epochs=1, verbosity=2, cost_freq=25, n_threads=4, chunk_size=500, mini_batch_size=n_sub)
rprop.optimize(net, X_sub, Y_sub)

# Wall-clock training time in seconds.
print(time.time() - start)