Code Example #1
def auto_encode(x, y):
    from sknn import ae, mlp

    # Initialize auto-encoder for unsupervised learning.
    myae = ae.AutoEncoder(
        layers=[ae.Layer("Tanh", units=8),
                ae.Layer("Sigmoid", units=4)],
        learning_rate=0.002,
        n_iter=10)

    # Layerwise pre-training using only the input data.
    myae.fit(x)

    # Initialize the multi-layer perceptron with same base layers.
    mymlp = mlp.Regressor(layers=[
        mlp.Layer("Tanh", units=8),
        mlp.Layer("Sigmoid", units=4),
        mlp.Layer("Linear")
    ])

    # Transfer the weights from the auto-encoder.
    myae.transfer(mymlp)
    # Now perform supervised-learning as usual.
    mymlp.fit(x, y)
    return mymlp
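
A minimal usage sketch for the helper above, assuming small random NumPy arrays for `x` and `y` (the data and shapes are illustrative, not part of the original example):

import numpy as np

# Toy data: 100 samples, 8 features, one continuous target.
x = np.random.uniform(size=(100, 8))
y = np.random.uniform(size=(100, 1))

model = auto_encode(x, y)
print(model.predict(x[:5]))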
Code Example #2
File: annAnalysis.py  Project: anuragreddygv323/P1
def autoEncoderOptimization(data):
    from sknn import ae
    from sknn.mlp import Classifier, Layer
    from sklearn.metrics import accuracy_score

    # Layer-wise unsupervised pre-training on the unlabelled training data.
    autoencoder = ae.AutoEncoder(layers=[
        ae.Layer("Tanh", units=300),
        ae.Layer("Sigmoid", units=200),
        ae.Layer("Tanh", units=100)
    ],
                                 learning_rate=0.002,
                                 n_iter=10)

    autoencoder.fit(data["train"])

    # Supervised classifier whose first three layers mirror the auto-encoder.
    model = Classifier(layers=[
        Layer("Tanh", units=300),
        Layer("Sigmoid", units=200),
        Layer("Tanh", units=100),
        Layer("Rectifier", units=100),
        Layer("Rectifier", units=50),
        Layer("Softmax")
    ])

    # Transfer the pre-trained weights, then fine-tune with labels.
    autoencoder.transfer(model)
    model.fit(data["train"], data["label"])

    prediction = model.predict(data["train"])
    print(accuracy_score(data["label"], prediction))
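
The function expects `data` to be a dict of NumPy arrays keyed by "train" and "label". A hypothetical call (shapes and class count are illustrative only, not from the original project):

import numpy as np

data = {
    "train": np.random.uniform(size=(500, 300)),
    "label": np.random.randint(0, 3, size=(500,)),
}
autoEncoderOptimization(data)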
Code Example #3
File: irisAE.py  Project: ewang73/proj3
import time

from sknn import ae

# AELayers and traindata come from earlier in the original script.

##for each in complist:
##    comp = each
t0 = time.perf_counter()

print("Time started")
# Fit the auto-encoder.

result = ae.AutoEncoder(AELayers,
                        warning=None,
                        random_state=0,
                        learning_rule='sgd',
                        learning_rate=0.1,
                        learning_momentum=0.9,
                        regularize=None,
                        weight_decay=None,
                        dropout_rate=None,
                        batch_size=1,
                        n_iter=None,
                        n_stable=10,
                        f_stable=0.001,
                        valid_set=None,
                        valid_size=0.0,
                        loss_type=None,
                        debug=False,
                        verbose=None).fit(traindata)

t1 = time.perf_counter()
timetaken = str(t1 - t0)
print("Computation time: " + timetaken)

# Project the training data through the trained auto-encoder.
result = result.transform(traindata)
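
A hypothetical setup for the variables this snippet assumes, sized for the Iris data the filename suggests (none of these values come from the original project):

import numpy as np
from sknn import ae

# Stand-ins for the variables the snippet expects.
traindata = np.random.uniform(size=(150, 4))   # Iris-sized feature matrix
AELayers = [ae.Layer("Sigmoid", units=3),      # encoder hidden layers
            ae.Layer("Sigmoid", units=2)]

After fitting, `result` holds the encoded features produced by `transform`, which can be fed to any downstream estimator.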
Code Example #4
    mlp.Layer("Tanh", units=n_feat * 2 / 3),
    mlp.Layer("Sigmoid", units=n_feat * 1 / 3),
    mlp.Layer("Softmax", units=n_targets)
],
                    n_iter=50,
                    n_stable=10,
                    learning_rate=0.001,
                    valid_size=0.5,
                    verbose=1)

if PRETRAIN:
    from sknn import ae
    # Pre-train an auto-encoder whose layers mirror the classifier's first
    # two hidden layers; units must match for transfer() to succeed.
    ae = ae.AutoEncoder(layers=[
        ae.Layer("Tanh", units=n_feat * 2 // 3),
        ae.Layer("Sigmoid", units=n_feat // 3)
    ],
                        learning_rate=0.002,
                        n_iter=10,
                        verbose=1)
    ae.fit(data_train)
    ae.transfer(nn)

nn.fit(data_train, labels_train)

from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix

expected = labels_test
predicted = nn.predict(data_test)

print("Classification report for classifier %s:\n%s\n" %