Example #1
from GRBM_DBN import test_GRBM_DBN
# Speech data loader; the module name is assumed by analogy with load_data_MNIST.
from load_data import load_data

# Width of the input context window in frames (the value here is assumed).
N_FRAMES = 11

LAYER_SIZE = [256, 512, 1024]
N_LAYERS = [2, 3, 4]
ITERATIONS = 5

datasets = load_data(n_frames=N_FRAMES)

# Grid search over network depth and layer width, repeated ITERATIONS times.
for _ in range(ITERATIONS):
    for n_layers in N_LAYERS:
        for layer_size in LAYER_SIZE:
            test_score, val_score = test_GRBM_DBN(
                finetune_lr=0.1,
                pretraining_epochs=[225, 75],
                pretrain_lr=[0.002, 0.02],
                k=1,
                weight_decay=0.0002,
                momentum=0.9,
                batch_size=128,
                datasets=datasets,
                hidden_layers_sizes=n_layers * [layer_size],
                load=False,
                n_ins=39 * N_FRAMES,
                n_outs=120,
                filename=('../data/speech_%d_%d_%d.pickle' %
                          (N_FRAMES, layer_size, n_layers)))

            # Append this configuration's scores to a shared log file.
            log = '../data/speech.log'
            with open(log, 'a') as f:
                f.write(
                    'N_FRAMES=%d, LAYER_SIZE=%d, n_layers=%d, test_score=%f%%, val_score=%f%%\n'
                    % (N_FRAMES, layer_size, n_layers, test_score, val_score))
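
After a sweep like this, the shared log can be parsed to find the strongest configuration. A minimal sketch, assuming the exact line format written above and treating the logged scores as error rates, so lower is better:

import re

PATTERN = re.compile(r'N_FRAMES=(\d+), LAYER_SIZE=(\d+), n_layers=(\d+), '
                     r'test_score=([\d.]+)%, val_score=([\d.]+)%')

best = None
with open('../data/speech.log') as f:
    for line in f:
        m = PATTERN.match(line)
        if m is None:
            continue
        layer_size, n_layers = int(m.group(2)), int(m.group(3))
        val_score = float(m.group(5))
        # Keep the configuration with the lowest validation error.
        if best is None or val_score < best[0]:
            best = (val_score, layer_size, n_layers)

print(best)
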
Example #2
# -*- coding: utf-8 -*-

from GRBM_DBN import test_GRBM_DBN
from GRBM_DBN import GRBM_DBN

from load_data_MNIST import load_data
from load_data_MNIST import load_raw_data

datasets = load_data()

#
#   TRAIN THE NETWORK
#
test_score, val_score = test_GRBM_DBN(
    finetune_lr=0.1, pretraining_epochs=[1, 1], pretrain_lr=[0.002, 0.02],
    k=1, weight_decay=0.0002, momentum=0.9, batch_size=128, datasets=datasets,
    hidden_layers_sizes=[784, 784], finetune=False,
    saveToDir='../results/MNIST/', loadModelFromFile='', verbose=True)


#
# USE THE TRAINED NETWORK
#

dbn = GRBM_DBN.load('../results/MNIST/pretrained_model')

train_set, valid_set, test_set = load_raw_data()

# Classify the first 13 training patterns.
print(dbn.classify(train_set[0][:13]))
# Actual classes of the first 13 patterns.
print(train_set[1][:13])
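
Beyond spot-checking a few patterns, the loaded network can be scored on a whole split. A minimal sketch, assuming classify returns a NumPy array of predicted class indices and that each split is an (inputs, labels) pair as above:

import numpy

inputs, labels = test_set
predictions = dbn.classify(inputs)
# Fraction of patterns whose predicted class matches the true label.
accuracy = numpy.mean(predictions == labels)
print('test accuracy: %.2f%%' % (100.0 * accuracy))
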
Example #3
from GRBM_DBN import test_GRBM_DBN
from load_data_MNIST import load_data

LAYER_SIZE = [256, 512, 1024]
N_LAYERS = [2, 3, 4]
ITERATIONS = 5

datasets = load_data()

for _ in range(ITERATIONS):
    for n_layers in N_LAYERS:
        for layer_size in LAYER_SIZE:
            test_score, val_score = test_GRBM_DBN(
                finetune_lr=0.1, pretraining_epochs=[225, 75],
                pretrain_lr=[0.002, 0.02], k=1, weight_decay=0.0002,
                momentum=0.9, batch_size=128, datasets=datasets,
                hidden_layers_sizes=n_layers * [layer_size], load=False,
                filename=('../data/MNIST_%d_%d.pickle' % (layer_size, n_layers)))

            log = '../data/MNIST.log'
            with open(log, 'a') as f:
                f.write('LAYER_SIZE=%d, n_layers=%d, test_score=%f%%, val_score=%f%%\n'
                        % (layer_size, n_layers, test_score, val_score))

Example #4
from GRBM_DBN import test_GRBM_DBN
from load_data_MNIST import load_data


datasets = load_data()

# pretraining_start/pretraining_stop presumably select the range of layers to
# pretrain (here both); the trained model is saved to the pickle file below.
test_GRBM_DBN(finetune_lr=0.2, pretraining_epochs=[70, 40],
    pretrain_lr=[0.0002, 0.002], k=1, weight_decay=0.02,
    momentum=0.8, batch_size=20, datasets=datasets,
    hidden_layers_sizes=[784, 784], load=False, save=True,
    filename='../data/MNIST/GRBM200/layer-1.pickle',
    finetune=True, pretraining_start=0, pretraining_stop=2, verbose=True)

Example #5
from GRBM_DBN import test_GRBM_DBN
from load_data_MNIST import load_data

datasets = load_data()

test_GRBM_DBN(finetune_lr=0.2,
              pretraining_epochs=[200, 50],
              pretrain_lr=[0.0001, 0.002],
              k=1,
              weight_decay=0.002,
              momentum=0.7,
              batch_size=20,
              datasets=datasets,
              hidden_layers_sizes=[784, 784],
              load=False,
              save=True,
              filename='../data/MNIST/GRBM200/layer-4.pickle',
              finetune=True,
              pretraining_start=0,
              pretraining_stop=2,
              verbose=True)
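
Examples #4 and #5 differ only in their hyperparameters and output filenames, so both runs can be driven from one script. A minimal sketch; the grouping of the runs into a config list (and the run-id naming) is an illustration, not part of the original code:

from GRBM_DBN import test_GRBM_DBN
from load_data_MNIST import load_data

datasets = load_data()

# (run_id, pretraining_epochs, pretrain_lr, weight_decay, momentum);
# the two rows reproduce the settings of Examples #4 and #5.
CONFIGS = [
    (1, [70, 40], [0.0002, 0.002], 0.02, 0.8),
    (4, [200, 50], [0.0001, 0.002], 0.002, 0.7),
]

for run_id, epochs, lrs, decay, mom in CONFIGS:
    test_GRBM_DBN(finetune_lr=0.2, pretraining_epochs=epochs,
                  pretrain_lr=lrs, k=1, weight_decay=decay,
                  momentum=mom, batch_size=20, datasets=datasets,
                  hidden_layers_sizes=[784, 784], load=False, save=True,
                  filename='../data/MNIST/GRBM200/layer-%d.pickle' % run_id,
                  finetune=True, pretraining_start=0, pretraining_stop=2,
                  verbose=True)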