Ejemplo n.º 1
0
                            'bias': None,
                            'value': None,
                            'layer_type': 'binary',
                            'layer_name': 'input'
                         },
                      1: {
                            'dimension':2000,     
                            'bias': None,
                            'value': None,
                            'layer_type': 'binary',
                            'layer_name': 'hidden'
                         }
              }
# --- Greedy layer-wise pre-training of a stack of binary RBMs ---
# Hyperparameter schedules: step_iterator(start, stop, step) presumably yields
# one value per epoch, ramping from start to stop — TODO confirm against
# step_iterator's definition (not visible in this chunk). Momentum ramps
# 0.5 -> 0.9; learning rate and weight decay are held constant here.
learning_rate, weight_decay, momentum= step_iterator(0.1,0.1,0), step_iterator(2e-4,2e-4,0), step_iterator(0.5,0.9,0.05)
# Mini-batch generator over the current training representation (batch size 100).
batch_func = batch_func_generator(X_train, batch_size = 100)
# First RBM of the stack; weight_list=None means weights are initialised internally.
rbm0 = BinRBM(layers_dict = RBM1layers_dict, weight_list = None, random_state = None)
# Contrastive-divergence training (PCD=False, k=1) for 50 iterations,
# monitoring reconstruction error.
rbm0.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum,
                weight_decay = weight_decay, k = 1, perst_size = 100, n_iter = 50, verbose = True)
# Replace the training data with the deterministic (sample=False) hidden
# activations so the next RBM trains on this layer's representation.
X_train = rbm0.transform(X_train, sample = False)

# Second RBM: rebuild the schedules (the iterators above were consumed by fit)
# and train on the first layer's output.
learning_rate, weight_decay, momentum= step_iterator(0.1,0.1,0), step_iterator(2e-4,2e-4,0), step_iterator(0.5,0.9,0.05)
batch_func = batch_func_generator(X_train, batch_size = 100)
rbm1 = BinRBM(layers_dict = RBM2layers_dict, weight_list = None, random_state = None)
rbm1.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum,
                weight_decay = weight_decay, k = 1, perst_size = 100, n_iter = 50, verbose = True)
X_train = rbm1.transform(X_train, sample = False)

# Third RBM of the stack, trained on the second layer's activations with the
# same (rebuilt) hyperparameter schedules.
learning_rate, weight_decay, momentum= step_iterator(0.1,0.1,0), step_iterator(2e-4,2e-4,0), step_iterator(0.5,0.9,0.05)
batch_func = batch_func_generator(X_train, batch_size = 100)
rbm2 = BinRBM(layers_dict = RBM3layers_dict, weight_list = None, random_state = None)
# NOTE(review): this fit(...) call is truncated in the source — the remaining
# keyword arguments and closing parenthesis are missing from this chunk.
rbm2.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum,
Ejemplo n.º 2
0
                            'bias': None,
                            'value': None,
                            'layer_type': 'binary',
                            'layer_name': 'input'
                         },
                      1: {
                            'dimension':200,     
                            'bias': None,
                            'value': None,
                            'layer_type': 'binary',
                            'layer_name': 'hidden'
                         }
              }
# --- Train one binary RBM, then classify on its hidden features ---
# Learning rate decays 0.01 -> 0.001, momentum ramps 0.5 -> 0.9; schedules are
# presumably per-epoch iterators — TODO confirm step_iterator's semantics.
learning_rate, weight_decay, momentum= step_iterator(0.01,0.001,-0.002), step_iterator(2e-5,2e-5,0), step_iterator(0.5,0.9,0.05)
batch_func = batch_func_generator(X_train_bin, batch_size = 100)
rbm = BinRBM(layers_dict = RBMlayers_dict, weight_list = None, random_state = None)
print 'Training starts'
# CD-1 training (PCD=False, k=1) for 500 iterations with reconstruction-error
# monitoring.
rbm.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum,
                weight_decay = weight_decay, k = 1, perst_size = 100, n_iter = 500, verbose = True)
# Hidden-layer features for the train and test sets (transform's default
# sampling behaviour; sample=False is not passed here, unlike Example 1).
combined = rbm.transform(X_train_bin)
combined_test = rbm.transform(X_test_bin)

# Linear SVM (hinge-loss SGD) on the RBM features, grid-searching the
# regularisation strength alpha and the epoch count. NOTE: the 'n_iter' grid
# values override the n_iter=100 passed to the constructor.
parameters = {'alpha':[1e-1,1e-2,1e-3,1e-4,1e-5], 'n_iter': [10, 50 ,100]}
clf = GridSearchCV(SGDClassifier(loss="hinge", penalty="l2", n_iter = 100, random_state = 500), parameters) 
clf.fit(combined,labels_train)
y_pred = clf.predict(combined_test)
# Report total test examples and number predicted correctly
# ('toplam' = total, 'dogru' = correct; Turkish).
print 'toplam: ', labels_test.shape[0], 'dogru: ', (y_pred == labels_test).sum()
print clf.best_estimator_

# Kernel-SVM grid: RBF kernel, searching gamma x C.
# NOTE(review): this list literal is truncated in the source — any further
# grid entries and the closing bracket are missing from this chunk.
parameters = [{'kernel': ['rbf'], 'gamma': [1e-1,1e-2,1e-3, 1e-4],
                     'C': [1, 10, 100, 1000]},
Ejemplo n.º 3
0
                            'bias': None,
                            'value': None,
                            'layer_type': 'binary',
                            'layer_name': 'input'
                         },
                      3: {
                            'dimension':1000,     
                            'bias': None,
                            'value': None,
                            'layer_type': 'binary',
                            'layer_name': 'hidden'
                         }
              }
# --- Joint RBM over the hidden layers of three colour-channel RBMs (R/G/B) ---
# Learning rate decays 0.1 -> 0.01, momentum ramps 0.1 -> 0.9, tiny constant
# weight decay.
learning_rate, weight_decay, momentum= step_iterator(0.1,0.01,-0.002), step_iterator(1e-6,1e-6,0), step_iterator(0.1,0.9,0.05)
# Batches drawn jointly from the three channel RBMs' hidden representations
# (batch size 50).
batch_func = batch_func_generator([rbmR_hidden, rbmG_hidden, rbmB_hidden], batch_size = 50)
rbm = BinRBM(layers_dict = RBMlayers_dict, weight_list = None, random_state = random_state)
print 'Training starts'
# CD-1 training for 20 iterations; the sparsity options on the commented line
# below were apparently tried and left disabled.
rbm.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum,
                weight_decay = weight_decay, k = 1, perst_size = 100, n_iter = 20, verbose = True)
        #        sparsity_cond = True, sparsity_target = 0.01, sparsity_lambda = 1e-6)
                
###sample from model####
# Disabled experiment: run Gibbs sampling in the joint RBM, push the resulting
# hidden states back into the per-channel RBMs, and feed them back to the
# visible (pixel) layers.
#rbm.run_gibbs(X = [rbmR_hidden[1000],rbmG_hidden[1000],rbmB_hidden[1000]], step_size = 1, sample = False)
#rbm.run_gibbs(X = [np.zeros(500),np.zeros(500),np.zeros(500)], step_size = 1000, sample = False)
#hidR,hidG,hidB = rbm.input_layer_list[0].value,rbm.input_layer_list[1].value,rbm.input_layer_list[2].value
#rbmR.hidden_layer.value,rbmG.hidden_layer.value,rbmB.hidden_layer.value = hidR,hidG,hidB
#rbmR.feed_back(sample = True)
#rbmG.feed_back(sample = True)
#rbmB.feed_back(sample = True)
# Active path: sample each channel RBM independently — 1000 Gibbs steps
# starting from uniform noise in [-5, 5) over 1800 units (presumably that
# RBM's visible dimension — TODO confirm).
rbmR.run_gibbs(np.random.rand(1800)*10 -5, step_size = 1000, sample = True)
rbmG.run_gibbs(np.random.rand(1800)*10 -5, step_size = 1000, sample = True)