'layer_name': 'input' }, 1: { 'dimension':2000, 'bias': None, 'value': None, 'layer_type': 'binary', 'layer_name': 'hidden' } } learning_rate, weight_decay, momentum= step_iterator(0.1,0.1,0), step_iterator(2e-4,2e-4,0), step_iterator(0.5,0.9,0.05) batch_func = batch_func_generator(X_train, batch_size = 100) rbm0 = BinRBM(layers_dict = RBM1layers_dict, weight_list = None, random_state = None) rbm0.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum, weight_decay = weight_decay, k = 1, perst_size = 100, n_iter = 50, verbose = True) X_train = rbm0.transform(X_train, sample = False) learning_rate, weight_decay, momentum= step_iterator(0.1,0.1,0), step_iterator(2e-4,2e-4,0), step_iterator(0.5,0.9,0.05) batch_func = batch_func_generator(X_train, batch_size = 100) rbm1 = BinRBM(layers_dict = RBM2layers_dict, weight_list = None, random_state = None) rbm1.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum, weight_decay = weight_decay, k = 1, perst_size = 100, n_iter = 50, verbose = True) X_train = rbm1.transform(X_train, sample = False) learning_rate, weight_decay, momentum= step_iterator(0.1,0.1,0), step_iterator(2e-4,2e-4,0), step_iterator(0.5,0.9,0.05) batch_func = batch_func_generator(X_train, batch_size = 100) rbm2 = BinRBM(layers_dict = RBM3layers_dict, weight_list = None, random_state = None) rbm2.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum, weight_decay = weight_decay, k = 1, perst_size = 100, n_iter = 50, verbose = True) Networklayer_dict = {0:
# Tail of RBMlayers_dict (its opening lives in an earlier, unseen cell):
# entry 1 is the hidden layer — 200 binary units, no preset bias/value.
}, 1: { 'dimension':200, 'bias': None, 'value': None, 'layer_type': 'binary', 'layer_name': 'hidden' } }

# Hyperparameter schedules: learning rate decayed 0.01 -> 0.001 in steps
# of -0.002, weight decay fixed at 2e-5, momentum ramped 0.5 -> 0.9 by 0.05.
learning_rate, weight_decay, momentum= step_iterator(0.01,0.001,-0.002), step_iterator(2e-5,2e-5,0), step_iterator(0.5,0.9,0.05)
# Mini-batch generator over the binarised training set, batches of 100.
batch_func = batch_func_generator(X_train_bin, batch_size = 100)
rbm = BinRBM(layers_dict = RBMlayers_dict, weight_list = None, random_state = None)
print 'Training starts'
# CD-1 training (PCD disabled) with reconstruction error, 500 epochs.
rbm.fit(batch_func, PCD = False, error_function = 'recon',learning_rate = learning_rate, momentum = momentum, weight_decay = weight_decay, k = 1, perst_size = 100, n_iter = 500, verbose = True)
# Hidden-layer features for train/test feed the classifiers below.
combined = rbm.transform(X_train_bin)
combined_test = rbm.transform(X_test_bin)

# --- Linear SGD classifier (hinge loss) grid-searched over alpha/n_iter.
# NOTE(review): the grid's 'n_iter' values override the n_iter=100 passed
# to the SGDClassifier constructor during the search.
parameters = {'alpha':[1e-1,1e-2,1e-3,1e-4,1e-5], 'n_iter': [10, 50 ,100]}
clf = GridSearchCV(SGDClassifier(loss="hinge", penalty="l2", n_iter = 100, random_state = 500), parameters)
clf.fit(combined,labels_train)
y_pred = clf.predict(combined_test)
# Turkish output: 'toplam' = total test samples, 'dogru' = number correct.
print 'toplam: ', labels_test.shape[0], 'dogru: ', (y_pred == labels_test).sum()
print clf.best_estimator_

# --- SVM over the same features: RBF grid (gamma x C) plus a linear-kernel
# grid over C only.
parameters = [{'kernel': ['rbf'], 'gamma': [1e-1,1e-2,1e-3, 1e-4], 'C': [1, 10, 100, 1000]}, {'kernel': ['linear'], 'C': [1, 10, 100, 1000]}]
clf = GridSearchCV(svm.SVC(random_state = 500), parameters)
clf.fit(combined,labels_train)
y_pred = clf.predict(combined_test)
#rbmG.feed_back(sample = True) #rbmB.feed_back(sample = True) rbmR.run_gibbs(np.random.rand(1800)*10 -5, step_size = 1000, sample = True) rbmG.run_gibbs(np.random.rand(1800)*10 -5, step_size = 1000, sample = True) rbmB.run_gibbs(np.random.rand(1800)*10 -5, step_size = 1000, sample = True) visR = scalerR.inverse_transform(rbmR.input_layer_list[0].value) visG = scalerG.inverse_transform(rbmG.input_layer_list[0].value) visB = scalerB.inverse_transform(rbmB.input_layer_list[0].value) visRR = scalerR.inverse_transform(R_train[1000]) visGG = scalerG.inverse_transform(G_train[1000]) visBB = scalerB.inverse_transform(B_train[1000]) #sio.savemat(r'C:\Users\daredavil\Documents\MATLAB\Poje2MCA\sample_new4.mat',{'R':visR, 'G':visG,'B':visB, 'RR':visRR, 'GG':visGG,'BB':visBB, # 'masks':rbmR.weight_list[0][[30,70,80,150,220,380,420],:]}) ######################### combined = rbm.transform([rbmR_hidden, rbmG_hidden, rbmB_hidden]) combined_test = rbm.transform([rbmR_hidden_test, rbmG_hidden_test, rbmB_hidden_test]) sonuclar = {'sgd':0,'svm_lin':0,'svm_rbf':0,'knn':0,'nn':[],'svm_lin_n':0,'svm_rbf_n':0} parameters = {'alpha':[1e-1,1e-2,1e-3,1e-4,1e-5]} clf = GridSearchCV(SGDClassifier(loss="log", penalty="l2", n_iter = 100,random_state=random_state), parameters) clf.fit(combined,labels_train) y_pred = clf.predict(combined_test) print 'toplam: ', labels_test.shape[0], 'dogru: ', (y_pred == labels_test).sum() print clf.best_estimator_ sonuclar['sgd'] = (y_pred == labels_test).sum() #parameters = [{'kernel': ['rbf'], 'gamma': [1e-3, 1e-4], # 'C': [1, 10, 100, 1000]}, # {'kernel': ['linear'], 'C': [1, 10, 100, 1000]}]