import cPickle

import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Dropout
from keras.optimizers import SGD
from sklearn.svm import SVR
from sklearn.grid_search import GridSearchCV

# SupervisedLoader, StackedAutoencoders and load_configuration are
# project-local helpers; their imports are omitted in this excerpt.


def train_mlp():
    # Initialise the first three layers with the weights pre-trained by the
    # stacked autoencoders, then fine-tune the whole network end-to-end.
    with open('../data/params_0.pkl', 'rb') as f:
        w_0, _, _ = cPickle.load(f)
    with open('../data/params_1.pkl', 'rb') as f:
        w_1, _, _ = cPickle.load(f)
    with open('../data/params_2.pkl', 'rb') as f:
        w_2, _, _ = cPickle.load(f)

    train_x, train_y = SupervisedLoader.load('../data')

    model = Sequential()
    model.add(Dense(33, 64, weights=[w_0]))
    model.add(Activation('sigmoid'))
    # model.add(Dropout(0.2))
    model.add(Dense(64, 128, weights=[w_1]))
    model.add(Activation('sigmoid'))
    # model.add(Dropout(0.2))
    model.add(Dense(128, 128, weights=[w_2]))
    model.add(Dense(128, 1, init='glorot_uniform'))
    model.add(Activation('relu'))

    # sgd = SGD(lr=1.e-5, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(loss='mean_squared_error', optimizer='adagrad')
    model.fit(train_x, train_y, nb_epoch=500, batch_size=128,
              validation_split=0.2)
    model.save_weights('../data/mlp_params.hdf5')
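
# Usage sketch (assumption, not part of the original module): to reuse the
# trained MLP, the same architecture has to be rebuilt before loading the
# weights saved by train_mlp(). The helper name `predict_with_mlp` is
# hypothetical.
def predict_with_mlp(test_x):
    model = Sequential()
    model.add(Dense(33, 64))
    model.add(Activation('sigmoid'))
    model.add(Dense(64, 128))
    model.add(Activation('sigmoid'))
    model.add(Dense(128, 128))
    model.add(Dense(128, 1, init='glorot_uniform'))
    model.add(Activation('relu'))
    model.compile(loss='mean_squared_error', optimizer='adagrad')
    # Restore the fine-tuned weights written by train_mlp().
    model.load_weights('../data/mlp_params.hdf5')
    return model.predict(test_x, batch_size=128)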

def compute_features_from_aes_for_train_set():
    # Push the raw training set through the pre-trained stacked autoencoders
    # and cache the resulting features and targets on disk.
    train_x, train_y = SupervisedLoader.load('../data')

    config = load_configuration('../config/caes.json')
    scaes = StackedAutoencoders(config, warm_start=True)

    train_x = scaes.get_features(train_x)
    np.save('../data/features.npy', train_x)
    np.save('../data/hazards.npy', train_y)
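
# Usage sketch (assumption): downstream steps can reload the cached
# autoencoder features and targets directly, without re-running the encoders.
# `load_cached_features` is a hypothetical helper name.
def load_cached_features():
    features = np.load('../data/features.npy')
    hazards = np.load('../data/hazards.npy')
    return features, hazards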

def grid_search_for_svr():
    # Grid-search the RBF kernel width (gamma) and the penalty term C,
    # scored by mean squared error, then persist the best estimator.
    train_x, train_y = SupervisedLoader.load('../data')

    gammas = [4.]
    clf = SVR(verbose=1)
    param_grid = {'gamma': gammas, 'C': [10., 20., 30., 40.]}
    grid_search = GridSearchCV(clf, param_grid, scoring='mean_squared_error',
                               n_jobs=4, verbose=1)
    grid_search.fit(train_x, train_y)

    print grid_search.best_score_
    print grid_search.best_params_

    with open('../data/another2_svr.pkl', 'wb') as f:
        cPickle.dump(grid_search.best_estimator_, f)
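
# Usage sketch (assumption): the pickled best estimator can be restored and
# applied to new feature vectors; `predict_with_svr` is a hypothetical helper.
def predict_with_svr(test_x):
    with open('../data/another2_svr.pkl', 'rb') as f:
        svr = cPickle.load(f)
    return svr.predict(test_x)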