# XGB
import numpy as np
from datetime import datetime

num_round = 400
xgb_params = {'objective': 'multi:softprob', 'num_class': 38, 'eta': .2,
              'max_depth': 6, 'colsample_bytree': .4, 'subsample': .8,
              'silent': 1, 'nthread': 8}

# accumulators for the summed NN predictions, keyed by first-layer size h1
# (N//2 keeps the array shape an integer under Python 3)
pr_nn_test_dict = {60: np.zeros((N//2, 38)), 70: np.zeros((N//2, 38))}
pr_nn_train_dict = {60: np.zeros((N//2, 38)), 70: np.zeros((N//2, 38))}
scores = []
t0 = datetime.now()
for j in range(nModels):
    # fresh seed per bagging round, used by both XGBoost and the NN init
    seed = 9317*j + 3173
    xgb_params['seed'] = seed
    # pick a column subset with XGBoost, then restrict the sparse matrix to it
    cols = feature_selection(X2[v_train-1], y, xgb_params, num_round)
    X = X2.tocsc()[:, cols].tocsr().astype(np.float32)
    for params in params_lst:
        h1 = params['h1']
        h2 = params['h2']
        max_epochs = params['max_epochs']
        pr_nn_train = pr_nn_train_dict[h1]
        pr_nn_test = pr_nn_test_dict[h1]
        np.random.seed(seed)
        net1 = build_net_sparse_input(h1, h2, p, mm, bs=bs,
                                      max_epochs=max_epochs,
                                      num_in=X.shape[1],
                                      shuffle=True, eval_size=False)
        net1.fit(X[v_train-1], y)
        pr = net1.predict_proba(X[v_train-1])
        # net1.save_params_to(data_path+'model/nn002_h1_'+str(h1)+'_'+str(j)+'.pkl')
        # accumulate train/test probabilities; averaging happens downstream
        pr_nn_train += pr
        pr_nn_test += net1.predict_proba(X[v_test-1])
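# `feature_selection` above is defined elsewhere in this script. As a rough
# sketch of the idea (an assumption, not the author's exact helper): train one
# XGBoost model and keep only the columns that ever get used for a split.
# xgb.DMatrix, xgb.train and Booster.get_fscore are real xgboost APIs; the
# name feature_selection_sketch and the zero-importance cutoff are illustrative.
import xgboost as xgb

def feature_selection_sketch(X_tr, y_tr, params, n_rounds):
    """Return the indices of columns with nonzero XGBoost importance."""
    dtrain = xgb.DMatrix(X_tr, label=y_tr)
    bst = xgb.train(params, dtrain, n_rounds)
    # get_fscore() maps default feature names like 'f12' to split counts
    used = bst.get_fscore()
    return np.array(sorted(int(name[1:]) for name in used))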
# NN hyperparameters shared across folds
p = .1
bs = 256
h1h2_lst = [{'h1': 60, 'h2': 100}, {'h1': 70, 'h2': 90}]
# one accumulator per (h1, h2) pair and per training checkpoint k
pr_nn_dict = {(params['h1'], params['h2']): [np.zeros((N//2, 38)) for _ in range(nIter)]
              for params in h1h2_lst}
scores = []
t0 = datetime.now()
for fold, idx in enumerate(kf):
    train_idx, valid_idx = idx
    params_dict = {'fold': fold}
    for j in range(nModels):
        seed = 11281*j + 9108
        xgb_params['seed'] = seed
        cols = feature_selection(X4[train_idx], y[train_idx], xgb_params, nt)
        X = X4.tocsc()[:, cols].tocsr().astype(np.float32)
        for params in h1h2_lst:
            h1 = params['h1']
            h2 = params['h2']
            params_dict.update(params)
            pr_avg = pr_nn_dict[(h1, h2)]
            np.random.seed(seed)
            net1 = build_net_sparse_input(h1, h2, p, mm, max_epochs=epochs,
                                          num_in=X.shape[1],
                                          shuffle=True, eval_size=False)
            # train in nIter blocks of `epochs` epochs, snapshotting the
            # out-of-fold predictions after each block
            for k in range(nIter):
                net1.fit(X[train_idx], target[train_idx])
                pr = net1.predict_proba(X[valid_idx])
                pr_avg[k][valid_idx] += pr
                sc = params_dict.copy()
                sc.update({'epochs': epochs*(k+1), 'nModels': j+1,
                           # the source snippet breaks off mid-statement here;
                           # scoring the running model average with log loss is
                           # an assumed completion, not the author's exact code
                           'score': log_loss(y[valid_idx],
                                             pr_avg[k][valid_idx]/(j+1))})
                scores.append(sc)
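# After the loops above, pr_nn_dict[(h1, h2)][k] holds out-of-fold
# probabilities summed over the nModels bagging seeds (each validation row is
# filled once per seed across the folds). A minimal sketch of how the
# accumulators could be turned into averaged predictions and scored, assuming
# scikit-learn's multiclass log loss as the metric:
from sklearn.metrics import log_loss

for (h1, h2), pr_list in pr_nn_dict.items():
    for k, pr_sum in enumerate(pr_list):
        pr_oof = pr_sum / nModels  # average over the bagged models
        print('h1=%d h2=%d epochs=%d logloss=%.5f'
              % (h1, h2, epochs*(k+1), log_loss(y, pr_oof)))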