# This method is slow because it keeps calling a feature reduction
# method for each bar and each estimator. We will globally reduce the
# features before starting.
global_f_select = MultiSelectKBest(f_classif, pooling_function=np.min,
                                   k=3000)
res2 = first_layer_predictor2.fit_transform(
    global_f_select.fit_transform(data, stimuli), stimuli)

# Now visualise the predictions.
from viz import get_bars, draw_words, pad, make_collage

bars = get_bars(img_size=(50, 50))
words1 = draw_words(res1, bars)
words2 = draw_words(res2, bars)
words = draw_words(stimuli, bars)
stacked = np.concatenate([words1, words2, words], axis=1)

# Pad this slightly in order to be able to distinguish groups.
stacked = pad(stacked, [0, 10, 10])

num_x = 8
num_y = 12
start_at = 0
# NOTE: the original line is truncated here (it ends mid-expression with
# a line continuation); assuming make_collage only needs the stack of
# word images, a plausible completion is:
collage = make_collage(stacked[start_at:start_at + (num_x * num_y)])
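# The display code is not part of this excerpt; a minimal sketch of how
# the collage could be shown, assuming make_collage returns a single 2D
# image array:
import matplotlib.pyplot as plt

plt.figure(figsize=(10, 15))
plt.imshow(collage, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.show()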
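# For reference, MultiSelectKBest is a helper used by this example, not
# a standard scikit-learn estimator. Below is a minimal sketch of what
# such a pooled univariate selector could look like; this is an
# assumption about its behaviour, not the actual implementation. It
# scores the features once per target column, pools the per-target
# p-values with pooling_function, and keeps the k features with the
# smallest pooled p-value (np.min pooling keeps features that are
# significant for at least one target).
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.feature_selection import f_classif


class MultiSelectKBestSketch(BaseEstimator, TransformerMixin):
    """Hypothetical stand-in for MultiSelectKBest (illustration only)."""

    def __init__(self, score_func=f_classif, pooling_function=np.min,
                 k=3000):
        self.score_func = score_func
        self.pooling_function = pooling_function
        self.k = k

    def fit(self, X, Y):
        # One univariate test per target column; each call returns a
        # (scores, p-values) pair and we keep the p-values.
        pvals = np.array([self.score_func(X, y)[1] for y in Y.T])
        pooled = self.pooling_function(pvals, axis=0)
        # Keep the k features with the smallest pooled p-value.
        self.mask_ = np.zeros(X.shape[1], dtype=bool)
        self.mask_[np.argsort(pooled)[:self.k]] = True
        return self

    def transform(self, X):
        return X[:, self.mask_]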