from collections import OrderedDict
import numpy as np
import theano
from dagbldr.utils import add_datasets_to_graph, convert_to_one_hot
from dagbldr.utils import get_params_and_grads
from dagbldr.utils import early_stopping_trainer
from dagbldr.nodes import linear_layer, softmax_layer
from dagbldr.nodes import categorical_crossentropy
from dagbldr.optimizers import sgd
from dagbldr.datasets import load_digits

# Common between tests
# Module-level fixtures shared by every test in this file: the digits
# dataset with labels converted to a one-hot matrix.
digits = load_digits()
X = digits["data"]
y = digits["target"]
n_classes = len(set(y))  # number of distinct digit classes
y = convert_to_one_hot(y, n_classes)


def test_feedforward_classifier():
    """Build a small linear -> softmax classifier graph on the digits data.

    Wires X/y into a dagbldr computation graph, stacks a 20-unit linear
    layer and a softmax output over ``n_classes``.

    NOTE(review): this chunk appears truncated — ``minibatch_size`` is
    unused and the imported ``categorical_crossentropy``/``sgd``/
    ``get_params_and_grads``/``early_stopping_trainer`` are never called
    here; the training/assertion tail of the test is presumably outside
    this view. Confirm against the full file.
    """
    minibatch_size = 100
    random_state = np.random.RandomState(1999)  # fixed seed for reproducible init
    graph = OrderedDict()
    # Register the dataset arrays as symbolic inputs of the graph.
    X_sym, y_sym = add_datasets_to_graph([X, y], ["X", "y"], graph)
    # Hidden projection to 20 dims, then softmax over the digit classes.
    l1_o = linear_layer([X_sym], graph, 'l1', proj_dim=20, random_state=random_state)
    y_pred = softmax_layer([l1_o], graph, 'pred', n_classes, random_state=random_state)
from dagbldr.datasets import load_digits
from dagbldr.optimizers import sgd
from dagbldr.utils import add_embedding_datasets_to_graph, convert_to_one_hot
from dagbldr.utils import add_datasets_to_graph
from dagbldr.utils import get_params_and_grads, make_embedding_minibatch
from dagbldr.nodes import fixed_projection_layer, embedding_layer
from dagbldr.nodes import projection_layer, linear_layer, softmax_layer
from dagbldr.nodes import softmax_zeros_layer, maxout_layer
from dagbldr.nodes import sigmoid_layer, tanh_layer, softplus_layer
from dagbldr.nodes import exp_layer, relu_layer, dropout_layer
from dagbldr.nodes import softmax_sample_layer, gaussian_sample_layer
from dagbldr.nodes import gaussian_log_sample_layer, conv2d_layer
from dagbldr.nodes import pool2d_layer

# Common between tests
# Shared fixtures: digits data with one-hot targets, mirroring the setup
# of the previous chunk (this looks like a second test file concatenated
# into the same source).
digits = load_digits()
X = digits["data"]
y = digits["target"]
n_classes = len(set(y))  # number of distinct digit classes
y = convert_to_one_hot(y, n_classes)


def run_common_layer(layer):
    """Smoke-test a dagbldr layer constructor with the shared fixtures.

    Builds the given ``layer`` twice in one graph — once on ``X`` alone
    ('single') and once on the concatenation of ``X`` and ``y``
    ('concat') — each projecting to 5 dims with a fixed seed.

    NOTE(review): ``np`` and ``OrderedDict`` are used here but not
    imported in this visible chunk — presumably imported elsewhere in
    the full file; verify. The body also ends on a bare comment, so the
    reuse check it announces appears to be truncated out of view.
    """
    random_state = np.random.RandomState(42)  # fixed seed for reproducible init
    graph = OrderedDict()
    X_sym, y_sym = add_datasets_to_graph([X, y], ["X", "y"], graph)
    single_o = layer([X_sym], graph, 'single', proj_dim=5, random_state=random_state)
    concat_o = layer([X_sym, y_sym], graph, 'concat', proj_dim=5, random_state=random_state)
    # Check that things can be reused
def test_digits():
    """Sanity-check the digits loader: one target per data row.

    Uses a plain ``assert`` instead of nose's ``assert_equal`` — nose is
    unmaintained and ``assert_equal`` is not imported in this chunk, so
    the bare name would raise ``NameError`` under pytest. Plain asserts
    are the pytest idiom and give equivalent failure reporting.
    """
    digits = load_digits()
    assert len(digits["data"]) == len(digits["target"])