Example #1
    def setUp(self):
        tf.reset_default_graph()

        if self.single_thread:
            # Close the cached multi-threaded session and replace it with a
            # single-threaded one so that results are deterministic.
            sess = tensorflow_session()
            sess.close()

            config = tf.ConfigProto(
                allow_soft_placement=True,
                intra_op_parallelism_threads=1,
                inter_op_parallelism_threads=1,
            )
            tensorflow_session.cache = tf.Session(config=config)

        if not self.verbose:
            logging.disable(logging.CRITICAL)

        # Clean identifiers map for each test
        if hasattr(format_name_if_specified_as_pattern, 'counters'):
            del format_name_if_specified_as_pattern.counters

        utils.reproducible(seed=self.random_seed)
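
For reference, utils.reproducible in these snippets is NeuPy's seeding helper. The sketch below is a hypothetical, minimal equivalent, assuming the helper only needs to seed Python's random module, NumPy, and TensorFlow's graph-level generator; the real NeuPy implementation may do more.

import random

import numpy as np
import tensorflow as tf


def reproducible_sketch(seed=0):
    # Hypothetical stand-in for utils.reproducible: seed every random
    # number generator the examples rely on so repeated runs match.
    random.seed(seed)
    np.random.seed(seed)
    tf.set_random_seed(seed)  # graph-level seed in TensorFlow 1.x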
Example #2
def reproducible_network_train(seed=0, epochs=500, **additional_params):
    """
    Train a Gradient Descent based neural network on the XOR problem
    in a reproducible way and return the trained network.

    Parameters
    ----------
    seed : int
        Random state seed for reproducibility. Defaults to ``0``.
    epochs : int
        Number of epochs for training. Defaults to ``500``.
    **additional_params
        Additional parameters for the neural network.

    Returns
    -------
    GradientDescent instance
        Returns trained network.
    """
    utils.reproducible(seed)

    xor_x_train = np.array([[-1, -1], [-1, 1], [1, -1], [1, 1]])
    xor_y_train = np.array([[1, -1, -1, 1]]).T

    xavier_normal = init.XavierNormal()
    tanh_weight1 = xavier_normal.sample((2, 5), return_array=True)
    tanh_weight2 = xavier_normal.sample((5, 1), return_array=True)

    network = algorithms.GradientDescent(
        network=[
            layers.Input(2),
            layers.Tanh(5, weight=tanh_weight1),
            layers.Tanh(1, weight=tanh_weight2),
        ],
        batch_size=None,
        **additional_params
    )
    network.train(xor_x_train, xor_y_train, epochs=epochs)
    return network
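
A hypothetical usage sketch for reproducible_network_train above: the predict call and the equality check are assumptions about how such a helper would be exercised, not code taken from the examples.

# Hypothetical check: two trainings with the same seed should produce
# identical predictions on the XOR inputs (assumed usage).
network_a = reproducible_network_train(seed=0, epochs=100)
network_b = reproducible_network_train(seed=0, epochs=100)

xor_inputs = np.array([[-1, -1], [-1, 1], [1, -1], [1, 1]])
np.testing.assert_array_almost_equal(
    network_a.predict(xor_inputs),
    network_b.predict(xor_inputs),
)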
Example #3
import matplotlib.pyplot as plt
from sklearn import datasets
from neupy import algorithms, utils

from utils import plot_2d_grid

plt.style.use('ggplot')
utils.reproducible()

if __name__ == '__main__':
    GRID_WIDTH = 20
    GRID_HEIGHT = 1

    data, targets = datasets.make_moons(n_samples=400, noise=0.1)
    data = data[targets == 1]

    sofm = algorithms.SOFM(
        n_inputs=2,
        features_grid=(GRID_HEIGHT, GRID_WIDTH),
        verbose=True,
        shuffle_data=True,

        # The winning neuron is selected based on the Euclidean
        # distance. For this task it's important that the distance
        # is Euclidean; other distances will not give the same results.
        distance='euclid',
        learning_radius=2,
        # Reduce the learning radius by 1 after every 20 epochs.
        # The learning radius will be equal to 2 during the first
        # 20 epochs and will drop to 1 on the 21st epoch.