Example #1
    def __init__(self, ls_neurons_per_layers, used_sectors, learning_rate):
        ''' Initialization ... same parameters as for DiscreteMLMVN

        @see DiscreteMLMVN.__init__
        '''
        ## Sectors used to compute the activation function of neurons in the
        # last layer
        self.sects = used_sectors

        ## Learning rate defining the speed of learning
        self.learning_rate = learning_rate

        ## List of layers of the network
        self.ls_layers = []

        # create first and hidden layers
        for (n_inputs, n_neurons) in zip(ls_neurons_per_layers[:-2],
                                         ls_neurons_per_layers[1:-1]):
            self.ls_layers.append(
                layer.MVNLayer(n_neurons,
                               n_inputs,
                               learning_rate=self.learning_rate))

        # create last layer
        n_inputs = ls_neurons_per_layers[-2]
        n_neurons = ls_neurons_per_layers[-1]
        self.ls_layers.append(
            layer.MVNLastLayer(n_neurons,
                               n_inputs,
                               activation_func=self.sects.activation_function,
                               learning_rate=self.learning_rate))

        # set upper layers of self.ls_layers for backpropagation alg.
        for (this_l, upper_l) in zip(self.ls_layers[:-1], self.ls_layers[1:]):
            this_l.set_upper_layer(upper_l)
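
A minimal usage sketch for this constructor, assuming the class is DiscreteMLMVN (the docstring's cross-reference); the FakeSectors stub is hypothetical and models only the single attribute the constructor reads, activation_function.

# Hypothetical sectors stub -- the constructor above only ever reads
# `used_sectors.activation_function`, so that is all we model here.
class FakeSectors:
    @staticmethod
    def activation_function(z):
        # purely illustrative: project the weighted sum onto the unit circle
        return z / abs(z)

# [20, 8, 4, 2] -> layers 20->8 and 8->4 (MVNLayer) plus 4->2 (MVNLastLayer)
net = DiscreteMLMVN([20, 8, 4, 2], FakeSectors(), learning_rate=1.0)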
Example #2
    def __init__(self, ls_neurons_per_layers, learning_rate=1):
        ''' Initialization ... same parameters as for DiscreteMLMVN, but
        no information about the neurons' sectors is needed.

        @see DiscreteMLMVN.__init__
        '''

        ## Learning rate defining the speed of learning
        self.learning_rate = learning_rate

        ## List of layers of the network
        self.ls_layers = []

        # create first and hidden layers with continuous neurons
        for (n_inputs, n_neurons) in zip(ls_neurons_per_layers[:-2],
                                         ls_neurons_per_layers[1:-1]):
            self.ls_layers.append(
                layer.MVNLayer(n_neurons,
                               n_inputs,
                               learning_rate=self.learning_rate))

        # create last layer with continuous neurons
        n_inputs = ls_neurons_per_layers[-2]
        n_neurons = ls_neurons_per_layers[-1]
        self.ls_layers.append(
            layer.MVNLastLayer(n_neurons,
                               n_inputs,
                               learning_rate=self.learning_rate))

        # set upper layers of self.ls_layers for backpropagation alg.
        for (this_l, upper_l) in zip(self.ls_layers[:-1], self.ls_layers[1:]):
            this_l.set_upper_layer(upper_l)
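
A hedged usage sketch for this continuous variant; the class name ContinuousMLMVN is an assumption (the snippet shows only the constructor body), chosen to mirror the DiscreteMLMVN naming referenced in the docstring.

# Hypothetical class name -- not shown in the snippet above.
net = ContinuousMLMVN([20, 8, 4, 2])                # learning_rate defaults to 1
slow_net = ContinuousMLMVN([20, 8, 4, 2], learning_rate=0.5)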
Example #3
    def __init__(self, ls_neurons_per_layers, used_sectors, learning_rate):
        ''' Initializes the network

        @param ls_neurons_per_layers List specifying the number of layers
            and the number of neurons in each.\n Example:
            [<#inputs for first layer>, <#neurons first layer>,
            <#neurons hidden layer>, <#neurons output layer>]\n
            Example 2:
@verbatim
     [20,8,4,2] therefore indicates:
            layer 1: 20 inputs, 8 neurons
            layer 2: 8 inputs, 4 neurons
            last layer: 4 inputs, 2 neurons (network outputs)
@endverbatim

        @param used_sectors Sectors used to encode/transform data.\n
                            Sectors provide the activation function for the
                            neurons.\n If omitted, continuous sectors and
                            their activation function are used throughout
                            the whole network.
        @param learning_rate Specifies the speed of learning. It should lie
                            in the interval (0,1] (where 1 == full speed),
                            but higher speeds are also possible.
        '''

        ## Sectors used to compute the activation function of neurons
        self.sects = used_sectors

        ## Learning rate defining the speed of learning
        self.learning_rate = learning_rate

        ## List of layers of the network
        self.ls_layers = []

        # create first and hidden layers
        for (n_inputs, n_neurons) in zip(ls_neurons_per_layers[:-2],
                                         ls_neurons_per_layers[1:-1]):
            self.ls_layers.append(
                layer.MVNLayer(n_neurons,
                               n_inputs,
                               activation_func=self.sects.activation_function,
                               learning_rate=self.learning_rate))

        # create last layer
        n_inputs = ls_neurons_per_layers[-2]
        n_neurons = ls_neurons_per_layers[-1]
        self.ls_layers.append(
            layer.MVNLastLayer(n_neurons,
                               n_inputs,
                               activation_func=self.sects.activation_function,
                               learning_rate=self.learning_rate))

        # set upper layers of self.ls_layers for backpropagation alg.
        for (this_l, upper_l) in zip(self.ls_layers[:-1], self.ls_layers[1:]):
            this_l.set_upper_layer(upper_l)
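
The slicing used by the loops above can be checked in isolation; this standalone sketch reproduces how the size list is paired into (n_inputs, n_neurons) tuples, matching the docstring's [20,8,4,2] example.

sizes = [20, 8, 4, 2]
# first and hidden layers: pair each size with its successor, excluding the last
hidden_pairs = list(zip(sizes[:-2], sizes[1:-1]))   # [(20, 8), (8, 4)]
# last layer: the final two entries
last_pair = (sizes[-2], sizes[-1])                  # (4, 2)
print(hidden_pairs, last_pair)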
Example #4
    def __init__(self, ls_neurons_per_layers, learning_rate):
        ''' Initializes the network

        @param ls_neurons_per_layers List specifying the number of layers
            and the number of neurons in each.\n Example:
            [<#inputs for first layer>, <#neurons first layer>,
            <#neurons hidden layer>, <#neurons output layer>]\n
            Example 2:
@verbatim
     [20,8,4,2] therefore indicates:
            layer 1: 20 inputs, 8 neurons
            layer 2: 8 inputs, 4 neurons
            last layer: 4 inputs, 2 neurons (network outputs)
@endverbatim

        @param learning_rate Specifies the speed of learning. It should lie
                            in the interval (0,1] (where 1 == full speed),
                            but higher speeds are also possible.
        '''

        ## List of layers of the network
        self.ls_layers = []

        # create first and hidden layers with continuous neurons
        for (n_inputs, n_neurons) in zip(ls_neurons_per_layers[:-2],
                                         ls_neurons_per_layers[1:-1]):
            self.ls_layers.append(layer.MVNLayer(n_neurons, n_inputs))

        # create last layer with continuous neurons
        n_inputs = ls_neurons_per_layers[-2]
        n_neurons = ls_neurons_per_layers[-1]
        self.ls_layers.append(layer.MVNLastLayer(n_neurons, n_inputs))

        # set upper layers of self.ls_layers for backpropagation alg.
        for (this_l, upper_l) in zip(self.ls_layers[:-1], self.ls_layers[1:]):
            this_l.set_upper_layer(upper_l)

        # set learning rate for each layer
        self.set_learning_rate(learning_rate)
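
Unlike the other constructors, this one defers the rate to set_learning_rate instead of passing it to each layer. That method is not part of the snippet; the sketch below is a plausible minimal implementation, assuming each layer exposes a writable learning_rate attribute (the same name the other constructors pass as a keyword argument).

    def set_learning_rate(self, learning_rate):
        ''' Sets the learning rate for the network and all of its layers.

        Hypothetical sketch -- the real method is not shown above.
        '''
        ## Learning rate defining the speed of learning
        self.learning_rate = learning_rate
        for l in self.ls_layers:
            # assumed writable attribute, mirroring the `learning_rate=`
            # keyword used in the other constructors
            l.learning_rate = learning_rate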