Example #1
def prepare():
    """ Shape the dataset, and build the linear classifier """
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork
    from pybrain.datasets import SupervisedDataSet
    D = SupervisedDataSet(3, 1)
    for c, f, i in data:
        D.addSample([1, f, i], [c])

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(D.indim, name='in'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.sortModules()
    return D, net
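
A hedged follow-up sketch of how the returned dataset and network might be trained; the `data` tuples below are placeholder values, and BackpropTrainer is one possible choice of trainer (the training step is not part of the listing above).

from pybrain.supervised.trainers import BackpropTrainer

# Placeholder training data: (class, f, i) tuples, as expected by prepare().
data = [(0, 0.2, 0.1), (0, 0.3, 0.2), (1, 0.8, 0.9), (1, 0.9, 0.7)]

D, net = prepare()
trainer = BackpropTrainer(net, dataset=D)
for _ in range(100):
    err = trainer.train()            # one pass over the dataset, returns mean error
print('final training error:', err)
print(net.activate([1, 0.85, 0.8]))  # prediction for a new (bias, f, i) input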
Example #2
def buildNet(indim, hidden, outdim=2, temperature=1., recurrent=True):
    from pybrain import FullConnection, BiasUnit, TanhLayer, SoftmaxLayer, RecurrentNetwork, LinearLayer, LinearConnection, FeedForwardNetwork, SigmoidLayer
    if recurrent:
        net = RecurrentNetwork()
    else:
        net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(indim, name = 'i'))
    net.addModule(TanhLayer(hidden, name = 'h'))
    net.addModule(BiasUnit('bias'))
    net.addModule(SigmoidLayer(outdim, name = 'unscaled'))
    net.addOutputModule(SoftmaxLayer(outdim, name = 'o'))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['unscaled']))
    net.addConnection(FullConnection(net['h'], net['unscaled']))
    lconn = LinearConnection(net['unscaled'], net['o'])
    lconn._setParameters([1./temperature]*outdim)
    # these are fixed. 
    lconn.paramdim = 0
    net.addConnection(lconn)
    if recurrent:
        net.addRecurrentConnection(FullConnection(net['h'], net['h']))
    net.sortModules()
    print(net)
    print('number of parameters', net.paramdim)
    return net
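
A hedged usage sketch; the layer sizes and the input vector below are arbitrary illustration values.

# Build a small recurrent classifier: 4 inputs, 6 tanh hidden units, 2 softmax outputs.
net = buildNet(indim=4, hidden=6, outdim=2, temperature=0.5, recurrent=True)
out = net.activate([0.1, 0.2, 0.3, 0.4])
print(out, out.sum())   # temperature-scaled softmax output, sums to 1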
Example #3
def testPlot1():
    dim = 15
    from scipy import rand, dot
    from pybrain.datasets import SupervisedDataSet
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork
    from pybrain.utilities import dense_orth
    # ModuleWrapper, SGD and pylab are used below; as in Example #13, they are
    # assumed to come from the surrounding project.
    from core.datainterface import ModuleWrapper
    from algorithms import SGD
    import pylab
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(dim, name='in'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.sortModules()

    ds = SupervisedDataSet(dim, 1)
    ds2 = SupervisedDataSet(dim, 1)
    R = dense_orth(dim)
    for _ in range(1000):
        tmp = rand(dim) > 0.5
        tmp2 = dot(tmp, R)
        ds.addSample(tmp, [tmp[-1]])
        ds2.addSample(tmp2, [tmp[-1]])

    f = ModuleWrapper(ds, net)
    f2 = ModuleWrapper(ds2, net)

    # tracking progress by callback
    ltrace = []

    def storer(a):
        ltrace.append(a.provider.currentLosses(a.bestParameters))

    x = net.params
    x *= 0.001

    algo = SGD(f, net.params.copy(), callback=storer, learning_rate=0.2)
    algo.run(1000)
    pylab.plot(ltrace, 'r-')

    del ltrace[:]

    algo = SGD(f2, net.params.copy(), callback=storer, learning_rate=0.2)
    algo.run(1000)
    pylab.plot(ltrace, 'g-')

    pylab.semilogy()
    pylab.show()
Example #5
    def train(self):
        """Train neuron grid by training sample"""

        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(self.input_neurons)
        hiddenLayer = SigmoidLayer(self.hiden_neurons)
        outLayer = LinearLayer(self.OUTPUT_NEURONS)

        self.net.addInputModule(inLayer)

        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)
        self.net.sortModules()

        ds = ClassificationDataSet(self.input_neurons,
                                   self.OUTPUT_NEURONS,
                                   nb_classes=3)
        for i, coord in enumerate(self.X):
            ds.addSample(coord, (self.y[i], ))

        trainer = BackpropTrainer(self.net,
                                  dataset=ds,
                                  momentum=0.1,
                                  verbose=True,
                                  weightdecay=0.01)

        if self.maxErr:
            for i in range(self.maxEpochs):
                if trainer.train() < self.maxErr:
                    print "Desired error reached"
                    break
        else:
            trainer.trainUntilConvergence(maxEpochs=self.maxEpochs)

        print "Successfully finished"
Example #7
import pandas as pd
from pybrain import FeedForwardNetwork
from pybrain import LinearLayer, SigmoidLayer
from pybrain import FullConnection
import numpy as np
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
import matplotlib.pylab as plt


dateparse = lambda dates: pd.datetime.strptime(dates, '%Y-%m')
data = pd.read_csv('AirPassengers.csv', parse_dates=['Month'], index_col='Month', date_parser=dateparse)
ts = data['#Passengers']

# Make a new FFN object:
n = FeedForwardNetwork()

# Constructing the input, output and hidden layers:
inLayer = LinearLayer(3)
hiddenLayer = SigmoidLayer(4)
outLayer = LinearLayer(1)

# Adding layers to the network:
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

# determining how neurons should be connected:
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
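
The listing stops before the connections are attached to the network; below is a hedged sketch of one plausible continuation. The 3-lag sliding window and the min-max scaling are illustrative assumptions, not part of the original code.

# Attach the connections and finalize the network (continuation sketch).
n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)
n.sortModules()

# Assumed preprocessing: scale passenger counts into [0, 1] and build 3-lag windows.
values = ts.values.astype(float)
values = (values - values.min()) / (values.max() - values.min())
ds = SupervisedDataSet(3, 1)
for i in range(len(values) - 3):
    ds.addSample(values[i:i + 3], [values[i + 3]])

trainer = BackpropTrainer(n, dataset=ds)
for _ in range(100):
    trainer.train()

predicted = [n.activate(values[i:i + 3])[0] for i in range(len(values) - 3)]
plt.plot(values[3:], label='actual')
plt.plot(predicted, label='predicted')
plt.legend()
plt.show()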
Example #8
    def apply_custom_network(self, hidden_counts):
        """
        Changes a network to a new one with possibly multiple layers with various hidden neurons count
        :param hidden_counts: an array with the number of hidden nodes in every hidden layer. For example:
        [3, 4, 5] means an NN with 3 hidden layers, with 3 hidden neurons on the 1st layer and so on...
        :return: self
        """
        network = FeedForwardNetwork()
        in_layer = LinearLayer(self.inp_cnt)
        network.addInputModule(in_layer)
        out_layer = SoftmaxLayer(self.out_cnt)
        network.addOutputModule(out_layer)

        hidden_layer = SigmoidLayer(hidden_counts[0])
        network.addModule(hidden_layer)
        in_to_hidden = FullConnection(in_layer, hidden_layer)
        network.addConnection(in_to_hidden)

        for i in range(1, len(hidden_counts)):
            last_hidden_layer = hidden_layer
            hidden_layer = SigmoidLayer(hidden_counts[i])
            network.addModule(hidden_layer)
            hidden_to_hidden = FullConnection(last_hidden_layer, hidden_layer)
            network.addConnection(hidden_to_hidden)

        hidden_to_out = FullConnection(hidden_layer, out_layer)
        network.addConnection(hidden_to_out)
        network.sortModules()
        self.network = network
        return self
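
For a runnable illustration of the same layer-chaining idea outside the class, here is a minimal standalone sketch; chain_network is a hypothetical helper written for this example, not part of the original code.

from pybrain import FeedForwardNetwork, LinearLayer, SigmoidLayer, SoftmaxLayer, FullConnection

def chain_network(inp_cnt, hidden_counts, out_cnt):
    # Wire input -> hidden_1 -> ... -> hidden_n -> softmax output, as above.
    net = FeedForwardNetwork()
    previous = LinearLayer(inp_cnt)
    net.addInputModule(previous)
    for count in hidden_counts:
        layer = SigmoidLayer(count)
        net.addModule(layer)
        net.addConnection(FullConnection(previous, layer))
        previous = layer
    out_layer = SoftmaxLayer(out_cnt)
    net.addOutputModule(out_layer)
    net.addConnection(FullConnection(previous, out_layer))
    net.sortModules()
    return net

net = chain_network(4, [3, 4, 5], 2)         # [3, 4, 5] = three hidden layers
print(net.activate([0.1, 0.2, 0.3, 0.4]))    # softmax probabilities for 2 classes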
Example #9
def testBank():
    D = readData()    
    print(len(D), 'samples', D.indim, 'features')
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork, BiasUnit, SigmoidLayer
    # ModuleWrapper and SGD are assumed to come from the surrounding project
    # (see Example #13); readData() and printy() are project helpers not shown here.
    from core.datainterface import ModuleWrapper
    from algorithms import SGD
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(D.indim, name='in'))
    net.addModule(BiasUnit(name='bias'))
    net.addOutputModule(SigmoidLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['out']))
    net.addConnection(FullConnection(net['bias'], net['out']))
    net.sortModules()
    p = net.params
    p *= 0.01
    provider = ModuleWrapper(D, net, shuffling=False)
    algo = SGD(provider, net.params.copy(), #callback=printy, 
           learning_rate=5.5e-5)
    #algo = vSGDfd(provider, net.params.copy(), #callback=printy
    #              )
    printy(algo, force=True)
    algo.run(len(D))
    printy(algo, force=True)
    algo.run(len(D))
    printy(algo, force=True)
    algo.run(len(D))
    printy(algo, force=True)
    algo.run(len(D))
    printy(algo, force=True)
    algo.run(len(D))
    printy(algo, force=True)
Example #10
class NeuronDecider(BaseDecider):
    """
    The class is responsible for learning to recognize a certain group of objects.

    Attributes
    -----------
    hiden_neurons : int
        Number of hidden neurons.

    OUTPUT_NEURONS : int
        Number of output neurons.

    input_neurons : int
        Number of input neurons.

    X_train : numpy array of arrays of floats
        Each item of the array contains the "coordinates" of a train
        object.

    X_test : numpy array of arrays of floats
        Each item of the array contains the "coordinates" of a test
        object.

    y_train : numpy array of ints
        Each item of the array contains the number of the group to which the
        train object belongs. Positions in the array correspond to items
        in X_train.

    y_test : numpy array of ints
        Each item of the array contains the number of the group to which the
        test object belongs. Positions in the array correspond to items
        in X_test.

    maxErr : float
        Maximal error which would satisfy the trainer

    maxEpochs : int
        Maximum number of epochs for training
    """

    OUTPUT_NEURONS = 1

    def __init__(self,
                 treshold=0.5,
                 hidden_neurons=2,
                 maxErr=None,
                 maxEpochs=20000):
        """
        Parameters
        -----------
        hidden_neurons: int
            Number of hidden neurons

        maxErr : NoneType, float
            Maximal error which would satisfy the trainer. If None, the
            trainUntilConvergence method is used

        maxEpochs : int
            Maximum number of epochs for training

        Note
        -----
        Attributes with None values will be updated by setTrainer
        and train methods
        """

        self.hiden_neurons = hidden_neurons

        self.input_neurons = None
        self.X = None
        self.y = None

        self.treshold = treshold
        self.maxEpochs = maxEpochs
        self.maxErr = maxErr

        self.net = None

    def learn(self, searched, others):
        """
        This method loads lists of specific values of searched objects and
        others. Then the sample will be divided into train and test samples
        according to the user.

        Parameters
        -----------
        searched : iterable
            List of searched objects values (their "coordinates")

        others : iterable
            List of other objects values (their "coordinates")

        Returns
        -------
        NoneType
            None
        """
        if not len(searched) or not len(others):
            raise QueryInputError(
                "Decider can't be learned on an empty sample")

        # Resolve number of input neurons
        self.input_neurons = len(searched[0])

        # Input is accepted as a numpy array or as a list
        if type(searched) != list:
            try:
                X = searched.tolist() + others.tolist()
            except AttributeError as err:
                raise AttributeError("Wrong coordinates input: %s" % err)
        elif type(searched) == list:
            X = np.array(searched + others)

        # Note searched objects as 1 and others as 0
        self.y = np.array([1 for i in range(len(searched))] +
                          [0 for i in range(len(others))])
        self.X = X

        self.train()

    def train(self):
        """Train neuron grid by training sample"""

        self.net = FeedForwardNetwork()

        inLayer = LinearLayer(self.input_neurons)
        hiddenLayer = SigmoidLayer(self.hiden_neurons)
        outLayer = LinearLayer(self.OUTPUT_NEURONS)

        self.net.addInputModule(inLayer)

        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        self.net.addConnection(in_to_hidden)
        self.net.addConnection(hidden_to_out)
        self.net.sortModules()

        ds = ClassificationDataSet(self.input_neurons,
                                   self.OUTPUT_NEURONS,
                                   nb_classes=3)
        for i, coord in enumerate(self.X):
            ds.addSample(coord, (self.y[i], ))

        trainer = BackpropTrainer(self.net,
                                  dataset=ds,
                                  momentum=0.1,
                                  verbose=True,
                                  weightdecay=0.01)

        if self.maxErr:
            for i in range(self.maxEpochs):
                if trainer.train() < self.maxErr:
                    print "Desired error reached"
                    break
        else:
            trainer.trainUntilConvergence(maxEpochs=self.maxEpochs)

        print "Successfully finished"

    def evaluate(self, coords):
        """
        Find whether the inspected parameter-space coordinates belong to the
        searched objects

        Parameters
        ----------
        coords : list of lists
            Parameter-space coordinates of the inspected objects

        Returns
        -------
        numpy.array
            Probabilities of membership in the searched group of objects
        """
        pred = []
        for coord in coords:
            p = self.net.activate(coord)[0]
            if p < 0:
                p = 0
            elif p > 1:
                p = 1
            pred.append(p)

        return np.array(pred)
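
A hedged usage sketch with tiny made-up 2-D samples; it assumes the module-level imports (PyBrain layers, numpy, QueryInputError) that this listing does not show.

searched = [[0.9, 0.8], [0.8, 0.9]]   # objects of the searched group (class 1)
others = [[0.1, 0.2], [0.2, 0.1]]     # everything else (class 0)

decider = NeuronDecider(hidden_neurons=2, maxErr=0.05, maxEpochs=500)
decider.learn(searched, others)       # builds and trains the network
probabilities = decider.evaluate([[0.85, 0.85], [0.15, 0.15]])
print(probabilities)                  # membership probabilities clipped to [0, 1]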
Example #11
    def build_network(hidden_layer_size):
        """Creates network

        :param hidden_layer_size: hidden layer size
        :return: the assembled network
        """
        network = FeedForwardNetwork()
        # Create layers and bias unit
        input_layer = LinearLayer(5)
        hidden_layer = SigmoidLayer(hidden_layer_size)
        output_layer = SigmoidLayer(1)
        # output_layer = LinearLayer(1)
        bias = BiasUnit()
        # Create connections
        input_to_hidden = FullConnection(input_layer, hidden_layer)
        hidden_to_output = FullConnection(hidden_layer, output_layer)
        bias_to_hidden = FullConnection(bias, hidden_layer)
        bias_to_output = FullConnection(bias, output_layer)
        # Add layers to network
        network.addInputModule(input_layer)
        network.addModule(hidden_layer)
        network.addOutputModule(output_layer)
        network.addModule(bias)
        # Add connections to network
        network.addConnection(input_to_hidden)
        network.addConnection(hidden_to_output)
        network.addConnection(bias_to_hidden)
        network.addConnection(bias_to_output)
        # Sort modules and return
        network.sortModules()
        return network
Example #12
def set_nn(inp, hid1, out):
    # Make a new FFN object:
    n = FeedForwardNetwork()
    
    # Constructing the input, output and hidden layers:
    inLayer = LinearLayer(inp)
    hiddenLayer1 = TanhLayer(hid1)
#    hiddenLayer2 = TanhLayer(hid2)
    outLayer = LinearLayer(out)
    
    # Adding layers to the network:
    n.addInputModule(inLayer)
    n.addModule(hiddenLayer1)
#    n.addModule(hiddenLayer2)
    n.addOutputModule(outLayer)
    
    # determining how neurons should be connected:
    in_to_hidden = FullConnection(inLayer, hiddenLayer1)
#    hid_to_hid = FullConnection(hiddenLayer1,hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)
    
    # Adding connections to the network
    n.addConnection(in_to_hidden)
#    n.addConnection(hid_to_hid)
    n.addConnection(hidden_to_out)
    
    # Final step that makes our MLP usable
    n.sortModules()
    return n
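
A hedged usage sketch; it assumes FeedForwardNetwork, LinearLayer, TanhLayer and FullConnection are imported in set_nn's module, since the listing omits the imports.

net = set_nn(3, 5, 1)                  # 3 inputs, 5 tanh hidden units, 1 linear output
print(net.activate([0.1, 0.2, 0.3]))   # a single (untrained) output value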
Example #13
def testTraining(D):
    print(len(D), 'samples')
    from core.datainterface import ModuleWrapper
    from algorithms import SGD, vSGDfd
    import pylab
    from pybrain.datasets import SupervisedDataSet
    from pybrain import LinearLayer, FullConnection, FeedForwardNetwork, TanhLayer, BiasUnit
    from pybrain.utilities import dense_orth
    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(D.indim, name='in'))
    net.addModule(BiasUnit(name='bias'))
    net.addModule(TanhLayer(14, name='h'))
    net.addOutputModule(LinearLayer(1, name='out'))
    net.addConnection(FullConnection(net['in'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['h']))
    net.addConnection(FullConnection(net['h'], net['out']))
    net.addConnection(FullConnection(net['bias'], net['out']))
    net.sortModules()
        
    
    # tracking progress by callback
    ltrace = []
    def storer(a):
        if a._num_updates % 10 == 0:
            a.provider.nextSamples(250)
            ltrace.append(pylab.mean(a.provider.currentLosses(a.bestParameters)))
    
    x = net.params
    x *= 0.001
    
    f = ModuleWrapper(D, net)
    #algo = SGD(f, net.params.copy(), callback=storer, learning_rate=0.0001)
    algo = vSGDfd(f, net.params.copy(), callback=storer)
    algo.run(10000)
    pylab.plot(ltrace, 'r-')
    pylab.xlabel('epochs x10')
    pylab.ylabel('MSE')
    pylab.show()
Example #15
    def create_network(hidden_layer_size):
        """Creates sin NN.

        :param hidden_layer_size: size of hidden layer
        :return: network
        """
        # Create network
        network = FeedForwardNetwork()
        # Create network layers and bias
        input_layer = LinearLayer(1)
        hidden_layer = SigmoidLayer(hidden_layer_size)
        output_layer = SigmoidLayer(1)
        bias = BiasUnit()
        # Create connections
        input_hidden_connection = FullConnection(input_layer, hidden_layer)
        hidden_output_connection = FullConnection(hidden_layer, output_layer)
        bias_hidden_connection = FullConnection(bias, hidden_layer)
        bias_output_connection = FullConnection(bias, output_layer)
        # Add network layers and bias to network
        network.addInputModule(input_layer)
        network.addModule(hidden_layer)
        network.addOutputModule(output_layer)
        network.addModule(bias)
        # Add connections between layers and bias
        network.addConnection(input_hidden_connection)
        network.addConnection(hidden_output_connection)
        network.addConnection(bias_hidden_connection)
        network.addConnection(bias_output_connection)
        # Sort modules and return network
        network.sortModules()
        return network
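
A hedged sketch of how such a network might be fitted to sin(x); it assumes create_network is reachable as a plain function or staticmethod, and the sample count, scaling and epoch count are illustrative choices rather than part of the original code.

import numpy as np
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

net = create_network(hidden_layer_size=10)
ds = SupervisedDataSet(1, 1)
for x in np.linspace(0, np.pi, 50):
    ds.addSample([x / np.pi], [np.sin(x)])   # inputs and targets scaled into [0, 1]

trainer = BackpropTrainer(net, dataset=ds)
for _ in range(200):
    trainer.train()
print(net.activate([0.5]))                   # should approach sin(pi/2) = 1.0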
Example #17
import pandas as pd
from pybrain import FeedForwardNetwork
from pybrain import LinearLayer, SigmoidLayer
from pybrain import FullConnection
import numpy as np
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
import matplotlib.pylab as plt

dateparse = lambda dates: pd.datetime.strptime(dates, '%Y-%m')
data = pd.read_csv('AirPassengers.csv',
                   parse_dates=['Month'],
                   index_col='Month',
                   date_parser=dateparse)
ts = data['#Passengers']

# Make a new FFN object:
n = FeedForwardNetwork()

# Constructing the input, output and hidden layers:
inLayer = LinearLayer(3)
hiddenLayer = SigmoidLayer(4)
outLayer = LinearLayer(1)

# Adding layers to the network:
n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

# determining how neurons should be connected:
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)