Example #1
from pybrain.datasets import SupervisedDataSet
from pybrain.structure import FeedForwardNetwork, FullConnection, SigmoidLayer
from pybrain.supervised.trainers import BackpropTrainer


def main():
    a = 0
    for i in range(0, 100):
        inLayer = SigmoidLayer(2)
        hiddenLayer = SigmoidLayer(3)
        outLayer = SigmoidLayer(1)

        net = FeedForwardNetwork()
        net.addInputModule(inLayer)
        net.addModule(hiddenLayer)
        net.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_out = FullConnection(hiddenLayer, outLayer)

        net.addConnection(in_to_hidden)
        net.addConnection(hidden_to_out)

        net.sortModules()

        ds = SupervisedDataSet(2, 1)
        ds.addSample((1, 1), (0,))
        ds.addSample((1, 0), (1,))
        ds.addSample((0, 1), (1,))
        ds.addSample((0, 0), (0,))

        trainer = BackpropTrainer(net, ds)
        trainer.trainUntilConvergence()

        out = net.activate((1, 1))
        if out[0] < 0.5:  # XOR(1, 1) should be 0
            a += 1
    print(str(a) + "/100")
Exemplo n.º 2
0
    def _buildStructure(self, inputdim, insize, inlayer, convSize, numFeatureMaps):
        # build the layers
        outdim = insize - convSize + 1
        hlayer = TanhLayer(outdim * outdim * numFeatureMaps, name='h')
        self.addModule(hlayer)

        outlayer = SigmoidLayer(outdim * outdim, name='out')
        self.addOutputModule(outlayer)

        # build shared weights
        convConns = []
        for i in range(convSize):
            convConns.append(MotherConnection(convSize * numFeatureMaps * inputdim, name='conv' + str(i)))
        outConn = MotherConnection(numFeatureMaps)

        # establish the connections.
        for i in range(outdim):
            for j in range(outdim):
                offset = i * outdim + j
                outmod = ModuleSlice(hlayer, inSliceFrom=offset * numFeatureMaps, inSliceTo=(offset + 1) * numFeatureMaps,
                                     outSliceFrom=offset * numFeatureMaps, outSliceTo=(offset + 1) * numFeatureMaps)
                self.addConnection(SharedFullConnection(outConn, outmod, outlayer, outSliceFrom=offset, outSliceTo=offset + 1))

                for k, mc in enumerate(convConns):
                    offset = insize * (i + k) + j
                    inmod = ModuleSlice(inlayer, outSliceFrom=offset * inputdim, outSliceTo=offset * inputdim + convSize * inputdim)
                    self.addConnection(SharedFullConnection(mc, inmod, outmod))
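The example above relies on PyBrain's weight-sharing machinery: a MotherConnection owns a single parameter vector, and every SharedFullConnection built from it reuses those parameters, which is how the convolutional feature maps share weights across spatial positions. Below is a minimal sketch of that mechanism on a toy network; the layer sizes and names are illustrative, not taken from the example:

from pybrain.structure import FeedForwardNetwork, FullConnection, LinearLayer, SigmoidLayer
from pybrain.structure.connections.shared import MotherConnection, SharedFullConnection

net = FeedForwardNetwork()
inp = LinearLayer(2, name='inp')
h1 = SigmoidLayer(3, name='h1')
h2 = SigmoidLayer(3, name='h2')
out = SigmoidLayer(1, name='out')
net.addInputModule(inp)
net.addModule(h1)
net.addModule(h2)
net.addOutputModule(out)

# a single parameter vector of 2*3 = 6 weights, reused by both input-to-hidden connections
mother = MotherConnection(6, name='shared')
net.addConnection(SharedFullConnection(mother, inp, h1))
net.addConnection(SharedFullConnection(mother, inp, h2))
net.addConnection(FullConnection(h1, out))
net.addConnection(FullConnection(h2, out))
net.sortModules()

print(mother.params)  # the six shared weights live on the mother connection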
Example #3
def trained_cat_dog_RFCNN():
    n = RecurrentNetwork()

    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], name='nmc'))
    n.sortModules()

    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogRFCNN(n)
    return n
Example #4
def trained_cat_dog_ANN():
    n = FeedForwardNetwork()
    d = get_cat_dog_trainset()
    input_size = d.getDimension('input')
    n.addInputModule(LinearLayer(input_size, name='in'))
    n.addModule(SigmoidLayer(input_size + 1500, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))
    n.sortModules()
    n.convertToFastNetwork()
    print('successfully converted to fast network')
    t = BackpropTrainer(n, d, learningrate=0.0001)  #, momentum=0.75)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        count += 1
        if globErr < 0.01:
            break
        if count == 30:
            break

    exportCatDogANN(n)
    return n
Example #5
def trainedRNN():
    n = RecurrentNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.addRecurrentConnection(NMConnection(n['out'], n['out'], name='nmc'))
    # n.addRecurrentConnection(FullConnection(n['out'], n['hidden'], inSliceFrom = 0, inSliceTo = 1, outSliceFrom = 0, outSliceTo = 3))
    n.sortModules()

    draw_connections(n)
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 50:
            return trainedRNN()  # restart with fresh random weights if training stalls
    # exportRNN(n)
    draw_connections(n)

    return n
Example #6
def trainedANN():
    n = FeedForwardNetwork()

    n.addInputModule(LinearLayer(4, name='in'))
    n.addModule(SigmoidLayer(6, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()

    draw_connections(n)
    # d = generateTrainingData()
    d = getDatasetFromFile(root.path() + "/res/dataSet")
    t = BackpropTrainer(n, d, learningrate=0.001, momentum=0.75)
    t.trainOnDataset(d)
    # FIXME: I'm not sure the recurrent ANN is going to converge,
    # so we just train for a fixed number of epochs

    count = 0
    while True:
        globErr = t.train()
        print(globErr)
        if globErr < 0.01:
            break
        count += 1
        if count == 20:
            return trainedANN()  # restart with fresh random weights if training stalls

    exportANN(n)
    draw_connections(n)

    return n
Example #7
def importCatDogANN(fileName=root.path() + "/res/recCatDogANN"):
    n = FeedForwardNetwork()
    n.addInputModule(LinearLayer(7500, name='in'))
    n.addModule(SigmoidLayer(9000, name='hidden'))
    n.addOutputModule(LinearLayer(2, name='out'))
    n.addConnection(FullConnection(n['in'], n['hidden'], name='c1'))
    n.addConnection(FullConnection(n['hidden'], n['out'], name='c2'))

    n.sortModules()
    params = np.load(root.path() + '/res/cat_dog_params.txt.npy')
    n._setParameters(params)
    return n
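The export helpers called in the earlier examples (exportANN, exportCatDogANN, exportCatDogRFCNN) are not shown. Given that importCatDogANN restores the weights with np.load and n._setParameters, a matching export is presumably just an np.save of the flat parameter vector; the following is a hypothetical sketch, not the source's implementation:

import numpy as np

def exportCatDogANN(net, fileName=None):
    # hypothetical counterpart to importCatDogANN: persist the flat parameter vector.
    # np.save appends '.npy', which matches the 'cat_dog_params.txt.npy' file loaded above.
    if fileName is None:
        fileName = root.path() + '/res/cat_dog_params.txt'
    np.save(fileName, net.params)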
Example #8
from pybrain.datasets.supervised import SupervisedDataSet
from pybrain.structure.connections.full import FullConnection
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer

network = FeedForwardNetwork()  # create network
inputLayer = SigmoidLayer(1)  # a LinearLayer is the more usual choice for an input layer
hiddenLayer = SigmoidLayer(4)
outputLayer = SigmoidLayer(1)  # for regression targets, LinearLayer is also common here

network.addInputModule(inputLayer)
network.addModule(hiddenLayer)
network.addOutputModule(outputLayer)
# Connection
network.addConnection(FullConnection(inputLayer, hiddenLayer))
network.addConnection(FullConnection(hiddenLayer, outputLayer))

network.sortModules()

dataTrain = SupervisedDataSet(1, 1)  # input, target
dataTrain.addSample(1, 0.76)  # a single (x, y) sample of the function sin(x)*sin(2*x)

trainer = BackpropTrainer(network, dataTrain)  # backprop trainer over our network and data
print(trainer.train())  # trains for one epoch and prints the resulting error

print(network.params)  # the network's weights as one flat array
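One sample is not enough to learn anything; taking the comment at face value, the targets come from sin(x)*sin(2*x), so a fuller training set could be generated along these lines (a sketch under that assumption; the sampling range and density are arbitrary):

import math

dataTrain = SupervisedDataSet(1, 1)
for i in range(200):
    x = i * 2 * math.pi / 200  # sample x over [0, 2*pi)
    dataTrain.addSample((x,), (math.sin(x) * math.sin(2 * x),))

trainer = BackpropTrainer(network, dataTrain)
for epoch in range(100):
    trainer.train()  # each call trains one epoch and returns the error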
Example #9
    def __init__(self, dim, sparsity=0.1, beta=1, resetAfterTraining=True, name=None):
        SigmoidLayer.__init__(self, dim, name)
        self.r = sparsity
        self.beta = beta
        self.resetAfterTraining = resetAfterTraining
        self.resetAverage()
Example #10
    def buildBMTrainer(self):
        x, y = self.readexcel()
        # simulate `size` rows of data:
        # self.writeexcel(size=100)
        # resx=contrib(x,0.9)
        # print '**********************'
        # print resx
        # x1=x[:,[3,4,5,6,7,8,9,10,11,0,1,2]]
        # resx1=contrib(x1)
        # print '**********************'
        # print resx1

        self.realy = y
        per = len(x)
        # normalize the data (generally required when using sigmoid units)
        self.sx = MinMaxScaler()
        self.sy = MinMaxScaler()

        xTrain = x[:per]
        xTrain = self.sx.fit_transform(xTrain)
        yTrain = y[:per]
        yTrain = self.sy.fit_transform(yTrain)

        # initialize the feedforward network
        self.__fnn = FeedForwardNetwork()

        # build the input, hidden and output layers (3-5 hidden layers is typical; avoid too many)
        inLayer = LinearLayer(x.shape[1], 'inLayer')
        hiddenLayer0 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer0')
        hiddenLayer1 = TanhLayer(self.hiddendim, 'hiddenLayer1')
        hiddenLayer2 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer2')
        outLayer = LinearLayer(self.rescol, 'outLayer')

        # add the input, hidden and output layers to the fnn
        self.__fnn.addInputModule(inLayer)
        self.__fnn.addModule(hiddenLayer0)
        self.__fnn.addModule(hiddenLayer1)
        self.__fnn.addModule(hiddenLayer2)
        self.__fnn.addOutputModule(outLayer)

        # create full connections between consecutive layers
        in_to_hidden = FullConnection(inLayer, hiddenLayer0)
        hidden_to_hidden0 = FullConnection(hiddenLayer0, hiddenLayer1)
        hidden_to_hidden1 = FullConnection(hiddenLayer1, hiddenLayer2)
        hidden_to_out = FullConnection(hiddenLayer2, outLayer)

        # register the connections with the fnn
        self.__fnn.addConnection(in_to_hidden)
        self.__fnn.addConnection(hidden_to_hidden0)
        self.__fnn.addConnection(hidden_to_hidden1)
        self.__fnn.addConnection(hidden_to_out)
        self.__fnn.sortModules()
        # initialize the supervised dataset
        DS = SupervisedDataSet(x.shape[1], self.rescol)

        # add the training data and labels to DS
        for i in range(len(xTrain)):
            DS.addSample(xTrain[i], yTrain[i])

        # train with backprop until convergence (maxEpochs=10000)
        trainer = BMBackpropTrainer(self.__fnn,
                                    DS,
                                    learningrate=0.0001,
                                    verbose=self.verbose)
        if self.myalg:
            trainingErrors = trainer.bmtrain(maxEpochs=10000,
                                             verbose=True,
                                             continueEpochs=3000,
                                             totalError=0.0001)
        else:
            trainingErrors = trainer.trainUntilConvergence(
                maxEpochs=10000, continueEpochs=3000, validationProportion=0.1)
        # CV = CrossValidator(trainer, DS, n_folds=4, valfunc=ModuleValidator.MSE)
        # CV.validate()
        # CrossValidator
        # trainingErrors = trainer.trainUntilConvergence(maxEpochs=10000,continueEpochs=5000, validationProportion=0.1)
        # self.finalError = trainingErrors[0][-2]
        # self.finalerror=trainingErrors[0][-2]
        # if (self.verbose):
        #     print('final overall error:', self.finalError)
        self.__sy = self.sy
        self.__sx = self.sx
        for i in range(len(xTrain)):
            # undo the scaling to get predictions back in the original units
            a = self.sy.inverse_transform(
                self.__fnn.activate(xTrain[i]).reshape(-1, 1))
            self.restest.append(a[0][0])
Example #11
from pybrain.datasets import SupervisedDataSet
from pybrain.structure import FeedForwardNetwork, FullConnection, LinearLayer, SigmoidLayer

ds = SupervisedDataSet(2, 1)

ds.addSample((0, 0), (0, ))
ds.addSample((0, 1), (1, ))
ds.addSample((1, 0), (1, ))
ds.addSample((1, 1), (0, ))

for input, target in ds:
    print(input, target)

#define layers and connections
inLayer = LinearLayer(2)
hiddenLayerOne = SigmoidLayer(4, "one")
hiddenLayerTwo = SigmoidLayer(4, "two")
outLayer = LinearLayer(1)
inToHiddenOne = FullConnection(inLayer, hiddenLayerOne)
hiddenOneToTwo = FullConnection(hiddenLayerOne, hiddenLayerTwo)
hiddenTwoToOut = FullConnection(hiddenLayerTwo, outLayer)

#wire the layers and connections to a net
net = FeedForwardNetwork()
net.addInputModule(inLayer)
net.addModule(hiddenLayerOne)
net.addModule(hiddenLayerTwo)
net.addOutputModule(outLayer)
net.addConnection(inToHiddenOne)
net.addConnection(hiddenOneToTwo)
net.addConnection(hiddenTwoToOut)
net.sortModules()  # required before the network can be activated
Example #12

from pybrain.datasets import SupervisedDataSet
from pybrain.structure import RecurrentNetwork, FullConnection, LinearLayer, SigmoidLayer
from pybrain.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)

ds.addSample((0, 0), (0, ))
ds.addSample((0, 1), (1, ))
ds.addSample((1, 0), (1, ))
ds.addSample((1, 1), (0, ))

for input, target in ds:
    print(input, target)

#define net
net = RecurrentNetwork()
net.addInputModule(LinearLayer(2, name="il"))
net.addModule(SigmoidLayer(4, name="h1"))
net.addModule(SigmoidLayer(4, name="h2"))
net.addOutputModule(LinearLayer(1, name="ol"))
c1 = FullConnection(net["il"], net["h1"])
c2 = FullConnection(net["h1"], net["h2"])
c3 = FullConnection(net["h2"], net["ol"])
cr1 = FullConnection(net["h1"], net["h1"])
net.addConnection(c1)
net.addConnection(c2)
net.addConnection(c3)
net.addRecurrentConnection(cr1)
net.sortModules()

print(net)

trainer = BackpropTrainer(net, ds)
Example #13
from pybrain.structure import FeedForwardNetwork
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.biasunit import BiasUnit
from pybrain.structure.connections.full import FullConnection

rede = FeedForwardNetwork()
camadaEntrada = LinearLayer(2)
camadaOculta = SigmoidLayer(3)
camadaSaida = SigmoidLayer(1)
bias1 = BiasUnit()
bias2 = BiasUnit()

rede.addInputModule(camadaEntrada)
rede.addModule(camadaOculta)
rede.addOutputModule(camadaSaida)
rede.addModule(bias1)
rede.addModule(bias2)

entradaOculta = FullConnection(camadaEntrada, camadaOculta)
ocultaSaida = FullConnection(camadaOculta, camadaSaida)
biasOculta = FullConnection(bias1, camadaOculta)
biasSaida = FullConnection(bias2, camadaSaida)

rede.addConnection(entradaOculta)
rede.addConnection(ocultaSaida)
rede.addConnection(biasOculta)
rede.addConnection(biasSaida)

rede.sortModules()

print(rede)
print(entradaOculta.params)
print(ocultaSaida.params)
print(biasOculta.params)
print(biasSaida.params)
Example #14
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers.backprop import BackpropTrainer
from pybrain.datasets.supervised import SupervisedDataSet
from BinReader import BinReader
from pybrain.utilities import percentError
from pybrain.datasets.classification import ClassificationDataSet
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.connections.full import FullConnection
from pybrain.tools.xml.networkwriter import NetworkWriter

dim = 381
n = FeedForwardNetwork()
inLayer = LinearLayer(dim)
hiddenLayer = SigmoidLayer(100)
outLayer = LinearLayer(1)

n.addInputModule(inLayer)
n.addModule(hiddenLayer)
n.addOutputModule(outLayer)

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

n.addConnection(in_to_hidden)
n.addConnection(hidden_to_out)

n.sortModules()

Example #15

from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.connections.full import FullConnection
from pybrain.structure.modules.biasunit import BiasUnit

import DatabaseActions  # project-local module providing the champion dictionary

# Next we transform the data into a vectorized format so that it can be used as a training set
aramdata = open("ARAMData.txt", "r")

#ChampionDictionary holds all the riot static data about each champion. The Riot IDs are the keys of the dictionary
championdictionary = DatabaseActions.CreateChampionDictionary()

#Creates a Neural Network of Appropriate size
predictionNet = FeedForwardNetwork()

inLayer = LinearLayer(len(championdictionary))
hiddenLayer = SigmoidLayer(5)
outLayer = SigmoidLayer(1)

predictionNet.addInputModule(inLayer)
predictionNet.addModule(hiddenLayer)
predictionNet.addOutputModule(outLayer)
predictionNet.addModule(BiasUnit(name='bias'))

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

predictionNet.addConnection(in_to_hidden)
predictionNet.addConnection(hidden_to_out)
predictionNet.addConnection(FullConnection(predictionNet['bias'], hiddenLayer))
predictionNet.addConnection(FullConnection(predictionNet['bias'], outLayer))
predictionNet.sortModules()
Example #16

import MNIST_Data  # project-local module wrapping the MNIST data
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.connections.full import FullConnection

# create an object that provides the data source
dataObject = MNIST_Data.MNIST_Processing()
traininglist = dataObject.neural_data_set
traininglabels = dataObject.neural_label_set

# step1
#create neural network
fnn = FeedForwardNetwork()

#set up three layers: input + hidden + output (MNIST images are 28*28 = 784)

# the first feature extraction
#inLayer = LinearLayer(784,name='inLayer')
# the second feature extraction
inLayer = LinearLayer(28, name='inLayer')
hiddenLayer = SigmoidLayer(30, name='hiddenLayer0')
outLayer = LinearLayer(10, name='outLayer')

#There are a couple of different classes of layers. For a complete list check out the modules package.

#add these three Layers into neural network
fnn.addInputModule(inLayer)
fnn.addModule(hiddenLayer)
fnn.addOutputModule(outLayer)

#create the connections between three layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

#add connections into network
fnn.addConnection(in_to_hidden)
fnn.addConnection(hidden_to_out)
fnn.sortModules()
Example #17
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.modules.linearlayer import LinearLayer
from pybrain.structure.modules.sigmoidlayer import SigmoidLayer
from pybrain.structure.connections.full import FullConnection
from pybrain.structure.modules.biasunit import BiasUnit

import DatabaseActions  # project-local module providing the champion dictionary

# Next we transform the data into a vectorized format so that it can be used as a training set
aramdata = open("ARAMData.txt", "r")

#ChampionDictionary holds all the riot static data about each champion. The Riot IDs are the keys of the dictionary
championdictionary = DatabaseActions.CreateChampionDictionary()

#Creates a Neural Network of Appropriate size
predictionNet = FeedForwardNetwork()

inLayer = LinearLayer(len(championdictionary))
hiddenLayer = SigmoidLayer(20)
outLayer = SigmoidLayer(1)

predictionNet.addInputModule(inLayer)
predictionNet.addModule(hiddenLayer)
predictionNet.addOutputModule(outLayer)
predictionNet.addModule(BiasUnit(name='bias'))

in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

predictionNet.addConnection(in_to_hidden)
predictionNet.addConnection(hidden_to_out)
predictionNet.addConnection(FullConnection(predictionNet['bias'], hiddenLayer))
predictionNet.addConnection(FullConnection(predictionNet['bias'], outLayer))
predictionNet.sortModules()
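Both ARAM examples introduce the network with the note that the match data must first be "transformed into a vectorized format". That transformation is not shown; a plausible sketch, assuming one input unit per champion keyed by the IDs in championdictionary (the helper name and encoding are hypothetical), is:

def vectorize_match(picked_champion_ids, championdictionary):
    # hypothetical one-hot encoding: one input unit per champion ID
    index = {cid: i for i, cid in enumerate(sorted(championdictionary))}
    vector = [0.0] * len(championdictionary)
    for cid in picked_champion_ids:
        vector[index[cid]] = 1.0
    return vector

Each vector would then be paired with a win/loss target and added to a SupervisedDataSet(len(championdictionary), 1) before training predictionNet.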
Example #18
import random

from pybrain.structure import FeedForwardNetwork, FullConnection, LinearLayer, SigmoidLayer
from pybrain.structure.modules.neuronlayer import NeuronLayer

from irisdataset import IRIS_TEST_SET, IRIS_TRAIN_SET

# Because the version of pybrain from pip isn't up to date
class ReluLayer(NeuronLayer):
    """ Layer of rectified linear units (relu). """

    def _forwardImplementation(self, inbuf, outbuf):
        outbuf[:] = inbuf * (inbuf > 0)

    def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
        inerr[:] = outerr * (inbuf > 0)

net = FeedForwardNetwork()

inLayer = LinearLayer(4, name="in")
hidden0 = SigmoidLayer(5, name="hidden0")
#hidden1 = SigmoidLayer(5, name="hidden1")
outLayer = SigmoidLayer(3, name="out")

net.addInputModule(inLayer)
net.addModule(hidden0)
#net.addModule(hidden1)
net.addOutputModule(outLayer)

def init_params(conn):
    for i in range(len(conn.params)):
        conn.params[i] = random.uniform(-0.2, 0.2)

in2Hidden = FullConnection(inLayer, hidden0)
#hidden01 = FullConnection(hidden0, hidden1)
hidden2Out = FullConnection(hidden0, outLayer)

net.addConnection(in2Hidden)
net.addConnection(hidden2Out)
net.sortModules()

init_params(in2Hidden)
init_params(hidden2Out)
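The ReluLayer defined at the top of this example is never actually instantiated in the snippet; to train with it, it can be dropped in wherever a hidden SigmoidLayer appears. A minimal sketch reusing the same iris-style dimensions (names here are illustrative):

reluNet = FeedForwardNetwork()
reluHidden = ReluLayer(5, name="relu0")
reluNet.addInputModule(LinearLayer(4, name="in"))
reluNet.addModule(reluHidden)
reluNet.addOutputModule(SigmoidLayer(3, name="out"))
reluNet.addConnection(FullConnection(reluNet["in"], reluHidden))
reluNet.addConnection(FullConnection(reluHidden, reluNet["out"]))
reluNet.sortModules()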