Example 1
def buildNN2HiddenLayer(trnData, netNo):
    from pybrain.structure import FeedForwardNetwork, RecurrentNetwork
    from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, SoftmaxLayer
    from pybrain.structure import FullConnection

    n = FeedForwardNetwork()
    inLayer = LinearLayer(trnData.indim)  # Define Layer Types
    if netNo == 1 or netNo == 3:
        # hiddenLayer0neurons / hiddenLayer1neurons are module-level globals
        hiddenLayer0 = TanhLayer(hiddenLayer0neurons)  # Tanh
        hiddenLayer1 = SigmoidLayer(hiddenLayer1neurons)  # Sigmoid
    elif netNo == 2:
        # net 2 swaps the two hidden-layer sizes
        hiddenLayer0 = TanhLayer(hiddenLayer1neurons)  # Tanh
        hiddenLayer1 = SigmoidLayer(hiddenLayer0neurons)  # Sigmoid
    else:
        raise ValueError('netNo must be 1, 2 or 3')

    outLayer = SoftmaxLayer(trnData.outdim)  # SoftmaxLayer

    n.addInputModule(inLayer)
    n.addModule(hiddenLayer0)
    n.addModule(hiddenLayer1)
    n.addOutputModule(outLayer)

    in_to_hidden0 = FullConnection(inLayer, hiddenLayer0)  # Define connections
    hidden0_to_hidden1 = FullConnection(hiddenLayer0, hiddenLayer1)
    hidden1_to_out = FullConnection(hiddenLayer1, outLayer)
    n.addConnection(in_to_hidden0)
    n.addConnection(hidden0_to_hidden1)
    n.addConnection(hidden1_to_out)
    n.sortModules()
    return n
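A minimal usage sketch, assuming the two size globals the function reads (hiddenLayer0neurons, hiddenLayer1neurons) and an illustrative ClassificationDataSet; none of these values come from the original code.

from pybrain.datasets import ClassificationDataSet

# Assumed module-level globals read by buildNN2HiddenLayer
hiddenLayer0neurons = 10
hiddenLayer1neurons = 6

trnData = ClassificationDataSet(4, nb_classes=3)
trnData.addSample([0.1, 0.5, 0.2, 0.9], [0])
trnData._convertToOneOfMany()  # makes outdim equal the number of classes

net = buildNN2HiddenLayer(trnData, netNo=1)
print(net.activate([0.1, 0.5, 0.2, 0.9]))  # softmax class probabilities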
Example 2
    def __init__(self, genes=None):

        self.net = FeedForwardNetwork()
        self.inLayer = TanhLayer(16)
        self.hiddenLayer = TanhLayer(20)
        self.hiddenLayer2 = TanhLayer(20)
        self.outLayer = SoftmaxLayer(4)

        self.net.addInputModule(self.inLayer)
        self.net.addModule(self.hiddenLayer)
        self.net.addModule(self.hiddenLayer2)
        self.net.addOutputModule(self.outLayer)

        self.in_to_hidden = FullConnection(self.inLayer, self.hiddenLayer)
        self.hidden1_to_hidden2 = FullConnection(self.hiddenLayer, self.hiddenLayer2)
        self.hidden2_to_out = FullConnection(self.hiddenLayer2, self.outLayer)

        self.net.addConnection(self.in_to_hidden)
        self.net.addConnection(self.hidden1_to_hidden2)
        self.net.addConnection(self.hidden2_to_out)

        self.net.sortModules()

        # Overwrite the randomly initialized weights with the supplied genes
        if genes is not None:
            self.net._setParameters(genes)
Example 3
    def __setUpBrain(self, genome):
        """
        Set up PyBrain's neural network.

        Args:
            genome (G1DList): PyEvolve's individual container
        """
        self.network = FeedForwardNetwork()

        inLayer = TanhLayer(14)
        hiddenLayer = TanhLayer(12)
        hiddenLayer2 = TanhLayer(6)
        outLayer = TanhLayer(2)

        self.network.addInputModule(inLayer)
        self.network.addModule(hiddenLayer)
        self.network.addModule(hiddenLayer2)
        self.network.addOutputModule(outLayer)

        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        hidden_to_hidden2 = FullConnection(hiddenLayer, hiddenLayer2)
        hidden2_to_out = FullConnection(hiddenLayer2, outLayer)

        self.network.addConnection(in_to_hidden)
        self.network.addConnection(hidden_to_hidden2)
        self.network.addConnection(hidden2_to_out)

        self.network.sortModules()

        new_params = numpy.array(genome.genomeList)
        self.network._setParameters(new_params)
Example 4
def buildNN(indim=4, hiddim=6, outdim=3):
    net = FeedForwardNetwork()
    net.addInputModule(TanhLayer(indim, name='i'))
    net.addModule(TanhLayer(hiddim, name='h'))
    net.addOutputModule(ThresholdLayer(outdim, name='o', threshold=0.5))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['h'], net['o']))
    net.sortModules()
    return net
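A hedged aside: ThresholdLayer is not part of the standard pybrain.structure layer set, so the call below assumes the project provides it; the input values are illustrative only.

# Illustrative call; assumes ThresholdLayer resolves in this project
net = buildNN(indim=4, hiddim=6, outdim=3)
print(net.activate([0.2, -0.1, 0.4, 0.0]))  # thresholded 3-dim output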
Example 5
def createNNLong(trndata):
    nn = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = TanhLayer(trndata.outdim, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_out = FullConnection(hiddenLayer, outLayer)
    nn.addConnection(in_to_hidden)
    nn.addConnection(hidden_to_out)
    nn.sortModules()
    return nn
Example 6
def train_net(data_set, n, epochs=1):
    num_inputs = len(data_set[0][0][n])
    ds = SupervisedDataSet(num_inputs, 2)
    for i in range(len(data_set)):
        try:
            ds.appendLinked(data_set[i][0][n],
                            (data_set[i][1], data_set[i][2]))
        except:
            # skip samples that don't have the expected shape
            continue
    print str(len(ds)) + ' points successfully acquired'

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(num_inputs, name='input'))
    net.addInputModule(BiasUnit(name='bias'))
    net.addOutputModule(LinearLayer(2, name='output'))
    net.addModule(SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    net.addModule(TanhLayer(10, name='tanh'))
    net.addConnection(FullConnection(net['bias'], net['sigmoid']))
    net.addConnection(FullConnection(net['bias'], net['tanh']))
    net.addConnection(FullConnection(net['input'], net['sigmoid']))
    net.addConnection(FullConnection(net['sigmoid'], net['tanh']))
    net.addConnection(FullConnection(net['tanh'], net['output']))
    net.sortModules()

    trainer = BackpropTrainer(net,
                              learningrate=0.01,
                              momentum=0.1,
                              verbose=True)

    trainer.trainOnDataset(ds)
    trainer.trainEpochs(epochs)

    return net
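The layout of data_set is not shown in this excerpt; judging from the indexing above, each entry looks like (list_of_feature_vectors, target_a, target_b). A hypothetical call under that assumption:

# Hypothetical input shaped to match the indexing in train_net
data_set = [
    ([[0.2, 0.4, 0.1], [0.3, 0.3, 0.3]], 0.5, -0.2),
    ([[0.9, 0.1, 0.0], [0.5, 0.5, 0.5]], 0.1, 0.7),
]
net = train_net(data_set, n=0, epochs=5)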
Example 7
    def create(number_of_hidden_layers, activation_function, input_length,
               output_length, network_file, classify):
        n = FeedForwardNetwork()
        in_layer = LinearLayer(input_length)
        n.addInputModule(in_layer)

        layer_to_connect_to = in_layer
        for x in range(0, number_of_hidden_layers):
            if activation_function == 'sigmoid':
                hidden_layer = SigmoidLayer(input_length)
            else:
                hidden_layer = TanhLayer(input_length)

            n.addModule(hidden_layer)
            hidden_layer_connection = FullConnection(layer_to_connect_to,
                                                     hidden_layer)
            n.addConnection(hidden_layer_connection)
            layer_to_connect_to = hidden_layer

        if classify:
            out_layer = SoftmaxLayer(output_length)
        else:
            out_layer = LinearLayer(output_length)
        n.addOutputModule(out_layer)

        hidden_to_out = FullConnection(layer_to_connect_to, out_layer)
        n.addConnection(hidden_to_out)
        n.sortModules()
        save_network(n, network_file)
Example 8
def setupNetwork(numHiddenNodes, numHiddenLayers, numFeatures, numSpeakers):

    nn = FeedForwardNetwork()
    inputLayer = LinearLayer(numFeatures)
    nn.addInputModule(inputLayer)

    hiddenLayers = []
    for x in range(numHiddenLayers):
        hiddenLayer = TanhLayer(numHiddenNodes)
        nn.addModule(hiddenLayer)
        hiddenLayers.append(hiddenLayer)
    outputLayer = SoftmaxLayer(numSpeakers)
    nn.addOutputModule(outputLayer)

    inputConnection = FullConnection(inputLayer, hiddenLayers[0])
    nn.addConnection(inputConnection)

    for x in range(numHiddenLayers - 1):
        # connect each hidden layer to the next one in the stack
        connect = FullConnection(hiddenLayers[x], hiddenLayers[x + 1])
        nn.addConnection(connect)

    outputConnection = FullConnection(hiddenLayers[numHiddenLayers - 1],
                                      outputLayer)
    nn.addConnection(outputConnection)
    nn.sortModules()

    return nn
Example 9
def buildParityNet():
    net = RecurrentNetwork()
    net.addInputModule(LinearLayer(1, name = 'i'))
    net.addModule(TanhLayer(2, name = 'h'))
    net.addModule(BiasUnit('bias'))
    net.addOutputModule(TanhLayer(1, name = 'o'))
    net.addConnection(FullConnection(net['i'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['h']))
    net.addConnection(FullConnection(net['bias'], net['o']))
    net.addConnection(FullConnection(net['h'], net['o']))
    net.addRecurrentConnection(FullConnection(net['o'], net['h']))
    net.sortModules()

    p = net.params
    p[:] = [-0.5, -1.5, 1, 1, -1, 1, 1, -1, 1]
    p *= 10.

    return net
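A short usage sketch: the parity net is recurrent, so it should be reset before each sequence and fed one bit at a time. The -1/+1 input encoding here is an assumption, chosen to match the tanh output layer.

net = buildParityNet()
net.reset()  # clear the recurrent state before a new sequence
for bit in [1, -1, -1, 1, 1]:
    out = net.activate([bit])
print(out)  # running parity estimate after the last bit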
Example 10
def build_fnn():
    fnn = FeedForwardNetwork()
    inLayer = LinearLayer(2)
    hiddenLayer = TanhLayer(50)
    outLayer = SoftmaxLayer(2)
    fnn.addInputModule(inLayer)
    fnn.addModule(hiddenLayer)
    fnn.addOutputModule(outLayer)
    # wire the layers together and finalize; without these steps the
    # network cannot be activated
    fnn.addConnection(FullConnection(inLayer, hiddenLayer))
    fnn.addConnection(FullConnection(hiddenLayer, outLayer))
    fnn.sortModules()
    return fnn
Example 11
    def weird_network(self, net):
        bias = BiasUnit(name='bias')
        inlayer = TanhLayer(1, name='input')
        outlayer = TanhLayer(1, name='output')
        gatelayer = GateLayer(1, name='gate')
        # route the bias and the input into the two halves of the gate layer
        con1 = FullConnection(bias, gatelayer, outSliceFrom=0, outSliceTo=1)
        con2 = FullConnection(bias, gatelayer, outSliceFrom=1, outSliceTo=2)
        con3 = FullConnection(inlayer, gatelayer, outSliceFrom=0, outSliceTo=1)
        con4 = FullConnection(inlayer, gatelayer, outSliceFrom=1, outSliceTo=2)
        con5 = FullConnection(gatelayer, outlayer)
        net.addInputModule(inlayer)
        net.addModule(bias)
        net.addModule(gatelayer)
        net.addOutputModule(outlayer)
        net.addConnection(con1)
        net.addConnection(con2)
        net.addConnection(con3)
        net.addConnection(con4)
        net.addConnection(con5)
Example 12
def simple_network_builder(layers, partial_path):
    n = FeedForwardNetwork()
    ## create the network
    inlayer = LinearLayer(layers[0], name="In")
    hidden_one = TanhLayer(layers[1], name="Hidden 1")
    hidden_two = TanhLayer(layers[2], name="Hidden 2")
    b1 = BiasUnit(name="Bias")
    output = LinearLayer(1, name="Out")
    n.addInputModule(inlayer)
    n.addModule(hidden_one)
    n.addModule(hidden_two)
    n.addModule(b1)
    n.addOutputModule(output)
    in_to_one = FullConnection(inlayer, hidden_one)
    one_to_two = FullConnection(hidden_one, hidden_two)
    two_to_out = FullConnection(hidden_two, output)
    b1_to_one = FullConnection(b1, hidden_one)
    b2_to_two = FullConnection(b1, hidden_two)
    b3_to_output = FullConnection(b1, output)
    ### load weights and biases
    in_to_one._setParameters(np.array((csv_loader(partial_path + '_w1.csv'))))
    one_to_two._setParameters(np.array(csv_loader(partial_path + '_w2.csv')))
    two_to_out._setParameters(np.array(csv_loader(partial_path + '_w3.csv')))
    b1_to_one._setParameters(np.array(csv_loader(partial_path + '_b1.csv')))
    b2_to_two._setParameters(np.array(csv_loader(partial_path + '_b2.csv')))
    b3_to_output._setParameters(np.array(csv_loader(partial_path + '_b3.csv')))

    ### connect the network topology
    n.addConnection(in_to_one)
    n.addConnection(one_to_two)
    n.addConnection(two_to_out)
    n.addConnection(b1_to_one)
    n.addConnection(b2_to_two)
    n.addConnection(b3_to_output)

    ### finalize network object
    n.sortModules()

    return n
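csv_loader is not shown in this excerpt; a minimal stand-in, assuming each file holds comma-separated weight values:

import csv

def csv_loader(path):
    # Hypothetical helper: read every number in the CSV into one flat list
    with open(path) as f:
        return [float(v) for row in csv.reader(f) for v in row]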
Example 13
def netBuild(ds):
    #net = buildNetwork(8, 13,13,13, 1)

    # Build the feedforward network fnn
    fnn = FeedForwardNetwork()

    # Set up five layers: one input layer (8 neurons, named inLayer), three hidden layers, and one output layer
    inLayer = LinearLayer(8, name='inLayer')
    hiddenLayer0 = TanhLayer(13, name='hiddenLayer0')
    hiddenLayer1 = TanhLayer(13, name='hiddenLayer1')
    hiddenLayer2 = TanhLayer(13, name='hiddenLayer2')
    outLayer = LinearLayer(1, name='outLayer')

    # Add all five layers (i.e. their neurons) to the network
    fnn.addInputModule(inLayer)
    fnn.addModule(hiddenLayer0)
    fnn.addModule(hiddenLayer1)
    fnn.addModule(hiddenLayer2)
    fnn.addOutputModule(outLayer)

    # Create the connections between the five layers
    in_to_hidden = FullConnection(inLayer, hiddenLayer0)
    hidden_to_hidden0 = FullConnection(hiddenLayer0, hiddenLayer1)
    hidden_to_hidden1 = FullConnection(hiddenLayer1, hiddenLayer2)
    hidden_to_out = FullConnection(hiddenLayer2, outLayer)

    # Add the connections to the network
    fnn.addConnection(in_to_hidden)
    fnn.addConnection(hidden_to_hidden0)
    fnn.addConnection(hidden_to_hidden1)
    fnn.addConnection(hidden_to_out)

    # Finalize the network
    fnn.sortModules()

    print("Training")
    trainer = BackpropTrainer(fnn, ds, verbose=True, learningrate=0.01)
    # trainer.train()
    trainer.trainUntilConvergence(maxEpochs=500)
    print("Finish training")
    return fnn
Example 14
def build_new_nets(data_set, n):
    num_inputs = len(data_set[0][0][n])
    arousal_net = FeedForwardNetwork()
    arousal_net.addInputModule(LinearLayer(num_inputs, name='input'))
    arousal_net.addInputModule(BiasUnit(name='bias'))
    arousal_net.addOutputModule(LinearLayer(1, name='output'))
    arousal_net.addModule(
        SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    arousal_net.addModule(TanhLayer(10, name='tanh'))
    arousal_net.addConnection(
        FullConnection(arousal_net['bias'], arousal_net['sigmoid']))
    arousal_net.addConnection(
        FullConnection(arousal_net['bias'], arousal_net['tanh']))
    arousal_net.addConnection(
        FullConnection(arousal_net['input'], arousal_net['sigmoid']))
    arousal_net.addConnection(
        FullConnection(arousal_net['sigmoid'], arousal_net['tanh']))
    arousal_net.addConnection(
        FullConnection(arousal_net['tanh'], arousal_net['output']))
    arousal_net.sortModules()

    valence_net = FeedForwardNetwork()
    valence_net.addInputModule(LinearLayer(num_inputs, name='input'))
    valence_net.addInputModule(BiasUnit(name='bias'))
    valence_net.addOutputModule(LinearLayer(1, name='output'))
    valence_net.addModule(
        SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    valence_net.addModule(TanhLayer(10, name='tanh'))
    valence_net.addConnection(
        FullConnection(valence_net['bias'], valence_net['sigmoid']))
    valence_net.addConnection(
        FullConnection(valence_net['bias'], valence_net['tanh']))
    valence_net.addConnection(
        FullConnection(valence_net['input'], valence_net['sigmoid']))
    valence_net.addConnection(
        FullConnection(valence_net['sigmoid'], valence_net['tanh']))
    valence_net.addConnection(
        FullConnection(valence_net['tanh'], valence_net['output']))
    valence_net.sortModules()

    return arousal_net, valence_net
Example 15
    def constructNet(self, input, hidden, output):
        inputLayer = LinearLayer(input)
        hiddenLayer = TanhLayer(hidden)
        outputLayer = LinearLayer(output)

        self.net.addInputModule(inputLayer)
        self.net.addModule(hiddenLayer)
        self.net.addOutputModule(outputLayer)

        conn1 = FullConnection(inputLayer, hiddenLayer)
        conn2 = FullConnection(hiddenLayer, outputLayer)

        self.net.addConnection(conn1)
        self.net.addConnection(conn2)
        self.net.sortModules()  # finalize so the net can be activated
Example 16
    def __init__(self, hidden_layers, ally_champ_obj_list,
                 enemy_champ_obj_list):

        self.ally_champ_obj_list = ally_champ_obj_list
        self.enemy_champ_obj_list = enemy_champ_obj_list

        self.set_nodes()

        self.network = FeedForwardNetwork()

        connect_queue = Queue.Queue()

        for layer in xrange(0, hidden_layers):
            connect_queue.put(
                TanhLayer(self.input_node_count,
                          name='hidden_layer_{}'.format(layer)))

        connect_queue.put(SigmoidLayer(1, name='output_layer'))

        prev_layer = LinearLayer(self.input_node_count, name='input_layer')
        self.network.addInputModule(prev_layer)

        while not connect_queue.empty():

            current_layer = connect_queue.get()
            if current_layer.name == 'output_layer':
                self.network.addOutputModule(current_layer)
            else:
                self.network.addModule(current_layer)

            bias = BiasUnit()
            bias_connection = FullConnection(
                bias,
                current_layer,
                name="bias_to_{}_connection".format(current_layer.name))
            self.network.addModule(bias)
            self.network.addConnection(bias_connection)

            connection = FullConnection(prev_layer,
                                        current_layer,
                                        name="{}_to_{}_connection".format(
                                            prev_layer.name,
                                            current_layer.name))
            self.network.addConnection(connection)

            prev_layer = current_layer

        self.network.sortModules()
Example 17
    def __init__(self, hidden_layers, data_index_size):

        self.network = FeedForwardNetwork()

        connect_queue = Queue.Queue()

        for layer in xrange(0, hidden_layers):
            connect_queue.put(
                TanhLayer(data_index_size,
                          name='hidden_layer_{}'.format(layer)))

        connect_queue.put(SigmoidLayer(1, name='output_layer'))

        prev_layer = LinearLayer(data_index_size, name='input_layer')
        self.network.addInputModule(prev_layer)

        while not connect_queue.empty():
            print 'layer'
            current_layer = connect_queue.get()
            if current_layer.name == 'output_layer':
                self.network.addOutputModule(current_layer)
            else:
                self.network.addModule(current_layer)

            bias = BiasUnit()
            bias_connection = FullConnection(
                bias,
                current_layer,
                name="bias_to_{}_connection".format(current_layer.name))
            self.network.addModule(bias)
            self.network.addConnection(bias_connection)

            connection = FullConnection(prev_layer,
                                        current_layer,
                                        name="{}_to_{}_connection".format(
                                            prev_layer.name,
                                            current_layer.name))
            self.network.addConnection(connection)

            prev_layer = current_layer

        print 'sorting....'
        self.network.sortModules()
Example 18
def network(dataset, input_list):
    num_words = len(input_list)
    #dividing the dataset into training and testing data
    tstdata, trndata = dataset.splitWithProportion(0.25)

    #building the network
    net = RecurrentNetwork()
    input_layer1 = LinearLayer(num_words, name='input_layer1')
    input_layer2 = LinearLayer(num_words, name='input_layer2')
    hidden_layer = TanhLayer(num_words, name='hidden_layer')
    output_layer = SoftmaxLayer(num_words, name='output_layer')
    net.addInputModule(input_layer1)
    net.addInputModule(input_layer2)
    net.addModule(hidden_layer)
    net.addOutputModule(output_layer)
    net.addConnection(
        FullConnection(input_layer1, hidden_layer, name='in1_to_hidden'))
    net.addConnection(
        FullConnection(input_layer2, hidden_layer, name='in2_to_hidden'))
    net.addConnection(
        FullConnection(hidden_layer, output_layer, name='hidden_to_output'))
    net.addConnection(
        FullConnection(input_layer1, output_layer, name='in1_to_out'))
    net.addConnection(
        FullConnection(input_layer2, output_layer, name='in2_to_out'))
    net.sortModules()
    #backpropagation
    trainer = BackpropTrainer(net,
                              dataset=trndata,
                              momentum=0.1,
                              verbose=True,
                              weightdecay=0.01)
    #error checking part
    for i in range(10):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['target'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
                                 tstdata['target'])
        print "epoch: %4d" % trainer.totalepochs
        print "  train error: %5.10f%%" % trnresult
        print "  test error: %5.10f%%" % tstresult
    return net
Example 19
def createNetwork():
    print("[+]Creating network...")
    global net
    net = FeedForwardNetwork()
    inLayer = LinearLayer(2, name='in')
    hiddenLayer = TanhLayer(5, name='hidden')
    outLayer = LinearLayer(4, name='out')

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addOutputModule(outLayer)

    inToHidden = FullConnection(inLayer, hiddenLayer)
    hiddenToOut = FullConnection(hiddenLayer, outLayer)

    net.addConnection(inToHidden)
    net.addConnection(hiddenToOut)

    net.sortModules()
    print("[+] network created!")
Example 20
def build_2net(input_size, output_size, n_hidden=[5, 3]):
    """ Build a 2 hidden layer network give the layer sizes. """
    # Create network and modules
    net = FeedForwardNetwork()
    inp = LinearLayer(input_size)
    h1 = SigmoidLayer(n_hidden[0])
    h2 = TanhLayer(n_hidden[1])
    outp = LinearLayer(output_size)
    # Add modules
    net.addOutputModule(outp)
    net.addInputModule(inp)
    net.addModule(h1)
    net.addModule(h2)
    # Create connections
    net.addConnection(FullConnection(inp, h1, inSliceTo=6))
    net.addConnection(FullConnection(inp, h2, inSliceFrom=6))
    net.addConnection(FullConnection(h1, h2))
    net.addConnection(FullConnection(h2, outp))
    # Finish up
    net.sortModules()
    return net
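Because of the slice arguments (inSliceTo=6, inSliceFrom=6), inputs 0-5 feed h1 and the remaining inputs feed h2, so input_size must exceed 6. An illustrative call:

net = build_2net(input_size=10, output_size=2)
print(net.activate([0.1] * 10))  # first six features enter h1, the rest h2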
Example 21
def testTraining():
    # the AnBnCn dataset (sequential)
    d = AnBnCnDataSet()

    # build a recurrent network to be trained
    hsize = 2
    n = RecurrentNetwork()
    n.addModule(TanhLayer(hsize, name='h'))
    n.addModule(BiasUnit(name='bias'))
    n.addOutputModule(LinearLayer(1, name='out'))
    n.addConnection(FullConnection(n['bias'], n['h']))
    n.addConnection(FullConnection(n['h'], n['out']))
    n.addRecurrentConnection(FullConnection(n['h'], n['h']))
    n.sortModules()

    # initialize the backprop trainer and train
    t = BackpropTrainer(n, learningrate=0.1, momentum=0.0, verbose=True)
    t.trainOnDataset(d, 200)

    # the resulting weights are in the network:
    print('Final weights:', n.params)
Example 22
    def create_net(self, in_size, hidden_size, out_size, override=False):
        net = FeedForwardNetwork()
        in_layer = LinearLayer(in_size)
        hidden_layer = TanhLayer(hidden_size)
        out_layer = LinearLayer(out_size)

        net.addInputModule(in_layer)
        net.addModule(hidden_layer)
        net.addOutputModule(out_layer)

        in_to_hidden = FullConnection(in_layer, hidden_layer)
        hidden_to_out = FullConnection(hidden_layer, out_layer)

        net.addConnection(in_to_hidden)
        net.addConnection(hidden_to_out)
        net.sortModules()

        if override:
            self.neural_net = net
        else:
            return net
Example 23
def buildBP(input_, hidden, output, trndata):
    fnn = FeedForwardNetwork()

    inLayer = LinearLayer(input_, 'inLayer')
    hidden0 = TanhLayer(hidden, 'hiddenLayer')
    outLayer = LinearLayer(output, 'outLayer')

    fnn.addInputModule(inLayer)
    fnn.addModule(hidden0)
    fnn.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hidden0)
    hidden_to_out = FullConnection(hidden0, outLayer)

    fnn.addConnection(in_to_hidden)
    fnn.addConnection(hidden_to_out)
    fnn.sortModules()

    trainer = BackpropTrainer(fnn, trndata, verbose=True, learningrate=.001)
    trainer.trainUntilConvergence(maxEpochs=3000)

    return fnn
Example 24
    def buildBMTrainer(self):
        # print np.random.rand(1)
        # print np.random.rand(1)
        x, y = self.readexcel()
        # Read the simulation data from the sklearn datasets
        # boston = load_boston()
        # x = boston.data
        # y = boston.target.reshape(-1, 1)
        # for i in range(0,x.shape[0]):
        #     for j in range(0,x.shape[1]):
        #         print (x[i][j])
        # print x.shape
        # sys.exit();
        # for x in x:
        #     print x
        # print x
        # print y
        # sys.exit(0)
        # Split train/test 7:3 in order, without shuffling
        # per = int(len(x) * 0.7)
        per = int(len(x))
        # Normalize the data (normalization is generally required when using Sigmoid)
        sx = MinMaxScaler()
        sy = MinMaxScaler()
        xTrain = x[:per]
        xTrain = sx.fit_transform(xTrain)
        yTrain = y[:per]
        # print yTrain
        yTrain = sy.fit_transform(yTrain)
        # print yTrain
        # print sy.inverse_transform(yTrain)
        # sys.exit()
        # xTest = x[per:]
        # xTest = sx.transform(xTest)
        # yTest = y[per:]
        # yTest = sy.transform(yTest)
        # print xTest.shape
        # for x in xTest:
        #     print x
        # sys.exit()

        # Initialize the feedforward network
        self.__fnn = FeedForwardNetwork()

        # Build the input, hidden and output layers (usually 3-5 hidden layers at most)
        inLayer = LinearLayer(x.shape[1], 'inLayer')

        # hiddenLayer = TanhLayer(3, 'hiddenLayer')
        hiddenLayer = TanhLayer(self.hiddendim, 'hiddenLayer')
        outLayer = LinearLayer(self.rescol, 'outLayer')
        # hiddenLayer1 = TanhLayer(5, 'hiddenLayer1')
        # outLayer = LinearLayer(1, 'outLayer')

        # Add the input, hidden and output layers to fnn
        self.__fnn.addInputModule(inLayer)
        self.__fnn.addModule(hiddenLayer)
        # fnn.addModule(hiddenLayer1)
        self.__fnn.addOutputModule(outLayer)

        # Create full connections between the layers
        in_to_hidden = FullConnection(inLayer, hiddenLayer)
        # in_to_hidden.setName('in_to_hidden')
        # in_to_hidden._setParameters([0 for i in range(30)])
        hidden_to_out = FullConnection(hiddenLayer, outLayer)
        # hidden_to_out.setName('hidden_to_out')
        # hidden_to_out._setParameters([1 for i in range(3)])
        # hidden_to_hidden = FullConnection(hiddenLayer,hiddenLayer1 )
        # hidden_to_out = FullConnection(hiddenLayer1, outLayer)

        # Add the connections to fnn
        self.__fnn.addConnection(in_to_hidden)
        # fnn.addConnection(hidden_to_hidden)
        self.__fnn.addConnection(hidden_to_out)
        self.__fnn.sortModules()

        # Initialize the supervised dataset
        DS = SupervisedDataSet(x.shape[1], self.rescol)

        # Add the training data and labels to DS
        # for i in range(len(xTrain)):
        #     DS.addSample(xTrain[i], yTrain[i])
        for i in range(len(xTrain)):
            DS.addSample(xTrain[i], yTrain[i])

        # Train with backprop until convergence, capped at 10000 epochs
        trainer = BackpropTrainer(self.__fnn,
                                  DS,
                                  learningrate=0.001,
                                  verbose=self.verbose)
        trainingErrors = trainer.trainUntilConvergence(maxEpochs=10000)
        self.finalError = trainingErrors[0][-2]
        if (self.verbose):
            print('Final overall error:', self.finalError)
        self.__sy = sy
        # print "1"
        # print fnn.activate(x)
        for i in range(len(xTrain)):
            print(
                sy.inverse_transform(
                    self.__fnn.activate(xTrain[i]).reshape(-1, 1)))
Example 25
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.datasets import SupervisedDataSet
from pybrain.structure import FullConnection, FeedForwardNetwork, TanhLayer, LinearLayer, BiasUnit
import matplotlib.pyplot as plt
from numpy import *

n = FeedForwardNetwork()
n.addInputModule(LinearLayer(1, name='in'))
n.addInputModule(BiasUnit(name='bias'))
n.addModule(TanhLayer(3, name='gotan'))
n.addOutputModule(LinearLayer(1, name='out'))
n.addConnection(FullConnection(n['bias'], n['gotan']))
n.addConnection(FullConnection(n['in'], n['gotan']))
n.addConnection(FullConnection(n['gotan'], n['out']))
n.sortModules()

# initialize the backprop trainer and train
t = BackpropTrainer(n, learningrate=0.1, momentum=0.0, verbose=True)
#DATASET

DS = SupervisedDataSet(1, 1)
X = random.rand(100, 1) * 100
Y = X**3 + random.rand(100, 1) * 5
maxy = float(max(Y))
maxx = 100.0

for r in range(X.shape[0]):
    DS.appendLinked((X[r] / maxx), (Y[r] / maxy))

t.trainOnDataset(DS, 200)
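The script imports matplotlib but never uses it; a possible follow-up, purely illustrative, that plots the fit on the normalized scale:

# Illustrative follow-up: compare network output against the noisy targets
preds = [n.activate([x / maxx])[0] for x in X.flatten()]
plt.scatter(X.flatten() / maxx, Y.flatten() / maxy, s=8, label='data')
plt.scatter(X.flatten() / maxx, preds, s=8, label='network')
plt.legend()
plt.show()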
Example 26
def trainet2(data, nhide=8, nhide1=8, epo=10, wd=.1, fn=''):

    alldata = data
    tstdata_temp, trndata_temp = alldata.splitWithProportion(0.5)

    tstdata = ClassificationDataSet(alldata.indim, nb_classes=alldata.nClasses)
    for n in range(0, tstdata_temp.getLength()):
        tstdata.addSample(
            tstdata_temp.getSample(n)[0],
            tstdata_temp.getSample(n)[1])

    trndata = ClassificationDataSet(alldata.indim, nb_classes=alldata.nClasses)
    for n in range(0, trndata_temp.getLength()):
        trndata.addSample(
            trndata_temp.getSample(n)[0],
            trndata_temp.getSample(n)[1])

    tstdata._convertToOneOfMany()
    trndata._convertToOneOfMany()

    net = FeedForwardNetwork()
    inLayer = LinearLayer(trndata.indim)
    hiddenLayer = TanhLayer(nhide)
    hiddenLayer1 = TanhLayer(nhide1)
    outLayer = LinearLayer(trndata.outdim)

    net.addInputModule(inLayer)
    net.addModule(hiddenLayer)
    net.addModule(hiddenLayer1)
    net.addOutputModule(outLayer)

    in_to_hidden = FullConnection(inLayer, hiddenLayer)
    hidden_to_hidden = FullConnection(hiddenLayer, hiddenLayer1)
    hidden_to_out = FullConnection(hiddenLayer1, outLayer)

    net.addConnection(in_to_hidden)
    net.addConnection(hidden_to_hidden)
    net.addConnection(hidden_to_out)

    net.sortModules()
    net.bias = True  # note: FeedForwardNetwork ignores this attribute; bias requires a BiasUnit module

    trainer = BackpropTrainer(net,
                              dataset=trndata,
                              verbose=True,
                              weightdecay=wd,
                              momentum=0.1)
    edata = []
    msedata = []
    for i in range(epo):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
                                 tstdata['class'])
        tod = trainer.testOnData(verbose=False)
        print("epoch: %4d" % trainer.totalepochs,
              "  train error: %5.2f%%" % trnresult,
              "  test error: %5.2f%%" % tstresult, "  layers: ", nhide1,
              "  N_tourn: ", alldata.indim / 2)
        edata.append([trnresult, tstresult])
        msedata.append([i, tod])
    with open(fn + ".dta", 'w') as fp:
        json.dump(edata, fp)
    with open(fn + ".mse", 'w') as fp:
        json.dump(msedata, fp)
    return net
Example 27
def part2():
    '''
    Determine the minimal number of hidden units
    required to train the network successfully
    using multiple hidden layers
    '''
    '''
    # Parameters
    HIDDEN_NODES =          8
    LEARNING_DECAY =        0.9999    # Set in range [0.9, 1]
    LEARNING_RATE =         0.08    # Set in range [0, 1]
    MOMENTUM =              0.0    # Set in range [0, 0.5]
    TRAINING_ITERATIONS =   1000
    BATCH_LEARNING =        False
    VALIDATION_PROPORTION = 0.0
    SPARSE_LENGTH =         16
    '''

    # Parameters
    HIDDEN_NODES = 4
    LEARNING_DECAY = 0.9999  # Set in range [0.9, 1]
    LEARNING_RATE = 0.111  # Set in range [0, 1]
    MOMENTUM = 0.05  # Set in range [0, 0.5]
    TRAINING_ITERATIONS = 5000
    BATCH_LEARNING = False
    VALIDATION_PROPORTION = 0.0
    SPARSE_LENGTH = 16

    # Get the dataset
    dataset = sparse_coding.generateFull(SPARSE_LENGTH)
    validationSet = sparse_coding.generateFull(SPARSE_LENGTH)
    dataset, classes = sparse_coding.toClassificationDataset(dataset)
    inDimension = dataset.indim
    outDimension = dataset.outdim

    print inDimension
    print outDimension

    # Set up the neural network layers
    inLayer = LinearLayer(inDimension, name='input')
    hiddenLayer1 = SigmoidLayer(HIDDEN_NODES, name='hidden1')
    hiddenLayer2 = TanhLayer(HIDDEN_NODES, name='hidden2')
    outLayer = LinearLayer(outDimension, name='output')

    # Set up the connections
    input_to_hidden1 = FullConnection(inLayer, hiddenLayer1, name='in_h1')
    hidden1_to_hidden2 = FullConnection(hiddenLayer1,
                                        hiddenLayer2,
                                        name='h1_h2')
    hidden2_to_output = FullConnection(hiddenLayer2, outLayer, name='h2_out')
    hidden1_to_output = FullConnection(hiddenLayer1, outLayer, name='h1_out')

    # Create the network and add the information
    neuralNet = FeedForwardNetwork()
    neuralNet.addInputModule(inLayer)
    neuralNet.addModule(hiddenLayer1)
    neuralNet.addModule(hiddenLayer2)
    neuralNet.addOutputModule(outLayer)

    neuralNet.addConnection(input_to_hidden1)
    neuralNet.addConnection(hidden1_to_hidden2)
    neuralNet.addConnection(hidden2_to_output)
    neuralNet.addConnection(hidden1_to_output)
    neuralNet.sortModules()

    print neuralNet

    # Train the network
    trainer = BackpropTrainer(neuralNet,
                              dataset,
                              learningrate=LEARNING_RATE,
                              momentum=MOMENTUM,
                              lrdecay=LEARNING_DECAY,
                              batchlearning=BATCH_LEARNING)

    trainingErrors = []
    validationErrors = []

    for i in xrange(TRAINING_ITERATIONS):
        print "Training iteration: ", i

        # Check if VALIDATION_PROPORTION is not 0. This will split the input dataset into
        # VALIDATION_PROPORTION % for Validation Data and
        # (1 - VALIDATION_PROPORTION) % for Training Data
        # e.g. 25% Validation Data and 75% Training Data

        if VALIDATION_PROPORTION == 0:
            # Cannot split the data set into Training and Validation Data. Train the
            # Neural Network by standard means. This will not calculate Validation Error

            # The result of training is the proportional error for the number of epochs run
            trainingError = trainer.train()
            trainingErrors.append(trainingError)

            # Display the result of training for the iteration
            print "   Training error:    ", trainingError
        else:
            trainingErrors, validationErrors = trainer.trainUntilConvergence(
                validationProportion=VALIDATION_PROPORTION)

    # Create the output path if it doesn't exist
    generated_dir = path.abspath(
        path.join("generated", "Q2Task2-TrainedNN-{}".format(
            strftime("%Y-%m-%d_%H-%M-%S"))))
    if not path.exists(generated_dir):
        makedirs(generated_dir)

    # save parameters
    with open(path.normpath(path.join(generated_dir, "params.txt")), "a") as f:
        f.write("HIDDEN_LAYERS = {}\n".format(HIDDEN_NODES))
        f.write("LEARNING_DECAY = {}\n".format(LEARNING_DECAY))
        f.write("LEARNING_RATE = {}\n".format(LEARNING_RATE))
        f.write("MOMENTUM = {}\n".format(MOMENTUM))
        f.write("TRAINING_ITERATIONS = {}\n".format(TRAINING_ITERATIONS))
        f.write("BATCH_LEARNING = {}\n".format(BATCH_LEARNING))
        f.write("VALIDATION_PROPORTION = {}\n".format(VALIDATION_PROPORTION))

    # Save the Trained Neural Network
    uniqueFileName = path.normpath(path.join(generated_dir, "data.pkl"))
    writeMode = 'wb'  # Write Bytes
    pickle.dump(neuralNet, open(uniqueFileName, writeMode))

    # Plot the results of training
    plot.plot(trainingErrors, 'b')
    plot.ylabel("Training Error")
    plot.xlabel("Training Steps")
    plot.savefig(path.normpath(path.join(generated_dir, "errors.png")))
    plot.show()
    plot.clf()

    from mpl_toolkits.mplot3d import Axes3D
    figure = plot.figure()
    axis = figure.add_subplot(111, projection='3d')
    colors = ['r', 'y', 'g', 'c', 'b', 'k']

    for sample in validationSet:
        classifier = sparse_coding.getClassifier(sample)
        activationResult = neuralNet.activate(sample)
        axis.bar(range(len(sample)),
                 activationResult,
                 classifier,
                 zdir='y',
                 color=colors[:len(sample)])

    plot.savefig(path.normpath(path.join(generated_dir, "activations.png")))
    plot.show()
Example 28
    out = word_list.index(temp_list[2])
    inp1_vec = np.concatenate((inp1_vec, [inp[inp1, :]]), axis=0)
    inp2_vec = np.concatenate((inp2_vec, [inp[inp2, :]]), axis=0)
    out_vec = np.concatenate((out_vec, [inp[out, :]]), axis=0)
inp_vec = np.concatenate((inp1_vec, inp2_vec), axis=1)

#building the dataset
dataset = SupervisedDataSet(2 * num_words, num_words)
for i in range(len(sorted_list) + 1):
    dataset.addSample(inp_vec[i, :], out_vec[i, :])
tstdata, trndata = dataset.splitWithProportion(0.25)

#building the network
net = FeedForwardNetwork()
input_layer = LinearLayer(2 * num_words, name='input_layer')
hidden_layer = TanhLayer(num_words, name='hidden')
output_layer = SigmoidLayer(num_words, name='output_layer')
net.addInputModule(input_layer)
net.addModule(hidden_layer)
net.addOutputModule(output_layer)
net.addConnection(
    FullConnection(input_layer, hidden_layer, name='in_to_hidden'))
net.addConnection(
    FullConnection(hidden_layer, output_layer, name='hidden_to_out'))
net.sortModules()

#backpropagation
trainer = BackpropTrainer(net,
                          dataset=trndata,
                          momentum=0.1,
                          verbose=True,
Example 29
conglomerateSet = list(set(list(conglomerateString[0])))
codeTable = pd.Series(data=conglomerateSet, index=conglomerateSet)
codeTable = pd.get_dummies(codeTable)

conglomerateSet = []
conglomerateString = []

# Construct LSTM network
rnn = RecurrentNetwork()

inputSize = len(codeTable['a'].values)
outputSize = 4
hiddenSize = 10

rnn.addInputModule(LinearLayer(dim=inputSize, name='in'))
rnn.addModule(TanhLayer(dim=hiddenSize, name='in_proc'))
rnn.addModule(LSTMLayer(dim=hiddenSize, peepholes=True, name='hidden'))
rnn.addModule(TanhLayer(dim=hiddenSize, name='out_proc'))
rnn.addOutputModule(SoftmaxLayer(dim=outputSize, name='out'))

rnn.addConnection(FullConnection(rnn['in'], rnn['in_proc'], name='c1'))
rnn.addConnection(FullConnection(rnn['in_proc'], rnn['hidden'], name='c2'))
rnn.addRecurrentConnection(
    FullConnection(rnn['hidden'], rnn['hidden'], name='c3'))
rnn.addConnection(FullConnection(rnn['hidden'], rnn['out_proc'], name='c4'))
rnn.addConnection(FullConnection(rnn['out_proc'], rnn['out'], name='c5'))

rnn.sortModules()

# Construct dataset
trainingData = SequentialDataSet(inputSize, outputSize)
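The excerpt stops right after the SequentialDataSet is created. A minimal sketch of how PyBrain sequential datasets are typically filled (the zero vectors are dummies standing in for the project's real encodings):

trainingData.newSequence()
for t in range(5):
    x_t = [0.0] * inputSize   # placeholder one-hot input
    y_t = [0.0] * outputSize  # placeholder target
    trainingData.appendLinked(x_t, y_t)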
Example 30
xTest = sx.transform(xTest)
yTest = y[per:]
yTest = sy.transform(yTest)
# print xTest.shape
# for x in xTest:
#     print x
# sys.exit()

# Initialize the feedforward network
fnn = FeedForwardNetwork()

# Build the input, hidden and output layers (usually 3-5 hidden layers at most)
inLayer = LinearLayer(x.shape[1], 'inLayer')

# hiddenLayer = TanhLayer(3, 'hiddenLayer')
hiddenLayer = TanhLayer(12, 'hiddenLayer')
outLayer = LinearLayer(1, 'outLayer')
# hiddenLayer1 = TanhLayer(5, 'hiddenLayer1')
# outLayer = LinearLayer(1, 'outLayer')

# Add the input, hidden and output layers to fnn
fnn.addInputModule(inLayer)
fnn.addModule(hiddenLayer)
# fnn.addModule(hiddenLayer1)
fnn.addOutputModule(outLayer)

# Create full connections between the layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
# hidden_to_hidden = FullConnection(hiddenLayer,hiddenLayer1 )
# hidden_to_out = FullConnection(hiddenLayer1, outLayer)