Example #1
    def __init__(self, features_num, hidden_neurons_num):
        super().__init__()
        self.is_learning = True

        self.features_num = features_num
        #         self.net = buildNetwork(features_num, hidden_neurons_num, 1, bias = True)
        #         self.net = buildNetwork(features_num, hidden_neurons_num, hidden_neurons_num, 1, bias = True)
        #         self.net = ConvolutionalBoardNetwork(Board.BOARD_SIZE, 5, 3)
        #         self.trainer = BackpropTrainer(self.net)

        self.net_attack = buildNetwork(features_num,
                                       hidden_neurons_num,
                                       hidden_neurons_num,
                                       1,
                                       bias=True)
        self.net_defence = buildNetwork(features_num,
                                        hidden_neurons_num,
                                        hidden_neurons_num,
                                        1,
                                        bias=True)
        self.trainer_attack = BackpropTrainer(self.net_attack)
        self.trainer_defence = BackpropTrainer(self.net_defence)

        self.gamma = 0.9
        self.errors = []
        self.buf = np.zeros(200)
        self.buf_index = 0
        self.setup()
Example #2
def begin1():

    cbf = readFromCsv("cbf2")
    numdataset = np.array(cbf, dtype=np.float64)
    # training data, validation data, today's data
    tgdataset, vadataset, tydata = dataSplit(numdataset)
    # normalization parameters
    gydata, dmean, dstd = gyData(tgdataset)

    # validation data and today's data
    gyvadata = calFeature(vadataset, dmean, dstd)
    gytydata = calFeature(tydata, dmean, dstd)

    # neural network
    trainingset = buildTrainingSet(gydata)

    for i in range(1000):
        net = buildNetwork(15,
                           8,
                           1,
                           bias=True,
                           hiddenclass=TanhLayer,
                           outclass=TanhLayer)
        trainer = BackpropTrainer(net, trainingset)
        trainer.trainEpochs(epochs=100)
        rate = va.calRightRate(gyvadata, net)
        if rate > 0.6:
            NetworkWriter.writeToFile(
                net, '../netv3/zxtx_8l_100t_6_' + str(rate) + ".xml")
            print(va.calRightRate(gyvadata, net))
            print(va.calRightRate(gytydata, net))
        print(str(i) + " times " + str(rate))


# begin1();
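
The helpers readFromCsv, dataSplit, gyData, calFeature and buildTrainingSet are not shown in this listing. A minimal sketch of the normalization pair gyData/calFeature, assuming plain z-score scaling with statistics taken from the training split (an assumption, not the original implementation):

import numpy as np

def gyData(tgdataset):
    # Hypothetical: z-score normalize the training split and return the
    # column means/stds so the other splits can reuse them.
    dmean = tgdataset.mean(axis=0)
    dstd = tgdataset.std(axis=0)
    return (tgdataset - dmean) / dstd, dmean, dstd

def calFeature(dataset, dmean, dstd):
    # Hypothetical: apply the training statistics to another split.
    return (dataset - dmean) / dstd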
Example #3
def main():

    # building the training data
    datasetTreino = montaDados()

    # building the test data
    datasetTeste = montaDados()

    # defining the structure of the neural network:
    # the input size is the dataset's input dimension = 3
    # 12 neurons in the first hidden layer
    # 6 neurons in the second hidden layer
    # the output size is the dataset's output dimension = 1
    # a bias unit is used so the network can adapt better
    network = buildNetwork( datasetTreino.indim, 12, 6, datasetTreino.outdim, bias=True )

    # creating the backpropagation trainer
    # it uses the network structure defined in the network object
    # and the training dataset's data
    neuralNetwork = BackpropTrainer ( network, datasetTreino, learningrate=0.01, momentum=0.9 )

    # training the network
    neuralNetwork.trainEpochs ( 1500 )

    # validating the network
    neuralNetwork.testOnData ( datasetTeste, verbose=True )
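
montaDados() is not included in the listing. A minimal sketch of a stand-in, assuming a 3-input / 1-output SupervisedDataSet as described in the comments above (the sample values are illustrative only):

from pybrain.datasets import SupervisedDataSet

def montaDados():
    # Hypothetical stand-in: 3 input features, 1 output value.
    dataset = SupervisedDataSet(3, 1)
    dataset.addSample((0, 0, 0), (0,))
    dataset.addSample((0, 1, 1), (1,))
    dataset.addSample((1, 0, 1), (1,))
    dataset.addSample((1, 1, 0), (0,))
    return dataset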
Example #4
 def init(self):
     self.networks = []
     self.trainers = []
     self.starting_weights = []
     for i in range(self.size):  #@UnusedVariable
         if self.num_hid2 == 0:
             network = buildNetwork(self.num_inp,
                                    self.num_hid1,
                                    self.num_out,
                                    hiddenclass=SigmoidLayer,
                                    bias=True)
         else:
             network = buildNetwork(self.num_inp,
                                    self.num_hid1,
                                    self.num_hid2,
                                    self.num_out,
                                    hiddenclass=SigmoidLayer,
                                    bias=True)
         starting_weights = network.params.copy()
         trainer = BackpropTrainer(network,
                                   learningrate=LEARNING_RATE,
                                   momentum=MOMENTUM_LOW,
                                   verbose=False)
         self.networks.append(network)
         self.trainers.append(trainer)
         self.starting_weights.append(starting_weights)
Example #5
def main():

    # creating the dataset: each input is a vector of length 2
    # and each output is a scalar
    dataset = SupervisedDataSet(2, 1)

    criandoDataset(dataset)

    # creating the neural network with, respectively:
    # the network's input dimension
    # the number of neurons in the hidden layer
    # the network's output dimension
    # using a bias unit so the network can adapt over time
    network = buildNetwork(dataset.indim, 4, dataset.outdim, bias=True)

    # creating the network's trainer, passing:
    # the network
    # the dataset
    # the learning rate
    # a momentum term that speeds up training
    trainer = BackpropTrainer(network,
                              dataset,
                              learningrate=0.01,
                              momentum=0.99)

    # loop that trains the network
    for epocas in range(0, 1000):

        trainer.train()

    # running the test
    datasetTeste = SupervisedDataSet(2, 1)
    criandoDataset(datasetTeste)
    trainer.testOnData(datasetTeste, verbose=True)
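
criandoDataset() is not part of the listing. A plausible sketch, assuming the classic XOR toy problem that this 2-input / 1-output setup is commonly paired with (an assumption, not the original data):

def criandoDataset(dataset):
    # Hypothetical: fill the SupervisedDataSet(2, 1) with XOR samples.
    dataset.addSample((0, 0), (0,))
    dataset.addSample((0, 1), (1,))
    dataset.addSample((1, 0), (1,))
    dataset.addSample((1, 1), (0,))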
Example #6
def main():
    print "Calculating mfcc...."
    mfcc_coeff_vectors_dict = {}
    for i in range(1, 201):
        extractor = FeatureExtractor(
            '/home/venkatesh/Venki/FINAL_SEM/Project/Datasets/Happiness/HappinessAudios/' + str(i) + '.wav')
        mfcc_coeff_vectors = extractor.calculate_mfcc()
        mfcc_coeff_vectors_dict.update({str(i): (mfcc_coeff_vectors, mfcc_coeff_vectors.shape[0])})

    for i in range(201, 401):
        extractor = FeatureExtractor(
            '/home/venkatesh/Venki/FINAL_SEM/Project/Datasets/Sadness/SadnessAudios/' + str(i - 200) + '.wav')
        mfcc_coeff_vectors = extractor.calculate_mfcc()
        mfcc_coeff_vectors_dict.update({str(i): (mfcc_coeff_vectors, mfcc_coeff_vectors.shape[0])})

    audio_with_min_frames, min_frames = get_min_frames_audio(
        mfcc_coeff_vectors_dict)
    processed_mfcc_coeff = preprocess_input_vectors(
        mfcc_coeff_vectors_dict, min_frames)
    # frames = min_frames
    # print frames
    # print len(processed_mfcc_coeff['1'])
    # for each_vector in processed_mfcc_coeff['1']:
    #     print len(each_vector)
    print "mffcc found..."
    classes = ["happiness", "sadness"]

    training_data = ClassificationDataSet(
        26, target=1, nb_classes=2, class_labels=classes)
    # training_data = SupervisedDataSet(13, 1)
    try:
        network = NetworkReader.readFrom(
            'network_state_frame_level_new2_no_pp1.xml')
    except:
        for i in range(1, 51):
            mfcc_coeff_vectors = processed_mfcc_coeff[str(i)]
            for each_vector in mfcc_coeff_vectors:
                training_data.appendLinked(each_vector, [1])

        for i in range(201, 251):
            mfcc_coeff_vectors = processed_mfcc_coeff[str(i)]
            for each_vector in mfcc_coeff_vectors:
                training_data.appendLinked(each_vector, [0])

        training_data._convertToOneOfMany()
        print "prepared training data.."
        print training_data.indim, training_data.outdim
        network = buildNetwork(
            training_data.indim, 5, training_data.outdim, fast=True)
        trainer = BackpropTrainer(network, learningrate=0.01, momentum=0.99)
        print "Before training...", trainer.testOnData(training_data)
        trainer.trainOnDataset(training_data, 1000)
        print "After training...", trainer.testOnData(training_data)
        NetworkWriter.writeToFile(
            network, "network_state_frame_level_new2_no_pp.xml")
Example #7
def main():
    print "Calculating mfcc...."
    mfcc_coeff_vectors_dict = {}
    for i in range(1, 201):
        extractor = FeatureExtractor('/home/venkatesh/Venki/FINAL_SEM/Project/Datasets/Happiness/HappinessAudios/' + str(i) + '.wav')
        mfcc_coeff_vectors = extractor.calculate_mfcc()
        mfcc_coeff_vectors_dict.update({str(i): (mfcc_coeff_vectors, mfcc_coeff_vectors.shape[0])})

    for i in range(201, 401):
        extractor = FeatureExtractor('/home/venkatesh/Venki/FINAL_SEM/Project/Datasets/Sadness/SadnessAudios/' + str(i - 200) + '.wav')
        mfcc_coeff_vectors = extractor.calculate_mfcc()
        mfcc_coeff_vectors_dict.update({str(i): (mfcc_coeff_vectors, mfcc_coeff_vectors.shape[0])})

    audio_with_min_frames, min_frames = get_min_frames_audio(mfcc_coeff_vectors_dict)
    processed_mfcc_coeff = preprocess_input_vectors(mfcc_coeff_vectors_dict, min_frames)
    frames = min_frames
    print "mfcc found...."
    classes = ["happiness", "sadness"]
    try:
        network = NetworkReader.readFrom('network_state_new_.xml')
    except:
        # Create new network and start Training
        training_data = ClassificationDataSet(frames * 26, target=1, nb_classes=2, class_labels=classes)
        # training_data = SupervisedDataSet(frames * 39, 1)
        for i in range(1, 151):
            mfcc_coeff_vectors = processed_mfcc_coeff[str(i)]
            training_data.appendLinked(mfcc_coeff_vectors.ravel(), [1])
            # training_data.addSample(mfcc_coeff_vectors.ravel(), [1])

        for i in range(201, 351):
            mfcc_coeff_vectors = processed_mfcc_coeff[str(i)]
            training_data.appendLinked(mfcc_coeff_vectors.ravel(), [0])
            # training_data.addSample(mfcc_coeff_vectors.ravel(), [0])

        training_data._convertToOneOfMany()
        network = buildNetwork(training_data.indim, 5, training_data.outdim)
        trainer = BackpropTrainer(network, learningrate=0.01, momentum=0.99)
        print "Before training...", trainer.testOnData(training_data)
        trainer.trainOnDataset(training_data, 1000)
        print "After training...", trainer.testOnData(training_data)
        NetworkWriter.writeToFile(network, "network_state_new_.xml")

    print "*" * 30 , "Happiness Detection", "*" * 30
    for i in range(151, 201):
        output = network.activate(processed_mfcc_coeff[str(i)].ravel())
        # print output,
        # if output > 0.7:
        #     print "happiness"
        class_index = max(xrange(len(output)), key=output.__getitem__)
        class_name = classes[class_index]
        print class_name
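
get_min_frames_audio() and preprocess_input_vectors() come from the author's project and are not shown. A rough sketch of what they appear to do, judging only from how they are used above (truncating every recording to the shortest frame count is an assumption):

def get_min_frames_audio(mfcc_coeff_vectors_dict):
    # Hypothetical: find the recording with the fewest MFCC frames.
    key = min(mfcc_coeff_vectors_dict,
              key=lambda k: mfcc_coeff_vectors_dict[k][1])
    return key, mfcc_coeff_vectors_dict[key][1]

def preprocess_input_vectors(mfcc_coeff_vectors_dict, min_frames):
    # Hypothetical: truncate every recording to min_frames frames so that
    # all inputs have the same shape.
    return {key: value[0][:min_frames]
            for key, value in mfcc_coeff_vectors_dict.items()}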
Example #8
def montaRede(dadosEntrada, dadosSaida):
    """
    Função na qual def

    :param dadosEntrada: parâmetros de entrada na rede neural
    :param dadosSaida:  parâmetros de saída da rede neural
    :return: retorna a rede de treinamento treinada e os dados supervisionados
    """

    entradaTreino = np.concatenate(
        (dadosEntrada[:35], dadosEntrada[50:85], dadosEntrada[100:135]))
    saidaTreino = np.concatenate(
        (dadosSaida[:35], dadosSaida[50:85], dadosSaida[100:135]))
    entradaTeste = np.concatenate(
        (dadosEntrada[35:50], dadosEntrada[85:100], dadosEntrada[135:]))
    saidaTeste = np.concatenate(
        (dadosSaida[35:50], dadosSaida[85:100], dadosSaida[135:]))

    treinaRede(entradaTreino, saidaTreino)

    # creating the training dataset
    # there will be 4 input values
    # and 1 output value
    treinamento = treinaRede(entradaTreino, saidaTreino)

    # neural network with input sized to the training data
    # 2 neurons in the hidden layer
    # output sized to the training data
    # using bias
    redeNeural = buildNetwork(treinamento.indim,
                              2,
                              treinamento.outdim,
                              bias=True)

    # creating the trainer for the neural network
    redeNeuralTreinada = BackpropTrainer(redeNeural,
                                         treinamento,
                                         learningrate=0.3,
                                         momentum=0.9)

    for epocas in range(0, 10000):

        redeNeuralTreinada.train()

    teste = SupervisedDataSet(4, 1)

    for i in range(len(entradaTeste)):

        teste.addSample(entradaTeste[i], saidaTeste[i])

    return redeNeuralTreinada, teste
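
treinaRede() is referenced but not shown. A minimal sketch under the assumption that it simply packs the 4-feature inputs and 1-value outputs into the SupervisedDataSet consumed above (despite its name, only the returned dataset is used here):

from pybrain.datasets import SupervisedDataSet

def treinaRede(entradaTreino, saidaTreino):
    # Hypothetical: build the 4-input / 1-output training dataset.
    treinamento = SupervisedDataSet(4, 1)
    for entrada, saida in zip(entradaTreino, saidaTreino):
        treinamento.addSample(entrada, saida)
    return treinamento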
Example #9
 def train(self,
           dataset,
           maxEpochs=10,
           learningrate=0.01,
           momentum=0.99,
           continueEpochs=10,
           validationProportion=0.25):
     '''trains a network with the given dataset

     :param SupervisedDataSet dataset: the training dataset
     :param int maxEpochs: max number of iterations to train the network
     :param float learningrate: step size for the gradient-descent weight updates
     :param float momentum: helps escape local minima during training, for better results
     '''
     self.trainer = BackpropTrainer(self.net,
                                    learningrate=learningrate,
                                    momentum=momentum)
     self.trainer.trainOnDataset(dataset, maxEpochs)
Example #10
    def train(network_file, input_length, output_length, training_data_file,
              learning_rate, momentum, stop_on_convergence, epochs, classify):
        n = get_network(network_file)
        if classify:
            ds = ClassificationDataSet(int(input_length),
                                       int(output_length) * 2)
            ds._convertToOneOfMany()
        else:
            ds = SupervisedDataSet(int(input_length), int(output_length))
        training_data = get_training_data(training_data_file)

        NetworkManager.last_training_set_length = 0
        for line in training_data:
            data = [float(x) for x in line.strip().split(',') if x != '']
            input_data = tuple(data[:(int(input_length))])
            output_data = tuple(data[(int(input_length)):])
            ds.addSample(input_data, output_data)
            NetworkManager.last_training_set_length += 1

        t = BackpropTrainer(n,
                            learningrate=learning_rate,
                            momentum=momentum,
                            verbose=True)
        print "training network " + network_storage_path + network_file

        if stop_on_convergence:
            t.trainUntilConvergence(ds, epochs)
        else:
            if classify:
                t.trainOnDataset(ds['class'], epochs)
            else:
                t.trainOnDataset(ds, epochs)

        error = t.testOnData()
        print "training done"
        if not math.isnan(error):
            save_network(n, network_file)
            print "network saved"
        else:
            print "error occurred, network not saved"

        return error
Example #11
def ANN(
    trainFeature, trainLabel, testFeature, testLabel, netStructure, para_rate,
    para_momentum
):  # netStructure is a list [in, hidden1, hidden2, out]; momentum is an SGD parameter
    sampleNum = trainFeature.shape[0]
    featureNum = trainFeature.shape[1]
    Dataset = SupervisedDataSet(featureNum, 1)
    i = 0
    while (i < sampleNum):
        print(i)
        Dataset.addSample(list(trainFeature[i]), [trainLabel[i]])
        i += 1
    Network = buildNetwork(netStructure[0],
                           netStructure[1],
                           netStructure[2],
                           netStructure[3],
                           hiddenclass=SigmoidLayer,
                           outclass=SigmoidLayer)
    T = BackpropTrainer(Network,
                        Dataset,
                        learningrate=para_rate,
                        momentum=para_momentum,
                        verbose=True)
    #print(Dataset['input'])
    errorList = []
    errorList.append(T.testOnData(Dataset))
    T.trainOnDataset(Dataset)
    errorList.append(T.testOnData(Dataset))
    T.trainOnDataset(Dataset)
    while (abs(T.testOnData(Dataset) - errorList[-1]) > 0.0001):
        T.trainOnDataset(Dataset)
        errorList.append(T.testOnData(Dataset))
    # the lines below print the predicted outputs (predictedLabel)
    print(np.array([Network.activate(x) for x in trainFeature]))
    #print(testLabel)
    print(
        Network.activate([
            0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0
        ]))
    return (errorList)
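
A hedged usage sketch: the activate() call above passes a 24-element vector, so the network presumably takes 24 inputs; the hidden-layer sizes, learning rate and momentum below are illustrative values, not the author's:

import numpy as np

# Hypothetical data: 100 samples with 24 binary features and binary labels.
trainFeature = np.random.randint(0, 2, size=(100, 24))
trainLabel = np.random.randint(0, 2, size=100)
errorList = ANN(trainFeature, trainLabel, trainFeature, trainLabel,
                netStructure=[24, 12, 6, 1], para_rate=0.01, para_momentum=0.9)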
Example #12
    def __init__(self, **kwargs):

        self.max_depth = 0
        self.stats = {}

        self.calculation_time = float(kwargs.get('time', 1))
        self.max_moves = int(kwargs.get('max_moves', Board.BOARD_SIZE_SQ))

        # Exploration constant, increase for more exploratory moves,
        # decrease to prefer moves with known higher win rates.
        self.C = float(kwargs.get('C', 1.4))

        self.features_num = Board.BOARD_SIZE_SQ * 3 + 2
        self.hidden_neurons_num = self.features_num * 2
        self.net = buildNetwork(self.features_num,
                                self.hidden_neurons_num,
                                2,
                                bias=True,
                                outclass=SigmoidLayer)
        self.trainer = BackpropTrainer(self.net)

        self.total_sim = 0
        self.observation = []
Example #13
def get_trained_ann(dataset, ann=None, test_train_prop=0.25, max_epochs=50):
    tstdata, trndata = dataset.splitWithProportion(test_train_prop)
    trndata._convertToOneOfMany()
    tstdata._convertToOneOfMany()
    if not ann:
        ann = build_ann(trndata.indim, trndata.outdim)
        # ann = build_exp_ann(trndata.indim, trndata.outdim)
    # trainer = RPropMinusTrainer(ann)
    trainer = BackpropTrainer(ann,
                              dataset=trndata,
                              learningrate=0.01,
                              momentum=0.5,
                              verbose=True)
    trnresult = tstresult = 0
    # for i in range(10):
    trainer.trainUntilConvergence(maxEpochs=max_epochs, verbose=True)
    trnresult = percentError(trainer.testOnClassData(), trndata['class'])
    tstresult = percentError(trainer.testOnClassData(dataset=tstdata),
                             tstdata['class'])
    # print trnresult, tstresult
    return ann, trnresult, tstresult
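
build_ann() is not included in the listing. A minimal sketch under the assumption that it is a thin buildNetwork wrapper with a softmax output layer, which is the usual pairing with _convertToOneOfMany() and testOnClassData(); the hidden-layer size is an arbitrary choice:

from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure import SoftmaxLayer

def build_ann(indim, outdim, hidden=10):
    # Hypothetical: single hidden layer, softmax output for classification.
    return buildNetwork(indim, hidden, outdim, bias=True, outclass=SoftmaxLayer)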