Example #1
# PyBrain imports used by this example
from pybrain.datasets import SupervisedDataSet
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer

def main():

    # create the dataset: each input is a vector of size 2
    # and each output is a scalar
    dataset = SupervisedDataSet(2, 1)

    criandoDataset(dataset)

    # build the neural network, passing, in order:
    # the network's input dimension,
    # the number of neurons in the hidden layer,
    # the network's output dimension,
    # and enabling a bias unit
    network = buildNetwork(dataset.indim, 4, dataset.outdim, bias=True)

    # create the trainer for the network, passing:
    # the network,
    # the dataset,
    # the learning rate,
    # and a momentum term that accelerates training
    trainer = BackpropTrainer(network,
                              dataset,
                              learningrate=0.01,
                              momentum=0.99)

    # loop that trains the network
    for epocas in range(0, 1000):

        trainer.train()

    # run the test
    datasetTeste = SupervisedDataSet(2, 1)
    criandoDataset(datasetTeste)
    trainer.testOnData(datasetTeste, verbose=True)
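criandoDataset is not defined in this excerpt; a minimal sketch, assuming it fills the set with the XOR truth table as in Example #12:

def criandoDataset(dataset):
    # hypothetical helper: add the four XOR samples
    dataset.addSample([0, 0], [0])
    dataset.addSample([0, 1], [1])
    dataset.addSample([1, 0], [1])
    dataset.addSample([1, 1], [0])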
Example #2
    def make_evaluation_datasets(self):
        eval_dataset = SupervisedDataSet(self.inputdim, self.outputdim)
        eval_costset = SupervisedDataSet(self.inputdim, self.outputdim)

        f_sim = open('simdata/evalset.txt')

        f_input = open('../data/funcvalue.txt', 'w')
        f_input_cost = open('../data/funccost.txt', 'w')
        for line in f_sim:
            line_segs = line.split()
            # parse every field numerically so addSample receives floats
            x = float(line_segs[0])
            y = float(line_segs[1])
            dist = float(line_segs[2])
            angle = float(line_segs[3])

            if dist < 0:
                cost = self.COST_HIGH
            else:
                cost = self.COST_LOW

            eval_dataset.addSample([x, y], [dist, angle])
            eval_costset.addSample([x, y], [cost])

            f_input.write('%s %s %f\n' % (x, y, dist))
            f_input_cost.write('%s %s %f\n' % (x, y, cost))

        f_sim.close()
        f_input.close()
        f_input_cost.close()

        return (eval_dataset, eval_costset)
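Each line of simdata/evalset.txt is expected to hold four whitespace-separated fields matching the parsing above; the values below are purely illustrative:

# x     y     dist   angle
0.25    0.75  1.32   0.08
0.10    0.40  -0.50  1.57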
Example #3

def Neural_Network(xtrain,ytrain,xtest,ytest):
    #Hidden nodes
    hidden_net = 2
    #Epoch is a single pass through the entire training set, followed by testing of the verification set.
    epoch = 2
    ytrain = ytrain.reshape(-1,1)
    input_cnt = xtrain.shape[1]
    target_cnt = ytrain.shape[1]
    dataset = SupervisedDataSet(input_cnt, target_cnt)
    dataset.setField( 'input', xtrain )
    dataset.setField( 'target', ytrain )
    network = buildNetwork( input_cnt, hidden_net, target_cnt, bias = True )
    #Trainer that trains the parameters of a module according to a supervised dataset (potentially sequential) by backpropagating the errors (through time).
    trainer = BackpropTrainer( network,dataset )
    print("---------------Neural Network---------------")
    print("Train Data")
    for e in range(epoch):
        mse = trainer.train()
        rmse = math.sqrt(mse)
        print("MSE, epoch {}: {}".format(e + 1, mse))
        print("RMSE, epoch {}: {}".format(e + 1, rmse))
    
    ytest = ytest.reshape(-1, 1)
    input_size = xtest.shape[1]
    target_size = ytest.shape[1]
    dataset = SupervisedDataSet(input_size, target_size)
    dataset.setField('input', xtest)
    dataset.setField('target', ytest)
    model = network.activateOnDataset(dataset)

    mse = mean_squared_error(ytest, model)
    rmse = math.sqrt(mse)
    print("Test Data:")
    print("MSE: ", mse)
    print("RMSE: ", rmse)
Example #4
    def __init__(self, domain, iters, trial_number):
        super(ActNetExperiment,
              self).__init__(domain, Experiment.EXP_ACTNET, iters,
                             ACTIVE_ENSEMBLE_SIZE, trial_number)
        # inputs: x, y, point's ambiguity, current average cost, current average value variance
        # output: ratio of next error * avg_cost to current
        self.cost_ensemble = Ensemble(self.ensemble_size, domain.inputdim,
                                      self.NUM_HIDDEN1, self.NUM_HIDDEN2, 1)
        self.train_costset = SupervisedDataSet(domain.inputdim, 1)

        # train cost network to reset costs
        points = self.domain.generate_grid_points(INIT_COST_SAMPLES_AXIS)
        init_costset = SupervisedDataSet(2, 1)
        for point in points:
            z_cost = self.domain.COST_LOW
            init_costset.addSample(point, [z_cost])

        print 'Initializing Cost Ensemble...'
        self.cost_ensemble.train(init_costset)
        self.cost_ensemble.save_starting_weights()

        self.perf_input_dim = 4
        self.perf_ensemble = Ensemble(self.ensemble_size, self.perf_input_dim,
                                      self.NUM_HIDDEN1, self.NUM_HIDDEN2, 1)
        self.train_inputs = []
        self.train_outputs = []
        #self.train_perfset = ImportanceDataSet(self.perf_input_dim, 1)
        self.train_perfset = SupervisedDataSet(self.perf_input_dim, 1)

        self.last_avg_value_var = None
        self.last_x_y_value_var = None
        self.last_x_y_cost = None
        self.last_x_y_actual_cost = None
        self.last_x_y_cost_var = None
        self.last_error_times_avg_cost = None
        self.last_predicted_drop = -1

        # train perf network to reset predictions
        #init_perfset = SupervisedDataSet(self.perf_input_dim, 1)
        init_perfset = SupervisedDataSet(self.perf_input_dim, 1)
        for i in range(INIT_PERF_SAMPLES):  #@UnusedVariable
            x_y_value_var = random.uniform(-.1, 5)
            avg_value_var = random.uniform(-.1, 5)
            cost_gutter = float(self.domain.COST_HIGH -
                                self.domain.COST_LOW) / 10
            x_y_cost = random.uniform(self.domain.COST_LOW - cost_gutter,
                                      self.domain.COST_HIGH + cost_gutter)
            x_y_cost_var = random.uniform(-.1, 20)
            inp = [x_y_value_var, avg_value_var, x_y_cost, x_y_cost_var]
            out = [2]
            init_perfset.addSample(inp, out)
        print 'Initializing Perf Ensemble...'
        self.perf_ensemble.train(init_perfset)
        self.perf_ensemble.save_starting_weights()
Example #5
    def validate(self):
        """ The main method of this class. It runs the crossvalidation process
            and returns the validation result (e.g. performance).
        """
        dataset = self._dataset
        trainer = self._trainer
        n_folds = self._n_folds
        l = dataset.getLength()
        inp = dataset.getField("input")
        tar = dataset.getField("target")
        indim = dataset.indim
        outdim = dataset.outdim
        assert l > n_folds

        perms = array_split(permutation(l), n_folds)

        perf = 0.
        for i in range(n_folds):
            # determine train indices
            train_perms_idxs = range(n_folds)
            train_perms_idxs.pop(i)
            temp_list = []
            for train_perms_idx in train_perms_idxs:
                temp_list.append(perms[train_perms_idx])
            train_idxs = concatenate(temp_list)

            # determine test indices
            test_idxs = perms[i]

            # train
            #print "training iteration", i
            train_ds = SupervisedDataSet(indim, outdim)
            train_ds.setField("input", inp[train_idxs])
            train_ds.setField("target", tar[train_idxs])
            trainer = copy.deepcopy(self._trainer)
            trainer.setData(train_ds)
            if not self._max_epochs:
                trainer.train()
            else:
                trainer.trainEpochs(self._max_epochs)

            # test on the held-out fold
            #print "testing iteration", i
            test_ds = SupervisedDataSet(indim, outdim)
            test_ds.setField("input", inp[test_idxs])
            test_ds.setField("target", tar[test_idxs])
            perf += self._calculatePerformance(trainer.module, test_ds)

        perf /= n_folds
        return perf
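This validate() mirrors PyBrain's CrossValidator; a usage sketch, assuming trainer is a BackpropTrainer already bound to a module and ds is the full SupervisedDataSet (the fold count is illustrative):

from pybrain.tools.validation import CrossValidator, ModuleValidator
cv = CrossValidator(trainer, ds, n_folds=5, valfunc=ModuleValidator.MSE)
print(cv.validate())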
Example #6
    def __init__(self, domain, iters, trial_number):
        super(ActVarExperiment,
              self).__init__(domain, Experiment.EXP_ACTVAR, iters,
                             ACTIVE_ENSEMBLE_SIZE, trial_number)
        self.var_ensemble = Ensemble(self.ensemble_size, domain.inputdim,
                                     self.NUM_HIDDEN1, self.NUM_HIDDEN2, 1)
        self.train_varset = SupervisedDataSet(domain.inputdim, 1)

        # train variance network to reset variances
        points = self.domain.generate_grid_points(INIT_COST_SAMPLES_AXIS)
        init_varset = SupervisedDataSet(domain.inputdim, 1)
        for point in points:
            z_var = 1.0
            init_varset.addSample(point, [z_var])
        print 'Initializing Variance Ensemble...'
Example #7
def anntrain(xdata,ydata):#,epochs):
    #print len(xdata[0])
    ds=SupervisedDataSet(len(xdata[0]),1)
    #ds=ClassificationDataSet(len(xdata[0]),1, nb_classes=2)
    for i,algo in enumerate (xdata):
        ds.addSample(algo,ydata[i])
    #ds._convertToOneOfMany()  # not this
    net= FeedForwardNetwork()
    inp=LinearLayer(len(xdata[0]))
    h1=SigmoidLayer(1)
    outp=LinearLayer(1)
    net.addOutputModule(outp) 
    net.addInputModule(inp) 
    net.addModule(h1)
    #net=buildNetwork(len(xdata[0]),1,1,hiddenclass=TanhLayer,outclass=SoftmaxLayer)
    
    net.addConnection(FullConnection(inp, h1))  
    net.addConnection(FullConnection(h1, outp))

    net.sortModules()

    trainer=BackpropTrainer(net,ds)#, verbose=True)#dataset=ds,verbose=True)
    #trainer.trainEpochs(40)
    trainer.trainOnDataset(ds,40) 
    #trainer.trainUntilConvergence(ds, 20, verbose=True, validationProportion=0.15)
    trainer.testOnData()#verbose=True)
    #print 'Final weights:',net.params
    return net
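Once trained, the returned net is queried one sample at a time; a brief sketch with illustrative inputs:

net = anntrain(xdata, ydata)
pred = net.activate(xdata[0])  # one value from the single output unit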
Example #8
    def __init__(self, domain, mode, iters, ensemble_size, trial_number):
        self.domain = domain
        self.mode = mode
        self.iters = iters
        self.ensemble_size = ensemble_size
        self.trial_number = trial_number
        self.iteration = 0

        seed = abs(hash(self))
        numpy.random.seed(seed)
        random.seed(seed)
        seed = abs(hash(random.random()))
        numpy.random.seed(seed)
        random.seed(seed)
        print 'Seeding %d' % seed
        self.ensemble = Ensemble(self.ensemble_size, domain.inputdim,
                                 self.NUM_HIDDEN1, self.NUM_HIDDEN2,
                                 domain.outputdim)
        (self.eval_dataset,
         self.eval_costset) = self.domain.make_evaluation_datasets()
        # used in run()
        self.train_dataset = SupervisedDataSet(domain.inputdim,
                                               domain.outputdim)
        self.current_error = 0.0
        self.current_avg_cost = 0.0
        self.current_error_times_avg_cost = 0.0
Example #9
    def _update_impl(self, old, new, reward):
        old_input = self.get_input_values(old)

        v1_a = self.net_attack.activate(self.get_input_values(new))
        target = self.gamma * v1_a

        ds_a = SupervisedDataSet(self.features_num, 1)
        ds_a.addSample(old_input, target + max(0, reward))
        ds_d = SupervisedDataSet(self.features_num, 1)
        ds_d.addSample(old_input, target + min(0, reward))
        #         self.trainer.setData(ds)
        #         err = self.trainer.train()
        self.trainer_attack.setData(ds_a)
        self.trainer_attack.train()
        self.trainer_defence.setData(ds_d)
        self.trainer_defence.train()
Example #10

def train_net(data_set, n, epochs=1):
    num_inputs = len(data_set[0][0][n])
    ds = SupervisedDataSet(num_inputs, 2)
    for i in range(len(data_set)):
        try:
            ds.appendLinked(data_set[i][0][n],
                            (data_set[i][1], data_set[i][2]))
        except:  # skip malformed entries
            continue
    print str(len(ds)) + ' points successfully acquired'

    net = FeedForwardNetwork()
    net.addInputModule(LinearLayer(num_inputs, name='input'))
    net.addInputModule(BiasUnit(name='bias'))
    net.addOutputModule(LinearLayer(2, name='output'))
    net.addModule(SigmoidLayer(int((num_inputs + 2) / 2.), name='sigmoid'))
    net.addModule(TanhLayer(10, name='tanh'))
    net.addConnection(FullConnection(net['bias'], net['sigmoid']))
    net.addConnection(FullConnection(net['bias'], net['tanh']))
    net.addConnection(FullConnection(net['input'], net['sigmoid']))
    net.addConnection(FullConnection(net['sigmoid'], net['tanh']))
    net.addConnection(FullConnection(net['tanh'], net['output']))
    net.sortModules()

    trainer = BackpropTrainer(net,
                              learningrate=0.01,
                              momentum=0.1,
                              verbose=True)

    trainer.trainOnDataset(ds)
    trainer.trainEpochs(epochs)

    return net
Example #11
 def createDataSet(self, trainInput, trainOut):
     ds = SupervisedDataSet(trainInput.shape[1], 1)
     # adhoc - no first input element
     # adding all train samples to dataset
     for x in range(len(trainInput)): #for x in range(len(trainInput)-1):
         ds.addSample(trainInput[x], trainOut[x])  # ds.addSample(trainInput[x + 1], trainOut[x])
     return ds
Example #12
    def createXORData(self,inputdim,outputdim):
 
        self.data = SupervisedDataSet(inputdim,outputdim)
        self.data.addSample([1,1],[0])
        self.data.addSample([1,0],[1])
        self.data.addSample([0,1],[1])
        self.data.addSample([0,0],[0])
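A short training sketch on this XOR set, assuming obj is an instance of the surrounding class (layer sizes and epoch count are illustrative):

obj.createXORData(2, 1)
net = buildNetwork(2, 4, 1, bias=True)
trainer = BackpropTrainer(net, obj.data, learningrate=0.1, momentum=0.9)
for _ in range(1000):
    trainer.train()
print(net.activate([1, 0]))  # should approach 1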
Example #13
 def make_ds_with_samples(sample_subset):
     ds = SupervisedDataSet(len(features.word_list),
                            len(features.class_list))
     ds_labels = []
     for sample_features, target, label in sample_subset:
         ds.addSample(sample_features, target)
         ds_labels.append(label)
     return (ds, ds_labels)
Example #14
    def __init__(self, domain, iters, trial_number):
        super(ActCostExperiment,
              self).__init__(domain, Experiment.EXP_ACTCOST, iters,
                             ACTIVE_ENSEMBLE_SIZE, trial_number)
        self.cost_ensemble = Ensemble(self.ensemble_size, domain.inputdim,
                                      self.NUM_HIDDEN1, self.NUM_HIDDEN2, 1)
        self.train_costset = SupervisedDataSet(domain.inputdim, 1)

        # train cost network to reset costs
        points = self.domain.generate_grid_points(INIT_COST_SAMPLES_AXIS)
        init_costset = SupervisedDataSet(domain.inputdim, 1)
        for point in points:
            z_cost = self.domain.COST_LOW
            init_costset.addSample(point, [z_cost])
        print 'Initializing Cost Ensemble...'
        self.cost_ensemble.train(init_costset)
        self.cost_ensemble.save_starting_weights()
Example #15
def buildTrainingSet(gydataset):
    # the final training data
    trainingset = SupervisedDataSet(15, 1)
    for line in gydataset:
        trainingset.addSample(tuple(line[:15]), line[15])
    return trainingset
Example #16
 def createTrainingData(self,filename,inputdim, outputdim):
     """
     create training data by reading our log file
     inputdim = inputdimension of data
     outputdim = output dim expected
     """
     
     self.data = SupervisedDataSet(inputdim,outputdim)
     textFile = loadtxt(filename, delimiter=",")
     
     for line in textFile:
         self.data.addSample(line[:inputdim], line[-outputdim:])
Example #17

def neuralNetwork(X, Y):
    print "Creating dataset..."
    ds = SupervisedDataSet(len(X[0]), 1)

    for x, y in zip(X, Y):
        ds.addSample(x, y)

    print "Creating neural network..."
    n = buildNetwork(ds.indim, int(ds.indim), ds.outdim)
    print "Training neural network..."
    t = BackpropTrainer(n, ds, verbose=True)
    errors = t.trainUntilConvergence(maxEpochs=10)
    return n
Example #18
def montaRede(dadosEntrada, dadosSaida):
    """
    Function that builds, trains, and evaluates the network

    :param dadosEntrada: input parameters for the neural network
    :param dadosSaida:  output parameters of the neural network
    :return: the trained network trainer and the supervised test data
    """

    entradaTreino = np.concatenate(
        (dadosEntrada[:35], dadosEntrada[50:85], dadosEntrada[100:135]))
    saidaTreino = np.concatenate(
        (dadosSaida[:35], dadosSaida[50:85], dadosSaida[100:135]))
    entradaTeste = np.concatenate(
        (dadosEntrada[35:50], dadosEntrada[85:100], dadosEntrada[135:]))
    saidaTeste = np.concatenate(
        (dadosSaida[35:50], dadosSaida[85:100], dadosSaida[135:]))

    # create the training dataset:
    # 4 input values and 1 output value per sample
    treinamento = treinaRede(entradaTreino, saidaTreino)

    # neural network sized to the training data,
    # with 2 neurons in the hidden layer,
    # the output dimension taken from the dataset,
    # and a bias unit
    redeNeural = buildNetwork(treinamento.indim,
                              2,
                              treinamento.outdim,
                              bias=True)

    # create the backpropagation trainer for the network
    redeNeuralTreinada = BackpropTrainer(redeNeural,
                                         treinamento,
                                         learningrate=0.3,
                                         momentum=0.9)

    for epocas in range(0, 10000):

        redeNeuralTreinada.train()

    teste = SupervisedDataSet(4, 1)

    for i in range(len(entradaTeste)):

        teste.addSample(entradaTeste[i], saidaTeste[i])

    return redeNeuralTreinada, teste
Example #19
    def make_evaluation_datasets(self):
        eval_dataset = SupervisedDataSet(self.inputdim, self.outputdim)
        eval_costset = SupervisedDataSet(self.inputdim, self.outputdim)
        f_input = open('../data/funcvalue.txt', 'w')
        f_input_cost = open('../data/funccost.txt', 'w')
        points = self.generate_grid_points(PLOT_SAMPLES_AXIS)
        for point in points:
            z = self.fn_base(point)
            z_cost = self.cost_fn(point)
            point_str = str(point).strip('[]').replace(',', '')
            f_input.write('%s %f\n' % (point_str, z[0]))
            f_input_cost.write('%s %f\n' % (point_str, z_cost))
        f_input.close()
        f_input_cost.close()

        points = self.generate_grid_points(self.EVAL_SAMPLES_AXIS)
        for point in points:
            z = self.fn_base(point)
            z_cost = self.cost_fn(point)
            eval_dataset.addSample(point, z)
            eval_costset.addSample(point, [z_cost])

        return (eval_dataset, eval_costset)
Example #20
    def absorb(self, winner, **kwargs):
        self.total_sim += 1

        ds = SupervisedDataSet(self.features_num, 2)
        for who, s0, s1 in self.observation:
            if who != Board.STONE_BLACK:
                continue
            input_vec = self.get_input_values(s0, s1, who)
            val = self.net.activate(input_vec)
            plays = val[1] * self.total_sim + 1
            wins = val[0] * self.total_sim
            if who == winner:
                wins += 1
            ds.addSample(input_vec, (wins, plays))
        self.trainer.trainOnDataset(ds)
Example #21
 def createTrainingData(self,filename,inputdim, outputdim):
     """
     create training data by reading file=filename
     inputdim = inputdimension of data
     outputdim = output dim expected
     """
     
     if filename is not None:
         finaldf = pd.read_csv(paths+filename, parse_dates=[0], delimiter=";",index_col=0);
         finaldf = finaldf.reset_index()     
         finaldf['hour'] = pd.DatetimeIndex(finaldf['TIMESTAMP']).hour 
         
         for col in finaldf:
             if(col not in ['TIMESTAMP','hour']):
                 print col
                 finaldf[col] /= finaldf[col].iloc[0].astype(np.float64)
             
         print finaldf.head(10)          
         #split data into percentages
         msk = np.random.rand(len(finaldf)) < self.train_percent
         train = finaldf[msk].copy()
         test = finaldf[~msk].copy()
                   
         test = test.reset_index()
         train = train.reset_index()
         
         self.train_input =  train[inputparams]
         self.train_output = train[outputparams]
         
         #normalize train_output
         #self.train_output = 1. * self.train_output/self.train_output.max()
         #print self.train_output.head(10)
         
         
         self.test_input =  test[inputparams]
           
         self.test_output = test[outputparams] 
     
     self.data = SupervisedDataSet(inputdim,outputdim)
  
     totalLength = len(self.train_input)
     for line in xrange(totalLength):  # add every training row
         #print self.train_input.values[line], self.train_output.values[:,0][line]
         self.data.addSample(self.train_input.values[line], self.train_output.values[:,0][line])
     
     print "data loaded..."
Example #22
def treinaRede(entradaTreino, saidaTreino):
    """
    Function that creates the supervised training set for the network

    :param entradaTreino: training input data
    :param saidaTreino: training output data
    :return: treinamento : the dataset used to train the network
    """
    # 4 input values per sample
    # 1 output value per sample
    treinamento = SupervisedDataSet(4, 1)

    for i in range(len(entradaTreino)):

        treinamento.addSample(entradaTreino[i], saidaTreino[i])

    return treinamento
Example #23
def buildTrainingSet(dataset):
#     gy = dataset[:,:-1];
#     X_scaled = preprocessing.scale(gy)
#     gydataset = np.hstack((X_scaled,dataset[:,-1:]));
    gydataset = dataset
#     print(gydataset[:,5:6]);
    # the final training data
    trainingset = SupervisedDataSet(15, 2)
    # encode the class label (0-3) in the last column as two binary outputs
    targets = {0: (0, 0), 1: (0, 1), 2: (1, 0), 3: (1, 1)}
    for line in gydataset:
        if line[-1] in targets:
            trainingset.addSample(tuple(line[:15]), targets[line[-1]])
    return trainingset
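To recover the original label from a trained network's two outputs, the encoding above can be inverted by rounding each bit; a small sketch (the helper name is illustrative):

def decode_label(output):
    # invert the 2-bit target encoding used in buildTrainingSet
    hi, lo = int(round(output[0])), int(round(output[1]))
    return 2 * hi + lo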
Example #24
 def apply_updates(self):
     dataset = SupervisedDataSet(self.inputdim, self.outputdim)
     for (si, ai) in self.updates.iterkeys():
         si_ai = '%s-%s' % (si, ai)
         network_in = self.network_inputs[si_ai]
         current_value = self.get_network_value(None, None, si_ai)
         new_value = [
             a + b for a, b in zip(current_value, self.updates[(si, ai)])
         ]
         dataset.addSample(network_in, new_value)
         if PRINT_GAME_RESULTS:
             print 'updating (%s, %s) from %s to %s' % (
                 si, ai, map(PrettyFloat,
                             current_value), map(PrettyFloat, new_value))
     # import pdb; pdb.set_trace()
     if dataset:  # len(dataset) > 0:
         self.trainer.setData(dataset)
         self.trainer.trainEpochs(NTD_TRAIN_EPOCHS)
Example #25
    def test_train(self, epochs=1):
        print("Training...")

        # split the array in a way that the net will be
        # trained with 70% of the images and
        # tested with the rest
        split = int(len(self.samples) * 0.7)
        train_samples = self.samples[0:split]
        train_labels = self.labels[0:split]

        test_samples = self.samples[split:]
        test_labels = self.labels[split:]

        # build the net with 300 input values: one for each
        # pixel of the 10x10 image (100 pixels) times its
        # Red, Green, Blue channels (3 values per pixel)
        net = buildNetwork(300, 300, 1)
        ds = SupervisedDataSet(300, 1)
        for i in range(len(train_samples)):
            ds.addSample(tuple(np.array(train_samples[i], dtype='float64')),
                         (train_labels[i], ))

        trainer = BackpropTrainer(net, ds, verbose=True)
        trainer.trainEpochs(epochs)
        self.totalEpochs = epochs

        error = 0
        counter = 0
        for i in range(0, 100):
            output = net.activate(
                tuple(np.array(test_samples[i], dtype='float64')))
            counter += 1
            print(counter, " : output : ", output[0], " real answer : ",
                  test_labels[i])
            if round(output[0]) != test_labels[i]:
                error += 1

        print("Trained with " + str(epochs) + " epochs; Total: " +
              str(self.totalEpochs) + ";")
        return error
Example #26
    def train(network_file, input_length, output_length, training_data_file,
              learning_rate, momentum, stop_on_convergence, epochs, classify):
        n = get_network(network_file)
        if classify:
            ds = ClassificationDataSet(int(input_length),
                                       int(output_length) * 2)
            ds._convertToOneOfMany()
        else:
            ds = SupervisedDataSet(int(input_length), int(output_length))
        training_data = get_training_data(training_data_file)

        NetworkManager.last_training_set_length = 0
        for line in training_data:
            data = [float(x) for x in line.strip().split(',') if x != '']
            input_data = tuple(data[:(int(input_length))])
            output_data = tuple(data[(int(input_length)):])
            ds.addSample(input_data, output_data)
            NetworkManager.last_training_set_length += 1

        t = BackpropTrainer(n,
                            learningrate=learning_rate,
                            momentum=momentum,
                            verbose=True)
        print "training network " + network_storage_path + network_file

        if stop_on_convergence:
            t.trainUntilConvergence(ds, epochs)
        else:
            # trainOnDataset takes the dataset itself in both cases
            t.trainOnDataset(ds, epochs)

        error = t.testOnData()
        print "training done"
        if not math.isnan(error):
            save_network(n, network_file)
            print "network saved"
        else:
            print "error occurred, network not saved"

        return error
Example #27
    def import_network(self, filename):
        train_samples = self.samples
        train_labels = self.labels

        np.random.seed(0)
        np.random.shuffle(train_samples)
        np.random.seed(0)
        np.random.shuffle(train_labels)

        self.net_shared = NetworkReader.readFrom(filename)
        self.ds_shared = SupervisedDataSet(300, 1)
        for i in range(len(train_samples)):
            self.ds_shared.addSample(
                tuple(np.array(train_samples[i], dtype='float64')),
                (train_labels[i], ))

        self.trainer_shared = BackpropTrainer(self.net_shared,
                                              self.ds_shared,
                                              verbose=True)
Example #28
    def ready_supervised_dataset(self, dataset):
        """
        Ready the supervised dataset for training.

        @TODO: Need to randomize the data being fed to the network.
        See randomBatches() here: http://pybrain.org/docs/api/datasets/superviseddataset.html
        """
        self.network_dataset = SupervisedDataSet(len(self.train_data), 1)
        # Currently only supports log function for normalizing data
        training_values = np.log(dataset.data_frame[self.train_data])
        results = np.log(dataset.data_frame[self.prediction_data].shift(
            -self.prediction_window))
        training_values['PREDICTION_%s' % self.prediction_data[0]] = results
        training_values = training_values.dropna()
        for _, row_data in enumerate(training_values.iterrows()):
            _, data = row_data
            sample = list(data[:-1])
            result = [data[-1]]
            self.network_dataset.addSample(sample, result)
Example #29

def montaDados():
    """
    Function that builds the dataset

    :return: the assembled dataset
    """
    dataset = SupervisedDataSet(3, 1)

    dataset.addSample([0, 0, 0], [0])
    dataset.addSample([0, 1, 1], [0])
    dataset.addSample([1, 0, 1], [0])
    dataset.addSample([1, 1, 0], [0])
    dataset.addSample([1, 0, 0], [1])
    dataset.addSample([0, 0, 1], [1])
    dataset.addSample([0, 1, 0], [0])
    dataset.addSample([1, 1, 1], [1])

    return dataset
Example #30
def ANN(
    trainFeature, trainLabel, testFeature, testLabel, netStructure, para_rate,
    para_momentum
):  #netStructure is a list [in, hidden1, hidden2, out], momentum is a parameter in SGD
    sampleNum = trainFeature.shape[0]
    featureNum = trainFeature.shape[1]
    Dataset = SupervisedDataSet(featureNum, 1)
    i = 0
    while (i < sampleNum):
        print(i)
        Dataset.addSample(list(trainFeature[i]), [trainLabel[i]])
        i += 1
    Network = buildNetwork(netStructure[0],
                           netStructure[1],
                           netStructure[2],
                           netStructure[3],
                           hiddenclass=SigmoidLayer,
                           outclass=SigmoidLayer)
    T = BackpropTrainer(Network,
                        Dataset,
                        learningrate=para_rate,
                        momentum=para_momentum,
                        verbose=True)
    #print(Dataset['input'])
    errorList = []
    errorList.append(T.testOnData(Dataset))
    T.trainOnDataset(Dataset)
    errorList.append(T.testOnData(Dataset))
    T.trainOnDataset(Dataset)
    while (abs(T.testOnData(Dataset) - errorList[-1]) > 0.0001):
        T.trainOnDataset(Dataset)
        errorList.append(T.testOnData(Dataset))
    pass  #this step is for the output of predictedLabel
    print(np.array([Network.activate(x) for x in trainFeature]))
    #print(testLabel)
    print(
        Network.activate([
            0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0
        ]))
    return (errorList)