Code example #1
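Builds parallel training and validation SequentialDataSets from a labeled data table, starting a new sequence whenever the value in the sequence-ID column changes.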
def seqDataSetPair(data, in_labels, out_labels, seq_title, tseqs, vseqs):

    tds = SequentialDataSet(len(in_labels), len(out_labels))
    vds = SequentialDataSet(len(in_labels), len(out_labels))
    ds = None

    for i in xrange(len(data[in_labels[0]])):

        if i == 0 or data[seq_title][i] != data[seq_title][i - 1]:
            if int(data[seq_title][i]) in tseqs:
                ds = tds
                ds.newSequence()
            elif int(data[seq_title][i]) in vseqs:
                ds = vds
                ds.newSequence()
            else:
                ds = None

        if ds is None:
            continue

        din = [data[l][i] for l in in_labels]
        dout = [data[l][i] for l in out_labels]

        ds.addSample(din, dout)

    return (tds, vds)
Code example #2
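Plots one sequence of a dataset: each input channel gets its own subplot, and each target channel is drawn against the network's simulated output.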
def visulizeDataSet(network, data, seqno, in_labels, out_labels):

    seq = data.getSequence(seqno)
    tmpDs = SequentialDataSet(data.indim, data.outdim)
    tmpDs.newSequence()

    for i in xrange(data.getSequenceLength(seqno)):
        tmpDs.addSample(seq[0][i], seq[1][i])

    nplots = len(in_labels) + len(out_labels)

    for i in range(len(in_labels)):
        p = PL.subplot(nplots, 1, i + 1)
        p.clear()
        p.plot(tmpDs['input'][:, i])
        p.set_ylabel(in_labels[i])

    # the network output is the same for every subplot, so compute it once
    output = ModuleValidator.calculateModuleOutput(network, tmpDs)

    for i in range(len(out_labels)):
        p = PL.subplot(nplots, 1, i + 1 + len(in_labels))
        p.clear()

        p.plot(tmpDs['target'][:, i], label='train')
        p.plot(output[:, i], label='sim')

        p.legend()
        p.set_ylabel(out_labels[i])
Code example #3
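A train method that optionally rebuilds its LSTM, fills a SequentialDataSet from a windowed history (splitting sequences at reset markers), trains, and then replays the history so the recurrent state matches the end of the buffer.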
    def train(self, params):
        """
    Train LSTM network on buffered dataset history
    After training, run LSTM on history[:-1] to get the state correct
    :param params:
    :return:
    """
        if params['reset_every_training']:
            n = params['encoding_num']
            self.net = buildNetwork(n,
                                    params['num_cells'],
                                    n,
                                    hiddenclass=LSTMLayer,
                                    bias=True,
                                    outputbias=params['output_bias'],
                                    recurrent=True)
            self.net.reset()

        # prepare training dataset
        ds = SequentialDataSet(params['encoding_num'], params['encoding_num'])
        history = self.window(self.history, params)
        resets = self.window(self.resets, params)

        for i in xrange(1, len(history)):
            if not resets[i - 1]:
                ds.addSample(self.encoder.encode(history[i - 1]),
                             self.encoder.encode(history[i]))
            if resets[i]:
                ds.newSequence()

        if params['num_epochs'] > 1:
            trainer = RPropMinusTrainer(self.net,
                                        dataset=ds,
                                        verbose=params['verbosity'] > 0)

            if len(history) > 1:
                trainer.trainEpochs(params['num_epochs'])
        else:
            self.trainer.setData(ds)
            self.trainer.train()

        # run network on buffered dataset after training to get the state right
        self.net.reset()
        for i in xrange(len(history) - 1):
            symbol = history[i]
            output = self.net.activate(self.encoder.encode(symbol))
            self.encoder.classify(output, num=params['num_predictions'])

            if resets[i]:
                self.net.reset()
Code example #4
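Builds an n-step-ahead prediction dataset from a scalar series, optionally adding time-of-day and day-of-week input fields.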
def getPyBrainDataSet(sequence,
                      nTrain,
                      predictionStep=1,
                      useTimeOfDay=True,
                      useDayOfWeek=True):
    print "generate a pybrain dataset of sequences"
    print "the training data contains ", str(nTrain -
                                             predictionStep), "records"

    inDim = 1 + int(useTimeOfDay) + int(useDayOfWeek)
    ds = SequentialDataSet(inDim, 1)
    if useTimeOfDay:
        print "include time of day as input field"
    if useDayOfWeek:
        print "include day of week as input field"

    for i in xrange(nTrain - predictionStep):
        if useTimeOfDay and useDayOfWeek:
            sample = np.array([
                sequence['data'][i], sequence['timeofday'][i],
                sequence['dayofweek'][i]
            ])
        elif useTimeOfDay:
            sample = np.array([sequence['data'][i], sequence['timeofday'][i]])
        elif useDayOfWeek:
            sample = np.array([sequence['data'][i], sequence['dayofweek'][i]])
        else:
            sample = np.array([sequence['data'][i]])

        ds.addSample(sample, sequence['data'][i + predictionStep])
    return ds
Code example #5
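A learner constructor that prepares an importance-weighted dataset, a raw reinforcement dataset, and a value-function SequentialDataSet, plus the backprop trainers that consume them.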
 def __init__(self, net, task, valueNetwork=None, **args):
     self.net = net
     self.task = task
     self.setArgs(**args)
     if self.valueLearningRate is None:
         self.valueLearningRate = self.learningRate
     if self.valueMomentum is None:
         self.valueMomentum = self.momentum
     if self.supervisedPlotting:
         from pylab import ion
         ion() 
     
     # adaptive temperature:
     self.tau = 1.
     
     # prepare the datasets to be used
     self.weightedDs = ImportanceDataSet(self.task.outdim, self.task.indim)
     self.rawDs = ReinforcementDataSet(self.task.outdim, self.task.indim)
     self.valueDs = SequentialDataSet(self.task.outdim, 1)
     
     # prepare the supervised trainers
     self.bp = BackpropTrainer(self.net, self.weightedDs, self.learningRate,
                               self.momentum, verbose=False,
                               batchlearning=True)            
     
     # CHECKME: outsource
     self.vnet = valueNetwork
     if valueNetwork is not None:
         self.vbp = BackpropTrainer(self.vnet, self.valueDs, self.valueLearningRate,
                                    self.valueMomentum, verbose=self.verbose)
         
     # keep information:
     self.totalSteps = 0
     self.totalEpisodes = 0
Code example #6
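Converts a DataFrame into a SequentialDataSet in which each row predicts the first column of the next row, storing every pair as its own sequence.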
 def create_data(self, inputs, targets):
     data = SequentialDataSet(inputs, targets)
     for i in xrange(0, len(self.dataframe) - 1):
         data.newSequence()
         ins = self.dataframe.iloc[i].values
         target = self.dataframe.iloc[i + 1].values[0]
         data.appendLinked(ins, target)
     self.data = data
Code example #7
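A standalone variant of the previous example that uses positional .iloc indexing and returns the dataset instead of storing it on the instance.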
def create_data(dataframe, inputs, targets):
    data = SequentialDataSet(inputs, targets)
    for i in range(0, dataframe.shape[0] - 1):
        row = dataframe.iloc[i]
        data.newSequence()
        ins = row.values
        target = dataframe.iloc[i + 1].values[0]
        data.appendLinked(ins, target)
    return data
Code example #8
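Packs feature and target DataFrames row by row into a SequentialDataSet for recurrent-network training.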
    def rnn_dataset(self, X, y):

        # Create an empty dataset.
        ds = SequentialDataSet(len(X.columns), len(y.columns))

        # And add all rows...
        for i in range(0, len(X.index)):
            ds.addSample(tuple(X.iloc[i, :].values), tuple(y.iloc[i, :].values))
        return ds
Code example #9
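An experiment reset hook: selects input and output encoders, loads the NYC taxi dataset, builds an LSTM (with a sigmoid output layer for likelihood encoding), attaches a backprop trainer, and pre-generates the input/target sequences.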
    def reset(self, params, repetition):
        print params

        self.nDimInput = 1  #3
        self.inputEncoder = PassThroughEncoder()

        if params['output_encoding'] is None:
            self.outputEncoder = PassThroughEncoder()
            self.nDimOutput = 1
        elif params['output_encoding'] == 'likelihood':
            self.outputEncoder = ScalarBucketEncoder()
            self.nDimOutput = self.outputEncoder.encoder.n

        if (params['dataset'] == 'nyc_taxi'
                or params['dataset'] == 'nyc_taxi_perturb_baseline'):
            self.dataset = NYCTaxiDataset(params['dataset'])
        else:
            raise Exception("Dataset not found")

        self.testCounter = 0
        self.resets = []
        self.iteration = 0

        # initialize LSTM network
        random.seed(6)
        if params['output_encoding'] is None:
            self.net = buildNetwork(self.nDimInput,
                                    params['num_cells'],
                                    self.nDimOutput,
                                    hiddenclass=LSTMLayer,
                                    bias=True,
                                    outputbias=True,
                                    recurrent=True)
        elif params['output_encoding'] == 'likelihood':
            self.net = buildNetwork(self.nDimInput,
                                    params['num_cells'],
                                    self.nDimOutput,
                                    hiddenclass=LSTMLayer,
                                    bias=True,
                                    outclass=SigmoidLayer,
                                    recurrent=True)
        print self.net['out']
        print self.net['hidden0']
        self.trainer = BackpropTrainer(self.net,
                                       dataset=SequentialDataSet(
                                           self.nDimInput, self.nDimOutput),
                                       learningrate=0.01,
                                       momentum=0,
                                       verbose=params['verbosity'] > 0)

        (self.networkInput, self.targetPrediction, self.trueData) = \
          self.dataset.generateSequence(
          prediction_nstep=params['prediction_nstep'],
          output_encoding=params['output_encoding'],
          noise=params['noise'])
Code example #10
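The same row-wise dataset construction as code example #8, with a docstring and .iloc indexing throughout.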
    def rnn_dataset(X, y):
        """
        Create a recurrent neural network dataset according to the pybrain specification and return this new format.
        """

        # Create an empty dataset
        ds = SequentialDataSet(len(X.columns), len(y.columns))
        # And add all rows
        for i in range(0, len(X.index)):
            ds.addSample(tuple(X.iloc[i, :].values), tuple(y.iloc[i, :].values))
        return ds
Code example #11
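A train method that optionally rebuilds the LSTM, fills a dataset from windowed input and target histories, trains with RProp- or the stored backprop trainer, and replays the inputs to restore the recurrent state.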
    def train(self, params, verbose=False):

        if params['reset_every_training']:
            if verbose:
                print 'create lstm network'

            random.seed(6)
            if params['output_encoding'] is None:
                self.net = buildNetwork(self.nDimInput,
                                        params['num_cells'],
                                        self.nDimOutput,
                                        hiddenclass=LSTMLayer,
                                        bias=True,
                                        outputbias=True,
                                        recurrent=True)
            elif params['output_encoding'] == 'likelihood':
                self.net = buildNetwork(self.nDimInput,
                                        params['num_cells'],
                                        self.nDimOutput,
                                        hiddenclass=LSTMLayer,
                                        bias=True,
                                        outclass=SigmoidLayer,
                                        recurrent=True)

        self.net.reset()

        ds = SequentialDataSet(self.nDimInput, self.nDimOutput)
        networkInput = self.window(self.networkInput, params)
        targetPrediction = self.window(self.targetPrediction, params)

        # prepare a training data-set using the history
        for i in xrange(len(networkInput)):
            ds.addSample(self.inputEncoder.encode(networkInput[i]),
                         self.outputEncoder.encode(targetPrediction[i]))

        if params['num_epochs'] > 1:
            trainer = RPropMinusTrainer(self.net, dataset=ds, verbose=verbose)

            if verbose:
                print " train LSTM on ", len(
                    ds), " records for ", params['num_epochs'], " epochs "

            if len(networkInput) > 1:
                trainer.trainEpochs(params['num_epochs'])

        else:
            self.trainer.setData(ds)
            self.trainer.train()

        # run through the training dataset to get the lstm network state right
        self.net.reset()
        for i in xrange(len(networkInput)):
            self.net.activate(ds.getSample(i)[0])
Code example #12
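Trains a 16-tap LSTM on the first half of an occupancy signal and plots its predictions against the measured series; the commented-out lines show 3-, 8-, and 12-tap variants.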
def ltsmXY(tin, tout, title='ltsm.png'):
    # imports needed to run this function standalone
    from sys import stdout

    import matplotlib.pyplot as plt
    from pybrain.datasets import SequentialDataSet
    from pybrain.structure.modules import LSTMLayer
    from pybrain.supervised import RPropMinusTrainer
    from pybrain.tools.shortcuts import buildNetwork

    #datain = zip(tin[:-3], tin[1:-2], tin[2:-1])
    #datain = zip(tin[:-8], tin[1:-7], tin[2:-6], tin[3:-5], tin[4:-4], tin[5:-3],tin[6:-2], tin[7:-1])
    #datain = zip(tin[:-12], tin[1:-11], tin[2:-10], tin[3:-9], tin[4:-8], tin[5:-7],tin[6:-6], tin[7:-5], tin[8:-4], tin[9:-3], tin[10:-2], tin[11:-1])
    datain = zip(tin[:-16], tin[1:-15], tin[2:-14], tin[3:-13], tin[4:-12], tin[5:-11],tin[6:-10], tin[7:-9], tin[8:-8], tin[9:-7], tin[10:-6], tin[11:-5], tin[12:-4], tin[13:-3], tin[14:-2], tin[15:-1])

    #dataout = tout[3:]
    #dataout = tout[8:]
    #dataout = tout[12:]
    dataout = tout[16:]

    #ds = SequentialDataSet(3, 1)
    #ds = SequentialDataSet(8, 1)
    #ds = SequentialDataSet(12, 1)
    ds = SequentialDataSet(16, 1)

    for x, y in zip(datain[:len(datain)/2], dataout[:len(datain)/2]):
        ds.addSample(x, y)


    # add layers until overfitting the training data
    #net = buildNetwork(3,5,1,hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
    #net = buildNetwork(8, 8, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
    #net = buildNetwork(12, 20, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
    net = buildNetwork(16, 20, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)

    
    trainer = RPropMinusTrainer(net, dataset=ds)
    train_errors = []
    EPOCHS_PER_CYCLE = 5
    CYCLES = 100
    EPOCHS = EPOCHS_PER_CYCLE * CYCLES
    for i in xrange(CYCLES):
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
        train_errors.append(trainer.testOnData())
        epoch = (i+1) * EPOCHS_PER_CYCLE
        #print "\r epoch {}/{}".format(epoch, EPOCHS)
        stdout.flush()

    print "final error =", train_errors[-1]

    pred_out = []
    for i in range(len(datain)):
        pred_out.append(net.activate(datain[i]))
    
    fig = plt.figure()
    #tout[16:].plot(ax=ax, title='Occupancy')
    plt.plot(tout[16:].index, tout[16:], 'y', linewidth=1.5)
    plt.plot(tout[16:].index, pred_out, 'b+')
    plt.legend(['Occupancy', 'LTSM'])
    fig.tight_layout()
    plt.savefig(title, bbox_inches='tight')
Code example #13
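Normalizes a consumption series (tracking the running maximum in a global) and builds a one-step autoregressive dataset from it.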
def create_train_set(consumption):
    #create train/test set
    global active_max
    ds = SequentialDataSet(1, 1)
    consumption_data = normalize(consumption)
    active_max = max(consumption_data[1], active_max)
    consumption = consumption_data[0]

    size = len(consumption)
    for i in range(0, size - 1):
        ds.addSample(consumption[i], consumption[i + 1])

    return ds
Code example #14
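An experiment reset hook: picks an encoder and a dataset by name, clears the bookkeeping lists, and builds the LSTM together with its backprop trainer.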
    def reset(self, params, repetition):
        random.seed(params['seed'])

        if params['encoding'] == 'basic':
            self.encoder = BasicEncoder(params['encoding_num'])
        elif params['encoding'] == 'distributed':
            self.encoder = DistributedEncoder(
                params['encoding_num'],
                maxValue=params['encoding_max'],
                minValue=params['encoding_min'],
                classifyWithRandom=params['classify_with_random'])
        else:
            raise Exception("Encoder not found")

        if params['dataset'] == 'simple':
            self.dataset = SimpleDataset()
        elif params['dataset'] == 'reber':
            self.dataset = ReberDataset(maxLength=params['max_length'])
        elif params['dataset'] == 'high-order':
            self.dataset = HighOrderDataset(
                numPredictions=params['num_predictions'], seed=params['seed'])
        else:
            raise Exception("Dataset not found")

        self.computeCounter = 0

        self.history = []
        self.resets = []
        self.randoms = []

        self.currentSequence = []
        self.targetPrediction = []
        self.replenishSequence(params, iteration=0)

        self.net = buildNetwork(params['encoding_num'],
                                params['num_cells'],
                                params['encoding_num'],
                                hiddenclass=LSTMLayer,
                                bias=True,
                                outputbias=params['output_bias'],
                                recurrent=True)

        self.trainer = BackpropTrainer(self.net,
                                       dataset=SequentialDataSet(
                                           params['encoding_num'],
                                           params['encoding_num']),
                                       learningrate=0.01,
                                       momentum=0,
                                       verbose=params['verbosity'] > 0)

        self.sequenceCounter = 0
Code example #15
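A test helper that builds a dataset matching a module's dimensions: sequential (2 sequences of 3 samples) for recurrent modules, supervised otherwise, filled with random values.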
def buildAppropriateDataset(module):
    """ build a sequential dataset with 2 sequences of 3 samples, with arndom input and target values,
    but the appropriate dimensions to be used on the provided module. """
    if module.sequential:
        d = SequentialDataSet(module.indim, module.outdim)
        for dummy in range(2):
            d.newSequence()
            for dummy in range(3):
                d.addSample(randn(module.indim), randn(module.outdim))
    else:
        d = SupervisedDataSet(module.indim, module.outdim)
        for dummy in range(3):
            d.addSample(randn(module.indim), randn(module.outdim))
    return d
Code example #16
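A self-contained demo: builds a 6-tap autoregressive dataset, trains an LSTM with RProp- over 100 cycles of 5 epochs, and reports the error over the training sequence.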
def ltsm(data):
    from pybrain.datasets import SequentialDataSet
    
    datain = zip(data[:-6], data[1:-5], data[2:-4], data[3:-3], data[4:-2], data[5:-1])
    dataout = data[6:]
    ds = SequentialDataSet(6, 1)
    for x, y in zip(datain, dataout):
        ds.addSample(x, y)

    from pybrain.tools.shortcuts import buildNetwork
    from pybrain.structure.modules import LSTMLayer

    net = buildNetwork(6, 7, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)

    from pybrain.supervised import RPropMinusTrainer
    from sys import stdout
    
    trainer = RPropMinusTrainer(net, dataset=ds)
    train_errors = []
    EPOCHS_PER_CYCLE = 5
    CYCLES = 100
    EPOCHS = EPOCHS_PER_CYCLE * CYCLES
    for i in xrange(CYCLES):
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
        train_errors.append(trainer.testOnData())
        epoch = (i+1) * EPOCHS_PER_CYCLE
        #print "\r epoch {}/{}".format(epoch, EPOCHS)
        stdout.flush()

    print "final error =", train_errors[-1]

    '''
    plt.figure()
    plt.plot(range(0, EPOCHS, EPOCHS_PER_CYCLE), train_errors)
    plt.xlabel('epoch')
    plt.ylabel('error')
    plt.show()
    '''

    test_error = 0.
    cnt = 0
    for sample, target in ds.getSequenceIterator(0):
        #print "sample = ",  sample
        #print "predicted next sample = %4.1f" % net.activate(sample)
        #print "actual next sample = %4.1f" % target
        test_error += abs(net.activate(sample) - target)
        cnt += 1
    test_error /= cnt 
    print "test (train) error =", test_error
Code example #17
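A scalar-encoder variant of code example #4: the input and output widths come from the encoders, and targets are encoded SDRs when an output encoder is supplied.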
def getPyBrainDataSetScalarEncoder(sequence,
                                   nTrain,
                                   encoderInput,
                                   encoderOutput,
                                   predictionStep=1,
                                   useTimeOfDay=True,
                                   useDayOfWeek=True):
    """
  Use scalar encoder for the data
  :param sequence:
  :param nTrain:
  :param predictionStep:
  :param useTimeOfDay:
  :param useDayOfWeek:
  :return:
  """
    print "generate a pybrain dataset of sequences"
    print "the training data contains ", str(nTrain -
                                             predictionStep), "records"

    if encoderInput is None:
        inDim = 1 + int(useTimeOfDay) + int(useDayOfWeek)
    else:
        inDim = encoderInput.n + int(useTimeOfDay) + int(useDayOfWeek)

    if encoderOutput is None:
        outDim = 1
    else:
        outDim = encoderOutput.n

    ds = SequentialDataSet(inDim, outDim)
    if useTimeOfDay:
        print "include time of day as input field"
    if useDayOfWeek:
        print "include day of week as input field"

    for i in xrange(nTrain - predictionStep):

        sample = getSingleSample(i, sequence, useTimeOfDay, useDayOfWeek)

        if encoderOutput is None:
            dataSDROutput = [sequence['normdata'][i + predictionStep]]
        else:
            dataSDROutput = encoderOutput.encode(
                sequence['data'][i + predictionStep])

        ds.addSample(sample, dataSDROutput)

    return ds
Code example #18
File: gpmodel.py  Project: chenzhikuo1/OCR-Python
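An episodic experiment whose model is one Gaussian process per action dimension, all trained from a shared SequentialDataSet (action dimensions plus one for time).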
    def __init__(self, task, agent):
        EpisodicExperiment.__init__(self, task, agent)

        # create model and training set (action dimension + 1 for time)
        self.modelds = SequentialDataSet(self.task.indim + 1, 1)
        self.model = [
            GaussianProcess(indim=self.modelds.getDimension('input'),
                            start=(-10, -10, 0),
                            stop=(10, 10, 300),
                            step=(5, 5, 100)) for _ in range(self.task.outdim)
        ]

        # change hyper parameters for all gps
        for m in self.model:
            m.hyper = (20, 2.0, 0.01)
Code example #19
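A minimal helper that packs (X, y) pairs into a 4-input dataset, trains a one-cell LSTM with RProp- for 25 epochs, and returns the network and its final error.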
def train(context, trainX, trainY):
    ds = SequentialDataSet(4, 1)
    for dataX, dataY in zip(trainX, trainY):
        ds.addSample(dataX, dataY)
    net = buildNetwork(4,
                       1,
                       1,
                       hiddenclass=LSTMLayer,
                       outputbias=False,
                       recurrent=True)
    trainer = RPropMinusTrainer(net, dataset=ds)
    EPOCHS_PER_CYCLE = 5
    CYCLES = 5
    for i in range(CYCLES):
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
    return net, trainer.testOnData()
Code example #20
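Builds a dataset mapping each normalized opening price to the same day's normalized closing price, keeping the normalization maxima in globals.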
def create_train_set(open_price, close_price):
    global open_max
    global close_max
    ds = SequentialDataSet(1, 1)
    open_data = normalize(open_price)
    close_data = normalize(close_price)
    open_max = open_data[1]
    close_max = close_data[1]
    open_price = open_data[0]
    close_price = close_data[0]

    size = len(open_price)
    for i in range(0, size):
        ds.addSample(open_price[i], close_price[i])

    return ds
Code example #21
File: NET4.py  Project: daliel/PyBrain_DNS_52_52
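Builds one sequence per target record, linking every earlier record to that target, then attaches an RProp- trainer to the resulting dataset.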
    def AddDataSequential(self, data):
        self.ds = SequentialDataSet(self.inputsize, self.outputsize)
        for i in xrange(len(data) - 1, 0, -1):

            t = data[i]
            k = i - 1
            while k > -1:
                self.ds.appendLinked(data[k], t)
                k -= 1
            self.ds.newSequence()
        """print self.ds.getNumSequences()
		for i in range(self.ds.getNumSequences()):
			for input, target in self.ds.getSequenceIterator(i):
				print i, TransToIntList_45(input), TransToIntList_45(target)"""
        self.trainer = RPropMinusTrainer(self.net,
                                         dataset=self.ds,
                                         learningrate=0.1)
        return 0
Code example #22
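A Django management command that pulls recent prices for a ticker, trains an LSTM on the scaled series, and prints per-sample predictions next to their targets.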
    def handle(self, *args, **options):
        ticker = args[0]
        print("****** STARTING PREDICTOR " + ticker + " ******* ")
        prices = Price.objects.filter(
            symbol=ticker).order_by('-created_on').values_list('price',
                                                               flat=True)
        # slice to the most recent NUM_MINUTES_BACK prices, then reverse the
        # copy into chronological order (.reverse() would return None here)
        data = normalization(list(prices[0:NUM_MINUTES_BACK])[::-1])
        data = [int(x * MULT_FACTOR) for x in data]
        ds = SequentialDataSet(1, 1)
        for sample, next_sample in zip(data, cycle(data[1:])):
            ds.addSample(sample, next_sample)

        net = buildNetwork(1,
                           5,
                           1,
                           hiddenclass=LSTMLayer,
                           outputbias=False,
                           recurrent=True)

        trainer = RPropMinusTrainer(net, dataset=ds)
        train_errors = []  # save errors for plotting later
        EPOCHS_PER_CYCLE = 5
        CYCLES = 100
        EPOCHS = EPOCHS_PER_CYCLE * CYCLES
        for i in xrange(CYCLES):
            trainer.trainEpochs(EPOCHS_PER_CYCLE)
            train_errors.append(trainer.testOnData())
            epoch = (i + 1) * EPOCHS_PER_CYCLE
            print("\r epoch {}/{}".format(epoch, EPOCHS), end="")
            stdout.flush()

        print()
        print("final error =", train_errors[-1])

        for sample, target in ds.getSequenceIterator(0):
            show_pred_sample = net.activate(sample) / MULT_FACTOR
            show_sample = sample / MULT_FACTOR
            show_target = target / MULT_FACTOR
            show_diff = show_pred_sample - show_target
            show_diff_pct = 100 * show_diff / show_pred_sample
            print("{} => {}, act {}. ({}%)".format(
                show_sample[0], round(show_pred_sample[0], 3), show_target[0],
                int(round(show_diff_pct[0], 0))))
Code example #23
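Trains an LSTM on raw WAV samples, then free-runs it by feeding each prediction back as the next input, writing the generated signal to a new WAV file.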
def main():
    generated_data = [0 for i in range(10000)]
    rate, data = get_data_from_wav("../../data/natabhairavi_violin.wav")
    data = data[1000:190000]
    print("Got wav")
    ds = SequentialDataSet(1, 1)
    for sample, next_sample in zip(data, cycle(data[1:])):
        ds.addSample(sample, next_sample)

    net = buildNetwork(1,
                       5,
                       1,
                       hiddenclass=LSTMLayer,
                       outputbias=False,
                       recurrent=True)

    trainer = RPropMinusTrainer(net, dataset=ds)
    train_errors = []  # save errors for plotting later
    EPOCHS_PER_CYCLE = 5
    CYCLES = 10
    EPOCHS = EPOCHS_PER_CYCLE * CYCLES
    for i in xrange(CYCLES):
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
        train_errors.append(trainer.testOnData())
        epoch = (i + 1) * EPOCHS_PER_CYCLE
        print("\r epoch {}/{}".format(epoch, EPOCHS), end="")
        stdout.flush()

    # predict new values
    old_sample = [100]

    # cap the loop at the preallocated buffer size to avoid an IndexError
    for i in xrange(len(generated_data)):
        new_sample = net.activate(old_sample)
        old_sample = new_sample
        generated_data[i] = new_sample[0]
        print(new_sample)

    wavfile.write("../../output/test.wav", rate, np.array(generated_data))
Code example #24
File: suite.py  Project: oxtopus/htmresearch
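A stateless variant of the train method in code example #3: network, dataset, and trainer are built locally and the trained network is returned.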
    def train(self, params):
        n = params['encoding_num']
        net = buildNetwork(n,
                           params['num_cells'],
                           n,
                           hiddenclass=LSTMLayer,
                           bias=True,
                           outputbias=params['output_bias'],
                           recurrent=True)
        net.reset()

        ds = SequentialDataSet(n, n)
        trainer = RPropMinusTrainer(net, dataset=ds)

        history = self.window(self.history, params)
        resets = self.window(self.resets, params)

        for i in xrange(1, len(history)):
            if not resets[i - 1]:
                ds.addSample(self.encoder.encode(history[i - 1]),
                             self.encoder.encode(history[i]))
            if resets[i]:
                ds.newSequence()

        if len(history) > 1:
            trainer.trainEpochs(params['num_epochs'])
            net.reset()

        for i in xrange(len(history) - 1):
            symbol = history[i]
            output = net.activate(self.encoder.encode(symbol))
            predictions = self.encoder.classify(output,
                                                num=params['num_predictions'])

            if resets[i]:
                net.reset()

        return net
Code example #25
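Builds a 5-input LSTM with a sigmoid output, loads each (input, target) pair as its own sequence, and backprop-trains for 1000 iterations, printing the error each time.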
    def init_nn(self, datain, dataout):
        INPUTS = 5
        OUTPUTS = 1
        HIDDEN = 20
        self.net = buildNetwork(INPUTS,
                                HIDDEN,
                                OUTPUTS,
                                hiddenclass=LSTMLayer,
                                outclass=SigmoidLayer,
                                recurrent=True,
                                bias=True)
        self.ds = SequentialDataSet(INPUTS, OUTPUTS)

        for x, y in itertools.izip(datain, dataout):
            self.ds.newSequence()
            self.ds.appendLinked(tuple(x), tuple(y))

        self.net.randomize()

        trainer = BackpropTrainer(self.net, self.ds)

        for _ in range(1000):
            print trainer.train()
Code example #26
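Builds a 7-in/7-out dataset from generated Reber-grammar sequences, after forcing the final target to mark the argmax of the last output vector.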
def getReberDS(maxLength, display=0):
    """
  @param maxLength (int): maximum length of the sequence
  """
    [in_seq, out_seq] = generateSequencesVector(maxLength)

    target = out_seq
    last_target = target[-1]
    last_target[np.argmax(out_seq[-1])] = 1
    target[-1] = last_target

    ds = SequentialDataSet(7, 7)
    i = 0
    for sample, next_sample in zip(in_seq, target):
        ds.addSample(sample, next_sample)
        if display:
            print("     sample: %s" % sample)
            print("     target: %s" % next_sample)
            print("next sample: %s" % out_seq[i])
            print()
        i += 1

    return (ds, in_seq, out_seq)
Code example #27
File: predict_price_v3.py  Project: ycaokris/pytrader
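A skeleton of the price-predictor command: the database query is commented out, and an LSTM with a linear output is trained on four hand-written samples before printing one activation.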
    def handle(self, *args, **options):
        ticker = args[0]
        print("****** STARTING PREDICTOR " + ticker + " ******* ")
        #prices = Price.objects.filter(symbol=ticker).order_by('-created_on').values_list('price',flat=True)
        #data = prices[0:NUM_MINUTES_BACK].reverse()
        #data = [ x * MULT_FACTOR for x in data]

        from pybrain.tools.shortcuts import buildNetwork
        from pybrain.supervised.trainers import BackpropTrainer
        from pybrain.datasets import SequentialDataSet
        from pybrain.structure import SigmoidLayer, LinearLayer
        from pybrain.structure import LSTMLayer

        import itertools
        import numpy as np

        INPUTS = 5
        OUTPUTS = 1
        HIDDEN = 40

        net = buildNetwork(INPUTS, HIDDEN, OUTPUTS, hiddenclass=LSTMLayer, outclass=LinearLayer, recurrent=True, bias=True) 

        ds = SequentialDataSet(INPUTS, OUTPUTS)
        ds.addSample([0,1,2,3,4],[5])
        ds.addSample([5,6,7,8,9],[10])
        ds.addSample([10,11,12,13,14],[15])
        ds.addSample([16,17,18,19,20],[21])

        net.randomize()

        trainer = BackpropTrainer(net, ds)

        for _ in range(1000):
            trainer.train()

        x=net.activate([0,1,2,3,4])
        print x 
Code example #28
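A script fragment: builds a one-step autoregressive dataset from a series (alternative data definitions are commented out) and a 12-cell LSTM without peepholes.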
# data.extend([data1[i]*2 for i in range(len(data1))])
# data.extend([data1[i]*3 for i in range(len(data1))])
# data.extend([data1[i]*4 for i in range(len(data1))])

#data = [float(i*1)/max(data) for i in data]
#data = [24924.5,46039.49,41595.55,19403.54,21827.9,21043.39,22136.64,26229.21,57258.43,42960.91,17596.96,16145.35,16555.11,17413.94,18926.74,14773.04,15580.43,17558.09,16637.62,16216.27,16328.72,16333.14,17688.76,17150.84,15360.45,15381.82,17508.41,15536.4,15740.13,15793.87,16241.78,18194.74,19354.23,18122.52,20094.19,23388.03,26978.34,25543.04,38640.93,34238.88,19549.39,19552.84,18820.29,22517.56,31497.65,44912.86,55931.23,19124.58,15984.24,17359.7,17341.47,18461.18,21665.76,37887.17,46845.87,19363.83,20327.61,21280.4,20334.23,20881.1,20398.09,23873.79,28762.37,50510.31,41512.39,20138.19,17235.15,15136.78,15741.6,16434.15,15883.52,14978.09,15682.81,15363.5,16148.87,15654.85,15766.6,15922.41,15295.55,14539.79,14689.24,14537.37,15277.27,17746.68,18535.48,17859.3,18337.68,20797.58,23077.55,23351.8,31579.9,39886.06,18689.54,19050.66,20911.25,25293.49,33305.92,45773.03,46788.75,23350.88,16567.69,16894.4,18365.1,18378.16,23510.49,36988.49,54060.1,20124.22,20113.03,21140.07,22366.88,22107.7,28952.86,57592.12,34684.21,16976.19,16347.6,17147.44,18164.2,18517.79,16963.55,16065.49,17666,17558.82,16633.41,15722.82,17823.37,16566.18,16348.06,15731.18,16628.31,16119.92,17330.7,16286.4,16680.24,18322.37,19616.22,19251.5,18947.81,21904.47,22764.01,24185.27,27390.81]
print(data)
#exit(0)

#data =[-0.025752496,0.091349779,0.112477983,-0.043485112,-0.076961041,0.175632569,0.131852131,0.000000000,-0.073203404,-0.172245905,-0.154150680,0.205443974]
#data = data * 100

from pybrain.datasets import SequentialDataSet
from itertools import cycle

ds = SequentialDataSet(1, 1)
for sample, next_sample in zip(data, cycle(data[1:])):
    ds.addSample(sample, next_sample)
print(ds)

from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure.modules import LSTMLayer

# Build a simple LSTM network with 1 input node, 12 LSTM cells and 1 output node
net = buildNetwork(1,
                   12,
                   1,
                   hiddenclass=LSTMLayer,
                   peepholes=False,
                   outputbias=False,
                   recurrent=True)
Code example #29
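Reads alternating input/target records from a file with eval(), packs them into a dataset, and builds an LSTM with a sigmoid output layer.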
import sys

from pybrain.datasets import SequentialDataSet

fileName = sys.argv[1]
myFile = open(fileName, "r")
lines = myFile.readlines()
inputData = []
outputData = []

for line in lines:
    allData = (eval(line))
    if len(allData) == 4:
        outputData.append([allData[3]])
    else:
        inputData.append(allData[2])
inputSize = len(inputData[0])
outputSize = len(outputData[0])

ds = SequentialDataSet(inputSize, outputSize)
for i in range(0, len(outputData)):
    ds.addSample(inputData[i], outputData[i])

from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure.modules import LSTMLayer
from pybrain.structure.modules import SigmoidLayer

net = buildNetwork(inputSize,
                   inputSize,
                   outputSize,
                   hiddenclass=LSTMLayer,
                   outclass=SigmoidLayer,
                   outputbias=False,
                   recurrent=True)
Code example #30
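A fragment that adds recurrent connections to each hidden layer, assembles one dataset from several generated behavior samples (approach, leave, wait), and creates a backprop trainer.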
    
    net = buildNetwork(*networkDim,  recurrent=True)
    
    for i in range(hiddenLayers):
        net.addRecurrentConnection(FullConnection(net["hidden" + str(i)],net["hidden"+ str(i)]))
    
    net.sortModules()  # sorts the network so it is ready to be activated

    # Creating dataset for training the network
    print("Generating dataset")

    sequenceDataSet = SequentialDataSet(inputNeurons, outputNeurons)

    sequenceDataSet = GenerateDataSet.approachDataSet(approachDataSamples, enviroment,robotHeight, prefSpeed, distPreferences, sequenceDataSet )
    
    sequenceDataSet = GenerateDataSet.leaveDataSet(leaveDataSet, enviroment,robotHeight, prefSpeed, distPreferences, sequenceDataSet )

    sequenceDataSet = GenerateDataSet.waitCloseDataSet(waitCloseDataSamples,robotHeight, sequenceDataSet)
    
    sequenceDataSet = GenerateDataSet.waitFarDataSet(waitFarDataSamples,distPreferences, robotHeight, sequenceDataSet)

    # Training neural network.
    print("Training network")
    trainer = BackpropTrainer(net, dataset=sequenceDataSet, learningrate=0.01,
                              lrdecay=1, momentum=0, weightdecay=0, verbose=True)