Example #1
def prepareANNDataset(data, prob=None):
    '''
        Method to prepare the dataset for ANN training
        and testing
    '''
    # we only import this when preparing ANN dataset
    import pybrain.datasets as dt

    # supplementary method to convert list to tuple
    def extract(row):
        return tuple(row)

    # get the number of inputs and outputs;
    # the labels Series has a single axis, so classification gets two
    # outputs (a one-hot pair) and regression keeps just one
    inputs = len(data[0].columns)
    outputs = len(data[1].axes) + 1
    if prob == 'regression':
        outputs -= 1

    # create dataset object
    dataset = dt.SupervisedDataSet(inputs, outputs)

    # convert dataframes to lists of tuples
    x = list(data[0].apply(extract, axis=1))
    if prob == 'regression':
        # wrap each target in a one-element tuple; (item) alone is not a tuple
        y = [(item,) for item in data[1]]
    else:
        # encode the binary label as a one-hot pair
        y = [(item, abs(item - 1)) for item in data[1]]

    # and add samples to the ANN dataset
    for x_item, y_item in zip(x, y):
        dataset.addSample(x_item, y_item)

    return dataset
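
A minimal usage sketch, assuming `data` arrives as a (features, labels) pair of a pandas DataFrame and Series as the function expects; the column names and values here are illustrative only:

import pandas as pd

features = pd.DataFrame({'x1': [0.1, 0.9, 0.3], 'x2': [0.2, 0.8, 0.4]})
labels = pd.Series([0, 1, 0])

clf_ds = prepareANNDataset((features, labels))                     # two one-hot outputs
reg_ds = prepareANNDataset((features, labels), prob='regression')  # single output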
Example #2
import numpy as np
import pybrain.datasets as pbds
import pybrain.supervised.trainers as pbsvt
import pybrain.tools.shortcuts as pbts


def nnreal(x, y, hidden=5000, threshold=.1, numwords=10):
    # rank word indices by their column totals and keep the `numwords` highest
    sums = np.sum(y, axis=0).tolist()
    ranked = sorted(enumerate(sums), key=lambda i: i[1], reverse=True)
    mostcommonwords = [j[0] for j in ranked[:numwords]]

    # x = x[lag:]
    # y = y[:len(x)]
    # `nonzero` is a project-local helper, assumed to drop empty rows
    x, y = nonzero(x, y)

    numInputFeatures, numOutputFeatures = x.shape[1], y.shape[1]
    ds = pbds.SupervisedDataSet(numInputFeatures, numOutputFeatures)
    ds.setField('input', x)
    ds.setField('target', y)
    dstrain, dstest = ds.splitWithProportion(.93)

    nn = pbts.buildNetwork(numInputFeatures,
                           hidden,
                           numOutputFeatures,
                           bias=True)
    trainer = pbsvt.BackpropTrainer(nn, dstrain)
    errors = []
    e = 1
    i = 0
    # train until the error drops below `threshold` or 50 epochs elapse
    while e > threshold and i < 50:
        e = trainer.train()
        errors.append(e)
        print(e)
        i += 1
    return nn, errors, dstest
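
With the trained network and the held-out split returned above, a quick sanity check is to activate the net on the test inputs (a sketch; `x` and `y` as prepared for the function above):

import numpy as np

nn, errors, dstest = nnreal(x, y)
preds = np.array([nn.activate(inp) for inp in dstest['input']])
print(np.mean((preds - dstest['target']) ** 2))  # test-set MSE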
Example #3
File: nn.py Project: rv1/designproject
def train_nn(net, train_data, train_labels):
    ntrain = len(train_data)
    # take the dimensions from the network itself (`indim`/`outdim` were undefined)
    train_dataset = pd.SupervisedDataSet(net.indim, net.outdim)

    for index in range(ntrain):
        train_dataset.addSample(train_data[index], train_labels[index])

    trainer = BackpropTrainer(net, train_dataset)
    train_error = trainer.train()

    return net, train_error
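
A hypothetical call, assuming the surrounding module provides the `pd` alias for pybrain.datasets and imports BackpropTrainer, as the function relies on; the toy arrays are illustrative:

import pybrain.datasets as pd
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.shortcuts import buildNetwork

net = buildNetwork(3, 5, 1)
train_data = [[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]
train_labels = [[0.0], [1.0]]
net, train_error = train_nn(net, train_data, train_labels)
print(train_error)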
Example #4
    def __init__(self, input_size, topology):
        topology = list(topology) + [1]
        # buildNetwork takes each layer size as a separate positional argument
        self._net = _shortcuts.buildNetwork(*([input_size] + topology))
        # the dataset's input width must match the network's input layer
        self._data = _datasets.SupervisedDataSet(input_size, topology[-1])
        self._data_source = {'inp': _np.array([]), 'tar': _np.array([])}
        self._samples_limit = None

        # start every weight at the same small value, scaled by the input size
        new_params = _np.array([1. / input_size] * len(self._net.params))
        self._net._setParameters(new_params)

        self._net.sortModules()
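
The star-unpacking above is worth isolating: buildNetwork takes each layer size as a separate positional argument, so a list of sizes has to be unpacked. A minimal standalone sketch:

from pybrain.tools.shortcuts import buildNetwork

layers = [4, 8, 8, 1]        # input, two hidden layers, output
net = buildNetwork(*layers)  # equivalent to buildNetwork(4, 8, 8, 1)
print(net.indim, net.outdim) # -> 4 1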
Example #5
    def neural_net(self, extrapolation_spots, data):
        net = buildNetwork(2, 10, 1)
        ds = pd.SupervisedDataSet(2, 1)

        # each sample maps an (x, y) pair to its z value
        for row in self.sample_data:
            ds.addSample((row[0], row[1]), (row[2],))
        trainer = BackpropTrainer(net, ds)
        trainer.trainUntilConvergence()

        new_points = np.zeros((len(extrapolation_spots), 3))
        new_points[:, 0] = extrapolation_spots[:, 0]
        new_points[:, 1] = extrapolation_spots[:, 1]
        # predict a z value at every extrapolation spot
        for i in range(len(extrapolation_spots)):
            new_points[i, 2] = net.activate(extrapolation_spots[i, :2])
        combined = np.concatenate((data, new_points))
        return combined
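
Note that trainUntilConvergence holds out part of the dataset for validation (25% by default, via its validationProportion argument) and trains until validation error stops improving, which can take arbitrarily long; a common safeguard is to cap the epoch count, e.g. in the call above:

        trainer.trainUntilConvergence(maxEpochs=100, validationProportion=0.25)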
Example #6
def to_supervised_dataset(samples, target):
    # `samples` is a list of feature rows; every row gets the same target
    # (renamed from `list`/`super` to avoid shadowing the built-ins, and the
    # enumerate() call is dropped so each row, not an (index, row) tuple, is added)
    dataset = ds.SupervisedDataSet(len(samples[0]), 1)
    for sample in samples:
        dataset.addSample(sample, target)
    return dataset
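
A hypothetical call, assuming `ds` is the pybrain.datasets alias the function relies on:

import pybrain.datasets as ds

rows = [[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]]
positive_ds = to_supervised_dataset(rows, 1)  # label every row with target 1
print(len(positive_ds))                       # -> 3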