Example #1
 def iterRbms(self):
     """Yield every two layers as an rbm."""
     layers = [i for i in self.net.modulesSorted if isinstance(i, NeuronLayer) and not isinstance(i, BiasUnit)]
     # There will be a single bias.
     bias = [i for i in self.net.modulesSorted if isinstance(i, BiasUnit)][0]
     layercons = (self.net.connections[i][0] for i in layers)
     # The biascons will not be sorted; we have to sort them to zip nicely
     # with the corresponding layers.
     biascons = self.net.connections[bias]
     biascons.sort(key=lambda c: layers.index(c.outmod))
     modules = list(zip(layers, layers[1:], layercons, biascons))
     for visible, hidden, layercon, biascon in modules:
         rbm = Rbm.fromModules(visible, hidden, bias, layercon, biascon)
         yield rbm
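
One way such a generator might be consumed is a greedy, layer-by-layer pre-training loop. The sketch below is an assumption, not part of the original snippet: `dbn` stands for whatever object defines iterRbms(), `ds` for an UnsupervisedDataSet, and a faithful deep-belief procedure would additionally propagate the data through the layers already trained before fitting the next RBM.

from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)

cfg = RbmGibbsTrainerConfig()
cfg.maxIter = 3
for rbm in dbn.iterRbms():              # `dbn` is a placeholder for the owning object
    trainer = RbmBernoulliTrainer(rbm, ds, cfg)
    for _ in range(50):
        trainer.train()
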
Example #2
 def iterRbms(self):
     """Yield every two layers as an rbm."""
     layers = [i for i in self.net.modulesSorted
               if isinstance(i, NeuronLayer) and not isinstance(i, BiasUnit)]
     # There will be a single bias.
     bias = [i for i in self.net.modulesSorted if isinstance(i, BiasUnit)][0]
     layercons = (self.net.connections[i][0] for i in layers)
     # The biascons will not be sorted; we have to sort them to zip nicely
     # with the corresponding layers.
     biascons = self.net.connections[bias]
     biascons.sort(key=lambda c: layers.index(c.outmod))
     modules = zip(layers, layers[1:], layercons, biascons)
     for visible, hidden, layercon, biascon in modules:
         rbm = Rbm.fromModules(visible, hidden, bias,
                               layercon, biascon)
         yield rbm
Example #3
#!/usr/bin/env python
""" Minuscule restricted Boltzmann machine usage example """

from __future__ import print_function

__author__ = 'Justin S Bayer, [email protected]'

from pybrain.structure.networks.rbm import Rbm
from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)
from pybrain.datasets import UnsupervisedDataSet

ds = UnsupervisedDataSet(6)
ds.addSample([0, 1] * 3)
ds.addSample([1, 0] * 3)

cfg = RbmGibbsTrainerConfig()
cfg.maxIter = 3

rbm = Rbm.fromDims(6, 1)
trainer = RbmBernoulliTrainer(rbm, ds, cfg)
print(rbm.params, rbm.biasParams)
for _ in range(50):
    trainer.train()

print(rbm.params, rbm.biasParams)
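
The two print() calls show the RBM's connection weights (rbm.params) and bias weights (rbm.biasParams) before and after the 50 calls to trainer.train(); the change in those two arrays is the entire visible effect of training.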
Example #4
	def __init__(self, in_dims, out_dims):
		self.dataset = UnsupervisedDataSet(in_dims)
		cfg = RbmGibbsTrainerConfig()
		cfg.maxIter = 5
		self.model = Rbm.fromDims(in_dims, out_dims)
		self.trainer = RbmBernoulliTrainer(self.model, self.dataset, cfg)
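
A minimal usage sketch for this wrapper follows; everything in it is an assumption (the class name RbmLayer, the toy samples, the training loop), and it relies on the trainer reading its dataset at train() time rather than copying it in __init__.

layer = RbmLayer(6, 2)                  # hypothetical name for the class that owns __init__ above
layer.dataset.addSample([0, 1] * 3)     # unsupervised samples go straight into its dataset
layer.dataset.addSample([1, 0] * 3)
for _ in range(50):
    layer.trainer.train()               # the RbmBernoulliTrainer configured in __init__
print(layer.model.params, layer.model.biasParams)
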
Example #6
# Sort modules topologically and initialize
net.sortModules()

with open('vsample.csv', 'r', newline='') as f:
    reader = csv.reader(f)
    for row in reader:
        # Columns 1-9 hold the input features, column 10 the fraud label.
        d_input = [float(x) for x in row[1:10]]
        output = [float(row[10])]
        # Normalised copy of the features (computed but not used below).
        n_input = numpy.asarray(d_input) / numpy.linalg.norm(d_input)
        ds.addSample(d_input, output)
        temp_ds.addSample(d_input)

# print(ds)
cfg = RbmGibbsTrainerConfig()
cfg.maxIter = 3
rbm = Rbm.fromDims(9,5)
trainer = BackpropTrainer(net, dataset=ds, learningrate= 0.001, weightdecay=0.01, verbose=True)
#trainer = DeepBeliefTrainer(net, dataset=temp_ds)
#trainer = RbmBernoulliTrainer(rbm, temp_ds, cfg)
for i in range(30):
    trainer.trainEpochs(30)

print('Expected:1 [FRAUD]     ', net.activate([49, 2.6, 0.98, 4.3, 1.48, 10, 2.5, 6, 67]))
print('Expected:0 [NOT FRAUD] ', net.activate([78, 5, 4.4, 4.5, 2.99, 3, 1.3, 10, 59]))
print('Expected:1 [FRAUD]     ', net.activate([57, 2, 0.1, 1.15, 0.47, 7, 1.8, 6, 73]))
print('Expected:0 [NOT FRAUD] ', net.activate([65, 3, 11.1, 1.8, 0.6, 4, 4, 4.5, 90]))
print('Expected:1 [FRAUD]     ', net.activate([55, 2, 0.23, 3.2, 0.55, 9, 1.9, 5.5, 60]))
print('Expected:0 [NOT FRAUD] ', net.activate([39, 5, 0.07, 0.5, 0.17, 3, 3.8, 3, 32]))
print('Expected:1 [FRAUD]     ', net.activate([63, 2.5, 0.25, 1.23, 0.3, 7, 1.45, 4.75, 35]))

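The snippet above starts after net, ds and temp_ds have already been built; one plausible setup is sketched below using standard PyBrain helpers. The layer sizes and dataset dimensions are assumptions inferred from the nine-feature rows and single label, not taken from the original file.

import csv
import numpy

from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet, UnsupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.networks.rbm import Rbm
from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)

net = buildNetwork(9, 5, 1)         # 9 inputs, one hidden layer, 1 output (sizes assumed)
ds = SupervisedDataSet(9, 1)        # labelled samples for BackpropTrainer
temp_ds = UnsupervisedDataSet(9)    # inputs only, for the commented-out RBM trainers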
Example #7
# Sort modules topologically and initialize
net.sortModules()

with open('vsample.csv', 'rb') as f:
    reader = csv.reader(f)
    for row in reader:
        d_input = map(float, row[1:10])
        output = map(float, row[10])
        n_input = d_input / numpy.linalg.norm(d_input)
        ds.addSample(d_input, output)
        temp_ds.addSample(d_input)

#print ds
cfg = RbmGibbsTrainerConfig()
cfg.maxIter = 3
rbm = Rbm.fromDims(9, 5)
trainer = BackpropTrainer(net,
                          dataset=ds,
                          learningrate=0.001,
                          weightdecay=0.01,
                          verbose=True)
#trainer = DeepBeliefTrainer(net, dataset=temp_ds)
#trainer = RbmBernoulliTrainer(rbm, temp_ds, cfg)
for i in range(30):
    trainer.trainEpochs(30)

print 'Expected:1 [FRAUD]     ', net.activate(
    [49, 2.6, 0.98, 4.3, 1.48, 10, 2.5, 6, 67])
print 'Expected:0 [NOT FRAUD] ', net.activate(
    [78, 5, 4.4, 4.5, 2.99, 3, 1.3, 10, 59])
print 'Expected:1 [FRAUD]     ', net.activate(