Example #1
def test():
    from ffnet import mlgraph, ffnet
    import networkx as NX
    import pylab
    conec1 = mlgraph((2, 2, 2), biases=False)
    net1 = ffnet(conec1)
    conec2 = mlgraph((4, 2, 2, 1), biases=True)
    net2 = ffnet(conec2)
    NX.draw_graphviz(net1.graph, prog='dot')
    pylab.show()
Example #2
def learn(inputD,outputData):
    conec = mlgraph((2,22,12,1))
    net = ffnet(conec)
    print("READING DATA")
    inputData = inputD
    target = numpy.array(outputData)#data[:, -1] #last column

    print ("TRAINING NETWORK...")
    sys.stdout.flush()
    net.train_tnc(inputData, target, maxfun = 5000, messages=1)
    print ("TESTING NETWORK...")
    output, regression = net.test(inputData, target, iprint = 0)
    print(regression)
    Rsquared = regression[0][2]
    maxerr = abs( numpy.array(output).reshape( len(output) ) - numpy.array(target) ).max()
    print ("R-squared:           %s  (should be >= 0.999999)" %str(Rsquared))
    print ("max. absolute error: %s  (should be <= 0.05)" %str(maxerr))
    print ("Is ffnet ready for a stock?")
    try:
        plot( target, 'b--' )
        plot( output, 'k-' )
        legend(('target', 'output'))
        xlabel('pattern'); ylabel('price')
        title('Outputs vs. target of trained network.')
        grid(True)
        show()
        return net
    except ImportError as e:
        print ("Cannot make plots. For plotting install matplotlib.\n%s" % e)
        return net
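The snippet above relies on module-level imports that the listing omits; judging from the names it uses, a header along these lines is required (an assumption, since the original file is not shown):

import sys
import numpy
from ffnet import ffnet, mlgraph
from pylab import plot, legend, xlabel, ylabel, title, grid, show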
Example #3
def train(data):
  #trains neural network based on passed training data.
  #training data is a list of [input,output] lists
   
  print "amount of training data:" + str(len(data))
  inputsize = 30 * 30
  outsize = 10
  nodes = 350 #((inputsize + outsize) * 2) / 3

  inp = [i for i,t in data]
  trg = [t for i,t in data]
    
  print "creating neural network, hidden nodes:" + str(nodes)
  conec = mlgraph((inputsize,nodes,outsize))
  
  print "initializing ffnet"
  net = ffnet(conec)
  
  #print "loading ffnet"
  #net = loadnet("ffnet.net")

  # print "assigning random weights"
  # net.randomweights()

  # Train process
  print "training network"
  net.train_tnc(inp, trg, messages=1,nproc=4)

  print "saving trained network"
  savenet(net, "ffnet.net")

  print "testing network"
  net.test(inp, trg, iprint = 1)
Example #4
    def train(self, X, Y):
        """ Trains the neural network based on the given set of inputs
        and outputs. """

        inp = array(X)
        targ = array(Y)
        n_inputs = len(inp[0])

        # 1 Output node because Surrogate Model has only 1 output
        self._nn_surr = ffnet(mlgraph((n_inputs, self.n_hidden_nodes, 1)))

        # Start the training
        if self.method == 'cg':
            self._nn_surr.train_cg(inp, targ, disp=False)
        elif self.method == 'genetic':
            self._nn_surr.train_genetic(inp,
                                        targ,
                                        individuals=10 * n_inputs,
                                        generations=500)
        elif self.method == 'tnc':
            self._nn_surr.train_tnc(inp, targ, maxfun=5000)
        elif self.method == 'momentum':
            self._nn_surr.train_momentum(inp, targ, momentum=1)
        elif self.method == 'rprop':
            self._nn_surr.train_rprop(inp, targ)
        elif self.method == 'bfgs':
            self._nn_surr.train_bfgs(inp, targ)
        else:
            self.raise_exception('Unknown training method %r' % self.method)
Example #5
def ffnetwork(inputs=1, layers=1, outputs=1):
    """
    Define a feedforward neural network

    Parameters
    ----------
    inputs : integer
        default = 1
        defines the length of input vector
    layers : integer
        default = 1
        defines the number of nodes in the single hidden layer
    outputs : integer
        default = 1
        defines the length of output vector

    Returns
    -------
    net : Feed-forward neural network
    """
    conec = ffnet.mlgraph((inputs, layers, outputs), biases=False)
    net = ffnet.ffnet(conec)
    # NX.draw_graphviz(net.graph, prog='dot')
    # plt.show()
    return net
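A minimal usage sketch (assuming ffnet has been imported as a module, as the function body implies):

net = ffnetwork(inputs=2, layers=4, outputs=1)
print(net([0.5, 0.5]))  # forward pass through the (still untrained) network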
Example #7
def build_ffnet(sorted_data,network_config):

	#data_in_training2 = sorted_data[:training_set_size,10:-2].astype(float).tolist()
	data_target_training2 = [[i] for i in sorted_data[:training_set_size,0].astype(float)]

	new_data_in = sorted_data[:training_set_size,col_training_set[0]] # the first col
	for i in col_training_set[1:]: # and the rest of the cols
		new_data_in = numpy.column_stack((new_data_in, sorted_data[:training_set_size,i]))
	data_in_training2 = new_data_in.astype(float).tolist()

	print 'defining network'

	# Define net (large one)
	conec = mlgraph(network_config, biases=False)

	net = ffnet(conec)

	#print 'draw network'
	#networkx.draw_graphviz(net.graph, prog='dot')
	#pylab.show()

	logging.info('network built as: ' + str(network_config) )

	print "TRAINING NETWORK..."
	# there are many different training algorithms to choose from

	#net.train_rprop(data_in_training2, data_target_training2, a=1.9, b=0.1, mimin=1e-06, mimax=15.0, xmi=0.5, maxiter=max_functions, disp=1)
	###net.train_momentum(data_in_training2, data_target_training2, eta=0.2, momentum=0.1, maxiter=max_functions, disp=1)
	net.train_tnc(data_in_training2, data_target_training2, maxfun = max_functions, messages=1)
	#net.train_cg(data_in_training2, data_target_training2, maxiter=max_functions, disp=1)
	#net.train_genetic(data_in_training2, data_target_training2, individuals=max_population, generations=max_functions)
	#net.train_bfgs(data_in_training2, data_target_training2, maxfun = max_functions, disp=1)

	return net
    def train(self, X, Y):
        """ Trains the neural network based on the given set of inputs
        and outputs. """

        inp = array(X)
        targ = array(Y)
        n_inputs = len(inp[0])

        # 1 Output node because Surrogate Model has only 1 output
        self._nn_surr = ffnet(mlgraph((n_inputs, self.n_hidden_nodes, 1)))

        # Start the training
        if self.method == 'cg':
            self._nn_surr.train_cg(inp, targ, disp=False)
        elif self.method == 'genetic':
            self._nn_surr.train_genetic(inp, targ, individuals=10*n_inputs,
                                        generations=500)
        elif self.method == 'tnc':
            self._nn_surr.train_tnc(inp, targ, maxfun=5000)
        elif self.method == 'momentum':
            self._nn_surr.train_momentum(inp, targ, momentum=1)
        elif self.method == 'rprop':
            self._nn_surr.train_rprop(inp, targ)
        elif self.method == 'bfgs':
            self._nn_surr.train_bfgs(inp, targ)
        else:
            self.raise_exception('Unknown training method %r' % self.method)
Example #9
def run_network():
    # Generate standard layered network architecture and create network
    conec = mlgraph((14,28,1))
    net = ffnet(conec)

    df = pd.read_csv('data/copacabana.csv', sep=';')
    variables = [
        'Posicao', 'Quartos', 'Vagas', 'DistIpanema', 'DistPraia',
        'DistFavela', 'RendaMedia', 'RendaMovel', 'RendaMovelRua',
        'Vu2009', 'Mes', 'Idade', 'Tipologia', 'AreaConstruida'
    ]
    input = df[variables]
    target = df[['VAL_UNIT']]

    # Train network
    #first find good starting point with genetic algorithm (not necessary, but may be helpful)
    print "FINDING STARTING WEIGHTS WITH GENETIC ALGORITHM..."
    net.train_genetic(input, target, individuals=20, generations=500)
    #then train with scipy tnc optimizer
    print "TRAINING NETWORK..."
    net.train_tnc(input, target, maxfun = 1000, messages=1)

    print "TESTING NETWORK..."
    output, regression = net.test(input, target, iprint=0)

    # Save/load/export network
    print "Network is saved..."
    savenet(net, "data/capacabana.net")

    return output, regression
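Note: ffnet's trainers expect plain array-like data, so passing pandas objects directly may fail depending on the versions involved; an explicit conversion along these lines is the usual workaround (a hedged sketch, not part of the original):

input = df[variables].values.tolist()
target = df[['VAL_UNIT']].values.tolist()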
Example #10
 def __init__(self,
              shape=None,
              full_conn=True,
              biases=True,
              random_weights=True,
              normalize=True,
              reduce_empty_dims=True):
     """
     shape: shape of a NN given as a tuple
     """
     self.shape = shape
     self.full_conn = full_conn
     self.biases = biases
     self.random_weights = random_weights
     self.normalize = normalize
     self.reduce_empty_dims = reduce_empty_dims
     if self.normalize:
         self.norm = StandardScaler()
     self.shape = shape
     if shape:
         if self.full_conn:
             conec = tmlgraph(self.shape, self.biases)
         else:
             conec = mlgraph(self.shape, self.biases)
         self.model = ffnet(conec)
         if random_weights:
             self.model.randomweights()
 def __init__(self, arch, verbose = False, biases = True):
     """
     Initialises the neural network
     """
     self.verbose = verbose
     self.arch = arch
     self.conec = mlgraph(self.arch, biases = biases)
     self.net = ffnet(self.conec)
Example #12
def create_bp(input, output):
    it = len(input[0])
    ot = len(output[0])

    connection = mlgraph((it, 10, ot))
    net = ffnet(connection)

    print("Created new network...")
    return net
Example #13
    def train(self, X, Y):
        """ Trains the neural network based on the given set of inputs
        and outputs. """

        inp = array(X)
        targ = array(Y)
        n_inputs = len(inp[0])
        
        # 1 Output node because Surrogate Model has only 1 output
        self._nn_surr = ffnet(mlgraph((n_inputs, self.n_hidden_nodes, 1)))
                        
        # Start the training
        self._nn_surr.train_cg(inp, targ, disp=False)
Example #14
 def fit(self, descs, target_values, train_alg='tnc',**kwargs):
     # setup neural network
     if self.full_conn:
         conec = tmlgraph(self.shape, self.biases)
     else:
         conec = mlgraph(self.shape, self.biases)
     self.model = ffnet(conec)
     if self.random_weights:
          if self.random_state is not None:
             random_seed(self.random_state)
         self.model.randomweights()
     # train
     getattr(self.model, 'train_'+train_alg)(descs, target_values, nproc='ncpu' if self.n_jobs < 1 else self.n_jobs, **kwargs)
     return self
Example #15
def train(input, target):
    print input[0]
    print len(input)
    conec = mlgraph((16, 16, 10))
    net = ffnet(conec)
    input = input.tolist()
    target = target.tolist()

    print "FINDING STARTING WEIGHTS WITH GENETIC ALGORITHM..."
    # net.train_genetic(input, target, individuals=20, generations=30)
    #then train with scipy tnc optimizer
    print "TRAINING NETWORK..."
    net.train_tnc(input, target, maxfun=1000, messages=1)
    return net
Example #16
 def __init__(self):
     super(NeuralNetwork, self).__init__()
     self.field = Field(20, 20)
     
     self.outputs = []
     
     self.input = []
     self.target = []
     
     b = QtGui.QPushButton("Learn!")
     self.connect(b, QtCore.SIGNAL("clicked()"), self.learn)
     
     self.outcomes_list = QtGui.QComboBox()
     self._add_output("Square")
     self._add_output("Triangle")
     self._add_output("Line")
     
     hpanel = QtGui.QHBoxLayout()
     hpanel.addWidget(self.outcomes_list)
     hpanel.addWidget(b)
     
     btn_classify = QtGui.QPushButton("Classify")
     self.connect(btn_classify, QtCore.SIGNAL("clicked()"), self.classify)
     
     btn_clear = QtGui.QPushButton("Clear")
     self.connect(btn_clear, QtCore.SIGNAL("clicked()"), self.clear)
     
     self.label_output = QtGui.QLabel()
     self.label_output.setMaximumHeight(20)
     
     self.label_epoch = QtGui.QLabel()
     self.label_epoch.setMaximumHeight(20)
     
     vpanel = QtGui.QVBoxLayout()
     vpanel.addWidget(self.field)
     vpanel.addLayout(hpanel)
     vpanel.addWidget(self.label_output)
     vpanel.addWidget(self.label_epoch)
     vpanel.addWidget(btn_classify)
     vpanel.addWidget(btn_clear)
     
     self.setLayout(vpanel)
     
     try:
         self.net, self.epoch = loadnet("netdata.dat")
     except IOError:
         conec = mlgraph((self.field.x*self.field.y, 10, 10, 3))
         self.net = ffnet(conec)
         self.epoch = 0
Example #17
    def __init__(self):
        super(NeuralNetwork, self).__init__()
        self.field = Field(20, 20)

        self.outputs = []

        self.input = []
        self.target = []

        b = QtGui.QPushButton("Learn!")
        self.connect(b, QtCore.SIGNAL("clicked()"), self.learn)

        self.outcomes_list = QtGui.QComboBox()
        self._add_output("Square")
        self._add_output("Triangle")
        self._add_output("Line")

        hpanel = QtGui.QHBoxLayout()
        hpanel.addWidget(self.outcomes_list)
        hpanel.addWidget(b)

        btn_classify = QtGui.QPushButton("Classify")
        self.connect(btn_classify, QtCore.SIGNAL("clicked()"), self.classify)

        btn_clear = QtGui.QPushButton("Clear")
        self.connect(btn_clear, QtCore.SIGNAL("clicked()"), self.clear)

        self.label_output = QtGui.QLabel()
        self.label_output.setMaximumHeight(20)

        self.label_epoch = QtGui.QLabel()
        self.label_epoch.setMaximumHeight(20)

        vpanel = QtGui.QVBoxLayout()
        vpanel.addWidget(self.field)
        vpanel.addLayout(hpanel)
        vpanel.addWidget(self.label_output)
        vpanel.addWidget(self.label_epoch)
        vpanel.addWidget(btn_classify)
        vpanel.addWidget(btn_clear)

        self.setLayout(vpanel)

        try:
            self.net, self.epoch = loadnet("netdata.dat")
        except IOError:
            conec = mlgraph((self.field.x * self.field.y, 10, 10, 3))
            self.net = ffnet(conec)
            self.epoch = 0
Example #18
    def add_mlp(self, n_hidden):
        """
        Add a new neural network with n_hidden neurons in the hidden layer.
        n_hidden can be a number or a tuple.
        """
        ind = len(self.nns)
        nin = len(self._invars)
        nout = len(self._outvars)
        if isinstance(n_hidden, int): n_hidden = (n_hidden, )
        arch = (nin, ) + n_hidden + (nout, )
        net = ffnet(mlgraph(arch))

        self.nns.append(net)

        return ind
Example #19
    def fit(self, X, y):
        dim = X.shape
        nFeatures = int(dim[1])

        #input = [ [0.,0.], [0.,1.], [1.,0.], [1.,1.] ]
        #target  = [ [1.], [0.], [0.], [1.] ]
        input = list(X)
        target = list(y)

        conec = mlgraph((nFeatures, self.nNodes, 1))
        self.net = ffnet(conec)
        if self.maxfun == '':
            self.net.train_tnc(input, target)
        else:
            self.net.train_tnc(input, target, maxfun=self.maxfun)
def main():
    """ load training data"""
    inputs = np.loadtxt("../handwriting/X2_100samples.dat")
    targets = np.loadtxt("../handwriting/y2_100samples.dat")
    """ define network topology """
    conec = mlgraph((inputs.shape[1], 10, 1))

    #     reg = 0.1
    reg = False
    net = ffnet(conec)
    system = NNSystem(net, inputs, targets, reg=reg)

    database = system.create_database(
        #                     db="/home/ab2111/machine_learning_landscapes/neural_net/db_ffnet_100samples_reg"+str(reg) +".sqlite"
        db="../db/db_ffnet_100samples.sqlite")
    run_gui(system, database)
Example #21
    def __init__(self,
                 inputDims,
                 actionSpace,
                 hiddenUnits=5,
                 bias=True,
                 independentOutputs=False,
                 **kwargs):
        assert not actionSpace.hasContinuousDimensions() \
                or actionSpace.getNumberOfDimensions() == 1, \
                "MLP policy can currently not deal with continuous action "\
                "spaces with more than one dimension!"

        try:
            import ffnet
        except ImportError:
            import warnings
            warnings.warn("The MLP policy module requires the ffnet package.")

        self.continuousActions = actionSpace.hasContinuousDimensions()

        if self.continuousActions:
            actionDimension = actionSpace.getDimensions()[
                0]  # per the assert above there is only one dimension
            actionRanges = actionDimension.getValueRanges()
            assert len(actionRanges) == 1, "MLP policy cannot deal with "\
                                           "non-contiguous action ranges."
            self.actionRange = actionRanges[0]
            self.numActions = 1  # TODO
        else:
            self.actions = actionSpace.getActionList()
            self.numActions = len(self.actions)

        self.inputDims = inputDims
        self.hiddenUnits = hiddenUnits
        self.bias = bias

        # Determine network topology
        if independentOutputs:
            conec = ffnet.imlgraph((inputDims, hiddenUnits, self.numActions),
                                   biases=self.bias)
        else:
            conec = ffnet.mlgraph((inputDims, hiddenUnits, self.numActions),
                                  biases=self.bias)

        # Create net based on connectivity
        self.net = ffnet.ffnet(conec)
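For reference, mlgraph shares one hidden layer across all outputs while imlgraph gives each output its own hidden block, so the two graphs end up with different numbers of connections. A small hedged sketch of the difference:

import ffnet
shared = ffnet.ffnet(ffnet.mlgraph((4, 5, 3), biases=True))
indep = ffnet.ffnet(ffnet.imlgraph((4, 5, 3), biases=True))
print(len(shared.weights))  # connections with one shared hidden layer
print(len(indep.weights))   # more connections: a hidden block per output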
def train(trainData):
    print('Starting Training')
    inLength = 64

    inData = []
    for i in range(len(trainData)):
        row = []
        for j in range(inLength):
            row.append(trainData[i][j])
        inData.append(row)

    conec = mlgraph((inLength, 10, 10, inLength))
    net = ffnet(conec)
    input = numpy.array(inData)
    target = numpy.array(inData)
    net.train_tnc(input, target, maxfun=2000, messages=1)
    print('Training completed')
Example #23
 def fit(self, descs, target_values, train_alg='tnc', **kwargs):
     # setup neural network
     if self.full_conn:
         conec = tmlgraph(self.shape, self.biases)
     else:
         conec = mlgraph(self.shape, self.biases)
     self.model = ffnet(conec)
     if self.random_weights:
          if self.random_state is not None:
             random_seed(self.random_state)
         self.model.randomweights()
     # train
     getattr(self.model, 'train_' + train_alg)(
         descs,
         target_values,
         nproc='ncpu' if self.n_jobs < 1 else self.n_jobs,
         **kwargs)
     return self
def main():
    """ load training data"""
    inputs  = np.loadtxt("../handwriting/X2_100samples.dat")
    targets = np.loadtxt("../handwriting/y2_100samples.dat")
    
    """ define network topology """
    conec = mlgraph((inputs.shape[1],10,1))

#     reg = 0.1
    reg=False
    net = ffnet(conec)
    system = NNSystem(net, inputs, targets, reg=reg)
    
    database = system.create_database(
#                     db="/home/ab2111/machine_learning_landscapes/neural_net/db_ffnet_100samples_reg"+str(reg) +".sqlite"
                    db="../db/db_ffnet_100samples.sqlite"
                )
    run_gui(system, database)
def load_speaker_recognition_newtork(filename, create_new=False):
    """
    Load or create (if requested) a network for speaker recognition from a file

    returns tuple: (network, people_names, people_number)
    """
    people = voice_sample.get_names()
    people_num = len(people)
    network = None

    try:
        network = loadnet(filename)
    except IOError, ex:
        if create_new:
            
            network = ffnet(mlgraph((LPC_COE_NUM, people_num + LPC_COE_NUM,
            #network = ffnet(mlgraph((LPC_COE_NUM, 10,
                people_num)) )
def run():

    def formatPrediction(u):
        if (u>0.5):
            return 1
        else:
            return 0
    dat = json.loads(dict(request.form).keys()[0])
    dat["pgood"]["values"] = []
    dat["pbad"]["values"] = []
    good = set(dat["good"]["values"])
    none = set(dat["none"]["values"])
    bad = set(dat["bad"]["values"])
    data = get_all_fruits()
    train_input  = []
    train_target = []
    test_input = []
    for f in data:
        if f[0] in good:
            train_input.append(f[1:])
            train_target.append(1)
        elif f[0] in bad:
            train_input.append(f[1:])
            train_target.append(0)
        elif f[0] in none:
            pass
        else:
            test_input.append(f)            
    
    print len(test_input)
    # Run algorithm
    l = len(data[0])-1
    conec = mlgraph( (l,2,1)) 
    net = ffnet(conec)
    net.train_tnc(train_input, train_target, maxfun = 1000)
    ## Print the name of the fruits used for test 
    
    o = net.test([u[1:] for u in test_input], [0]*len(test_input),iprint=0)
    res = [formatPrediction(u[0]) for u in o[0]]

    dat["pgood"]["values"] = [ k[0] for i,k in enumerate(test_input) if res[i] == 1 ]
    dat["pbad"]["values"] = [ k[0] for i,k in enumerate(test_input) if res[i] == 0 ]
    return json.dumps(dat)
    def train(self, X, Y):
        """ Trains the nerual network based on the given set of inputs
        and outputs. """

        inp = array(X)
        targ = array(Y)
        n_inputs = len(inp[0])
        
        # 1 Output node because Surrogate Model has only 1 output
        self._nn_surr = ffnet(mlgraph((n_inputs, self.n_hidden_nodes, 1)))
                        
        # Start the training
        #self._nn_surr.train_genetic(inp, targ, individuals=10*n_inputs, generations=500)

        #self._nn_surr.train_tnc(inp, targ,maxfun=5000)
        
        #self._nn_surr.train_momentum(inp,targ,momentum=1)
        #self._nn_surr.train_rprop(inp,targ)
        self._nn_surr.train_cg(inp,targ,disp=False)
Example #28
    def __init__(self, stateSpace, actions, **kwargs):
        super(MLP, self).__init__(stateSpace)
        try:
            import ffnet
        except ImportError:
            raise Exception(
                "Error: MLP function approximator cannot be used without the ffnet module!"
            )

        self.numberOfInputs = len(stateSpace.keys())
        self.numberOfOutputs = len(actions)
        self.actions = actions
        self.stateSpace = stateSpace

        conec = ffnet.mlgraph((self.numberOfInputs, 8, 1))
        self.nets = dict([(action, ffnet.ffnet(conec)) for action in actions])
        for action in self.actions:
            for index in range(len(self.nets[action].weights)):
                self.nets[action].weights[index] = 0.0
def main():
    """ load training data"""
    inputs = np.loadtxt("../handwriting/X2_100samples.dat")
    targets = np.loadtxt("../handwriting/y2_100samples.dat")

    ValInputs, ValTargets = get_validation_data()
    """ define network topology """
    conec = mlgraph((inputs.shape[1], 10, 1))

    net = ffnet(conec)
    system = NNSystem(net, inputs, targets)

    pot = system.get_potential()

    database = system.create_database(
        db=
        "/home/ab2111/machine_learning_landscapes/neural_net/db_ffnet_100samples.sqlite"
    )
    # database = system.create_database(db="/home/ab2111/machine_learning_landscapes/neural_net/db_ffnet_me3.sqlite")
    # run_gui(system, database)

    #     check_its_a_minimum(system, database)

    energies = np.array([])
    for m in database.minima():
        coords = m.coords
        testenergy = pot.getValidationEnergy(coords, ValInputs,
                                             ValTargets) / len(ValTargets)
        energies = np.append(energies, testenergy)


#         plt.plot(m.coords,'o')
#         np.max(m.coords)

#     plt.plot([m._id for m in database.minima()], np.array([m.energy for m in database.minima()])/100., 'o')
    plt.plot(np.array([m.energy for m in database.minima()]) / 100)
    plt.plot(energies)
    plt.plot(
        np.array([np.max(m.coords) for m in database.minima()]) / 1000, 'x')

    plt.legend(["Etrain", "Evalidation", "max(params)"])
    plt.show()
Example #30
def load_speaker_recognition_newtork(filename, create_new=False):
    """
    Load or create (if requested) a network for speaker recognition from a file

    returns tuple: (network, people_names, people_number)
    """
    people = voice_sample.get_names()
    people_num = len(people)
    network = None

    try:
        network = loadnet(filename)
    except IOError, ex:
        if create_new:

            network = ffnet(
                mlgraph((
                    LPC_COE_NUM,
                    people_num + LPC_COE_NUM,
                    #network = ffnet(mlgraph((LPC_COE_NUM, 10,
                    people_num)))
Example #31
 def __init__(
     self, shape=None, full_conn=True, biases=True, random_weights=True, normalize=True, reduce_empty_dims=True
 ):
     """
     shape: shape of a NN given as a tuple
     """
     self.shape = shape
     self.full_conn = full_conn
     self.biases = biases
     self.random_weights = random_weights
     self.normalize = normalize
     self.reduce_empty_dims = reduce_empty_dims
     if self.normalize:
         self.norm = StandardScaler()
     self.shape = shape
     if shape:
         if self.full_conn:
             conec = tmlgraph(self.shape, self.biases)
         else:
             conec = mlgraph(self.shape, self.biases)
         self.model = ffnet(conec)
         if random_weights:
             self.model.randomweights()
def main():
    def formatPrediction(u):
        if (u>0.5):
            return 1
        else:
            return 0
    res = []
    for k in range(100):
        header, tests, train = read_data()
        inputLength = len(header) - 2
        # a middle layer (e.g. (inputLength, 2, 1)) could be added here, but it
        # does not seem to have much impact in this particular case
        conec = mlgraph( (inputLength,1) )
        net = ffnet(conec)
        train_input = [ u[1:-1] for u in train ]
        target_input  = [ u[-1] for u in train ]
        test_input = [ u[1:-1] for u in tests ]
        test_target  = [ u[-1] for u in tests ]
        net.train_tnc(train_input, target_input, maxfun = 1000)
        # Print the name of the fruits used for test 
        o = net.test(test_input, test_target, iprint=0)
        res.append(float(sum([formatPrediction(u[0]) ^ int(test_target[i]) for i,u in enumerate(o[0])]))/len(test_target))
    print sum(res)/len(res)
Example #33
def main():
	conec = mlgraph((1, 4 ,1))
	net = ffnet(conec)
	
	patterns = 16
	input = [[ k * 2 * pi / patterns] for k in xrange(patterns + 1)]
	target = [[sin(x[0])] for x in input]
	
	print "training"
	net.train_genetic(input, target, individuals=20, generations=500)
	print "simple trainig"
	net.train_tnc(input, target, maxfun = 5000, messages = 1)
	
	print "test"
	output, regression = net.test(input, target, iprint = 2)
	
	# draw it
	points = 128
	xaxis = [[ k * 2 * pi / points] for k in xrange(points + 1)]
	sine = [sin(x) for x in xaxis]
	cosine = [cos(x) for x in xaxis]
	netsine = [net([x])[0] for x in xaxis]
	netcosine = [net.derivative([x])[0][0] for x in xaxis]
	
	subplot(211)
	plot(xaxis, sine, 'b--', xaxis, netsine, 'k-')
	legend(('sine', 'network output'))
	grid(True)
	title('Outputs of trained network.')

	subplot(212)
	plot(xaxis, cosine, 'b--', xaxis, netcosine, 'k-')
	legend(('cosine', 'network derivative'))
	grid(True)
	show()
	
	return 0
def main():
    """ load training data"""
    inputs  = np.loadtxt("../handwriting/X2_100samples.dat")
    targets = np.loadtxt("../handwriting/y2_100samples.dat")    
    
    ValInputs, ValTargets = get_validation_data()

    """ define network topology """
    conec = mlgraph((inputs.shape[1],10,1))
    
    net = ffnet(conec)
    system = NNSystem(net, inputs, targets)
    
    pot = system.get_potential()
            
    database = system.create_database(db="/home/ab2111/machine_learning_landscapes/neural_net/db_ffnet_100samples.sqlite")
    # database = system.create_database(db="/home/ab2111/machine_learning_landscapes/neural_net/db_ffnet_me3.sqlite")
    # run_gui(system, database)
    
#     check_its_a_minimum(system, database)

    energies = np.array([])
    for m in database.minima():
        coords = m.coords
        testenergy = pot.getValidationEnergy(coords,ValInputs,ValTargets)/len(ValTargets)
        energies = np.append(energies,testenergy)
#         plt.plot(m.coords,'o')
#         np.max(m.coords)
         

#     plt.plot([m._id for m in database.minima()], np.array([m.energy for m in database.minima()])/100., 'o')
    plt.plot(np.array([m.energy for m in database.minima()])/100)
    plt.plot(energies)
    plt.plot(np.array([np.max(m.coords) for m in database.minima()])/1000, 'x')
    
    plt.legend(["Etrain","Evalidation","max(params)"])
    plt.show()
Example #35
from ffnet import ffnet, mlgraph
from math import pi, sin, cos

# Let's define network connectivity by hand:
# conec = [(1, 2), (1, 3), (1, 4), (1, 5), (2, 6), (3, 6), (4, 6), (5, 6), \
#         (0, 2), (0, 3), (0, 4), (0, 5), (0, 6)]
# Note 1: Biases in ffnet are handled as the connections
#         from special node numbered 0. Input nodes cannot be biased.
# Note 2: Node numbering and order of links in conec is meaningless,
#         but the connections have to be from source to target.
# Note 3: The same connectivity can be obtained using mlgraph function
#         provided with ffnet (layered architecture (1,4,1)).

# Network creation
net = ffnet(mlgraph((1, 4, 1)))
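As Note 3 says, the commented-out list and mlgraph((1, 4, 1)) describe the same topology; a quick sketch to check that both build a network of the same size:

conec_by_hand = [(1, 2), (1, 3), (1, 4), (1, 5), (2, 6), (3, 6), (4, 6), (5, 6),
                 (0, 2), (0, 3), (0, 4), (0, 5), (0, 6)]
net_by_hand = ffnet(conec_by_hand)
assert len(net_by_hand.weights) == len(net.weights)  # 13 connections each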

# Generation of training data (sine values for x from 0 to 2*pi)
patterns = 16
input = [ [ 0. ] ] + [ [ k * 2 * pi / patterns ] for k in xrange(1, patterns + 1) ]
target = [ [ sin(x[0]) ] for x in input ]

# Training network
# first find good starting point with genetic algorithm (not necessary, but may be helpful)
print "FINDING STARTING WEIGHTS WITH GENETIC ALGORITHM..."
net.train_genetic(input, target, individuals=20, generations=500)
# then train with scipy tnc optimizer
print "TRAINING NETWORK..."
net.train_tnc(input, target, maxfun=5000, messages=1)

# Testing network
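The listing breaks off here; the test step presumably mirrors the other sine examples, e.g.:

output, regression = net.test(input, target, iprint=2)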
Example #36
def createNNetwork(design):
    conec = mlgraph(design)
    return ffnet(conec)
Example #37
def _train2(X, Y, filename, epochs=50):
    global nn
    conec = mlgraph((INPUT_SIZE, HIDDEN_LAYERS, OUTPUT_LAYER))
    nn = ffnet(conec)

    nn.train_momentum(X, Y, eta=0.001, momentum=0.8, maxiter=epochs, disp=True)
Example #38
### Parallel training example for ffnet ###

from ffnet import ffnet, mlgraph
from scipy import rand

# Generate random training data (large)
input = rand(10000, 10)
target = rand(10000, 1)

# Define net (large one)
conec = mlgraph((10, 300, 1))
net = ffnet(conec)

# Test training speed-up
# Note that the below *if* is necessary only on Windows
if __name__ == '__main__':
    stored_weights = net.weights.copy()

    print "Training in single process:"
    from time import time
    t0 = time()
    net.train_tnc(input, target, nproc=1, maxfun=50, messages=1)
    t1 = time()
    single_time = t1 - t0

    print

    from multiprocessing import cpu_count
    print "Trainig in %s processes:" % cpu_count()
    net.weights = stored_weights  # Just to start from the same point
    t0 = time()
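The listing is truncated at this point; a plausible continuation of the multi-process timing, mirroring the single-process block above (a hedged completion, since the original tail is missing):

    net.train_tnc(input, target, nproc='ncpu', maxfun=50, messages=1)
    t1 = time()
    multi_time = t1 - t0
    print "Speed-up: %.2fx" % (single_time / multi_time)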
Example #39
 def __init__(self, con=(2,2,1)):
     self.network = ffnet(mlgraph(con))
Example #40
### Multiprocessing training example for ffnet ###

from ffnet import ffnet, mlgraph
from scipy import rand

# Generate random training data (large)
input = rand(10000, 10)
target = rand(10000, 1)

# Define net (large one)
conec = mlgraph((10, 300, 1))
net = ffnet(conec)

# Test training speed-up
# Note that the below *if* is necessary only on Windows
if __name__ == "__main__":
    from time import time
    from multiprocessing import cpu_count

    # Preserve original weights
    weights0 = net.weights.copy()

    print "TRAINING, this can take a while..."
    for n in range(1, cpu_count() + 1):
        net.weights[:] = weights0  # Start always from the same point
        t0 = time()
        net.train_tnc(input, target, nproc=n, maxfun=50, messages=0)
        t1 = time()
        print "%s processes: %s s" % (n, t1 - t0)
    
    dg.plot()
    dg.label_minima(labels)
    print labels
    plt.show()
#     dg.savefig("/home/ab2111/machine_learning_landscapes/neural_net/dg.png")

from NNSystem import NNSystem

""" load training data"""
inputs  = np.loadtxt("../handwriting/X2_100samples.dat")
targets = np.loadtxt("../handwriting/y2_100samples.dat")
from ffnet_validation import get_validation_data
vinputs, vtargets = get_validation_data()
    
""" define network topology """
conec = mlgraph((inputs.shape[1],10,1))
print inputs.shape
# exit()
net = ffnet(conec)
system = NNSystem(net, inputs, targets)
        
database = system.create_database(db="../db/db_ffnet_100samples.sqlite")

# make_disconnectivity_graph(system, database, vinputs, vtargets)

#     plt.plot(ts.coords,'x')
#     plt.plot(ts.eigenvec,'o')
# plt.show()
make_validation_disconnectivity_graph(system, database)
Example #42
##  Distributed under the terms of the GNU General Public License (GPL)
##  http://www.gnu.org/copyleft/gpl.html
########################################################################

### Digits recognition example for ffnet ###

# Training file (data/ocr.dat) contains 68 patterns - first 58 
# are used for training and last 10 are used for testing. 
# Each pattern contains 64 inputs which define 8x8 bitmap of 
# the digit and last 10 numbers are the targets (10 targets for 10 digits).
# Layered network architecture is used here: (64, 10, 10, 10).

from ffnet import ffnet, mlgraph, readdata

# Generate standard layered network architecture and create network
conec = mlgraph((64,10,10,10))
net = ffnet(conec)

# Read data file
print "READING DATA..."
data = readdata( 'data/ocr.dat', delimiter = ' ' )
input =  data[:, :64] #first 64 columns - bitmap definition
target = data[:, 64:] #the rest - 10 columns for 10 digits

# Train network with scipy tnc optimizer - 58 lines used for training
print "TRAINING NETWORK..."
net.train_tnc(input[:58], target[:58], maxfun = 2000, messages=1)

# Test network - remaining 10 lines used for testing
print
print "TESTING NETWORK..."
Example #43
##  Distributed under the terms of the GNU General Public License (GPL)
##  http://www.gnu.org/copyleft/gpl.html
########################################################################

### Digits recognition example for ffnet ###

# Training file (data/ocr.dat) contains 68 patterns - first 58
# are used for training and last 10 are used for testing.
# Each pattern contains 64 inputs which define 8x8 bitmap of
# the digit and last 10 numbers are the targets (10 targets for 10 digits).
# Layered network architecture is used here: (64, 10, 10, 10).

from ffnet import ffnet, mlgraph, readdata

# Generate standard layered network architecture and create network
conec = mlgraph((64, 10, 10, 10))
net = ffnet(conec)

# Read data file
print "READING DATA..."
data = readdata('data/ocr.dat', delimiter=' ')
input = data[:, :64]  #first 64 columns - bitmap definition
target = data[:, 64:]  #the rest - 10 columns for 10 digits

# Train network with scipy tnc optimizer - 58 lines used for training
print "TRAINING NETWORK..."
net.train_tnc(input[:58], target[:58], maxfun=2000, messages=1)

# Test network - remaining 10 lines used for testing
print
print "TESTING NETWORK..."
Example #44
##
##  Distributed under the terms of the GNU General Public License (GPL)
##  http://www.gnu.org/copyleft/gpl.html
########################################################################

### Sine training example for ffnet ###

from ffnet import ffnet, mlgraph
from math import pi, sin, cos
from pylab import *
from numpy import *
import pylab as p
import matplotlib.axes3d as p3

# Let's define network connectivity by hand and then create network.
conec = mlgraph((2, 4, 1))
# Note 1: Biases in ffnet are handled as the connections
#         from special node numbered 0. Input nodes cannot be biased.
# Note 2: Node numbering and order of links in conec is meaningless,
#         but the connections have to be from source to target.
# Note 3: The same connectivity can be obtained using mlgraph function
#         provided with ffnet (layered architecture (1,4,1)).
net = ffnet(conec)

# Generate training data (sine values for x from 0 to 2*pi)

x, y = mgrid[0:1:5j, 0:1:5j]
z = exp(x) + sin(6 * y)

fig = p.figure()
ax = p3.Axes3D(fig)
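The listing stops here; a hedged sketch of how it presumably continues, training on the grid and plotting the network response:

input = [[xi, yi] for xi, yi in zip(x.flat, y.flat)]
target = [[zi] for zi in z.flat]
net.train_tnc(input, target, maxfun=5000, messages=1)
output = array([net(inp)[0] for inp in input])
ax.plot_surface(x, y, output.reshape(x.shape))
p.show()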
Example #45
##
##  Distributed under the terms of the GNU General Public License (GPL)
##  http://www.gnu.org/copyleft/gpl.html
########################################################################

### Sine training example for ffnet ###

from ffnet import ffnet, mlgraph
from math import pi, sin, cos
from pylab import *
from numpy import *
import pylab as p
import matplotlib.axes3d as p3

# Let's define network connectivity by hand and then create network.
conec = mlgraph((2,4,1))
# Note 1: Biases in ffnet are handled as the connections
#         from special node numbered 0. Input nodes cannot be biased.
# Note 2: Node numbering and order of links in conec is meaningless,
#         but the connections have to be from source to target.
# Note 3: The same connectivity can be obtained using mlgraph function
#         provided with ffnet (layered architecture (1,4,1)).
net = ffnet(conec)

# Generate training data (sine values for x from 0 to 2*pi)

x,y=mgrid[0:1:5j,0:1:5j]
z=exp(x)+sin(6*y)

fig=p.figure()
ax = p3.Axes3D(fig)
 def __init__(self, nnStructure):
     self.nnStructure = nnStructure
     conec = mlgraph(nnStructure)
     self.network = ffnet(conec)
Example #47
"""Author Jinjun Sun 2012-08-21

"""
from ffnet import mlgraph, ffnet
import networkx as NX
import pylab

conec = mlgraph((11,5,3), biases=False)
net = ffnet(conec)
NX.draw_graphviz(net.graph, prog='dot')
pylab.show()
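Note that draw_graphviz was removed from networkx in later releases; with a modern networkx (plus pygraphviz) the equivalent drawing is roughly:

pos = NX.nx_agraph.graphviz_layout(net.graph, prog='dot')
NX.draw(net.graph, pos)
pylab.show()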

Example #48
 def fit(self, training_set, training_target):
     self.feature_size = training_set.shape[1]
     hidden_layer_size = self.hidden_node
     connection_tuple = (self.feature_size, hidden_layer_size, 1)
     self.nn = ffnet(mlgraph(connection_tuple))
     self.nn.train_momentum(training_set, training_target)
Example #49
### XOR problem example for ffnet ###

from ffnet import ffnet, mlgraph

# Generate standard layered network architecture and create network
conec = mlgraph((2, 2, 1))
net = ffnet(conec)

# Define training data
input = [[0., 0.], [0., 1.], [1., 0.], [1., 1.]]
target = [[1.], [0.], [0.], [1.]]

# Train network
#first find good starting point with genetic algorithm (not necessary, but may be helpful)
print "FINDING STARTING WEIGHTS WITH GENETIC ALGORITHM..."
net.train_genetic(input, target, individuals=20, generations=500)
#then train with scipy tnc optimizer
print "TRAINING NETWORK..."
net.train_tnc(input, target, maxfun=1000, messages=1)

# Test network
print
print "TESTING NETWORK..."
output, regression = net.test(input, target, iprint=2)

# Save/load/export network
from ffnet import savenet, loadnet, exportnet

print "Network is saved..."
savenet(net, "xor.net")
print "Network is reloaded..."
Example #50
# word vector dimension
wordDim = 125
# input layer size
inputSize = cxtWinSize * wordDim
# hidden layer size
hidSize = 10
# output layer size: one node per class
outSize = 8

# Read input and output from training data
# wait to be implemented
# inputVec =
# targetVec =

# Read input from test data
# wait to be implemented
# testInputVec =
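# Added sketch (hypothetical, not from the original): stand-in arrays with the
# expected shapes so that the lines below can run; this assumes cxtWinSize was
# given a value (e.g. 5) before inputSize was computed above.
import numpy
inputVec = numpy.random.rand(100, inputSize)     # 100 dummy training patterns
targetVec = numpy.random.rand(100, outSize)      # matching dummy targets
testInputVec = numpy.random.rand(10, inputSize)  # dummy test patterns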

# specify network topology
conec = mlgraph((inputSize, hidSize, outSize), biases=False)
net = ffnet(conec)
# training process: inputVec: numpy.ndArray, target: numpy.ndArray
net.train_momentum(inputVec,
                   targetVec,
                   eta=0.2,
                   momentum=0.8,
                   maxiter=10000,
                   disp=0)
# prediction
predProb = net.call(testInputVec)
Example #51
# The data is "difficult" in that (for a neural network to
# practically emulate Black-Scholes) a very tight fit is required.
# The R-squared should be at least 0.999999 or better, and the largest
# absolute error must be less than 0.05 dollars (the price increment
# for most options) or, better yet, less than 0.01 dollars.
#
#
# So let's try.
# Attention: training might be a long process since we train a big network.

from __future__ import print_function
from ffnet import ffnet, mlgraph, readdata
from numpy import array

# Generate standard layered network architecture and create network
conec = mlgraph((3,22,12,1))
net = ffnet(conec)

# Read training data omitting first column and first line
print("READING DATA...")
data = readdata( 'data/black-scholes.dat',
                 usecols  = (1, 2, 3, 4),
                 skiprows =  1)
input =  data[:, :3] #first 3 columns
target = data[:, -1] #last column

print("TRAINING NETWORK...")
import sys; sys.stdout.flush() # just to ensure the above messages are displayed here
net.train_tnc(input, target, maxfun = 5000, messages=1)

# Test network
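The listing is truncated; the test step in ffnet's Black-Scholes example computes exactly the figures quoted above, along the lines of Example #2:

output, regression = net.test(input, target, iprint=0)
Rsquared = regression[0][2]
maxerr = abs(array(output).reshape(len(output)) - array(target)).max()
print("R-squared:           %s  (should be >= 0.999999)" % str(Rsquared))
print("max. absolute error: %s  (should be <= 0.05)" % str(maxerr))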
Example #52
             result.append(float(l[i]))
         train_x.append(result)
 print "FINISH READING TRAIN FILE"
 with open("para_test.txt") as f:
     for l in f:
         l = l.strip().split()
         result = []
         for i in range(len(l)):
             result.append(float(l[i]))
         test_x.append(result)
 print "FINISH READING TEST FILE"
 #train_x = train_x[:5]
 #test_x = test_x[:5]
 #train_y = train_y[:5]
 #test_y = test_y[:5]
 c = ffnet.ffnet(ffnet.mlgraph((len(train_x[0]), 50, 1)))
 print "TRAINING....",
 c.train_tnc(train_x, train_y, messages=1, nproc='ncpu', maxfun=1000)
 print "OK"
 print "TESTING....",
 wrong = 0
 for i in range(len(test_y)):
     result = c.call(test_x[i]).tolist()[0]
     if result >= 0.5:
         result = 1.0
     else:
         result = 0.0
     if result != test_y[i]:
         wrong += 1
 print "OK"
 print float(wrong) / float(len(test_y))
residuals[isnan(BAND1) == True] = NaN
f_orig = zeros(R.shape[1])
f_orig[isnan(BAND1) == True] = NaN
f_neu = zeros(R.shape[1])
f_neu[isnan(BAND1) == True] = NaN

########## Define network geometry #############
#conec = mlgraph((3,5,3)) # 3 input neurons, 5 hidden, 3 output neurons
#conec = mlgraph((3,9,15,9,3))
#conec = mlgraph((3,5,7,5,3))
#conec = mlgraph((3,5,11,5,3))
#conec = mlgraph((3,21,21,3)) #---> worked quite well
#conec = mlgraph((3,12,12,3))
#conec = mlgraph((3,15,15,3))
#conec = mlgraph((3,27,27,3)) #---> result pickled as net4.data -> went well
conec = mlgraph((3, 9, 27, 3))  #---> result pickled as net5.data ->
net = ffnet(conec)

LS = 5000  # Learning steps

# Optimization
x = array([0.25, 0.25, 0.25])

# start timing
import time

start = time.clock()
print('size of Matrix: ' + str(R.shape[1]) + ' values ')

print 'Optimization'
Example #54
        if (headerskipped):
            line = line.split(',')
            last = len(line) - 1
            instr = line[0:last]
            inline = []
            for j in range(len(instr)):
                x = float(instr[j])
                input[i, j] = x
            target[i, 0] = float(line[last].strip())
            i += 1
        else:
            headerskipped = True

    f.close()
    return input, target


input, target = readin(infile, inrownum)

testin, testtarget = readin(testfile, testrownum)

connections = mlgraph((inputnum, 100, 1))
net = ffnet(connections)

print('training net...')
#net.train_momentum(input, target, eta=0.5, momentum=.1)
net.train_tnc(input, target)
print('testing net...')
output, regression = net.test(testin, testtarget, iprint=2)
print(output)
print(regression)
    proba = [0.0, 0.0, 0.0, 0.0
             ] + proba[7:-16].reshape(-1).tolist() + [0.0, 0.0, 0.0, 0.0]

    input_data = np.array([proba[i:i + 9]
                           for i in range(0,
                                          len(proba) - 8)]).astype(np.float32)

    return input_data, certainty


## ===============================
## TRAIN NEURAL NET
## ===============================

conec = mlgraph((9, 9, 1))
net = ffnet(conec)

if TRAIN_NEURAL_NET:

    # training files for neural net were generated using trainingfiles/generate_morse.m
    # list of solution texts is included in morse_text.txt

    X_train = None
    Y_train = None

    for filename in sorted(glob.glob("trainingfiles/*.wav")):
        (f, snr, wpm, solution) = string.split(filename, "_")

        print >> sys.stderr, "generating neural net input data for file %s" % f
Example #56
from ffnet import ffnet, mlgraph, readdata
import numpy as np
import sys

def readfile( fname ):
    lines = open( fname ).readlines()[1:]
    vec = [ map( float, x.split(',')[0:-1] ) for x in lines ]
    clas = [ x.split(',')[-1] for x in lines ]
    output = [ [1,0,0,0,0,0,0,0,0],
               [0,1,0,0,0,0,0,0,0],
               [0,0,1,0,0,0,0,0,0],
               [0,0,0,1,0,0,0,0,0],
               [0,0,0,0,1,0,0,0,0],
               [0,0,0,0,0,1,0,0,0],
               [0,0,0,0,0,0,1,0,0],
               [0,0,0,0,0,0,0,1,0],
               [0,0,0,0,0,0,0,0,1] ]
    return vec, [ output[int(x.split('_')[-1])-1] for x in\
                       clas ]

if __name__ == '__main__':
    conec = mlgraph((94,30,30,9))
    net = ffnet(conec)
    tra, out = readfile( sys.argv[1] )
    tra = np.array(tra)
    out = np.array(out)
    net.train_tnc( tra, out, maxfun=2000, messages=1 )
Example #57
def predict(pred, y):
    c = 0
    for i in range(len(pred)):
        if numpy.argmax(pred[i]) == y[i]:
            c += 1
            # print 'good ', pred[i], y[i]
        else:
            # print 'bad ', pred[i], y[i]
            pass
    print 'c = ', c, ' len = ', len(pred)
    # 100.0 forces float division (plain c / len(pred) truncates in Python 2)
    return (100.0 * c) / len(pred)


# Generate standard layered network architecture and create network
conec = mlgraph((400, 200, 10))
net = ffnet(conec)

# Read data file
print "READING DATA..."
data = readdata('data.csv')
numpy.random.shuffle(data)
X = data[:, 1:]
y = data[:, 0]  # first column holds the digit label
input = X
target = numpy.ndarray((input.shape[0], 10))
for i in range(len(y)):
    target[i] = numpy.zeros((1, 10))
    if y[i] == 10:
        y[i] = 0
    target[i][y[i]] = 1