Example #1
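# Assumed preamble (the listing starts mid-script): outtestfunc, outtestfunc2,
# acttest, actderivtest and actderiv2test are Python callables defined earlier
# in the full script, and oldnum is the size of the kernel's function table
# before they are registered.  The two lines below are reconstructed, not
# part of the original listing.
from snns import krui, util
oldnum = krui.getNoOfFunctions()

# Register the callables with the kernel; the activation function and its
# first and second derivatives are all registered under the same name "acttest".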
util.registerFunction(outtestfunc, "outtestfunc", krui.OUT_FUNC, 0, 0)
util.registerFunction(outtestfunc2, "outtestfunc2", krui.OUT_FUNC, 0, 0)
util.registerFunction(acttest, "acttest", krui.ACT_FUNC, 0, 0)
util.registerFunction(actderivtest, "acttest", krui.ACT_DERIV_FUNC, 0, 0)
util.registerFunction(actderiv2test, "acttest", krui.ACT_2_DERIV_FUNC, 0, 0)
newnum = krui.getNoOfFunctions()
print "After adding:", newnum

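# Dump info for the entries at the end of the kernel's function table,
# including the functions registered above.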
for num in range(oldnum - 2, newnum + 1):
    print "Function number", num, "Info:", krui.getFuncInfo(num)

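# Load the encoder example network and switch selected units over to the
# custom output and activation functions.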
print krui.loadNet('encoder.net')
for num in [1, 10, 19]:
    krui.setUnitOutFunc(num, "outtestfunc")
for num in [2, 11, 18]:
    krui.setUnitOutFunc(num, "outtestfunc2")
for num in [3, 9, 17]:
    krui.setUnitActFunc(num, "acttest")

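# Load the matching pattern file and train one pattern; (0.2, 0) is presumably
# the learning-parameter tuple handed to the current learning function.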
krui.loadNewPatterns('encoder.pat')
krui.DefTrainSubPat()
print "Learning one pattern"
krui.learnSinglePattern(1, (0.2, 0))
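# Let new units default to the custom functions, create a throwaway default
# unit to check them, then delete it again and save the net.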
krui.setUnitDefaults(1.0, 0, krui.INPUT, 0, 1, "acttest", "outtestfunc")
newunit = krui.createDefaultUnit()
print "New unit:", newunit
print "Act func name:", krui.getUnitActFuncName(newunit)
krui.deleteUnitList(newunit)
krui.saveNet("tmp.net", "testnet")
print "finished"
Example #2
# Construct an encoder.net-like network from scratch

from snns import krui, util

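# Choose the learning and update functions and the defaults that
# createDefaultUnit() will use below.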
krui.setLearnFunc('Std_Backpropagation')
krui.setUpdateFunc('Topological_Order')
krui.setUnitDefaults(1, 0, krui.INPUT, 0, 1, 'Act_Logistic', 'Out_Identity')

print "Creating the network out of thin air"

# build the input layer
pos = [0, 0, 0]
inputs = []
for i in range(1, 9):
    pos[0] = i
    num = krui.createDefaultUnit()
    inputs.append(num)
    krui.setUnitName(num, 'Input_%i' % i)
    krui.setUnitPosition(num, pos)
# hidden layer
pos[1] = 2
hidden = []
for i in range(1, 4):
    pos[0] = i + 3
    num = krui.createDefaultUnit()
    hidden.append(num)
    krui.setUnitName(num, 'Hidden_%i' % i)
    krui.setUnitTType(num, krui.HIDDEN)
    krui.setUnitPosition(num, pos)
    krui.setCurrentUnit(num)   # links created below presumably attach to this (current) unit
    for src in inputs:
        # the listing breaks off here; presumably each input unit gets a link
        # to the current hidden unit, e.g. with an initial weight of 0.0:
        krui.createLink(src, 0.0)
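# Hedged sketch, not part of the original listing (which stops above): an
# output layer could be added and fully connected to the hidden layer with the
# same setCurrentUnit()/createLink() pattern.  krui.OUTPUT and krui.createLink
# are assumptions here; neither appears in the original snippet.
pos[1] = 4
outputs = []
for i in range(1, 9):
    pos[0] = i
    num = krui.createDefaultUnit()
    outputs.append(num)
    krui.setUnitName(num, 'Output_%i' % i)
    krui.setUnitTType(num, krui.OUTPUT)
    krui.setUnitPosition(num, pos)
    krui.setCurrentUnit(num)
    for src in hidden:
        krui.createLink(src, 0.0)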