        # NOTE(review): tail of an enclosing method whose `def` header is not
        # visible in this chunk; indentation below is reconstructed, not original.
        self.weights.append(randomInitialWeight())
        self.weightDeltas.append(0.0)

    # Output Format
    def __str__(self):
        """Format the neuron as "{input[values]output} ".

        Output-layer neurons list their incoming weights; hidden-layer
        neurons list their center coordinates. All numbers are rounded
        to 2 decimal places.
        """
        out = "{" + str(round(self.input,2)) + "["
        if self.layer.layerType == NetLayerType.Output:
            for w in self.weights:
                out = out + str(round(w,2)) + ","
        elif self.layer.layerType == NetLayerType.Hidden:
            for c in self.center:
                out = out + str(round(c,2)) + ","
        out = out + "]" + str(round(self.output,2)) + "} "
        return out

#Main
if __name__=="__main__":
    trainPercentage = 0.8  # fraction of the pattern set used for training
    #p = PatternSet('data/optdigits/optdigits-orig.json', trainPercentage) # 32x32
    #p = PatternSet('data/letter/letter-recognition.json', trainPercentage) # 20000 @ 1x16 # Try 1 center per attribute, and allow outputs to combine them
    #p = PatternSet('data/pendigits/pendigits.json', trainPercentage) # 10992 @ 1x16 # same as above
    #p = PatternSet('data/semeion/semeion.json', trainPercentage) # 1593 @ 16x16 # Training set is very limited
    p = PatternSet('data/optdigits/optdigits.json', trainPercentage) # 5620 @ 8x8
    n = Net(p)
    # Train on the leading trainPercentage of patterns, then test on the rest.
    n.run(PatternType.Train, 0, int(p.count*trainPercentage))
    n.run(PatternType.Test, int(p.count*trainPercentage), p.count)
    p.printConfusionMatrix()
    print("Done")
# strategies = [TS.TrainingStrategyType.EvolutionStrategy, TS.TrainingStrategyType.GeneticAlgorithm] # Single: strategies = [TS.TrainingStrategyType.EvolutionStrategy] # strategies = [TS.TrainingStrategyType.GeneticAlgorithm] # strategies = [TS.TrainingStrategyType.DifferentialGA] trainPercentage = 0.8 maxGenerations = 30 populationSize = 20 runsPerDataSet = 5 # 10 # hiddenArchitecture = [14] # each hidden layer is a new index in this list, it's value = number of neurons in that layer for dataSet in allDataTypes: for strat in strategies: p = PatternSet(dataSet) # this is here simply to init the confusion matrix for run in range(runsPerDataSet): p = PatternSet(dataSet) print( "\nData Set: (" + str(dataSet) + ") Run: " + str(run) + " Strategy: " + str(TS.TrainingStrategyType.desc(strat)) ) if run == 0: p.initCombinedConfusionMatrix() hiddenArchitecture = [ 2 * len(p.patterns[0]["p"])
# 'data/zoo/zoo.json', # 'data/iris/iris.json'] # Single: # allDataTypes = ['data/ionosphere/ionosphere.json'] # allDataTypes = ['data/block/pageblocks.json'] # allDataTypes = ['data/heart/heart.json'] # allDataTypes = ['data/glass/glass.json'] # allDataTypes = ['data/car/car.json'] # allDataTypes = ['data/seeds/seeds.json'] allDataTypes = ['data/wine/wine.json'] # allDataTypes = ['data/yeast/yeast.json'] # allDataTypes = ['data/zoo/zoo.json'] # allDataTypes = ['data/iris/iris.json'] trainPercentage = 0.8 runsPerDataSet = 10 for dataset in allDataTypes: p = PatternSet(dataset) for run in range(runsPerDataSet): if run == 0: p.initCombinedConfusionMatrix() n = Network(p) n.run(PatternType.Train, 0, int(p.count*trainPercentage)) saveWeights(n) n.run(PatternType.Test, int(p.count*trainPercentage), p.count) p.printStats() p.saveConfusionMatrix() print("Done")