def runStretchExperiment(numObjects=25):
  """
  Profiles the network by training it on many randomly generated objects.

  Parameters:
  ----------------------------
  @param numObjects (int)
         Number of objects to create and learn.
  """
  exp = L4L2Experiment("profiling_experiment", )

  machine = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=1024,
    externalInputSize=1024,
  )
  machine.createRandomObjects(numObjects=numObjects, numPoints=10)

  # Learn every object, then dump the learning-phase profile.
  exp.learnObjects(machine.provideObjectsToLearn())
  exp.printProfile()

  # Infer on the first object, then dump the inference-phase profile.
  firstObject = machine[0]
  inferenceConfig = {
    "numSteps": len(firstObject),
    "pairs": {0: firstObject},
  }
  exp.infer(machine.provideObjectToInfer(inferenceConfig), objectName=0)
  exp.printProfile()

  exp.plotInferenceStats(
    fields=["L2 Representation",
            "Overlap L2 with object",
            "L4 Representation"],
  )
def runLateralDisambiguation(noiseLevel=None, profile=False):
  """
  Two objects share one (location, feature) pair. During inference, one
  column senses only the shared (ambiguous) pair while the other column
  senses a pair unique to object 1; lateral input should make the ambiguous
  column rapidly converge to a unique representation.

  Parameters:
  ----------------------------
  @param noiseLevel (float)
         Noise level to add to the locations and features during inference

  @param profile (bool)
         If True, the network will be profiled after learning and inference
  """
  experiment = L4L2Experiment(
    "lateral_disambiguation",
    numCorticalColumns=2,
  )

  machine = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=1024,
    externalInputSize=1024,
    numCorticalColumns=2,
  )
  # Objects 0 and 1 share the (1, 1) pair; (3, 2) is unique to object 1.
  machine.addObject([(1, 1), (2, 2)])
  machine.addObject([(1, 1), (3, 2)])

  experiment.learnObjects(machine.provideObjectsToLearn())
  if profile:
    experiment.printProfile()

  steps = 6
  inferenceConfig = {
    "noiseLevel": noiseLevel,
    "numSteps": steps,
    "pairs": {
      # this should activate 0 and 1
      0: [(1, 1)] * steps,
      # this should activate 1
      1: [(3, 2)] * steps,
    }
  }
  experiment.infer(machine.provideObjectToInfer(inferenceConfig), objectName=1)
  if profile:
    experiment.printProfile()

  experiment.plotInferenceStats(
    fields=["L2 Representation",
            "Overlap L2 with object",
            "L4 Representation"],
    onePlot=False,
  )
def runDisambiguationByUnions(noiseLevel=None, profile=False):
  """
  An object is disambiguated as each column recognizes a union of two
  objects, the real object being the only one the unions have in common.

  Parameters:
  ----------------------------
  @param noiseLevel (float)
         Noise level to add to the locations and features during inference

  @param profile (bool)
         If True, the network will be profiled after learning and inference
  """
  experiment = L4L2Experiment(
    "disambiguation_unions",
    numCorticalColumns=2,
  )

  machine = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=1024,
    externalInputSize=1024,
    numCorticalColumns=2,
  )
  # Chained objects: each consecutive pair of objects shares one point.
  machine.addObject([(1, 1), (2, 2)])
  machine.addObject([(2, 2), (3, 3)])
  machine.addObject([(3, 3), (4, 4)])

  experiment.learnObjects(machine.provideObjectsToLearn())
  if profile:
    experiment.printProfile()

  steps = 6
  inferenceConfig = {
    "numSteps": steps,
    "noiseLevel": noiseLevel,
    "pairs": {
      # this should activate 1 and 2
      0: [(2, 2)] * steps,
      # this should activate 2 and 3
      1: [(3, 3)] * steps,
    }
  }
  experiment.infer(machine.provideObjectToInfer(inferenceConfig), objectName=1)
  if profile:
    experiment.printProfile()

  experiment.plotInferenceStats(
    fields=["L2 Representation",
            "Overlap L2 with object",
            "L4 Representation"],
    onePlot=False,
  )
def runCapacityTest(numObjects,
                    numPointsPerObject,
                    numCorticalColumns,
                    l2Params,
                    l4Params,
                    objectParams,
                    repeat=0):
  """
  Generate [numObjects] objects with [numPointsPerObject] points per object.
  Train the L4-L2 network on all the objects with single-pass learning, then
  test on (feature, location) pairs and compute accuracy statistics.

  :param numObjects: number of objects to create and learn
  :param numPointsPerObject: number of (feature, location) points per object
  :param numCorticalColumns: number of cortical columns in the network
  :param l2Params: L2 region parameter overrides (dict)
  :param l4Params: L4 region parameter overrides (dict); "columnCount" is
                   used to size the sensor input
  :param objectParams: object generation parameters (dict); reads
                       "numInputBits" and "externalInputSize"
  :param repeat: repetition index, recorded in the result as "repeatID"
  :return: result dict from testOnSingleRandomSDR with "repeatID" added
  """
  l4ColumnCount = l4Params["columnCount"]
  numInputBits = objectParams['numInputBits']
  externalInputSize = objectParams['externalInputSize']
  if numInputBits is None:
    # Default to ~2% sparsity of the L4 column count.
    numInputBits = int(l4ColumnCount * 0.02)

  objects = createObjectMachine(machineType="simple",
                                numInputBits=numInputBits,
                                sensorInputSize=l4ColumnCount,
                                externalInputSize=externalInputSize,
                                numCorticalColumns=numCorticalColumns,
                                numLocations=NUM_LOCATIONS,
                                numFeatures=NUM_FEATURES)

  exp = L4L2Experiment("capacity_two_objects",
                       numInputBits=numInputBits,
                       L2Overrides=l2Params,
                       L4Overrides=l4Params,
                       inputSize=l4ColumnCount,
                       externalInputSize=externalInputSize,
                       numLearningPoints=3,
                       numCorticalColumns=numCorticalColumns)

  pairs = createRandomObjects(numObjects,
                              numPointsPerObject,
                              NUM_LOCATIONS,
                              NUM_FEATURES)
  # `obj` rather than `object` to avoid shadowing the builtin.
  for obj in pairs:
    objects.addObject(obj)

  exp.learnObjects(objects.provideObjectsToLearn())

  testResult = testOnSingleRandomSDR(objects, exp)
  testResult['repeatID'] = repeat
  return testResult
def runAmbiguities(noiseLevel=None, profile=False):
  """
  Three objects sharing many patterns are learnt. At inference, only one
  object is being moved over, and we should see quick convergence.

  Parameters:
  ----------------------------
  @param noiseLevel (float)
         Noise level to add to the locations and features during inference

  @param profile (bool)
         If True, the network will be profiled after learning and inference
  """
  experiment = L4L2Experiment(
    "ambiguities",
    numCorticalColumns=2,
  )

  machine = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=1024,
    externalInputSize=1024,
    numCorticalColumns=2,
  )
  # Heavily overlapping objects: (2, 1) and (3, 3) each appear in several.
  for definition in ([(1, 1), (2, 1), (3, 3)],
                     [(2, 2), (3, 3), (2, 1)],
                     [(3, 1), (2, 1), (1, 2)]):
    machine.addObject(definition)

  experiment.learnObjects(machine.provideObjectsToLearn())
  if profile:
    experiment.printProfile()

  inferenceConfig = {
    "numSteps": 6,
    "noiseLevel": noiseLevel,
    "pairs": {
      0: [(2, 1), (2, 1), (3, 3), (2, 2), (2, 2), (2, 2)],
      1: [(3, 3), (3, 3), (3, 3), (2, 2), (2, 1), (2, 1)]
    }
  }
  experiment.infer(machine.provideObjectToInfer(inferenceConfig), objectName=1)
  if profile:
    experiment.printProfile()

  experiment.plotInferenceStats(
    fields=["L2 Representation",
            "Overlap L2 with object",
            "L4 Representation"],
    onePlot=False,
  )
def setUp(self):
  """
  Builds a single-column Bayesian L4L2 experiment (self.exp1) and a random
  object set (self.objectMachine) for the tests to use.
  """
  # params -- the "new"/"original" notes track values changed from the
  # reference experiment configuration.
  maxNumSegments = 2
  L2Overrides = {
    "learningRate": 0.1,
    "noise": 1e-8,
    "cellCount": 256,  # new: 256 # original: 4096
    "inputWidth": 8192,  # new: 8192 # original: 16384 (?)
    "sdrSize": 5,
    "activationThreshold": 0.01,
    "useSupport": True
  }
  L4Overrides = {
    "learningRate": 0.1,
    "noise": 1e-8,
    "cellsPerColumn": 4,  # new: 4 # original 32
    "columnCount": 2048,  # new: 2048 # original: 2048
    "minThreshold": 0.35,
  }

  self.exp1 = L4L2Experiment(
    'single_column',
    implementation='Bayesian',
    L2RegionType="py.BayesianColumnPoolerRegion",
    L4RegionType="py.BayesianApicalTMPairRegion",
    L2Overrides=L2Overrides,
    L4Overrides=L4Overrides,
    numCorticalColumns=1,
    maxSegmentsPerCell=maxNumSegments,
    numLearningPoints=3,  # number repetitions for learning
    seed=1
  )

  # Object-set dimensions.
  numFeatures = 3  # new: 3 # original: 3
  numPoints = 5  # new: 5 # original: 10
  numLocations = 5  # new: 5 # original: 10
  numObjects = 5  # new: 2 # original: 10
  # NOTE(review): numRptsPerSensation is assigned but never used in setUp --
  # confirm whether the test methods were meant to read it from here.
  numRptsPerSensation = 2

  # NOTE(review): the object machine is built with 3 cortical columns while
  # the experiment above has only 1 -- confirm this mismatch is intentional.
  self.objectMachine = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=1024,
    externalInputSize=1024,
    numCorticalColumns=3,
    seed=40,
  )
  self.objectMachine.createRandomObjects(numObjects, numPoints=numPoints,
                                         numLocations=numLocations,
                                         numFeatures=numFeatures)
def runCapacityTest(numObjects,
                    numPointsPerObject,
                    maxNewSynapseCount,
                    activationThreshold,
                    numCorticalColumns):
  """
  Generate [numObjects] objects with [numPointsPerObject] points per object,
  train the L4-L2 network on all of them with single pass learning, then
  test on (feature, location) pairs and compute.

  :param numObjects:
  :param numPointsPerObject:
  :param maxNewSynapseCount:
  :param activationThreshold:
  :param numCorticalColumns:
  :return:
  """
  l4Params = getL4Params()
  l2Params = getL2Params()
  # Apply the two tunables under test to the L2 proximal settings.
  l2Params["maxNewProximalSynapseCount"] = maxNewSynapseCount
  l2Params["minThresholdProximal"] = activationThreshold

  l4ColumnCount = l4Params["columnCount"]
  numInputBits = int(l4Params["columnCount"] * 0.02)

  objectMachine = createObjectMachine(
    machineType="simple",
    numInputBits=numInputBits,
    sensorInputSize=l4ColumnCount,
    externalInputSize=l4ColumnCount,
    numCorticalColumns=numCorticalColumns,
    numLocations=NUM_LOCATIONS,
    numFeatures=NUM_FEATURES)

  experiment = L4L2Experiment(
    "capacity_two_objects",
    numInputBits=numInputBits,
    L2Overrides=l2Params,
    L4Overrides=l4Params,
    inputSize=l4ColumnCount,
    externalInputSize=l4ColumnCount,
    numLearningPoints=4,
    numCorticalColumns=numCorticalColumns)

  for definition in createRandomObjects(numObjects,
                                        numPointsPerObject,
                                        NUM_LOCATIONS,
                                        NUM_FEATURES):
    objectMachine.addObject(definition)

  experiment.learnObjects(objectMachine.provideObjectsToLearn())

  return testOnSingleRandomSDR(objectMachine, experiment)
def experiment(numColumns, sampleSize):
  """
  Trains a [numColumns]-column network on the module-level OBJECTS (with
  freshly sampled random SDRs per column) and counts how many touches it
  takes to infer "Object 1".

  @param numColumns (int)  Number of cortical columns / sensors.
  @param sampleSize (int)  Value for the L2 "sampleSizeDistal" override.
  @return (int or None)    Touch count at convergence, or None if not
                           inferred within 60 touches.
  """
  # One random 40-bit SDR (out of 1024) per location/feature name, sampled
  # independently for every column.
  locationSDRsByColumn = [
    dict((name, set(random.sample(xrange(1024), 40))) for name in LOCATIONS)
    for _ in xrange(numColumns)]
  featureSDRsByColumn = [
    dict((name, set(random.sample(xrange(1024), 40))) for name in FEATURES)
    for _ in xrange(numColumns)]

  exp = L4L2Experiment("Hello",
                       numCorticalColumns=numColumns,
                       L2Overrides={
                         "sampleSizeDistal": sampleSize,
                       },
                       seed=random.randint(2048, 4096))

  # Learn each object as, per location, a per-column dict of
  # (locationSDR, featureSDR) pairs.
  exp.learnObjects(
    dict((objectName,
          [dict((column,
                 (locationSDRsByColumn[column][location],
                  featureSDRsByColumn[column][features[location]]))
                for column in xrange(numColumns))
           for location in LOCATIONS])
         for objectName, features in OBJECTS.iteritems()))

  objectName = "Object 1"
  features = OBJECTS[objectName]

  # Target representation the network should settle on.
  inferredL2 = exp.objectL2Representations[objectName]

  touchCount = 0
  for sensorPositions in greedySensorPositions(numColumns, len(LOCATIONS)):
    sensation = dict(
      (column,
       (locationSDRsByColumn[column][sensorPositions[column]],
        featureSDRsByColumn[column][features[sensorPositions[column]]]))
      for column in xrange(numColumns))
    exp.infer([sensation] * TIMESTEPS_PER_SENSATION, reset=False,
              objectName=objectName)
    touchCount += 1
    if exp.getL2Representations() == inferredL2:
      print "Inferred object after %d touches" % touchCount
      return touchCount
    # Give up after 60 touches.
    if touchCount >= 60:
      print "Never inferred object"
      return None
def runUncertainLocations(missingLoc=None, profile=False):
  """
  Runs the same experiment as above, with missing locations at some
  timesteps during inference (if it was not successfully computed by the
  rest of the network for example).

  @param missingLoc (dict)
         A dictionary mapping indices in the object to location index to
         replace with during inference (-1 means no location, a tuple means
         an union of locations).

  @param profile (bool)
         If True, the network will be profiled after learning and inference.
  """
  if missingLoc is None:
    missingLoc = {}

  exp = L4L2Experiment("uncertain_location",
                       enableLateralSP=True,
                       enableFeedForwardSP=True)

  pairs = createThreeObjects()
  objects = createObjectMachine(machineType="simple",
                                numInputBits=20,
                                sensorInputSize=1024,
                                externalInputSize=1024)
  for object in pairs:
    objects.addObject(object)

  exp.learnObjects(objects.provideObjectsToLearn())

  # create pairs with missing locations
  # NOTE(review): this mutates objects[0] in place after learning -- the
  # object machine's stored object is modified, which appears intentional
  # here (inference uses the perturbed pairs) but confirm no later code
  # relies on the unmodified object.
  objectA = objects[0]
  for key, val in missingLoc.iteritems():
    objectA[key] = (val, key)

  inferConfig = {
    "numSteps": 10,
    "pairs": {0: objectA}
  }
  exp.infer(objects.provideObjectToInfer(inferConfig), objectName=0)
  if profile:
    exp.printProfile()

  exp.plotInferenceStats(
    fields=["L2 Representation",
            "Overlap L2 with object",
            "L4 Representation",
            "L4 Predictive"],
  )
def runSharedFeatures(noiseLevel=None, profile=False):
  """
  Runs a simple experiment where three objects share a number of
  (location, feature) pairs.

  Parameters:
  ----------------------------
  @param noiseLevel (float)
         Noise level to add to the locations and features during inference

  @param profile (bool)
         If True, the network will be profiled after learning and inference
  """
  experiment = L4L2Experiment("shared_features",
                              enableLateralSP=True,
                              enableFeedForwardSP=True)

  machine = createObjectMachine(machineType="simple",
                                numInputBits=20,
                                sensorInputSize=1024,
                                externalInputSize=1024)
  for definition in createThreeObjects():
    machine.addObject(definition)

  experiment.learnObjects(machine.provideObjectsToLearn())
  if profile:
    experiment.printProfile()

  inferenceConfig = {
    "numSteps": 10,
    "noiseLevel": noiseLevel,
    # Sense pairs (0, 0) through (9, 9) on object 0.
    "pairs": {0: [(k, k) for k in range(10)]}
  }
  experiment.infer(machine.provideObjectToInfer(inferenceConfig), objectName=0)
  if profile:
    experiment.printProfile()

  experiment.plotInferenceStats(
    fields=["L2 Representation",
            "Overlap L2 with object",
            "L4 Representation"],
  )
def runExperiment(arguments):
  """
  Runs the single-column (Bayesian or classic) L4L2 experiment on a set of
  random objects and reports convergence statistics for object #2.

  @param arguments  Parsed command-line arguments; reads cellCount,
                    outputCount, outputActivation, sdrSize, forgetting,
                    useSupport, useApicalTiebreak, implementation,
                    learningRate and iterations.
  @return For Bayesian implementations: (l2ActiveValuesRepresentation,
          l2ActiveValues, converged); otherwise None.
  """
  numColumns = 3
  numFeatures = 3  # new: 3 # original: 3
  numPoints = 5  # new: 5 # original: 10
  numLocations = 5  # new: 5 # original: 10
  numObjects = 5  # new: 2 # original: 10
  numRptsPerSensation = 1

  objectMachine = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=1024,
    externalInputSize=1024,
    numCorticalColumns=1,
    seed=40,
  )
  objectMachine.createRandomObjects(numObjects, numPoints=numPoints,
                                    numLocations=numLocations,
                                    numFeatures=numFeatures)
  objects = objectMachine.provideObjectsToLearn()

  # single-out the inputs to the column #1
  objectsSingleColumn = {}
  for i in range(numObjects):
    featureLocations = []
    for j in range(numLocations):
      featureLocations.append({0: objects[i][j][0]})
    objectsSingleColumn[i] = featureLocations

  cellsPerColumn = arguments.cellCount
  outputCells = arguments.outputCount
  # params
  maxNumSegments = 16
  L2Overrides = {
    "noise": 1e-10,
    "cellCount": outputCells,  # new: 256 # original: 4096
    "inputWidth": 1024 * cellsPerColumn,  # new: 8192 # original: 16384 (?)
    "activationThreshold": arguments.outputActivation,
    "sdrSize": arguments.sdrSize,
    "forgetting": arguments.forgetting,
    "initMovingAverages": 1 / float(outputCells),
    "useSupport": arguments.useSupport,
    "useProximalProbabilities": True,
    "avoidWeightExplosion": False
  }
  L4Overrides = {
    "noise": 1e-10,
    "cellsPerColumn": cellsPerColumn,  # new: 4 # original 32
    "columnCount": 1024,  # new: 2048 # original: 2048
    "initMovingAverages": 1 / float(2048 * cellsPerColumn),
    "minThreshold": 1 / float(cellsPerColumn),
    "useApicalTiebreak": arguments.useApicalTiebreak
  }

  # NOTE(review): when arguments.implementation is None, the membership
  # tests below ("Summing" not in ... / 'Bayesian' in ...) would raise a
  # TypeError -- confirm implementation is always a string in practice.
  if arguments.implementation is None or "Bayesian" in arguments.implementation:
    if "Summing" not in arguments.implementation:
      L2Overrides["learningRate"] = arguments.learningRate
      L4Overrides["learningRate"] = arguments.learningRate

    exp1 = L4L2Experiment(
      'single_column',
      implementation=arguments.implementation,
      L2RegionType="py.BayesianColumnPoolerRegion",
      L4RegionType="py.BayesianApicalTMPairRegion",
      L2Overrides=L2Overrides,
      L4Overrides=L4Overrides,
      numCorticalColumns=1,
      maxSegmentsPerCell=maxNumSegments,
      numLearningPoints=7 if arguments.iterations is None else arguments.iterations,
      seed=1)
  else:
    exp1 = L4L2Experiment(
      'single_column',
      numCorticalColumns=1,
      maxSegmentsPerCell=maxNumSegments,
      numLearningPoints=3,
      seed=1)

  print "train single column "
  exp1.learnObjects(objectsSingleColumn)

  # test on the first object
  objectId = 2
  obj = objectMachine[objectId]

  # Create sequence of sensations for this object for all columns
  # We need to set the seed to get specific convergence points for the red
  # rectangle in the graph.
  objectSensations = {}
  random.seed(12)
  for c in range(numColumns):
    objectCopy = [pair for pair in obj]
    # random.shuffle(objectCopy)
    # stay multiple steps on each sensation
    sensations = []
    for pair in objectCopy:
      for _ in xrange(numRptsPerSensation):
        sensations.append(pair)
    objectSensations[c] = sensations

  sensationStepsSingleColumn = []
  sensationStepsMultiColumn = []
  for step in xrange(len(objectSensations[0])):
    pairs = [objectSensations[col][step] for col in xrange(numColumns)]
    sdrs = objectMachine._getSDRPairs(pairs)
    sensationStepsMultiColumn.append(sdrs)
    sensationStepsSingleColumn.append({0: sdrs[0]})

  print "inference: single column "
  exp1.sendReset()
  l2ActiveCellsSingleColumn = []
  L2ActiveCellNVsTimeSingleColumn = []
  l2ActiveValues = []
  l2ActiveValuesRepresentation = []

  # Count how often each (feature, location) pair of the target object also
  # occurs in the other objects (i.e. how ambiguous each sensation is).
  target = objectMachine.objects[2]
  objects_but_target = [
    element for element in objectMachine.objects.values()
    if element is not target
  ]
  counts = np.zeros(len(target))
  for num, pair in enumerate(target):
    for in_object in objects_but_target:
      counts[num] += in_object.count(pair)
  print "The feaure-location pairs are shared as follows: ", counts

  for sensation in sensationStepsSingleColumn:
    exp1.infer([sensation], objectName=objectId, reset=False)
    if 'Bayesian' in arguments.implementation:
      l2ActiveCellsSingleColumn.append(exp1.getL2Prediction())
      cellActivity = exp1.getActiveCellValues()[0]
      l2ActiveValuesRepresentation.append(
        cellActivity[list(exp1.objectL2Representations[objectId][0])])
      l2ActiveValues.append(cellActivity)
      L2ActiveCellNVsTimeSingleColumn.append(len(exp1.getL2Prediction()[0]))
    else:
      rep = exp1.getL2Representations()
      l2ActiveCellsSingleColumn.append(rep)
      L2ActiveCellNVsTimeSingleColumn.append(len(rep[0]))

  # Used to figure out where to put the red rectangle!
  sdrSize = exp1.config["L2Params"]["sdrSize"]
  singleColumnHighlight = next(
    (idx for idx, value in enumerate(l2ActiveCellsSingleColumn)
     if len(value[0]) == sdrSize), None)
  firstObjectRepresentation = exp1.objectL2Representations[objectId][0]
  converged = next(
    (idx for idx, value in enumerate(l2ActiveCellsSingleColumn)
     if (value[0] == firstObjectRepresentation)), None)

  print "Converged to first object representation after %s steps" % converged
  print "Exactly SDR-Size activity (%s) after %s steps" % (
    sdrSize, singleColumnHighlight)
  print "Overlaps of each l2-representation (after new sensation) to each object"
  for idx in range(0, len(l2ActiveCellsSingleColumn)):
    print "overlap of l2-representation %s" % idx
    for i in range(0, len(exp1.objectL2Representations)):
      object = exp1.objectL2Representations[i][0]
      l2Representation = l2ActiveCellsSingleColumn[idx][0]
      overlap = len(l2Representation.intersection(object))
      print "\tTo object %s is %s/%s" % (i, overlap, len(l2Representation))

  if 'Bayesian' in arguments.implementation:
    return l2ActiveValuesRepresentation, l2ActiveValues, converged
  else:
    return None
def runCapacityTest(numObjects,
                    numPointsPerObject,
                    numCorticalColumns,
                    l2Params,
                    l4Params,
                    objectParams,
                    networkType="MultipleL4L2Columns",
                    repeat=0):
  """
  Generate [numObjects] objects with [numPointsPerObject] points per object.
  Train the L4-L2 network on all the objects with single-pass learning, then
  test on (feature, location) pairs and compute accuracy statistics.

  :param numObjects: number of objects to create and learn
  :param numPointsPerObject: number of (feature, location) points per object
  :param numCorticalColumns: number of cortical columns in the network
  :param l2Params: L2 region parameter overrides (dict)
  :param l4Params: L4 region parameter overrides (dict); "columnCount" is
                   used to size the sensor input
  :param objectParams: object generation parameters (dict); reads
                       "numInputBits", "externalInputSize", "numLocations",
                       "numFeatures" and "uniquePairs"
  :param networkType: network topology passed to L4L2Experiment
  :param repeat: repetition index forwarded to testOnSingleRandomSDR
  :return: test result from testOnSingleRandomSDR
  """
  l4ColumnCount = l4Params["columnCount"]
  numInputBits = objectParams['numInputBits']
  externalInputSize = objectParams['externalInputSize']
  if numInputBits is None:
    # Default to ~2% sparsity of the L4 column count.
    numInputBits = int(l4ColumnCount * 0.02)
  numLocations = objectParams["numLocations"]
  numFeatures = objectParams["numFeatures"]

  objects = createObjectMachine(machineType="simple",
                                numInputBits=numInputBits,
                                sensorInputSize=l4ColumnCount,
                                externalInputSize=externalInputSize,
                                numCorticalColumns=numCorticalColumns,
                                numLocations=numLocations,
                                numFeatures=numFeatures)

  exp = L4L2Experiment("capacity_two_objects",
                       numInputBits=numInputBits,
                       L2Overrides=l2Params,
                       L4Overrides=l4Params,
                       inputSize=l4ColumnCount,
                       networkType=networkType,
                       externalInputSize=externalInputSize,
                       numLearningPoints=3,
                       numCorticalColumns=numCorticalColumns,
                       objectNamesAreIndices=True)

  if objectParams["uniquePairs"]:
    pairs = createRandomObjects(numObjects,
                                numPointsPerObject,
                                numLocations,
                                numFeatures)
  else:
    pairs = createRandomObjectsSharedPairs(numObjects,
                                           numPointsPerObject,
                                           numLocations,
                                           numFeatures)

  # `obj` rather than `object` to avoid shadowing the builtin.
  for obj in pairs:
    objects.addObject(obj)

  exp.learnObjects(objects.provideObjectsToLearn())

  testResult = testOnSingleRandomSDR(objects, exp, 100, repeat)
  return testResult
def trainNetwork(objects, numColumns, l4Params, l2Params, verbose=False):
  """
  Trains an L4L2 network on the given object machine's objects, then infers
  each object one sensation at a time, recording per-sensation L2 overlaps
  with every learned object representation.

  @param objects (ObjectMachine)  Pre-populated object machine.
  @param numColumns (int)         Number of cortical columns (only 1 is
                                  supported at inference time).
  @param l4Params / l2Params      Region parameter overrides.
  @param verbose (bool)           Print extra per-object output.
  @return dict with "overlapMat" (numObjects x numObjects x sensations) and
          "numL2ActiveCells" (numObjects x sensations) numpy arrays.
  """
  print " Training sensorimotor network ..."
  objectNames = objects.objects.keys()
  numObjects = len(objectNames)

  exp = L4L2Experiment("shared_features",
                       L2Overrides=l2Params,
                       L4Overrides=l4Params,
                       numCorticalColumns=numColumns)
  exp.learnObjects(objects.provideObjectsToLearn())
  settlingTime = 1
  L2Representations = exp.objectL2Representations
  # if verbose:
  #   print "Learned object representations:"
  #   pprint.pprint(L2Representations, width=400)
  #   print "=========================="

  # For inference, we will check and plot convergence for each object. For each
  # object, we create a sequence of random sensations for each column. We will
  # present each sensation for settlingTime time steps to let it settle and
  # ensure it converges.
  maxSensationNumber = 30
  overlapMat = np.zeros((numObjects, numObjects, maxSensationNumber))
  numL2ActiveCells = np.zeros((numObjects, maxSensationNumber))
  for objectIdx in range(numObjects):
    objectId = objectNames[objectIdx]
    obj = objects[objectId]
    # Create sequence of sensations for this object for one column. The total
    # number of sensations is equal to the number of points on the object. No
    # point should be visited more than once.
    objectCopy = [pair for pair in obj]
    random.shuffle(objectCopy)
    exp.sendReset()
    for sensationNumber in range(maxSensationNumber):
      objectSensations = {}
      for c in range(numColumns):
        objectSensations[c] = []
      # Past the end of the object, keep re-sensing the last point.
      if sensationNumber >= len(objectCopy):
        pair = objectCopy[-1]
      else:
        pair = objectCopy[sensationNumber]
      if numColumns > 1:
        raise NotImplementedError
      else:
        # stay multiple steps on each sensation
        for _ in xrange(settlingTime):
          objectSensations[0].append(pair)

      inferConfig = {
        "object": objectId,
        "numSteps": len(objectSensations[0]),
        "pairs": objectSensations,
        "includeRandomLocation": False,
      }
      inferenceSDRs = objects.provideObjectToInfer(inferConfig)
      exp.infer(inferenceSDRs, objectName=objectId, reset=False)

      # Record overlap of the current L2 output with every learned object.
      for i in range(numObjects):
        overlapMat[objectIdx, i, sensationNumber] = len(
          exp.getL2Representations()[0] &
          L2Representations[objects.objects.keys()[i]][0])
        # if verbose:
        #   print "Intersection with {}:{}".format(
        #     objectNames[i], overlapMat[objectIdx, i])
      for c in range(numColumns):
        numL2ActiveCells[objectIdx, sensationNumber] += len(
          exp.getL2Representations()[c])
      print "{} # L2 active cells {}: ".format(
        sensationNumber, numL2ActiveCells[objectIdx, sensationNumber])

    if verbose:
      print "Output for {}: {}".format(objectId, exp.getL2Representations())
      print "Final L2 active cells {}: ".format(
        numL2ActiveCells[objectIdx, sensationNumber])
    print
    exp.sendReset()

  expResult = {
    'overlapMat': overlapMat,
    'numL2ActiveCells': numL2ActiveCells
  }
  return expResult
def runExperiment(args):
  """
  Run experiment.  args is a dict representing the parameters. We do it this
  way to support multiprocessing. args contains one or more of the following
  keys:

  @param noiseLevel (float) Noise level to add to the locations and features
                            during inference. Default: None
  @param profile (bool)     If True, the network will be profiled after
                            learning and inference. Default: False
  @param numObjects (int)   The number of objects we will train.
                            Default: 10
  @param numPoints (int)    The number of points on each object.
                            Default: 10
  @param pointRange (int)   Creates objects each with points ranging from
                            [numPoints,...,numPoints+pointRange-1]
                            A total of numObjects * pointRange objects will
                            be created. Default: 1
  @param numLocations (int) For each point, the number of locations to
                            choose from.  Default: 10
  @param numFeatures (int)  For each point, the number of features to
                            choose from.  Default: 10
  @param numColumns (int)   The total number of cortical columns in network.
                            Default: 2
  @param settlingTime (int) Number of iterations we wait to let columns
                            stabilize. Important for multicolumn experiments
                            with lateral connections.
  @param includeRandomLocation (bool) If True, a random location SDR will be
                            generated during inference for each feature.

  The method returns the args dict updated with two additional keys:
    convergencePoint (int)  The average number of iterations it took
                            to converge across all objects
    objects          (pairs) The list of objects we trained on
  """
  numObjects = args.get("numObjects", 10)
  numLocations = args.get("numLocations", 10)
  numFeatures = args.get("numFeatures", 10)
  numColumns = args.get("numColumns", 2)
  profile = args.get("profile", False)
  noiseLevel = args.get("noiseLevel", None)  # TODO: implement this?
  numPoints = args.get("numPoints", 10)
  trialNum = args.get("trialNum", 42)
  pointRange = args.get("pointRange", 1)
  plotInferenceStats = args.get("plotInferenceStats", True)
  settlingTime = args.get("settlingTime", 3)
  includeRandomLocation = args.get("includeRandomLocation", False)

  # Create the objects
  objects = createObjectMachine(
    machineType="simple",
    numInputBits=20,
    sensorInputSize=150,
    externalInputSize=2400,
    numCorticalColumns=numColumns,
    numFeatures=numFeatures,
    seed=trialNum
  )
  for p in range(pointRange):
    objects.createRandomObjects(numObjects, numPoints=numPoints + p,
                                numLocations=numLocations,
                                numFeatures=numFeatures)

  objectConfusion(objects.getObjects())

  # print "Total number of objects created:",len(objects.getObjects())
  # print "Objects are:"
  # for o in objects:
  #   pairs = objects[o]
  #   pairs.sort()
  #   print str(o) + ": " + str(pairs)

  # Setup experiment and train the network
  name = "convergence_O%03d_L%03d_F%03d_C%03d_T%03d" % (
    numObjects, numLocations, numFeatures, numColumns, trialNum
  )
  exp = L4L2Experiment(
    name,
    numCorticalColumns=numColumns,
    inputSize=150,
    externalInputSize=2400,
    numInputBits=20,
    seed=trialNum
  )

  exp.learnObjects(objects.provideObjectsToLearn())
  if profile:
    exp.printProfile(reset=True)

  # For inference, we will check and plot convergence for each object. For each
  # object, we create a sequence of random sensations for each column. We will
  # present each sensation for settlingTime time steps to let it settle and
  # ensure it converges.
  for objectId in objects:
    obj = objects[objectId]

    objectSensations = {}
    for c in range(numColumns):
      objectSensations[c] = []

    if numColumns > 1:
      # Create sequence of random sensations for this object for all columns At
      # any point in time, ensure each column touches a unique loc,feature pair
      # on the object. It is ok for a given column to sense a loc,feature pair
      # more than once. The total number of sensations is equal to the number of
      # points on the object.
      for sensationNumber in range(len(obj)):
        # Randomly shuffle points for each sensation
        objectCopy = [pair for pair in obj]
        random.shuffle(objectCopy)
        for c in range(numColumns):
          # stay multiple steps on each sensation
          for _ in xrange(settlingTime):
            objectSensations[c].append(objectCopy[c])
    else:
      # Create sequence of sensations for this object for one column. The total
      # number of sensations is equal to the number of points on the object. No
      # point should be visited more than once.
      objectCopy = [pair for pair in obj]
      random.shuffle(objectCopy)
      for pair in objectCopy:
        # stay multiple steps on each sensation
        for _ in xrange(settlingTime):
          objectSensations[0].append(pair)

    inferConfig = {
      "object": objectId,
      "numSteps": len(objectSensations[0]),
      "pairs": objectSensations,
      "includeRandomLocation": includeRandomLocation,
    }

    inferenceSDRs = objects.provideObjectToInfer(inferConfig)
    exp.infer(inferenceSDRs, objectName=objectId)
    if profile:
      exp.printProfile(reset=True)

    if plotInferenceStats:
      exp.plotInferenceStats(
        fields=["L2 Representation",
                "Overlap L2 with object",
                "L4 Representation"],
        experimentID=objectId,
        onePlot=False,
      )

  convergencePoint = averageConvergencePoint(
    exp.getInferenceStats(), "L2 Representation", 30, 40, settlingTime)

  print
  print "# objects {} # features {} # locations {} # columns {} trial # {}".format(
    numObjects, numFeatures, numLocations, numColumns, trialNum)
  print "Average convergence point=", convergencePoint

  # Return our convergence point as well as all the parameters and objects
  args.update({"objects": objects.getObjects()})
  args.update({"convergencePoint": convergencePoint})

  # Can't pickle experiment so can't return it for batch multiprocessing runs.
  # However this is very useful for debugging when running in a single thread.
  if plotInferenceStats:
    args.update({"experiment": exp})
  return args
def runExperiment(args):
  """
  Run experiment.  args is a dict representing the parameters. We do it this
  way to support multiprocessing.

  Pretrains the network on individual feature/location pairs, then forms L2
  "object representations" by presenting the union of each object's pairs,
  and plots the resulting object confusion matrices per settling iteration.

  The method returns the args dict (unchanged here; accuracy metrics are
  printed / saved as PDF confusion plots).
  """
  numObjects = args.get("numObjects", 10)
  numLocations = args.get("numLocations", 10)
  numFeatures = args.get("numFeatures", 10)
  numColumns = args.get("numColumns", 2)
  sensorInputSize = args.get("sensorInputSize", 300)
  networkType = args.get("networkType", "MultipleL4L2Columns")
  longDistanceConnections = args.get("longDistanceConnections", 0)
  # NOTE(review): locationNoise/featureNoise, numAmbiguousLocations and
  # numInferenceRpts are read from args but never used below -- confirm
  # whether they were meant to be wired into inference.
  locationNoise = args.get("locationNoise", 0.0)
  featureNoise = args.get("featureNoise", 0.0)
  numPoints = args.get("numPoints", 10)
  trialNum = args.get("trialNum", 42)
  plotInferenceStats = args.get("plotInferenceStats", True)
  settlingTime = args.get("settlingTime", 3)
  includeRandomLocation = args.get("includeRandomLocation", False)
  enableFeedback = args.get("enableFeedback", True)
  numAmbiguousLocations = args.get("numAmbiguousLocations", 0)
  numInferenceRpts = args.get("numInferenceRpts", 1)
  numLearningRpts = args.get("numLearningRpts", 3)
  l2Params = args.get("l2Params", None)
  l4Params = args.get("l4Params", None)

  # Create the objects
  objects = createObjectMachine(machineType="simple",
                                numInputBits=20,
                                sensorInputSize=sensorInputSize,
                                externalInputSize=2400,
                                numCorticalColumns=numColumns,
                                numFeatures=numFeatures,
                                numLocations=numLocations,
                                seed=trialNum)
  objects.createRandomObjects(numObjects, numPoints=numPoints,
                              numLocations=numLocations,
                              numFeatures=numFeatures)

  r = objects.objectConfusion()
  print "Average common pairs in objects=", r[0],
  print ", locations=", r[1], ", features=", r[2]

  # print "Total number of objects created:",len(objects.getObjects())
  # print "Objects are:"
  # for o in objects:
  #   pairs = objects[o]
  #   pairs.sort()
  #   print str(o) + ": " + str(pairs)

  # This object machine will simulate objects where each object is just one
  # unique feature/location pair. We will use this to pretrain L4/L2 with
  # individual pairs.
  pairObjects = createObjectMachine(machineType="simple",
                                    numInputBits=20,
                                    sensorInputSize=sensorInputSize,
                                    externalInputSize=2400,
                                    numCorticalColumns=numColumns,
                                    numFeatures=numFeatures,
                                    numLocations=numLocations,
                                    seed=trialNum)

  # Create "pair objects" consisting of all unique F/L pairs from our objects.
  # These pairs should have the same SDRs as the original objects.
  pairObjects.locations = objects.locations
  pairObjects.features = objects.features
  distinctPairs = objects.getDistinctPairs()
  print "Number of distinct feature/location pairs:", len(distinctPairs)
  for pairNumber, pair in enumerate(distinctPairs):
    pairObjects.addObject([pair], pairNumber)

  #####################################################
  #
  # Setup experiment and train the network
  name = "dp_O%03d_L%03d_F%03d_C%03d_T%03d" % (
    numObjects, numLocations, numFeatures, numColumns, trialNum)
  exp = L4L2Experiment(
    name,
    numCorticalColumns=numColumns,
    L2Overrides=l2Params,
    L4Overrides=l4Params,
    networkType=networkType,
    longDistanceConnections=longDistanceConnections,
    inputSize=sensorInputSize,
    externalInputSize=2400,
    numInputBits=20,
    seed=trialNum,
    enableFeedback=enableFeedback,
    numLearningPoints=numLearningRpts,
  )

  # Learn all FL pairs in each L4 and in each L2
  # Learning in L2 involves choosing a small random number of cells, growing
  # proximal synapses to L4 cells. Growing distal synapses to active cells in
  # each neighboring column. Each column gets its own distal segment.
  exp.learnObjects(pairObjects.provideObjectsToLearn())

  # Verify that all columns learned the pairs
  # numCorrectClassifications = 0
  # for pairId in pairObjects:
  #
  #   obj = pairObjects[pairId]
  #   objectSensations = {}
  #   for c in range(numColumns):
  #     objectSensations[c] = [obj[0]]*settlingTime
  #
  #   inferConfig = {
  #     "object": pairId,
  #     "numSteps": settlingTime,
  #     "pairs": objectSensations,
  #   }
  #
  #   inferenceSDRs = pairObjects.provideObjectToInfer(inferConfig)
  #
  #   exp.infer(inferenceSDRs, objectName=pairId, reset=False)
  #
  #   if exp.isObjectClassified(pairId, minOverlap=30):
  #     numCorrectClassifications += 1
  #
  #   exp.sendReset()
  #
  # print "Classification accuracy for pairs=",100.0*numCorrectClassifications/len(distinctPairs)

  ########################################################################
  #
  # Create "object representations" in L2 by simultaneously invoking the union
  # of all FL pairs in an object and doing some sort of spatial pooling to
  # create L2 representation.
  exp.resetStatistics()
  for objectId in objects:
    # Create one sensation per object consisting of the union of all features
    # and the union of locations.
    ul, uf = objects.getUniqueFeaturesLocationsInObject(objectId)
    print "Object", objectId, "Num unique features:", len(
      uf), "Num unique locations:", len(ul)
    objectSensations = {}
    for c in range(numColumns):
      objectSensations[c] = [(tuple(ul), tuple(uf))] * settlingTime

    inferConfig = {
      "object": objectId,
      "numSteps": settlingTime,
      "pairs": objectSensations,
    }

    inferenceSDRs = objects.provideObjectToInfer(inferConfig)
    exp.infer(inferenceSDRs, objectName="Object " + str(objectId))

  # Compute confusion matrix between all objects as network settles
  for iteration in range(settlingTime):
    confusion = numpy.zeros((numObjects, numObjects))
    for o1 in objects:
      for o2 in objects:
        confusion[o1, o2] = len(
          set(exp.statistics[o1]["Full L2 SDR C0"][iteration]) &
          set(exp.statistics[o2]["Full L2 SDR C0"][iteration]))

    plt.figure()
    plt.imshow(confusion)
    plt.xlabel('Object #')
    plt.ylabel('Object #')
    plt.title("Object overlaps")
    plt.colorbar()
    plt.savefig("confusion_random_10L_5F_" + str(iteration) + ".pdf")
    plt.close()

  for col in range(numColumns):
    print "Diagnostics for column", col
    printColumnPoolerDiagnostics(exp.getAlgorithmInstance(column=col))
    print

  return args
def doExperiment(numColumns, objects, l2Overrides, noiseLevels,
                 numInitialTraversals, noisyFeature, noisyLocation):
    """
    Touch every point on an object 'numInitialTraversals' times, then evaluate
    whether it has inferred the object by touching every point once more and
    checking the number of correctly active and incorrectly active cells.

    @param numColumns (int)
    The number of sensors to use

    @param l2Overrides (dict)
    Parameters for the ColumnPooler
    NOTE(review): accepted but never passed to L4L2Experiment in this body —
    confirm whether it should be forwarded as L2Overrides.

    @param objects (dict)
    A mapping of object names to their features. See 'createRandomObjects'.

    @param noiseLevels (list of floats)
    The noise levels to experiment with. The experiment is run once per noise
    level. Noise is applied at a constant rate to exactly one cortical column.
    It's applied to the same cortical column every time, and this is the
    cortical column that is measured.

    @param noisyFeature (bool)
    Whether to use a noisy feature

    @param noisyLocation (bool)
    Whether to use a noisy location

    @return (defaultdict of lists)
    Maps each noise level to a list of
    (numCorrectlyActiveCells, numIncorrectlyActiveCells) tuples, one per test
    touch per object.
    """
    # Lazily generate one random 40-bit SDR per feature / per location, per
    # column, via defaultdict factories.
    # NOTE(review): these two names are rebound to plain SDR sets inside the
    # touch loop below; harmless because the lambdas are only referenced by
    # the defaultdicts created next, but the shadowing is easy to misread.
    featureSDR = lambda: set(random.sample(xrange(NUM_L4_COLUMNS), 40))
    locationSDR = lambda: set(random.sample(xrange(1024), 40))
    featureSDRsByColumn = [defaultdict(featureSDR) for _ in xrange(numColumns)]
    locationSDRsByColumn = [
        defaultdict(locationSDR) for _ in xrange(numColumns)
    ]

    exp = L4L2Experiment("Experiment",
                         numCorticalColumns=numColumns,
                         inputSize=NUM_L4_COLUMNS,
                         externalInputSize=1024,
                         seed=random.randint(2048, 4096))

    # Learn every object: for each point on the object, each column senses the
    # (location SDR, feature SDR) pair for that point.
    exp.learnObjects(
        dict((objectName, [
            dict((column, (locationSDRsByColumn[column][location],
                           featureSDRsByColumn[column][features[location]]))
                 for column in xrange(numColumns))
            for location in xrange(len(features))
        ]) for objectName, features in objects.iteritems()))

    results = defaultdict(list)

    for noiseLevel in noiseLevels:
        # Try to infer the objects
        for objectName, features in objects.iteritems():
            exp.sendReset()
            inferredL2 = exp.objectL2Representations[objectName]
            sensorPositionsIterator = greedySensorPositions(
                numColumns, len(features))

            # Touch each location at least numInitialTouches times, and then
            # touch it once more, testing it. For each traversal, touch each
            # point on the object ~once. Not once per sensor -- just once. So
            # we translate the "number of traversals" into a "number of
            # touches" according to the number of sensors.
            numTouchesPerTraversal = len(features) / float(numColumns)
            numInitialTouches = int(
                math.ceil(numInitialTraversals * numTouchesPerTraversal))
            numTestTouches = len(features)

            for touch in xrange(numInitialTouches + numTestTouches):
                sensorPositions = next(sensorPositionsIterator)

                # Columns 1..numColumns-1 always sense clean SDRs.
                sensation = dict(
                    (column,
                     (locationSDRsByColumn[column][sensorPositions[column]],
                      featureSDRsByColumn[column][features[
                          sensorPositions[column]]]))
                    for column in xrange(1, numColumns))

                # Add noise to the first column.
                featureSDR = featureSDRsByColumn[0][features[
                    sensorPositions[0]]]
                if noisyFeature:
                    featureSDR = noisy(featureSDR, noiseLevel, NUM_L4_COLUMNS)
                locationSDR = locationSDRsByColumn[0][sensorPositions[0]]
                if noisyLocation:
                    locationSDR = noisy(locationSDR, noiseLevel, 1024)
                sensation[0] = (locationSDR, featureSDR)

                exp.infer([sensation] * TIMESTEPS_PER_SENSATION,
                          reset=False,
                          objectName=objectName)

                # Only the final full traversal counts as the test phase.
                if touch >= numInitialTouches:
                    activeCells = exp.getL2Representations()[0]
                    correctCells = inferredL2[0]
                    results[noiseLevel].append(
                        (len(activeCells & correctCells),
                         len(activeCells - correctCells)))

    return results
def runExperiment(args): """ Run experiment. What did you think this does? args is a dict representing the parameters. We do it this way to support multiprocessing. args contains one or more of the following keys: @param noiseLevel (float) Noise level to add to the locations and features during inference. Default: None @param profile (bool) If True, the network will be profiled after learning and inference. Default: False @param numObjects (int) The number of objects we will train. Default: 10 @param numPoints (int) The number of points on each object. Default: 10 @param numLocations (int) For each point, the number of locations to choose from. Default: 10 @param numFeatures (int) For each point, the number of features to choose from. Default: 10 @param numColumns (int) The total number of cortical columns in network. Default: 2 The method returns the args dict updated with two additional keys: convergencePoint (int) The average number of iterations it took to converge across all objects objects (pairs) The list of objects we trained on """ numObjects = args.get("numObjects", 10) numLocations = args.get("numLocations", 10) numFeatures = args.get("numFeatures", 10) numColumns = args.get("numColumns", 2) profile = args.get("profile", False) noiseLevel = args.get("noiseLevel", None) # TODO: implement this? 
numPoints = args.get("numPoints", 10) trialNum = args.get("trialNum", 42) l2Params = args.get("l2Params", getL2Params()) l4Params = args.get("l4Params", getL4Params()) objectSeed = args.get("objectSeed", 41) # Create the objects objects = createObjectMachine( machineType="simple", numInputBits=20, sensorInputSize=1024, externalInputSize=1024, numCorticalColumns=numColumns, seed=objectSeed, ) objects.createRandomObjects(numObjects, numPoints=numPoints, numLocations=numLocations, numFeatures=numFeatures) # print "Objects are:" # for o in objects: # pairs = objects[o] # pairs.sort() # print str(o) + ": " + str(pairs) # Setup experiment and train the network name = "convergence_O%03d_L%03d_F%03d_C%03d_T%03d" % ( numObjects, numLocations, numFeatures, numColumns, trialNum) exp = L4L2Experiment(name, L2Overrides=l2Params, L4Overrides=l4Params, numCorticalColumns=numColumns, seed=trialNum) exp.learnObjects(objects.provideObjectsToLearn()) L2TimeLearn = 0 L2TimeInfer = 0 if profile: # exp.printProfile(reset=True) L2TimeLearn = getProfileInfo(exp) args.update({"L2TimeLearn": L2TimeLearn}) exp.resetProfile() # For inference, we will check and plot convergence for each object. For each # object, we create a sequence of random sensations for each column. We will # present each sensation for 3 time steps to let it settle and ensure it # converges. 
for objectId in objects: obj = objects[objectId] # Create sequence of sensations for this object for all columns objectSensations = {} for c in range(numColumns): objectCopy = [pair for pair in obj] random.shuffle(objectCopy) # stay multiple steps on each sensation sensations = [] for pair in objectCopy: for _ in xrange(2): sensations.append(pair) objectSensations[c] = sensations inferConfig = { "object": objectId, "numSteps": len(objectSensations[0]), "pairs": objectSensations } exp.infer(objects.provideObjectToInfer(inferConfig), objectName=objectId) if profile: L2TimeInfer += getProfileInfo(exp) exp.resetProfile() # exp.printProfile(reset=True) if profile: L2TimeInfer /= len(objects) args.update({"L2TimeInfer": L2TimeInfer}) convergencePoint = averageConvergencePoint(exp.getInferenceStats(), "L2 Representation", 40) print "objectSeed {} # distal syn {} # proximal syn {}, " \ "# convergence point={:4.2f} train time {:4.3f} infer time {:4.3f}".format( objectSeed, l2Params["sampleSizeDistal"], l2Params["sampleSizeProximal"], convergencePoint, L2TimeLearn, L2TimeInfer) # Return our convergence point as well as all the parameters and objects args.update({"objects": objects.getObjects()}) args.update({"convergencePoint": convergencePoint}) # prepare experiment results numLateralConnections = [] numProximalConnections = [] for l2Columns in exp.L2Columns: numLateralConnections.append( l2Columns._pooler.numberOfDistalSynapses()) numProximalConnections.append( np.sum(l2Columns._pooler.numberOfProximalSynapses())) result = { 'trial': objectSeed, 'L2TimeLearn': args['L2TimeLearn'], 'L2TimeInfer': args['L2TimeInfer'], 'sampleSizeProximal': l2Params["sampleSizeProximal"], 'sampleSizeDistal': l2Params["sampleSizeDistal"], 'numLateralConnections': np.mean(np.array(numLateralConnections)), 'numProximalConnections': np.mean(np.array(numProximalConnections)), 'convergencePoint': args['convergencePoint'] } return result
def test_summing_bayesian(): # Init numCorticalColumnns = 1 numLearningPoints = 3 maxNumSegments = 5 columns_count = 2048 cells_per_column = 16 # Create objects num_objects = 10 num_of_sensations = 5 num_of_input = 15 # Recognition rec_object = 1 repetition = 3 use_noise = True noise = 1 L2Overrides = { "noise": 1e-8, "cellCount": 512, # new: 256 # original: 4096 "inputWidth": cells_per_column * columns_count, # new: 8192 # original: 16384 (?) = cells per column * column count "sdrSize": 40, "useProximalProbabilities": False, "avoidWeightExplosion": True, "useSupport": True } L4Overrides = { "noise": 1e-8, "cellsPerColumn": cells_per_column, # new: 4 # original 32 "columnCount": columns_count, # new: 2048 # original: 2048 "minThreshold": 0.1, "useApicalTiebreak": True } exp1 = L4L2Experiment( 'single_column', implementation='SummingBayesian', L2RegionType="py.BayesianColumnPoolerRegion", L4RegionType="py.BayesianApicalTMPairRegion", L2Overrides=L2Overrides, L4Overrides=L4Overrides, numCorticalColumns=numCorticalColumnns, maxSegmentsPerCell=maxNumSegments, numLearningPoints=numLearningPoints, # number repetitions for learning seed=1) objects, noise_objects = create_object_single(num_objects, num_of_sensations, num_of_input, 1024, noise=noise) # TODO Why 1024 # learn the sensations print "Train objects" exp1.learnObjects(objects) exp1.sendReset() object_representations = [] for o in range(num_objects): rep = exp1.objectL2Representations[o][0] object_representations.append(rep) objs_recognition = objects if not use_noise else noise_objects for _ in range(repetition): np.random.shuffle(objs_recognition[rec_object]) for num, sensation in enumerate(objs_recognition[rec_object]): exp1.infer([sensation], objectName=rec_object, reset=False) object_prediction = exp1.getL2Prediction()[0] print "\nStep: %s" % num printRecognition(object_representations, object_prediction)
def runExperiment(args):
    """
    Run experiment.  What did you think this does?

    args is a dict representing the parameters. We do it this way to support
    multiprocessing. args contains one or more of the following keys:

    @param noiseLevel (float) Noise level to add to the locations and features
           during inference. Default: None
    @param profile (bool) If True, the network will be profiled after learning
           and inference. Default: False
    @param numObjects (int) The number of objects we will train.
           Default: 10
    @param numPoints (int) The number of points on each object.
           Default: 10
    @param numLocations (int) For each point, the number of locations to choose
           from. Default: 10
    @param numFeatures (int) For each point, the number of features to choose
           from. Default: 10
    @param numColumns (int) The total number of cortical columns in network.
           Default: 2

    The method returns the args dict updated with two additional keys:
      convergencePoint (int)  The average number of iterations it took
                              to converge across all objects
      objects          (pairs) The list of objects we trained on
    """
    numObjects = args.get("numObjects", 10)
    numLocations = args.get("numLocations", 10)
    numFeatures = args.get("numFeatures", 10)
    numColumns = args.get("numColumns", 2)
    profile = args.get("profile", False)
    noiseLevel = args.get("noiseLevel", None)  # TODO: implement this?
    numPoints = args.get("numPoints", 10)
    trialNum = args.get("trialNum", 42)

    # Create the objects
    objects = createObjectMachine(
        machineType="simple",
        numInputBits=20,
        sensorInputSize=1024,
        externalInputSize=1024,
        numCorticalColumns=numColumns,
    )
    objects.createRandomObjects(numObjects,
                                numPoints=numPoints,
                                numLocations=numLocations,
                                numFeatures=numFeatures)

    print "Objects are:"
    for o in objects:
        pairs = objects[o]
        pairs.sort()
        print str(o) + ": " + str(pairs)

    # Setup experiment and train the network
    name = "convergence_O%03d_L%03d_F%03d_C%03d_T%03d" % (
        numObjects, numLocations, numFeatures, numColumns, trialNum)
    exp = L4L2Experiment(name, numCorticalColumns=numColumns, seed=trialNum)

    exp.learnObjects(objects.provideObjectsToLearn())
    if profile:
        exp.printProfile(reset=True)

    # For inference, we will check and plot convergence for each object. For
    # each object, we create a sequence of random sensations for each column.
    # We will present each sensation for 3 time steps to let it settle and
    # ensure it converges.
    # NOTE(review): the loop below actually repeats each sensation 2 times
    # (xrange(2)), not 3 — confirm which is intended.
    for objectId in objects:
        obj = objects[objectId]

        # Create sequence of sensations for this object for all columns
        objectSensations = {}
        for c in range(numColumns):
            objectCopy = [pair for pair in obj]
            random.shuffle(objectCopy)
            # stay multiple steps on each sensation
            sensations = []
            for pair in objectCopy:
                for _ in xrange(2):
                    sensations.append(pair)
            objectSensations[c] = sensations

        inferConfig = {
            "object": objectId,
            "numSteps": len(objectSensations[0]),
            "pairs": objectSensations
        }

        exp.infer(objects.provideObjectToInfer(inferConfig),
                  objectName=objectId)
        if profile:
            exp.printProfile(reset=True)

        exp.plotInferenceStats(
            fields=[
                "L2 Representation", "Overlap L2 with object",
                "L4 Representation"
            ],
            experimentID=objectId,
            onePlot=False,
        )

    convergencePoint = averageConvergencePoint(exp.getInferenceStats(),
                                               "L2 Representation", 40)
    print "Average convergence point=", convergencePoint

    # Return our convergence point as well as all the parameters and objects
    args.update({"objects": objects.getObjects()})
    args.update({"convergencePoint": convergencePoint})

    # Can't pickle experiment so can't return it. However this is very useful
    # for debugging when running in a single thread.
    # args.update({"experiment": exp})
    return args
def runExperiment(args):
    """
    Run experiment.  What did you think this does?

    args is a dict representing the parameters. We do it this way to support
    multiprocessing. args contains one or more of the following keys:

    @param featureNoise (float) Noise level to add to the features during
           inference. Default: None
    @param locationNoise (float) Noise level to add to the locations during
           inference. Default: None
    @param numObjects (int) The number of objects we will train.
           Default: 10
    @param numPoints (int) The number of points on each object.
           Default: 10
    @param numLocations (int) For each point, the number of locations to choose
           from. Default: 10
    @param numFeatures (int) For each point, the number of features to choose
           from. Default: 10
    @param numColumns (int) The total number of cortical columns in network.
           Default: 2
    @param networkType (string) The type of network to use. Options are:
           "MultipleL4L2Columns", "MultipleL4L2ColumnsWithTopology" and
           "MultipleL4L2ColumnsWithRandomTopology".
           Default: "MultipleL4L2Columns"
    @param longDistanceConnections (float) The probability that a column will
           connect to a distant column. Only relevant when using the random
           topology network type. If > 1, will instead be taken as desired
           number of long-distance connections per column.
    @param settlingTime (int) Number of iterations we wait to let columns
           stabilize. Important for multicolumn experiments with lateral
           connections.
    @param includeRandomLocation (bool) If True, a random location SDR will be
           generated during inference for each feature.
    @param enableFeedback (bool) If True, enable feedback, default is True
    @param numAmbiguousLocations (int) number of ambiguous locations. Ambiguous
           locations will present during inference if this parameter is set to
           be a positive number

    The method returns the args dict updated with multiple additional keys
    representing accuracy metrics.
    """
    numObjects = args.get("numObjects", 10)
    numLocations = args.get("numLocations", 10)
    numFeatures = args.get("numFeatures", 10)
    numColumns = args.get("numColumns", 2)
    networkType = args.get("networkType", "MultipleL4L2Columns")
    longDistanceConnections = args.get("longDistanceConnections", 0)
    locationNoise = args.get("locationNoise", 0.0)
    featureNoise = args.get("featureNoise", 0.0)
    numPoints = args.get("numPoints", 10)
    trialNum = args.get("trialNum", 42)
    plotInferenceStats = args.get("plotInferenceStats", True)
    settlingTime = args.get("settlingTime", 3)
    includeRandomLocation = args.get("includeRandomLocation", False)
    enableFeedback = args.get("enableFeedback", True)
    numAmbiguousLocations = args.get("numAmbiguousLocations", 0)
    # NOTE(review): numInferenceRpts is read but not used anywhere in this
    # body — confirm whether repeated inference was meant to be wired in.
    numInferenceRpts = args.get("numInferenceRpts", 1)
    l2Params = args.get("l2Params", None)
    l4Params = args.get("l4Params", None)

    # Create the objects
    objects = createObjectMachine(machineType="simple",
                                  numInputBits=20,
                                  sensorInputSize=150,
                                  externalInputSize=2400,
                                  numCorticalColumns=numColumns,
                                  numFeatures=numFeatures,
                                  numLocations=numLocations,
                                  seed=trialNum)
    objects.createRandomObjects(numObjects,
                                numPoints=numPoints,
                                numLocations=numLocations,
                                numFeatures=numFeatures)

    r = objects.objectConfusion()
    print "Average common pairs in objects=", r[0],
    print ", locations=", r[1], ", features=", r[2]

    # print "Total number of objects created:",len(objects.getObjects())
    # print "Objects are:"
    # for o in objects:
    #   pairs = objects[o]
    #   pairs.sort()
    #   print str(o) + ": " + str(pairs)

    # Setup experiment and train the network
    name = "convergence_O%03d_L%03d_F%03d_C%03d_T%03d" % (
        numObjects, numLocations, numFeatures, numColumns, trialNum)
    exp = L4L2Experiment(
        name,
        numCorticalColumns=numColumns,
        L2Overrides=l2Params,
        L4Overrides=l4Params,
        networkType=networkType,
        longDistanceConnections=longDistanceConnections,
        inputSize=150,
        externalInputSize=2400,
        numInputBits=20,
        seed=trialNum,
        enableFeedback=enableFeedback,
    )

    exp.learnObjects(objects.provideObjectsToLearn())

    # For inference, we will check and plot convergence for each object. For
    # each object, we create a sequence of random sensations for each column.
    # We will present each sensation for settlingTime time steps to let it
    # settle and ensure it converges.
    numCorrectClassifications = 0
    for objectId in objects:
        exp.sendReset()

        obj = objects[objectId]
        objectSensations = {}
        for c in range(numColumns):
            objectSensations[c] = []

        if numColumns > 1:
            # Create sequence of random sensations for this object for all
            # columns At any point in time, ensure each column touches a unique
            # loc,feature pair on the object. It is ok for a given column to
            # sense a loc,feature pair more than once. The total number of
            # sensations is equal to the number of points on the object.
            for sensationNumber in range(len(obj)):
                # Randomly shuffle points for each sensation
                objectCopy = [pair for pair in obj]
                random.shuffle(objectCopy)
                for c in range(numColumns):
                    # stay multiple steps on each sensation
                    for _ in xrange(settlingTime):
                        objectSensations[c].append(objectCopy[c])
        else:
            # Create sequence of sensations for this object for one column. The
            # total number of sensations is equal to the number of points on the
            # object. No point should be visited more than once.
            objectCopy = [pair for pair in obj]
            random.shuffle(objectCopy)
            for pair in objectCopy:
                # stay multiple steps on each sensation
                for _ in xrange(settlingTime):
                    objectSensations[0].append(pair)

        inferConfig = {
            "object": objectId,
            "numSteps": len(objectSensations[0]),
            "pairs": objectSensations,
            "noiseLevel": featureNoise,
            "locationNoise": locationNoise,
            "includeRandomLocation": includeRandomLocation,
            "numAmbiguousLocations": numAmbiguousLocations,
        }

        inferenceSDRs = objects.provideObjectToInfer(inferConfig)
        exp.infer(inferenceSDRs, objectName=objectId, reset=False)

        classificationResults = exp.getCurrentClassification(
            30, includeZeros=False)
        # print "Classification for object",objectId, "=", classificationResults
        # Count as correct only when the true object is fully classified and is
        # the single remaining candidate.
        if (classificationResults.get(objectId, 0.0) == 1.0
                and len(classificationResults) == 1):
            numCorrectClassifications += 1

        if plotInferenceStats:
            exp.plotInferenceStats(
                fields=[
                    "L2 Representation", "Overlap L2 with object",
                    "L4 Representation"
                ],
                experimentID=objectId,
                onePlot=False,
            )

    convergencePoint = exp.averageConvergencePoint("L2 Representation", 30, 40,
                                                   settlingTime)
    classificationAccuracy = float(numCorrectClassifications) / numObjects

    print "# objects {} # features {} # locations {} # columns {} trial # {} network type {}".format(
        numObjects, numFeatures, numLocations, numColumns, trialNum,
        networkType)
    print "Average convergence point=", convergencePoint
    print "Classification accuracy=", classificationAccuracy
    print

    # Return our convergence point as well as all the parameters and objects
    args.update({"objects": objects.getObjects()})
    args.update({"convergencePoint": convergencePoint})
    args.update({"classificationAccuracy": classificationAccuracy})

    # Can't pickle experiment so can't return it for batch multiprocessing runs.
    # However this is very useful for debugging when running in a single thread.
    if plotInferenceStats:
        args.update({"experiment": exp})
    return args
def runStretch(noiseLevel=None, profile=False): """ Stretch test that learns a lot of objects. Parameters: ---------------------------- @param noiseLevel (float) Noise level to add to the locations and features during inference @param profile (bool) If True, the network will be profiled after learning and inference """ exp = L4L2Experiment( "stretch_L10_F10_C2", numCorticalColumns=2, ) objects = createObjectMachine( machineType="simple", numInputBits=20, sensorInputSize=1024, externalInputSize=1024, numCorticalColumns=2, ) objects.createRandomObjects(10, 10, numLocations=10, numFeatures=10) print "Objects are:" for object, pairs in objects.objects.iteritems(): print str(object) + ": " + str(pairs) exp.learnObjects(objects.provideObjectsToLearn()) if profile: exp.printProfile(reset=True) # For inference, we will check and plot convergence for object 0. We create a # sequence of random sensations for each column. We will present each # sensation for 4 time steps to let it settle and ensure it converges. 
objectCopy1 = [pair for pair in objects[0]] objectCopy2 = [pair for pair in objects[0]] objectCopy3 = [pair for pair in objects[0]] random.shuffle(objectCopy1) random.shuffle(objectCopy2) random.shuffle(objectCopy3) # stay multiple steps on each sensation objectSensations1 = [] for pair in objectCopy1: for _ in xrange(4): objectSensations1.append(pair) # stay multiple steps on each sensation objectSensations2 = [] for pair in objectCopy2: for _ in xrange(4): objectSensations2.append(pair) # stay multiple steps on each sensation objectSensations3 = [] for pair in objectCopy3: for _ in xrange(4): objectSensations3.append(pair) inferConfig = { "numSteps": len(objectSensations1), "noiseLevel": noiseLevel, "pairs": { 0: objectSensations1, 1: objectSensations2, # 2: objectSensations3, # Uncomment for 3 columns } } exp.infer(objects.provideObjectToInfer(inferConfig), objectName=0) if profile: exp.printProfile() exp.plotInferenceStats( fields=[ "L2 Representation", "Overlap L2 with object", "L4 Representation" ], onePlot=False, )
def runExperiment():
    """
    We will run two experiments side by side, with either single column
    or 3 columns
    """
    numColumns = 3
    numFeatures = 3
    numPoints = 10
    numLocations = 10
    numObjects = 10
    numRptsPerSensation = 2

    objectMachine = createObjectMachine(machineType="simple",
                                        numInputBits=20,
                                        sensorInputSize=1024,
                                        externalInputSize=1024,
                                        numCorticalColumns=3,
                                        seed=40)
    objectMachine.createRandomObjects(numObjects,
                                      numPoints=numPoints,
                                      numLocations=numLocations,
                                      numFeatures=numFeatures)
    objects = objectMachine.provideObjectsToLearn()

    # single-out the inputs to the column #1
    objectsSingleColumn = {}
    for i in range(numObjects):
        featureLocations = []
        for j in range(numLocations):
            featureLocations.append({0: objects[i][j][0]})
        objectsSingleColumn[i] = featureLocations

    # we will run two experiments side by side, with either single column
    # or 3 columns
    exp3 = L4L2Experiment('three_column', numCorticalColumns=3, seed=1)
    exp1 = L4L2Experiment('single_column', numCorticalColumns=1, seed=1)

    print "train single column "
    exp1.learnObjects(objectsSingleColumn)
    print "train multi-column "
    exp3.learnObjects(objects)

    # test on the first object
    objectId = 0
    obj = objectMachine[objectId]

    # Create sequence of sensations for this object for all columns
    # We need to set the seed to get specific convergence points for the red
    # rectangle in the graph.
    objectSensations = {}
    random.seed(12)
    for c in range(numColumns):
        objectCopy = [pair for pair in obj]
        random.shuffle(objectCopy)
        # stay multiple steps on each sensation
        sensations = []
        for pair in objectCopy:
            for _ in xrange(numRptsPerSensation):
                sensations.append(pair)
        objectSensations[c] = sensations

    # Convert per-column (location, feature) pairs into per-step SDR dicts;
    # the single-column run reuses column 0's SDRs from the multi-column set.
    sensationStepsSingleColumn = []
    sensationStepsMultiColumn = []
    for step in xrange(len(objectSensations[0])):
        pairs = [objectSensations[col][step] for col in xrange(numColumns)]
        sdrs = objectMachine._getSDRPairs(pairs)
        sensationStepsMultiColumn.append(sdrs)
        sensationStepsSingleColumn.append({0: sdrs[0]})

    print "inference: multi-columns "
    exp3.sendReset()
    l2ActiveCellsMultiColumn = []
    L2ActiveCellNVsTimeMultiColumn = []
    for sensation in sensationStepsMultiColumn:
        exp3.infer([sensation], objectName=objectId, reset=False)
        l2ActiveCellsMultiColumn.append(exp3.getL2Representations())
        # Track the mean number of active L2 cells per column at each step.
        activeCellNum = 0
        for c in range(numColumns):
            activeCellNum += len(exp3.getL2Representations()[c])
        L2ActiveCellNVsTimeMultiColumn.append(activeCellNum / numColumns)

    print "inference: single column "
    exp1.sendReset()
    l2ActiveCellsSingleColumn = []
    L2ActiveCellNVsTimeSingleColumn = []
    for sensation in sensationStepsSingleColumn:
        exp1.infer([sensation], objectName=objectId, reset=False)
        l2ActiveCellsSingleColumn.append(exp1.getL2Representations())
        L2ActiveCellNVsTimeSingleColumn.append(
            len(exp1.getL2Representations()[0]))

    # Used to figure out where to put the red rectangle!
    # (first step at which column 0's L2 activity shrinks to exactly sdrSize)
    sdrSize = exp1.config["L2Params"]["sdrSize"]
    singleColumnHighlight = next(
        (idx for idx, value in enumerate(l2ActiveCellsSingleColumn)
         if len(value[0]) == sdrSize), None)
    sdrSize = exp3.config["L2Params"]["sdrSize"]
    multiColumnHighlight = next(
        (idx for idx, value in enumerate(l2ActiveCellsMultiColumn)
         if len(value[0]) == sdrSize), None)

    plotActivity(l2ActiveCellsMultiColumn, multiColumnHighlight)
    plotActivity(l2ActiveCellsSingleColumn, singleColumnHighlight)
    plotL2ObjectRepresentations(exp1)
def createExperiment(logFilename):
    """
    Build a call-logging L4L2 experiment, train it on the Thing objects, and
    persist the recorded call log to logFilename.
    """
    # Typically this would be done by Thing
    experiment = L4L2Experiment("shared_features", logCalls=True)
    experiment.learnObjects(thingObjects)
    LoggingDecorator.save(experiment.callLog, logFilename)
def trainNetwork(objects, numColumns):
    """
    Train an L4L2 network on the given objects, then run inference on each
    object and print the overlap of the resulting L2 output with every learned
    object representation.

    @param objects     Object machine holding the objects to learn/infer
    @param numColumns  (int) Number of cortical columns in the network
    """
    exp = L4L2Experiment("shared_features", numCorticalColumns=numColumns)
    exp.learnObjects(objects.provideObjectsToLearn())

    settlingTime = 3
    L2Representations = exp.objectL2Representations
    print "Learned object representations:"
    pprint.pprint(L2Representations, width=400)
    print "=========================="

    # For inference, we will check and plot convergence for each object. For
    # each object, we create a sequence of random sensations for each column.
    # We will present each sensation for settlingTime time steps to let it
    # settle and ensure it converges.
    for objectId in objects:
        obj = objects[objectId]

        objectSensations = {}
        for c in range(numColumns):
            objectSensations[c] = []

        if numColumns > 1:
            # Create sequence of random sensations for this object for all
            # columns At any point in time, ensure each column touches a unique
            # loc,feature pair on the object. It is ok for a given column to
            # sense a loc,feature pair more than once. The total number of
            # sensations is equal to the number of points on the object.
            for sensationNumber in range(len(obj)):
                # Randomly shuffle points for each sensation
                objectCopy = [pair for pair in obj]
                random.shuffle(objectCopy)
                for c in range(numColumns):
                    # stay multiple steps on each sensation
                    for _ in xrange(settlingTime):
                        objectSensations[c].append(objectCopy[c])
        else:
            # Create sequence of sensations for this object for one column. The
            # total number of sensations is equal to the number of points on the
            # object. No point should be visited more than once.
            objectCopy = [pair for pair in obj]
            # random.shuffle(objectCopy)
            for pair in objectCopy:
                # stay multiple steps on each sensation
                for _ in xrange(settlingTime):
                    objectSensations[0].append(pair)

        inferConfig = {
            "object": objectId,
            "numSteps": len(objectSensations[0]),
            "pairs": objectSensations,
            "includeRandomLocation": False,
        }

        inferenceSDRs = objects.provideObjectToInfer(inferConfig)
        exp.infer(inferenceSDRs, objectName=objectId, reset=False)

        print "Output for {}: {}".format(objectId, exp.getL2Representations())
        # NOTE(review): the label below prints objectId for every comparison,
        # although the intersection is taken against object keys()[i] — the
        # label probably should name the compared object; confirm intent.
        for i in range(len(objects)):
            print "Intersection with {}:{}".format(
                objectId,
                len(exp.getL2Representations()[0]
                    & L2Representations[objects.objects.keys()[i]][0]))
        exp.sendReset()
numLocations=numLocations, numFeatures=numFeatures) objects = objectMachine.provideObjectsToLearn() # single-out the inputs to the column #1 objectsSingleColumn = {} for i in range(numObjects): featureLocations = [] for j in range(numLocations): featureLocations.append({0: objects[i][j][0]}) objectsSingleColumn[i] = featureLocations # we will run two experiments side by side, with either single column # or 3 columns exp3 = L4L2Experiment('three_column', numCorticalColumns=3, seed=1) exp1 = L4L2Experiment('single_column', numCorticalColumns=1, seed=1) print "train single column " exp1.learnObjects(objectsSingleColumn) print "train multi-column " exp3.learnObjects(objects) # test on the first object objectId = 0 obj = objectMachine[objectId] # Create sequence of sensations for this object for all columns # We need to set the seed to get specific convergence points for the red # rectangle in the graph.
def runExperiment():
    """
    We will run two experiments side by side, with either single column
    or 3 columns
    (The 3-column variant is currently commented out; only the summing-Bayesian
    single-column network is exercised.)
    """
    numColumns = 1  # 3
    numFeatures = 3
    numPoints = 10
    numLocations = 10
    numObjects = 10  # 2
    numRptsPerSensation = 2

    objectMachine = createObjectMachine(
        machineType="simple",
        numInputBits=20,
        sensorInputSize=1024,
        externalInputSize=1024,
        numCorticalColumns=numColumns,
        seed=40,
    )
    objectMachine.createRandomObjects(numObjects,
                                      numPoints=numPoints,
                                      numLocations=numLocations,
                                      numFeatures=numFeatures)
    objects = objectMachine.provideObjectsToLearn()

    # single-out the inputs to the column #1
    objectsSingleColumn = {}
    for i in range(numObjects):
        featureLocations = []
        for j in range(numLocations):
            featureLocations.append({0: objects[i][j][0]})
        objectsSingleColumn[i] = featureLocations

    maxNumSegments = 2

    # we will run two experiments side by side, with either single column
    # or 3 columns
    # exp3 = L4L2Experiment(
    #   'three_column',
    #   implementation='BayesianApicalTiebreak',
    #   L4RegionType="py.BayesianApicalTMPairRegion",
    #   numCorticalColumns=3,
    #   maxSegmentsPerCell=5,
    #   seed=1
    # )
    exp1 = L4L2Experiment('single_column',
                          implementation='SummingBayesian',
                          L2RegionType="py.BayesianColumnPoolerRegion",
                          L4RegionType="py.BayesianApicalTMPairRegion",
                          numCorticalColumns=1,
                          maxSegmentsPerCell=maxNumSegments,
                          seed=1)

    print "train single column "
    exp1.learnObjects(objectsSingleColumn)
    # print "train multi-column "
    # exp3.learnObjects(objects)

    # test on the first object
    objectId = 0
    obj = objectMachine[objectId]

    # Create sequence of sensations for this object for all columns
    # We need to set the seed to get specific convergence points for the red
    # rectangle in the graph.
    objectSensations = {}
    random.seed(12)
    for c in range(numColumns):
        objectCopy = [pair for pair in obj]
        random.shuffle(objectCopy)
        # stay multiple steps on each sensation
        sensations = []
        for pair in objectCopy:
            for _ in xrange(numRptsPerSensation):
                sensations.append(pair)
        objectSensations[c] = sensations

    # Convert per-column (location, feature) pairs into per-step SDR dicts.
    sensationStepsSingleColumn = []
    sensationStepsMultiColumn = []
    for step in xrange(len(objectSensations[0])):
        pairs = [objectSensations[col][step] for col in xrange(numColumns)]
        sdrs = objectMachine._getSDRPairs(pairs)
        sensationStepsMultiColumn.append(sdrs)
        sensationStepsSingleColumn.append({0: sdrs[0]})

    # print "inference: multi-columns "
    # exp3.sendReset()
    # l2ActiveCellsMultiColumn = []
    # L2ActiveCellNVsTimeMultiColumn = []
    # for sensation in sensationStepsMultiColumn:
    #   exp3.infer([sensation], objectName=objectId, reset=False)
    #   l2ActiveCellsMultiColumn.append(exp3.getL2Representations())
    #   activeCellNum = 0
    #   for c in range(numColumns):
    #     activeCellNum += len(exp3.getL2Representations()[c])
    #   L2ActiveCellNVsTimeMultiColumn.append(activeCellNum / numColumns)

    print "inference: single column "
    exp1.sendReset()
    l2ActiveCellsSingleColumn = []
    L2ActiveCellNVsTimeSingleColumn = []
    for sensation in sensationStepsSingleColumn:
        exp1.infer([sensation], objectName=objectId, reset=False)
        rep = exp1.getL2Representations()
        l2ActiveCellsSingleColumn.append(rep)
        print "\n\nRepresentation", rep
        print "Length Representation", len(rep[0])
        L2ActiveCellNVsTimeSingleColumn.append(len(rep[0]))

    # Used to figure out where to put the red rectangle!
    # (first step with exactly sdrSize active L2 cells, and first step where
    # the L2 output equals the stored representation of object 0)
    sdrSize = exp1.config["L2Params"]["sdrSize"]
    singleColumnHighlight = next(
        (idx for idx, value in enumerate(l2ActiveCellsSingleColumn)
         if len(value[0]) == sdrSize), None)
    firstObjectRepresentation = exp1.objectL2Representations[0][0]
    converged = next(
        (idx for idx, value in enumerate(l2ActiveCellsSingleColumn)
         if (value[0] == firstObjectRepresentation)), None)

    print "Exactly SDR-Size activity (%s) after %s steps" % (
        sdrSize, singleColumnHighlight)
    print "Converged to first object representation after %s steps" % converged
    print "First Object representation", firstObjectRepresentation
    print "L2 Output over steps", l2ActiveCellsSingleColumn
def runBasic(noiseLevel=None, profile=False):
    """
    Basic continuous-location experiment: learn a few locations on four simple
    geometric objects, then infer one of them.

    This experiment is mostly used for testing the pipeline, as the learned
    locations are too random and sparse to actually perform inference.

    Parameters:
    ----------------------------
    @param noiseLevel (float)
    Noise level to add to the locations and features during inference

    @param profile (bool)
    If True, the network will be profiled after learning and inference
    """
    experiment = L4L2Experiment("basic_continuous", numCorticalColumns=2)

    machine = createObjectMachine(
        machineType="continuous",
        numInputBits=21,
        sensorInputSize=1024,
        externalInputSize=1024,
        numCorticalColumns=2,
    )

    # Four basic shapes to learn from.
    machine.addObject(Sphere(radius=20), name="sphere")
    machine.addObject(Cylinder(height=50, radius=20), name="cylinder")
    machine.addObject(Box(dimensions=[
        10,
        20,
        30,
    ]), name="box")
    machine.addObject(Cube(width=20), name="cube")

    learnConfig = {
        "sphere": [("surface", 10)],
        # the two learning config below will be exactly the same
        "box": [("face", 5), ("edge", 5), ("vertex", 5)],
        "cube": [(feature, 5) for feature in machine["cube"].getFeatures()],
        "cylinder": [(feature, 5)
                     for feature in machine["cylinder"].getFeatures()]
    }

    experiment.learnObjects(
        machine.provideObjectsToLearn(learnConfig, plot=True), reset=True)
    if profile:
        experiment.printProfile()

    inferConfig = {
        "numSteps": 4,
        "noiseLevel": noiseLevel,
        "objectName": "cube",
        "pairs": {
            0: ["face", "face", "edge", "edge"],
            1: ["edge", "face", "face", "edge"]
        }
    }

    experiment.infer(machine.provideObjectToInfer(inferConfig, plot=True),
                     objectName="cube",
                     reset=True)
    if profile:
        experiment.printProfile()

    experiment.plotInferenceStats(fields=[
        "L2 Representation", "Overlap L2 with object", "L4 Representation"
    ])