def testZeroActiveColumns(self):
    """After a step with no active columns, the TM's active, winner, and
    predictive cell collections must all be empty, even if the previous step
    produced activity and a prediction.
    """
    tm = TemporalMemory(
        columnDimensions=[32],
        cellsPerColumn=4,
        activationThreshold=3,
        initialPermanence=.21,
        connectedPermanence=.5,
        minThreshold=2,
        maxNewSynapseCount=3,
        permanenceIncrement=.10,
        permanenceDecrement=.10,
        predictedSegmentDecrement=0.0,
        seed=42)

    previousActiveColumns = [0]
    previousActiveCells = [0, 1, 2, 3]
    expectedActiveCells = [4]

    # Wire one segment on the cell we expect to be predicted, connecting it
    # to every previously active cell at connected permanence.
    segment = tm.connections.createSegment(expectedActiveCells[0])
    for presynapticCell in previousActiveCells:
        tm.connections.createSynapse(segment, presynapticCell, .5)

    tm.compute(previousActiveColumns, True)
    # The first step must have produced activity and a prediction.
    self.assertFalse(len(tm.getActiveCells()) == 0)
    self.assertFalse(len(tm.getWinnerCells()) == 0)
    self.assertFalse(len(tm.getPredictiveCells()) == 0)

    # Now feed an empty set of columns: every state collection should clear.
    zeroColumns = []
    tm.compute(zeroColumns, True)
    self.assertTrue(len(tm.getActiveCells()) == 0)
    self.assertTrue(len(tm.getWinnerCells()) == 0)
    self.assertTrue(len(tm.getPredictiveCells()) == 0)
def testZeroActiveColumns(self):
    """Computing with an empty column list clears all TM state (active,
    winner, and predictive cells), even when a prediction was in flight.
    """
    tm = TemporalMemory(
        columnDimensions=[32],
        cellsPerColumn=4,
        activationThreshold=3,
        initialPermanence=.21,
        connectedPermanence=.5,
        minThreshold=2,
        maxNewSynapseCount=3,
        permanenceIncrement=.10,
        permanenceDecrement=.10,
        predictedSegmentDecrement=0.0,
        seed=42)

    columnsT0 = [0]
    cellsT0 = [0, 1, 2, 3]
    predictedCells = [4]

    # Build a segment on the predicted cell, fully connected to step-0 cells.
    seg = tm.connections.createSegment(predictedCells[0])
    tm.connections.createSynapse(seg, cellsT0[0], .5)
    tm.connections.createSynapse(seg, cellsT0[1], .5)
    tm.connections.createSynapse(seg, cellsT0[2], .5)
    tm.connections.createSynapse(seg, cellsT0[3], .5)

    tm.compute(columnsT0, True)
    # Step 0 produced activity, winners, and a prediction.
    self.assertFalse(len(tm.getActiveCells()) == 0)
    self.assertFalse(len(tm.getWinnerCells()) == 0)
    self.assertFalse(len(tm.getPredictiveCells()) == 0)

    # Step 1 with zero active columns: everything must be wiped.
    zeroColumns = []
    tm.compute(zeroColumns, True)
    self.assertTrue(len(tm.getActiveCells()) == 0)
    self.assertTrue(len(tm.getWinnerCells()) == 0)
    self.assertTrue(len(tm.getPredictiveCells()) == 0)
def testWriteRead(self):
    """Round-trip a TemporalMemory through capnp serialization and verify the
    deserialized copy is equal and computes identically afterwards.
    """
    tm1 = TemporalMemory(
        columnDimensions=[100],
        cellsPerColumn=4,
        activationThreshold=7,
        initialPermanence=0.37,
        connectedPermanence=0.58,
        minThreshold=4,
        maxNewSynapseCount=18,
        permanenceIncrement=0.23,
        permanenceDecrement=0.08,
        seed=91)

    # Feed some data through before serializing.
    self.patternMachine = PatternMachine(100, 4)
    self.sequenceMachine = SequenceMachine(self.patternMachine)
    sequence = self.sequenceMachine.generateFromNumbers(range(5))
    for _ in range(3):
        for pattern in sequence:
            tm1.compute(pattern)

    proto1 = TemporalMemoryProto_capnp.TemporalMemoryProto.new_message()
    tm1.write(proto1)

    # Round-trip the proto through a temp file.
    with tempfile.TemporaryFile() as f:
        proto1.write(f)
        f.seek(0)
        proto2 = TemporalMemoryProto_capnp.TemporalMemoryProto.read(f)

    # Load the deserialized proto.
    tm2 = TemporalMemory.read(proto2)

    # The two temporal memory objects must have the same attributes.
    self.assertEqual(tm1, tm2)

    def assertSameState():
        # Both instances must agree on every cell collection and on the
        # synapse graph.
        self.assertEqual(set(tm1.getActiveCells()), set(tm2.getActiveCells()))
        self.assertEqual(set(tm1.getPredictiveCells()),
                         set(tm2.getPredictiveCells()))
        self.assertEqual(set(tm1.getWinnerCells()), set(tm2.getWinnerCells()))
        self.assertEqual(tm1.connections, tm2.connections)

    # Run a couple of records through after deserializing; results must match.
    for patternIndex in (0, 3):
        tm1.compute(self.patternMachine.get(patternIndex))
        tm2.compute(self.patternMachine.get(patternIndex))
        assertSameState()
def testWriteRead(self):
    """Serialize a trained TemporalMemory to a capnp proto, read it back,
    and check both equality and identical subsequent behavior.
    """
    tm1 = TemporalMemory(
        columnDimensions=[100],
        cellsPerColumn=4,
        activationThreshold=7,
        initialPermanence=0.37,
        connectedPermanence=0.58,
        minThreshold=4,
        maxNewSynapseCount=18,
        permanenceIncrement=0.23,
        permanenceDecrement=0.08,
        seed=91)

    # Run some data through before serializing.
    self.patternMachine = PatternMachine(100, 4)
    self.sequenceMachine = SequenceMachine(self.patternMachine)
    sequence = self.sequenceMachine.generateFromNumbers(range(5))
    for _ in range(3):
        for pattern in sequence:
            tm1.compute(pattern)

    proto1 = TemporalMemoryProto_capnp.TemporalMemoryProto.new_message()
    tm1.write(proto1)

    # Write the proto to a temp file and read it back into a new proto.
    with tempfile.TemporaryFile() as f:
        proto1.write(f)
        f.seek(0)
        proto2 = TemporalMemoryProto_capnp.TemporalMemoryProto.read(f)

    # Load the deserialized proto.
    tm2 = TemporalMemory.read(proto2)

    # Check that the two temporal memory objects have the same attributes.
    self.assertEqual(tm1, tm2)

    # Run a couple records through after deserializing; results must match.
    tm1.compute(self.patternMachine.get(0))
    tm2.compute(self.patternMachine.get(0))

    self.assertEqual(set(tm1.getActiveCells()), set(tm2.getActiveCells()))
    self.assertEqual(set(tm1.getPredictiveCells()),
                     set(tm2.getPredictiveCells()))
    self.assertEqual(set(tm1.getWinnerCells()), set(tm2.getWinnerCells()))
    self.assertEqual(tm1.connections, tm2.connections)

    tm1.compute(self.patternMachine.get(3))
    tm2.compute(self.patternMachine.get(3))

    self.assertEqual(set(tm1.getActiveCells()), set(tm2.getActiveCells()))
    self.assertEqual(set(tm1.getPredictiveCells()),
                     set(tm2.getPredictiveCells()))
    self.assertEqual(set(tm1.getWinnerCells()), set(tm2.getWinnerCells()))
    self.assertEqual(tm1.connections, tm2.connections)
def testActivateCorrectlyPredictiveCells(self):
    """A cell driven into the predictive state by an active segment becomes
    the sole active cell when its column turns on next step.
    """
    tm = TemporalMemory(
        columnDimensions=[32],
        cellsPerColumn=4,
        activationThreshold=3,
        initialPermanence=.21,
        connectedPermanence=.5,
        minThreshold=2,
        maxNewSynapseCount=3,
        permanenceIncrement=.10,
        permanenceDecrement=.10,
        predictedSegmentDecrement=0.0,
        seed=42)

    previousActiveColumns = [0]
    activeColumns = [1]
    previousActiveCells = [0, 1, 2, 3]
    expectedActiveCells = [4]

    # Connect one segment on the expected cell to every step-0 cell.
    activeSegment = tm.connections.createSegment(expectedActiveCells[0])
    for presynapticCell in previousActiveCells:
        tm.connections.createSynapse(activeSegment, presynapticCell, .5)

    tm.compute(previousActiveColumns, True)
    self.assertEqual(expectedActiveCells, tm.getPredictiveCells())
    tm.compute(activeColumns, True)
    self.assertEqual(expectedActiveCells, tm.getActiveCells())
def testActivateCorrectlyPredictiveCells(self):
    """Correctly predicted cells — and only those — activate when their
    column comes on.
    """
    tm = TemporalMemory(
        columnDimensions=[32],
        cellsPerColumn=4,
        activationThreshold=3,
        initialPermanence=.21,
        connectedPermanence=.5,
        minThreshold=2,
        maxNewSynapseCount=3,
        permanenceIncrement=.10,
        permanenceDecrement=.10,
        predictedSegmentDecrement=0.0,
        seed=42)

    columnsT0 = [0]
    columnsT1 = [1]
    cellsT0 = [0, 1, 2, 3]
    predictedCells = [4]

    # One distal segment on the predicted cell, connected to all T0 cells.
    distalSegment = tm.connections.createSegment(predictedCells[0])
    tm.connections.createSynapse(distalSegment, cellsT0[0], .5)
    tm.connections.createSynapse(distalSegment, cellsT0[1], .5)
    tm.connections.createSynapse(distalSegment, cellsT0[2], .5)
    tm.connections.createSynapse(distalSegment, cellsT0[3], .5)

    tm.compute(columnsT0, True)
    # The segment fires, putting cell 4 into the predictive state.
    self.assertEqual(predictedCells, tm.getPredictiveCells())
    tm.compute(columnsT1, True)
    # Column 1 confirms the prediction; only cell 4 activates.
    self.assertEqual(predictedCells, tm.getActiveCells())
def testNoChangeToMatchingSegmentsInPredictedActiveColumn(self):
    """Matching (sub-threshold) segments in a correctly predicted column must
    keep their permanences untouched — only the active segment learns.
    """
    tm = TemporalMemory(
        columnDimensions=[32],
        cellsPerColumn=4,
        activationThreshold=3,
        initialPermanence=.21,
        connectedPermanence=.50,
        minThreshold=2,
        maxNewSynapseCount=3,
        permanenceIncrement=.10,
        permanenceDecrement=.10,
        predictedSegmentDecrement=0.0,
        seed=42)

    prevColumns = [0]
    currColumns = [1]
    prevCells = [0, 1, 2, 3]
    predictedCells = [4]
    burstingCells = [5, 6, 7]

    # Supra-threshold segment on the predicted cell: this one drives the
    # prediction and is the only segment that should learn.
    activeSegment = tm.connections.createSegment(predictedCells[0])
    for presynapticCell in prevCells:
        tm.connections.createSynapse(activeSegment, presynapticCell, .5)

    # Sub-threshold ("matching") segment on the same predicted cell.
    matchingSegmentOnSameCell = tm.connections.createSegment(
        predictedCells[0])
    s1 = tm.connections.createSynapse(matchingSegmentOnSameCell,
                                      prevCells[0], .3)
    s2 = tm.connections.createSynapse(matchingSegmentOnSameCell,
                                      prevCells[1], .3)

    # Sub-threshold segment on a different cell in the same column.
    matchingSegmentOnOtherCell = tm.connections.createSegment(
        burstingCells[0])
    s3 = tm.connections.createSynapse(matchingSegmentOnOtherCell,
                                      prevCells[0], .3)
    s4 = tm.connections.createSynapse(matchingSegmentOnOtherCell,
                                      prevCells[1], .3)

    tm.compute(prevColumns, True)
    self.assertEqual(predictedCells, tm.getPredictiveCells())
    tm.compute(currColumns, True)

    # None of the matching-segment synapses may have been modified.
    for synapse in (s1, s2, s3, s4):
        self.assertAlmostEqual(
            .3, tm.connections.dataForSynapse(synapse).permanence)
def testNoChangeToMatchingSegmentsInPredictedActiveColumn(self):
    """When a column's prediction is confirmed, segments that merely matched
    (below activation threshold) are left alone: their synapse permanences
    stay exactly where they were.
    """
    tm = TemporalMemory(
        columnDimensions=[32],
        cellsPerColumn=4,
        activationThreshold=3,
        initialPermanence=.21,
        connectedPermanence=.50,
        minThreshold=2,
        maxNewSynapseCount=3,
        permanenceIncrement=.10,
        permanenceDecrement=.10,
        predictedSegmentDecrement=0.0,
        seed=42)

    previousActiveColumns = [0]
    activeColumns = [1]
    previousActiveCells = [0, 1, 2, 3]
    expectedActiveCells = [4]
    otherBurstingCells = [5, 6, 7]

    # The segment that actually predicts: connected to all four T0 cells.
    activeSegment = tm.connections.createSegment(expectedActiveCells[0])
    tm.connections.createSynapse(activeSegment, previousActiveCells[0], .5)
    tm.connections.createSynapse(activeSegment, previousActiveCells[1], .5)
    tm.connections.createSynapse(activeSegment, previousActiveCells[2], .5)
    tm.connections.createSynapse(activeSegment, previousActiveCells[3], .5)

    # A second, weaker segment on the same predicted cell.
    matchingSegmentOnSameCell = tm.connections.createSegment(
        expectedActiveCells[0])
    s1 = tm.connections.createSynapse(matchingSegmentOnSameCell,
                                      previousActiveCells[0], .3)
    s2 = tm.connections.createSynapse(matchingSegmentOnSameCell,
                                      previousActiveCells[1], .3)

    # A weak segment on an unpredicted cell in the same column.
    matchingSegmentOnOtherCell = tm.connections.createSegment(
        otherBurstingCells[0])
    s3 = tm.connections.createSynapse(matchingSegmentOnOtherCell,
                                      previousActiveCells[0], .3)
    s4 = tm.connections.createSynapse(matchingSegmentOnOtherCell,
                                      previousActiveCells[1], .3)

    tm.compute(previousActiveColumns, True)
    self.assertEqual(expectedActiveCells, tm.getPredictiveCells())
    tm.compute(activeColumns, True)

    # Matching segments were not punished or reinforced.
    self.assertAlmostEqual(.3, tm.connections.dataForSynapse(s1).permanence)
    self.assertAlmostEqual(.3, tm.connections.dataForSynapse(s2).permanence)
    self.assertAlmostEqual(.3, tm.connections.dataForSynapse(s3).permanence)
    self.assertAlmostEqual(.3, tm.connections.dataForSynapse(s4).permanence)
# We repeat the sequence 10 times
for i in range(10):

    # Send each letter in the sequence in order
    for j in range(5):
        # NOTE(review): the original comprehension reused the names `i` and
        # `j`, shadowing the loop indices (and leaking into the enclosing
        # scope under Python 2); fresh names avoid that hazard.
        activeColumns = set(
            [col for col, bit in zip(count(), x[j]) if bit == 1])

        # The compute method performs one step of learning and/or inference. Note:
        # here we just perform learning but you can perform prediction/inference and
        # learning in the same step if you want (online learning).
        tm.compute(activeColumns, learn=True)

        # The following print statements can be ignored.
        # Useful for tracing internal states
        print("active cells " + str(tm.getActiveCells()))
        print("predictive cells " + str(tm.getPredictiveCells()))
        print("winner cells " + str(tm.getWinnerCells()))
        print("# of active segments " + str(tm.connections.numSegments()))

    # The reset command tells the TP that a sequence just ended and essentially
    # zeros out all the states. It is not strictly necessary but it's a bit
    # messier without resets, and the TP learns quicker with resets.
    tm.reset()


#######################################################################
#
# Step 3: send the same sequence of vectors and look at predictions made by
# temporal memory
for j in range(5):
    # Use print() with one pre-joined string so the output is identical under
    # both Python 2 and Python 3 (the original mixed Python 2 print
    # statements with print() calls, which is a SyntaxError on Python 3).
    print("\n\n-------- " + "ABCDE"[j] + " -----------")
    print("Raw input vector : " + formatRow(x[j]))
# We repeat the sequence 10 times
for i in range(10):

    # Send each letter in the sequence in order
    for j in range(5):
        # NOTE(review): the original comprehension reused the names `i` and
        # `j`, shadowing the loop indices (and leaking into the enclosing
        # scope under Python 2); fresh names avoid that hazard.
        activeColumns = set(
            [col for col, bit in zip(count(), x[j]) if bit == 1])

        # The compute method performs one step of learning and/or inference. Note:
        # here we just perform learning but you can perform prediction/inference and
        # learning in the same step if you want (online learning).
        tm.compute(activeColumns, learn=True)

        # The following print statements can be ignored.
        # Useful for tracing internal states
        print("active cells " + str(tm.getActiveCells()))
        print("predictive cells " + str(tm.getPredictiveCells()))
        print("winner cells " + str(tm.getWinnerCells()))
        print("# of active segments " + str(tm.connections.numSegments()))

    # The reset command tells the TP that a sequence just ended and essentially
    # zeros out all the states. It is not strictly necessary but it's a bit
    # messier without resets, and the TP learns quicker with resets.
    tm.reset()


#######################################################################
#
# Step 3: send the same sequence of vectors and look at predictions made by
# temporal memory
for j in range(5):
    # Use print() with one pre-joined string so the output is identical under
    # both Python 2 and Python 3 (the original mixed Python 2 print
    # statements with print() calls, which is a SyntaxError on Python 3).
    print("\n\n-------- " + "ABCDE"[j] + " -----------")
Vec = [] for x in range(xRange): for y in range(yRange): V = EncodeVector(random.randint(-100, 100), random.randint(-100, 100)) u, v = DecodeVector(V) #print "(%d, %d) = v(%d, %d)" % (x, y, u, v) #plt.quiver(x, y, u, v, pivot='mid', scale=10, units='dots', width=1) Vec = numpy.append(Vec, V) activeColumns = set([j for j, k in zip(count(), Vec) if k == 1]) tm.compute(activeColumns, learn = False) activeColumnsIndeces = [tm.columnForCell(i) for i in tm.getActiveCells()] predictedColumnIndeces = [tm.columnForCell(i) for i in tm.getPredictiveCells()] actColState = [1 if i in activeColumnsIndeces else 0 for i in range(tm.numberOfColumns())] predColState = [1 if i in predictedColumnIndeces else 0 for i in range(tm.numberOfColumns())] z = 0 for x in range(xRange): for y in range(yRange): AV = actColState[z: z + UnitEncoder.getWidth() * 2] PV = predColState[z: z + UnitEncoder.getWidth() * 2] PV = numpy.asarray( PV ) u, v = DecodeVector( PV ) if u != -999 and v != -999: print "(%d, %d) = v(%d, %d)" % (x, y, u, v) plt.quiver(x, y, u, v, pivot='mid', scale=10, units='dots', width=1) else:
# We repeat the sequence 10 times
for i in range(10):

    # Send each letter in the sequence in order
    for j in range(5):
        # NOTE(review): the original comprehension reused the names `i` and
        # `j`, shadowing the loop indices (and leaking into the enclosing
        # scope under Python 2); fresh names avoid that hazard.
        activeColumns = set(
            [col for col, bit in zip(count(), x[j]) if bit == 1])

        # The compute method performs one step of learning and/or inference. Note:
        # here we just perform learning but you can perform prediction/inference and
        # learning in the same step if you want (online learning).
        tm.compute(activeColumns, learn=True)

        # The following print statements can be ignored.
        # Useful for tracing internal states
        print("active cells " + str(tm.getActiveCells()))
        print("predictive cells " + str(tm.getPredictiveCells()))
        print("winner cells " + str(tm.getWinnerCells()))
        print("# of active segments " + str(tm.connections.numSegments()))

    # The reset command tells the TM that a sequence just ended and essentially
    # zeros out all the states. It is not strictly necessary but it's a bit
    # messier without resets, and the TM learns quicker with resets.
    tm.reset()


#######################################################################
#
# Step 3: send the same sequence of vectors and look at predictions made by
# temporal memory
for j in range(5):
    # Use print() with one pre-joined string so the output is identical under
    # both Python 2 and Python 3 (the original mixed Python 2 print
    # statements with print() calls, which is a SyntaxError on Python 3).
    print("\n\n-------- " + "ABCDE"[j] + " -----------")