def __init__(self,
             columnDimensions=(2048,),
             cellsPerColumn=32,
             activationThreshold=13,
             initialPermanence=0.21,
             connectedPermanence=0.50,
             minThreshold=10,
             maxNewSynapseCount=20,
             permanenceIncrement=0.10,
             permanenceDecrement=0.10,
             predictedSegmentDecrement=0.0,
             seed=42):
  """
  @param columnDimensions (list) Dimensions of the column space

  @param cellsPerColumn (int) Number of cells per column

  @param activationThreshold (int) If the number of active connected synapses
         on a segment is at least this threshold, the segment is said to be
         active.

  @param initialPermanence (float) Initial permanence of a new synapse.

  @param connectedPermanence (float) If the permanence value for a synapse is
         greater than this value, it is said to be connected.

  @param minThreshold (int) If the number of synapses active on a segment is
         at least this threshold, it is selected as the best matching cell in
         a bursting column.

  @param maxNewSynapseCount (int) The maximum number of synapses added to a
         segment during learning.

  @param permanenceIncrement (float) Amount by which permanences of synapses
         are incremented during learning.

  @param permanenceDecrement (float) Amount by which permanences of synapses
         are decremented during learning.

  @param predictedSegmentDecrement (float) Amount by which active permanences
         of synapses of previously predicted but inactive segments are
         decremented.

  @param seed (int) Seed for the random number generator.
  """
  # Error checking
  if not len(columnDimensions):
    raise ValueError("Number of column dimensions must be greater than 0")

  if not cellsPerColumn > 0:
    raise ValueError("Number of cells per column must be greater than 0")

  # TODO: Validate all parameters (and add validation tests)

  # Save member variables
  self.columnDimensions = columnDimensions
  self.cellsPerColumn = cellsPerColumn
  self.activationThreshold = activationThreshold
  self.initialPermanence = initialPermanence
  self.connectedPermanence = connectedPermanence
  self.minThreshold = minThreshold
  self.maxNewSynapseCount = maxNewSynapseCount
  self.permanenceIncrement = permanenceIncrement
  self.permanenceDecrement = permanenceDecrement
  self.predictedSegmentDecrement = predictedSegmentDecrement

  # Initialize member variables
  self.connections = Connections(self.numberOfCells())
  self._random = Random(seed)

  self.activeCells = set()
  self.predictiveCells = set()
  self.activeSegments = set()
  self.winnerCells = set()
  self.matchingSegments = set()
  self.matchingCells = set()
def testCreateSegment(self):
  connections = Connections(1024)

  segment1 = connections.createSegment(10)
  self.assertEqual(segment1.cell, 10)

  segment2 = connections.createSegment(10)
  self.assertEqual(segment2.cell, 10)

  self.assertEqual([segment1, segment2],
                   list(connections.segmentsForCell(10)))
def __init__(self,
             columnDimensions=(2048,),
             cellsPerColumn=32,
             activationThreshold=13,
             initialPermanence=0.21,
             connectedPermanence=0.50,
             minThreshold=10,
             maxNewSynapseCount=20,
             permanenceIncrement=0.10,
             permanenceDecrement=0.10,
             seed=42):
  """
  Translate parameters and initialize member variables specific to
  TemporalMemory.
  """
  numberOfCols = 1
  for n in columnDimensions:
    numberOfCols *= n

  super(TemporalMemoryShim, self).__init__(
    numberOfCols=numberOfCols,
    cellsPerColumn=cellsPerColumn,
    initialPerm=initialPermanence,
    connectedPerm=connectedPermanence,
    minThreshold=minThreshold,
    newSynapseCount=maxNewSynapseCount,
    permanenceInc=permanenceIncrement,
    permanenceDec=permanenceDecrement,
    permanenceMax=1.0,
    globalDecay=0,
    activationThreshold=activationThreshold,
    seed=seed)

  self.connections = Connections(numberOfCols * cellsPerColumn)
  self.predictiveCells = set()
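# The shim flattens a possibly multi-dimensional column space into the single
# numberOfCols count expected by the legacy constructor. A minimal sketch of
# that translation; the dimension values below are hypothetical and chosen
# only for illustration.
columnDimensions = (32, 64)   # a 2-D column space
cellsPerColumn = 32

numberOfCols = 1
for n in columnDimensions:
  numberOfCols *= n           # 32 * 64 = 2048 columns

numberOfCells = numberOfCols * cellsPerColumn  # 2048 * 32 = 65536 cells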
def read(cls, proto):
  """
  Reads deserialized data from proto object

  @param proto (DynamicStructBuilder) Proto object

  @return (TemporalMemory) TemporalMemory instance
  """
  tm = object.__new__(cls)

  tm.columnDimensions = list(proto.columnDimensions)
  tm.cellsPerColumn = int(proto.cellsPerColumn)
  tm.activationThreshold = int(proto.activationThreshold)
  tm.initialPermanence = proto.initialPermanence
  tm.connectedPermanence = proto.connectedPermanence
  tm.minThreshold = int(proto.minThreshold)
  tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
  tm.permanenceIncrement = proto.permanenceIncrement
  tm.permanenceDecrement = proto.permanenceDecrement
  tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

  tm.connections = Connections.read(proto.connections)
  tm._random = Random()
  tm._random.read(proto.random)

  tm.activeCells = set([int(x) for x in proto.activeCells])
  tm.predictiveCells = set([int(x) for x in proto.predictiveCells])
  tm.activeSegments = set([int(x) for x in proto.activeSegments])
  tm.winnerCells = set([int(x) for x in proto.winnerCells])
  tm.matchingSegments = set([int(x) for x in proto.matchingSegments])
  tm.matchingCells = set([int(x) for x in proto.matchingCells])

  return tm
def read(cls, proto):
  """
  Reads deserialized data from proto object

  @param proto (DynamicStructBuilder) Proto object

  @return (TemporalMemory) TemporalMemory instance
  """
  tm = object.__new__(cls)

  # capnp fails to save a tuple, so proto.columnDimensions was forced to
  # serialize as a list.  We prefer a tuple, however, because columnDimensions
  # should be regarded as immutable.
  tm.columnDimensions = tuple(proto.columnDimensions)
  tm.cellsPerColumn = int(proto.cellsPerColumn)
  tm.activationThreshold = int(proto.activationThreshold)
  tm.initialPermanence = proto.initialPermanence
  tm.connectedPermanence = proto.connectedPermanence
  tm.minThreshold = int(proto.minThreshold)
  tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
  tm.permanenceIncrement = proto.permanenceIncrement
  tm.permanenceDecrement = proto.permanenceDecrement
  tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

  tm.connections = Connections.read(proto.connections)
  #pylint: disable=W0212
  tm._random = Random()
  tm._random.read(proto.random)
  #pylint: enable=W0212

  tm.activeCells = [int(x) for x in proto.activeCells]
  tm.winnerCells = [int(x) for x in proto.winnerCells]

  flatListLength = tm.connections.segmentFlatListLength()
  tm.numActiveConnectedSynapsesForSegment = [0] * flatListLength
  tm.numActivePotentialSynapsesForSegment = [0] * flatListLength

  tm.activeSegments = []
  tm.matchingSegments = []

  for i in xrange(len(proto.activeSegmentOverlaps)):
    protoSegmentOverlap = proto.activeSegmentOverlaps[i]

    segment = tm.connections.getSegment(protoSegmentOverlap.cell,
                                        protoSegmentOverlap.segment)
    tm.activeSegments.append(segment)

    overlap = protoSegmentOverlap.overlap
    tm.numActiveConnectedSynapsesForSegment[segment.flatIdx] = overlap

  for i in xrange(len(proto.matchingSegmentOverlaps)):
    protoSegmentOverlap = proto.matchingSegmentOverlaps[i]

    segment = tm.connections.getSegment(protoSegmentOverlap.cell,
                                        protoSegmentOverlap.segment)
    tm.matchingSegments.append(segment)

    overlap = protoSegmentOverlap.overlap
    tm.numActivePotentialSynapsesForSegment[segment.flatIdx] = overlap

  return tm
def connectionsFactory(*args, **kwargs):
  """
  Create a Connections instance.  TemporalMemory subclasses may override this
  method to choose a different Connections implementation, or to augment the
  instance otherwise returned by the default Connections implementation.

  See Connections for constructor signature and usage.

  @return: Connections instance
  """
  return Connections(*args, **kwargs)
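# A subclass might override connectionsFactory to augment or re-parameterize
# the Connections instance. A minimal sketch, assuming connectionsFactory is
# exposed as a static method on TemporalMemory (the decorator is not shown in
# the snippet above); the subclass name is hypothetical.
class AugmentedTemporalMemory(TemporalMemory):

  @staticmethod
  def connectionsFactory(*args, **kwargs):
    connections = Connections(*args, **kwargs)
    # Any augmentation of the returned instance (e.g. attaching bookkeeping
    # attributes) would happen here before handing it back to the base class.
    return connections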
def __init__(self,
             columnDimensions=(2048,),
             cellsPerColumn=32,
             activationThreshold=13,
             initialPermanence=0.21,
             connectedPermanence=0.50,
             minThreshold=10,
             maxNewSynapseCount=20,
             permanenceIncrement=0.10,
             permanenceDecrement=0.10,
             predictedSegmentDecrement=0.004,
             seed=42):
  """
  @param columnDimensions (list) Dimensions of the column space

  @param cellsPerColumn (int) Number of cells per column

  @param activationThreshold (int) If the number of active connected synapses
         on a segment is at least this threshold, the segment is said to be
         active.

  @param initialPermanence (float) Initial permanence of a new synapse.

  @param connectedPermanence (float) If the permanence value for a synapse is
         greater than this value, it is said to be connected.

  @param minThreshold (int) If the number of synapses active on a segment is
         at least this threshold, it is selected as the best matching cell in
         a bursting column.

  @param maxNewSynapseCount (int) The maximum number of synapses added to a
         segment during learning.

  @param permanenceIncrement (float) Amount by which permanences of synapses
         are incremented during learning.

  @param permanenceDecrement (float) Amount by which permanences of synapses
         are decremented during learning.

  @param predictedSegmentDecrement (float) Amount by which active permanences
         of synapses of previously predicted but inactive segments are
         decremented.

  @param seed (int) Seed for the random number generator.
  """
  # Error checking
  if not len(columnDimensions):
    raise ValueError("Number of column dimensions must be greater than 0")

  if not cellsPerColumn > 0:
    raise ValueError("Number of cells per column must be greater than 0")

  # TODO: Validate all parameters (and add validation tests)

  # Save member variables
  self.columnDimensions = columnDimensions
  self.cellsPerColumn = cellsPerColumn
  self.activationThreshold = activationThreshold
  self.initialPermanence = initialPermanence
  self.connectedPermanence = connectedPermanence
  self.minThreshold = minThreshold
  self.maxNewSynapseCount = maxNewSynapseCount
  self.permanenceIncrement = permanenceIncrement
  self.permanenceDecrement = permanenceDecrement
  self.predictedSegmentDecrement = predictedSegmentDecrement

  # Initialize member variables
  self.connections = Connections(self.numberOfCells())
  self._random = Random(seed)

  self.activeCells = set()
  self.predictiveCells = set()
  self.activeSegments = set()
  self.winnerCells = set()
  self.matchingSegments = set()
  self.matchingCells = set()
def testDestroySynapse(self):
  """
  Creates a segment, creates a number of synapses on it, destroys a synapse,
  and makes sure it got destroyed.
  """
  connections = Connections(1024)

  segment = connections.createSegment(20)
  synapse1 = connections.createSynapse(segment, 80, .85)
  synapse2 = connections.createSynapse(segment, 81, .85)
  synapse3 = connections.createSynapse(segment, 82, .15)
  self.assertEqual(3, connections.numSynapses())

  connections.destroySynapse(synapse2)

  self.assertEqual(2, connections.numSynapses())
  self.assertEqual(connections.synapsesForSegment(segment),
                   [synapse1, synapse3])

  active, matching = connections.computeActivity([80, 81, 82], .5, 2, 0.0, 1)

  self.assertEqual(0, len(active))
  self.assertEqual(1, len(matching))
  self.assertEqual(2, matching[0].overlap)
def testUpdateSynapsePermanence(self):
  """
  Creates a synapse and updates its permanence, and makes sure that its data
  was correctly updated.
  """
  connections = Connections(1024)
  segment = connections.createSegment(10)
  synapse = connections.createSynapse(segment, 50, .34)

  connections.updateSynapsePermanence(synapse, .21)

  synapseData = connections.dataForSynapse(synapse)
  self.assertAlmostEqual(synapseData.permanence, .21)
def testDestroySynapse(self):
  """
  Creates a segment, creates a number of synapses on it, destroys a synapse,
  and makes sure it got destroyed.
  """
  connections = Connections(1024)

  segment = connections.createSegment(20)
  synapse1 = connections.createSynapse(segment, 80, .85)
  synapse2 = connections.createSynapse(segment, 81, .85)
  synapse3 = connections.createSynapse(segment, 82, .15)
  self.assertEqual(3, connections.numSynapses())

  connections.destroySynapse(synapse2)

  self.assertEqual(2, connections.numSynapses())
  self.assertEqual(set([synapse1, synapse3]),
                   connections.synapsesForSegment(segment))

  (numActiveConnected,
   numActivePotential) = connections.computeActivity([80, 81, 82], .5)

  self.assertEqual(1, numActiveConnected[segment.flatIdx])
  self.assertEqual(2, numActivePotential[segment.flatIdx])
def read(cls, proto):
  """
  Reads deserialized data from proto object

  @param proto (DynamicStructBuilder) Proto object

  @return (TemporalMemory) TemporalMemory instance
  """
  tm = object.__new__(cls)

  tm.columnDimensions = list(proto.columnDimensions)
  tm.cellsPerColumn = int(proto.cellsPerColumn)
  tm.activationThreshold = int(proto.activationThreshold)
  tm.initialPermanence = proto.initialPermanence
  tm.connectedPermanence = proto.connectedPermanence
  tm.minThreshold = int(proto.minThreshold)
  tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
  tm.permanenceIncrement = proto.permanenceIncrement
  tm.permanenceDecrement = proto.permanenceDecrement
  tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

  tm.connections = Connections.read(proto.connections)
  #pylint: disable=W0212
  tm._random = Random()
  tm._random.read(proto.random)
  #pylint: enable=W0212

  tm.activeCells = [int(x) for x in proto.activeCells]
  tm.winnerCells = [int(x) for x in proto.winnerCells]

  tm.activeSegments = []
  tm.matchingSegments = []

  for i in xrange(len(proto.activeSegmentOverlaps)):
    protoSegmentOverlap = proto.activeSegmentOverlaps[i]

    segment = tm.connections.getSegment(protoSegmentOverlap.segment,
                                        protoSegmentOverlap.cell)
    segmentOverlap = SegmentOverlap(segment, protoSegmentOverlap.overlap)
    tm.activeSegments.append(segmentOverlap)

  for i in xrange(len(proto.matchingSegmentOverlaps)):
    protoSegmentOverlap = proto.matchingSegmentOverlaps[i]

    segment = tm.connections.getSegment(protoSegmentOverlap.segment,
                                        protoSegmentOverlap.cell)
    segmentOverlap = SegmentOverlap(segment, protoSegmentOverlap.overlap)
    tm.matchingSegments.append(segmentOverlap)

  return tm
def __init__(self, learnOnOneCell=True, **kwargs):
  """
  @param learnOnOneCell (boolean) If True, the winner cell for each column
         will be fixed between resets.
  """
  super(ExtendedTemporalMemory, self).__init__(**kwargs)

  self.activeExternalCells = set()
  self.learnOnOneCell = learnOnOneCell
  self.chosenCellForColumn = dict()

  self.unpredictedActiveColumns = set()
  self.predictedActiveCells = set()

  self.activeApicalCells = set()
  self.apicalConnections = Connections(self.numberOfCells())
  self.activeApicalSegments = set()
  self.matchingApicalSegments = set()
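# Because the constructor forwards **kwargs to TemporalMemory.__init__, an
# instance can be configured with both the extended and the base parameters.
# A short sketch; it assumes ExtendedTemporalMemory has already been imported.
etm = ExtendedTemporalMemory(learnOnOneCell=True,       # fix winner cells between resets
                             columnDimensions=(2048,),  # forwarded to TemporalMemory
                             cellsPerColumn=32,
                             permanenceIncrement=0.10)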
def testSynapseReuse(self):
  """
  Creates a synapse over the synapses per segment limit, and verifies that
  the lowest permanence synapse is removed to make room for the new synapse.
  """
  connections = Connections(1024, 1024, 2)
  segment = connections.createSegment(10)

  synapse1 = connections.createSynapse(segment, 50, .34)
  synapse2 = connections.createSynapse(segment, 51, .48)

  synapses = connections.synapsesForSegment(segment)
  self.assertEqual(set([synapse1, synapse2]), synapses)

  # Add an additional synapse to force it over the limit of num synapses
  # per segment.
  connections.createSynapse(segment, 52, .52)

  # Ensure lower permanence synapse was removed.
  self.assertEqual(set([51, 52]),
                   set(synapse.presynapticCell
                       for synapse in connections.synapsesForSegment(segment)))
def testSynapseReuse(self):
  """
  Creates a synapse over the synapses per segment limit, and verifies that
  the lowest permanence synapse is removed to make room for the new synapse.
  """
  connections = Connections(1024, 1024, 2)
  segment = connections.createSegment(10)

  synapse1 = connections.createSynapse(segment, 50, .34)
  synapse2 = connections.createSynapse(segment, 51, .34)

  synapses = connections.synapsesForSegment(segment)
  self.assertEqual(synapses, [synapse1, synapse2])

  # Add an additional synapse to force it over the limit of num synapses
  # per segment.
  synapse3 = connections.createSynapse(segment, 52, .52)
  self.assertEqual(0, synapse3.idx)

  # Ensure lower permanence synapse was removed.
  synapses = connections.synapsesForSegment(segment)
  self.assertEqual(synapses, [synapse3, synapse2])
def testDestroySegment(self):
  """
  Creates a segment, destroys it, and makes sure it got destroyed along with
  all of its synapses.
  """
  connections = Connections(1024)

  connections.createSegment(10)
  segment2 = connections.createSegment(20)
  connections.createSegment(30)
  connections.createSegment(40)

  connections.createSynapse(segment2, 80, 0.85)
  connections.createSynapse(segment2, 81, 0.85)
  connections.createSynapse(segment2, 82, 0.15)

  self.assertEqual(4, connections.numSegments())
  self.assertEqual(3, connections.numSynapses())

  connections.destroySegment(segment2)

  self.assertEqual(3, connections.numSegments())
  self.assertEqual(0, connections.numSynapses())

  (numActiveConnected,
   numActivePotential) = connections.computeActivity([80, 81, 82], 0.5)

  self.assertEqual(0, numActiveConnected[segment2.flatIdx])
  self.assertEqual(0, numActivePotential[segment2.flatIdx])
def testCreateSegmentReuse(self):
  connections = Connections(1024, 2)

  segment1 = connections.createSegment(42)
  connections.createSynapse(segment1, 1, .5)
  connections.createSynapse(segment1, 2, .5)

  # Let some time pass.
  connections.startNewIteration()
  connections.startNewIteration()
  connections.startNewIteration()

  # Create a segment with 3 synapses.
  segment2 = connections.createSegment(42)
  connections.createSynapse(segment2, 1, .5)
  connections.createSynapse(segment2, 2, .5)
  connections.createSynapse(segment2, 3, .5)

  connections.startNewIteration()

  # Give the first segment some activity.
  connections.recordSegmentActivity(segment1)

  # Create a new segment with 1 synapse.
  segment3 = connections.createSegment(42)
  connections.createSynapse(segment3, 1, .5)

  segments = connections.segmentsForCell(42)
  self.assertEqual(2, len(segments))

  # Verify first segment is still there with the same synapses.
  self.assertEqual(
    set([1, 2]),
    set(synapse.presynapticCell
        for synapse in connections.synapsesForSegment(segments[0])))

  # Verify second segment has been replaced.
  self.assertEqual(
    set([1]),
    set(synapse.presynapticCell
        for synapse in connections.synapsesForSegment(segments[1])))

  # Verify the flatIdxs were properly reused.
  self.assertLess(segment1.flatIdx, 2)
  self.assertLess(segment3.flatIdx, 2)
  self.assertTrue(segment1 is connections.segmentForFlatIdx(segment1.flatIdx))
  self.assertTrue(segment3 is connections.segmentForFlatIdx(segment3.flatIdx))
def testComputeActivity(self):
  """
  Creates a sample set of connections, and makes sure that computing the
  activity for a collection of cells with no activity returns the right
  activity data.
  """
  connections = Connections(1024)

  # Cell with 1 segment.
  # Segment with:
  # - 1 connected synapse: active
  # - 2 matching synapses
  segment1a = connections.createSegment(10)
  connections.createSynapse(segment1a, 150, .85)
  connections.createSynapse(segment1a, 151, .15)

  # Cell with 1 segment.
  # Segment with:
  # - 2 connected synapses: 2 active
  # - 3 matching synapses: 3 active
  segment2a = connections.createSegment(20)
  connections.createSynapse(segment2a, 80, .85)
  connections.createSynapse(segment2a, 81, .85)
  synapse = connections.createSynapse(segment2a, 82, .85)
  connections.updateSynapsePermanence(synapse, .15)

  inputVec = [50, 52, 53, 80, 81, 82, 150, 151]
  (numActiveConnected,
   numActivePotential) = connections.computeActivity(inputVec, .5)

  self.assertEqual(1, numActiveConnected[segment1a.flatIdx])
  self.assertEqual(2, numActivePotential[segment1a.flatIdx])
  self.assertEqual(2, numActiveConnected[segment2a.flatIdx])
  self.assertEqual(3, numActivePotential[segment2a.flatIdx])
class TemporalMemory(object): """ Class implementing the Temporal Memory algorithm. """ def __init__(self, columnDimensions=(2048, ), cellsPerColumn=32, activationThreshold=13, initialPermanence=0.21, connectedPermanence=0.50, minThreshold=10, maxNewSynapseCount=20, permanenceIncrement=0.10, permanenceDecrement=0.10, predictedSegmentDecrement=0.0, maxSegmentsPerCell=255, maxSynapsesPerSegment=255, seed=42, **kwargs): """ @param columnDimensions (list) Dimensions of the column space @param cellsPerColumn (int) Number of cells per column @param activationThreshold (int) If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active. @param initialPermanence (float) Initial permanence of a new synapse. @param connectedPermanence (float) If the permanence value for a synapse is greater than this value, it is said to be connected. @param minThreshold (int) If the number of synapses active on a segment is at least this threshold, it is selected as the best matching cell in a bursting column. @param maxNewSynapseCount (int) The maximum number of synapses added to a segment during learning. @param permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning. @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @param predictedSegmentDecrement (float) Amount by which active permanences of synapses of previously predicted but inactive segments are decremented. @param seed (int) Seed for the random number generator. Notes: predictedSegmentDecrement: A good value is just a bit larger than (the column-level sparsity * permanenceIncrement). So, if column-level sparsity is 2% and permanenceIncrement is 0.01, this parameter should be something like 4% * 0.01 = 0.0004). """ # Error checking if not len(columnDimensions): raise ValueError( "Number of column dimensions must be greater than 0") if not cellsPerColumn > 0: raise ValueError( "Number of cells per column must be greater than 0") # TODO: Validate all parameters (and add validation tests) # Save member variables self.columnDimensions = columnDimensions self.cellsPerColumn = cellsPerColumn self.activationThreshold = activationThreshold self.initialPermanence = initialPermanence self.connectedPermanence = connectedPermanence self.minThreshold = minThreshold self.maxNewSynapseCount = maxNewSynapseCount self.permanenceIncrement = permanenceIncrement self.permanenceDecrement = permanenceDecrement self.predictedSegmentDecrement = predictedSegmentDecrement # Initialize member variables self.connections = Connections( self.numberOfCells(), maxSegmentsPerCell=maxSegmentsPerCell, maxSynapsesPerSegment=maxSynapsesPerSegment) self._random = Random(seed) self.activeCells = set() self.predictiveCells = set() self.activeSegments = set() self.winnerCells = set() self.matchingSegments = set() self.matchingCells = set() # ============================== # Main functions # ============================== def compute(self, activeColumns, learn=True): """ Feeds input record through TM, performing inference and learning. 
@param activeColumns (set) Indices of active columns @param learn (bool) Whether or not learning is enabled Updates member variables: - `activeCells` (set) - `winnerCells` (set) - `activeSegments` (set) - `predictiveCells` (set) - `matchingSegments`(set) - `matchingCells` (set) """ prevPredictiveCells = self.predictiveCells prevActiveSegments = self.activeSegments prevActiveCells = self.activeCells prevWinnerCells = self.winnerCells prevMatchingSegments = self.matchingSegments prevMatchingCells = self.matchingCells activeCells = set() winnerCells = set() (_activeCells, _winnerCells, predictedActiveColumns, predictedInactiveCells) = self.activateCorrectlyPredictiveCells( prevPredictiveCells, prevMatchingCells, activeColumns) activeCells.update(_activeCells) winnerCells.update(_winnerCells) (_activeCells, _winnerCells, learningSegments) = self.burstColumns( activeColumns, predictedActiveColumns, prevActiveCells, prevWinnerCells, self.connections) activeCells.update(_activeCells) winnerCells.update(_winnerCells) if learn: self.learnOnSegments(prevActiveSegments, learningSegments, prevActiveCells, winnerCells, prevWinnerCells, self.connections, predictedInactiveCells, prevMatchingSegments) (activeSegments, predictiveCells, matchingSegments, matchingCells) = self.computePredictiveCells(activeCells, self.connections) self.activeCells = activeCells self.winnerCells = winnerCells self.activeSegments = activeSegments self.predictiveCells = predictiveCells self.matchingSegments = matchingSegments self.matchingCells = matchingCells def reset(self): """ Indicates the start of a new sequence. Resets sequence state of the TM. """ self.activeCells = set() self.predictiveCells = set() self.activeSegments = set() self.winnerCells = set() # ============================== # Phases # ============================== def activateCorrectlyPredictiveCells(self, prevPredictiveCells, prevMatchingCells, activeColumns): """ Phase 1: Activate the correctly predictive cells. Pseudocode: - for each prev predictive cell - if in active column - mark it as active - mark it as winner cell - mark column as predicted => active - if not in active column - mark it as an predicted but inactive cell @param prevPredictiveCells (set) Indices of predictive cells in `t-1` @param activeColumns (set) Indices of active columns in `t` @return (tuple) Contains: `activeCells` (set), `winnerCells` (set), `predictedActiveColumns` (set), `predictedInactiveCells` (set) """ activeCells = set() winnerCells = set() predictedActiveColumns = set() predictedInactiveCells = set() for cell in prevPredictiveCells: column = self.columnForCell(cell) if column in activeColumns: activeCells.add(cell) winnerCells.add(cell) predictedActiveColumns.add(column) if self.predictedSegmentDecrement > 0: for cell in prevMatchingCells: column = self.columnForCell(cell) if column not in activeColumns: predictedInactiveCells.add(cell) return (activeCells, winnerCells, predictedActiveColumns, predictedInactiveCells) def burstColumns(self, activeColumns, predictedActiveColumns, prevActiveCells, prevWinnerCells, connections): """ Phase 2: Burst unpredicted columns. 
Pseudocode: - for each unpredicted active column - mark all cells as active - mark the best matching cell as winner cell - (learning) - if it has no matching segment - (optimization) if there are prev winner cells - add a segment to it - mark the segment as learning @param activeColumns (set) Indices of active columns in `t` @param predictedActiveColumns (set) Indices of predicted => active columns in `t` @param prevActiveCells (set) Indices of active cells in `t-1` @param prevWinnerCells (set) Indices of winner cells in `t-1` @param connections (Connections) Connectivity of layer @return (tuple) Contains: `activeCells` (set), `winnerCells` (set), `learningSegments` (set) """ activeCells = set() winnerCells = set() learningSegments = set() unpredictedActiveColumns = activeColumns - predictedActiveColumns for column in unpredictedActiveColumns: cells = self.cellsForColumn(column) activeCells.update(cells) (bestCell, bestSegment) = self.bestMatchingCell(cells, prevActiveCells, connections) winnerCells.add(bestCell) if bestSegment is None and len(prevWinnerCells): bestSegment = connections.createSegment(bestCell) if bestSegment is not None: learningSegments.add(bestSegment) return activeCells, winnerCells, learningSegments def learnOnSegments(self, prevActiveSegments, learningSegments, prevActiveCells, winnerCells, prevWinnerCells, connections, predictedInactiveCells, prevMatchingSegments): """ Phase 3: Perform learning by adapting segments. Pseudocode: - (learning) for each prev active or learning segment - if learning segment or from winner cell - strengthen active synapses - weaken inactive synapses - if learning segment - add some synapses to the segment - subsample from prev winner cells - if predictedSegmentDecrement > 0 - for each previously matching segment - if cell is a predicted inactive cell - weaken active synapses but don't touch inactive synapses @param prevActiveSegments (set) Indices of active segments in `t-1` @param learningSegments (set) Indices of learning segments in `t` @param prevActiveCells (set) Indices of active cells in `t-1` @param winnerCells (set) Indices of winner cells in `t` @param prevWinnerCells (set) Indices of winner cells in `t-1` @param connections (Connections) Connectivity of layer @param predictedInactiveCells (set) Indices of predicted inactive cells @param prevMatchingSegments (set) Indices of segments with """ for segment in prevActiveSegments | learningSegments: isLearningSegment = segment in learningSegments isFromWinnerCell = connections.cellForSegment( segment) in winnerCells activeSynapses = self.activeSynapsesForSegment( segment, prevActiveCells, connections) if isLearningSegment or isFromWinnerCell: self.adaptSegment(segment, activeSynapses, connections, self.permanenceIncrement, self.permanenceDecrement) if isLearningSegment: n = self.maxNewSynapseCount - len(activeSynapses) for presynapticCell in self.pickCellsToLearnOn( n, segment, prevWinnerCells, connections): connections.createSynapse(segment, presynapticCell, self.initialPermanence) if self.predictedSegmentDecrement > 0: for segment in prevMatchingSegments: isPredictedInactiveCell = connections.cellForSegment( segment) in predictedInactiveCells activeSynapses = self.activeSynapsesForSegment( segment, prevActiveCells, connections) if isPredictedInactiveCell: self.adaptSegment(segment, activeSynapses, connections, -self.predictedSegmentDecrement, 0.0) def computePredictiveCells(self, activeCells, connections): """ Phase 4: Compute predictive cells due to lateral input on distal 
dendrites. Pseudocode: - for each distal dendrite segment with activity >= activationThreshold - mark the segment as active - mark the cell as predictive - if predictedSegmentDecrement > 0 - for each distal dendrite segment with unconnected activity >= minThreshold - mark the segment as matching - mark the cell as matching Forward propagates activity from active cells to the synapses that touch them, to determine which synapses are active. @param activeCells (set) Indices of active cells in `t` @param connections (Connections) Connectivity of layer @return (tuple) Contains: `activeSegments` (set), `predictiveCells` (set), `matchingSegments` (set), `matchingCells` (set) """ numActiveConnectedSynapsesForSegment = defaultdict(int) numActiveSynapsesForSegment = defaultdict(int) activeSegments = set() predictiveCells = set() matchingSegments = set() matchingCells = set() for cell in activeCells: for synapseData in connections.synapsesForPresynapticCell( cell).values(): segment = synapseData.segment permanence = synapseData.permanence if permanence >= self.connectedPermanence: numActiveConnectedSynapsesForSegment[segment] += 1 if (numActiveConnectedSynapsesForSegment[segment] >= self.activationThreshold): activeSegments.add(segment) predictiveCells.add( connections.cellForSegment(segment)) if permanence > 0 and self.predictedSegmentDecrement > 0: numActiveSynapsesForSegment[segment] += 1 if numActiveSynapsesForSegment[ segment] >= self.minThreshold: matchingSegments.add(segment) matchingCells.add(connections.cellForSegment(segment)) return activeSegments, predictiveCells, matchingSegments, matchingCells # ============================== # Helper functions # ============================== def bestMatchingCell(self, cells, activeCells, connections): """ Gets the cell with the best matching segment (see `TM.bestMatchingSegment`) that has the largest number of active synapses of all best matching segments. If none were found, pick the least used cell (see `TM.leastUsedCell`). @param cells (set) Indices of cells @param activeCells (set) Indices of active cells @param connections (Connections) Connectivity of layer @return (tuple) Contains: `cell` (int), `bestSegment` (int) """ maxSynapses = 0 bestCell = None bestSegment = None for cell in cells: segment, numActiveSynapses = self.bestMatchingSegment( cell, activeCells, connections) if segment is not None and numActiveSynapses > maxSynapses: maxSynapses = numActiveSynapses bestCell = cell bestSegment = segment if bestCell is None: bestCell = self.leastUsedCell(cells, connections) return bestCell, bestSegment def bestMatchingSegment(self, cell, activeCells, connections): """ Gets the segment on a cell with the largest number of activate synapses, including all synapses with non-zero permanences. 
@param cell (int) Cell index @param activeCells (set) Indices of active cells @param connections (Connections) Connectivity of layer @return (tuple) Contains: `segment` (int), `connectedActiveSynapses` (set) """ maxSynapses = self.minThreshold bestSegment = None bestNumActiveSynapses = None for segment in connections.segmentsForCell(cell): numActiveSynapses = 0 for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) if ((synapseData.presynapticCell in activeCells) and synapseData.permanence > 0): numActiveSynapses += 1 if numActiveSynapses >= maxSynapses: maxSynapses = numActiveSynapses bestSegment = segment bestNumActiveSynapses = numActiveSynapses return bestSegment, bestNumActiveSynapses def leastUsedCell(self, cells, connections): """ Gets the cell with the smallest number of segments. Break ties randomly. @param cells (set) Indices of cells @param connections (Connections) Connectivity of layer @return (int) Cell index """ leastUsedCells = set() minNumSegments = float("inf") for cell in cells: numSegments = len(connections.segmentsForCell(cell)) if numSegments < minNumSegments: minNumSegments = numSegments leastUsedCells = set() if numSegments == minNumSegments: leastUsedCells.add(cell) i = self._random.getUInt32(len(leastUsedCells)) return sorted(leastUsedCells)[i] @staticmethod def activeSynapsesForSegment(segment, activeCells, connections): """ Returns the synapses on a segment that are active due to lateral input from active cells. @param segment (int) Segment index @param activeCells (set) Indices of active cells @param connections (Connections) Connectivity of layer @return (set) Indices of active synapses on segment """ synapses = set() for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) if synapseData.presynapticCell in activeCells: synapses.add(synapse) return synapses def adaptSegment(self, segment, activeSynapses, connections, permanenceIncrement, permanenceDecrement): """ Updates synapses on segment. Strengthens active synapses; weakens inactive synapses. @param segment (int) Segment index @param activeSynapses (set) Indices of active synapses @param connections (Connections) Connectivity of layer @param permanenceIncrement (float) Amount to increment active synapses @param permanenceDecrement (float) Amount to decrement inactive synapses """ # Need to copy synapses for segment set below because it will be modified # during iteration by `destroySynapse` for synapse in set(connections.synapsesForSegment(segment)): synapseData = connections.dataForSynapse(synapse) permanence = synapseData.permanence if synapse in activeSynapses: permanence += permanenceIncrement else: permanence -= permanenceDecrement # Keep permanence within min/max bounds permanence = max(0.0, min(1.0, permanence)) if (abs(permanence) < EPSILON): connections.destroySynapse(synapse) else: connections.updateSynapsePermanence(synapse, permanence) def pickCellsToLearnOn(self, n, segment, winnerCells, connections): """ Pick cells to form distal connections to. 
TODO: Respect topology and learningRadius @param n (int) Number of cells to pick @param segment (int) Segment index @param winnerCells (set) Indices of winner cells in `t` @param connections (Connections) Connectivity of layer @return (set) Indices of cells picked """ candidates = set(winnerCells) # Remove cells that are already synapsed on by this segment for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) presynapticCell = synapseData.presynapticCell if presynapticCell in candidates: candidates.remove(presynapticCell) n = min(n, len(candidates)) candidates = sorted(candidates) cells = set() # Pick n cells randomly for _ in range(n): i = self._random.getUInt32(len(candidates)) cells.add(candidates[i]) del candidates[i] return cells def columnForCell(self, cell): """ Returns the index of the column that a cell belongs to. @param cell (int) Cell index @return (int) Column index """ self._validateCell(cell) return int(cell / self.cellsPerColumn) def cellsForColumn(self, column): """ Returns the indices of cells that belong to a column. @param column (int) Column index @return (set) Cell indices """ self._validateColumn(column) start = self.cellsPerColumn * self.getCellIndex(column) end = start + self.cellsPerColumn return set(xrange(start, end)) def numberOfColumns(self): """ Returns the number of columns in this layer. @return (int) Number of columns """ return reduce(mul, self.columnDimensions, 1) def numberOfCells(self): """ Returns the number of cells in this layer. @return (int) Number of cells """ return self.numberOfColumns() * self.cellsPerColumn def getActiveCells(self): """ Returns the indices of the active cells. @return (list) Indices of active cells. """ return self.getCellIndices(self.activeCells) def getPredictiveCells(self): """ Returns the indices of the predictive cells. @return (list) Indices of predictive cells. """ return self.getCellIndices(self.predictiveCells) def getWinnerCells(self): """ Returns the indices of the winner cells. @return (list) Indices of winner cells. """ return self.getCellIndices(self.winnerCells) def getMatchingCells(self): """ Returns the indices of the matching cells. @return (list) Indices of matching cells. 
""" return self.getCellIndices(self.matchingCells) def mapCellsToColumns(self, cells): """ Maps cells to the columns they belong to @param cells (set) Cells @return (dict) Mapping from columns to their cells in `cells` """ cellsForColumns = defaultdict(set) for cell in cells: column = self.columnForCell(cell) cellsForColumns[column].add(cell) return cellsForColumns def write(self, proto): """ Writes serialized data to proto object @param proto (DynamicStructBuilder) Proto object """ proto.columnDimensions = self.columnDimensions proto.cellsPerColumn = self.cellsPerColumn proto.activationThreshold = self.activationThreshold proto.initialPermanence = self.initialPermanence proto.connectedPermanence = self.connectedPermanence proto.minThreshold = self.minThreshold proto.maxNewSynapseCount = self.maxNewSynapseCount proto.permanenceIncrement = self.permanenceIncrement proto.permanenceDecrement = self.permanenceDecrement proto.predictedSegmentDecrement = self.predictedSegmentDecrement self.connections.write(proto.connections) self._random.write(proto.random) proto.activeCells = list(self.activeCells) proto.predictiveCells = list(self.predictiveCells) proto.activeSegments = list(self.activeSegments) proto.winnerCells = list(self.winnerCells) proto.matchingSegments = list(self.matchingSegments) proto.matchingCells = list(self.matchingCells) @classmethod def read(cls, proto): """ Reads deserialized data from proto object @param proto (DynamicStructBuilder) Proto object @return (TemporalMemory) TemporalMemory instance """ tm = object.__new__(cls) tm.columnDimensions = list(proto.columnDimensions) tm.cellsPerColumn = int(proto.cellsPerColumn) tm.activationThreshold = int(proto.activationThreshold) tm.initialPermanence = proto.initialPermanence tm.connectedPermanence = proto.connectedPermanence tm.minThreshold = int(proto.minThreshold) tm.maxNewSynapseCount = int(proto.maxNewSynapseCount) tm.permanenceIncrement = proto.permanenceIncrement tm.permanenceDecrement = proto.permanenceDecrement tm.predictedSegmentDecrement = proto.predictedSegmentDecrement tm.connections = Connections.read(proto.connections) tm._random = Random() tm._random.read(proto.random) tm.activeCells = set([int(x) for x in proto.activeCells]) tm.predictiveCells = set([int(x) for x in proto.predictiveCells]) tm.activeSegments = set([int(x) for x in proto.activeSegments]) tm.winnerCells = set([int(x) for x in proto.winnerCells]) tm.matchingSegments = set([int(x) for x in proto.matchingSegments]) tm.matchingCells = set([int(x) for x in proto.matchingCells]) return tm def __eq__(self, other): """ Equality operator for TemporalMemory instances. Checks if two instances are functionally identical (might have different internal state). 
@param other (TemporalMemory) TemporalMemory instance to compare to """ if self.columnDimensions != other.columnDimensions: return False if self.cellsPerColumn != other.cellsPerColumn: return False if self.activationThreshold != other.activationThreshold: return False if abs(self.initialPermanence - other.initialPermanence) > EPSILON: return False if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON: return False if self.minThreshold != other.minThreshold: return False if self.maxNewSynapseCount != other.maxNewSynapseCount: return False if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON: return False if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON: return False if abs(self.predictedSegmentDecrement - other.predictedSegmentDecrement) > EPSILON: return False if self.connections != other.connections: return False if self.activeCells != other.activeCells: return False if self.predictiveCells != other.predictiveCells: return False if self.winnerCells != other.winnerCells: return False if self.matchingSegments != other.matchingSegments: return False if self.matchingCells != other.matchingCells: return False return True def __ne__(self, other): """ Non-equality operator for TemporalMemory instances. Checks if two instances are not functionally identical (might have different internal state). @param other (TemporalMemory) TemporalMemory instance to compare to """ return not self.__eq__(other) def _validateColumn(self, column): """ Raises an error if column index is invalid. @param column (int) Column index """ if column >= self.numberOfColumns() or column < 0: raise IndexError("Invalid column") def _validateCell(self, cell): """ Raises an error if cell index is invalid. @param cell (int) Cell index """ if cell >= self.numberOfCells() or cell < 0: raise IndexError("Invalid cell") @classmethod def getCellIndices(cls, cells): return [cls.getCellIndex(c) for c in cells] @staticmethod def getCellIndex(cell): return cell
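# The class above is typically driven one timestep at a time: call compute()
# with the set of active column indices for the current input, then read cell
# state back through the getters. A minimal usage sketch; sequenceOfSDRs is a
# hypothetical iterable of column-index sets, and the class is assumed to be
# imported already.
tm = TemporalMemory(columnDimensions=(2048,),
                    cellsPerColumn=32,
                    predictedSegmentDecrement=0.0004)

for activeColumns in sequenceOfSDRs:
  tm.compute(activeColumns, learn=True)
  active = tm.getActiveCells()          # indices of cells active at t
  predicted = tm.getPredictiveCells()   # cells expected to be active at t+1

tm.reset()  # clear sequence state between independent sequences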
def testComputeActivity(self):
  """
  Creates a sample set of connections, and makes sure that computing the
  activity for a collection of cells with no activity returns the right
  activity data.
  """
  connections = Connections(1024)

  # Cell with 1 segment.
  # Segment with:
  # - 1 connected synapse: active
  # - 2 matching synapses
  segment1a = connections.createSegment(10)
  connections.createSynapse(segment1a, 150, .85)
  connections.createSynapse(segment1a, 151, .15)

  # Cell with 2 segments.
  # Segment with:
  # - 2 connected synapses: 2 active
  # - 3 matching synapses: 3 active
  segment2a = connections.createSegment(20)
  connections.createSynapse(segment2a, 80, .85)
  connections.createSynapse(segment2a, 81, .85)
  synapse = connections.createSynapse(segment2a, 82, .85)
  connections.updateSynapsePermanence(synapse, .15)

  # Segment with:
  # - 2 connected synapses: 1 active, 1 inactive
  # - 3 matching synapses: 2 active, 1 inactive
  # - 1 non-matching synapse: 1 active
  segment2b = connections.createSegment(20)
  connections.createSynapse(segment2b, 50, .85)
  connections.createSynapse(segment2b, 51, .85)
  connections.createSynapse(segment2b, 52, .15)
  connections.createSynapse(segment2b, 53, .05)

  # Cell with 1 segment.
  # Segment with:
  # - 1 non-matching synapse: 1 active
  segment3a = connections.createSegment(30)
  connections.createSynapse(segment3a, 53, .05)

  inputVec = [50, 52, 53, 80, 81, 82, 150, 151]
  active, matching = connections.computeActivity(inputVec, .5, 2, .1, 1)

  self.assertEqual(1, len(active))
  self.assertEqual(segment2a, active[0].segment)
  self.assertEqual(2, active[0].overlap)

  self.assertEqual(3, len(matching))
  self.assertEqual(segment1a, matching[0].segment)
  self.assertEqual(2, matching[0].overlap)
  self.assertEqual(segment2a, matching[1].segment)
  self.assertEqual(3, matching[1].overlap)
  self.assertEqual(segment2b, matching[2].segment)
  self.assertEqual(2, matching[2].overlap)
def testDestroySynapsesThenReachLimit(self):
  """
  Destroy some synapses, then verify that the maxSynapsesPerSegment limit is
  still correctly applied.
  """
  connections = Connections(1024, 2, 2)

  segment = connections.createSegment(10)

  synapse1 = connections.createSynapse(segment, 201, .85)
  synapse2 = connections.createSynapse(segment, 202, .85)

  self.assertEqual(2, connections.numSynapses())
  connections.destroySynapse(synapse1)
  connections.destroySynapse(synapse2)
  self.assertEqual(0, connections.numSynapses())

  connections.createSynapse(segment, 201, .85)
  self.assertEqual(1, connections.numSynapses())
  connections.createSynapse(segment, 202, .90)
  self.assertEqual(2, connections.numSynapses())

  synapse3 = connections.createSynapse(segment, 203, .8)
  self.assertLess(synapse3.idx, 2)
  self.assertEqual(2, connections.numSynapses())
def testReuseSegmentWithDestroyedSynapses(self):
  """
  Destroy a segment that has a destroyed synapse and a non-destroyed synapse.
  Create a new segment in the same place.  Make sure its synapse count is
  correct.
  """
  connections = Connections(1024)

  segment = connections.createSegment(11)
  synapse1 = connections.createSynapse(segment, 201, .85)
  connections.createSynapse(segment, 202, .85)

  connections.destroySynapse(synapse1)

  self.assertEqual(1, connections.numSynapses(segment))

  connections.destroySegment(segment)

  reincarnated = connections.createSegment(11)

  self.assertEqual(0, connections.numSynapses(reincarnated))
  self.assertEqual(0, len(connections.synapsesForSegment(reincarnated)))
def testPathsNotInvalidatedByOtherDestroys(self):
  """
  Creates segments and synapses, then destroys segments and synapses on
  either side of them and verifies that existing Segment and Synapse
  instances still point to the same segment / synapse as before.
  """
  connections = Connections(1024)

  segment1 = connections.createSegment(11)
  connections.createSegment(12)
  segment3 = connections.createSegment(13)
  connections.createSegment(14)
  segment5 = connections.createSegment(15)

  synapse1 = connections.createSynapse(segment3, 201, .85)
  synapse2 = connections.createSynapse(segment3, 202, .85)
  synapse3 = connections.createSynapse(segment3, 203, .85)
  synapse4 = connections.createSynapse(segment3, 204, .85)
  synapse5 = connections.createSynapse(segment3, 205, .85)

  self.assertEqual(203, connections.dataForSynapse(synapse3).presynapticCell)
  connections.destroySynapse(synapse1)
  self.assertEqual(203, connections.dataForSynapse(synapse3).presynapticCell)
  connections.destroySynapse(synapse5)
  self.assertEqual(203, connections.dataForSynapse(synapse3).presynapticCell)

  connections.destroySegment(segment1)
  self.assertEqual(connections.synapsesForSegment(segment3),
                   [synapse2, synapse3, synapse4])
  connections.destroySegment(segment5)
  self.assertEqual(connections.synapsesForSegment(segment3),
                   [synapse2, synapse3, synapse4])
  self.assertEqual(203, connections.dataForSynapse(synapse3).presynapticCell)
def testCreateSegmentReuse(self):
  connections = Connections(1024, 2)

  segment1 = connections.createSegment(42)
  connections.createSynapse(segment1, 1, .5)
  connections.createSynapse(segment1, 2, .5)

  connections.computeActivity([], .5, 2, .1, 1)
  connections.computeActivity([], .5, 2, .1, 1)
  connections.computeActivity([], .5, 2, .1, 1)

  segment2 = connections.createSegment(42)

  activeSegs, _ = connections.computeActivity([1, 2], .5, 2, .1, 1)
  self.assertEqual(1, len(activeSegs))
  self.assertEqual(segment1, activeSegs[0].segment)

  segment3 = connections.createSegment(42)
  self.assertEqual(segment2.idx, segment3.idx)
def setUp(self):
  self.connections = Connections(2048 * 32)
def __init__(self,
             columnDimensions=(2048,),
             cellsPerColumn=32,
             activationThreshold=13,
             initialPermanence=0.21,
             connectedPermanence=0.50,
             minThreshold=10,
             maxNewSynapseCount=20,
             permanenceIncrement=0.10,
             permanenceDecrement=0.10,
             predictedSegmentDecrement=0.0,
             maxSegmentsPerCell=255,
             maxSynapsesPerSegment=255,
             seed=42,
             **kwargs):
  """
  @param columnDimensions (list) Dimensions of the column space

  @param cellsPerColumn (int) Number of cells per column

  @param activationThreshold (int) If the number of active connected synapses
         on a segment is at least this threshold, the segment is said to be
         active.

  @param initialPermanence (float) Initial permanence of a new synapse.

  @param connectedPermanence (float) If the permanence value for a synapse is
         greater than this value, it is said to be connected.

  @param minThreshold (int) If the number of synapses active on a segment is
         at least this threshold, it is selected as the best matching cell in
         a bursting column.

  @param maxNewSynapseCount (int) The maximum number of synapses added to a
         segment during learning.

  @param permanenceIncrement (float) Amount by which permanences of synapses
         are incremented during learning.

  @param permanenceDecrement (float) Amount by which permanences of synapses
         are decremented during learning.

  @param predictedSegmentDecrement (float) Amount by which active permanences
         of synapses of previously predicted but inactive segments are
         decremented.

  @param maxSegmentsPerCell (int) The maximum number of segments per cell.

  @param maxSynapsesPerSegment (int) The maximum number of synapses per
         segment.

  @param seed (int) Seed for the random number generator.

  Notes:

  predictedSegmentDecrement: A good value is just a bit larger than
  (the column-level sparsity * permanenceIncrement). So, if column-level
  sparsity is 2% and permanenceIncrement is 0.01, this parameter should be
  something like 4% * 0.01 = 0.0004.
  """
  # Error checking
  if not len(columnDimensions):
    raise ValueError("Number of column dimensions must be greater than 0")

  if cellsPerColumn <= 0:
    raise ValueError("Number of cells per column must be greater than 0")

  if minThreshold > activationThreshold:
    raise ValueError(
      "The min threshold can't be greater than the activation threshold")

  # TODO: Validate all parameters (and add validation tests)

  # Save member variables
  self.columnDimensions = columnDimensions
  self.cellsPerColumn = cellsPerColumn
  self.activationThreshold = activationThreshold
  self.initialPermanence = initialPermanence
  self.connectedPermanence = connectedPermanence
  self.minThreshold = minThreshold
  self.maxNewSynapseCount = maxNewSynapseCount
  self.permanenceIncrement = permanenceIncrement
  self.permanenceDecrement = permanenceDecrement
  self.predictedSegmentDecrement = predictedSegmentDecrement

  # Initialize member variables
  self.connections = Connections(self.numberOfCells(),
                                 maxSegmentsPerCell=maxSegmentsPerCell,
                                 maxSynapsesPerSegment=maxSynapsesPerSegment)
  self._random = Random(seed)

  self.activeCells = []
  self.winnerCells = []
  self.activeSegments = []
  self.matchingSegments = []
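# The docstring's rule of thumb for predictedSegmentDecrement, worked out with
# the example numbers it gives (2% column-level sparsity, an increment of
# 0.01); these figures are illustrative, not measured values.
columnSparsity = 0.02
permanenceIncrement = 0.01

# "A bit larger" than columnSparsity * permanenceIncrement -- the docstring's
# example roughly doubles the sparsity term.
predictedSegmentDecrement = 2 * columnSparsity * permanenceIncrement  # 0.0004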
def testWriteRead(self):
  c1 = Connections(1024)

  # Add data before serializing
  s1 = c1.createSegment(0)
  c1.createSynapse(s1, 254, 0.1173)

  s2 = c1.createSegment(100)
  c1.createSynapse(s2, 20, 0.3)

  c1.createSynapse(s1, 40, 0.3)

  s3 = c1.createSegment(0)
  c1.createSynapse(s3, 0, 0.5)
  c1.createSynapse(s3, 1, 0.5)

  s4 = c1.createSegment(10)
  c1.createSynapse(s4, 0, 0.5)
  c1.createSynapse(s4, 1, 0.5)
  c1.destroySegment(s4)

  proto1 = ConnectionsProto_capnp.ConnectionsProto.new_message()
  c1.write(proto1)

  # Write the proto to a temp file and read it back into a new proto
  with tempfile.TemporaryFile() as f:
    proto1.write(f)
    f.seek(0)
    proto2 = ConnectionsProto_capnp.ConnectionsProto.read(f)

  # Load the deserialized proto
  c2 = Connections.read(proto2)

  # Check that the two connections objects are functionally equal
  self.assertEqual(c1, c2)
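# The same write-to-file / read-back pattern applies to the TemporalMemory
# write() and read() methods shown earlier. A hedged sketch, assuming a
# TemporalMemoryProto capnp schema analogous to ConnectionsProto (the schema
# module name is an assumption) and an existing instance tm1.
proto1 = TemporalMemoryProto_capnp.TemporalMemoryProto.new_message()
tm1.write(proto1)

with tempfile.TemporaryFile() as f:
  proto1.write(f)
  f.seek(0)
  proto2 = TemporalMemoryProto_capnp.TemporalMemoryProto.read(f)

tm2 = TemporalMemory.read(proto2)
assert tm1 == tm2  # __eq__ compares the functional parameters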
class TemporalMemory(object): """ Class implementing the Temporal Memory algorithm. """ def __init__(self, columnDimensions=(2048, ), cellsPerColumn=32, activationThreshold=13, initialPermanence=0.21, connectedPermanence=0.50, minThreshold=10, maxNewSynapseCount=20, permanenceIncrement=0.10, permanenceDecrement=0.10, predictedSegmentDecrement=0.0, maxSegmentsPerCell=255, maxSynapsesPerSegment=255, seed=42, **kwargs): """ @param columnDimensions (list) Dimensions of the column space @param cellsPerColumn (int) Number of cells per column @param activationThreshold (int) If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active. @param initialPermanence (float) Initial permanence of a new synapse @param connectedPermanence (float) If the permanence value for a synapse is greater than this value, it is said to be connected. @param minThreshold (int) If the number of synapses active on a segment is at least this threshold, it is selected as the best matching cell in a bursting column @param maxNewSynapseCount (int) The maximum number of synapses added to a segment during learning @param permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning. @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @param predictedSegmentDecrement (float) Amount by which active permanences of synapses of previously predicted but inactive segments are decremented. @param seed (int) Seed for the random number generator Notes: predictedSegmentDecrement: A good value is just a bit larger than (the column-level sparsity * permanenceIncrement). So, if column-level sparsity is 2% and permanenceIncrement is 0.01, this parameter should be something like 4% * 0.01 = 0.0004). """ # Error checking if not len(columnDimensions): raise ValueError( "Number of column dimensions must be greater than 0") if cellsPerColumn <= 0: raise ValueError( "Number of cells per column must be greater than 0") if minThreshold > activationThreshold: raise ValueError( "The min threshold can't be greater than the activation threshold" ) # TODO: Validate all parameters (and add validation tests) # Save member variables self.columnDimensions = columnDimensions self.cellsPerColumn = cellsPerColumn self.activationThreshold = activationThreshold self.initialPermanence = initialPermanence self.connectedPermanence = connectedPermanence self.minThreshold = minThreshold self.maxNewSynapseCount = maxNewSynapseCount self.permanenceIncrement = permanenceIncrement self.permanenceDecrement = permanenceDecrement self.predictedSegmentDecrement = predictedSegmentDecrement # Initialize member variables self.connections = Connections( self.numberOfCells(), maxSegmentsPerCell=maxSegmentsPerCell, maxSynapsesPerSegment=maxSynapsesPerSegment) self._random = Random(seed) self.activeCells = [] self.winnerCells = [] self.activeSegments = [] self.matchingSegments = [] # ============================== # Main functions # ============================== def compute(self, activeColumns, learn=True): """ Feeds input record through TM, performing inference and learning. @param activeColumns (set) Indices of active columns @param learn (bool) Whether or not learning is enabled """ self.activateCells(sorted(activeColumns), learn) self.activateDendrites(learn) def activateCells(self, activeColumns, learn=True): """ Calculate the active cells, using the current active columns and dendrite segments. Grow and reinforce synapses. 
@param activeColumns (list) A sorted list of active column indices. @param learn (bool) If true, reinforce / punish / grow synapses. Pseudocode: for each column if column is active and has active distal dendrite segments call activatePredictedColumn if column is active and doesn't have active distal dendrite segments call burstColumn if column is inactive and has matching distal dendrite segments call punishPredictedColumn """ prevActiveCells = self.activeCells prevWinnerCells = self.winnerCells self.activeCells = [] self.winnerCells = [] segToCol = lambda segment: int(segment.segment.cell / self. cellsPerColumn) identity = lambda x: x for columnData in groupby2(activeColumns, identity, self.activeSegments, segToCol, self.matchingSegments, segToCol): (column, activeColumns, activeSegmentsOnCol, matchingSegmentsOnCol) = columnData if activeColumns is not None: if activeSegmentsOnCol is not None: cellsToAdd = TemporalMemory.activatePredictedColumn( self.connections, self._random, activeSegmentsOnCol, matchingSegmentsOnCol, prevActiveCells, prevWinnerCells, self.maxNewSynapseCount, self.initialPermanence, self.permanenceIncrement, self.permanenceDecrement, learn) self.activeCells += cellsToAdd self.winnerCells += cellsToAdd else: (cellsToAdd, winnerCell) = TemporalMemory.burstColumn( self.connections, self._random, column, matchingSegmentsOnCol, prevActiveCells, prevWinnerCells, self.cellsPerColumn, self.maxNewSynapseCount, self.initialPermanence, self.permanenceIncrement, self.permanenceDecrement, learn) self.activeCells += cellsToAdd self.winnerCells.append(winnerCell) else: if learn: TemporalMemory.punishPredictedColumn( self.connections, matchingSegmentsOnCol, self.predictedSegmentDecrement, prevActiveCells) def activateDendrites(self, learn=True): """ Calculate dendrite segment activity, using the current active cells. @param learn (bool) If true, segment activations will be recorded. This information is used during segment cleanup. Pseudocode: for each distal dendrite segment with activity >= activationThreshold mark the segment as active for each distal dendrite segment with unconnected activity >= minThreshold mark the segment as matching """ (activeSegments, matchingSegments) = self.connections.computeActivity( self.activeCells, self.connectedPermanence, self.activationThreshold, 0.0, self.minThreshold, learn) self.activeSegments = activeSegments self.matchingSegments = matchingSegments def reset(self): """ Indicates the start of a new sequence and resets the sequence state of the TM. """ self.activeCells = [] self.winnerCells = [] self.activeSegments = [] self.matchingSegments = [] @staticmethod def activatePredictedColumn(connections, random, activeSegments, matchingSegments, prevActiveCells, prevWinnerCells, maxNewSynapseCount, initialPermanence, permanenceIncrement, permanenceDecrement, learn): """ Determines which cells in a predicted column should be added to winner cells list, and learns on the segments that correctly predicted this column. @param connections (Object) Connections for the TM. Gets mutated. @param random (Object) Random number generator. Gets mutated. @param activeSegments (iter) An iterable of SegmentOverlap objects. Active segments for this column, and an overlap for each segment. @param matchingSegments (iter) An iterable of SegmentOverlap objects. Matching segments for this column, and an overlap for each segment. @param prevActiveCells (list) Active cells in `t-1`. @param prevWinnerCells (list) Winner cells in `t-1`. 
@param maxNewSynapseCount (int) The maximum number of synapses added to a segment during learning. @param initialPermanence (float) Initial permanence of a new synapse. @permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning. @permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @param learn (bool) Determines if permanences are adjusted. @return cellsToAdd (list) A list of predicted cells that will be added to active cells and winner cells. Pseudocode: for each cell in the column that has an active distal dendrite segment mark the cell as active mark the cell as a winner cell (learning) for each active distal dendrite segment strengthen active synapses weaken inactive synapses grow synapses to previous winner cells """ cellsToAdd = [] byCell = lambda x: x.segment.cell for cellData in groupby2(activeSegments, byCell, matchingSegments, byCell): (cell, activeSegmentsOnCell, matchingSegmentsOnCell) = cellData if activeSegmentsOnCell is not None: cellsToAdd.append(cell) if learn: # Learn on every active segment. # # For each active segment, get its corresponding matching # segment so that we can use its overlap to compute the # number of synapses to grow. bySegment = lambda x: x.segment for segmentData in groupby2(activeSegmentsOnCell, bySegment, matchingSegmentsOnCell, bySegment): (segment, activeOverlaps, matchingOverlaps) = segmentData if activeOverlaps is not None: # Active segments are a superset of matching segments, # so this iterator must contain a segment (and overlap). matching = matchingOverlaps.next() TemporalMemory.adaptSegment( connections, segment, prevActiveCells, permanenceIncrement, permanenceDecrement) nGrowDesired = maxNewSynapseCount - matching.overlap if nGrowDesired > 0: TemporalMemory.growSynapses( connections, random, segment, nGrowDesired, prevWinnerCells, initialPermanence) return cellsToAdd @staticmethod def burstColumn(connections, random, column, matchingSegments, prevActiveCells, prevWinnerCells, cellsPerColumn, maxNewSynapseCount, initialPermanence, permanenceIncrement, permanenceDecrement, learn): """ Activates all of the cells in an unpredicted active column, chooses a winner cell, and, if learning is turned on, learns on one segment, growing a new segment if necessary. @param connections (Object) Connections for the TM. Gets mutated. @param random (Object) Random number generator. Gets mutated. @param column (int) Index of bursting column. @param matchingSegments (iter) An iterable of SegmentOverlap objects. Matching segments for this column, and an overlap for each segment. @param prevActiveCells (list) Active cells in `t-1`. @param prevWinnerCells (list) Winner cells in `t-1`. @param cellsPerColumn (int) Number of cells per column. @param maxNewSynapseCount (int) The maximum number of synapses added to a segment during learning. @param initialPermanence (float) Initial permanence of a new synapse. @param permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning. @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @param learn (bool) Whether or not learning is enabled. 
@return (tuple) Contains: `cells` (iter), `winnerCell` (int), Pseudocode: mark all cells as active if there are any matching distal dendrite segments find the most active matching segment mark its cell as a winner cell (learning) grow and reinforce synapses to previous winner cells else find the cell with the least segments, mark it as a winner cell (learning) (optimization) if there are prev winner cells add a segment to this winner cell grow synapses to previous winner cells """ start = cellsPerColumn * column cells = xrange(start, start + cellsPerColumn) if matchingSegments is not None: bestMatching = max(matchingSegments, key=lambda seg: seg.overlap) winnerCell = bestMatching.segment.cell if learn: TemporalMemory.adaptSegment(connections, bestMatching.segment, prevActiveCells, permanenceIncrement, permanenceDecrement) nGrowDesired = maxNewSynapseCount - bestMatching.overlap if nGrowDesired > 0: TemporalMemory.growSynapses(connections, random, bestMatching.segment, nGrowDesired, prevWinnerCells, initialPermanence) else: winnerCell = TemporalMemory.leastUsedCell(cells, connections, random) if learn: nGrowExact = min(maxNewSynapseCount, len(prevWinnerCells)) if nGrowExact > 0: segment = connections.createSegment(winnerCell) TemporalMemory.growSynapses(connections, random, segment, nGrowExact, prevWinnerCells, initialPermanence) return cells, winnerCell @staticmethod def punishPredictedColumn(connections, matchingSegments, predictedSegmentDecrement, prevActiveCells): """Punishes the Segments that incorrectly predicted a column to be active. @param connections (Object) Connections for the TM. Gets mutated. @param matchingSegments (iter) An iterable of SegmentOverlap objects. Matching segments for this column, and an overlap for each segment. @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @param prevActiveCells (list) Active cells in `t-1` Pseudocode: for each matching segment in the column weaken active synapses """ if predictedSegmentDecrement > 0.0 and matchingSegments is not None: for matching in matchingSegments: TemporalMemory.adaptSegment(connections, matching.segment, prevActiveCells, -predictedSegmentDecrement, 0.0) # ============================== # Helper functions # ============================== @staticmethod def leastUsedCell(cells, connections, random): """ Gets the cell with the smallest number of segments. Break ties randomly. @param cells (list) Indices of cells @param connections (Object) Connections instance for the tm @param random (Object) Random number generator @return (int) Cell index """ leastUsedCells = [] minNumSegments = float("inf") for cell in cells: numSegments = len(connections.segmentsForCell(cell)) if numSegments < minNumSegments: minNumSegments = numSegments leastUsedCells = [] if numSegments == minNumSegments: leastUsedCells.append(cell) i = random.getUInt32(len(leastUsedCells)) return leastUsedCells[i] @staticmethod def growSynapses(connections, random, segment, nDesiredNewSynapes, prevWinnerCells, initialPermanence): """ Creates nDesiredNewSynapes synapses on the segment passed in if possible, choosing random cells from the previous winner cells that are not already on the segment. @param connections (Object) Connections instance for the tm @param random (Object) Tm object used to generate random numbers @param segment (int) Segment to grow synapses on. 
@params nDesiredNewSynapes (int) Desired number of synapses to grow @params prevWinnerCells (list) Winner cells in `t-1` @param initialPermanence (float) Initial permanence of a new synapse. Notes: The process of writing the last value into the index in the array that was most recently changed is to ensure the same results that we get in the c++ implentation using iter_swap with vectors. """ candidates = list(prevWinnerCells) eligibleEnd = len(candidates) - 1 for synapse in connections.synapsesForSegment(segment): presynapticCell = connections.dataForSynapse( synapse).presynapticCell try: index = candidates[:eligibleEnd + 1].index(presynapticCell) except ValueError: index = -1 if index != -1: candidates[index] = candidates[eligibleEnd] eligibleEnd -= 1 candidatesLength = eligibleEnd + 1 nActual = min(nDesiredNewSynapes, candidatesLength) for _ in range(nActual): rand = random.getUInt32(candidatesLength) connections.createSynapse(segment, candidates[rand], initialPermanence) candidates[rand] = candidates[candidatesLength - 1] candidatesLength -= 1 @staticmethod def adaptSegment(connections, segment, prevActiveCells, permanenceIncrement, permanenceDecrement): """ Updates synapses on segment. Strengthens active synapses; weakens inactive synapses. @param connections (Object) Connections instance for the tm @param segment (int) Segment to adapt @param prevActiveCells (list) Active cells in `t-1` @param permanenceIncrement (float) Amount to increment active synapses @param permanenceDecrement (float) Amount to decrement inactive synapses """ for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) permanence = synapseData.permanence if binSearch(prevActiveCells, synapseData.presynapticCell) != -1: permanence += permanenceIncrement else: permanence -= permanenceDecrement # Keep permanence within min/max bounds permanence = max(0.0, min(1.0, permanence)) if permanence < EPSILON: connections.destroySynapse(synapse) else: connections.updateSynapsePermanence(synapse, permanence) if connections.numSynapses(segment) == 0: connections.destroySegment(segment) def columnForCell(self, cell): """ Returns the index of the column that a cell belongs to. @param cell (int) Cell index @return (int) Column index """ self._validateCell(cell) return int(cell / self.cellsPerColumn) def cellsForColumn(self, column): """ Returns the indices of cells that belong to a column. @param column (int) Column index @return (list) Cell indices """ self._validateColumn(column) start = self.cellsPerColumn * column end = start + self.cellsPerColumn return range(start, end) def numberOfColumns(self): """ Returns the number of columns in this layer. @return (int) Number of columns """ return reduce(mul, self.columnDimensions, 1) def numberOfCells(self): """ Returns the number of cells in this layer. @return (int) Number of cells """ return self.numberOfColumns() * self.cellsPerColumn def mapCellsToColumns(self, cells): """ Maps cells to the columns they belong to @param cells (set) Cells @return (dict) Mapping from columns to their cells in `cells` """ cellsForColumns = defaultdict(set) for cell in cells: column = self.columnForCell(cell) cellsForColumns[column].add(cell) return cellsForColumns def getActiveCells(self): """ Returns the indices of the active cells. @return (list) Indices of active cells. """ return self.getCellIndices(self.activeCells) def getPredictiveCells(self): """ Returns the indices of the predictive cells. @return (list) Indices of predictive cells. 
""" predictiveCells = set() for activeSegment in self.activeSegments: cell = activeSegment.segment.cell if not cell in predictiveCells: predictiveCells.add(cell) return sorted(predictiveCells) def getWinnerCells(self): """ Returns the indices of the winner cells. @return (list) Indices of winner cells. """ return self.getCellIndices(self.winnerCells) def getCellsPerColumn(self): """ Returns the number of cells per column. @return (int) The number of cells per column. """ return self.cellsPerColumn def getColumnDimensions(self): """ Returns the dimensions of the columns in the region. @return (tuple) Column dimensions """ return self.columnDimensions def getActivationThreshold(self): """ Returns the activation threshold. @return (int) The activation threshold. """ return self.activationThreshold def setActivationThreshold(self, activationThreshold): """ Sets the activation threshold. @param activationThreshold (int) activation threshold. """ self.activationThreshold = activationThreshold def getInitialPermanence(self): """ Get the initial permanence. @return (float) The initial permanence. """ return self.initialPermanence def setInitialPermanence(self, initialPermanence): """ Sets the initial permanence. @param initialPermanence (float) The initial permanence. """ self.initialPermanence = initialPermanence def getMinThreshold(self): """ Returns the min threshold. @return (int) The min threshold. """ return self.minThreshold def setMinThreshold(self, minThreshold): """ Sets the min threshold. @param minThreshold (int) min threshold. """ self.minThreshold = minThreshold def getMaxNewSynapseCount(self): """ Returns the max new synapse count. @return (int) The max new synapse count. """ return self.maxNewSynapseCount def setMaxNewSynapseCount(self, maxNewSynapseCount): """ Sets the max new synapse count. @param maxNewSynapseCount (int) Max new synapse count. """ self.maxNewSynapseCount = maxNewSynapseCount def getPermanenceIncrement(self): """ Get the permanence increment. @return (float) The permanence increment. """ return self.permanenceIncrement def setPermanenceIncrement(self, permanenceIncrement): """ Sets the permanence increment. @param permanenceIncrement (float) The permanence increment. """ self.permanenceIncrement = permanenceIncrement def getPermanenceDecrement(self): """ Get the permanence decrement. @return (float) The permanence decrement. """ return self.permanenceDecrement def setPermanenceDecrement(self, permanenceDecrement): """ Sets the permanence decrement. @param permanenceDecrement (float) The permanence decrement. """ self.permanenceDecrement = permanenceDecrement def getPredictedSegmentDecrement(self): """ Get the predicted segment decrement. @return (float) The predicted segment decrement. """ return self.predictedSegmentDecrement def setPredictedSegmentDecrement(self, predictedSegmentDecrement): """ Sets the predicted segment decrement. @param predictedSegmentDecrement (float) The predicted segment decrement. """ self.predictedSegmentDecrement = predictedSegmentDecrement def getConnectedPermanence(self): """ Get the connected permanence. @return (float) The connected permanence. """ return self.connectedPermanence def setConnectedPermanence(self, connectedPermanence): """ Sets the connected permanence. @param connectedPermanence (float) The connected permanence. """ self.connectedPermanence = connectedPermanence def write(self, proto): """ Writes serialized data to proto object @param proto (DynamicStructBuilder) Proto object """ # capnp fails to save a tuple. 
Let's force columnDimensions to list. proto.columnDimensions = list(self.columnDimensions) proto.cellsPerColumn = self.cellsPerColumn proto.activationThreshold = self.activationThreshold proto.initialPermanence = self.initialPermanence proto.connectedPermanence = self.connectedPermanence proto.minThreshold = self.minThreshold proto.maxNewSynapseCount = self.maxNewSynapseCount proto.permanenceIncrement = self.permanenceIncrement proto.permanenceDecrement = self.permanenceDecrement proto.predictedSegmentDecrement = self.predictedSegmentDecrement self.connections.write(proto.connections) self._random.write(proto.random) proto.activeCells = list(self.activeCells) proto.winnerCells = list(self.winnerCells) activeSegmentOverlaps = \ proto.init('activeSegmentOverlaps', len(self.activeSegments)) for i, active in enumerate(self.activeSegments): activeSegmentOverlaps[i].cell = active.segment.cell activeSegmentOverlaps[i].segment = active.segment.idx activeSegmentOverlaps[i].overlap = active.overlap matchingSegmentOverlaps = \ proto.init('matchingSegmentOverlaps', len(self.matchingSegments)) for i, matching in enumerate(self.matchingSegments): matchingSegmentOverlaps[i].cell = matching.segment.cell matchingSegmentOverlaps[i].segment = matching.segment.idx matchingSegmentOverlaps[i].overlap = matching.overlap @classmethod def read(cls, proto): """ Reads deserialized data from proto object @param proto (DynamicStructBuilder) Proto object @return (TemporalMemory) TemporalMemory instance """ tm = object.__new__(cls) # capnp fails to save a tuple, so proto.columnDimensions was forced to # serialize as a list. We prefer a tuple, however, because columnDimensions # should be regarded as immutable. tm.columnDimensions = tuple(proto.columnDimensions) tm.cellsPerColumn = int(proto.cellsPerColumn) tm.activationThreshold = int(proto.activationThreshold) tm.initialPermanence = proto.initialPermanence tm.connectedPermanence = proto.connectedPermanence tm.minThreshold = int(proto.minThreshold) tm.maxNewSynapseCount = int(proto.maxNewSynapseCount) tm.permanenceIncrement = proto.permanenceIncrement tm.permanenceDecrement = proto.permanenceDecrement tm.predictedSegmentDecrement = proto.predictedSegmentDecrement tm.connections = Connections.read(proto.connections) #pylint: disable=W0212 tm._random = Random() tm._random.read(proto.random) #pylint: enable=W0212 tm.activeCells = [int(x) for x in proto.activeCells] tm.winnerCells = [int(x) for x in proto.winnerCells] tm.activeSegments = [] tm.matchingSegments = [] for i in xrange(len(proto.activeSegmentOverlaps)): protoSegmentOverlap = proto.activeSegmentOverlaps[i] segment = tm.connections.getSegment(protoSegmentOverlap.segment, protoSegmentOverlap.cell) segmentOverlap = SegmentOverlap(segment, protoSegmentOverlap.overlap) tm.activeSegments.append(segmentOverlap) for i in xrange(len(proto.matchingSegmentOverlaps)): protoSegmentOverlap = proto.matchingSegmentOverlaps[i] segment = tm.connections.getSegment(protoSegmentOverlap.segment, protoSegmentOverlap.cell) segmentOverlap = SegmentOverlap(segment, protoSegmentOverlap.overlap) tm.matchingSegments.append(segmentOverlap) return tm def __eq__(self, other): """ Equality operator for TemporalMemory instances. Checks if two instances are functionally identical (might have different internal state). 
@param other (TemporalMemory) TemporalMemory instance to compare to """ if self.columnDimensions != other.columnDimensions: return False if self.cellsPerColumn != other.cellsPerColumn: return False if self.activationThreshold != other.activationThreshold: return False if abs(self.initialPermanence - other.initialPermanence) > EPSILON: return False if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON: return False if self.minThreshold != other.minThreshold: return False if self.maxNewSynapseCount != other.maxNewSynapseCount: return False if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON: return False if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON: return False if abs(self.predictedSegmentDecrement - other.predictedSegmentDecrement) > EPSILON: return False if self.connections != other.connections: return False if self.activeCells != other.activeCells: return False if self.winnerCells != other.winnerCells: return False if self.matchingSegments != other.matchingSegments: return False if self.activeSegments != other.activeSegments: return False return True def __ne__(self, other): """ Non-equality operator for TemporalMemory instances. Checks if two instances are not functionally identical (might have different internal state). @param other (TemporalMemory) TemporalMemory instance to compare to """ return not self.__eq__(other) def _validateColumn(self, column): """ Raises an error if column index is invalid. @param column (int) Column index """ if column >= self.numberOfColumns() or column < 0: raise IndexError("Invalid column") def _validateCell(self, cell): """ Raises an error if cell index is invalid. @param cell (int) Cell index """ if cell >= self.numberOfCells() or cell < 0: raise IndexError("Invalid cell") @classmethod def getCellIndices(cls, cells): """ Returns the indices of the cells passed in. @param cells (list) cells to find the indices of """ return [cls.getCellIndex(c) for c in cells] @staticmethod def getCellIndex(cell): """ Returns the index of the cell @param cell (int) cell to find the index of """ return cell
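# ---------------------------------------------------------------------------
# Illustrative usage sketch for the TemporalMemory class defined above. This
# demo is not part of the algorithm: the parameter values are arbitrary toy
# settings chosen so a tiny example trains quickly, and the behavioural
# comments describe what one would typically expect rather than guarantees.
def _demoSequenceLearning(TMClass=TemporalMemory):
  """Train the TM above on a short repeating sequence of column SDRs.

  The default argument captures the class defined directly above, since the
  name TemporalMemory is rebound later in this file.
  """
  tm = TMClass(columnDimensions=(64,), cellsPerColumn=4,
               activationThreshold=2, minThreshold=1,
               maxNewSynapseCount=3, seed=42)

  # Three "letters" A -> B -> C, each encoded as a small set of active columns.
  sequence = [set([0, 1, 2, 3]), set([10, 11, 12, 13]), set([20, 21, 22, 23])]

  for _ in range(10):                 # present the sequence several times
    for activeColumns in sequence:
      tm.compute(activeColumns, learn=True)
    tm.reset()                        # treat each presentation as independent

  # Replay A then B without learning; after enough repetitions, cells in C's
  # columns (20-23) should typically appear among the predictive cells.
  tm.compute(sequence[0], learn=False)
  tm.compute(sequence[1], learn=False)
  return tm.getPredictiveCells()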
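# Worked example of the predictedSegmentDecrement guidance in the constructor
# docstrings above: choose a value "a bit larger than" column-level sparsity
# times permanenceIncrement. The factor of 2 below is one reading of "a bit
# larger", not a prescribed rule; with 2% sparsity and an increment of 0.01 it
# reproduces the 0.0004 quoted in the docstring (2 * 0.02 * 0.01 = 0.0004).
def _suggestedPredictedSegmentDecrement(columnSparsity, permanenceIncrement):
  """Rough starting point for predictedSegmentDecrement; tune empirically."""
  return 2.0 * columnSparsity * permanenceIncrement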
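# The activateCells method above leans on a `groupby2` helper to walk active
# columns, active segments and matching segments in a single pass. The sketch
# below is an approximation of its behaviour inferred from that call site, not
# the library implementation: given alternating (sorted iterable, key function)
# pairs, it visits the union of keys in ascending order and yields the key
# followed by one entry per input -- an iterator over that input's items with
# that key, or None when the input has no such items.
import itertools

def _groupby2Sketch(*args):
  """Illustrative merge-by-key over pre-sorted inputs."""
  sequences = args[0::2]
  keyFns = args[1::2]
  grouped = [dict((k, list(g)) for k, g in itertools.groupby(seq, keyFn))
             for seq, keyFn in zip(sequences, keyFns)]
  for key in sorted(set(k for d in grouped for k in d)):
    yield (key,) + tuple(iter(d[key]) if key in d else None for d in grouped)

# Example: with active columns [2, 5] and a second input whose items map only
# to column 5, the sketch yields (2, <iter>, None) and then (5, <iter>, <iter>),
# which is the shape that activateCells destructures per column.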
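# Standalone illustration of the swap-with-last sampling used by growSynapses
# above: repeatedly pick a random index inside the still-eligible prefix of a
# candidate list, record that value, overwrite the slot with the last eligible
# value, and shrink the eligible region by one. This mirrors the note in
# growSynapses about matching the C++ iter_swap behaviour. The stdlib `random`
# module is used here purely for illustration; the TM itself draws from its
# own seeded Random instance (getUInt32) so results stay reproducible and
# consistent with the C++ port.
import random as _pyRandom

def _sampleWithoutReplacement(candidates, nDesired, rng=_pyRandom):
  """Return up to nDesired distinct values drawn from candidates."""
  items = list(candidates)
  length = len(items)
  picked = []
  for _ in range(min(nDesired, length)):
    i = rng.randrange(length)        # choose among the eligible prefix
    picked.append(items[i])
    items[i] = items[length - 1]     # move the last eligible value into slot i
    length -= 1                      # shrink the eligible region
  return picked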
class TemporalMemory(object): """ Class implementing the Temporal Memory algorithm. """ def __init__(self, columnDimensions=(2048,), cellsPerColumn=32, activationThreshold=13, initialPermanence=0.21, connectedPermanence=0.50, minThreshold=10, maxNewSynapseCount=20, permanenceIncrement=0.10, permanenceDecrement=0.10, predictedSegmentDecrement = 0.004, seed=42): """ @param columnDimensions (list) Dimensions of the column space @param cellsPerColumn (int) Number of cells per column @param activationThreshold (int) If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active. @param initialPermanence (float) Initial permanence of a new synapse. @param connectedPermanence (float) If the permanence value for a synapse is greater than this value, it is said to be connected. @param minThreshold (int) If the number of synapses active on a segment is at least this threshold, it is selected as the best matching cell in a bursting column. @param maxNewSynapseCount (int) The maximum number of synapses added to a segment during learning. @param permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning. @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @param predictedSegmentDecrement (float) Amount by which active permanences of synapses of previously predicted but inactive segments are decremented. @param seed (int) Seed for the random number generator. """ # Error checking if not len(columnDimensions): raise ValueError("Number of column dimensions must be greater than 0") if not cellsPerColumn > 0: raise ValueError("Number of cells per column must be greater than 0") # TODO: Validate all parameters (and add validation tests) # Save member variables self.columnDimensions = columnDimensions self.cellsPerColumn = cellsPerColumn self.activationThreshold = activationThreshold self.initialPermanence = initialPermanence self.connectedPermanence = connectedPermanence self.minThreshold = minThreshold self.maxNewSynapseCount = maxNewSynapseCount self.permanenceIncrement = permanenceIncrement self.permanenceDecrement = permanenceDecrement self.predictedSegmentDecrement = predictedSegmentDecrement # Initialize member variables self.connections = Connections(self.numberOfCells()) self._random = Random(seed) self.activeCells = set() self.predictiveCells = set() self.activeSegments = set() self.winnerCells = set() self.matchingSegments = set() self.matchingCells = set() # ============================== # Main functions # ============================== def compute(self, activeColumns, learn=True): """ Feeds input record through TM, performing inference and learning. Updates member variables with new state. 
@param activeColumns (set) Indices of active columns in `t` """ (activeCells, winnerCells, activeSegments, predictiveCells, predictedColumns, matchingSegments, matchingCells) = self.computeFn(activeColumns, self.predictiveCells, self.activeSegments, self.activeCells, self.winnerCells, self.matchingSegments, self.matchingCells, self.connections, learn=learn) self.activeCells = activeCells self.winnerCells = winnerCells self.activeSegments = activeSegments self.predictiveCells = predictiveCells self.matchingSegments = matchingSegments self.matchingCells = matchingCells def computeFn(self, activeColumns, prevPredictiveCells, prevActiveSegments, prevActiveCells, prevWinnerCells, prevMatchingSegments, prevMatchingCells, connections, learn=True): """ 'Functional' version of compute. Returns new state. @param activeColumns (set) Indices of active columns in `t` @param prevPredictiveCells (set) Indices of predictive cells in `t-1` @param prevActiveSegments (set) Indices of active segments in `t-1` @param prevActiveCells (set) Indices of active cells in `t-1` @param prevWinnerCells (set) Indices of winner cells in `t-1` @param prevMatchingSegments (set) Indices of matching segments in `t-1` @param prevMatchingCells (set) Indices of matching cells in `t-1` @param connections (Connections) Connectivity of layer @param learn (bool) Whether or not learning is enabled @return (tuple) Contains: `activeCells` (set), `winnerCells` (set), `activeSegments` (set), `predictiveCells` (set), 'matchingSegments'(set), 'matchingCells' (set) """ activeCells = set() winnerCells = set() (_activeCells, _winnerCells, predictedColumns, predictedInactiveCells) = self.activateCorrectlyPredictiveCells( prevPredictiveCells, prevMatchingCells, activeColumns) activeCells.update(_activeCells) winnerCells.update(_winnerCells) (_activeCells, _winnerCells, learningSegments) = self.burstColumns(activeColumns, predictedColumns, prevActiveCells, prevWinnerCells, connections) activeCells.update(_activeCells) winnerCells.update(_winnerCells) if learn: self.learnOnSegments(prevActiveSegments, learningSegments, prevActiveCells, winnerCells, prevWinnerCells, connections, predictedInactiveCells, prevMatchingSegments) (activeSegments, predictiveCells, matchingSegments, matchingCells) = self.computePredictiveCells(activeCells, connections) return (activeCells, winnerCells, activeSegments, predictiveCells, predictedColumns, matchingSegments, matchingCells) def reset(self): """ Indicates the start of a new sequence. Resets sequence state of the TM. """ self.activeCells = set() self.predictiveCells = set() self.activeSegments = set() self.winnerCells = set() # ============================== # Phases # ============================== def activateCorrectlyPredictiveCells(self, prevPredictiveCells, prevMatchingCells, activeColumns): """ Phase 1: Activate the correctly predictive cells. 
Pseudocode: - for each prev predictive cell - if in active column - mark it as active - mark it as winner cell - mark column as predicted - if not in active column - mark it as a predicted but inactive cell @param prevPredictiveCells (set) Indices of predictive cells in `t-1` @param prevMatchingCells (set) Indices of matching cells in `t-1` @param activeColumns (set) Indices of active columns in `t` @return (tuple) Contains: `activeCells` (set), `winnerCells` (set), `predictedColumns` (set), `predictedInactiveCells` (set) """ activeCells = set() winnerCells = set() predictedColumns = set() predictedInactiveCells = set() for cell in prevPredictiveCells: column = self.columnForCell(cell) if column in activeColumns: activeCells.add(cell) winnerCells.add(cell) predictedColumns.add(column) if self.predictedSegmentDecrement > 0: for cell in prevMatchingCells: column = self.columnForCell(cell) if column not in activeColumns: predictedInactiveCells.add(cell) return activeCells, winnerCells, predictedColumns, predictedInactiveCells def burstColumns(self, activeColumns, predictedColumns, prevActiveCells, prevWinnerCells, connections): """ Phase 2: Burst unpredicted columns. Pseudocode: - for each unpredicted active column - mark all cells as active - mark the best matching cell as winner cell - (learning) - if it has no matching segment - (optimization) if there are prev winner cells - add a segment to it - mark the segment as learning @param activeColumns (set) Indices of active columns in `t` @param predictedColumns (set) Indices of predicted columns in `t` @param prevActiveCells (set) Indices of active cells in `t-1` @param prevWinnerCells (set) Indices of winner cells in `t-1` @param connections (Connections) Connectivity of layer @return (tuple) Contains: `activeCells` (set), `winnerCells` (set), `learningSegments` (set) """ activeCells = set() winnerCells = set() learningSegments = set() unpredictedColumns = activeColumns - predictedColumns for column in unpredictedColumns: cells = self.cellsForColumn(column) activeCells.update(cells) (bestCell, bestSegment) = self.bestMatchingCell(cells, prevActiveCells, connections) winnerCells.add(bestCell) if bestSegment is None and len(prevWinnerCells): bestSegment = connections.createSegment(bestCell) if bestSegment is not None: learningSegments.add(bestSegment) return activeCells, winnerCells, learningSegments def learnOnSegments(self, prevActiveSegments, learningSegments, prevActiveCells, winnerCells, prevWinnerCells, connections, predictedInactiveCells, prevMatchingSegments): """ Phase 3: Perform learning by adapting segments. 
Pseudocode: - (learning) for each prev active or learning segment - if learning segment or from winner cell - strengthen active synapses - weaken inactive synapses - if learning segment - add some synapses to the segment - subsample from prev winner cells - if predictedSegmentDecrement > 0 - for each previously matching segment - if cell is a predicted inactive cell - weaken active synapses but don't touch inactive synapses @param prevActiveSegments (set) Indices of active segments in `t-1` @param learningSegments (set) Indices of learning segments in `t` @param prevActiveCells (set) Indices of active cells in `t-1` @param winnerCells (set) Indices of winner cells in `t` @param prevWinnerCells (set) Indices of winner cells in `t-1` @param connections (Connections) Connectivity of layer @param predictedInactiveCells (set) Indices of predicted inactive cells @param prevMatchingSegments (set) Indices of matching segments in `t-1` """ for segment in prevActiveSegments | learningSegments: isLearningSegment = segment in learningSegments isFromWinnerCell = connections.cellForSegment(segment) in winnerCells activeSynapses = self.activeSynapsesForSegment( segment, prevActiveCells, connections) if isLearningSegment or isFromWinnerCell: self.adaptSegment(segment, activeSynapses, connections, self.permanenceIncrement, self.permanenceDecrement) if isLearningSegment: n = self.maxNewSynapseCount - len(activeSynapses) for presynapticCell in self.pickCellsToLearnOn(n, segment, prevWinnerCells, connections): connections.createSynapse(segment, presynapticCell, self.initialPermanence) if self.predictedSegmentDecrement > 0: for segment in prevMatchingSegments: isPredictedInactiveCell = connections.cellForSegment(segment) in predictedInactiveCells activeSynapses = self.activeSynapsesForSegment( segment, prevActiveCells, connections) if isPredictedInactiveCell: self.adaptSegment(segment, activeSynapses, connections, -self.predictedSegmentDecrement, 0.0) def computePredictiveCells(self, activeCells, connections): """ Phase 4: Compute predictive cells due to lateral input on distal dendrites. Pseudocode: - for each distal dendrite segment with activity >= activationThreshold - mark the segment as active - mark the cell as predictive - if predictedSegmentDecrement > 0 - for each distal dendrite segment with unconnected activity >= minThreshold - mark the segment as matching - mark the cell as matching Forward propagates activity from active cells to the synapses that touch them, to determine which synapses are active. 
@param activeCells (set) Indices of active cells in `t` @param connections (Connections) Connectivity of layer @return (tuple) Contains: `activeSegments` (set), `predictiveCells` (set), `matchingSegments` (set), `matchingCells` (set) """ numActiveConnectedSynapsesForSegment = defaultdict(int) numActiveSynapsesForSegment = defaultdict(int) activeSegments = set() predictiveCells = set() matchingSegments = set() matchingCells = set() for cell in activeCells: for synapseData in connections.synapsesForPresynapticCell(cell).values(): segment = synapseData.segment permanence = synapseData.permanence if permanence >= self.connectedPermanence: numActiveConnectedSynapsesForSegment[segment] += 1 if (numActiveConnectedSynapsesForSegment[segment] >= self.activationThreshold): activeSegments.add(segment) predictiveCells.add(connections.cellForSegment(segment)) if permanence > 0 and self.predictedSegmentDecrement > 0: numActiveSynapsesForSegment[segment] += 1 if numActiveSynapsesForSegment[segment] >= self.minThreshold: matchingSegments.add(segment) matchingCells.add(connections.cellForSegment(segment)) return activeSegments, predictiveCells, matchingSegments, matchingCells # ============================== # Helper functions # ============================== def bestMatchingCell(self, cells, activeCells, connections): """ Gets the cell with the best matching segment (see `TM.bestMatchingSegment`) that has the largest number of active synapses of all best matching segments. If none were found, pick the least used cell (see `TM.leastUsedCell`). @param cells (set) Indices of cells @param activeCells (set) Indices of active cells @param connections (Connections) Connectivity of layer @return (tuple) Contains: `cell` (int), `bestSegment` (int) """ maxSynapses = 0 bestCell = None bestSegment = None for cell in cells: segment, numActiveSynapses = self.bestMatchingSegment( cell, activeCells, connections) if segment is not None and numActiveSynapses > maxSynapses: maxSynapses = numActiveSynapses bestCell = cell bestSegment = segment if bestCell is None: bestCell = self.leastUsedCell(cells, connections) return bestCell, bestSegment def bestMatchingSegment(self, cell, activeCells, connections): """ Gets the segment on a cell with the largest number of activate synapses, including all synapses with non-zero permanences. @param cell (int) Cell index @param activeCells (set) Indices of active cells @param connections (Connections) Connectivity of layer @return (tuple) Contains: `segment` (int), `connectedActiveSynapses` (set) """ maxSynapses = self.minThreshold bestSegment = None bestNumActiveSynapses = None for segment in connections.segmentsForCell(cell): numActiveSynapses = 0 for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) if ( (synapseData.presynapticCell in activeCells) and synapseData.permanence > 0): numActiveSynapses += 1 if numActiveSynapses >= maxSynapses: maxSynapses = numActiveSynapses bestSegment = segment bestNumActiveSynapses = numActiveSynapses return bestSegment, bestNumActiveSynapses def leastUsedCell(self, cells, connections): """ Gets the cell with the smallest number of segments. Break ties randomly. 
@param cells (set) Indices of cells @param connections (Connections) Connectivity of layer @return (int) Cell index """ leastUsedCells = set() minNumSegments = float("inf") for cell in cells: numSegments = len(connections.segmentsForCell(cell)) if numSegments < minNumSegments: minNumSegments = numSegments leastUsedCells = set() if numSegments == minNumSegments: leastUsedCells.add(cell) i = self._random.getUInt32(len(leastUsedCells)) return sorted(leastUsedCells)[i] @staticmethod def activeSynapsesForSegment(segment, activeCells, connections): """ Returns the synapses on a segment that are active due to lateral input from active cells. @param segment (int) Segment index @param activeCells (set) Indices of active cells @param connections (Connections) Connectivity of layer @return (set) Indices of active synapses on segment """ synapses = set() for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) if synapseData.presynapticCell in activeCells: synapses.add(synapse) return synapses def adaptSegment(self, segment, activeSynapses, connections, permanenceIncrement, permanenceDecrement): """ Updates synapses on segment. Strengthens active synapses; weakens inactive synapses. @param segment (int) Segment index @param activeSynapses (set) Indices of active synapses @param connections (Connections) Connectivity of layer @param permanenceIncrement (float) Amount to increment active synapses @param permanenceDecrement (float) Amount to decrement inactive synapses """ for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) permanence = synapseData.permanence if synapse in activeSynapses: permanence += permanenceIncrement else: permanence -= permanenceDecrement # Keep permanence within min/max bounds permanence = max(0.0, min(1.0, permanence)) connections.updateSynapsePermanence(synapse, permanence) def pickCellsToLearnOn(self, n, segment, winnerCells, connections): """ Pick cells to form distal connections to. TODO: Respect topology and learningRadius @param n (int) Number of cells to pick @param segment (int) Segment index @param winnerCells (set) Indices of winner cells in `t` @param connections (Connections) Connectivity of layer @return (set) Indices of cells picked """ candidates = set(winnerCells) # Remove cells that are already synapsed on by this segment for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) presynapticCell = synapseData.presynapticCell if presynapticCell in candidates: candidates.remove(presynapticCell) n = min(n, len(candidates)) candidates = sorted(candidates) cells = set() # Pick n cells randomly for _ in range(n): i = self._random.getUInt32(len(candidates)) cells.add(candidates[i]) del candidates[i] return cells def columnForCell(self, cell): """ Returns the index of the column that a cell belongs to. @param cell (int) Cell index @return (int) Column index """ self._validateCell(cell) return int(cell / self.cellsPerColumn) def cellsForColumn(self, column): """ Returns the indices of cells that belong to a column. @param column (int) Column index @return (set) Cell indices """ self._validateColumn(column) start = self.cellsPerColumn * column end = start + self.cellsPerColumn return set([cell for cell in range(start, end)]) def numberOfColumns(self): """ Returns the number of columns in this layer. 
@return (int) Number of columns """ return reduce(mul, self.columnDimensions, 1) def numberOfCells(self): """ Returns the number of cells in this layer. @return (int) Number of cells """ return self.numberOfColumns() * self.cellsPerColumn def mapCellsToColumns(self, cells): """ Maps cells to the columns they belong to @param cells (set) Cells @return (dict) Mapping from columns to their cells in `cells` """ cellsForColumns = defaultdict(set) for cell in cells: column = self.columnForCell(cell) cellsForColumns[column].add(cell) return cellsForColumns def write(self, proto): """ Writes serialized data to proto object @param proto (DynamicStructBuilder) Proto object """ proto.columnDimensions = self.columnDimensions proto.cellsPerColumn = self.cellsPerColumn proto.activationThreshold = self.activationThreshold proto.initialPermanence = self.initialPermanence proto.connectedPermanence = self.connectedPermanence proto.minThreshold = self.minThreshold proto.maxNewSynapseCount = self.maxNewSynapseCount proto.permanenceIncrement = self.permanenceIncrement proto.permanenceDecrement = self.permanenceDecrement proto.predictedSegmentDecrement = self.predictedSegmentDecrement self.connections.write(proto.connections) self._random.write(proto.random) proto.activeCells = list(self.activeCells) proto.predictiveCells = list(self.predictiveCells) proto.activeSegments = list(self.activeSegments) proto.winnerCells = list(self.winnerCells) proto.matchingSegments = list(self.matchingSegments) proto.matchingCells = list(self.matchingCells) @classmethod def read(cls, proto): """ Reads deserialized data from proto object @param proto (DynamicStructBuilder) Proto object @return (TemporalMemory) TemporalMemory instance """ tm = object.__new__(cls) tm.columnDimensions = list(proto.columnDimensions) tm.cellsPerColumn = int(proto.cellsPerColumn) tm.activationThreshold = int(proto.activationThreshold) tm.initialPermanence = proto.initialPermanence tm.connectedPermanence = proto.connectedPermanence tm.minThreshold = int(proto.minThreshold) tm.maxNewSynapseCount = int(proto.maxNewSynapseCount) tm.permanenceIncrement = proto.permanenceIncrement tm.permanenceDecrement = proto.permanenceDecrement tm.predictedSegmentDecrement = proto.predictedSegmentDecrement tm.connections = Connections.read(proto.connections) tm._random = Random() tm._random.read(proto.random) tm.activeCells = set([int(x) for x in proto.activeCells]) tm.predictiveCells = set([int(x) for x in proto.predictiveCells]) tm.activeSegments = set([int(x) for x in proto.activeSegments]) tm.winnerCells = set([int(x) for x in proto.winnerCells]) tm.matchingSegments = set([int(x) for x in proto.matchingSegments]) tm.matchingCells = set([int(x) for x in proto.matchingCells]) return tm def __eq__(self, other): """ Equality operator for TemporalMemory instances. Checks if two instances are functionally identical (might have different internal state). 
@param other (TemporalMemory) TemporalMemory instance to compare to """ epsilon = 0.0000001 if self.columnDimensions != other.columnDimensions: return False if self.cellsPerColumn != other.cellsPerColumn: return False if self.activationThreshold != other.activationThreshold: return False if abs(self.initialPermanence - other.initialPermanence) > epsilon: return False if abs(self.connectedPermanence - other.connectedPermanence) > epsilon: return False if self.minThreshold != other.minThreshold: return False if self.maxNewSynapseCount != other.maxNewSynapseCount: return False if abs(self.permanenceIncrement - other.permanenceIncrement) > epsilon: return False if abs(self.permanenceDecrement - other.permanenceDecrement) > epsilon: return False if abs(self.predictedSegmentDecrement - other.predictedSegmentDecrement) > epsilon: return False if self.connections != other.connections: return False if self.activeCells != other.activeCells: return False if self.predictiveCells != other.predictiveCells: return False if self.winnerCells != other.winnerCells: return False if self.matchingSegments != other.matchingSegments: return False if self.matchingCells != other.matchingCells: return False return True def __ne__(self, other): """ Non-equality operator for TemporalMemory instances. Checks if two instances are not functionally identical (might have different internal state). @param other (TemporalMemory) TemporalMemory instance to compare to """ return not self.__eq__(other) def _validateColumn(self, column): """ Raises an error if column index is invalid. @param column (int) Column index """ if column >= self.numberOfColumns() or column < 0: raise IndexError("Invalid column") def _validateCell(self, cell): """ Raises an error if cell index is invalid. @param cell (int) Cell index """ if cell >= self.numberOfCells() or cell < 0: raise IndexError("Invalid cell") @classmethod def getCellIndices(cls, cells): return [cls.getCellIndex(c) for c in cells] @staticmethod def getCellIndex(cell): return cell.idx
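# ---------------------------------------------------------------------------
# Illustrative sketch for the set-based, phase-structured TemporalMemory
# defined directly above. It is not part of the algorithm; the parameters are
# arbitrary toy settings. compute() stores the new state on the instance,
# whereas computeFn returns it to the caller (both paths still operate on the
# shared `connections` object).
def _demoComputeFn(TMClass=TemporalMemory):
  """Drive the TM above with compute(), then call computeFn directly.

  The default argument captures the class defined directly above, since the
  name TemporalMemory is rebound later in this file.
  """
  tm = TMClass(columnDimensions=(32,), cellsPerColumn=2,
               activationThreshold=1, minThreshold=1,
               maxNewSynapseCount=4, seed=1)
  sequence = [set([0, 1]), set([5, 6]), set([10, 11])]

  # Ordinary, stateful use: compute() updates the member variables in place.
  for _ in range(5):
    for activeColumns in sequence:
      tm.compute(activeColumns, learn=True)
    tm.reset()

  # The same step via the 'functional' interface: thread the previous state
  # through explicitly and receive the new state as a tuple.
  (activeCells, winnerCells, activeSegments, predictiveCells,
   predictedColumns, matchingSegments, matchingCells) = tm.computeFn(
       sequence[0],
       tm.predictiveCells, tm.activeSegments, tm.activeCells,
       tm.winnerCells, tm.matchingSegments, tm.matchingCells,
       tm.connections, learn=False)
  return predictiveCells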
class TemporalMemory(object): """ Class implementing the Temporal Memory algorithm. """ def __init__(self, columnDimensions=(2048,), cellsPerColumn=32, activationThreshold=13, initialPermanence=0.21, connectedPermanence=0.50, minThreshold=10, maxNewSynapseCount=20, permanenceIncrement=0.10, permanenceDecrement=0.10, predictedSegmentDecrement=0.0, maxSegmentsPerCell=255, maxSynapsesPerSegment=255, seed=42, **kwargs): """ @param columnDimensions (list) Dimensions of the column space @param cellsPerColumn (int) Number of cells per column @param activationThreshold (int) If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active. @param initialPermanence (float) Initial permanence of a new synapse @param connectedPermanence (float) If the permanence value for a synapse is greater than this value, it is said to be connected. @param minThreshold (int) If the number of synapses active on a segment is at least this threshold, it is selected as the best matching cell in a bursting column @param maxNewSynapseCount (int) The maximum number of synapses added to a segment during learning @param permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning. @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @param predictedSegmentDecrement (float) Amount by which active permanences of synapses of previously predicted but inactive segments are decremented. @param seed (int) Seed for the random number generator Notes: predictedSegmentDecrement: A good value is just a bit larger than (the column-level sparsity * permanenceIncrement). So, if column-level sparsity is 2% and permanenceIncrement is 0.01, this parameter should be something like 4% * 0.01 = 0.0004). """ # Error checking if not len(columnDimensions): raise ValueError("Number of column dimensions must be greater than 0") if cellsPerColumn <= 0: raise ValueError("Number of cells per column must be greater than 0") # TODO: Validate all parameters (and add validation tests) # Save member variables self.columnDimensions = columnDimensions self.cellsPerColumn = cellsPerColumn self.activationThreshold = activationThreshold self.initialPermanence = initialPermanence self.connectedPermanence = connectedPermanence self.minThreshold = minThreshold self.maxNewSynapseCount = maxNewSynapseCount self.permanenceIncrement = permanenceIncrement self.permanenceDecrement = permanenceDecrement self.predictedSegmentDecrement = predictedSegmentDecrement # Initialize member variables self.connections = Connections(self.numberOfCells(), maxSegmentsPerCell=maxSegmentsPerCell, maxSynapsesPerSegment=maxSynapsesPerSegment) self._random = Random(seed) self.activeCells = [] self.winnerCells = [] self.activeSegments = [] self.matchingSegments = [] # ============================== # Main functions # ============================== def compute(self, activeColumns, learn=True): """ Feeds input record through TM, performing inference and learning. 
@param activeColumns (set) Indices of active columns @param learn (bool) Whether or not learning is enabled Updates member variables: - `activeCells` (list) - `winnerCells` (list) - `activeSegments` (list) - `matchingSegments`(list) Pseudocode: for each column if column is active and has active distal dendrite segments call activatePredictedColumn if column is active and doesn't have active distal dendrite segments call burstColumn if column is inactive and has matching distal dendrite segments call punishPredictedColumn for each distal dendrite segment with activity >= activationThreshold mark the segment as active for each distal dendrite segment with unconnected activity >= minThreshold mark the segment as matching """ prevActiveCells = self.activeCells prevWinnerCells = self.winnerCells activeColumns = sorted(activeColumns) self.activeCells = [] self.winnerCells = [] segToCol = lambda segment: int(segment.segment.cell / self.cellsPerColumn) identity = lambda column: int(column) for columnData in groupby2(activeColumns, identity, self.activeSegments, segToCol, self.matchingSegments, segToCol): (column, activeColumns, activeSegmentsOnCol, matchingSegmentsOnCol) = columnData if activeColumns is not None: if activeSegmentsOnCol is not None: cellsToAdd = TemporalMemory.activatePredictedColumn( activeSegmentsOnCol, self.connections, learn, self.permanenceDecrement, self.permanenceIncrement, prevActiveCells) self.activeCells += cellsToAdd self.winnerCells += cellsToAdd else: (cellsToAdd, winnerCell) = TemporalMemory.burstColumn(self.cellsPerColumn, column, self.connections, self.initialPermanence, learn, matchingSegmentsOnCol, self.maxNewSynapseCount, self.permanenceDecrement, self.permanenceIncrement, prevActiveCells, prevWinnerCells, self._random) self.activeCells += cellsToAdd self.winnerCells.append(winnerCell) else: if learn: TemporalMemory.punishPredictedColumn(self.connections, matchingSegmentsOnCol, self.predictedSegmentDecrement, prevActiveCells) (activeSegments, matchingSegments) = self.connections.computeActivity( self.activeCells, self.connectedPermanence, self.activationThreshold, 0.0, self.minThreshold, learn) self.activeSegments = activeSegments self.matchingSegments = matchingSegments def reset(self): """ Indicates the start of a new sequence and resets the sequence state of the TM. """ self.activeCells = [] self.winnerCells = [] self.activeSegments = [] self.matchingSegments = [] @staticmethod def activatePredictedColumn(activeSegments, connections, learn, permanenceDecrement, permanenceIncrement, prevActiveCells): """ Determines which cells in a predicted column should be added to winner cells list and calls adaptSegment on the segments that correctly predicted this column. @param activeSegments (iter) A iterable of SegmentOverlap objects for the column compute is operating on that are active @param connections (Object) Connections instance for the tm @param learn (bool) Determines if permanences are adjusted @permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning. @param prevActiveCells (list) Active cells in `t-1` @return cellsToAdd (list) A list of predicted cells that will be added to active cells and winner cells. 
Pseudocode: for each cell in the column that has an active distal dendrite segment mark the cell as active mark the cell as a winner cell (learning) for each active distal dendrite segment strengthen active synapses weaken inactive synapses """ cellsToAdd = [] cell = None for active in activeSegments: newCell = cell != active.segment.cell if newCell: cell = active.segment.cell cellsToAdd.append(cell) if learn: TemporalMemory.adaptSegment(connections, prevActiveCells, permanenceIncrement, permanenceDecrement, active.segment) return cellsToAdd @staticmethod def burstColumn(cellsPerColumn, column, connections, initialPermanence, learn, matchingSegments, maxNewSynapseCount, permanenceDecrement, permanenceIncrement, prevActiveCells, prevWinnerCells, random): """ Activates all of the cells in an unpredicted active column, chooses a winner cell, and, if learning is turned on, either adapts or creates a segment. growSynapses is invoked on this segment. @param cellsPerColumn (int) Number of cells per column @param column (int) Index of bursting column @param connections (Object) Connections instance for the tm @param initialPermanence (float) Initial permanence of a new synapse. @param learn (bool) Whether or not learning is enabled @param matchingSegments (iter) A iterable of SegmentOverlap objects for the column compute is operating on that are matching; None if empty. @param maxNewSynapseCount (int) The maximum number of synapses added to a segment during learning @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning @param permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning @param prevActiveCells (list) Active cells in `t-1` @param prevWinnerCells (list) Winner cells in `t-1` @param random (Object) Random number generator @return (tuple) Contains: `cells` (list), `bestCell` (int), Pseudocode: mark all cells as active if there are any matching distal dendrite segments find the most active matching segment mark its cell as a winner cell (learning) grow and reinforce synapses to previous winner cells else find the cell with the least segments, mark it as a winner cell (learning) (optimization) if there are prev winner cells add a segment to this winner cell grow synapses to previous winner cells """ start = cellsPerColumn * column cells = range(start, start + cellsPerColumn) if matchingSegments is not None: bestSegment = max(matchingSegments, key=lambda seg: seg.overlap) bestCell = bestSegment.segment.cell if learn: TemporalMemory.adaptSegment(connections, prevActiveCells, permanenceIncrement, permanenceDecrement, bestSegment.segment) nGrowDesired = maxNewSynapseCount - bestSegment.overlap if nGrowDesired > 0: TemporalMemory.growSynapses(connections, initialPermanence, nGrowDesired, prevWinnerCells, random, bestSegment.segment) else: bestCell = TemporalMemory.leastUsedCell(cells, connections, random) if learn: nGrowExact = min(maxNewSynapseCount, len(prevWinnerCells)) if nGrowExact > 0: bestSegment = connections.createSegment(bestCell) TemporalMemory.growSynapses(connections, initialPermanence, nGrowExact, prevWinnerCells, random, bestSegment) return cells, bestCell @staticmethod def punishPredictedColumn(connections, matchingSegments, predictedSegmentDecrement, prevActiveCells): """Punishes the Segments that incorrectly predicted a column to be active. 
@param connections (Object) Connections instance for the tm @param matchingSegments (iter) An iterable of SegmentOverlap objects for the column compute is operating on that are matching; None if empty @param predictedSegmentDecrement (float) Amount by which permanences of synapses of previously predicted but inactive segments are decremented. @param prevActiveCells (list) Active cells in `t-1` Pseudocode: for each matching segment in the column weaken active synapses """ if predictedSegmentDecrement > 0.0 and matchingSegments is not None: for segment in matchingSegments: TemporalMemory.adaptSegment(connections, prevActiveCells, -predictedSegmentDecrement, 0.0, segment.segment) # ============================== # Helper functions # ============================== @staticmethod def leastUsedCell(cells, connections, random): """ Gets the cell with the smallest number of segments. Break ties randomly. @param cells (list) Indices of cells @param connections (Object) Connections instance for the tm @param random (Object) Random number generator @return (int) Cell index """ leastUsedCells = [] minNumSegments = float("inf") for cell in cells: numSegments = len(connections.segmentsForCell(cell)) if numSegments < minNumSegments: minNumSegments = numSegments leastUsedCells = [] if numSegments == minNumSegments: leastUsedCells.append(cell) i = random.getUInt32(len(leastUsedCells)) return leastUsedCells[i] @staticmethod def growSynapses(connections, initialPermanence, nDesiredNewSynapes, prevWinnerCells, random, segment): """ Creates nDesiredNewSynapes synapses on the segment passed in if possible, choosing random cells from the previous winner cells that are not already on the segment. @param connections (Object) Connections instance for the tm @param initialPermanence (float) Initial permanence of a new synapse. @param nDesiredNewSynapes (int) Desired number of synapses to grow @param prevWinnerCells (list) Winner cells in `t-1` @param random (Object) Random number generator @param segment (int) Segment to grow synapses on. Notes: The process of writing the last value into the index in the array that was most recently changed is to ensure the same results that we get in the C++ implementation using iter_swap with vectors. """ candidates = list(prevWinnerCells) eligibleEnd = len(candidates) - 1 for synapse in connections.synapsesForSegment(segment): presynapticCell = connections.dataForSynapse(synapse).presynapticCell try: index = candidates[:eligibleEnd + 1].index(presynapticCell) except ValueError: index = -1 if index != -1: candidates[index] = candidates[eligibleEnd] eligibleEnd -= 1 candidatesLength = eligibleEnd + 1 nActual = min(nDesiredNewSynapes, candidatesLength) for _ in range(nActual): rand = random.getUInt32(candidatesLength) connections.createSynapse(segment, candidates[rand], initialPermanence) candidates[rand] = candidates[candidatesLength - 1] candidatesLength -= 1 @staticmethod def adaptSegment(connections, prevActiveCells, permanenceIncrement, permanenceDecrement, segment): """ Updates synapses on segment. Strengthens active synapses; weakens inactive synapses. 
@param connections (Object) Connections instance for the tm @param prevActiveCells (list) Active cells in `t-1` @param permanenceIncrement (float) Amount to increment active synapses @param permanenceDecrement (float) Amount to decrement inactive synapses @param segment (int) Segment to adapt """ for synapse in connections.synapsesForSegment(segment): synapseData = connections.dataForSynapse(synapse) permanence = synapseData.permanence if binSearch(prevActiveCells, synapseData.presynapticCell) != -1: permanence += permanenceIncrement else: permanence -= permanenceDecrement # Keep permanence within min/max bounds permanence = max(0.0, min(1.0, permanence)) if permanence < EPSILON: connections.destroySynapse(synapse) else: connections.updateSynapsePermanence(synapse, permanence) if connections.numSynapses(segment) == 0: connections.destroySegment(segment) def columnForCell(self, cell): """ Returns the index of the column that a cell belongs to. @param cell (int) Cell index @return (int) Column index """ self._validateCell(cell) return int(cell / self.cellsPerColumn) def cellsForColumn(self, column): """ Returns the indices of cells that belong to a column. @param column (int) Column index @return (list) Cell indices """ self._validateColumn(column) start = self.cellsPerColumn * column end = start + self.cellsPerColumn return range(start, end) def numberOfColumns(self): """ Returns the number of columns in this layer. @return (int) Number of columns """ return reduce(mul, self.columnDimensions, 1) def numberOfCells(self): """ Returns the number of cells in this layer. @return (int) Number of cells """ return self.numberOfColumns() * self.cellsPerColumn def mapCellsToColumns(self, cells): """ Maps cells to the columns they belong to @param cells (set) Cells @return (dict) Mapping from columns to their cells in `cells` """ cellsForColumns = defaultdict(set) for cell in cells: column = self.columnForCell(cell) cellsForColumns[column].add(cell) return cellsForColumns def getActiveCells(self): """ Returns the indices of the active cells. @return (list) Indices of active cells. """ return self.getCellIndices(self.activeCells) def getPredictiveCells(self): """ Returns the indices of the predictive cells. @return (list) Indices of predictive cells. """ predictiveCells = set() for activeSegment in self.activeSegments: cell = activeSegment.segment.cell if not cell in predictiveCells: predictiveCells.add(cell) return sorted(predictiveCells) def getWinnerCells(self): """ Returns the indices of the winner cells. @return (list) Indices of winner cells. """ return self.getCellIndices(self.winnerCells) def getCellsPerColumn(self): """ Returns the number of cells per column. @return (int) The number of cells per column. """ return self.cellsPerColumn def getColumnDimensions(self): """ Returns the dimensions of the columns in the region. @return (tuple) Column dimensions """ return self.columnDimensions def getActivationThreshold(self): """ Returns the activation threshold. @return (int) The activation threshold. """ return self.activationThreshold def setActivationThreshold(self, activationThreshold): """ Sets the activation threshold. @param activationThreshold (int) activation threshold. """ self.activationThreshold = activationThreshold def getInitialPermanence(self): """ Get the initial permanence. @return (float) The initial permanence. """ return self.initialPermanence def setInitialPermanence(self, initialPermanence): """ Sets the initial permanence. 
@param initialPermanence (float) The initial permanence. """ self.initialPermanence = initialPermanence def getMinThreshold(self): """ Returns the min threshold. @return (int) The min threshold. """ return self.minThreshold def setMinThreshold(self, minThreshold): """ Sets the min threshold. @param minThreshold (int) min threshold. """ self.minThreshold = minThreshold def getMaxNewSynapseCount(self): """ Returns the max new synapse count. @return (int) The max new synapse count. """ return self.maxNewSynapseCount def setMaxNewSynapseCount(self, maxNewSynapseCount): """ Sets the max new synapse count. @param maxNewSynapseCount (int) Max new synapse count. """ self.maxNewSynapseCount = maxNewSynapseCount def getPermanenceIncrement(self): """ Get the permanence increment. @return (float) The permanence increment. """ return self.permanenceIncrement def setPermanenceIncrement(self, permanenceIncrement): """ Sets the permanence increment. @param permanenceIncrement (float) The permanence increment. """ self.permanenceIncrement = permanenceIncrement def getPermanenceDecrement(self): """ Get the permanence decrement. @return (float) The permanence decrement. """ return self.permanenceDecrement def setPermanenceDecrement(self, permanenceDecrement): """ Sets the permanence decrement. @param permanenceDecrement (float) The permanence decrement. """ self.permanenceDecrement = permanenceDecrement def getPredictedSegmentDecrement(self): """ Get the predicted segment decrement. @return (float) The predicted segment decrement. """ return self.predictedSegmentDecrement def setPredictedSegmentDecrement(self, predictedSegmentDecrement): """ Sets the predicted segment decrement. @param predictedSegmentDecrement (float) The predicted segment decrement. """ self.predictedSegmentDecrement = predictedSegmentDecrement def getConnectedPermanence(self): """ Get the connected permanence. @return (float) The connected permanence. """ return self.connectedPermanence def setConnectedPermanence(self, connectedPermanence): """ Sets the connected permanence. @param connectedPermanence (float) The connected permanence. 
""" self.connectedPermanence = connectedPermanence def write(self, proto): """ Writes serialized data to proto object @param proto (DynamicStructBuilder) Proto object """ proto.columnDimensions = self.columnDimensions proto.cellsPerColumn = self.cellsPerColumn proto.activationThreshold = self.activationThreshold proto.initialPermanence = self.initialPermanence proto.connectedPermanence = self.connectedPermanence proto.minThreshold = self.minThreshold proto.maxNewSynapseCount = self.maxNewSynapseCount proto.permanenceIncrement = self.permanenceIncrement proto.permanenceDecrement = self.permanenceDecrement proto.predictedSegmentDecrement = self.predictedSegmentDecrement self.connections.write(proto.connections) self._random.write(proto.random) proto.activeCells = list(self.activeCells) proto.winnerCells = list(self.winnerCells) activeSegmentOverlaps = \ proto.init('activeSegmentOverlaps', len(self.activeSegments)) for i, active in enumerate(self.activeSegments): activeSegmentOverlaps[i].cell = active.segment.cell activeSegmentOverlaps[i].segment = active.segment.idx activeSegmentOverlaps[i].overlap = active.overlap matchingSegmentOverlaps = \ proto.init('matchingSegmentOverlaps', len(self.matchingSegments)) for i, matching in enumerate(self.matchingSegments): matchingSegmentOverlaps[i].cell = matching.segment.cell matchingSegmentOverlaps[i].segment = matching.segment.idx matchingSegmentOverlaps[i].overlap = matching.overlap @classmethod def read(cls, proto): """ Reads deserialized data from proto object @param proto (DynamicStructBuilder) Proto object @return (TemporalMemory) TemporalMemory instance """ tm = object.__new__(cls) tm.columnDimensions = list(proto.columnDimensions) tm.cellsPerColumn = int(proto.cellsPerColumn) tm.activationThreshold = int(proto.activationThreshold) tm.initialPermanence = proto.initialPermanence tm.connectedPermanence = proto.connectedPermanence tm.minThreshold = int(proto.minThreshold) tm.maxNewSynapseCount = int(proto.maxNewSynapseCount) tm.permanenceIncrement = proto.permanenceIncrement tm.permanenceDecrement = proto.permanenceDecrement tm.predictedSegmentDecrement = proto.predictedSegmentDecrement tm.connections = Connections.read(proto.connections) #pylint: disable=W0212 tm._random = Random() tm._random.read(proto.random) #pylint: enable=W0212 tm.activeCells = [int(x) for x in proto.activeCells] tm.winnerCells = [int(x) for x in proto.winnerCells] tm.activeSegments = [] tm.matchingSegments = [] for i in xrange(len(proto.activeSegmentOverlaps)): protoSegmentOverlap = proto.activeSegmentOverlaps[i] segment = tm.connections.getSegment(protoSegmentOverlap.segment, protoSegmentOverlap.cell) segmentOverlap = SegmentOverlap(segment, protoSegmentOverlap.overlap) tm.activeSegments.append(segmentOverlap) for i in xrange(len(proto.matchingSegmentOverlaps)): protoSegmentOverlap = proto.matchingSegmentOverlaps[i] segment = tm.connections.getSegment(protoSegmentOverlap.segment, protoSegmentOverlap.cell) segmentOverlap = SegmentOverlap(segment, protoSegmentOverlap.overlap) tm.matchingSegments.append(segmentOverlap) return tm def __eq__(self, other): """ Equality operator for TemporalMemory instances. Checks if two instances are functionally identical (might have different internal state). 
@param other (TemporalMemory) TemporalMemory instance to compare to """ if self.columnDimensions != other.columnDimensions: return False if self.cellsPerColumn != other.cellsPerColumn: return False if self.activationThreshold != other.activationThreshold: return False if abs(self.initialPermanence - other.initialPermanence) > EPSILON: return False if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON: return False if self.minThreshold != other.minThreshold: return False if self.maxNewSynapseCount != other.maxNewSynapseCount: return False if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON: return False if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON: return False if abs(self.predictedSegmentDecrement - other.predictedSegmentDecrement) > EPSILON: return False if self.connections != other.connections: return False if self.activeCells != other.activeCells: return False if self.winnerCells != other.winnerCells: return False if self.matchingSegments != other.matchingSegments: return False if self.activeSegments != other.activeSegments: return False return True def __ne__(self, other): """ Non-equality operator for TemporalMemory instances. Checks if two instances are not functionally identical (might have different internal state). @param other (TemporalMemory) TemporalMemory instance to compare to """ return not self.__eq__(other) def _validateColumn(self, column): """ Raises an error if column index is invalid. @param column (int) Column index """ if column >= self.numberOfColumns() or column < 0: raise IndexError("Invalid column") def _validateCell(self, cell): """ Raises an error if cell index is invalid. @param cell (int) Cell index """ if cell >= self.numberOfCells() or cell < 0: raise IndexError("Invalid cell") @classmethod def getCellIndices(cls, cells): """ Returns the indices of the cells passed in. @param cells (list) cells to find the indices of """ return [cls.getCellIndex(c) for c in cells] @staticmethod def getCellIndex(cell): """ Returns the index of the cell @param cell (int) cell to find the index of """ return cell
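The column/cell bookkeeping above is easiest to see in a small usage sketch. This is illustrative only, not part of the implementation; it assumes the class above is importable as TemporalMemory and exercises only the helpers defined in it (columnForCell, cellsForColumn, mapCellsToColumns).

# Minimal sketch: how cells map to columns for the default geometry.
tm = TemporalMemory(columnDimensions=(2048,), cellsPerColumn=32)

assert tm.numberOfColumns() == 2048
assert tm.numberOfCells() == 2048 * 32

# With 32 cells per column, cells 0..31 belong to column 0, 32..63 to column 1, ...
assert tm.columnForCell(0) == 0
assert tm.columnForCell(33) == 1
assert list(tm.cellsForColumn(1)) == list(range(32, 64))

# mapCellsToColumns groups an arbitrary set of cells by their parent column.
grouped = tm.mapCellsToColumns(set([0, 1, 40]))
assert dict(grouped) == {0: set([0, 1]), 1: set([40])}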
def __init__(self,
             columnDimensions=(2048,),
             cellsPerColumn=32,
             activationThreshold=13,
             initialPermanence=0.21,
             connectedPermanence=0.50,
             minThreshold=10,
             maxNewSynapseCount=20,
             permanenceIncrement=0.10,
             permanenceDecrement=0.10,
             predictedSegmentDecrement=0.0,
             maxSegmentsPerCell=255,
             maxSynapsesPerSegment=255,
             seed=42,
             **kwargs):
  """
  @param columnDimensions (list) Dimensions of the column space

  @param cellsPerColumn (int) Number of cells per column

  @param activationThreshold (int) If the number of active connected synapses
         on a segment is at least this threshold, the segment is said to be active.

  @param initialPermanence (float) Initial permanence of a new synapse

  @param connectedPermanence (float) If the permanence value for a synapse is
         greater than this value, it is said to be connected.

  @param minThreshold (int) If the number of synapses active on a segment is at
         least this threshold, it is selected as the best matching cell in a
         bursting column

  @param maxNewSynapseCount (int) The maximum number of synapses added to a
         segment during learning

  @param permanenceIncrement (float) Amount by which permanences of synapses are
         incremented during learning.

  @param permanenceDecrement (float) Amount by which permanences of synapses are
         decremented during learning.

  @param predictedSegmentDecrement (float) Amount by which active permanences of
         synapses of previously predicted but inactive segments are decremented.

  @param maxSegmentsPerCell (int) The maximum number of segments per cell.

  @param maxSynapsesPerSegment (int) The maximum number of synapses per segment.

  @param seed (int) Seed for the random number generator

  Notes:

  predictedSegmentDecrement: A good value is just a bit larger than
  (the column-level sparsity * permanenceIncrement). So, if column-level
  sparsity is 2% and permanenceIncrement is 0.01, this parameter should be
  something like 4% * 0.01 = 0.0004.
  """
  # Error checking
  if not len(columnDimensions):
    raise ValueError("Number of column dimensions must be greater than 0")

  if cellsPerColumn <= 0:
    raise ValueError("Number of cells per column must be greater than 0")

  if minThreshold > activationThreshold:
    raise ValueError(
      "The min threshold can't be greater than the activation threshold")

  # TODO: Validate all parameters (and add validation tests)

  # Save member variables
  self.columnDimensions = columnDimensions
  self.cellsPerColumn = cellsPerColumn
  self.activationThreshold = activationThreshold
  self.initialPermanence = initialPermanence
  self.connectedPermanence = connectedPermanence
  self.minThreshold = minThreshold
  self.maxNewSynapseCount = maxNewSynapseCount
  self.permanenceIncrement = permanenceIncrement
  self.permanenceDecrement = permanenceDecrement
  self.predictedSegmentDecrement = predictedSegmentDecrement

  # Initialize member variables
  self.connections = Connections(self.numberOfCells(),
                                 maxSegmentsPerCell=maxSegmentsPerCell,
                                 maxSynapsesPerSegment=maxSynapsesPerSegment)
  self._random = Random(seed)

  self.activeCells = []
  self.winnerCells = []
  self.activeSegments = []
  self.matchingSegments = []
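The predictedSegmentDecrement rule of thumb in the note above can be made concrete with a short, hedged configuration sketch. The 40-of-2048 active-column figure and the factor of two are illustrative assumptions, not values taken from this codebase.

# Sketch: picking predictedSegmentDecrement from an assumed column-level sparsity.
columnCount = 2048
activeColumnCount = 40                                   # assumed ~2% column-level sparsity
columnSparsity = float(activeColumnCount) / columnCount  # ~0.02
permanenceIncrement = 0.10

# "A bit larger than (column-level sparsity * permanenceIncrement)":
predictedSegmentDecrement = 2 * columnSparsity * permanenceIncrement  # ~0.004

tm = TemporalMemory(columnDimensions=(columnCount,),
                    cellsPerColumn=32,
                    permanenceIncrement=permanenceIncrement,
                    predictedSegmentDecrement=predictedSegmentDecrement,
                    maxSegmentsPerCell=255,
                    maxSynapsesPerSegment=255,
                    seed=42)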
def testPathsNotInvalidatedByOtherDestroys(self):
  """ Creates segments and synapses, then destroys segments and synapses on
  either side of them and verifies that existing Segment and Synapse
  instances still point to the same segment / synapse as before.
  """
  connections = Connections(1024)
  segment1 = connections.createSegment(11)
  connections.createSegment(12)
  segment3 = connections.createSegment(13)
  connections.createSegment(14)
  segment5 = connections.createSegment(15)

  synapse1 = connections.createSynapse(segment3, 201, .85)
  synapse2 = connections.createSynapse(segment3, 202, .85)
  synapse3 = connections.createSynapse(segment3, 203, .85)
  synapse4 = connections.createSynapse(segment3, 204, .85)
  synapse5 = connections.createSynapse(segment3, 205, .85)

  self.assertEqual(203, synapse3.presynapticCell)
  connections.destroySynapse(synapse1)
  self.assertEqual(203, synapse3.presynapticCell)
  connections.destroySynapse(synapse5)
  self.assertEqual(203, synapse3.presynapticCell)

  connections.destroySegment(segment1)
  self.assertEqual(set([synapse2, synapse3, synapse4]),
                   connections.synapsesForSegment(segment3))
  connections.destroySegment(segment5)
  self.assertEqual(set([synapse2, synapse3, synapse4]),
                   connections.synapsesForSegment(segment3))
  self.assertEqual(203, synapse3.presynapticCell)
def testDestroySegment(self):
  """ Creates a segment, destroys it, and makes sure it got destroyed along
  with all of its synapses.
  """
  connections = Connections(1024)
  connections.createSegment(10)
  segment2 = connections.createSegment(20)
  connections.createSegment(30)
  connections.createSegment(40)

  connections.createSynapse(segment2, 80, 0.85)
  connections.createSynapse(segment2, 81, 0.85)
  connections.createSynapse(segment2, 82, 0.15)

  self.assertEqual(4, connections.numSegments())
  self.assertEqual(3, connections.numSynapses())

  connections.destroySegment(segment2)

  self.assertEqual(3, connections.numSegments())
  self.assertEqual(0, connections.numSynapses())

  args = [segment2]
  self.assertRaises(ValueError, connections.synapsesForSegment, *args)

  # The destroyed segment must not show up in subsequent activity
  # computations for its former presynaptic cells.
  active, matching = connections.computeActivity([80, 81, 82], .5, 2, .1, 1)
  self.assertEqual(len(active), 0)
  self.assertEqual(len(matching), 0)
def testDestroySegmentWithDestroyedSynapses(self):
  """ Destroy a segment that has a destroyed synapse and a non-destroyed
  synapse. Make sure nothing gets double-destroyed.
  """
  connections = Connections(1024)

  segment1 = connections.createSegment(11)
  segment2 = connections.createSegment(12)

  connections.createSynapse(segment1, 101, .85)
  synapse2a = connections.createSynapse(segment2, 201, .85)
  connections.createSynapse(segment2, 202, .85)

  self.assertEqual(3, connections.numSynapses())

  connections.destroySynapse(synapse2a)

  self.assertEqual(2, connections.numSegments())
  self.assertEqual(2, connections.numSynapses())

  connections.destroySegment(segment2)

  self.assertEqual(1, connections.numSegments())
  self.assertEqual(1, connections.numSynapses())
def testDestroySegmentsThenReachLimit(self):
  """ Destroy some segments then verify that the maxSegmentsPerCell is still
  correctly applied.
  """
  connections = Connections(1024, 2, 2)

  segment1 = connections.createSegment(11)
  segment2 = connections.createSegment(11)

  self.assertEqual(2, connections.numSegments())
  connections.destroySegment(segment1)
  connections.destroySegment(segment2)
  self.assertEqual(0, connections.numSegments())

  connections.createSegment(11)
  self.assertEqual(1, connections.numSegments())
  connections.createSegment(11)
  self.assertEqual(2, connections.numSegments())
  segment3 = connections.createSegment(11)
  self.assertLess(segment3.idx, 2)
  self.assertEqual(2, connections.numSegments(11))
  self.assertEqual(2, connections.numSegments())
def testReachSegmentLimitMultipleTimes(self):
  """ Hit the maxSynapsesPerSegment threshold multiple times. Make sure it
  works more than once.
  """
  connections = Connections(1024, 2, 2)

  segment = connections.createSegment(10)
  connections.createSynapse(segment, 201, .85)
  self.assertEqual(1, connections.numSynapses())
  connections.createSynapse(segment, 202, .9)
  self.assertEqual(2, connections.numSynapses())
  connections.createSynapse(segment, 203, .8)
  self.assertEqual(2, connections.numSynapses())
  synapse = connections.createSynapse(segment, 204, .8)
  self.assertLess(synapse.idx, 2)
  self.assertEqual(2, connections.numSynapses())
def testDestroySynapsesThenReachLimit(self):
  """ Destroy some synapses then verify that the maxSynapsesPerSegment is
  still correctly applied.
  """
  connections = Connections(1024, 2, 2)

  segment = connections.createSegment(10)

  synapse1 = connections.createSynapse(segment, 201, .85)
  synapse2 = connections.createSynapse(segment, 202, .85)

  self.assertEqual(2, connections.numSynapses())
  connections.destroySynapse(synapse1)
  connections.destroySynapse(synapse2)
  self.assertEqual(0, connections.numSynapses())

  connections.createSynapse(segment, 201, .85)
  self.assertEqual(1, connections.numSynapses())
  connections.createSynapse(segment, 202, .90)
  self.assertEqual(2, connections.numSynapses())

  # Creating a third synapse must not push the segment past its limit.
  connections.createSynapse(segment, 203, .8)
  self.assertEqual(2, connections.numSynapses())
def testCreateSegmentReuse(self):
  """ When a cell is at its maxSegmentsPerCell limit, createSegment reuses the
  slot of the least recently active segment.
  """
  connections = Connections(1024, 2)

  segment1 = connections.createSegment(42)
  connections.createSynapse(segment1, 1, .5)
  connections.createSynapse(segment1, 2, .5)

  # Let some time pass.
  connections.startNewIteration()
  connections.startNewIteration()
  connections.startNewIteration()

  segment2 = connections.createSegment(42)

  connections.startNewIteration()
  connections.recordSegmentActivity(segment1)

  segment3 = connections.createSegment(42)
  self.assertEqual(segment2.idx, segment3.idx)
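The reuse behaviour exercised above can be summarised in a short, hedged sketch. It assumes, as the test implies, that both segment creation and recordSegmentActivity update a segment's recency bookkeeping, and that a cell at its maxSegmentsPerCell limit recycles the slot of its least recently used segment.

# Sketch: segment-slot recycling at the maxSegmentsPerCell limit.
connections = Connections(1024, 2, 2)   # 1024 cells, 2 segments/cell, 2 synapses/segment

first = connections.createSegment(7)
second = connections.createSegment(7)   # cell 7 is now at its segment limit

connections.startNewIteration()
connections.recordSegmentActivity(first)  # `first` is now the more recently used segment

third = connections.createSegment(7)
assert third.idx == second.idx            # the idle segment's slot is recycled
assert connections.numSegments() == 2     # no net growth beyond the limit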