def test_adjustActiveSynapses(self):
  """
  adjustActiveSynapses should only touch synapses on the selected segments
  whose presynaptic input is active, incrementing (or decrementing) their
  permanence by 'delta'. Verify via connected-synapse overlaps afterward.
  """
  testCases = [
    ("Basic test",
     [1, 2, 3], [42, 43, 44], [0, 2], [42, 44], 0.45, 0.1, 0.5, [2, 0, 2]),
    ("Negative increment",
     [1, 2, 3], [42, 43, 44], [0, 2], [42, 44], 0.55, -0.1, 0.5, [1, 3, 1]),
    ("No segments",
     [1, 2, 3], [42, 43, 44], [], [42, 44], 0.45, 0.1, 0.5, [0, 0, 0]),
    ("No active synapses",
     [1, 2, 3], [42, 43, 44], [0, 2], [], 0.45, 0.1, 0.5, [0, 0, 0]),
    ("Delta of zero",
     [1, 2, 3], [42, 43, 44], [0, 2], [42, 44], 0.55, 0.0, 0.5, [3, 3, 3]),
  ]

  for case in testCases:
    (name, cells, inputs, adjustedSegments, activeInputs,
     initialPermanence, delta, connectedPermanence, expected) = case

    conn = SparseMatrixConnections(2048, 2048)
    segs = conn.createSegments(cells)
    conn.growSynapses(segs, inputs, initialPermanence)

    # Adjust only the chosen segments, only at the active inputs.
    conn.adjustActiveSynapses(segs[adjustedSegments], activeInputs, delta)

    overlaps = conn.computeActivity(inputs, connectedPermanence)
    np.testing.assert_equal(overlaps[segs], expected, name)
def test_whenPermanenceFallsBelowZero(self):
  """
  Synapses whose permanence is pushed below zero should be destroyed, for
  every permanence-adjusting API: adjustSynapses (active and inactive
  deltas), adjustActiveSynapses, and adjustInactiveSynapses.
  """
  connections = SparseMatrixConnections(2048, 2048)
  segments = connections.createSegments([1, 2, 3])

  # One thunk per API variant; each drives permanence from 0.05 to -0.01.
  decrements = [
    lambda: connections.adjustSynapses(segments, [42, 43], -0.06, 0.0),
    lambda: connections.adjustSynapses(segments, [], 0.0, -0.06),
    lambda: connections.adjustActiveSynapses(segments, [42, 43], -0.06),
    lambda: connections.adjustInactiveSynapses(segments, [], -0.06),
  ]

  for applyDecrement in decrements:
    connections.growSynapses(segments, [42, 43], 0.05)
    applyDecrement()
    # All synapses fell below zero, so every segment should now be empty.
    np.testing.assert_equal(
      connections.mapSegmentsToSynapseCounts(segments), [0, 0, 0])
class TemporalMemory(object):
  """
  TemporalMemory with basal and apical connections, and with the ability to
  connect to external cells.

  Basal connections are used to implement traditional Temporal Memory. The
  apical connections are used for further disambiguation. If multiple cells in
  a minicolumn have active basal segments, each of those cells is predicted,
  unless one of them also has an active apical segment, in which case only the
  cells with active basal and apical segments are predicted. In other words,
  the apical connections have no effect unless the basal input is a union of
  SDRs (e.g. from bursting minicolumns).

  This TemporalMemory is unaware of whether its basalInput or apicalInput are
  from internal or external cells. They are just cell numbers. The caller knows
  what these cell numbers mean, but the TemporalMemory doesn't. This allows the
  same code to work for various algorithms.

  To implement sequence memory, use

    basalInputDimensions=(numColumns*cellsPerColumn,)

  and call compute like this:

    tm.compute(activeColumns, tm.getActiveCells(), tm.getWinnerCells())
  """

  def __init__(self,
               columnDimensions=(2048,),
               basalInputDimensions=(),
               apicalInputDimensions=(),
               cellsPerColumn=32,
               activationThreshold=13,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               sampleSize=20,
               permanenceIncrement=0.1,
               permanenceDecrement=0.1,
               predictedSegmentDecrement=0.0,
               maxNewSynapseCount=None,
               maxSynapsesPerSegment=-1,
               maxSegmentsPerCell=None,
               seed=42):
    # NOTE: 'maxNewSynapseCount' and 'maxSegmentsPerCell' are accepted for
    # backward compatibility only; the former aliases 'sampleSize', the
    # latter is ignored.
    self.columnDimensions = columnDimensions
    self.numColumns = self._numPoints(columnDimensions)
    self.basalInputDimensions = basalInputDimensions
    self.apicalInputDimensions = apicalInputDimensions

    self.cellsPerColumn = cellsPerColumn
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.minThreshold = minThreshold

    self.sampleSize = sampleSize
    if maxNewSynapseCount is not None:
      print "Parameter 'maxNewSynapseCount' is deprecated. Use 'sampleSize'."
      self.sampleSize = maxNewSynapseCount

    if maxSegmentsPerCell is not None:
      print "Warning: ignoring parameter 'maxSegmentsPerCell'"

    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement
    self.predictedSegmentDecrement = predictedSegmentDecrement
    self.activationThreshold = activationThreshold
    self.maxSynapsesPerSegment = maxSynapsesPerSegment

    # One connections object per dendrite type, each with one row per cell.
    self.basalConnections = SparseMatrixConnections(
      self.numColumns*cellsPerColumn, self._numPoints(basalInputDimensions))
    self.apicalConnections = SparseMatrixConnections(
      self.numColumns*cellsPerColumn, self._numPoints(apicalInputDimensions))
    self.rng = Random(seed)

    self.activeCells = EMPTY_UINT_ARRAY
    self.winnerCells = EMPTY_UINT_ARRAY
    self.prevPredictedCells = EMPTY_UINT_ARRAY

  def reset(self):
    """
    Clear all cell activity, forgetting the current sequence context.
    Segments and synapses are left intact.
    """
    self.activeCells = EMPTY_UINT_ARRAY
    self.winnerCells = EMPTY_UINT_ARRAY
    self.prevPredictedCells = EMPTY_UINT_ARRAY

  def compute(self,
              activeColumns,
              basalInput,
              basalGrowthCandidates,
              apicalInput=EMPTY_UINT_ARRAY,
              apicalGrowthCandidates=EMPTY_UINT_ARRAY,
              learn=True):
    """
    Run one timestep: compute segment activity, choose active and winner
    cells, and (optionally) learn.

    @param activeColumns (numpy array)
    @param basalInput (numpy array)
    @param basalGrowthCandidates (numpy array)
    @param apicalInput (numpy array)
    @param apicalGrowthCandidates (numpy array)
    @param learn (bool)
    """
    # Calculate predictions for this timestep
    (activeBasalSegments,
     matchingBasalSegments,
     basalPotentialOverlaps) = self._calculateSegmentActivity(
       self.basalConnections, basalInput, self.connectedPermanence,
       self.activationThreshold, self.minThreshold)

    (activeApicalSegments,
     matchingApicalSegments,
     apicalPotentialOverlaps) = self._calculateSegmentActivity(
       self.apicalConnections, apicalInput, self.connectedPermanence,
       self.activationThreshold, self.minThreshold)

    predictedCells = self._calculatePredictedCells(activeBasalSegments,
                                                   activeApicalSegments)

    # Calculate active cells: correctly predicted cells stay depolarized;
    # every cell in an unpredicted active column bursts.
    (correctPredictedCells,
     burstingColumns) = np2.setCompare(predictedCells, activeColumns,
                                       predictedCells / self.cellsPerColumn,
                                       rightMinusLeft=True)
    newActiveCells = np.concatenate((correctPredictedCells,
                                     np2.getAllCellsInColumns(
                                       burstingColumns, self.cellsPerColumn)))

    # Calculate learning
    (learningActiveBasalSegments,
     learningMatchingBasalSegments,
     basalSegmentsToPunish,
     newBasalSegmentCells,
     learningCells) = self._calculateBasalLearning(
       activeColumns, burstingColumns, correctPredictedCells,
       activeBasalSegments, matchingBasalSegments, basalPotentialOverlaps)

    (learningActiveApicalSegments,
     learningMatchingApicalSegments,
     apicalSegmentsToPunish,
     newApicalSegmentCells) = self._calculateApicalLearning(
       learningCells, activeColumns, activeApicalSegments,
       matchingApicalSegments, apicalPotentialOverlaps)

    # Learn
    if learn:
      # Learn on existing segments
      for learningSegments in (learningActiveBasalSegments,
                               learningMatchingBasalSegments):
        self._learn(self.basalConnections, self.rng, learningSegments,
                    basalInput, basalGrowthCandidates, basalPotentialOverlaps,
                    self.initialPermanence, self.sampleSize,
                    self.permanenceIncrement, self.permanenceDecrement,
                    self.maxSynapsesPerSegment)

      for learningSegments in (learningActiveApicalSegments,
                               learningMatchingApicalSegments):
        self._learn(self.apicalConnections, self.rng, learningSegments,
                    apicalInput, apicalGrowthCandidates,
                    apicalPotentialOverlaps, self.initialPermanence,
                    self.sampleSize, self.permanenceIncrement,
                    self.permanenceDecrement, self.maxSynapsesPerSegment)

      # Punish incorrect predictions
      if self.predictedSegmentDecrement != 0.0:
        self.basalConnections.adjustActiveSynapses(
          basalSegmentsToPunish, basalInput,
          -self.predictedSegmentDecrement)
        self.apicalConnections.adjustActiveSynapses(
          apicalSegmentsToPunish, apicalInput,
          -self.predictedSegmentDecrement)

      # Grow new segments (only when there is something to connect to)
      if len(basalGrowthCandidates) > 0:
        self._learnOnNewSegments(self.basalConnections, self.rng,
                                 newBasalSegmentCells, basalGrowthCandidates,
                                 self.initialPermanence, self.sampleSize,
                                 self.maxSynapsesPerSegment)
      if len(apicalGrowthCandidates) > 0:
        self._learnOnNewSegments(self.apicalConnections, self.rng,
                                 newApicalSegmentCells,
                                 apicalGrowthCandidates,
                                 self.initialPermanence, self.sampleSize,
                                 self.maxSynapsesPerSegment)

    # Save the results
    self.activeCells = newActiveCells
    self.winnerCells = learningCells
    self.prevPredictedCells = predictedCells

  def _calculateBasalLearning(self,
                              activeColumns,
                              burstingColumns,
                              correctPredictedCells,
                              activeBasalSegments,
                              matchingBasalSegments,
                              basalPotentialOverlaps):
    """
    Basic Temporal Memory learning. Correctly predicted cells always have
    active basal segments, and we learn on these segments. In bursting
    columns, we either learn on an existing basal segment, or we grow a new
    one.

    The only influence apical dendrites have on basal learning is: the apical
    dendrites influence which cells are considered "predicted". So an active
    apical dendrite can keep some basal segments in active columns from
    learning.

    @param activeColumns (numpy array)
    @param burstingColumns (numpy array)
    @param correctPredictedCells (numpy array)
    @param activeBasalSegments (numpy array)
    @param matchingBasalSegments (numpy array)
    @param basalPotentialOverlaps (numpy array)

    @return (tuple)
    - learningActiveBasalSegments (numpy array)
      Active basal segments on correct predicted cells

    - learningMatchingBasalSegments (numpy array)
      Matching basal segments selected for learning in bursting columns

    - basalSegmentsToPunish (numpy array)
      Basal segments that should be punished for predicting an inactive
      column

    - newBasalSegmentCells (numpy array)
      Cells in bursting columns that were selected to grow new basal segments

    - learningCells (numpy array)
      Cells that have learning basal segments or are selected to grow a
      basal segment
    """
    # Correctly predicted columns
    learningActiveBasalSegments = self.basalConnections.filterSegmentsByCell(
      activeBasalSegments, correctPredictedCells)

    cellsForMatchingBasal = self.basalConnections.mapSegmentsToCells(
      matchingBasalSegments)
    matchingCells = np.unique(cellsForMatchingBasal)

    # Split bursting columns into those with at least one matching segment
    # and those with none. ('/' is integer division on these int arrays.)
    (matchingCellsInBurstingColumns,
     burstingColumnsWithNoMatch) = np2.setCompare(
       matchingCells, burstingColumns, matchingCells / self.cellsPerColumn,
       rightMinusLeft=True)

    learningMatchingBasalSegments = self._chooseBestSegmentPerColumn(
      self.basalConnections, matchingCellsInBurstingColumns,
      matchingBasalSegments, basalPotentialOverlaps, self.cellsPerColumn)

    newBasalSegmentCells = self._getCellsWithFewestSegments(
      self.basalConnections, self.rng, burstingColumnsWithNoMatch,
      self.cellsPerColumn)

    learningCells = np.concatenate(
      (correctPredictedCells,
       self.basalConnections.mapSegmentsToCells(
         learningMatchingBasalSegments),
       newBasalSegmentCells))

    # Incorrectly predicted columns: matching segments whose column did not
    # become active get punished.
    correctMatchingBasalMask = np.in1d(
      cellsForMatchingBasal / self.cellsPerColumn, activeColumns)

    basalSegmentsToPunish = matchingBasalSegments[~correctMatchingBasalMask]

    return (learningActiveBasalSegments,
            learningMatchingBasalSegments,
            basalSegmentsToPunish,
            newBasalSegmentCells,
            learningCells)

  def _calculateApicalLearning(self,
                               learningCells,
                               activeColumns,
                               activeApicalSegments,
                               matchingApicalSegments,
                               apicalPotentialOverlaps):
    """
    Calculate apical learning for each learning cell.

    The set of learning cells was determined completely from basal segments.
    Do all apical learning on the same cells.

    Learn on any active segments on learning cells. For cells without active
    segments, learn on the best matching segment. For cells without a
    matching segment, grow a new segment.

    @param learningCells (numpy array)
    @param activeColumns (numpy array)
    @param activeApicalSegments (numpy array)
    @param matchingApicalSegments (numpy array)
    @param apicalPotentialOverlaps (numpy array)

    @return (tuple)
    - learningActiveApicalSegments (numpy array)
      Active apical segments on learning cells

    - learningMatchingApicalSegments (numpy array)
      Matching apical segments selected for learning on learning cells
      without an active apical segment

    - apicalSegmentsToPunish (numpy array)
      Apical segments that should be punished for predicting an inactive
      column

    - newApicalSegmentCells (numpy array)
      Learning cells that were selected to grow new apical segments
    """
    # Cells with active apical segments
    learningActiveApicalSegments = self.apicalConnections.filterSegmentsByCell(
      activeApicalSegments, learningCells)

    # Cells with matching apical segments
    learningCellsWithoutActiveApical = np.setdiff1d(
      learningCells,
      self.apicalConnections.mapSegmentsToCells(learningActiveApicalSegments))
    cellsForMatchingApical = self.apicalConnections.mapSegmentsToCells(
      matchingApicalSegments)
    learningCellsWithMatchingApical = np.intersect1d(
      learningCellsWithoutActiveApical, cellsForMatchingApical)
    learningMatchingApicalSegments = self._chooseBestSegmentPerCell(
      self.apicalConnections, learningCellsWithMatchingApical,
      matchingApicalSegments, apicalPotentialOverlaps)

    # Cells that need to grow an apical segment
    newApicalSegmentCells = np.setdiff1d(learningCellsWithoutActiveApical,
                                         learningCellsWithMatchingApical)

    # Incorrectly predicted columns
    correctMatchingApicalMask = np.in1d(
      cellsForMatchingApical / self.cellsPerColumn, activeColumns)
    apicalSegmentsToPunish = matchingApicalSegments[~correctMatchingApicalMask]

    return (learningActiveApicalSegments,
            learningMatchingApicalSegments,
            apicalSegmentsToPunish,
            newApicalSegmentCells)

  @staticmethod
  def _calculateSegmentActivity(connections, activeInput, connectedPermanence,
                                activationThreshold, minThreshold):
    """
    Calculate the active and matching segments for this timestep.

    @param connections (SparseMatrixConnections)
    @param activeInput (numpy array)
    @param connectedPermanence (float)
    @param activationThreshold (int)
    @param minThreshold (int)

    @return (tuple)
    - activeSegments (numpy array)
      Dendrite segments with enough active connected synapses to cause a
      dendritic spike

    - matchingSegments (numpy array)
      Dendrite segments with enough active potential synapses to be selected
      for learning in a bursting column

    - potentialOverlaps (numpy array)
      The number of active potential synapses for each segment. Includes
      counts for active, matching, and nonmatching segments.
    """
    # Active: overlap of *connected* synapses only.
    overlaps = connections.computeActivity(activeInput, connectedPermanence)
    activeSegments = np.flatnonzero(overlaps >= activationThreshold)

    # Matching: overlap of all potential synapses.
    potentialOverlaps = connections.computeActivity(activeInput)
    matchingSegments = np.flatnonzero(potentialOverlaps >= minThreshold)

    return (activeSegments, matchingSegments, potentialOverlaps)

  def _calculatePredictedCells(self, activeBasalSegments,
                               activeApicalSegments):
    """
    Calculate the predicted cells, given the set of active segments.

    An active basal segment is enough to predict a cell. An active apical
    segment is *not* enough to predict a cell.

    When a cell has both types of segments active, other cells in its
    minicolumn must also have both types of segments to be considered
    predictive.

    @param activeBasalSegments (numpy array)
    @param activeApicalSegments (numpy array)

    @return (numpy array) Predicted cells
    """
    cellsForBasalSegments = self.basalConnections.mapSegmentsToCells(
      activeBasalSegments)
    cellsForApicalSegments = self.apicalConnections.mapSegmentsToCells(
      activeApicalSegments)

    fullyDepolarizedCells = np.intersect1d(cellsForBasalSegments,
                                           cellsForApicalSegments)
    partlyDepolarizedCells = np.setdiff1d(cellsForBasalSegments,
                                          fullyDepolarizedCells)

    # A basal-only cell is inhibited when a cell in the same column has both
    # basal and apical support.
    inhibitedMask = np.in1d(partlyDepolarizedCells / self.cellsPerColumn,
                            fullyDepolarizedCells / self.cellsPerColumn)
    predictedCells = np.append(fullyDepolarizedCells,
                               partlyDepolarizedCells[~inhibitedMask])

    return predictedCells

  @staticmethod
  def _learn(connections, rng, learningSegments, activeInput,
             growthCandidates, potentialOverlaps, initialPermanence,
             sampleSize, permanenceIncrement, permanenceDecrement,
             maxSynapsesPerSegment):
    """
    Adjust synapse permanences on the learning segments and grow new synapses
    toward the growth candidates.

    @param connections (SparseMatrixConnections)
    @param rng (Random)
    @param learningSegments (numpy array)
    @param activeInput (numpy array)
    @param growthCandidates (numpy array)
    @param potentialOverlaps (numpy array)
    @param initialPermanence (float)
    @param sampleSize (int)
    @param permanenceIncrement (float)
    @param permanenceDecrement (float)
    @param maxSynapsesPerSegment (int)
    """
    # Learn on existing segments: reinforce active synapses, decay the rest.
    connections.adjustSynapses(learningSegments, activeInput,
                               permanenceIncrement, -permanenceDecrement)

    # Grow new synapses. Calculate "maxNew", the maximum number of synapses
    # to grow per segment. "maxNew" might be a number or it might be a list
    # of numbers.
    if sampleSize == -1:
      maxNew = len(growthCandidates)
    else:
      maxNew = sampleSize - potentialOverlaps[learningSegments]

    if maxSynapsesPerSegment != -1:
      # Cap growth so no segment exceeds maxSynapsesPerSegment.
      synapseCounts = connections.mapSegmentsToSynapseCounts(
        learningSegments)
      numSynapsesToReachMax = maxSynapsesPerSegment - synapseCounts
      maxNew = np.where(maxNew <= numSynapsesToReachMax,
                        maxNew, numSynapsesToReachMax)

    connections.growSynapsesToSample(learningSegments, growthCandidates,
                                     maxNew, initialPermanence, rng)

  @staticmethod
  def _learnOnNewSegments(connections, rng, newSegmentCells,
                          growthCandidates, initialPermanence, sampleSize,
                          maxSynapsesPerSegment):
    """
    Create a segment on each of 'newSegmentCells' and grow synapses on it.

    @param connections (SparseMatrixConnections)
    @param rng (Random)
    @param newSegmentCells (numpy array)
    @param growthCandidates (numpy array)
    @param initialPermanence (float)
    @param sampleSize (int)
    @param maxSynapsesPerSegment (int)
    """
    numNewSynapses = len(growthCandidates)

    if sampleSize != -1:
      numNewSynapses = min(numNewSynapses, sampleSize)

    if maxSynapsesPerSegment != -1:
      numNewSynapses = min(numNewSynapses, maxSynapsesPerSegment)

    newSegments = connections.createSegments(newSegmentCells)
    connections.growSynapsesToSample(newSegments, growthCandidates,
                                     numNewSynapses, initialPermanence, rng)

  @classmethod
  def _chooseBestSegmentPerCell(cls, connections, cells,
                                allMatchingSegments, potentialOverlaps):
    """
    For each specified cell, choose its matching segment with largest number
    of active potential synapses. When there's a tie, the first segment wins.

    @param connections (SparseMatrixConnections)
    @param cells (numpy array)
    @param allMatchingSegments (numpy array)
    @param potentialOverlaps (numpy array)

    @return (numpy array) One segment per cell
    """
    candidateSegments = connections.filterSegmentsByCell(allMatchingSegments,
                                                         cells)

    # Narrow it down to one pair per cell.
    onePerCellFilter = np2.argmaxMulti(potentialOverlaps[candidateSegments],
                                       connections.mapSegmentsToCells(
                                         candidateSegments))
    learningSegments = candidateSegments[onePerCellFilter]

    return learningSegments

  @classmethod
  def _chooseBestSegmentPerColumn(cls, connections, matchingCells,
                                  allMatchingSegments, potentialOverlaps,
                                  cellsPerColumn):
    """
    For all the columns covered by 'matchingCells', choose the column's
    matching segment with largest number of active potential synapses. When
    there's a tie, the first segment wins.

    @param connections (SparseMatrixConnections)
    @param matchingCells (numpy array)
    @param allMatchingSegments (numpy array)
    @param potentialOverlaps (numpy array)
    @param cellsPerColumn (int)

    @return (numpy array) One segment per column
    """
    candidateSegments = connections.filterSegmentsByCell(allMatchingSegments,
                                                         matchingCells)

    # Narrow it down to one segment per column.
    cellScores = potentialOverlaps[candidateSegments]
    columnsForCandidates = (connections.mapSegmentsToCells(candidateSegments) /
                            cellsPerColumn)
    onePerColumnFilter = np2.argmaxMulti(cellScores, columnsForCandidates)

    learningSegments = candidateSegments[onePerColumnFilter]

    return learningSegments

  @classmethod
  def _getCellsWithFewestSegments(cls, connections, rng, columns,
                                  cellsPerColumn):
    """
    For each column, get the cell that has the fewest total basal segments.
    Break ties randomly.

    @param connections (SparseMatrixConnections)
    @param rng (Random)
    @param columns (numpy array) Columns to check
    @param cellsPerColumn (int)

    @return (numpy array) One cell for each of the provided columns
    """
    candidateCells = np2.getAllCellsInColumns(columns, cellsPerColumn)

    # Arrange the segment counts into one row per minicolumn.
    segmentCounts = np.reshape(connections.getSegmentCounts(candidateCells),
                               newshape=(len(columns), cellsPerColumn))

    # Filter to just the cells that are tied for fewest in their minicolumn.
    minSegmentCounts = np.amin(segmentCounts, axis=1, keepdims=True)
    candidateCells = candidateCells[np.flatnonzero(segmentCounts ==
                                                   minSegmentCounts)]

    # Filter to one cell per column, choosing randomly from the minimums.
    # To do the random choice, add a random offset to each index in-place,
    # using casting to floor the result.
    (_,
     onePerColumnFilter,
     numCandidatesInColumns) = np.unique(candidateCells / cellsPerColumn,
                                         return_index=True,
                                         return_counts=True)

    offsetPercents = np.empty(len(columns), dtype="float32")
    rng.initializeReal32Array(offsetPercents)

    np.add(onePerColumnFilter,
           offsetPercents*numCandidatesInColumns,
           out=onePerColumnFilter,
           casting="unsafe")

    return candidateCells[onePerColumnFilter]

  @staticmethod
  def _numPoints(dimensions):
    """
    Get the number of discrete points in a set of dimensions.

    @param dimensions (sequence of integers)

    @return (int)
    """
    if len(dimensions) == 0:
      return 0
    else:
      return reduce(operator.mul, dimensions, 1)

  def getActiveCells(self):
    # Cells made active by the most recent compute().
    return self.activeCells

  def getWinnerCells(self):
    # Cells chosen for learning by the most recent compute().
    return self.winnerCells

  def getPreviouslyPredictedCells(self):
    # Cells that were predicted going into the most recent compute().
    return self.prevPredictedCells
class ApicalDependentTemporalMemory(object): """ A generalized Temporal Memory that creates cell SDRs that are specific to both the basal and apical input. Prediction requires both basal and apical support. For sequence memory, the result is that every sequence happens within a "world" which is specified by the apical input. Sequences are not shared between worlds. This class is generalized in two ways: - This class does not specify when a 'timestep' begins and ends. It exposes two main methods: 'depolarizeCells' and 'activateCells', and callers or subclasses can introduce the notion of a timestep. - This class is unaware of whether its 'basalInput' or 'apicalInput' are from internal or external cells. They are just cell numbers. The caller knows what these cell numbers mean, but the TemporalMemory doesn't. """ def __init__(self, columnCount=2048, basalInputSize=0, apicalInputSize=0, cellsPerColumn=32, activationThreshold=13, reducedBasalThreshold=10, initialPermanence=0.21, connectedPermanence=0.50, minThreshold=10, sampleSize=20, permanenceIncrement=0.1, permanenceDecrement=0.1, basalPredictedSegmentDecrement=0.0, apicalPredictedSegmentDecrement=0.0, maxSynapsesPerSegment=-1, seed=42): """ @param columnCount (int) The number of minicolumns @param basalInputSize (sequence) The number of bits in the basal input @param apicalInputSize (int) The number of bits in the apical input @param cellsPerColumn (int) Number of cells per column @param activationThreshold (int) If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active. @param reducedBasalThreshold (int) The activation threshold of basal (lateral) segments for cells that have active apical segments. If equal to activationThreshold (default), this parameter has no effect. 
@param initialPermanence (float) Initial permanence of a new synapse @param connectedPermanence (float) If the permanence value for a synapse is greater than this value, it is said to be connected. @param minThreshold (int) If the number of potential synapses active on a segment is at least this threshold, it is said to be "matching" and is eligible for learning. @param sampleSize (int) How much of the active SDR to sample with synapses. @param permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning. @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning. @param basalPredictedSegmentDecrement (float) Amount by which segments are punished for incorrect predictions. @param apicalPredictedSegmentDecrement (float) Amount by which segments are punished for incorrect predictions. @param maxSynapsesPerSegment The maximum number of synapses per segment. @param seed (int) Seed for the random number generator. 
""" self.columnCount = columnCount self.cellsPerColumn = cellsPerColumn self.initialPermanence = initialPermanence self.connectedPermanence = connectedPermanence self.minThreshold = minThreshold self.sampleSize = sampleSize self.permanenceIncrement = permanenceIncrement self.permanenceDecrement = permanenceDecrement self.basalPredictedSegmentDecrement = basalPredictedSegmentDecrement self.apicalPredictedSegmentDecrement = apicalPredictedSegmentDecrement self.activationThreshold = activationThreshold self.reducedBasalThreshold = reducedBasalThreshold self.maxSynapsesPerSegment = maxSynapsesPerSegment self.basalConnections = SparseMatrixConnections(columnCount*cellsPerColumn, basalInputSize) self.disableApicalDependence = False self.apicalConnections = SparseMatrixConnections(columnCount*cellsPerColumn, apicalInputSize) self.rng = Random(seed) self.activeCells = np.empty(0, dtype="uint32") self.winnerCells = np.empty(0, dtype="uint32") self.predictedCells = np.empty(0, dtype="uint32") self.predictedActiveCells = np.empty(0, dtype="uint32") self.activeBasalSegments = np.empty(0, dtype="uint32") self.activeApicalSegments = np.empty(0, dtype="uint32") self.matchingBasalSegments = np.empty(0, dtype="uint32") self.matchingApicalSegments = np.empty(0, dtype="uint32") self.basalPotentialOverlaps = np.empty(0, dtype="int32") self.apicalPotentialOverlaps = np.empty(0, dtype="int32") def reset(self): """ Clear all cell and segment activity. 
""" self.activeCells = np.empty(0, dtype="uint32") self.winnerCells = np.empty(0, dtype="uint32") self.predictedCells = np.empty(0, dtype="uint32") self.predictedActiveCells = np.empty(0, dtype="uint32") self.activeBasalSegments = np.empty(0, dtype="uint32") self.activeApicalSegments = np.empty(0, dtype="uint32") self.matchingBasalSegments = np.empty(0, dtype="uint32") self.matchingApicalSegments = np.empty(0, dtype="uint32") self.basalPotentialOverlaps = np.empty(0, dtype="int32") self.apicalPotentialOverlaps = np.empty(0, dtype="int32") def depolarizeCells(self, basalInput, apicalInput, learn): """ Calculate predictions. @param basalInput (numpy array) List of active input bits for the basal dendrite segments @param apicalInput (numpy array) List of active input bits for the apical dendrite segments @param learn (bool) Whether learning is enabled. Some TM implementations may depolarize cells differently or do segment activity bookkeeping when learning is enabled. """ # Calculate predictions for this timestep (activeApicalSegments, matchingApicalSegments, apicalPotentialOverlaps) = self._calculateSegmentActivity( self.apicalConnections, apicalInput, self.connectedPermanence, self.activationThreshold, self.minThreshold, self.reducedBasalThreshold) apicallySupportedCells = self.apicalConnections.mapSegmentsToCells( activeApicalSegments) if not self.disableApicalDependence: (activeBasalSegments, matchingBasalSegments, basalPotentialOverlaps) = self._calculateSegmentActivity( self.basalConnections, basalInput, self.connectedPermanence, self.activationThreshold, self.minThreshold, self.reducedBasalThreshold, reducedThresholdCells = apicallySupportedCells,) predictedCells = np.intersect1d( self.basalConnections.mapSegmentsToCells(activeBasalSegments), apicallySupportedCells) else: (activeBasalSegments, matchingBasalSegments, basalPotentialOverlaps) = self._calculateSegmentActivity( self.basalConnections, basalInput, self.connectedPermanence, self.activationThreshold, 
self.minThreshold, self.reducedBasalThreshold) predictedCells = self.basalConnections.mapSegmentsToCells(activeBasalSegments) self.predictedCells = predictedCells self.activeBasalSegments = activeBasalSegments self.activeApicalSegments = activeApicalSegments self.matchingBasalSegments = matchingBasalSegments self.matchingApicalSegments = matchingApicalSegments self.basalPotentialOverlaps = basalPotentialOverlaps self.apicalPotentialOverlaps = apicalPotentialOverlaps def activateCells(self, activeColumns, basalReinforceCandidates, apicalReinforceCandidates, basalGrowthCandidates, apicalGrowthCandidates, learn=True): """ Activate cells in the specified columns, using the result of the previous 'depolarizeCells' as predictions. Then learn. @param activeColumns (numpy array) List of active columns @param basalReinforceCandidates (numpy array) List of bits that the active cells may reinforce basal synapses to. @param apicalReinforceCandidates (numpy array) List of bits that the active cells may reinforce apical synapses to. @param basalGrowthCandidates (numpy array or None) List of bits that the active cells may grow new basal synapses to. 
@param apicalGrowthCandidates (numpy array or None) List of bits that the active cells may grow new apical synapses to @param learn (bool) Whether to grow / reinforce / punish synapses """ # Calculate active cells (correctPredictedCells, burstingColumns) = np2.setCompare(self.predictedCells, activeColumns, self.predictedCells / self.cellsPerColumn, rightMinusLeft=True) newActiveCells = np.concatenate((correctPredictedCells, np2.getAllCellsInColumns( burstingColumns, self.cellsPerColumn))) # Calculate learning (learningActiveBasalSegments, learningActiveApicalSegments, learningMatchingBasalSegments, learningMatchingApicalSegments, basalSegmentsToPunish, apicalSegmentsToPunish, newSegmentCells, learningCells) = self._calculateLearning(activeColumns, burstingColumns, correctPredictedCells, self.activeBasalSegments, self.activeApicalSegments, self.matchingBasalSegments, self.matchingApicalSegments, self.basalPotentialOverlaps, self.apicalPotentialOverlaps) if learn: # Learn on existing segments for learningSegments in (learningActiveBasalSegments, learningMatchingBasalSegments): self._learn(self.basalConnections, self.rng, learningSegments, basalReinforceCandidates, basalGrowthCandidates, self.basalPotentialOverlaps, self.initialPermanence, self.sampleSize, self.permanenceIncrement, self.permanenceDecrement, self.maxSynapsesPerSegment) for learningSegments in (learningActiveApicalSegments, learningMatchingApicalSegments): self._learn(self.apicalConnections, self.rng, learningSegments, apicalReinforceCandidates, apicalGrowthCandidates, self.apicalPotentialOverlaps, self.initialPermanence, self.sampleSize, self.permanenceIncrement, self.permanenceDecrement, self.maxSynapsesPerSegment) # Punish incorrect predictions if self.basalPredictedSegmentDecrement != 0.0: self.basalConnections.adjustActiveSynapses( basalSegmentsToPunish, basalReinforceCandidates, -self.basalPredictedSegmentDecrement) if self.apicalPredictedSegmentDecrement != 0.0: 
self.apicalConnections.adjustActiveSynapses( apicalSegmentsToPunish, apicalReinforceCandidates, -self.apicalPredictedSegmentDecrement) # Only grow segments if there is basal *and* apical input. if len(basalGrowthCandidates) > 0 and len(apicalGrowthCandidates) > 0: self._learnOnNewSegments(self.basalConnections, self.rng, newSegmentCells, basalGrowthCandidates, self.initialPermanence, self.sampleSize, self.maxSynapsesPerSegment) self._learnOnNewSegments(self.apicalConnections, self.rng, newSegmentCells, apicalGrowthCandidates, self.initialPermanence, self.sampleSize, self.maxSynapsesPerSegment) # Save the results newActiveCells.sort() learningCells.sort() self.activeCells = newActiveCells self.winnerCells = learningCells self.predictedActiveCells = correctPredictedCells def _calculateLearning(self, activeColumns, burstingColumns, correctPredictedCells, activeBasalSegments, activeApicalSegments, matchingBasalSegments, matchingApicalSegments, basalPotentialOverlaps, apicalPotentialOverlaps): """ Learning occurs on pairs of segments. Correctly predicted cells always have active basal and apical segments, and we learn on these segments. In bursting columns, we either learn on an existing segment pair, or we grow a new pair of segments. 
@param activeColumns (numpy array) @param burstingColumns (numpy array) @param correctPredictedCells (numpy array) @param activeBasalSegments (numpy array) @param activeApicalSegments (numpy array) @param matchingBasalSegments (numpy array) @param matchingApicalSegments (numpy array) @param basalPotentialOverlaps (numpy array) @param apicalPotentialOverlaps (numpy array) @return (tuple) - learningActiveBasalSegments (numpy array) Active basal segments on correct predicted cells - learningActiveApicalSegments (numpy array) Active apical segments on correct predicted cells - learningMatchingBasalSegments (numpy array) Matching basal segments selected for learning in bursting columns - learningMatchingApicalSegments (numpy array) Matching apical segments selected for learning in bursting columns - basalSegmentsToPunish (numpy array) Basal segments that should be punished for predicting an inactive column - apicalSegmentsToPunish (numpy array) Apical segments that should be punished for predicting an inactive column - newSegmentCells (numpy array) Cells in bursting columns that were selected to grow new segments - learningCells (numpy array) Every cell that has a learning segment or was selected to grow a segment """ # Correctly predicted columns learningActiveBasalSegments = self.basalConnections.filterSegmentsByCell( activeBasalSegments, correctPredictedCells) learningActiveApicalSegments = self.apicalConnections.filterSegmentsByCell( activeApicalSegments, correctPredictedCells) # Bursting columns cellsForMatchingBasal = self.basalConnections.mapSegmentsToCells( matchingBasalSegments) cellsForMatchingApical = self.apicalConnections.mapSegmentsToCells( matchingApicalSegments) matchingCells = np.intersect1d( cellsForMatchingBasal, cellsForMatchingApical) (matchingCellsInBurstingColumns, burstingColumnsWithNoMatch) = np2.setCompare( matchingCells, burstingColumns, matchingCells / self.cellsPerColumn, rightMinusLeft=True) (learningMatchingBasalSegments, 
learningMatchingApicalSegments) = self._chooseBestSegmentPairPerColumn( matchingCellsInBurstingColumns, matchingBasalSegments, matchingApicalSegments, basalPotentialOverlaps, apicalPotentialOverlaps) newSegmentCells = self._getCellsWithFewestSegments( burstingColumnsWithNoMatch) # Incorrectly predicted columns if self.basalPredictedSegmentDecrement > 0.0: correctMatchingBasalMask = np.in1d( cellsForMatchingBasal / self.cellsPerColumn, activeColumns) basalSegmentsToPunish = matchingBasalSegments[~correctMatchingBasalMask] else: basalSegmentsToPunish = () if self.apicalPredictedSegmentDecrement > 0.0: correctMatchingApicalMask = np.in1d( cellsForMatchingApical / self.cellsPerColumn, activeColumns) apicalSegmentsToPunish = matchingApicalSegments[~correctMatchingApicalMask] else: apicalSegmentsToPunish = () # Make a list of every cell that is learning learningCells = np.concatenate( (correctPredictedCells, self.basalConnections.mapSegmentsToCells(learningMatchingBasalSegments), newSegmentCells)) return (learningActiveBasalSegments, learningActiveApicalSegments, learningMatchingBasalSegments, learningMatchingApicalSegments, basalSegmentsToPunish, apicalSegmentsToPunish, newSegmentCells, learningCells) @staticmethod def _calculateSegmentActivity(connections, activeInput, connectedPermanence, activationThreshold, minThreshold, reducedThreshold, reducedThresholdCells = ()): """ Calculate the active and matching basal segments for this timestep. @param connections (SparseMatrixConnections) @param activeInput (numpy array) @return (tuple) - activeSegments (numpy array) Dendrite segments with enough active connected synapses to cause a dendritic spike - matchingSegments (numpy array) Dendrite segments with enough active potential synapses to be selected for learning in a bursting column - potentialOverlaps (numpy array) The number of active potential synapses for each segment. Includes counts for active, matching, and nonmatching segments. 
""" # Active apical segments lower the activation threshold for basal segments overlaps = connections.computeActivity(activeInput, connectedPermanence) outrightActiveSegments = np.flatnonzero(overlaps >= activationThreshold) if (reducedThreshold != activationThreshold and len(reducedThresholdCells) > 0): potentiallyActiveSegments = np.flatnonzero( (overlaps < activationThreshold) & (overlaps >= reducedThreshold)) cellsOfCASegments = connections.mapSegmentsToCells( potentiallyActiveSegments) # apically active segments are condit. active segments from apically # active cells conditionallyActiveSegments = potentiallyActiveSegments[ np.in1d(cellsOfCASegments, reducedThresholdCells)] activeSegments = np.concatenate((outrightActiveSegments, conditionallyActiveSegments)) else: activeSegments = outrightActiveSegments # Matching potentialOverlaps = connections.computeActivity(activeInput) matchingSegments = np.flatnonzero(potentialOverlaps >= minThreshold) return (activeSegments, matchingSegments, potentialOverlaps) @staticmethod def _learn(connections, rng, learningSegments, activeInput, growthCandidates, potentialOverlaps, initialPermanence, sampleSize, permanenceIncrement, permanenceDecrement, maxSynapsesPerSegment): """ Adjust synapse permanences, and grow new synapses. @param learningActiveSegments (numpy array) @param learningMatchingSegments (numpy array) @param activeInput (numpy array) @param growthCandidates (numpy array) @param potentialOverlaps (numpy array) """ # Learn on existing segments connections.adjustSynapses(learningSegments, activeInput, permanenceIncrement, -permanenceDecrement) # Grow new synapses. Calculate "maxNew", the maximum number of synapses to # grow per segment. "maxNew" might be a number or it might be a list of # numbers. 
if sampleSize == -1: maxNew = len(growthCandidates) else: maxNew = sampleSize - potentialOverlaps[learningSegments] if maxSynapsesPerSegment != -1: synapseCounts = connections.mapSegmentsToSynapseCounts( learningSegments) numSynapsesToReachMax = maxSynapsesPerSegment - synapseCounts maxNew = np.where(maxNew <= numSynapsesToReachMax, maxNew, numSynapsesToReachMax) connections.growSynapsesToSample(learningSegments, growthCandidates, maxNew, initialPermanence, rng) @staticmethod def _learnOnNewSegments(connections, rng, newSegmentCells, growthCandidates, initialPermanence, sampleSize, maxSynapsesPerSegment): """ Create new segments, and grow synapses on them. @param connections (SparseMatrixConnections) @param rng (Random) @param newSegmentCells (numpy array) @param growthCandidates (numpy array) """ numNewSynapses = len(growthCandidates) if sampleSize != -1: numNewSynapses = min(numNewSynapses, sampleSize) if maxSynapsesPerSegment != -1: numNewSynapses = min(numNewSynapses, maxSynapsesPerSegment) newSegments = connections.createSegments(newSegmentCells) connections.growSynapsesToSample(newSegments, growthCandidates, numNewSynapses, initialPermanence, rng) def _chooseBestSegmentPairPerColumn(self, matchingCellsInBurstingColumns, matchingBasalSegments, matchingApicalSegments, basalPotentialOverlaps, apicalPotentialOverlaps): """ Choose the best pair of matching segments - one basal and one apical - for each column. Pairs are ranked by the sum of their potential overlaps. When there's a tie, the first pair wins. 
@param matchingCellsInBurstingColumns (numpy array) Cells in bursting columns that have at least one matching basal segment and at least one matching apical segment @param matchingBasalSegments (numpy array) @param matchingApicalSegments (numpy array) @param basalPotentialOverlaps (numpy array) @param apicalPotentialOverlaps (numpy array) @return (tuple) - learningBasalSegments (numpy array) The selected basal segments - learningApicalSegments (numpy array) The selected apical segments """ basalCandidateSegments = self.basalConnections.filterSegmentsByCell( matchingBasalSegments, matchingCellsInBurstingColumns) apicalCandidateSegments = self.apicalConnections.filterSegmentsByCell( matchingApicalSegments, matchingCellsInBurstingColumns) # Sort everything once rather than inside of each call to argmaxMulti. self.basalConnections.sortSegmentsByCell(basalCandidateSegments) self.apicalConnections.sortSegmentsByCell(apicalCandidateSegments) # Narrow it down to one pair per cell. oneBasalPerCellFilter = np2.argmaxMulti( basalPotentialOverlaps[basalCandidateSegments], self.basalConnections.mapSegmentsToCells(basalCandidateSegments), assumeSorted=True) basalCandidateSegments = basalCandidateSegments[oneBasalPerCellFilter] oneApicalPerCellFilter = np2.argmaxMulti( apicalPotentialOverlaps[apicalCandidateSegments], self.apicalConnections.mapSegmentsToCells(apicalCandidateSegments), assumeSorted=True) apicalCandidateSegments = apicalCandidateSegments[oneApicalPerCellFilter] # Narrow it down to one pair per column. 
cellScores = (basalPotentialOverlaps[basalCandidateSegments] + apicalPotentialOverlaps[apicalCandidateSegments]) columnsForCandidates = ( self.basalConnections.mapSegmentsToCells(basalCandidateSegments) / self.cellsPerColumn) onePerColumnFilter = np2.argmaxMulti(cellScores, columnsForCandidates, assumeSorted=True) learningBasalSegments = basalCandidateSegments[onePerColumnFilter] learningApicalSegments = apicalCandidateSegments[onePerColumnFilter] return (learningBasalSegments, learningApicalSegments) def _getCellsWithFewestSegments(self, columns): """ For each column, get the cell that has the fewest total segments (basal or apical). Break ties randomly. @param columns (numpy array) Columns to check @return (numpy array) One cell for each of the provided columns """ candidateCells = np2.getAllCellsInColumns(columns, self.cellsPerColumn) # Arrange the segment counts into one row per minicolumn. segmentCounts = np.reshape( self.basalConnections.getSegmentCounts(candidateCells) + self.apicalConnections.getSegmentCounts(candidateCells), newshape=(len(columns), self.cellsPerColumn)) # Filter to just the cells that are tied for fewest in their minicolumn. minSegmentCounts = np.amin(segmentCounts, axis=1, keepdims=True) candidateCells = candidateCells[np.flatnonzero(segmentCounts == minSegmentCounts)] # Filter to one cell per column, choosing randomly from the minimums. # To do the random choice, add a random offset to each index in-place, using # casting to floor the result. 
(_, onePerColumnFilter, numCandidatesInColumns) = np.unique(candidateCells / self.cellsPerColumn, return_index=True, return_counts=True) offsetPercents = np.empty(len(columns), dtype="float32") self.rng.initializeReal32Array(offsetPercents) np.add(onePerColumnFilter, offsetPercents*numCandidatesInColumns, out=onePerColumnFilter, casting="unsafe") return candidateCells[onePerColumnFilter] def getActiveCells(self): """ @return (numpy array) Active cells """ return self.activeCells def getPredictedActiveCells(self): """ @return (numpy array) Active cells that were correctly predicted """ return np.intersect1d(self.activeCells, self.predictedCells) def getWinnerCells(self): """ @return (numpy array) Cells that were selected for learning """ return self.winnerCells def getPredictedCells(self): """ @return (numpy array) Cells that were predicted for this timestep """ return self.predictedCells def getActiveBasalSegments(self): """ @return (numpy array) Active basal segments for this timestep """ return self.activeBasalSegments def getActiveApicalSegments(self): """ @return (numpy array) Matching basal segments for this timestep """ return self.activeApicalSegments def numberOfColumns(self): """ Returns the number of columns in this layer. @return (int) Number of columns """ return self.columnCount def numberOfCells(self): """ Returns the number of cells in this layer. @return (int) Number of cells """ return self.numberOfColumns() * self.cellsPerColumn def getCellsPerColumn(self): """ Returns the number of cells per column. @return (int) The number of cells per column. """ return self.cellsPerColumn def getActivationThreshold(self): """ Returns the activation threshold. @return (int) The activation threshold. """ return self.activationThreshold def setActivationThreshold(self, activationThreshold): """ Sets the activation threshold. @param activationThreshold (int) activation threshold. 
""" self.activationThreshold = activationThreshold def getInitialPermanence(self): """ Get the initial permanence. @return (float) The initial permanence. """ return self.initialPermanence def setInitialPermanence(self, initialPermanence): """ Sets the initial permanence. @param initialPermanence (float) The initial permanence. """ self.initialPermanence = initialPermanence def getMinThreshold(self): """ Returns the min threshold. @return (int) The min threshold. """ return self.minThreshold def setMinThreshold(self, minThreshold): """ Sets the min threshold. @param minThreshold (int) min threshold. """ self.minThreshold = minThreshold def getSampleSize(self): """ Gets the sampleSize. @return (int) """ return self.sampleSize def setSampleSize(self, sampleSize): """ Sets the sampleSize. @param sampleSize (int) """ self.sampleSize = sampleSize def getPermanenceIncrement(self): """ Get the permanence increment. @return (float) The permanence increment. """ return self.permanenceIncrement def setPermanenceIncrement(self, permanenceIncrement): """ Sets the permanence increment. @param permanenceIncrement (float) The permanence increment. """ self.permanenceIncrement = permanenceIncrement def getPermanenceDecrement(self): """ Get the permanence decrement. @return (float) The permanence decrement. """ return self.permanenceDecrement def setPermanenceDecrement(self, permanenceDecrement): """ Sets the permanence decrement. @param permanenceDecrement (float) The permanence decrement. """ self.permanenceDecrement = permanenceDecrement def getBasalPredictedSegmentDecrement(self): """ Get the predicted segment decrement. @return (float) The predicted segment decrement. """ return self.basalPredictedSegmentDecrement def setBasalPredictedSegmentDecrement(self, predictedSegmentDecrement): """ Sets the predicted segment decrement. @param predictedSegmentDecrement (float) The predicted segment decrement. 
""" self.basalPredictedSegmentDecrement = basalPredictedSegmentDecrement def getApicalPredictedSegmentDecrement(self): """ Get the predicted segment decrement. @return (float) The predicted segment decrement. """ return self.apicalPredictedSegmentDecrement def setApicalPredictedSegmentDecrement(self, predictedSegmentDecrement): """ Sets the predicted segment decrement. @param predictedSegmentDecrement (float) The predicted segment decrement. """ self.apicalPredictedSegmentDecrement = apicalPredictedSegmentDecrement def getConnectedPermanence(self): """ Get the connected permanence. @return (float) The connected permanence. """ return self.connectedPermanence def setConnectedPermanence(self, connectedPermanence): """ Sets the connected permanence. @param connectedPermanence (float) The connected permanence. """ self.connectedPermanence = connectedPermanence
class ApicalTiebreakTemporalMemory(object):
  """
  A generalized Temporal Memory with apical dendrites that add a "tiebreak".

  Basal connections are used to implement traditional Temporal Memory.

  The apical connections are used for further disambiguation. If multiple cells
  in a minicolumn have active basal segments, each of those cells is predicted,
  unless one of them also has an active apical segment, in which case only the
  cells with active basal and apical segments are predicted.

  In other words, the apical connections have no effect unless the basal input
  is a union of SDRs (e.g. from bursting minicolumns).

  This class is generalized in two ways:

  - This class does not specify when a 'timestep' begins and ends. It exposes
    two main methods: 'depolarizeCells' and 'activateCells', and callers or
    subclasses can introduce the notion of a timestep.
  - This class is unaware of whether its 'basalInput' or 'apicalInput' are from
    internal or external cells. They are just cell numbers. The caller knows
    what these cell numbers mean, but the TemporalMemory doesn't.
  """

  def __init__(self,
               columnCount=2048,
               basalInputSize=0,
               apicalInputSize=0,
               cellsPerColumn=32,
               activationThreshold=13,
               reducedBasalThreshold=13,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               sampleSize=20,
               permanenceIncrement=0.1,
               permanenceDecrement=0.1,
               basalPredictedSegmentDecrement=0.0,
               apicalPredictedSegmentDecrement=0.0,
               maxSynapsesPerSegment=-1,
               seed=42):
    """
    @param columnCount (int)
    The number of minicolumns

    @param basalInputSize (sequence)
    The number of bits in the basal input

    @param apicalInputSize (int)
    The number of bits in the apical input

    @param cellsPerColumn (int)
    Number of cells per column

    @param activationThreshold (int)
    If the number of active connected synapses on a segment is at least this
    threshold, the segment is said to be active.

    @param reducedBasalThreshold (int)
    The activation threshold of basal (lateral) segments for cells that have
    active apical segments. If equal to activationThreshold (default), this
    parameter has no effect.

    @param initialPermanence (float)
    Initial permanence of a new synapse

    @param connectedPermanence (float)
    If the permanence value for a synapse is greater than this value, it is
    said to be connected.

    @param minThreshold (int)
    If the number of potential synapses active on a segment is at least this
    threshold, it is said to be "matching" and is eligible for learning.

    @param sampleSize (int)
    How much of the active SDR to sample with synapses.

    @param permanenceIncrement (float)
    Amount by which permanences of synapses are incremented during learning.

    @param permanenceDecrement (float)
    Amount by which permanences of synapses are decremented during learning.

    @param basalPredictedSegmentDecrement (float)
    Amount by which segments are punished for incorrect predictions.

    @param apicalPredictedSegmentDecrement (float)
    Amount by which segments are punished for incorrect predictions.

    @param maxSynapsesPerSegment
    The maximum number of synapses per segment.

    @param seed (int)
    Seed for the random number generator.
    """
    self.columnCount = columnCount
    self.cellsPerColumn = cellsPerColumn
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.reducedBasalThreshold = reducedBasalThreshold
    self.minThreshold = minThreshold
    self.sampleSize = sampleSize
    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement
    self.basalPredictedSegmentDecrement = basalPredictedSegmentDecrement
    self.apicalPredictedSegmentDecrement = apicalPredictedSegmentDecrement
    self.activationThreshold = activationThreshold
    self.maxSynapsesPerSegment = maxSynapsesPerSegment

    # One segment pool per dendrite type; rows are cells, columns are inputs.
    self.basalConnections = SparseMatrixConnections(
      columnCount * cellsPerColumn, basalInputSize)
    self.apicalConnections = SparseMatrixConnections(
      columnCount * cellsPerColumn, apicalInputSize)
    self.rng = Random(seed)

    # Per-timestep state, refreshed by depolarizeCells / activateCells.
    self.activeCells = np.empty(0, dtype="uint32")
    self.winnerCells = np.empty(0, dtype="uint32")
    self.predictedCells = np.empty(0, dtype="uint32")
    self.predictedActiveCells = np.empty(0, dtype="uint32")
    self.activeBasalSegments = np.empty(0, dtype="uint32")
    self.activeApicalSegments = np.empty(0, dtype="uint32")
    self.matchingBasalSegments = np.empty(0, dtype="uint32")
    self.matchingApicalSegments = np.empty(0, dtype="uint32")
    self.basalPotentialOverlaps = np.empty(0, dtype="int32")
    self.apicalPotentialOverlaps = np.empty(0, dtype="int32")

    self.useApicalTiebreak = True
    self.useApicalModulationBasalThreshold = True


  def reset(self):
    """
    Clear all cell and segment activity.
    """
    self.activeCells = np.empty(0, dtype="uint32")
    self.winnerCells = np.empty(0, dtype="uint32")
    self.predictedCells = np.empty(0, dtype="uint32")
    self.predictedActiveCells = np.empty(0, dtype="uint32")
    self.activeBasalSegments = np.empty(0, dtype="uint32")
    self.activeApicalSegments = np.empty(0, dtype="uint32")
    self.matchingBasalSegments = np.empty(0, dtype="uint32")
    self.matchingApicalSegments = np.empty(0, dtype="uint32")
    self.basalPotentialOverlaps = np.empty(0, dtype="int32")
    self.apicalPotentialOverlaps = np.empty(0, dtype="int32")


  def depolarizeCells(self, basalInput, apicalInput, learn):
    """
    Calculate predictions.

    @param basalInput (numpy array)
    List of active input bits for the basal dendrite segments

    @param apicalInput (numpy array)
    List of active input bits for the apical dendrite segments

    @param learn (bool)
    Whether learning is enabled. Some TM implementations may depolarize cells
    differently or do segment activity bookkeeping when learning is enabled.
    """
    (activeApicalSegments,
     matchingApicalSegments,
     apicalPotentialOverlaps) = self._calculateApicalSegmentActivity(
       self.apicalConnections, apicalInput, self.connectedPermanence,
       self.activationThreshold, self.minThreshold)

    # Apical modulation of the basal threshold is only applied during
    # inference (not while learning) and only when enabled.
    if learn or self.useApicalModulationBasalThreshold == False:
      reducedBasalThresholdCells = ()
    else:
      reducedBasalThresholdCells = self.apicalConnections.mapSegmentsToCells(
        activeApicalSegments)

    (activeBasalSegments,
     matchingBasalSegments,
     basalPotentialOverlaps) = self._calculateBasalSegmentActivity(
       self.basalConnections, basalInput, reducedBasalThresholdCells,
       self.connectedPermanence, self.activationThreshold, self.minThreshold,
       self.reducedBasalThreshold)

    predictedCells = self._calculatePredictedCells(activeBasalSegments,
                                                   activeApicalSegments)

    self.predictedCells = predictedCells
    self.activeBasalSegments = activeBasalSegments
    self.activeApicalSegments = activeApicalSegments
    self.matchingBasalSegments = matchingBasalSegments
    self.matchingApicalSegments = matchingApicalSegments
    self.basalPotentialOverlaps = basalPotentialOverlaps
    self.apicalPotentialOverlaps = apicalPotentialOverlaps


  def activateCells(self,
                    activeColumns,
                    basalReinforceCandidates,
                    apicalReinforceCandidates,
                    basalGrowthCandidates,
                    apicalGrowthCandidates,
                    learn=True):
    """
    Activate cells in the specified columns, using the result of the previous
    'depolarizeCells' as predictions. Then learn.

    @param activeColumns (numpy array)
    List of active columns

    @param basalReinforceCandidates (numpy array)
    List of bits that the active cells may reinforce basal synapses to.

    @param apicalReinforceCandidates (numpy array)
    List of bits that the active cells may reinforce apical synapses to.

    @param basalGrowthCandidates (numpy array)
    List of bits that the active cells may grow new basal synapses to.

    @param apicalGrowthCandidates (numpy array)
    List of bits that the active cells may grow new apical synapses to

    @param learn (bool)
    Whether to grow / reinforce / punish synapses
    """
    # Calculate active cells
    (correctPredictedCells,
     burstingColumns) = np2.setCompare(
       self.predictedCells, activeColumns,
       self.predictedCells / self.cellsPerColumn, rightMinusLeft=True)
    newActiveCells = np.concatenate(
      (correctPredictedCells,
       np2.getAllCellsInColumns(burstingColumns, self.cellsPerColumn)))

    # Calculate learning. Basal learning determines the learning cells;
    # apical learning follows on those same cells.
    (learningActiveBasalSegments,
     learningMatchingBasalSegments,
     basalSegmentsToPunish,
     newBasalSegmentCells,
     learningCells) = self._calculateBasalLearning(
       activeColumns, burstingColumns, correctPredictedCells,
       self.activeBasalSegments, self.matchingBasalSegments,
       self.basalPotentialOverlaps)

    (learningActiveApicalSegments,
     learningMatchingApicalSegments,
     apicalSegmentsToPunish,
     newApicalSegmentCells) = self._calculateApicalLearning(
       learningCells, activeColumns, self.activeApicalSegments,
       self.matchingApicalSegments, self.apicalPotentialOverlaps)

    # Learn
    if learn:
      # Learn on existing segments
      for learningSegments in (learningActiveBasalSegments,
                               learningMatchingBasalSegments):
        self._learn(self.basalConnections, self.rng, learningSegments,
                    basalReinforceCandidates, basalGrowthCandidates,
                    self.basalPotentialOverlaps, self.initialPermanence,
                    self.sampleSize, self.permanenceIncrement,
                    self.permanenceDecrement, self.maxSynapsesPerSegment)

      for learningSegments in (learningActiveApicalSegments,
                               learningMatchingApicalSegments):
        self._learn(self.apicalConnections, self.rng, learningSegments,
                    apicalReinforceCandidates, apicalGrowthCandidates,
                    self.apicalPotentialOverlaps, self.initialPermanence,
                    self.sampleSize, self.permanenceIncrement,
                    self.permanenceDecrement, self.maxSynapsesPerSegment)

      # Punish incorrect predictions
      if self.basalPredictedSegmentDecrement != 0.0:
        self.basalConnections.adjustActiveSynapses(
          basalSegmentsToPunish, basalReinforceCandidates,
          -self.basalPredictedSegmentDecrement)

      if self.apicalPredictedSegmentDecrement != 0.0:
        self.apicalConnections.adjustActiveSynapses(
          apicalSegmentsToPunish, apicalReinforceCandidates,
          -self.apicalPredictedSegmentDecrement)

      # Grow new segments
      if len(basalGrowthCandidates) > 0:
        self._learnOnNewSegments(self.basalConnections, self.rng,
                                 newBasalSegmentCells, basalGrowthCandidates,
                                 self.initialPermanence, self.sampleSize,
                                 self.maxSynapsesPerSegment)

      if len(apicalGrowthCandidates) > 0:
        self._learnOnNewSegments(self.apicalConnections, self.rng,
                                 newApicalSegmentCells, apicalGrowthCandidates,
                                 self.initialPermanence, self.sampleSize,
                                 self.maxSynapsesPerSegment)

    # Save the results
    newActiveCells.sort()
    learningCells.sort()
    self.activeCells = newActiveCells
    self.winnerCells = learningCells
    self.predictedActiveCells = correctPredictedCells


  def _calculateBasalLearning(self,
                              activeColumns,
                              burstingColumns,
                              correctPredictedCells,
                              activeBasalSegments,
                              matchingBasalSegments,
                              basalPotentialOverlaps):
    """
    Basic Temporal Memory learning. Correctly predicted cells always have
    active basal segments, and we learn on these segments. In bursting
    columns, we either learn on an existing basal segment, or we grow a new
    one.

    The only influence apical dendrites have on basal learning is: the apical
    dendrites influence which cells are considered "predicted". So an active
    apical dendrite can prevent some basal segments in active columns from
    learning.

    @param correctPredictedCells (numpy array)
    @param burstingColumns (numpy array)
    @param activeBasalSegments (numpy array)
    @param matchingBasalSegments (numpy array)
    @param basalPotentialOverlaps (numpy array)

    @return (tuple)
    - learningActiveBasalSegments (numpy array)
      Active basal segments on correct predicted cells

    - learningMatchingBasalSegments (numpy array)
      Matching basal segments selected for learning in bursting columns

    - basalSegmentsToPunish (numpy array)
      Basal segments that should be punished for predicting an inactive column

    - newBasalSegmentCells (numpy array)
      Cells in bursting columns that were selected to grow new basal segments

    - learningCells (numpy array)
      Cells that have learning basal segments or are selected to grow a basal
      segment
    """
    # Correctly predicted columns
    learningActiveBasalSegments = self.basalConnections.filterSegmentsByCell(
      activeBasalSegments, correctPredictedCells)

    cellsForMatchingBasal = self.basalConnections.mapSegmentsToCells(
      matchingBasalSegments)
    matchingCells = np.unique(cellsForMatchingBasal)

    (matchingCellsInBurstingColumns,
     burstingColumnsWithNoMatch) = np2.setCompare(
       matchingCells, burstingColumns, matchingCells / self.cellsPerColumn,
       rightMinusLeft=True)

    learningMatchingBasalSegments = self._chooseBestSegmentPerColumn(
      self.basalConnections, matchingCellsInBurstingColumns,
      matchingBasalSegments, basalPotentialOverlaps, self.cellsPerColumn)
    newBasalSegmentCells = self._getCellsWithFewestSegments(
      self.basalConnections, self.rng, burstingColumnsWithNoMatch,
      self.cellsPerColumn)

    learningCells = np.concatenate(
      (correctPredictedCells,
       self.basalConnections.mapSegmentsToCells(
         learningMatchingBasalSegments),
       newBasalSegmentCells))

    # Incorrectly predicted columns
    correctMatchingBasalMask = np.in1d(
      cellsForMatchingBasal / self.cellsPerColumn, activeColumns)

    basalSegmentsToPunish = matchingBasalSegments[
      ~correctMatchingBasalMask]

    return (learningActiveBasalSegments,
            learningMatchingBasalSegments,
            basalSegmentsToPunish,
            newBasalSegmentCells,
            learningCells)


  def _calculateApicalLearning(self,
                               learningCells,
                               activeColumns,
                               activeApicalSegments,
                               matchingApicalSegments,
                               apicalPotentialOverlaps):
    """
    Calculate apical learning for each learning cell.

    The set of learning cells was determined completely from basal segments.
    Do all apical learning on the same cells.

    Learn on any active segments on learning cells. For cells without active
    segments, learn on the best matching segment. For cells without a matching
    segment, grow a new segment.

    @param learningCells (numpy array)
    @param activeColumns (numpy array)
    @param activeApicalSegments (numpy array)
    @param matchingApicalSegments (numpy array)
    @param apicalPotentialOverlaps (numpy array)

    @return (tuple)
    - learningActiveApicalSegments (numpy array)
      Active apical segments on correct predicted cells

    - learningMatchingApicalSegments (numpy array)
      Matching apical segments selected for learning in bursting columns

    - apicalSegmentsToPunish (numpy array)
      Apical segments that should be punished for predicting an inactive column

    - newApicalSegmentCells (numpy array)
      Cells in bursting columns that were selected to grow new apical segments
    """
    # Cells with active apical segments
    learningActiveApicalSegments = self.apicalConnections.filterSegmentsByCell(
      activeApicalSegments, learningCells)

    # Cells with matching apical segments
    learningCellsWithoutActiveApical = np.setdiff1d(
      learningCells,
      self.apicalConnections.mapSegmentsToCells(
        learningActiveApicalSegments))
    cellsForMatchingApical = self.apicalConnections.mapSegmentsToCells(
      matchingApicalSegments)
    learningCellsWithMatchingApical = np.intersect1d(
      learningCellsWithoutActiveApical, cellsForMatchingApical)
    learningMatchingApicalSegments = self._chooseBestSegmentPerCell(
      self.apicalConnections, learningCellsWithMatchingApical,
      matchingApicalSegments, apicalPotentialOverlaps)

    # Cells that need to grow an apical segment
    newApicalSegmentCells = np.setdiff1d(learningCellsWithoutActiveApical,
                                         learningCellsWithMatchingApical)

    # Incorrectly predicted columns
    correctMatchingApicalMask = np.in1d(
      cellsForMatchingApical / self.cellsPerColumn, activeColumns)

    apicalSegmentsToPunish = matchingApicalSegments[
      ~correctMatchingApicalMask]

    return (learningActiveApicalSegments,
            learningMatchingApicalSegments,
            apicalSegmentsToPunish,
            newApicalSegmentCells)


  @staticmethod
  def _calculateApicalSegmentActivity(connections, activeInput,
                                      connectedPermanence, activationThreshold,
                                      minThreshold):
    """
    Calculate the active and matching apical segments for this timestep.

    @param connections (SparseMatrixConnections)
    @param activeInput (numpy array)

    @return (tuple)
    - activeSegments (numpy array)
      Dendrite segments with enough active connected synapses to cause a
      dendritic spike

    - matchingSegments (numpy array)
      Dendrite segments with enough active potential synapses to be selected
      for learning in a bursting column

    - potentialOverlaps (numpy array)
      The number of active potential synapses for each segment.
      Includes counts for active, matching, and nonmatching segments.
    """
    # Active
    overlaps = connections.computeActivity(activeInput, connectedPermanence)
    activeSegments = np.flatnonzero(overlaps >= activationThreshold)

    # Matching
    potentialOverlaps = connections.computeActivity(activeInput)
    matchingSegments = np.flatnonzero(potentialOverlaps >= minThreshold)

    return (activeSegments, matchingSegments, potentialOverlaps)


  @staticmethod
  def _calculateBasalSegmentActivity(connections, activeInput,
                                     reducedBasalThresholdCells,
                                     connectedPermanence, activationThreshold,
                                     minThreshold, reducedBasalThreshold):
    """
    Calculate the active and matching basal segments for this timestep.

    The difference with _calculateApicalSegmentActivity is that cells with
    active apical segments (collected in reducedBasalThresholdCells) have a
    lower activation threshold for their basal segments (set by
    reducedBasalThreshold parameter).

    @param connections (SparseMatrixConnections)
    @param activeInput (numpy array)

    @return (tuple)
    - activeSegments (numpy array)
      Dendrite segments with enough active connected synapses to cause a
      dendritic spike

    - matchingSegments (numpy array)
      Dendrite segments with enough active potential synapses to be selected
      for learning in a bursting column

    - potentialOverlaps (numpy array)
      The number of active potential synapses for each segment.
      Includes counts for active, matching, and nonmatching segments.
    """
    # Active apical segments lower the activation threshold for basal
    # (lateral) segments
    overlaps = connections.computeActivity(activeInput, connectedPermanence)
    outrightActiveSegments = np.flatnonzero(
      overlaps >= activationThreshold)
    if reducedBasalThreshold != activationThreshold and len(
        reducedBasalThresholdCells) > 0:
      potentiallyActiveSegments = np.flatnonzero(
        (overlaps < activationThreshold) & (overlaps >= reducedBasalThreshold))
      cellsOfCASegments = connections.mapSegmentsToCells(
        potentiallyActiveSegments)
      # apically active segments are condit. active segments from apically
      # active cells
      conditionallyActiveSegments = potentiallyActiveSegments[np.in1d(
        cellsOfCASegments, reducedBasalThresholdCells)]
      activeSegments = np.concatenate(
        (outrightActiveSegments, conditionallyActiveSegments))
    else:
      activeSegments = outrightActiveSegments

    # Matching
    potentialOverlaps = connections.computeActivity(activeInput)
    matchingSegments = np.flatnonzero(potentialOverlaps >= minThreshold)

    return (activeSegments, matchingSegments, potentialOverlaps)


  def _calculatePredictedCells(self, activeBasalSegments,
                               activeApicalSegments):
    """
    Calculate the predicted cells, given the set of active segments.

    An active basal segment is enough to predict a cell.
    An active apical segment is *not* enough to predict a cell.

    When a cell has both types of segments active, other cells in its
    minicolumn must also have both types of segments to be considered
    predictive.

    @param activeBasalSegments (numpy array)
    @param activeApicalSegments (numpy array)

    @return (numpy array)
    """
    cellsForBasalSegments = self.basalConnections.mapSegmentsToCells(
      activeBasalSegments)
    cellsForApicalSegments = self.apicalConnections.mapSegmentsToCells(
      activeApicalSegments)

    fullyDepolarizedCells = np.intersect1d(cellsForBasalSegments,
                                           cellsForApicalSegments)
    partlyDepolarizedCells = np.setdiff1d(cellsForBasalSegments,
                                          fullyDepolarizedCells)

    # A cell with only a basal segment is inhibited if a fully depolarized
    # cell exists in the same minicolumn (the apical "tiebreak").
    inhibitedMask = np.in1d(partlyDepolarizedCells / self.cellsPerColumn,
                            fullyDepolarizedCells / self.cellsPerColumn)
    predictedCells = np.append(fullyDepolarizedCells,
                               partlyDepolarizedCells[~inhibitedMask])

    if self.useApicalTiebreak == False:
      predictedCells = cellsForBasalSegments

    return predictedCells


  @staticmethod
  def _learn(connections, rng, learningSegments, activeInput,
             growthCandidates, potentialOverlaps, initialPermanence,
             sampleSize, permanenceIncrement, permanenceDecrement,
             maxSynapsesPerSegment):
    """
    Adjust synapse permanences, grow new synapses, and grow new segments.

    @param learningSegments (numpy array)
    @param activeInput (numpy array)
    @param growthCandidates (numpy array)
    @param potentialOverlaps (numpy array)
    """
    # Learn on existing segments
    connections.adjustSynapses(learningSegments, activeInput,
                               permanenceIncrement, -permanenceDecrement)

    # Grow new synapses. Calculate "maxNew", the maximum number of synapses to
    # grow per segment. "maxNew" might be a number or it might be a list of
    # numbers.
    if sampleSize == -1:
      maxNew = len(growthCandidates)
    else:
      maxNew = sampleSize - potentialOverlaps[learningSegments]

    if maxSynapsesPerSegment != -1:
      synapseCounts = connections.mapSegmentsToSynapseCounts(
        learningSegments)
      numSynapsesToReachMax = maxSynapsesPerSegment - synapseCounts
      maxNew = np.where(maxNew <= numSynapsesToReachMax,
                        maxNew, numSynapsesToReachMax)

    connections.growSynapsesToSample(learningSegments, growthCandidates,
                                     maxNew, initialPermanence, rng)


  @staticmethod
  def _learnOnNewSegments(connections, rng, newSegmentCells,
                          growthCandidates, initialPermanence, sampleSize,
                          maxSynapsesPerSegment):
    """
    Create new segments on the given cells and grow synapses on them.

    @param connections (SparseMatrixConnections)
    @param rng (Random)
    @param newSegmentCells (numpy array)
    @param growthCandidates (numpy array)
    """
    numNewSynapses = len(growthCandidates)

    if sampleSize != -1:
      numNewSynapses = min(numNewSynapses, sampleSize)

    if maxSynapsesPerSegment != -1:
      numNewSynapses = min(numNewSynapses, maxSynapsesPerSegment)

    newSegments = connections.createSegments(newSegmentCells)
    connections.growSynapsesToSample(newSegments, growthCandidates,
                                     numNewSynapses, initialPermanence, rng)


  @classmethod
  def _chooseBestSegmentPerCell(cls,
                                connections,
                                cells,
                                allMatchingSegments,
                                potentialOverlaps):
    """
    For each specified cell, choose its matching segment with largest number
    of active potential synapses. When there's a tie, the first segment wins.

    @param connections (SparseMatrixConnections)
    @param cells (numpy array)
    @param allMatchingSegments (numpy array)
    @param potentialOverlaps (numpy array)

    @return (numpy array)
    One segment per cell
    """
    candidateSegments = connections.filterSegmentsByCell(
      allMatchingSegments, cells)

    # Narrow it down to one pair per cell.
onePerCellFilter = np2.argmaxMulti( potentialOverlaps[candidateSegments], connections.mapSegmentsToCells(candidateSegments)) learningSegments = candidateSegments[onePerCellFilter] return learningSegments @classmethod def _chooseBestSegmentPerColumn(cls, connections, matchingCells, allMatchingSegments, potentialOverlaps, cellsPerColumn): """ For all the columns covered by 'matchingCells', choose the column's matching segment with largest number of active potential synapses. When there's a tie, the first segment wins. @param connections (SparseMatrixConnections) @param matchingCells (numpy array) @param allMatchingSegments (numpy array) @param potentialOverlaps (numpy array) """ candidateSegments = connections.filterSegmentsByCell( allMatchingSegments, matchingCells) # Narrow it down to one segment per column. cellScores = potentialOverlaps[candidateSegments] columnsForCandidates = ( connections.mapSegmentsToCells(candidateSegments) / cellsPerColumn) onePerColumnFilter = np2.argmaxMulti(cellScores, columnsForCandidates) learningSegments = candidateSegments[onePerColumnFilter] return learningSegments @classmethod def _getCellsWithFewestSegments(cls, connections, rng, columns, cellsPerColumn): """ For each column, get the cell that has the fewest total basal segments. Break ties randomly. @param connections (SparseMatrixConnections) @param rng (Random) @param columns (numpy array) Columns to check @return (numpy array) One cell for each of the provided columns """ candidateCells = np2.getAllCellsInColumns(columns, cellsPerColumn) # Arrange the segment counts into one row per minicolumn. segmentCounts = np.reshape( connections.getSegmentCounts(candidateCells), newshape=(len(columns), cellsPerColumn)) # Filter to just the cells that are tied for fewest in their minicolumn. 
minSegmentCounts = np.amin(segmentCounts, axis=1, keepdims=True) candidateCells = candidateCells[np.flatnonzero( segmentCounts == minSegmentCounts)] # Filter to one cell per column, choosing randomly from the minimums. # To do the random choice, add a random offset to each index in-place, using # casting to floor the result. (_, onePerColumnFilter, numCandidatesInColumns) = np.unique(candidateCells / cellsPerColumn, return_index=True, return_counts=True) offsetPercents = np.empty(len(columns), dtype="float32") rng.initializeReal32Array(offsetPercents) np.add(onePerColumnFilter, offsetPercents * numCandidatesInColumns, out=onePerColumnFilter, casting="unsafe") return candidateCells[onePerColumnFilter] def getActiveCells(self): """ @return (numpy array) Active cells """ return self.activeCells def getPredictedActiveCells(self): """ @return (numpy array) Active cells that were correctly predicted """ return self.predictedActiveCells def getWinnerCells(self): """ @return (numpy array) Cells that were selected for learning """ return self.winnerCells def getActiveBasalSegments(self): """ @return (numpy array) Active basal segments for this timestep """ return self.activeBasalSegments def getActiveApicalSegments(self): """ @return (numpy array) Matching basal segments for this timestep """ return self.activeApicalSegments def numberOfColumns(self): """ Returns the number of columns in this layer. @return (int) Number of columns """ return self.columnCount def numberOfCells(self): """ Returns the number of cells in this layer. @return (int) Number of cells """ return self.numberOfColumns() * self.cellsPerColumn def getCellsPerColumn(self): """ Returns the number of cells per column. @return (int) The number of cells per column. """ return self.cellsPerColumn def getActivationThreshold(self): """ Returns the activation threshold. @return (int) The activation threshold. 
""" return self.activationThreshold def setActivationThreshold(self, activationThreshold): """ Sets the activation threshold. @param activationThreshold (int) activation threshold. """ self.activationThreshold = activationThreshold def getReducedBasalThreshold(self): """ Returns the reduced basal activation threshold for apically active cells. @return (int) The activation threshold. """ return self.reducedBasalThreshold def setReducedBasalThreshold(self, reducedBasalThreshold): """ Sets the reduced basal activation threshold for apically active cells. @param reducedBasalThreshold (int) activation threshold. """ self.reducedBasalThreshold = reducedBasalThreshold def getInitialPermanence(self): """ Get the initial permanence. @return (float) The initial permanence. """ return self.initialPermanence def setInitialPermanence(self, initialPermanence): """ Sets the initial permanence. @param initialPermanence (float) The initial permanence. """ self.initialPermanence = initialPermanence def getMinThreshold(self): """ Returns the min threshold. @return (int) The min threshold. """ return self.minThreshold def setMinThreshold(self, minThreshold): """ Sets the min threshold. @param minThreshold (int) min threshold. """ self.minThreshold = minThreshold def getSampleSize(self): """ Gets the sampleSize. @return (int) """ return self.sampleSize def setSampleSize(self, sampleSize): """ Sets the sampleSize. @param sampleSize (int) """ self.sampleSize = sampleSize def getPermanenceIncrement(self): """ Get the permanence increment. @return (float) The permanence increment. """ return self.permanenceIncrement def setPermanenceIncrement(self, permanenceIncrement): """ Sets the permanence increment. @param permanenceIncrement (float) The permanence increment. """ self.permanenceIncrement = permanenceIncrement def getPermanenceDecrement(self): """ Get the permanence decrement. @return (float) The permanence decrement. 
""" return self.permanenceDecrement def setPermanenceDecrement(self, permanenceDecrement): """ Sets the permanence decrement. @param permanenceDecrement (float) The permanence decrement. """ self.permanenceDecrement = permanenceDecrement def getBasalPredictedSegmentDecrement(self): """ Get the predicted segment decrement. @return (float) The predicted segment decrement. """ return self.basalPredictedSegmentDecrement def setBasalPredictedSegmentDecrement(self, predictedSegmentDecrement): """ Sets the predicted segment decrement. @param predictedSegmentDecrement (float) The predicted segment decrement. """ self.basalPredictedSegmentDecrement = basalPredictedSegmentDecrement def getApicalPredictedSegmentDecrement(self): """ Get the predicted segment decrement. @return (float) The predicted segment decrement. """ return self.apicalPredictedSegmentDecrement def setApicalPredictedSegmentDecrement(self, predictedSegmentDecrement): """ Sets the predicted segment decrement. @param predictedSegmentDecrement (float) The predicted segment decrement. """ self.apicalPredictedSegmentDecrement = apicalPredictedSegmentDecrement def getConnectedPermanence(self): """ Get the connected permanence. @return (float) The connected permanence. """ return self.connectedPermanence def setConnectedPermanence(self, connectedPermanence): """ Sets the connected permanence. @param connectedPermanence (float) The connected permanence. """ self.connectedPermanence = connectedPermanence def getUseApicalTieBreak(self): """ Get whether we actually use apical tie-break. @return (Bool) Whether apical tie-break is used. """ return self.useApicalTiebreak def setUseApicalTiebreak(self, useApicalTiebreak): """ Sets whether we actually use apical tie-break. @param useApicalTiebreak (Bool) Whether apical tie-break is used. """ self.useApicalTiebreak = useApicalTiebreak def getUseApicalModulationBasalThreshold(self): """ Get whether we actually use apical modulation of basal threshold. 
@return (Bool) Whether apical modulation is used. """ return self.useApicalModulationBasalThreshold def setUseApicalModulationBasalThreshold( self, useApicalModulationBasalThreshold): """ Sets whether we actually use apical modulation of basal threshold. @param useApicalModulationBasalThreshold (Bool) Whether apical modulation is used. """ self.useApicalModulationBasalThreshold = useApicalModulationBasalThreshold
class ApicalDependentTemporalMemory(object):
  """
  An alternate approach to apical dendrites. Every cell SDR is specific to
  both the basal and the apical input. Prediction requires both basal and
  apical support.

  A normal TemporalMemory trained on the sequences "A B C D" and "A B C E"
  will not assign "B" and "C" SDRs specific to their full sequence. These two
  sequences will use the same B' and C' SDRs. When the sequence reaches D/E,
  the SDRs finally diverge.

  With this algorithm, the SDRs diverge immediately, because the SDRs are
  specific to the apical input. But if there's never any apical input, there
  will never be predictions.
  """

  def __init__(self,
               columnDimensions=(2048,),
               basalInputDimensions=(),
               apicalInputDimensions=(),
               cellsPerColumn=32,
               activationThreshold=13,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               sampleSize=20,
               permanenceIncrement=0.1,
               permanenceDecrement=0.1,
               predictedSegmentDecrement=0.0,
               maxNewSynapseCount=None,
               maxSynapsesPerSegment=-1,
               maxSegmentsPerCell=None,
               seed=42):
    self.columnDimensions = columnDimensions
    self.numColumns = self._numPoints(columnDimensions)
    self.basalInputDimensions = basalInputDimensions
    self.apicalInputDimensions = apicalInputDimensions

    self.cellsPerColumn = cellsPerColumn
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.minThreshold = minThreshold

    self.sampleSize = sampleSize
    # Backward-compatibility shim for the deprecated parameter name.
    if maxNewSynapseCount is not None:
      print "Parameter 'maxNewSynapseCount' is deprecated. Use 'sampleSize'."
      self.sampleSize = maxNewSynapseCount

    if maxSegmentsPerCell is not None:
      print "Warning: ignoring parameter 'maxSegmentsPerCell'"

    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement
    self.predictedSegmentDecrement = predictedSegmentDecrement
    self.activationThreshold = activationThreshold
    self.maxSynapsesPerSegment = maxSynapsesPerSegment

    # One segment pool per input pathway; both index the same cell space.
    self.basalConnections = SparseMatrixConnections(
      self.numColumns*cellsPerColumn, self._numPoints(basalInputDimensions))
    self.apicalConnections = SparseMatrixConnections(
      self.numColumns*cellsPerColumn, self._numPoints(apicalInputDimensions))
    self.rng = Random(seed)

    self.activeCells = EMPTY_UINT_ARRAY
    self.winnerCells = EMPTY_UINT_ARRAY
    self.prevPredictedCells = EMPTY_UINT_ARRAY


  def reset(self):
    # Clear all per-timestep state; connections/permanences are kept.
    self.activeCells = EMPTY_UINT_ARRAY
    self.winnerCells = EMPTY_UINT_ARRAY
    self.prevPredictedCells = EMPTY_UINT_ARRAY


  def compute(self,
              activeColumns,
              basalInput,
              basalGrowthCandidates,
              apicalInput,
              apicalGrowthCandidates,
              learn=True):
    """
    Run one timestep: depolarize, activate, and (optionally) learn.

    @param activeColumns (numpy array)
    @param basalInput (numpy array)
    @param basalGrowthCandidates (numpy array)
    @param apicalInput (numpy array)
    @param apicalGrowthCandidates (numpy array)
    @param learn (bool)
    """
    # Calculate predictions for this timestep
    (activeBasalSegments,
     matchingBasalSegments,
     basalPotentialOverlaps) = self._calculateSegmentActivity(
       self.basalConnections, basalInput, self.connectedPermanence,
       self.activationThreshold, self.minThreshold)

    (activeApicalSegments,
     matchingApicalSegments,
     apicalPotentialOverlaps) = self._calculateSegmentActivity(
       self.apicalConnections, apicalInput, self.connectedPermanence,
       self.activationThreshold, self.minThreshold)

    # A cell is predicted only with BOTH an active basal and an active apical
    # segment ("apical dependent").
    predictedCells = np.intersect1d(
      self.basalConnections.mapSegmentsToCells(activeBasalSegments),
      self.apicalConnections.mapSegmentsToCells(activeApicalSegments))

    # Calculate active cells: predicted cells in active columns, plus every
    # cell in columns that burst (active but unpredicted).
    (correctPredictedCells,
     burstingColumns) = np2.setCompare(predictedCells, activeColumns,
                                       predictedCells / self.cellsPerColumn,
                                       rightMinusLeft=True)
    newActiveCells = np.concatenate((correctPredictedCells,
                                     np2.getAllCellsInColumns(
                                       burstingColumns, self.cellsPerColumn)))

    # Calculate learning
    (learningActiveBasalSegments,
     learningActiveApicalSegments,
     learningMatchingBasalSegments,
     learningMatchingApicalSegments,
     basalSegmentsToPunish,
     apicalSegmentsToPunish,
     newSegmentCells,
     learningCells) = self._calculateLearning(activeColumns,
                                              burstingColumns,
                                              correctPredictedCells,
                                              activeBasalSegments,
                                              activeApicalSegments,
                                              matchingBasalSegments,
                                              matchingApicalSegments,
                                              basalPotentialOverlaps,
                                              apicalPotentialOverlaps)

    if learn:
      # Learn on existing segments
      for learningSegments in (learningActiveBasalSegments,
                               learningMatchingBasalSegments):
        self._learn(self.basalConnections, self.rng, learningSegments,
                    basalInput, basalGrowthCandidates, basalPotentialOverlaps,
                    self.initialPermanence, self.sampleSize,
                    self.permanenceIncrement, self.permanenceDecrement,
                    self.maxSynapsesPerSegment)

      for learningSegments in (learningActiveApicalSegments,
                               learningMatchingApicalSegments):
        self._learn(self.apicalConnections, self.rng, learningSegments,
                    apicalInput, apicalGrowthCandidates,
                    apicalPotentialOverlaps, self.initialPermanence,
                    self.sampleSize, self.permanenceIncrement,
                    self.permanenceDecrement, self.maxSynapsesPerSegment)

      # Punish incorrect predictions.
      # NOTE(review): this guard is '!= 0.0' while _calculateLearning only
      # populates the punish lists when the decrement is '> 0.0'; for a
      # negative decrement these lists are empty tuples — confirm intended.
      if self.predictedSegmentDecrement != 0.0:
        self.basalConnections.adjustActiveSynapses(
          basalSegmentsToPunish, basalInput,
          -self.predictedSegmentDecrement)
        self.apicalConnections.adjustActiveSynapses(
          apicalSegmentsToPunish, apicalInput,
          -self.predictedSegmentDecrement)

      # Only grow segments if there is basal *and* apical input.
      if len(basalGrowthCandidates) > 0 and len(apicalGrowthCandidates) > 0:
        self._learnOnNewSegments(self.basalConnections, self.rng,
                                 newSegmentCells, basalGrowthCandidates,
                                 self.initialPermanence, self.sampleSize,
                                 self.maxSynapsesPerSegment)
        self._learnOnNewSegments(self.apicalConnections, self.rng,
                                 newSegmentCells, apicalGrowthCandidates,
                                 self.initialPermanence, self.sampleSize,
                                 self.maxSynapsesPerSegment)

    # Save the results
    self.prevPredictedCells = predictedCells
    self.activeCells = newActiveCells
    self.winnerCells = learningCells


  def _calculateLearning(self,
                         activeColumns,
                         burstingColumns,
                         correctPredictedCells,
                         activeBasalSegments,
                         activeApicalSegments,
                         matchingBasalSegments,
                         matchingApicalSegments,
                         basalPotentialOverlaps,
                         apicalPotentialOverlaps):
    """
    Learning occurs on pairs of segments. Correctly predicted cells always
    have active basal and apical segments, and we learn on these segments. In
    bursting columns, we either learn on an existing segment pair, or we grow
    a new pair of segments.

    @param activeColumns (numpy array)
    @param burstingColumns (numpy array)
    @param correctPredictedCells (numpy array)
    @param activeBasalSegments (numpy array)
    @param activeApicalSegments (numpy array)
    @param matchingBasalSegments (numpy array)
    @param matchingApicalSegments (numpy array)
    @param basalPotentialOverlaps (numpy array)
    @param apicalPotentialOverlaps (numpy array)

    @return (tuple)
    - learningActiveBasalSegments (numpy array)
      Active basal segments on correct predicted cells

    - learningActiveApicalSegments (numpy array)
      Active apical segments on correct predicted cells

    - learningMatchingBasalSegments (numpy array)
      Matching basal segments selected for learning in bursting columns

    - learningMatchingApicalSegments (numpy array)
      Matching apical segments selected for learning in bursting columns

    - basalSegmentsToPunish (numpy array)
      Basal segments that should be punished for predicting an inactive column

    - apicalSegmentsToPunish (numpy array)
      Apical segments that should be punished for predicting an inactive
      column

    - newSegmentCells (numpy array)
      Cells in bursting columns that were selected to grow new segments

    - learningCells (numpy array)
      Every cell that has a learning segment or was selected to grow a segment
    """
    # Correctly predicted columns
    learningActiveBasalSegments = self.basalConnections.filterSegmentsByCell(
      activeBasalSegments, correctPredictedCells)
    learningActiveApicalSegments = self.apicalConnections.filterSegmentsByCell(
      activeApicalSegments, correctPredictedCells)

    # Bursting columns: a cell is a learning candidate only if it has BOTH a
    # matching basal and a matching apical segment.
    cellsForMatchingBasal = self.basalConnections.mapSegmentsToCells(
      matchingBasalSegments)
    cellsForMatchingApical = self.apicalConnections.mapSegmentsToCells(
      matchingApicalSegments)
    matchingCells = np.intersect1d(cellsForMatchingBasal,
                                   cellsForMatchingApical)
    (matchingCellsInBurstingColumns,
     burstingColumnsWithNoMatch) = np2.setCompare(
       matchingCells, burstingColumns, matchingCells / self.cellsPerColumn,
       rightMinusLeft=True)

    (learningMatchingBasalSegments,
     learningMatchingApicalSegments) = self._chooseBestSegmentPairPerColumn(
       matchingCellsInBurstingColumns, matchingBasalSegments,
       matchingApicalSegments, basalPotentialOverlaps, apicalPotentialOverlaps)
    newSegmentCells = self._getCellsWithFewestSegments(
      burstingColumnsWithNoMatch)

    # Incorrectly predicted columns
    if self.predictedSegmentDecrement > 0.0:
      correctMatchingBasalMask = np.in1d(
        cellsForMatchingBasal / self.cellsPerColumn, activeColumns)
      correctMatchingApicalMask = np.in1d(
        cellsForMatchingApical / self.cellsPerColumn, activeColumns)

      basalSegmentsToPunish = matchingBasalSegments[~correctMatchingBasalMask]
      apicalSegmentsToPunish = matchingApicalSegments[
        ~correctMatchingApicalMask]
    else:
      basalSegmentsToPunish = ()
      apicalSegmentsToPunish = ()

    # Make a list of every cell that is learning
    learningCells = np.concatenate(
      (correctPredictedCells,
       self.basalConnections.mapSegmentsToCells(learningMatchingBasalSegments),
       newSegmentCells))

    return (learningActiveBasalSegments,
            learningActiveApicalSegments,
            learningMatchingBasalSegments,
            learningMatchingApicalSegments,
            basalSegmentsToPunish,
            apicalSegmentsToPunish,
            newSegmentCells,
            learningCells)


  @staticmethod
  def _calculateSegmentActivity(connections, activeInput,
                                connectedPermanence, activationThreshold,
                                minThreshold):
    """
    Calculate the active and matching segments for this timestep.

    @param connections (SparseMatrixConnections)
    @param activeInput (numpy array)

    @return (tuple)
    - activeSegments (numpy array)
      Dendrite segments with enough active connected synapses to cause a
      dendritic spike

    - matchingSegments (numpy array)
      Dendrite segments with enough active potential synapses to be selected
      for learning in a bursting column

    - potentialOverlaps (numpy array)
      The number of active potential synapses for each segment. Includes
      counts for active, matching, and nonmatching segments.
    """
    # Active: overlap counted over connected synapses only.
    overlaps = connections.computeActivity(activeInput, connectedPermanence)
    activeSegments = np.flatnonzero(overlaps >= activationThreshold)

    # Matching: overlap counted over all potential synapses.
    potentialOverlaps = connections.computeActivity(activeInput)
    matchingSegments = np.flatnonzero(potentialOverlaps >= minThreshold)

    return (activeSegments,
            matchingSegments,
            potentialOverlaps)


  @staticmethod
  def _learn(connections, rng, learningSegments, activeInput,
             growthCandidates, potentialOverlaps, initialPermanence,
             sampleSize, permanenceIncrement, permanenceDecrement,
             maxSynapsesPerSegment):
    """
    Adjust synapse permanences, and grow new synapses.

    @param learningActiveSegments (numpy array)
    @param learningMatchingSegments (numpy array)
    @param segmentsToPunish (numpy array)
    @param newSegmentCells (numpy array)
    @param activeInput (numpy array)
    @param growthCandidates (numpy array)
    @param potentialOverlaps (numpy array)
    """
    # Learn on existing segments: reinforce active synapses, punish inactive.
    connections.adjustSynapses(learningSegments, activeInput,
                               permanenceIncrement, -permanenceDecrement)

    # Grow new synapses. Calculate "maxNew", the maximum number of synapses to
    # grow per segment. "maxNew" might be a number or it might be a list of
    # numbers.
    if sampleSize == -1:
      maxNew = len(growthCandidates)
    else:
      maxNew = sampleSize - potentialOverlaps[learningSegments]

    if maxSynapsesPerSegment != -1:
      synapseCounts = connections.mapSegmentsToSynapseCounts(learningSegments)
      numSynapsesToReachMax = maxSynapsesPerSegment - synapseCounts
      # Clip per-segment growth so no segment exceeds maxSynapsesPerSegment.
      maxNew = np.where(maxNew <= numSynapsesToReachMax,
                        maxNew, numSynapsesToReachMax)

    connections.growSynapsesToSample(learningSegments, growthCandidates,
                                     maxNew, initialPermanence, rng)


  @staticmethod
  def _learnOnNewSegments(connections, rng, newSegmentCells,
                          growthCandidates, initialPermanence, sampleSize,
                          maxSynapsesPerSegment):
    """
    Create new segments, and grow synapses on them.

    @param connections (SparseMatrixConnections)
    @param rng (Random)
    @param newSegmentCells (numpy array)
    @param growthCandidates (numpy array)
    """
    numNewSynapses = len(growthCandidates)

    if sampleSize != -1:
      numNewSynapses = min(numNewSynapses, sampleSize)

    if maxSynapsesPerSegment != -1:
      numNewSynapses = min(numNewSynapses, maxSynapsesPerSegment)

    newSegments = connections.createSegments(newSegmentCells)
    connections.growSynapsesToSample(newSegments, growthCandidates,
                                     numNewSynapses, initialPermanence, rng)


  def _chooseBestSegmentPairPerColumn(self,
                                      matchingCellsInBurstingColumns,
                                      matchingBasalSegments,
                                      matchingApicalSegments,
                                      basalPotentialOverlaps,
                                      apicalPotentialOverlaps):
    """
    Choose the best pair of matching segments - one basal and one apical - for
    each column. Pairs are ranked by the sum of their potential overlaps.
    When there's a tie, the first pair wins.

    @param matchingCellsInBurstingColumns (numpy array)
    Cells in bursting columns that have at least one matching basal segment
    and at least one matching apical segment

    @param matchingBasalSegments (numpy array)
    @param matchingApicalSegments (numpy array)
    @param basalPotentialOverlaps (numpy array)
    @param apicalPotentialOverlaps (numpy array)

    @return (tuple)
    - learningBasalSegments (numpy array)
      The selected basal segments

    - learningApicalSegments (numpy array)
      The selected apical segments
    """
    basalCandidateSegments = self.basalConnections.filterSegmentsByCell(
      matchingBasalSegments, matchingCellsInBurstingColumns)
    apicalCandidateSegments = self.apicalConnections.filterSegmentsByCell(
      matchingApicalSegments, matchingCellsInBurstingColumns)

    # Sort everything once rather than inside of each call to argmaxMulti.
    # (The assumeSorted=True calls below rely on this in-place sort.)
    self.basalConnections.sortSegmentsByCell(basalCandidateSegments)
    self.apicalConnections.sortSegmentsByCell(apicalCandidateSegments)

    # Narrow it down to one pair per cell.
    oneBasalPerCellFilter = np2.argmaxMulti(
      basalPotentialOverlaps[basalCandidateSegments],
      self.basalConnections.mapSegmentsToCells(basalCandidateSegments),
      assumeSorted=True)
    basalCandidateSegments = basalCandidateSegments[oneBasalPerCellFilter]
    oneApicalPerCellFilter = np2.argmaxMulti(
      apicalPotentialOverlaps[apicalCandidateSegments],
      self.apicalConnections.mapSegmentsToCells(apicalCandidateSegments),
      assumeSorted=True)
    apicalCandidateSegments = apicalCandidateSegments[oneApicalPerCellFilter]

    # Narrow it down to one pair per column: rank pairs by the SUM of their
    # basal and apical potential overlaps.
    cellScores = (basalPotentialOverlaps[basalCandidateSegments] +
                  apicalPotentialOverlaps[apicalCandidateSegments])
    columnsForCandidates = (
      self.basalConnections.mapSegmentsToCells(basalCandidateSegments) /
      self.cellsPerColumn)
    onePerColumnFilter = np2.argmaxMulti(cellScores, columnsForCandidates,
                                         assumeSorted=True)

    learningBasalSegments = basalCandidateSegments[onePerColumnFilter]
    learningApicalSegments = apicalCandidateSegments[onePerColumnFilter]

    return (learningBasalSegments,
            learningApicalSegments)


  def _getCellsWithFewestSegments(self, columns):
    """
    For each column, get the cell that has the fewest total segments (basal or
    apical). Break ties randomly.

    @param columns (numpy array)
    Columns to check

    @return (numpy array)
    One cell for each of the provided columns
    """
    candidateCells = np2.getAllCellsInColumns(columns, self.cellsPerColumn)

    # Arrange the segment counts into one row per minicolumn.
    segmentCounts = np.reshape(
      self.basalConnections.getSegmentCounts(candidateCells) +
      self.apicalConnections.getSegmentCounts(candidateCells),
      newshape=(len(columns), self.cellsPerColumn))

    # Filter to just the cells that are tied for fewest in their minicolumn.
    minSegmentCounts = np.amin(segmentCounts, axis=1, keepdims=True)
    candidateCells = candidateCells[np.flatnonzero(segmentCounts ==
                                                   minSegmentCounts)]

    # Filter to one cell per column, choosing randomly from the minimums.
    # To do the random choice, add a random offset to each index in-place,
    # using casting to floor the result.
    (_,
     onePerColumnFilter,
     numCandidatesInColumns) = np.unique(candidateCells / self.cellsPerColumn,
                                         return_index=True,
                                         return_counts=True)

    offsetPercents = np.empty(len(columns), dtype="float32")
    self.rng.initializeReal32Array(offsetPercents)

    np.add(onePerColumnFilter,
           offsetPercents*numCandidatesInColumns,
           out=onePerColumnFilter,
           casting="unsafe")

    return candidateCells[onePerColumnFilter]


  @staticmethod
  def _numPoints(dimensions):
    """
    Get the number of discrete points in a set of dimensions.

    @param dimensions (sequence of integers)
    @return (int)
    """
    if len(dimensions) == 0:
      return 0
    else:
      return reduce(operator.mul, dimensions, 1)


  def getActiveCells(self):
    # Active cells from the most recent compute().
    return self.activeCells


  def getWinnerCells(self):
    # Cells selected for learning in the most recent compute().
    return self.winnerCells


  def getPreviouslyPredictedCells(self):
    # Cells that were predicted going into the most recent compute().
    return self.prevPredictedCells
class ApicalTiebreakTemporalMemory(object):
  """
  A generalized Temporal Memory with apical dendrites that add a "tiebreak".

  Basal connections are used to implement traditional Temporal Memory. The
  apical connections are used for further disambiguation. If multiple cells in
  a minicolumn have active basal segments, each of those cells is predicted,
  unless one of them also has an active apical segment, in which case only the
  cells with active basal and apical segments are predicted.

  In other words, the apical connections have no effect unless the basal input
  is a union of SDRs (e.g. from bursting minicolumns).

  This class is generalized in two ways:

  - This class does not specify when a 'timestep' begins and ends. It exposes
    two main methods: 'depolarizeCells' and 'activateCells', and callers or
    subclasses can introduce the notion of a timestep.
  - This class is unaware of whether its 'basalInput' or 'apicalInput' are from
    internal or external cells. They are just cell numbers. The caller knows
    what these cell numbers mean, but the TemporalMemory doesn't.
  """

  def __init__(self,
               columnCount=2048,
               basalInputSize=0,
               apicalInputSize=0,
               cellsPerColumn=32,
               activationThreshold=13,
               reducedBasalThreshold=13,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               sampleSize=20,
               permanenceIncrement=0.1,
               permanenceDecrement=0.1,
               basalPredictedSegmentDecrement=0.0,
               apicalPredictedSegmentDecrement=0.0,
               maxSynapsesPerSegment=-1,
               seed=42):
    """
    @param columnCount (int)
    The number of minicolumns

    @param basalInputSize (int)
    The number of bits in the basal input

    @param apicalInputSize (int)
    The number of bits in the apical input

    @param cellsPerColumn (int)
    Number of cells per column

    @param activationThreshold (int)
    If the number of active connected synapses on a segment is at least this
    threshold, the segment is said to be active.

    @param reducedBasalThreshold (int)
    The activation threshold of basal (lateral) segments for cells that have
    active apical segments. If equal to activationThreshold (default), this
    parameter has no effect.

    @param initialPermanence (float)
    Initial permanence of a new synapse

    @param connectedPermanence (float)
    If the permanence value for a synapse is greater than this value, it is
    said to be connected.

    @param minThreshold (int)
    If the number of potential synapses active on a segment is at least this
    threshold, it is said to be "matching" and is eligible for learning.

    @param sampleSize (int)
    How much of the active SDR to sample with synapses.

    @param permanenceIncrement (float)
    Amount by which permanences of synapses are incremented during learning.

    @param permanenceDecrement (float)
    Amount by which permanences of synapses are decremented during learning.

    @param basalPredictedSegmentDecrement (float)
    Amount by which basal segments are punished for incorrect predictions.

    @param apicalPredictedSegmentDecrement (float)
    Amount by which apical segments are punished for incorrect predictions.

    @param maxSynapsesPerSegment
    The maximum number of synapses per segment.

    @param seed (int)
    Seed for the random number generator.
    """
    self.columnCount = columnCount
    self.cellsPerColumn = cellsPerColumn
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.reducedBasalThreshold = reducedBasalThreshold
    self.minThreshold = minThreshold
    self.sampleSize = sampleSize
    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement
    self.basalPredictedSegmentDecrement = basalPredictedSegmentDecrement
    self.apicalPredictedSegmentDecrement = apicalPredictedSegmentDecrement
    self.activationThreshold = activationThreshold
    self.maxSynapsesPerSegment = maxSynapsesPerSegment

    # One row of synapses per (cell, segment); inputs are flat bit indices.
    self.basalConnections = SparseMatrixConnections(
      columnCount*cellsPerColumn, basalInputSize)
    self.apicalConnections = SparseMatrixConnections(
      columnCount*cellsPerColumn, apicalInputSize)
    self.rng = Random(seed)

    # Per-timestep state, refreshed by depolarizeCells / activateCells.
    self.activeCells = np.empty(0, dtype="uint32")
    self.winnerCells = np.empty(0, dtype="uint32")
    self.predictedCells = np.empty(0, dtype="uint32")
    self.predictedActiveCells = np.empty(0, dtype="uint32")
    self.activeBasalSegments = np.empty(0, dtype="uint32")
    self.activeApicalSegments = np.empty(0, dtype="uint32")
    self.matchingBasalSegments = np.empty(0, dtype="uint32")
    self.matchingApicalSegments = np.empty(0, dtype="uint32")
    self.basalPotentialOverlaps = np.empty(0, dtype="int32")
    self.apicalPotentialOverlaps = np.empty(0, dtype="int32")

    self.useApicalTiebreak = True
    self.useApicalModulationBasalThreshold = True


  def reset(self):
    """
    Clear all cell and segment activity.
    """
    self.activeCells = np.empty(0, dtype="uint32")
    self.winnerCells = np.empty(0, dtype="uint32")
    self.predictedCells = np.empty(0, dtype="uint32")
    self.predictedActiveCells = np.empty(0, dtype="uint32")
    self.activeBasalSegments = np.empty(0, dtype="uint32")
    self.activeApicalSegments = np.empty(0, dtype="uint32")
    self.matchingBasalSegments = np.empty(0, dtype="uint32")
    self.matchingApicalSegments = np.empty(0, dtype="uint32")
    self.basalPotentialOverlaps = np.empty(0, dtype="int32")
    self.apicalPotentialOverlaps = np.empty(0, dtype="int32")


  def depolarizeCells(self, basalInput, apicalInput, learn):
    """
    Calculate predictions.

    @param basalInput (numpy array)
    List of active input bits for the basal dendrite segments

    @param apicalInput (numpy array)
    List of active input bits for the apical dendrite segments

    @param learn (bool)
    Whether learning is enabled. Some TM implementations may depolarize cells
    differently or do segment activity bookkeeping when learning is enabled.
    """
    (activeApicalSegments,
     matchingApicalSegments,
     apicalPotentialOverlaps) = self._calculateApicalSegmentActivity(
       self.apicalConnections, apicalInput, self.connectedPermanence,
       self.activationThreshold, self.minThreshold)

    if learn or not self.useApicalModulationBasalThreshold:
      # During learning (or when modulation is disabled), basal segments are
      # held to the full activation threshold for every cell.
      reducedBasalThresholdCells = ()
    else:
      # Cells with active apical segments get the reduced basal threshold.
      reducedBasalThresholdCells = self.apicalConnections.mapSegmentsToCells(
        activeApicalSegments)

    (activeBasalSegments,
     matchingBasalSegments,
     basalPotentialOverlaps) = self._calculateBasalSegmentActivity(
       self.basalConnections, basalInput, reducedBasalThresholdCells,
       self.connectedPermanence, self.activationThreshold, self.minThreshold,
       self.reducedBasalThreshold)

    predictedCells = self._calculatePredictedCells(activeBasalSegments,
                                                   activeApicalSegments)

    self.predictedCells = predictedCells
    self.activeBasalSegments = activeBasalSegments
    self.activeApicalSegments = activeApicalSegments
    self.matchingBasalSegments = matchingBasalSegments
    self.matchingApicalSegments = matchingApicalSegments
    self.basalPotentialOverlaps = basalPotentialOverlaps
    self.apicalPotentialOverlaps = apicalPotentialOverlaps


  def activateCells(self,
                    activeColumns,
                    basalReinforceCandidates,
                    apicalReinforceCandidates,
                    basalGrowthCandidates,
                    apicalGrowthCandidates,
                    learn=True):
    """
    Activate cells in the specified columns, using the result of the previous
    'depolarizeCells' as predictions. Then learn.

    @param activeColumns (numpy array)
    List of active columns

    @param basalReinforceCandidates (numpy array)
    List of bits that the active cells may reinforce basal synapses to.

    @param apicalReinforceCandidates (numpy array)
    List of bits that the active cells may reinforce apical synapses to.

    @param basalGrowthCandidates (numpy array)
    List of bits that the active cells may grow new basal synapses to.

    @param apicalGrowthCandidates (numpy array)
    List of bits that the active cells may grow new apical synapses to

    @param learn (bool)
    Whether to grow / reinforce / punish synapses
    """
    # Calculate active cells: correctly predicted cells activate alone;
    # unpredicted active columns burst (every cell becomes active).
    (correctPredictedCells,
     burstingColumns) = np2.setCompare(self.predictedCells, activeColumns,
                                       self.predictedCells //
                                       self.cellsPerColumn,
                                       rightMinusLeft=True)
    newActiveCells = np.concatenate((correctPredictedCells,
                                     np2.getAllCellsInColumns(
                                       burstingColumns, self.cellsPerColumn)))

    # Calculate learning. Basal learning decides the learning cells; apical
    # learning follows the same cells.
    (learningActiveBasalSegments,
     learningMatchingBasalSegments,
     basalSegmentsToPunish,
     newBasalSegmentCells,
     learningCells) = self._calculateBasalLearning(
       activeColumns, burstingColumns, correctPredictedCells,
       self.activeBasalSegments, self.matchingBasalSegments,
       self.basalPotentialOverlaps)

    (learningActiveApicalSegments,
     learningMatchingApicalSegments,
     apicalSegmentsToPunish,
     newApicalSegmentCells) = self._calculateApicalLearning(
       learningCells, activeColumns, self.activeApicalSegments,
       self.matchingApicalSegments, self.apicalPotentialOverlaps)

    # Learn
    if learn:
      # Learn on existing segments
      for learningSegments in (learningActiveBasalSegments,
                               learningMatchingBasalSegments):
        self._learn(self.basalConnections, self.rng, learningSegments,
                    basalReinforceCandidates, basalGrowthCandidates,
                    self.basalPotentialOverlaps,
                    self.initialPermanence, self.sampleSize,
                    self.permanenceIncrement, self.permanenceDecrement,
                    self.maxSynapsesPerSegment)

      for learningSegments in (learningActiveApicalSegments,
                               learningMatchingApicalSegments):
        self._learn(self.apicalConnections, self.rng, learningSegments,
                    apicalReinforceCandidates, apicalGrowthCandidates,
                    self.apicalPotentialOverlaps,
                    self.initialPermanence, self.sampleSize,
                    self.permanenceIncrement, self.permanenceDecrement,
                    self.maxSynapsesPerSegment)

      # Punish incorrect predictions
      if self.basalPredictedSegmentDecrement != 0.0:
        self.basalConnections.adjustActiveSynapses(
          basalSegmentsToPunish, basalReinforceCandidates,
          -self.basalPredictedSegmentDecrement)
      if self.apicalPredictedSegmentDecrement != 0.0:
        self.apicalConnections.adjustActiveSynapses(
          apicalSegmentsToPunish, apicalReinforceCandidates,
          -self.apicalPredictedSegmentDecrement)

      # Grow new segments
      if len(basalGrowthCandidates) > 0:
        self._learnOnNewSegments(self.basalConnections, self.rng,
                                 newBasalSegmentCells, basalGrowthCandidates,
                                 self.initialPermanence, self.sampleSize,
                                 self.maxSynapsesPerSegment)
      if len(apicalGrowthCandidates) > 0:
        self._learnOnNewSegments(self.apicalConnections, self.rng,
                                 newApicalSegmentCells, apicalGrowthCandidates,
                                 self.initialPermanence, self.sampleSize,
                                 self.maxSynapsesPerSegment)

    # Save the results
    newActiveCells.sort()
    learningCells.sort()
    self.activeCells = newActiveCells
    self.winnerCells = learningCells
    self.predictedActiveCells = correctPredictedCells


  def _calculateBasalLearning(self,
                              activeColumns,
                              burstingColumns,
                              correctPredictedCells,
                              activeBasalSegments,
                              matchingBasalSegments,
                              basalPotentialOverlaps):
    """
    Basic Temporal Memory learning. Correctly predicted cells always have
    active basal segments, and we learn on these segments. In bursting
    columns, we either learn on an existing basal segment, or we grow a new
    one.

    The only influence apical dendrites have on basal learning is: the apical
    dendrites influence which cells are considered "predicted". So an active
    apical dendrite can prevent some basal segments in active columns from
    learning.

    @param activeColumns (numpy array)
    @param burstingColumns (numpy array)
    @param correctPredictedCells (numpy array)
    @param activeBasalSegments (numpy array)
    @param matchingBasalSegments (numpy array)
    @param basalPotentialOverlaps (numpy array)

    @return (tuple)
    - learningActiveBasalSegments (numpy array)
      Active basal segments on correct predicted cells

    - learningMatchingBasalSegments (numpy array)
      Matching basal segments selected for learning in bursting columns

    - basalSegmentsToPunish (numpy array)
      Basal segments that should be punished for predicting an inactive column

    - newBasalSegmentCells (numpy array)
      Cells in bursting columns that were selected to grow new basal segments

    - learningCells (numpy array)
      Cells that have learning basal segments or are selected to grow a basal
      segment
    """
    # Correctly predicted columns
    learningActiveBasalSegments = self.basalConnections.filterSegmentsByCell(
      activeBasalSegments, correctPredictedCells)

    cellsForMatchingBasal = self.basalConnections.mapSegmentsToCells(
      matchingBasalSegments)
    matchingCells = np.unique(cellsForMatchingBasal)

    # Split bursting columns into those with a matching segment to reuse and
    # those that must grow a brand new segment.
    (matchingCellsInBurstingColumns,
     burstingColumnsWithNoMatch) = np2.setCompare(
       matchingCells, burstingColumns, matchingCells // self.cellsPerColumn,
       rightMinusLeft=True)

    learningMatchingBasalSegments = self._chooseBestSegmentPerColumn(
      self.basalConnections, matchingCellsInBurstingColumns,
      matchingBasalSegments, basalPotentialOverlaps, self.cellsPerColumn)
    newBasalSegmentCells = self._getCellsWithFewestSegments(
      self.basalConnections, self.rng, burstingColumnsWithNoMatch,
      self.cellsPerColumn)

    learningCells = np.concatenate(
      (correctPredictedCells,
       self.basalConnections.mapSegmentsToCells(learningMatchingBasalSegments),
       newBasalSegmentCells))

    # Incorrectly predicted columns: matching segments whose column didn't
    # become active are punished.
    correctMatchingBasalMask = np.in1d(
      cellsForMatchingBasal // self.cellsPerColumn, activeColumns)

    basalSegmentsToPunish = matchingBasalSegments[~correctMatchingBasalMask]

    return (learningActiveBasalSegments,
            learningMatchingBasalSegments,
            basalSegmentsToPunish,
            newBasalSegmentCells,
            learningCells)


  def _calculateApicalLearning(self,
                               learningCells,
                               activeColumns,
                               activeApicalSegments,
                               matchingApicalSegments,
                               apicalPotentialOverlaps):
    """
    Calculate apical learning for each learning cell.

    The set of learning cells was determined completely from basal segments.
    Do all apical learning on the same cells.

    Learn on any active segments on learning cells. For cells without active
    segments, learn on the best matching segment. For cells without a matching
    segment, grow a new segment.

    @param learningCells (numpy array)
    @param activeColumns (numpy array)
    @param activeApicalSegments (numpy array)
    @param matchingApicalSegments (numpy array)
    @param apicalPotentialOverlaps (numpy array)

    @return (tuple)
    - learningActiveApicalSegments (numpy array)
      Active apical segments on learning cells

    - learningMatchingApicalSegments (numpy array)
      Matching apical segments selected for learning on learning cells without
      an active apical segment

    - apicalSegmentsToPunish (numpy array)
      Apical segments that should be punished for predicting an inactive column

    - newApicalSegmentCells (numpy array)
      Learning cells that were selected to grow new apical segments
    """
    # Cells with active apical segments
    learningActiveApicalSegments = self.apicalConnections.filterSegmentsByCell(
      activeApicalSegments, learningCells)

    # Cells with matching apical segments
    learningCellsWithoutActiveApical = np.setdiff1d(
      learningCells,
      self.apicalConnections.mapSegmentsToCells(learningActiveApicalSegments))
    cellsForMatchingApical = self.apicalConnections.mapSegmentsToCells(
      matchingApicalSegments)
    learningCellsWithMatchingApical = np.intersect1d(
      learningCellsWithoutActiveApical, cellsForMatchingApical)
    learningMatchingApicalSegments = self._chooseBestSegmentPerCell(
      self.apicalConnections, learningCellsWithMatchingApical,
      matchingApicalSegments, apicalPotentialOverlaps)

    # Cells that need to grow an apical segment
    newApicalSegmentCells = np.setdiff1d(learningCellsWithoutActiveApical,
                                         learningCellsWithMatchingApical)

    # Incorrectly predicted columns
    correctMatchingApicalMask = np.in1d(
      cellsForMatchingApical // self.cellsPerColumn, activeColumns)

    apicalSegmentsToPunish = matchingApicalSegments[~correctMatchingApicalMask]

    return (learningActiveApicalSegments,
            learningMatchingApicalSegments,
            apicalSegmentsToPunish,
            newApicalSegmentCells)


  @staticmethod
  def _calculateApicalSegmentActivity(connections, activeInput,
                                      connectedPermanence,
                                      activationThreshold, minThreshold):
    """
    Calculate the active and matching apical segments for this timestep.

    @param connections (SparseMatrixConnections)
    @param activeInput (numpy array)

    @return (tuple)
    - activeSegments (numpy array)
      Dendrite segments with enough active connected synapses to cause a
      dendritic spike

    - matchingSegments (numpy array)
      Dendrite segments with enough active potential synapses to be selected
      for learning in a bursting column

    - potentialOverlaps (numpy array)
      The number of active potential synapses for each segment. Includes
      counts for active, matching, and nonmatching segments.
    """
    # Active: count only connected synapses.
    overlaps = connections.computeActivity(activeInput, connectedPermanence)
    activeSegments = np.flatnonzero(overlaps >= activationThreshold)

    # Matching: count all potential synapses, regardless of permanence.
    potentialOverlaps = connections.computeActivity(activeInput)
    matchingSegments = np.flatnonzero(potentialOverlaps >= minThreshold)

    return (activeSegments,
            matchingSegments,
            potentialOverlaps)


  @staticmethod
  def _calculateBasalSegmentActivity(connections, activeInput,
                                     reducedBasalThresholdCells,
                                     connectedPermanence,
                                     activationThreshold, minThreshold,
                                     reducedBasalThreshold):
    """
    Calculate the active and matching basal segments for this timestep.

    The difference with _calculateApicalSegmentActivity is that cells with
    active apical segments (collected in reducedBasalThresholdCells) have a
    lower activation threshold for their basal segments (set by
    reducedBasalThreshold parameter).

    @param connections (SparseMatrixConnections)
    @param activeInput (numpy array)

    @return (tuple)
    - activeSegments (numpy array)
      Dendrite segments with enough active connected synapses to cause a
      dendritic spike

    - matchingSegments (numpy array)
      Dendrite segments with enough active potential synapses to be selected
      for learning in a bursting column

    - potentialOverlaps (numpy array)
      The number of active potential synapses for each segment. Includes
      counts for active, matching, and nonmatching segments.
    """
    # Active apical segments lower the activation threshold for basal
    # (lateral) segments.
    overlaps = connections.computeActivity(activeInput, connectedPermanence)
    outrightActiveSegments = np.flatnonzero(overlaps >= activationThreshold)
    if (reducedBasalThreshold != activationThreshold and
        len(reducedBasalThresholdCells) > 0):
      # Segments in the [reducedBasalThreshold, activationThreshold) band are
      # active only if they belong to an apically active cell.
      potentiallyActiveSegments = np.flatnonzero(
        (overlaps < activationThreshold) & (overlaps >= reducedBasalThreshold))
      cellsOfCASegments = connections.mapSegmentsToCells(
        potentiallyActiveSegments)
      # Conditionally active segments: those on apically active cells.
      conditionallyActiveSegments = potentiallyActiveSegments[
        np.in1d(cellsOfCASegments, reducedBasalThresholdCells)]
      activeSegments = np.concatenate((outrightActiveSegments,
                                       conditionallyActiveSegments))
    else:
      activeSegments = outrightActiveSegments

    # Matching: count all potential synapses, regardless of permanence.
    potentialOverlaps = connections.computeActivity(activeInput)
    matchingSegments = np.flatnonzero(potentialOverlaps >= minThreshold)

    return (activeSegments,
            matchingSegments,
            potentialOverlaps)


  def _calculatePredictedCells(self, activeBasalSegments,
                               activeApicalSegments):
    """
    Calculate the predicted cells, given the set of active segments.

    An active basal segment is enough to predict a cell. An active apical
    segment is *not* enough to predict a cell.

    When a cell has both types of segments active, other cells in its
    minicolumn must also have both types of segments to be considered
    predictive.

    @param activeBasalSegments (numpy array)
    @param activeApicalSegments (numpy array)

    @return (numpy array)
    """
    cellsForBasalSegments = self.basalConnections.mapSegmentsToCells(
      activeBasalSegments)
    cellsForApicalSegments = self.apicalConnections.mapSegmentsToCells(
      activeApicalSegments)

    # Cells with both basal and apical activity win the "tiebreak": they
    # inhibit basal-only cells in the same minicolumn.
    fullyDepolarizedCells = np.intersect1d(cellsForBasalSegments,
                                           cellsForApicalSegments)
    partlyDepolarizedCells = np.setdiff1d(cellsForBasalSegments,
                                          fullyDepolarizedCells)

    inhibitedMask = np.in1d(partlyDepolarizedCells // self.cellsPerColumn,
                            fullyDepolarizedCells // self.cellsPerColumn)
    predictedCells = np.append(fullyDepolarizedCells,
                               partlyDepolarizedCells[~inhibitedMask])

    if not self.useApicalTiebreak:
      # Tiebreak disabled: every cell with an active basal segment is
      # predicted.
      predictedCells = cellsForBasalSegments

    return predictedCells


  @staticmethod
  def _learn(connections, rng, learningSegments, activeInput,
             growthCandidates, potentialOverlaps, initialPermanence,
             sampleSize, permanenceIncrement, permanenceDecrement,
             maxSynapsesPerSegment):
    """
    Adjust synapse permanences and grow new synapses on the given segments.

    @param connections (SparseMatrixConnections)
    @param rng (Random)
    @param learningSegments (numpy array)
    @param activeInput (numpy array)
    @param growthCandidates (numpy array)
    @param potentialOverlaps (numpy array)
    """
    # Learn on existing segments: reinforce synapses to active input,
    # decrement synapses to inactive input.
    connections.adjustSynapses(learningSegments, activeInput,
                               permanenceIncrement, -permanenceDecrement)

    # Grow new synapses. Calculate "maxNew", the maximum number of synapses to
    # grow per segment. "maxNew" might be a number or it might be a list of
    # numbers.
    if sampleSize == -1:
      maxNew = len(growthCandidates)
    else:
      maxNew = sampleSize - potentialOverlaps[learningSegments]

    if maxSynapsesPerSegment != -1:
      # Cap growth so no segment exceeds maxSynapsesPerSegment.
      synapseCounts = connections.mapSegmentsToSynapseCounts(
        learningSegments)
      numSynapsesToReachMax = maxSynapsesPerSegment - synapseCounts
      maxNew = np.where(maxNew <= numSynapsesToReachMax,
                        maxNew, numSynapsesToReachMax)

    connections.growSynapsesToSample(learningSegments, growthCandidates,
                                     maxNew, initialPermanence, rng)


  @staticmethod
  def _learnOnNewSegments(connections, rng, newSegmentCells,
                          growthCandidates, initialPermanence, sampleSize,
                          maxSynapsesPerSegment):
    """
    Create one new segment on each of the given cells and grow synapses on it.

    @param connections (SparseMatrixConnections)
    @param rng (Random)
    @param newSegmentCells (numpy array)
    @param growthCandidates (numpy array)
    """
    numNewSynapses = len(growthCandidates)

    if sampleSize != -1:
      numNewSynapses = min(numNewSynapses, sampleSize)

    if maxSynapsesPerSegment != -1:
      numNewSynapses = min(numNewSynapses, maxSynapsesPerSegment)

    newSegments = connections.createSegments(newSegmentCells)
    connections.growSynapsesToSample(newSegments, growthCandidates,
                                     numNewSynapses, initialPermanence, rng)


  @classmethod
  def _chooseBestSegmentPerCell(cls, connections, cells, allMatchingSegments,
                                potentialOverlaps):
    """
    For each specified cell, choose its matching segment with largest number
    of active potential synapses. When there's a tie, the first segment wins.

    @param connections (SparseMatrixConnections)
    @param cells (numpy array)
    @param allMatchingSegments (numpy array)
    @param potentialOverlaps (numpy array)

    @return (numpy array)
    One segment per cell
    """
    candidateSegments = connections.filterSegmentsByCell(allMatchingSegments,
                                                         cells)

    # Narrow it down to one pair per cell.
    onePerCellFilter = np2.argmaxMulti(potentialOverlaps[candidateSegments],
                                       connections.mapSegmentsToCells(
                                         candidateSegments))
    learningSegments = candidateSegments[onePerCellFilter]

    return learningSegments


  @classmethod
  def _chooseBestSegmentPerColumn(cls, connections, matchingCells,
                                  allMatchingSegments, potentialOverlaps,
                                  cellsPerColumn):
    """
    For all the columns covered by 'matchingCells', choose the column's
    matching segment with largest number of active potential synapses. When
    there's a tie, the first segment wins.

    @param connections (SparseMatrixConnections)
    @param matchingCells (numpy array)
    @param allMatchingSegments (numpy array)
    @param potentialOverlaps (numpy array)
    """
    candidateSegments = connections.filterSegmentsByCell(allMatchingSegments,
                                                         matchingCells)

    # Narrow it down to one segment per column.
    cellScores = potentialOverlaps[candidateSegments]
    columnsForCandidates = (connections.mapSegmentsToCells(candidateSegments)
                            // cellsPerColumn)
    onePerColumnFilter = np2.argmaxMulti(cellScores, columnsForCandidates)

    learningSegments = candidateSegments[onePerColumnFilter]

    return learningSegments


  @classmethod
  def _getCellsWithFewestSegments(cls, connections, rng, columns,
                                  cellsPerColumn):
    """
    For each column, get the cell that has the fewest total basal segments.
    Break ties randomly.

    @param connections (SparseMatrixConnections)
    @param rng (Random)
    @param columns (numpy array) Columns to check

    @return (numpy array)
    One cell for each of the provided columns
    """
    candidateCells = np2.getAllCellsInColumns(columns, cellsPerColumn)

    # Arrange the segment counts into one row per minicolumn.
    segmentCounts = np.reshape(connections.getSegmentCounts(candidateCells),
                               newshape=(len(columns),
                                         cellsPerColumn))

    # Filter to just the cells that are tied for fewest in their minicolumn.
    minSegmentCounts = np.amin(segmentCounts, axis=1, keepdims=True)
    candidateCells = candidateCells[np.flatnonzero(segmentCounts ==
                                                   minSegmentCounts)]

    # Filter to one cell per column, choosing randomly from the minimums.
    # To do the random choice, add a random offset to each index in-place,
    # using casting to floor the result.
    (_,
     onePerColumnFilter,
     numCandidatesInColumns) = np.unique(candidateCells // cellsPerColumn,
                                         return_index=True,
                                         return_counts=True)

    offsetPercents = np.empty(len(columns), dtype="float32")
    rng.initializeReal32Array(offsetPercents)

    np.add(onePerColumnFilter,
           offsetPercents*numCandidatesInColumns,
           out=onePerColumnFilter,
           casting="unsafe")

    return candidateCells[onePerColumnFilter]


  def getActiveCells(self):
    """
    @return (numpy array) Active cells
    """
    return self.activeCells


  def getPredictedActiveCells(self):
    """
    @return (numpy array) Active cells that were correctly predicted
    """
    return self.predictedActiveCells


  def getWinnerCells(self):
    """
    @return (numpy array) Cells that were selected for learning
    """
    return self.winnerCells


  def getActiveBasalSegments(self):
    """
    @return (numpy array) Active basal segments for this timestep
    """
    return self.activeBasalSegments


  def getActiveApicalSegments(self):
    """
    @return (numpy array) Active apical segments for this timestep
    """
    return self.activeApicalSegments


  def numberOfColumns(self):
    """
    Returns the number of columns in this layer.

    @return (int) Number of columns
    """
    return self.columnCount


  def numberOfCells(self):
    """
    Returns the number of cells in this layer.

    @return (int) Number of cells
    """
    return self.numberOfColumns() * self.cellsPerColumn


  def getCellsPerColumn(self):
    """
    Returns the number of cells per column.

    @return (int) The number of cells per column.
    """
    return self.cellsPerColumn


  def getActivationThreshold(self):
    """
    Returns the activation threshold.

    @return (int) The activation threshold.
    """
    return self.activationThreshold


  def setActivationThreshold(self, activationThreshold):
    """
    Sets the activation threshold.

    @param activationThreshold (int) activation threshold.
    """
    self.activationThreshold = activationThreshold


  def getReducedBasalThreshold(self):
    """
    Returns the reduced basal activation threshold for apically active cells.

    @return (int) The activation threshold.
    """
    return self.reducedBasalThreshold


  def setReducedBasalThreshold(self, reducedBasalThreshold):
    """
    Sets the reduced basal activation threshold for apically active cells.

    @param reducedBasalThreshold (int) activation threshold.
    """
    self.reducedBasalThreshold = reducedBasalThreshold


  def getInitialPermanence(self):
    """
    Get the initial permanence.

    @return (float) The initial permanence.
    """
    return self.initialPermanence


  def setInitialPermanence(self, initialPermanence):
    """
    Sets the initial permanence.

    @param initialPermanence (float) The initial permanence.
    """
    self.initialPermanence = initialPermanence


  def getMinThreshold(self):
    """
    Returns the min threshold.

    @return (int) The min threshold.
    """
    return self.minThreshold


  def setMinThreshold(self, minThreshold):
    """
    Sets the min threshold.

    @param minThreshold (int) min threshold.
    """
    self.minThreshold = minThreshold


  def getSampleSize(self):
    """
    Gets the sampleSize.

    @return (int)
    """
    return self.sampleSize


  def setSampleSize(self, sampleSize):
    """
    Sets the sampleSize.

    @param sampleSize (int)
    """
    self.sampleSize = sampleSize


  def getPermanenceIncrement(self):
    """
    Get the permanence increment.

    @return (float) The permanence increment.
    """
    return self.permanenceIncrement


  def setPermanenceIncrement(self, permanenceIncrement):
    """
    Sets the permanence increment.

    @param permanenceIncrement (float) The permanence increment.
    """
    self.permanenceIncrement = permanenceIncrement


  def getPermanenceDecrement(self):
    """
    Get the permanence decrement.

    @return (float) The permanence decrement.
    """
    return self.permanenceDecrement


  def setPermanenceDecrement(self, permanenceDecrement):
    """
    Sets the permanence decrement.

    @param permanenceDecrement (float) The permanence decrement.
    """
    self.permanenceDecrement = permanenceDecrement


  def getBasalPredictedSegmentDecrement(self):
    """
    Get the basal predicted segment decrement.

    @return (float) The predicted segment decrement.
    """
    return self.basalPredictedSegmentDecrement


  def setBasalPredictedSegmentDecrement(self, predictedSegmentDecrement):
    """
    Sets the basal predicted segment decrement.

    @param predictedSegmentDecrement (float) The predicted segment decrement.
    """
    # Fixed: previously assigned an undefined name instead of the parameter,
    # raising NameError whenever this setter was called.
    self.basalPredictedSegmentDecrement = predictedSegmentDecrement


  def getApicalPredictedSegmentDecrement(self):
    """
    Get the apical predicted segment decrement.

    @return (float) The predicted segment decrement.
    """
    return self.apicalPredictedSegmentDecrement


  def setApicalPredictedSegmentDecrement(self, predictedSegmentDecrement):
    """
    Sets the apical predicted segment decrement.

    @param predictedSegmentDecrement (float) The predicted segment decrement.
    """
    # Fixed: previously assigned an undefined name instead of the parameter,
    # raising NameError whenever this setter was called.
    self.apicalPredictedSegmentDecrement = predictedSegmentDecrement


  def getConnectedPermanence(self):
    """
    Get the connected permanence.

    @return (float) The connected permanence.
    """
    return self.connectedPermanence


  def setConnectedPermanence(self, connectedPermanence):
    """
    Sets the connected permanence.

    @param connectedPermanence (float) The connected permanence.
    """
    self.connectedPermanence = connectedPermanence


  def getUseApicalTieBreak(self):
    """
    Get whether we actually use apical tie-break.

    @return (Bool) Whether apical tie-break is used.
    """
    return self.useApicalTiebreak


  def setUseApicalTiebreak(self, useApicalTiebreak):
    """
    Sets whether we actually use apical tie-break.

    @param useApicalTiebreak (Bool) Whether apical tie-break is used.
    """
    self.useApicalTiebreak = useApicalTiebreak


  def getUseApicalModulationBasalThreshold(self):
    """
    Get whether we actually use apical modulation of basal threshold.

    @return (Bool) Whether apical modulation is used.
    """
    return self.useApicalModulationBasalThreshold


  def setUseApicalModulationBasalThreshold(self,
                                           useApicalModulationBasalThreshold):
    """
    Sets whether we actually use apical modulation of basal threshold.

    @param useApicalModulationBasalThreshold (Bool)
    Whether apical modulation is used.
    """
    self.useApicalModulationBasalThreshold = useApicalModulationBasalThreshold