Example #1
  def testWriteRead(self):
    c1 = Connections(1024)

    # Add data before serializing
    s1 = c1.createSegment(0)
    c1.createSynapse(s1, 254, 0.1173)

    s2 = c1.createSegment(100)
    c1.createSynapse(s2, 20, 0.3)

    c1.createSynapse(s1, 40, 0.3)

    s3 = c1.createSegment(0)
    c1.createSynapse(s3, 0, 0.5)
    c1.createSynapse(s3, 1, 0.5)

    s4 = c1.createSegment(10)
    c1.createSynapse(s4, 0, 0.5)
    c1.createSynapse(s4, 1, 0.5)
    c1.destroySegment(s4)

    proto1 = ConnectionsProto_capnp.ConnectionsProto.new_message()
    c1.write(proto1)

    # Write the proto to a temp file and read it back into a new proto
    with tempfile.TemporaryFile() as f:
      proto1.write(f)
      f.seek(0)
      proto2 = ConnectionsProto_capnp.ConnectionsProto.read(f)

    # Load the deserialized proto
    c2 = Connections.read(proto2)

    # Check that the two connections objects are functionally equal
    self.assertEqual(c1, c2)
Example #2
    def testWriteRead(self):
        c1 = Connections(1024)

        # Add data before serializing
        s1 = c1.createSegment(0)
        c1.createSynapse(s1, 254, 0.1173)

        s2 = c1.createSegment(100)
        c1.createSynapse(s2, 20, 0.3)

        c1.createSynapse(s1, 40, 0.3)

        s3 = c1.createSegment(0)
        c1.createSynapse(s3, 0, 0.5)
        c1.createSynapse(s3, 1, 0.5)

        s4 = c1.createSegment(10)
        c1.createSynapse(s4, 0, 0.5)
        c1.createSynapse(s4, 1, 0.5)
        c1.destroySegment(s4)

        proto1 = ConnectionsProto_capnp.ConnectionsProto.new_message()
        c1.write(proto1)

        # Write the proto to a temp file and read it back into a new proto
        with tempfile.TemporaryFile() as f:
            proto1.write(f)
            f.seek(0)
            proto2 = ConnectionsProto_capnp.ConnectionsProto.read(f)

        # Load the deserialized proto
        c2 = Connections.read(proto2)

        # Check that the two connections objects are functionally equal
        self.assertEqual(c1, c2)
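
Examples #1 and #2 are the same serialization round-trip test: build a Connections object, write it into a Cap'n Proto message, pass that message through a temporary file, read it back, and assert that the reconstructed object is functionally equal to the original. The sketch below is only an illustration of that write/read/compare pattern in a self-contained form, with the standard pickle module and a toy class standing in for the real Connections and ConnectionsProto_capnp API.

# Illustrative only: the same round-trip-and-compare pattern, with pickle
# standing in for the Cap'n Proto write/read step.
import pickle
import tempfile
import unittest


class Toy(object):
  def __init__(self, values):
    self.values = values

  def __eq__(self, other):
    return self.values == other.values


class RoundTripTest(unittest.TestCase):

  def testWriteRead(self):
    t1 = Toy([1, 2, 3])

    # Serialize to a temp file and read it back into a new object
    with tempfile.TemporaryFile() as f:
      pickle.dump(t1, f)
      f.seek(0)
      t2 = pickle.load(f)

    # Check that the two objects are functionally equal
    self.assertEqual(t1, t2)


if __name__ == "__main__":
  unittest.main()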
Example #3
class TemporalMemory(object):
    """
  Class implementing the Temporal Memory algorithm.
  """
    def __init__(self,
                 columnDimensions=(2048, ),
                 cellsPerColumn=32,
                 activationThreshold=13,
                 initialPermanence=0.21,
                 connectedPermanence=0.50,
                 minThreshold=10,
                 maxNewSynapseCount=20,
                 permanenceIncrement=0.10,
                 permanenceDecrement=0.10,
                 predictedSegmentDecrement=0.0,
                 maxSegmentsPerCell=255,
                 maxSynapsesPerSegment=255,
                 seed=42,
                 **kwargs):
        """
    @param columnDimensions          (list)  Dimensions of the column space
    @param cellsPerColumn            (int)   Number of cells per column
    @param activationThreshold       (int)   If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active.
    @param initialPermanence         (float) Initial permanence of a new synapse.
    @param connectedPermanence       (float) If the permanence value for a synapse is greater than this value, it is said to be connected.
    @param minThreshold              (int)   If the number of synapses active on a segment is at least this threshold, the segment's cell is eligible to be selected as the best matching cell in a bursting column.
    @param maxNewSynapseCount        (int)   The maximum number of synapses added to a segment during learning.
    @param permanenceIncrement       (float) Amount by which permanences of synapses are incremented during learning.
    @param permanenceDecrement       (float) Amount by which permanences of synapses are decremented during learning.
    @param predictedSegmentDecrement (float) Amount by which the permanences of active synapses on previously predicted but inactive segments are decremented.
    @param maxSegmentsPerCell        (int)   The maximum number of segments allowed on a cell.
    @param maxSynapsesPerSegment     (int)   The maximum number of synapses allowed on a segment.
    @param seed                      (int)   Seed for the random number generator.

    Notes:

    predictedSegmentDecrement: A good value is just a bit larger than
    (the column-level sparsity * permanenceIncrement). So, if column-level
    sparsity is 2% and permanenceIncrement is 0.01, this parameter should be
    something like 4% * 0.01 = 0.0004.
    """
        # Error checking
        if not len(columnDimensions):
            raise ValueError(
                "Number of column dimensions must be greater than 0")

        if not cellsPerColumn > 0:
            raise ValueError(
                "Number of cells per column must be greater than 0")

        # TODO: Validate all parameters (and add validation tests)

        # Save member variables
        self.columnDimensions = columnDimensions
        self.cellsPerColumn = cellsPerColumn
        self.activationThreshold = activationThreshold
        self.initialPermanence = initialPermanence
        self.connectedPermanence = connectedPermanence
        self.minThreshold = minThreshold
        self.maxNewSynapseCount = maxNewSynapseCount
        self.permanenceIncrement = permanenceIncrement
        self.permanenceDecrement = permanenceDecrement
        self.predictedSegmentDecrement = predictedSegmentDecrement
        # Initialize member variables
        self.connections = Connections(
            self.numberOfCells(),
            maxSegmentsPerCell=maxSegmentsPerCell,
            maxSynapsesPerSegment=maxSynapsesPerSegment)
        self._random = Random(seed)

        self.activeCells = set()
        self.predictiveCells = set()
        self.activeSegments = set()
        self.winnerCells = set()
        self.matchingSegments = set()
        self.matchingCells = set()

    # ==============================
    # Main functions
    # ==============================

    def compute(self, activeColumns, learn=True):
        """
    Feeds input record through TM, performing inference and learning.

    @param activeColumns (set)  Indices of active columns
    @param learn         (bool) Whether or not learning is enabled

    Updates member variables:
      - `activeCells`     (set)
      - `winnerCells`     (set)
      - `activeSegments`  (set)
      - `predictiveCells` (set)
      - `matchingSegments` (set)
      - `matchingCells`   (set)
    """
        prevPredictiveCells = self.predictiveCells
        prevActiveSegments = self.activeSegments
        prevActiveCells = self.activeCells
        prevWinnerCells = self.winnerCells
        prevMatchingSegments = self.matchingSegments
        prevMatchingCells = self.matchingCells

        activeCells = set()
        winnerCells = set()

        (_activeCells, _winnerCells, predictedActiveColumns,
         predictedInactiveCells) = self.activateCorrectlyPredictiveCells(
             prevPredictiveCells, prevMatchingCells, activeColumns)

        activeCells.update(_activeCells)
        winnerCells.update(_winnerCells)

        (_activeCells, _winnerCells, learningSegments) = self.burstColumns(
            activeColumns, predictedActiveColumns, prevActiveCells,
            prevWinnerCells, self.connections)

        activeCells.update(_activeCells)
        winnerCells.update(_winnerCells)

        if learn:
            self.learnOnSegments(prevActiveSegments, learningSegments,
                                 prevActiveCells, winnerCells, prevWinnerCells,
                                 self.connections, predictedInactiveCells,
                                 prevMatchingSegments)

        (activeSegments, predictiveCells, matchingSegments,
         matchingCells) = self.computePredictiveCells(activeCells,
                                                      self.connections)

        self.activeCells = activeCells
        self.winnerCells = winnerCells
        self.activeSegments = activeSegments
        self.predictiveCells = predictiveCells
        self.matchingSegments = matchingSegments
        self.matchingCells = matchingCells

    def reset(self):
        """
    Indicates the start of a new sequence. Resets sequence state of the TM.
    """
        self.activeCells = set()
        self.predictiveCells = set()
        self.activeSegments = set()
        self.winnerCells = set()

    # ==============================
    # Phases
    # ==============================

    def activateCorrectlyPredictiveCells(self, prevPredictiveCells,
                                         prevMatchingCells, activeColumns):
        """
    Phase 1: Activate the correctly predictive cells.

    Pseudocode:

      - for each prev predictive cell
        - if in active column
          - mark it as active
          - mark it as winner cell
          - mark column as predicted => active
        - if not in active column
          - mark it as a predicted but inactive cell

    @param prevPredictiveCells (set) Indices of predictive cells in `t-1`
    @param prevMatchingCells   (set) Indices of matching cells in `t-1`
    @param activeColumns       (set) Indices of active columns in `t`

    @return (tuple) Contains:
                      `activeCells`               (set),
                      `winnerCells`               (set),
                      `predictedActiveColumns`    (set),
                      `predictedInactiveCells`    (set)
    """
        activeCells = set()
        winnerCells = set()
        predictedActiveColumns = set()
        predictedInactiveCells = set()

        for cell in prevPredictiveCells:
            column = self.columnForCell(cell)

            if column in activeColumns:
                activeCells.add(cell)
                winnerCells.add(cell)
                predictedActiveColumns.add(column)

        if self.predictedSegmentDecrement > 0:
            for cell in prevMatchingCells:
                column = self.columnForCell(cell)

                if column not in activeColumns:
                    predictedInactiveCells.add(cell)

        return (activeCells, winnerCells, predictedActiveColumns,
                predictedInactiveCells)

    def burstColumns(self, activeColumns, predictedActiveColumns,
                     prevActiveCells, prevWinnerCells, connections):
        """
    Phase 2: Burst unpredicted columns.

    Pseudocode:

      - for each unpredicted active column
        - mark all cells as active
        - mark the best matching cell as winner cell
          - (learning)
            - if it has no matching segment
              - (optimization) if there are prev winner cells
                - add a segment to it
            - mark the segment as learning

    @param activeColumns                   (set)         Indices of active columns in `t`
    @param predictedActiveColumns          (set)         Indices of predicted => active columns in `t`
    @param prevActiveCells                 (set)         Indices of active cells in `t-1`
    @param prevWinnerCells                 (set)         Indices of winner cells in `t-1`
    @param connections                     (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `learningSegments` (set)
    """
        activeCells = set()
        winnerCells = set()
        learningSegments = set()

        unpredictedActiveColumns = activeColumns - predictedActiveColumns

        for column in unpredictedActiveColumns:
            cells = self.cellsForColumn(column)
            activeCells.update(cells)

            (bestCell,
             bestSegment) = self.bestMatchingCell(cells, prevActiveCells,
                                                  connections)
            winnerCells.add(bestCell)

            if bestSegment is None and len(prevWinnerCells):
                bestSegment = connections.createSegment(bestCell)

            if bestSegment is not None:
                learningSegments.add(bestSegment)

        return activeCells, winnerCells, learningSegments

    def learnOnSegments(self, prevActiveSegments, learningSegments,
                        prevActiveCells, winnerCells, prevWinnerCells,
                        connections, predictedInactiveCells,
                        prevMatchingSegments):
        """
    Phase 3: Perform learning by adapting segments.

    Pseudocode:

      - (learning) for each prev active or learning segment
        - if learning segment or from winner cell
          - strengthen active synapses
          - weaken inactive synapses
        - if learning segment
          - add some synapses to the segment
            - subsample from prev winner cells

      - if predictedSegmentDecrement > 0
        - for each previously matching segment
          - if cell is a predicted inactive cell
            - weaken active synapses but don't touch inactive synapses

    @param prevActiveSegments           (set)         Indices of active segments in `t-1`
    @param learningSegments             (set)         Indices of learning segments in `t`
    @param prevActiveCells              (set)         Indices of active cells in `t-1`
    @param winnerCells                  (set)         Indices of winner cells in `t`
    @param prevWinnerCells              (set)         Indices of winner cells in `t-1`
    @param connections                  (Connections) Connectivity of layer
    @param predictedInactiveCells       (set)         Indices of predicted inactive cells
    @param prevMatchingSegments         (set)         Indices of matching segments in `t-1`
    """
        for segment in prevActiveSegments | learningSegments:
            isLearningSegment = segment in learningSegments
            isFromWinnerCell = connections.cellForSegment(
                segment) in winnerCells

            activeSynapses = self.activeSynapsesForSegment(
                segment, prevActiveCells, connections)

            if isLearningSegment or isFromWinnerCell:
                self.adaptSegment(segment, activeSynapses, connections,
                                  self.permanenceIncrement,
                                  self.permanenceDecrement)

            if isLearningSegment:
                n = self.maxNewSynapseCount - len(activeSynapses)

                for presynapticCell in self.pickCellsToLearnOn(
                        n, segment, prevWinnerCells, connections):
                    connections.createSynapse(segment, presynapticCell,
                                              self.initialPermanence)

        if self.predictedSegmentDecrement > 0:
            for segment in prevMatchingSegments:
                isPredictedInactiveCell = connections.cellForSegment(
                    segment) in predictedInactiveCells
                activeSynapses = self.activeSynapsesForSegment(
                    segment, prevActiveCells, connections)

                if isPredictedInactiveCell:
                    self.adaptSegment(segment, activeSynapses, connections,
                                      -self.predictedSegmentDecrement, 0.0)

    def computePredictiveCells(self, activeCells, connections):
        """
    Phase 4: Compute predictive cells due to lateral input
    on distal dendrites.

    Pseudocode:

      - for each distal dendrite segment with activity >= activationThreshold
        - mark the segment as active
        - mark the cell as predictive

      - if predictedSegmentDecrement > 0
        - for each distal dendrite segment with unconnected
          activity >=  minThreshold
          - mark the segment as matching
          - mark the cell as matching

    Forward propagates activity from active cells to the synapses that touch
    them, to determine which synapses are active.

    @param activeCells (set)         Indices of active cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeSegments`  (set),
                      `predictiveCells` (set),
                      `matchingSegments` (set),
                      `matchingCells`    (set)
    """
        numActiveConnectedSynapsesForSegment = defaultdict(int)
        numActiveSynapsesForSegment = defaultdict(int)
        activeSegments = set()
        predictiveCells = set()

        matchingSegments = set()
        matchingCells = set()

        for cell in activeCells:
            for synapseData in connections.synapsesForPresynapticCell(
                    cell).values():
                segment = synapseData.segment
                permanence = synapseData.permanence

                if permanence >= self.connectedPermanence:
                    numActiveConnectedSynapsesForSegment[segment] += 1

                    if (numActiveConnectedSynapsesForSegment[segment] >=
                            self.activationThreshold):
                        activeSegments.add(segment)
                        predictiveCells.add(
                            connections.cellForSegment(segment))

                if permanence > 0 and self.predictedSegmentDecrement > 0:
                    numActiveSynapsesForSegment[segment] += 1

                    if numActiveSynapsesForSegment[
                            segment] >= self.minThreshold:
                        matchingSegments.add(segment)
                        matchingCells.add(connections.cellForSegment(segment))

        return activeSegments, predictiveCells, matchingSegments, matchingCells

    # ==============================
    # Helper functions
    # ==============================

    def bestMatchingCell(self, cells, activeCells, connections):
        """
    Gets the cell with the best matching segment
    (see `TM.bestMatchingSegment`) that has the largest number of active
    synapses of all best matching segments.

    If none were found, pick the least used cell (see `TM.leastUsedCell`).

    @param cells                       (set)         Indices of cells
    @param activeCells                 (set)         Indices of active cells
    @param connections                 (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `cell`        (int),
                      `bestSegment` (int)
    """
        maxSynapses = 0
        bestCell = None
        bestSegment = None

        for cell in cells:
            segment, numActiveSynapses = self.bestMatchingSegment(
                cell, activeCells, connections)

            if segment is not None and numActiveSynapses > maxSynapses:
                maxSynapses = numActiveSynapses
                bestCell = cell
                bestSegment = segment

        if bestCell is None:
            bestCell = self.leastUsedCell(cells, connections)

        return bestCell, bestSegment

    def bestMatchingSegment(self, cell, activeCells, connections):
        """
    Gets the segment on a cell with the largest number of active synapses,
    including all synapses with non-zero permanences.

    @param cell                        (int)         Cell index
    @param activeCells                 (set)         Indices of active cells
    @param connections                 (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `segment`                 (int),
                      `numActiveSynapses`       (int)
    """
        maxSynapses = self.minThreshold
        bestSegment = None
        bestNumActiveSynapses = None

        for segment in connections.segmentsForCell(cell):
            numActiveSynapses = 0

            for synapse in connections.synapsesForSegment(segment):
                synapseData = connections.dataForSynapse(synapse)
                if ((synapseData.presynapticCell in activeCells)
                        and synapseData.permanence > 0):
                    numActiveSynapses += 1

            if numActiveSynapses >= maxSynapses:
                maxSynapses = numActiveSynapses
                bestSegment = segment
                bestNumActiveSynapses = numActiveSynapses

        return bestSegment, bestNumActiveSynapses

    def leastUsedCell(self, cells, connections):
        """
    Gets the cell with the smallest number of segments.
    Break ties randomly.

    @param cells       (set)         Indices of cells
    @param connections (Connections) Connectivity of layer

    @return (int) Cell index
    """
        leastUsedCells = set()
        minNumSegments = float("inf")

        for cell in cells:
            numSegments = len(connections.segmentsForCell(cell))

            if numSegments < minNumSegments:
                minNumSegments = numSegments
                leastUsedCells = set()

            if numSegments == minNumSegments:
                leastUsedCells.add(cell)

        i = self._random.getUInt32(len(leastUsedCells))
        return sorted(leastUsedCells)[i]

    @staticmethod
    def activeSynapsesForSegment(segment, activeCells, connections):
        """
    Returns the synapses on a segment that are active due to lateral input
    from active cells.

    @param segment     (int)         Segment index
    @param activeCells (set)         Indices of active cells
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of active synapses on segment
    """
        synapses = set()

        for synapse in connections.synapsesForSegment(segment):
            synapseData = connections.dataForSynapse(synapse)

            if synapseData.presynapticCell in activeCells:
                synapses.add(synapse)

        return synapses

    def adaptSegment(self, segment, activeSynapses, connections,
                     permanenceIncrement, permanenceDecrement):
        """
    Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.

    @param segment              (int)         Segment index
    @param activeSynapses       (set)         Indices of active synapses
    @param connections          (Connections) Connectivity of layer
    @param permanenceIncrement  (float)  Amount to increment active synapses
    @param permanenceDecrement  (float)  Amount to decrement inactive synapses
    """
        # Need to copy the synapses-for-segment set below because it may be
        # modified during iteration by `destroySynapse`
        for synapse in set(connections.synapsesForSegment(segment)):
            synapseData = connections.dataForSynapse(synapse)
            permanence = synapseData.permanence

            if synapse in activeSynapses:
                permanence += permanenceIncrement
            else:
                permanence -= permanenceDecrement

            # Keep permanence within min/max bounds
            permanence = max(0.0, min(1.0, permanence))

            if (abs(permanence) < EPSILON):
                connections.destroySynapse(synapse)
            else:
                connections.updateSynapsePermanence(synapse, permanence)

    def pickCellsToLearnOn(self, n, segment, winnerCells, connections):
        """
    Pick cells to form distal connections to.

    TODO: Respect topology and learningRadius

    @param n           (int)         Number of cells to pick
    @param segment     (int)         Segment index
    @param winnerCells (set)         Indices of winner cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of cells picked
    """
        candidates = set(winnerCells)

        # Remove cells that are already synapsed on by this segment
        for synapse in connections.synapsesForSegment(segment):
            synapseData = connections.dataForSynapse(synapse)
            presynapticCell = synapseData.presynapticCell

            if presynapticCell in candidates:
                candidates.remove(presynapticCell)

        n = min(n, len(candidates))
        candidates = sorted(candidates)
        cells = set()

        # Pick n cells randomly
        for _ in range(n):
            i = self._random.getUInt32(len(candidates))
            cells.add(candidates[i])
            del candidates[i]

        return cells

    def columnForCell(self, cell):
        """
    Returns the index of the column that a cell belongs to.

    @param cell (int) Cell index

    @return (int) Column index
    """
        self._validateCell(cell)

        return int(cell / self.cellsPerColumn)

    def cellsForColumn(self, column):
        """
    Returns the indices of cells that belong to a column.

    @param column (int) Column index

    @return (set) Cell indices
    """
        self._validateColumn(column)

        start = self.cellsPerColumn * self.getCellIndex(column)
        end = start + self.cellsPerColumn
        return set(xrange(start, end))

    def numberOfColumns(self):
        """
    Returns the number of columns in this layer.

    @return (int) Number of columns
    """
        return reduce(mul, self.columnDimensions, 1)

    def numberOfCells(self):
        """
    Returns the number of cells in this layer.

    @return (int) Number of cells
    """
        return self.numberOfColumns() * self.cellsPerColumn

    def getActiveCells(self):
        """
    Returns the indices of the active cells.

    @return (list) Indices of active cells.
    """
        return self.getCellIndices(self.activeCells)

    def getPredictiveCells(self):
        """
    Returns the indices of the predictive cells.

    @return (list) Indices of predictive cells.
    """
        return self.getCellIndices(self.predictiveCells)

    def getWinnerCells(self):
        """
    Returns the indices of the winner cells.

    @return (list) Indices of winner cells.
    """
        return self.getCellIndices(self.winnerCells)

    def getMatchingCells(self):
        """
    Returns the indices of the matching cells.

    @return (list) Indices of matching cells.
    """
        return self.getCellIndices(self.matchingCells)

    def mapCellsToColumns(self, cells):
        """
    Maps cells to the columns they belong to

    @param cells (set) Cells

    @return (dict) Mapping from columns to their cells in `cells`
    """
        cellsForColumns = defaultdict(set)

        for cell in cells:
            column = self.columnForCell(cell)
            cellsForColumns[column].add(cell)

        return cellsForColumns

    def write(self, proto):
        """
    Writes serialized data to proto object

    @param proto (DynamicStructBuilder) Proto object
    """
        proto.columnDimensions = self.columnDimensions
        proto.cellsPerColumn = self.cellsPerColumn
        proto.activationThreshold = self.activationThreshold
        proto.initialPermanence = self.initialPermanence
        proto.connectedPermanence = self.connectedPermanence
        proto.minThreshold = self.minThreshold
        proto.maxNewSynapseCount = self.maxNewSynapseCount
        proto.permanenceIncrement = self.permanenceIncrement
        proto.permanenceDecrement = self.permanenceDecrement
        proto.predictedSegmentDecrement = self.predictedSegmentDecrement

        self.connections.write(proto.connections)
        self._random.write(proto.random)

        proto.activeCells = list(self.activeCells)
        proto.predictiveCells = list(self.predictiveCells)
        proto.activeSegments = list(self.activeSegments)
        proto.winnerCells = list(self.winnerCells)
        proto.matchingSegments = list(self.matchingSegments)
        proto.matchingCells = list(self.matchingCells)

    @classmethod
    def read(cls, proto):
        """
    Reads deserialized data from proto object

    @param proto (DynamicStructBuilder) Proto object

    @return (TemporalMemory) TemporalMemory instance
    """
        tm = object.__new__(cls)

        tm.columnDimensions = list(proto.columnDimensions)
        tm.cellsPerColumn = int(proto.cellsPerColumn)
        tm.activationThreshold = int(proto.activationThreshold)
        tm.initialPermanence = proto.initialPermanence
        tm.connectedPermanence = proto.connectedPermanence
        tm.minThreshold = int(proto.minThreshold)
        tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
        tm.permanenceIncrement = proto.permanenceIncrement
        tm.permanenceDecrement = proto.permanenceDecrement
        tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

        tm.connections = Connections.read(proto.connections)
        tm._random = Random()
        tm._random.read(proto.random)

        tm.activeCells = set([int(x) for x in proto.activeCells])
        tm.predictiveCells = set([int(x) for x in proto.predictiveCells])
        tm.activeSegments = set([int(x) for x in proto.activeSegments])
        tm.winnerCells = set([int(x) for x in proto.winnerCells])
        tm.matchingSegments = set([int(x) for x in proto.matchingSegments])
        tm.matchingCells = set([int(x) for x in proto.matchingCells])

        return tm

    def __eq__(self, other):
        """
    Equality operator for TemporalMemory instances.
    Checks if two instances are functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
        if self.columnDimensions != other.columnDimensions: return False
        if self.cellsPerColumn != other.cellsPerColumn: return False
        if self.activationThreshold != other.activationThreshold: return False
        if abs(self.initialPermanence - other.initialPermanence) > EPSILON:
            return False
        if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON:
            return False
        if self.minThreshold != other.minThreshold: return False
        if self.maxNewSynapseCount != other.maxNewSynapseCount: return False
        if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON:
            return False
        if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON:
            return False
        if abs(self.predictedSegmentDecrement -
               other.predictedSegmentDecrement) > EPSILON:
            return False

        if self.connections != other.connections: return False

        if self.activeCells != other.activeCells: return False
        if self.predictiveCells != other.predictiveCells: return False
        if self.winnerCells != other.winnerCells: return False
        if self.matchingSegments != other.matchingSegments: return False
        if self.matchingCells != other.matchingCells: return False

        return True

    def __ne__(self, other):
        """
    Non-equality operator for TemporalMemory instances.
    Checks if two instances are not functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
        return not self.__eq__(other)

    def _validateColumn(self, column):
        """
    Raises an error if column index is invalid.

    @param column (int) Column index
    """
        if column >= self.numberOfColumns() or column < 0:
            raise IndexError("Invalid column")

    def _validateCell(self, cell):
        """
    Raises an error if cell index is invalid.

    @param cell (int) Cell index
    """
        if cell >= self.numberOfCells() or cell < 0:
            raise IndexError("Invalid cell")

    @classmethod
    def getCellIndices(cls, cells):
        return [cls.getCellIndex(c) for c in cells]

    @staticmethod
    def getCellIndex(cell):
        return cell
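
A minimal usage sketch for the TemporalMemory class in Example #3 follows. It is illustrative only: the module path temporal_memory is assumed, the class's dependencies (Connections, Random, EPSILON) are assumed to be importable, and the parameters are scaled down so that a short four-column pattern can actually reach the activation threshold.

# Hypothetical usage of the TemporalMemory class above; the import path and
# the input sequence are made up for illustration.
from temporal_memory import TemporalMemory

tm = TemporalMemory(columnDimensions=(64,),
                    cellsPerColumn=4,
                    activationThreshold=3,
                    initialPermanence=0.55,
                    connectedPermanence=0.50,
                    minThreshold=2,
                    maxNewSynapseCount=4)

# A short repeating sequence of active-column sets.
sequence = [{0, 1, 2, 3}, {4, 5, 6, 7}, {8, 9, 10, 11}]

for _ in range(5):
    for activeColumns in sequence:
        tm.compute(activeColumns, learn=True)
    tm.reset()  # signal the end of the sequence

# Present the first element without learning and inspect the prediction.
tm.compute(sequence[0], learn=False)
predictedColumns = set(tm.columnForCell(cell) for cell in tm.getPredictiveCells())
print(predictedColumns)  # should come out as {4, 5, 6, 7}, the columns of sequence[1]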
Example #4
class TemporalMemory(object):
  """
  Class implementing the Temporal Memory algorithm.
  """

  def __init__(self,
               columnDimensions=(2048,),
               cellsPerColumn=32,
               activationThreshold=13,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               maxNewSynapseCount=20,
               permanenceIncrement=0.10,
               permanenceDecrement=0.10,
               predictedSegmentDecrement=0.004,
               seed=42):
    """
    @param columnDimensions          (list)  Dimensions of the column space
    @param cellsPerColumn            (int)   Number of cells per column
    @param activationThreshold       (int)   If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active.
    @param initialPermanence         (float) Initial permanence of a new synapse.
    @param connectedPermanence       (float) If the permanence value for a synapse is greater than this value, it is said to be connected.
    @param minThreshold              (int)   If the number of synapses active on a segment is at least this threshold, the segment's cell is eligible to be selected as the best matching cell in a bursting column.
    @param maxNewSynapseCount        (int)   The maximum number of synapses added to a segment during learning.
    @param permanenceIncrement       (float) Amount by which permanences of synapses are incremented during learning.
    @param permanenceDecrement       (float) Amount by which permanences of synapses are decremented during learning.
    @param predictedSegmentDecrement (float) Amount by which the permanences of active synapses on previously predicted but inactive segments are decremented.
    @param seed                      (int)   Seed for the random number generator.
    """
    # Error checking
    if not len(columnDimensions):
      raise ValueError("Number of column dimensions must be greater than 0")

    if not cellsPerColumn > 0:
      raise ValueError("Number of cells per column must be greater than 0")

    # TODO: Validate all parameters (and add validation tests)

    # Save member variables
    self.columnDimensions = columnDimensions
    self.cellsPerColumn = cellsPerColumn
    self.activationThreshold = activationThreshold
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.minThreshold = minThreshold
    self.maxNewSynapseCount = maxNewSynapseCount
    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement
    self.predictedSegmentDecrement = predictedSegmentDecrement
    # Initialize member variables
    self.connections = Connections(self.numberOfCells())
    self._random = Random(seed)

    self.activeCells = set()
    self.predictiveCells = set()
    self.activeSegments = set()
    self.winnerCells = set()
    self.matchingSegments = set()
    self.matchingCells = set()

  # ==============================
  # Main functions
  # ==============================

  def compute(self, activeColumns, learn=True):
    """
    Feeds input record through TM, performing inference and learning.
    Updates member variables with new state.

    @param activeColumns (set)  Indices of active columns in `t`
    @param learn         (bool) Whether or not learning is enabled
    """
    (activeCells,
     winnerCells,
     activeSegments,
     predictiveCells,
     predictedColumns,
     matchingSegments,
     matchingCells) = self.computeFn(activeColumns,
                                     self.predictiveCells,
                                     self.activeSegments,
                                     self.activeCells,
                                     self.winnerCells,
                                     self.matchingSegments,
                                     self.matchingCells,
                                     self.connections,
                                     learn=learn)

    self.activeCells = activeCells
    self.winnerCells = winnerCells
    self.activeSegments = activeSegments
    self.predictiveCells = predictiveCells
    self.matchingSegments = matchingSegments
    self.matchingCells = matchingCells

  def computeFn(self,
                activeColumns,
                prevPredictiveCells,
                prevActiveSegments,
                prevActiveCells,
                prevWinnerCells,
                prevMatchingSegments,
                prevMatchingCells,
                connections,
                learn=True):
    """
    'Functional' version of compute.
    Returns new state.

    @param activeColumns         (set)         Indices of active columns in `t`
    @param prevPredictiveCells   (set)         Indices of predictive cells in `t-1`
    @param prevActiveSegments    (set)         Indices of active segments in `t-1`
    @param prevActiveCells       (set)         Indices of active cells in `t-1`
    @param prevWinnerCells       (set)         Indices of winner cells in `t-1`
    @param prevMatchingSegments  (set)         Indices of matching segments in `t-1`
    @param prevMatchingCells     (set)         Indices of matching cells in `t-1`
    @param connections           (Connections) Connectivity of layer
    @param learn                 (bool)        Whether or not learning is enabled

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `activeSegments`   (set),
                      `predictiveCells`  (set),
                      `predictedColumns` (set),
                      `matchingSegments` (set),
                      `matchingCells`    (set)
    """
    activeCells = set()
    winnerCells = set()

    (_activeCells,
     _winnerCells,
     predictedColumns,
     predictedInactiveCells) = self.activateCorrectlyPredictiveCells(
       prevPredictiveCells,
       prevMatchingCells,
       activeColumns)

    activeCells.update(_activeCells)
    winnerCells.update(_winnerCells)

    (_activeCells,
     _winnerCells,
     learningSegments) = self.burstColumns(activeColumns,
                                           predictedColumns,
                                           prevActiveCells,
                                           prevWinnerCells,
                                           connections)

    activeCells.update(_activeCells)
    winnerCells.update(_winnerCells)

    if learn:
      self.learnOnSegments(prevActiveSegments,
                           learningSegments,
                           prevActiveCells,
                           winnerCells,
                           prevWinnerCells,
                           connections,
                           predictedInactiveCells,
                           prevMatchingSegments)

    (activeSegments,
     predictiveCells,
     matchingSegments,
     matchingCells) = self.computePredictiveCells(activeCells, connections)

    return (activeCells,
            winnerCells,
            activeSegments,
            predictiveCells,
            predictedColumns,
            matchingSegments,
            matchingCells)


  def reset(self):
    """
    Indicates the start of a new sequence. Resets sequence state of the TM.
    """
    self.activeCells = set()
    self.predictiveCells = set()
    self.activeSegments = set()
    self.winnerCells = set()


  # ==============================
  # Phases
  # ==============================

  def activateCorrectlyPredictiveCells(self,
                                       prevPredictiveCells,
                                       prevMatchingCells,
                                       activeColumns):
    """
    Phase 1: Activate the correctly predictive cells.

    Pseudocode:

      - for each prev predictive cell
        - if in active column
          - mark it as active
          - mark it as winner cell
          - mark column as predicted
        - if not in active column
          - mark it as a predicted but inactive cell

    @param prevPredictiveCells (set) Indices of predictive cells in `t-1`
    @param prevMatchingCells   (set) Indices of matching cells in `t-1`
    @param activeColumns       (set) Indices of active columns in `t`

    @return (tuple) Contains:
                      `activeCells`               (set),
                      `winnerCells`               (set),
                      `predictedColumns`          (set),
                      `predictedInactiveCells`    (set)
    """
    activeCells = set()
    winnerCells = set()
    predictedColumns = set()
    predictedInactiveCells = set()

    for cell in prevPredictiveCells:
      column = self.columnForCell(cell)

      if column in activeColumns:
        activeCells.add(cell)
        winnerCells.add(cell)
        predictedColumns.add(column)

    if self.predictedSegmentDecrement > 0:
      for cell in prevMatchingCells:
        column = self.columnForCell(cell)

        if column not in activeColumns:
          predictedInactiveCells.add(cell)

    return activeCells, winnerCells, predictedColumns, predictedInactiveCells


  def burstColumns(self,
                   activeColumns,
                   predictedColumns,
                   prevActiveCells,
                   prevWinnerCells,
                   connections):
    """
    Phase 2: Burst unpredicted columns.

    Pseudocode:

      - for each unpredicted active column
        - mark all cells as active
        - mark the best matching cell as winner cell
          - (learning)
            - if it has no matching segment
              - (optimization) if there are prev winner cells
                - add a segment to it
            - mark the segment as learning

    @param activeColumns                   (set)         Indices of active columns in `t`
    @param predictedColumns                (set)         Indices of predicted columns in `t`
    @param prevActiveCells                 (set)         Indices of active cells in `t-1`
    @param prevWinnerCells                 (set)         Indices of winner cells in `t-1`
    @param connections                     (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `learningSegments` (set)
    """
    activeCells = set()
    winnerCells = set()
    learningSegments = set()

    unpredictedColumns = activeColumns - predictedColumns

    for column in unpredictedColumns:
      cells = self.cellsForColumn(column)
      activeCells.update(cells)

      (bestCell,
       bestSegment) = self.bestMatchingCell(cells,
                                            prevActiveCells,
                                            connections)
      winnerCells.add(bestCell)

      if bestSegment is None and len(prevWinnerCells):
        bestSegment = connections.createSegment(bestCell)

      if bestSegment is not None:
        learningSegments.add(bestSegment)

    return activeCells, winnerCells, learningSegments


  def learnOnSegments(self,
                      prevActiveSegments,
                      learningSegments,
                      prevActiveCells,
                      winnerCells,
                      prevWinnerCells,
                      connections,
                      predictedInactiveCells,
                      prevMatchingSegments):
    """
    Phase 3: Perform learning by adapting segments.

    Pseudocode:

      - (learning) for each prev active or learning segment
        - if learning segment or from winner cell
          - strengthen active synapses
          - weaken inactive synapses
        - if learning segment
          - add some synapses to the segment
            - subsample from prev winner cells

      - if predictedSegmentDecrement > 0
        - for each previously matching segment
          - if cell is a predicted inactive cell
            - weaken active synapses but don't touch inactive synapses

    @param prevActiveSegments           (set)         Indices of active segments in `t-1`
    @param learningSegments             (set)         Indices of learning segments in `t`
    @param prevActiveCells              (set)         Indices of active cells in `t-1`
    @param winnerCells                  (set)         Indices of winner cells in `t`
    @param prevWinnerCells              (set)         Indices of winner cells in `t-1`
    @param connections                  (Connections) Connectivity of layer
    @param predictedInactiveCells       (set)         Indices of predicted inactive cells
    @param prevMatchingSegments         (set)         Indices of matching segments in `t-1`
    """
    for segment in prevActiveSegments | learningSegments:
      isLearningSegment = segment in learningSegments
      isFromWinnerCell = connections.cellForSegment(segment) in winnerCells

      activeSynapses = self.activeSynapsesForSegment(
        segment, prevActiveCells, connections)

      if isLearningSegment or isFromWinnerCell:
        self.adaptSegment(segment, activeSynapses, connections,
                          self.permanenceIncrement,
                          self.permanenceDecrement)

      if isLearningSegment:
        n = self.maxNewSynapseCount - len(activeSynapses)

        for presynapticCell in self.pickCellsToLearnOn(n,
                                                       segment,
                                                       prevWinnerCells,
                                                       connections):
          connections.createSynapse(segment,
                                    presynapticCell,
                                    self.initialPermanence)

    if self.predictedSegmentDecrement > 0:
      for segment in prevMatchingSegments:
        isPredictedInactiveCell = connections.cellForSegment(segment) in predictedInactiveCells
        activeSynapses = self.activeSynapsesForSegment(
          segment, prevActiveCells, connections)

        if isPredictedInactiveCell:
          self.adaptSegment(segment, activeSynapses, connections,
                            -self.predictedSegmentDecrement,
                            0.0)



  def computePredictiveCells(self, activeCells, connections):
    """
    Phase 4: Compute predictive cells due to lateral input
    on distal dendrites.

    Pseudocode:

      - for each distal dendrite segment with activity >= activationThreshold
        - mark the segment as active
        - mark the cell as predictive

      - if predictedSegmentDecrement > 0
        - for each distal dendrite segment with unconnected
          activity >=  minThreshold
          - mark the segment as matching
          - mark the cell as matching

    Forward propagates activity from active cells to the synapses that touch
    them, to determine which synapses are active.

    @param activeCells (set)         Indices of active cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeSegments`  (set),
                      `predictiveCells` (set),
                      `matchingSegments` (set),
                      `matchingCells`    (set)
    """
    numActiveConnectedSynapsesForSegment = defaultdict(int)
    numActiveSynapsesForSegment = defaultdict(int)
    activeSegments = set()
    predictiveCells = set()

    matchingSegments = set()
    matchingCells = set()

    for cell in activeCells:
      for synapseData in connections.synapsesForPresynapticCell(cell).values():
        segment = synapseData.segment
        permanence = synapseData.permanence

        if permanence >= self.connectedPermanence:
          numActiveConnectedSynapsesForSegment[segment] += 1

          if (numActiveConnectedSynapsesForSegment[segment] >=
              self.activationThreshold):
            activeSegments.add(segment)
            predictiveCells.add(connections.cellForSegment(segment))

        if permanence > 0 and self.predictedSegmentDecrement > 0:
          numActiveSynapsesForSegment[segment] += 1

          if numActiveSynapsesForSegment[segment] >= self.minThreshold:
            matchingSegments.add(segment)
            matchingCells.add(connections.cellForSegment(segment))

    return activeSegments, predictiveCells, matchingSegments, matchingCells


  # ==============================
  # Helper functions
  # ==============================

  def bestMatchingCell(self, cells, activeCells, connections):
    """
    Gets the cell with the best matching segment
    (see `TM.bestMatchingSegment`) that has the largest number of active
    synapses of all best matching segments.

    If none were found, pick the least used cell (see `TM.leastUsedCell`).

    @param cells                       (set)         Indices of cells
    @param activeCells                 (set)         Indices of active cells
    @param connections                 (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `cell`        (int),
                      `bestSegment` (int)
    """
    maxSynapses = 0
    bestCell = None
    bestSegment = None

    for cell in cells:
      segment, numActiveSynapses = self.bestMatchingSegment(
        cell, activeCells, connections)

      if segment is not None and numActiveSynapses > maxSynapses:
        maxSynapses = numActiveSynapses
        bestCell = cell
        bestSegment = segment

    if bestCell is None:
      bestCell = self.leastUsedCell(cells, connections)

    return bestCell, bestSegment


  def bestMatchingSegment(self, cell, activeCells, connections):
    """
    Gets the segment on a cell with the largest number of active synapses,
    including all synapses with non-zero permanences.

    @param cell                        (int)         Cell index
    @param activeCells                 (set)         Indices of active cells
    @param connections                 (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `segment`                 (int),
                      `numActiveSynapses`       (int)
    """
    maxSynapses = self.minThreshold
    bestSegment = None
    bestNumActiveSynapses = None

    for segment in connections.segmentsForCell(cell):
      numActiveSynapses = 0

      for synapse in connections.synapsesForSegment(segment):
        synapseData = connections.dataForSynapse(synapse)
        if (synapseData.presynapticCell in activeCells and
            synapseData.permanence > 0):
          numActiveSynapses += 1

      if numActiveSynapses >= maxSynapses:
        maxSynapses = numActiveSynapses
        bestSegment = segment
        bestNumActiveSynapses = numActiveSynapses

    return bestSegment, bestNumActiveSynapses


  def leastUsedCell(self, cells, connections):
    """
    Gets the cell with the smallest number of segments.
    Break ties randomly.

    @param cells       (set)         Indices of cells
    @param connections (Connections) Connectivity of layer

    @return (int) Cell index
    """
    leastUsedCells = set()
    minNumSegments = float("inf")

    for cell in cells:
      numSegments = len(connections.segmentsForCell(cell))

      if numSegments < minNumSegments:
        minNumSegments = numSegments
        leastUsedCells = set()

      if numSegments == minNumSegments:
        leastUsedCells.add(cell)

    i = self._random.getUInt32(len(leastUsedCells))
    return sorted(leastUsedCells)[i]


  @staticmethod
  def activeSynapsesForSegment(segment, activeCells, connections):
    """
    Returns the synapses on a segment that are active due to lateral input
    from active cells.

    @param segment     (int)         Segment index
    @param activeCells (set)         Indices of active cells
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of active synapses on segment
    """
    synapses = set()

    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)

      if synapseData.presynapticCell in activeCells:
        synapses.add(synapse)

    return synapses


  def adaptSegment(self, segment, activeSynapses, connections,
                   permanenceIncrement, permanenceDecrement):
    """
    Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.

    @param segment              (int)         Segment index
    @param activeSynapses       (set)         Indices of active synapses
    @param connections          (Connections) Connectivity of layer
    @param permanenceIncrement  (float)  Amount to increment active synapses
    @param permanenceDecrement  (float)  Amount to decrement inactive synapses
    """
    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)
      permanence = synapseData.permanence

      if synapse in activeSynapses:
        permanence += permanenceIncrement
      else:
        permanence -= permanenceDecrement

      # Keep permanence within min/max bounds
      permanence = max(0.0, min(1.0, permanence))

      connections.updateSynapsePermanence(synapse, permanence)


  def pickCellsToLearnOn(self, n, segment, winnerCells, connections):
    """
    Pick cells to form distal connections to.

    TODO: Respect topology and learningRadius

    @param n           (int)         Number of cells to pick
    @param segment     (int)         Segment index
    @param winnerCells (set)         Indices of winner cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of cells picked
    """
    candidates = set(winnerCells)

    # Remove cells that are already synapsed on by this segment
    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)
      presynapticCell = synapseData.presynapticCell

      if presynapticCell in candidates:
        candidates.remove(presynapticCell)

    n = min(n, len(candidates))
    candidates = sorted(candidates)
    cells = set()

    # Pick n cells randomly
    for _ in range(n):
      i = self._random.getUInt32(len(candidates))
      cells.add(candidates[i])
      del candidates[i]

    return cells


  def columnForCell(self, cell):
    """
    Returns the index of the column that a cell belongs to.

    @param cell (int) Cell index

    @return (int) Column index
    """
    self._validateCell(cell)

    return int(cell / self.cellsPerColumn)


  def cellsForColumn(self, column):
    """
    Returns the indices of cells that belong to a column.

    @param column (int) Column index

    @return (set) Cell indices
    """
    self._validateColumn(column)

    start = self.cellsPerColumn * column
    end = start + self.cellsPerColumn
    return set([cell for cell in range(start, end)])


  def numberOfColumns(self):
    """
    Returns the number of columns in this layer.

    @return (int) Number of columns
    """
    return reduce(mul, self.columnDimensions, 1)


  def numberOfCells(self):
    """
    Returns the number of cells in this layer.

    @return (int) Number of cells
    """
    return self.numberOfColumns() * self.cellsPerColumn


  def mapCellsToColumns(self, cells):
    """
    Maps cells to the columns they belong to

    @param cells (set) Cells

    @return (dict) Mapping from columns to their cells in `cells`
    """
    cellsForColumns = defaultdict(set)

    for cell in cells:
      column = self.columnForCell(cell)
      cellsForColumns[column].add(cell)

    return cellsForColumns


  def write(self, proto):
    """
    Writes serialized data to proto object

    @param proto (DynamicStructBuilder) Proto object
    """
    proto.columnDimensions = self.columnDimensions
    proto.cellsPerColumn = self.cellsPerColumn
    proto.activationThreshold = self.activationThreshold
    proto.initialPermanence = self.initialPermanence
    proto.connectedPermanence = self.connectedPermanence
    proto.minThreshold = self.minThreshold
    proto.maxNewSynapseCount = self.maxNewSynapseCount
    proto.permanenceIncrement = self.permanenceIncrement
    proto.permanenceDecrement = self.permanenceDecrement
    proto.predictedSegmentDecrement = self.predictedSegmentDecrement

    self.connections.write(proto.connections)
    self._random.write(proto.random)

    proto.activeCells = list(self.activeCells)
    proto.predictiveCells = list(self.predictiveCells)
    proto.activeSegments = list(self.activeSegments)
    proto.winnerCells = list(self.winnerCells)
    proto.matchingSegments = list(self.matchingSegments)
    proto.matchingCells = list(self.matchingCells)


  @classmethod
  def read(cls, proto):
    """
    Reads deserialized data from proto object

    @param proto (DynamicStructBuilder) Proto object

    @return (TemporalMemory) TemporalMemory instance
    """
    tm = object.__new__(cls)

    tm.columnDimensions = list(proto.columnDimensions)
    tm.cellsPerColumn = int(proto.cellsPerColumn)
    tm.activationThreshold = int(proto.activationThreshold)
    tm.initialPermanence = proto.initialPermanence
    tm.connectedPermanence = proto.connectedPermanence
    tm.minThreshold = int(proto.minThreshold)
    tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
    tm.permanenceIncrement = proto.permanenceIncrement
    tm.permanenceDecrement = proto.permanenceDecrement
    tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

    tm.connections = Connections.read(proto.connections)
    tm._random = Random()
    tm._random.read(proto.random)

    tm.activeCells = set([int(x) for x in proto.activeCells])
    tm.predictiveCells = set([int(x) for x in proto.predictiveCells])
    tm.activeSegments = set([int(x) for x in proto.activeSegments])
    tm.winnerCells = set([int(x) for x in proto.winnerCells])
    tm.matchingSegments = set([int(x) for x in proto.matchingSegments])
    tm.matchingCells = set([int(x) for x in proto.matchingCells])

    return tm


  def __eq__(self, other):
    """
    Equality operator for TemporalMemory instances.
    Checks if two instances are functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
    epsilon = 0.0000001

    if self.columnDimensions != other.columnDimensions: return False
    if self.cellsPerColumn != other.cellsPerColumn: return False
    if self.activationThreshold != other.activationThreshold: return False
    if abs(self.initialPermanence - other.initialPermanence) > epsilon:
      return False
    if abs(self.connectedPermanence - other.connectedPermanence) > epsilon:
      return False
    if self.minThreshold != other.minThreshold: return False
    if self.maxNewSynapseCount != other.maxNewSynapseCount: return False
    if abs(self.permanenceIncrement - other.permanenceIncrement) > epsilon:
      return False
    if abs(self.permanenceDecrement - other.permanenceDecrement) > epsilon:
      return False
    if abs(self.predictedSegmentDecrement - other.predictedSegmentDecrement) > epsilon:
      return False

    if self.connections != other.connections: return False

    if self.activeCells != other.activeCells: return False
    if self.predictiveCells != other.predictiveCells: return False
    if self.winnerCells != other.winnerCells: return False
    if self.matchingSegments != other.matchingSegments: return False
    if self.matchingCells != other.matchingCells: return False

    return True


  def __ne__(self, other):
    """
    Non-equality operator for TemporalMemory instances.
    Checks if two instances are not functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
    return not self.__eq__(other)


  def _validateColumn(self, column):
    """
    Raises an error if column index is invalid.

    @param column (int) Column index
    """
    if column >= self.numberOfColumns() or column < 0:
      raise IndexError("Invalid column")


  def _validateCell(self, cell):
    """
    Raises an error if cell index is invalid.

    @param cell (int) Cell index
    """
    if cell >= self.numberOfCells() or cell < 0:
      raise IndexError("Invalid cell")


  @classmethod
  def getCellIndices(cls, cells):
    return [cls.getCellIndex(c) for c in cells]


  @staticmethod
  def getCellIndex(cell):
    return cell.idx
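
The cell/column helpers above are plain index arithmetic: a layer numbers its
cells 0..numberOfCells()-1 and cell c belongs to column c // cellsPerColumn. A
minimal standalone sketch of that mapping (hypothetical helper names, written
independently of the Connections/Random machinery above):

from collections import defaultdict

CELLS_PER_COLUMN = 32  # matches the class default


def column_for_cell(cell, cells_per_column=CELLS_PER_COLUMN):
  # Integer division recovers the owning column, as in columnForCell.
  return cell // cells_per_column


def cells_for_column(column, cells_per_column=CELLS_PER_COLUMN):
  # Contiguous block of cell indices, as in cellsForColumn.
  start = cells_per_column * column
  return set(range(start, start + cells_per_column))


def map_cells_to_columns(cells, cells_per_column=CELLS_PER_COLUMN):
  # Group arbitrary cell indices by their owning column, as in mapCellsToColumns.
  grouped = defaultdict(set)
  for cell in cells:
    grouped[column_for_cell(cell, cells_per_column)].add(cell)
  return grouped


if __name__ == "__main__":
  assert column_for_cell(70) == 2     # 70 // 32 == 2
  assert 70 in cells_for_column(2)    # column 2 covers cells 64..95
  print(map_cells_to_columns([0, 1, 70, 95]))  # cells grouped under columns 0 and 2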
Example #5
class TemporalMemory(object):
  """ Class implementing the Temporal Memory algorithm. """

  def __init__(self,
               columnDimensions=(2048,),
               cellsPerColumn=32,
               activationThreshold=13,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               maxNewSynapseCount=20,
               permanenceIncrement=0.10,
               permanenceDecrement=0.10,
               predictedSegmentDecrement=0.0,
               maxSegmentsPerCell=255,
               maxSynapsesPerSegment=255,
               seed=42,
               **kwargs):
    """
    @param columnDimensions          (list)  Dimensions of the column space
    @param cellsPerColumn            (int)   Number of cells per column
    @param activationThreshold       (int)   If the number of active connected
                                             synapses on a segment is at least
                                             this threshold, the segment is said
                                             to be active.
    @param initialPermanence         (float) Initial permanence of a new synapse
    @param connectedPermanence       (float) If the permanence value for a
                                             synapse is greater than this value,
                                             it is said to be connected.
    @param minThreshold              (int)   If the number of synapses active on
                                             a segment is at least this
                                             threshold, it is selected as the
                                             best matching cell in a bursting
                                             column
    @param maxNewSynapseCount        (int)   The maximum number of synapses
                                             added to a segment during learning
    @param permanenceIncrement       (float) Amount by which permanences of
                                             synapses are incremented during
                                             learning.
    @param permanenceDecrement       (float) Amount by which permanences of
                                             synapses are decremented during
                                             learning.
    @param predictedSegmentDecrement (float) Amount by which active permanences
                                             of synapses of previously predicted
                                             but inactive segments are
                                             decremented.
    @param seed                      (int)   Seed for the random number
                                             generator
    Notes:

    predictedSegmentDecrement: A good value is just a bit larger than
    (the column-level sparsity * permanenceIncrement). So, if column-level
    sparsity is 2% and permanenceIncrement is 0.01, this parameter should be
    something like 4% * 0.01 = 0.0004.
    """
    # Error checking
    if not len(columnDimensions):
      raise ValueError("Number of column dimensions must be greater than 0")

    if cellsPerColumn <= 0:
      raise ValueError("Number of cells per column must be greater than 0")

    # TODO: Validate all parameters (and add validation tests)

    # Save member variables
    self.columnDimensions = columnDimensions
    self.cellsPerColumn = cellsPerColumn
    self.activationThreshold = activationThreshold
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.minThreshold = minThreshold
    self.maxNewSynapseCount = maxNewSynapseCount
    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement
    self.predictedSegmentDecrement = predictedSegmentDecrement
    # Initialize member variables
    self.connections = Connections(self.numberOfCells(),
                                   maxSegmentsPerCell=maxSegmentsPerCell,
                                   maxSynapsesPerSegment=maxSynapsesPerSegment)
    self._random = Random(seed)

    self.activeCells = []
    self.winnerCells = []
    self.activeSegments = []
    self.matchingSegments = []

  # ==============================
  # Main functions
  # ==============================

  def compute(self, activeColumns, learn=True):
    """ Feeds input record through TM, performing inference and learning.

    @param activeColumns (set)  Indices of active columns
    @param learn         (bool) Whether or not learning is enabled

    Updates member variables:
      - `activeCells`     (list)
      - `winnerCells`     (list)
      - `activeSegments`  (list)
      - `matchingSegments` (list)

    Pseudocode:
    for each column
      if column is active and has active distal dendrite segments
        call activatePredictedColumn
      if column is active and doesn't have active distal dendrite segments
        call burstColumn
      if column is inactive and has matching distal dendrite segments
        call punishPredictedColumn
    for each distal dendrite segment with activity >= activationThreshold
      mark the segment as active
    for each distal dendrite segment with unconnected activity >= minThreshold
      mark the segment as matching
    """
    prevActiveCells = self.activeCells
    prevWinnerCells = self.winnerCells

    activeColumns = sorted(activeColumns)

    self.activeCells = []
    self.winnerCells = []


    segToCol = lambda segment: int(segment.segment.cell / self.cellsPerColumn)
    identity = lambda column: int(column)

    for columnData in groupby2(activeColumns, identity,
                               self.activeSegments, segToCol,
                               self.matchingSegments, segToCol):
      (column,
       activeColumns,
       activeSegmentsOnCol,
       matchingSegmentsOnCol) = columnData
      if activeColumns is not None:
        if activeSegmentsOnCol is not None:
          cellsToAdd = TemporalMemory.activatePredictedColumn(
            activeSegmentsOnCol,
            self.connections,
            learn,
            self.permanenceDecrement,
            self.permanenceIncrement,
            prevActiveCells)

          self.activeCells += cellsToAdd
          self.winnerCells += cellsToAdd
        else:
          (cellsToAdd,
           winnerCell) = TemporalMemory.burstColumn(self.cellsPerColumn,
                                                    column,
                                                    self.connections,
                                                    self.initialPermanence,
                                                    learn,
                                                    matchingSegmentsOnCol,
                                                    self.maxNewSynapseCount,
                                                    self.permanenceDecrement,
                                                    self.permanenceIncrement,
                                                    prevActiveCells,
                                                    prevWinnerCells,
                                                    self._random)

          self.activeCells += cellsToAdd
          self.winnerCells.append(winnerCell)
      else:
        if learn:
          TemporalMemory.punishPredictedColumn(self.connections,
                                               matchingSegmentsOnCol,
                                               self.predictedSegmentDecrement,
                                               prevActiveCells)

    (activeSegments,
     matchingSegments) = self.connections.computeActivity(
       self.activeCells,
       self.connectedPermanence,
       self.activationThreshold,
       0.0,
       self.minThreshold,
       learn)

    self.activeSegments = activeSegments
    self.matchingSegments = matchingSegments


  def reset(self):
    """ Indicates the start of a new sequence and resets the sequence
        state of the TM. """
    self.activeCells = []
    self.winnerCells = []
    self.activeSegments = []
    self.matchingSegments = []


  @staticmethod
  def activatePredictedColumn(activeSegments, connections, learn,
                              permanenceDecrement, permanenceIncrement,
                              prevActiveCells):
    """ Determines which cells in a predicted column should be added to
    winner cells list and calls adaptSegment on the segments that correctly
    predicted this column.

    @param activeSegments  (iter)   An iterable of SegmentOverlap objects for
                                    the column compute is operating on that
                                    are active
    @param connections     (Object) Connections instance for the tm
    @param learn           (bool)   Determines if permanences are adjusted
    @param permanenceDecrement (float) Amount by which permanences of synapses
                                    are decremented during learning.
    @param permanenceIncrement (float) Amount by which permanences of synapses
                                    are incremented during learning.
    @param prevActiveCells (list)   Active cells in `t-1`

    @return cellsToAdd (list) A list of predicted cells that will be added to
                              active cells and winner cells.

    Pseudocode:
    for each cell in the column that has an active distal dendrite segment
      mark the cell as active
      mark the cell as a winner cell
      (learning) for each active distal dendrite segment
        strengthen active synapses
        weaken inactive synapses
    """

    cellsToAdd = []
    cell = None
    for active in activeSegments:
      newCell = cell != active.segment.cell
      if newCell:
        cell = active.segment.cell
        cellsToAdd.append(cell)

      if learn:
        TemporalMemory.adaptSegment(connections, prevActiveCells,
                                    permanenceIncrement, permanenceDecrement,
                                    active.segment)

    return cellsToAdd


  @staticmethod
  def burstColumn(cellsPerColumn, column, connections,
                  initialPermanence, learn, matchingSegments,
                  maxNewSynapseCount, permanenceDecrement, permanenceIncrement,
                  prevActiveCells, prevWinnerCells, random):
    """ Activates all of the cells in an unpredicted active column,
    chooses a winner cell, and, if learning is turned on, either adapts or
    creates a segment. growSynapses is invoked on this segment.

    @param cellsPerColumn      (int)    Number of cells per column
    @param column              (int)    Index of bursting column
    @param connections         (Object) Connections instance for the tm
    @param initialPermanence   (float)  Initial permanence of a new synapse.
    @param learn               (bool)   Whether or not learning is enabled
    @param matchingSegments    (iter)   An iterable of SegmentOverlap objects
                                        for the column compute is operating on
                                        that are matching; None if empty.
    @param maxNewSynapseCount  (int)    The maximum number of synapses added to
                                        a segment during learning
    @param permanenceDecrement (float)  Amount by which permanences of synapses
                                        are decremented during learning
    @param permanenceIncrement (float)  Amount by which permanences of synapses
                                        are incremented during learning
    @param prevActiveCells     (list)   Active cells in `t-1`
    @param prevWinnerCells     (list)   Winner cells in `t-1`
    @param random              (Object) Random number generator

    @return (tuple) Contains:
                      `cells`         (list),
                      `bestCell`      (int),

    Pseudocode:
    mark all cells as active
    if there are any matching distal dendrite segments
      find the most active matching segment
      mark its cell as a winner cell
      (learning)
        grow and reinforce synapses to previous winner cells
    else
      find the cell with the least segments, mark it as a winner cell
      (learning)
        (optimization) if there are prev winner cells
          add a segment to this winner cell
          grow synapses to previous winner cells
    """
    start = cellsPerColumn * column
    cells = range(start, start + cellsPerColumn)

    if matchingSegments is not None:
      bestSegment = max(matchingSegments, key=lambda seg: seg.overlap)
      bestCell = bestSegment.segment.cell
      if learn:
        TemporalMemory.adaptSegment(connections, prevActiveCells,
                                    permanenceIncrement, permanenceDecrement,
                                    bestSegment.segment)

        nGrowDesired = maxNewSynapseCount - bestSegment.overlap

        if nGrowDesired > 0:
          TemporalMemory.growSynapses(connections, initialPermanence,
                                      nGrowDesired, prevWinnerCells,
                                      random, bestSegment.segment)
    else:
      bestCell = TemporalMemory.leastUsedCell(cells, connections, random)
      if learn:
        nGrowExact = min(maxNewSynapseCount, len(prevWinnerCells))
        if nGrowExact > 0:
          bestSegment = connections.createSegment(bestCell)
          TemporalMemory.growSynapses(connections, initialPermanence,
                                      nGrowExact, prevWinnerCells,
                                      random, bestSegment)

    return cells, bestCell


  @staticmethod
  def punishPredictedColumn(connections, matchingSegments,
                            predictedSegmentDecrement, prevActiveCells):
    """Punishes the Segments that incorrectly predicted a column to be active.

    @param connections         (Object) Connections instance for the tm
    @param matchingSegments    (iter)   An iterable of SegmentOverlap objects
                                        for the column compute is operating on
                                        that are matching; None if empty
    @param predictedSegmentDecrement (float) Amount by which permanences of
                                             synapses are decremented during
                                             learning.
    @param prevActiveCells     (list)   Active cells in `t-1`

    Pseudocode:
    for each matching segment in the column
      weaken active synapses
    """
    if predictedSegmentDecrement > 0.0 and matchingSegments is not None:
      for segment in matchingSegments:
        TemporalMemory.adaptSegment(connections, prevActiveCells,
                                    -predictedSegmentDecrement,
                                    0.0, segment.segment)

  # ==============================
  # Helper functions
  # ==============================


  @staticmethod
  def leastUsedCell(cells, connections, random):
    """ Gets the cell with the smallest number of segments.
    Break ties randomly.

    @param cells       (list)   Indices of cells
    @param connections (Object) Connections instance for the tm
    @param random      (Object) Random number generator

    @return (int) Cell index
    """
    leastUsedCells = []
    minNumSegments = float("inf")
    for cell in cells:
      numSegments = len(connections.segmentsForCell(cell))

      if numSegments < minNumSegments:
        minNumSegments = numSegments
        leastUsedCells = []

      if numSegments == minNumSegments:
        leastUsedCells.append(cell)

    i = random.getUInt32(len(leastUsedCells))
    return leastUsedCells[i]
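
  # Illustrative trace for leastUsedCell (added note, not original code): if a
  # column's cells have segment counts [2, 0, 1, 0], leastUsedCells ends up
  # holding the two cells with zero segments, and the random draw breaks the
  # tie between them.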


  @staticmethod
  def growSynapses(connections, initialPermanence, nDesiredNewSynapes,
                   prevWinnerCells, random, segment):
    """ Creates nDesiredNewSynapes synapses on the segment passed in if
    possible, choosing random cells from the previous winner cells that are
    not already on the segment.

    @param  connections        (Object) Connections instance for the tm
    @param  initialPermanence  (float)  Initial permanence of a new synapse.
    @param  nDesiredNewSynapes (int)    Desired number of synapses to grow
    @param  prevWinnerCells    (list)   Winner cells in `t-1`
    @param  random             (Object) Random number generator
    @param  segment            (Object) Segment to grow synapses on.

    Notes: Overwriting the index that was just used with the last eligible
    value mirrors the C++ implementation, which uses iter_swap on vectors, so
    both implementations produce identical results.
    """
    candidates = list(prevWinnerCells)
    eligibleEnd = len(candidates) - 1

    for synapse in connections.synapsesForSegment(segment):
      presynapticCell = connections.dataForSynapse(synapse).presynapticCell
      try:
        index = candidates[:eligibleEnd + 1].index(presynapticCell)
      except ValueError:
        index = -1
      if index != -1:
        candidates[index] = candidates[eligibleEnd]
        eligibleEnd -= 1

    candidatesLength = eligibleEnd + 1
    nActual = min(nDesiredNewSynapes, candidatesLength)

    for _ in range(nActual):
      rand = random.getUInt32(candidatesLength)
      connections.createSynapse(segment, candidates[rand],
                                initialPermanence)
      candidates[rand] = candidates[candidatesLength - 1]
      candidatesLength -= 1
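
  # Worked illustration of the swap trick above (added note, not original
  # code): with candidates = [3, 7, 9] and cell 7 already connected to the
  # segment, candidates[1] is overwritten with candidates[eligibleEnd] (9) and
  # eligibleEnd shrinks, so the eligible prefix becomes [3, 9]. Each random
  # pick is then removed the same way, matching the C++ iter_swap behaviour
  # described in the Notes.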


  @staticmethod
  def adaptSegment(connections, prevActiveCells, permanenceIncrement,
                   permanenceDecrement, segment):
    """ Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.

    @param connections          (Object) Connections instance for the tm
    @param prevActiveCells      (list)   Active cells in `t-1`
    @param permanenceIncrement  (float)  Amount to increment active synapses
    @param permanenceDecrement  (float)  Amount to decrement inactive synapses
    @param segment              (Object) Segment to adapt
    """

    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)
      permanence = synapseData.permanence

      if binSearch(prevActiveCells, synapseData.presynapticCell) != -1:
        permanence += permanenceIncrement
      else:
        permanence -= permanenceDecrement

      # Keep permanence within min/max bounds
      permanence = max(0.0, min(1.0, permanence))

      if permanence < EPSILON:
        connections.destroySynapse(synapse)
      else:
        connections.updateSynapsePermanence(synapse, permanence)

    if connections.numSynapses(segment) == 0:
      connections.destroySegment(segment)


  def columnForCell(self, cell):
    """ Returns the index of the column that a cell belongs to.

    @param cell (int) Cell index

    @return (int) Column index
    """
    self._validateCell(cell)

    return int(cell / self.cellsPerColumn)


  def cellsForColumn(self, column):
    """ Returns the indices of cells that belong to a column.

    @param column (int) Column index

    @return (list) Cell indices
    """
    self._validateColumn(column)

    start = self.cellsPerColumn * column
    end = start + self.cellsPerColumn
    return range(start, end)


  def numberOfColumns(self):
    """ Returns the number of columns in this layer.

    @return (int) Number of columns
    """
    return reduce(mul, self.columnDimensions, 1)


  def numberOfCells(self):
    """ Returns the number of cells in this layer.

    @return (int) Number of cells
    """
    return self.numberOfColumns() * self.cellsPerColumn
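
  # For the constructor defaults above (columnDimensions=(2048,),
  # cellsPerColumn=32) this gives 2048 columns and 2048 * 32 = 65536 cells,
  # so valid cell indices run from 0 to 65535. (Added note, not original code.)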


  def mapCellsToColumns(self, cells):
    """ Maps cells to the columns they belong to

    @param cells (set) Cells

    @return (dict) Mapping from columns to their cells in `cells`
    """
    cellsForColumns = defaultdict(set)

    for cell in cells:
      column = self.columnForCell(cell)
      cellsForColumns[column].add(cell)

    return cellsForColumns


  def getActiveCells(self):
    """ Returns the indices of the active cells.

    @return (list) Indices of active cells.
    """
    return self.getCellIndices(self.activeCells)


  def getPredictiveCells(self):
    """ Returns the indices of the predictive cells.

    @return (list) Indices of predictive cells.
    """
    predictiveCells = set()
    for activeSegment in self.activeSegments:
      cell = activeSegment.segment.cell
      if cell not in predictiveCells:
        predictiveCells.add(cell)

    return sorted(predictiveCells)


  def getWinnerCells(self):
    """ Returns the indices of the winner cells.

    @return (list) Indices of winner cells.
    """
    return self.getCellIndices(self.winnerCells)


  def getCellsPerColumn(self):
    """ Returns the number of cells per column.

    @return (int) The number of cells per column.
    """
    return self.cellsPerColumn


  def getColumnDimensions(self):
    """
    Returns the dimensions of the columns in the region.
    @return (tuple) Column dimensions
    """
    return self.columnDimensions


  def getActivationThreshold(self):
    """
    Returns the activation threshold.
    @return (int) The activation threshold.
    """
    return self.activationThreshold


  def setActivationThreshold(self, activationThreshold):
    """
    Sets the activation threshold.
    @param activationThreshold (int) activation threshold.
    """
    self.activationThreshold = activationThreshold


  def getInitialPermanence(self):
    """
    Get the initial permanence.
    @return (float) The initial permanence.
    """
    return self.initialPermanence


  def setInitialPermanence(self, initialPermanence):
    """
    Sets the initial permanence.
    @param initialPermanence (float) The initial permanence.
    """
    self.initialPermanence = initialPermanence


  def getMinThreshold(self):
    """
    Returns the min threshold.
    @return (int) The min threshold.
    """
    return self.minThreshold


  def setMinThreshold(self, minThreshold):
    """
    Sets the min threshold.
    @param minThreshold (int) min threshold.
    """
    self.minThreshold = minThreshold


  def getMaxNewSynapseCount(self):
    """
    Returns the max new synapse count.
    @return (int) The max new synapse count.
    """
    return self.maxNewSynapseCount


  def setMaxNewSynapseCount(self, maxNewSynapseCount):
    """
    Sets the max new synapse count.
    @param maxNewSynapseCount (int) Max new synapse count.
    """
    self.maxNewSynapseCount = maxNewSynapseCount


  def getPermanenceIncrement(self):
    """
    Get the permanence increment.
    @return (float) The permanence increment.
    """
    return self.permanenceIncrement


  def setPermanenceIncrement(self, permanenceIncrement):
    """
    Sets the permanence increment.
    @param permanenceIncrement (float) The permanence increment.
    """
    self.permanenceIncrement = permanenceIncrement


  def getPermanenceDecrement(self):
    """
    Get the permanence decrement.
    @return (float) The permanence decrement.
    """
    return self.permanenceDecrement


  def setPermanenceDecrement(self, permanenceDecrement):
    """
    Sets the permanence decrement.
    @param permanenceDecrement (float) The permanence decrement.
    """
    self.permanenceDecrement = permanenceDecrement


  def getPredictedSegmentDecrement(self):
    """
    Get the predicted segment decrement.
    @return (float) The predicted segment decrement.
    """
    return self.predictedSegmentDecrement


  def setPredictedSegmentDecrement(self, predictedSegmentDecrement):
    """
    Sets the predicted segment decrement.
    @param predictedSegmentDecrement (float) The predicted segment decrement.
    """
    self.predictedSegmentDecrement = predictedSegmentDecrement


  def getConnectedPermanence(self):
    """
    Get the connected permanence.
    @return (float) The connected permanence.
    """
    return self.connectedPermanence


  def setConnectedPermanence(self, connectedPermanence):
    """
    Sets the connected permanence.
    @param connectedPermanence (float) The connected permanence.
    """
    self.connectedPermanence = connectedPermanence


  def write(self, proto):
    """ Writes serialized data to proto object

    @param proto (DynamicStructBuilder) Proto object
    """
    proto.columnDimensions = self.columnDimensions
    proto.cellsPerColumn = self.cellsPerColumn
    proto.activationThreshold = self.activationThreshold
    proto.initialPermanence = self.initialPermanence
    proto.connectedPermanence = self.connectedPermanence
    proto.minThreshold = self.minThreshold
    proto.maxNewSynapseCount = self.maxNewSynapseCount
    proto.permanenceIncrement = self.permanenceIncrement
    proto.permanenceDecrement = self.permanenceDecrement
    proto.predictedSegmentDecrement = self.predictedSegmentDecrement

    self.connections.write(proto.connections)
    self._random.write(proto.random)

    proto.activeCells = list(self.activeCells)
    proto.winnerCells = list(self.winnerCells)
    activeSegmentOverlaps = \
        proto.init('activeSegmentOverlaps', len(self.activeSegments))
    for i, active in enumerate(self.activeSegments):
      activeSegmentOverlaps[i].cell = active.segment.cell
      activeSegmentOverlaps[i].segment = active.segment.idx
      activeSegmentOverlaps[i].overlap = active.overlap

    matchingSegmentOverlaps = \
        proto.init('matchingSegmentOverlaps', len(self.matchingSegments))
    for i, matching in enumerate(self.matchingSegments):
      matchingSegmentOverlaps[i].cell = matching.segment.cell
      matchingSegmentOverlaps[i].segment = matching.segment.idx
      matchingSegmentOverlaps[i].overlap = matching.overlap



  @classmethod
  def read(cls, proto):
    """ Reads deserialized data from proto object

    @param proto (DynamicStructBuilder) Proto object

    @return (TemporalMemory) TemporalMemory instance
    """
    tm = object.__new__(cls)

    tm.columnDimensions = list(proto.columnDimensions)
    tm.cellsPerColumn = int(proto.cellsPerColumn)
    tm.activationThreshold = int(proto.activationThreshold)
    tm.initialPermanence = proto.initialPermanence
    tm.connectedPermanence = proto.connectedPermanence
    tm.minThreshold = int(proto.minThreshold)
    tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
    tm.permanenceIncrement = proto.permanenceIncrement
    tm.permanenceDecrement = proto.permanenceDecrement
    tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

    tm.connections = Connections.read(proto.connections)
    #pylint: disable=W0212
    tm._random = Random()
    tm._random.read(proto.random)
    #pylint: enable=W0212

    tm.activeCells = [int(x) for x in proto.activeCells]
    tm.winnerCells = [int(x) for x in proto.winnerCells]

    tm.activeSegments = []
    tm.matchingSegments = []

    for i in xrange(len(proto.activeSegmentOverlaps)):
      protoSegmentOverlap = proto.activeSegmentOverlaps[i]
      segment = tm.connections.getSegment(protoSegmentOverlap.segment,
                                          protoSegmentOverlap.cell)
      segmentOverlap = SegmentOverlap(segment, protoSegmentOverlap.overlap)
      tm.activeSegments.append(segmentOverlap)

    for i in xrange(len(proto.matchingSegmentOverlaps)):
      protoSegmentOverlap = proto.matchingSegmentOverlaps[i]
      segment = tm.connections.getSegment(protoSegmentOverlap.segment,
                                          protoSegmentOverlap.cell)
      segmentOverlap = SegmentOverlap(segment, protoSegmentOverlap.overlap)
      tm.matchingSegments.append(segmentOverlap)

    return tm


  def __eq__(self, other):
    """ Equality operator for TemporalMemory instances.
    Checks if two instances are functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
    if self.columnDimensions != other.columnDimensions:
      return False
    if self.cellsPerColumn != other.cellsPerColumn:
      return False
    if self.activationThreshold != other.activationThreshold:
      return False
    if abs(self.initialPermanence - other.initialPermanence) > EPSILON:
      return False
    if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON:
      return False
    if self.minThreshold != other.minThreshold:
      return False
    if self.maxNewSynapseCount != other.maxNewSynapseCount:
      return False
    if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON:
      return False
    if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON:
      return False
    if abs(self.predictedSegmentDecrement -
           other.predictedSegmentDecrement) > EPSILON:
      return False

    if self.connections != other.connections:
      return False
    if self.activeCells != other.activeCells:
      return False
    if self.winnerCells != other.winnerCells:
      return False

    if self.matchingSegments != other.matchingSegments:
      return False
    if self.activeSegments != other.activeSegments:
      return False

    return True


  def __ne__(self, other):
    """ Non-equality operator for TemporalMemory instances.
    Checks if two instances are not functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
    return not self.__eq__(other)


  def _validateColumn(self, column):
    """ Raises an error if column index is invalid.

    @param column (int) Column index
    """
    if column >= self.numberOfColumns() or column < 0:
      raise IndexError("Invalid column")


  def _validateCell(self, cell):
    """ Raises an error if cell index is invalid.

    @param cell (int) Cell index
    """
    if cell >= self.numberOfCells() or cell < 0:
      raise IndexError("Invalid cell")


  @classmethod
  def getCellIndices(cls, cells):
    """ Returns the indices of the cells passed in.

    @param cells (list) cells to find the indices of
    """
    return [cls.getCellIndex(c) for c in cells]


  @staticmethod
  def getCellIndex(cell):
    """ Returns the index of the cell

    @param cell (int) cell to find the index of
    """
    return cell
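
A hedged usage sketch for the class above. It assumes this TemporalMemory is
importable together with the module-level helpers the excerpt does not show
(Connections, Random, groupby2, binSearch, EPSILON); the import path and the
column indices are illustrative assumptions, not part of the original example.

# Hypothetical driver script.
from temporal_memory import TemporalMemory  # import path is an assumption

tm = TemporalMemory(columnDimensions=(2048,), cellsPerColumn=32, seed=42)

sequence = [{7, 21, 103}, {21, 512, 900}, {7, 103, 512}]  # made-up columns
for activeColumns in sequence:
  tm.compute(activeColumns, learn=True)
  print(tm.getActiveCells())      # cells active at this timestep
  print(tm.getPredictiveCells())  # cells with an active distal segment
  print(tm.getWinnerCells())      # cells chosen for learning

tm.reset()  # clear sequence state before presenting an unrelated sequence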
Example #6
class TemporalMemory(object):
    """ Class implementing the Temporal Memory algorithm. """
    def __init__(self,
                 columnDimensions=(2048, ),
                 cellsPerColumn=32,
                 activationThreshold=13,
                 initialPermanence=0.21,
                 connectedPermanence=0.50,
                 minThreshold=10,
                 maxNewSynapseCount=20,
                 permanenceIncrement=0.10,
                 permanenceDecrement=0.10,
                 predictedSegmentDecrement=0.0,
                 maxSegmentsPerCell=255,
                 maxSynapsesPerSegment=255,
                 seed=42,
                 **kwargs):
        """
    @param columnDimensions          (list)  Dimensions of the column space
    @param cellsPerColumn            (int)   Number of cells per column
    @param activationThreshold       (int)   If the number of active connected
                                             synapses on a segment is at least
                                             this threshold, the segment is said
                                             to be active.
    @param initialPermanence         (float) Initial permanence of a new synapse
    @param connectedPermanence       (float) If the permanence value for a
                                             synapse is greater than this value,
                                             it is said to be connected.
    @param minThreshold              (int)   If the number of synapses active on
                                             a segment is at least this
                                             threshold, it is selected as the
                                             best matching cell in a bursting
                                             column
    @param maxNewSynapseCount        (int)   The maximum number of synapses
                                             added to a segment during learning
    @param permanenceIncrement       (float) Amount by which permanences of
                                             synapses are incremented during
                                             learning.
    @param permanenceDecrement       (float) Amount by which permanences of
                                             synapses are decremented during
                                             learning.
    @param predictedSegmentDecrement (float) Amount by which active permanences
                                             of synapses of previously predicted
                                             but inactive segments are
                                             decremented.
    @param seed                      (int)   Seed for the random number
                                             generator
    Notes:

    predictedSegmentDecrement: A good value is just a bit larger than
    (the column-level sparsity * permanenceIncrement). So, if column-level
    sparsity is 2% and permanenceIncrement is 0.01, this parameter should be
    something like 4% * 0.01 = 0.0004.
    """
        # Error checking
        if not len(columnDimensions):
            raise ValueError(
                "Number of column dimensions must be greater than 0")

        if cellsPerColumn <= 0:
            raise ValueError(
                "Number of cells per column must be greater than 0")

        if minThreshold > activationThreshold:
            raise ValueError(
                "The min threshold can't be greater than the activation threshold"
            )

        # TODO: Validate all parameters (and add validation tests)

        # Save member variables
        self.columnDimensions = columnDimensions
        self.cellsPerColumn = cellsPerColumn
        self.activationThreshold = activationThreshold
        self.initialPermanence = initialPermanence
        self.connectedPermanence = connectedPermanence
        self.minThreshold = minThreshold
        self.maxNewSynapseCount = maxNewSynapseCount
        self.permanenceIncrement = permanenceIncrement
        self.permanenceDecrement = permanenceDecrement
        self.predictedSegmentDecrement = predictedSegmentDecrement
        # Initialize member variables
        self.connections = Connections(
            self.numberOfCells(),
            maxSegmentsPerCell=maxSegmentsPerCell,
            maxSynapsesPerSegment=maxSynapsesPerSegment)
        self._random = Random(seed)

        self.activeCells = []
        self.winnerCells = []
        self.activeSegments = []
        self.matchingSegments = []

    # ==============================
    # Main functions
    # ==============================

    def compute(self, activeColumns, learn=True):
        """ Feeds input record through TM, performing inference and learning.

    @param activeColumns (set)  Indices of active columns
    @param learn         (bool) Whether or not learning is enabled
    """
        self.activateCells(sorted(activeColumns), learn)
        self.activateDendrites(learn)

    def activateCells(self, activeColumns, learn=True):
        """ Calculate the active cells, using the current active columns and
    dendrite segments. Grow and reinforce synapses.

    @param activeColumns (list) A sorted list of active column indices.
    @param learn (bool) If true, reinforce / punish / grow synapses.

    Pseudocode:
    for each column
      if column is active and has active distal dendrite segments
        call activatePredictedColumn
      if column is active and doesn't have active distal dendrite segments
        call burstColumn
      if column is inactive and has matching distal dendrite segments
        call punishPredictedColumn
    """
        prevActiveCells = self.activeCells
        prevWinnerCells = self.winnerCells
        self.activeCells = []
        self.winnerCells = []

        segToCol = lambda segment: int(
            segment.segment.cell / self.cellsPerColumn)
        identity = lambda x: x

        for columnData in groupby2(activeColumns, identity,
                                   self.activeSegments, segToCol,
                                   self.matchingSegments, segToCol):
            (column, activeColumns, activeSegmentsOnCol,
             matchingSegmentsOnCol) = columnData
            if activeColumns is not None:
                if activeSegmentsOnCol is not None:
                    cellsToAdd = TemporalMemory.activatePredictedColumn(
                        self.connections, self._random, activeSegmentsOnCol,
                        matchingSegmentsOnCol, prevActiveCells,
                        prevWinnerCells, self.maxNewSynapseCount,
                        self.initialPermanence, self.permanenceIncrement,
                        self.permanenceDecrement, learn)

                    self.activeCells += cellsToAdd
                    self.winnerCells += cellsToAdd
                else:
                    (cellsToAdd, winnerCell) = TemporalMemory.burstColumn(
                        self.connections, self._random, column,
                        matchingSegmentsOnCol, prevActiveCells,
                        prevWinnerCells, self.cellsPerColumn,
                        self.maxNewSynapseCount, self.initialPermanence,
                        self.permanenceIncrement, self.permanenceDecrement,
                        learn)

                    self.activeCells += cellsToAdd
                    self.winnerCells.append(winnerCell)
            else:
                if learn:
                    TemporalMemory.punishPredictedColumn(
                        self.connections, matchingSegmentsOnCol,
                        self.predictedSegmentDecrement, prevActiveCells)

    def activateDendrites(self, learn=True):
        """ Calculate dendrite segment activity, using the current active cells.

    @param learn (bool)
    If true, segment activations will be recorded. This information is used
    during segment cleanup.

    Pseudocode:
    for each distal dendrite segment with activity >= activationThreshold
      mark the segment as active
    for each distal dendrite segment with unconnected activity >= minThreshold
      mark the segment as matching
    """
        (activeSegments, matchingSegments) = self.connections.computeActivity(
            self.activeCells, self.connectedPermanence,
            self.activationThreshold, 0.0, self.minThreshold, learn)

        self.activeSegments = activeSegments
        self.matchingSegments = matchingSegments
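
    # Note (added for clarity): compute() is exactly activateCells() followed
    # by activateDendrites(), so callers that need finer-grained control can
    # run the two phases separately with the same effect.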

    def reset(self):
        """ Indicates the start of a new sequence and resets the sequence
        state of the TM. """
        self.activeCells = []
        self.winnerCells = []
        self.activeSegments = []
        self.matchingSegments = []

    @staticmethod
    def activatePredictedColumn(connections, random, activeSegments,
                                matchingSegments, prevActiveCells,
                                prevWinnerCells, maxNewSynapseCount,
                                initialPermanence, permanenceIncrement,
                                permanenceDecrement, learn):
        """ Determines which cells in a predicted column should be added to winner
    cells list, and learns on the segments that correctly predicted this column.

    @param connections     (Object) Connections for the TM. Gets mutated.
    @param random          (Object) Random number generator. Gets mutated.
    @param activeSegments  (iter)   An iterable of SegmentOverlap objects.
                                    Active segments for this column, and
                                    an overlap for each segment.
    @param matchingSegments (iter)  An iterable of SegmentOverlap objects.
                                    Matching segments for this column, and
                                    an overlap for each segment.
    @param prevActiveCells (list)   Active cells in `t-1`.
    @param prevWinnerCells     (list)   Winner cells in `t-1`.
    @param maxNewSynapseCount  (int)    The maximum number of synapses added to
                                        a segment during learning.
    @param initialPermanence   (float)  Initial permanence of a new synapse.
    @param permanenceIncrement (float) Amount by which permanences of synapses
                                       are incremented during learning.
    @param permanenceDecrement (float) Amount by which permanences of synapses
                                       are decremented during learning.
    @param learn           (bool)   Determines if permanences are adjusted.

    @return cellsToAdd (list) A list of predicted cells that will be added to
                              active cells and winner cells.

    Pseudocode:
    for each cell in the column that has an active distal dendrite segment
      mark the cell as active
      mark the cell as a winner cell
      (learning) for each active distal dendrite segment
        strengthen active synapses
        weaken inactive synapses
        grow synapses to previous winner cells
    """

        cellsToAdd = []

        byCell = lambda x: x.segment.cell
        for cellData in groupby2(activeSegments, byCell, matchingSegments,
                                 byCell):
            (cell, activeSegmentsOnCell, matchingSegmentsOnCell) = cellData

            if activeSegmentsOnCell is not None:
                cellsToAdd.append(cell)

                if learn:
                    # Learn on every active segment.
                    #
                    # For each active segment, get its corresponding matching
                    # segment so that we can use its overlap to compute the
                    # number of synapses to grow.
                    bySegment = lambda x: x.segment
                    for segmentData in groupby2(activeSegmentsOnCell,
                                                bySegment,
                                                matchingSegmentsOnCell,
                                                bySegment):
                        (segment, activeOverlaps,
                         matchingOverlaps) = segmentData

                        if activeOverlaps is not None:
                            # Active segments are a superset of matching segments,
                            # so this iterator must contain a segment (and overlap).
                            matching = matchingOverlaps.next()

                            TemporalMemory.adaptSegment(
                                connections, segment, prevActiveCells,
                                permanenceIncrement, permanenceDecrement)

                            nGrowDesired = maxNewSynapseCount - matching.overlap
                            if nGrowDesired > 0:
                                TemporalMemory.growSynapses(
                                    connections, random, segment, nGrowDesired,
                                    prevWinnerCells, initialPermanence)

        return cellsToAdd

    @staticmethod
    def burstColumn(connections, random, column, matchingSegments,
                    prevActiveCells, prevWinnerCells, cellsPerColumn,
                    maxNewSynapseCount, initialPermanence, permanenceIncrement,
                    permanenceDecrement, learn):
        """ Activates all of the cells in an unpredicted active column, chooses a
    winner cell, and, if learning is turned on, learns on one segment, growing a
    new segment if necessary.

    @param connections         (Object) Connections for the TM. Gets mutated.
    @param random              (Object) Random number generator. Gets mutated.
    @param column              (int)    Index of bursting column.
    @param matchingSegments    (iter)   An iterable of SegmentOverlap objects.
                                        Matching segments for this column, and
                                        an overlap for each segment.
    @param prevActiveCells     (list)   Active cells in `t-1`.
    @param prevWinnerCells     (list)   Winner cells in `t-1`.
    @param cellsPerColumn      (int)    Number of cells per column.
    @param maxNewSynapseCount  (int)    The maximum number of synapses added to
                                        a segment during learning.
    @param initialPermanence   (float)  Initial permanence of a new synapse.
    @param permanenceIncrement (float)  Amount by which permanences of synapses
                                        are incremented during learning.
    @param permanenceDecrement (float)  Amount by which permanences of synapses
                                        are decremented during learning.
    @param learn               (bool)   Whether or not learning is enabled.

    @return (tuple) Contains:
                      `cells`         (iter),
                      `winnerCell`    (int),

    Pseudocode:
    mark all cells as active
    if there are any matching distal dendrite segments
      find the most active matching segment
      mark its cell as a winner cell
      (learning)
        grow and reinforce synapses to previous winner cells
    else
      find the cell with the least segments, mark it as a winner cell
      (learning)
        (optimization) if there are prev winner cells
          add a segment to this winner cell
          grow synapses to previous winner cells
    """
        start = cellsPerColumn * column
        cells = xrange(start, start + cellsPerColumn)

        if matchingSegments is not None:
            bestMatching = max(matchingSegments, key=lambda seg: seg.overlap)
            winnerCell = bestMatching.segment.cell
            if learn:
                TemporalMemory.adaptSegment(connections, bestMatching.segment,
                                            prevActiveCells,
                                            permanenceIncrement,
                                            permanenceDecrement)

                nGrowDesired = maxNewSynapseCount - bestMatching.overlap

                if nGrowDesired > 0:
                    TemporalMemory.growSynapses(connections, random,
                                                bestMatching.segment,
                                                nGrowDesired, prevWinnerCells,
                                                initialPermanence)
        else:
            winnerCell = TemporalMemory.leastUsedCell(cells, connections,
                                                      random)
            if learn:
                nGrowExact = min(maxNewSynapseCount, len(prevWinnerCells))
                if nGrowExact > 0:
                    segment = connections.createSegment(winnerCell)
                    TemporalMemory.growSynapses(connections, random, segment,
                                                nGrowExact, prevWinnerCells,
                                                initialPermanence)

        return cells, winnerCell

    @staticmethod
    def punishPredictedColumn(connections, matchingSegments,
                              predictedSegmentDecrement, prevActiveCells):
        """Punishes the Segments that incorrectly predicted a column to be active.

    @param connections         (Object) Connections for the TM. Gets mutated.
    @param matchingSegments    (iter)   An iterable of SegmentOverlap objects.
                                        Matching segments for this column, and
                                        an overlap for each segment.
    @param predictedSegmentDecrement (float) Amount by which permanences of
                                             synapses are decremented during
                                             learning.
    @param prevActiveCells     (list)   Active cells in `t-1`

    Pseudocode:
    for each matching segment in the column
      weaken active synapses
    """
        if predictedSegmentDecrement > 0.0 and matchingSegments is not None:
            for matching in matchingSegments:
                TemporalMemory.adaptSegment(connections, matching.segment,
                                            prevActiveCells,
                                            -predictedSegmentDecrement, 0.0)

    # ==============================
    # Helper functions
    # ==============================

    @staticmethod
    def leastUsedCell(cells, connections, random):
        """ Gets the cell with the smallest number of segments.
    Break ties randomly.

    @param cells       (list)   Indices of cells
    @param connections (Object) Connections instance for the tm
    @param random      (Object) Random number generator

    @return (int) Cell index
    """
        leastUsedCells = []
        minNumSegments = float("inf")
        for cell in cells:
            numSegments = len(connections.segmentsForCell(cell))

            if numSegments < minNumSegments:
                minNumSegments = numSegments
                leastUsedCells = []

            if numSegments == minNumSegments:
                leastUsedCells.append(cell)

        i = random.getUInt32(len(leastUsedCells))
        return leastUsedCells[i]

    @staticmethod
    def growSynapses(connections, random, segment, nDesiredNewSynapes,
                     prevWinnerCells, initialPermanence):
        """ Creates nDesiredNewSynapes synapses on the segment passed in if
    possible, choosing random cells from the previous winner cells that are
    not already on the segment.

    @param  connections        (Object) Connections instance for the tm
    @param  random             (Object) Random number generator
    @param  segment            (Object) Segment to grow synapses on.
    @param  nDesiredNewSynapes (int)    Desired number of synapses to grow
    @param  prevWinnerCells    (list)   Winner cells in `t-1`
    @param  initialPermanence  (float)  Initial permanence of a new synapse.

    Notes: Overwriting the index that was just used with the last eligible
    value mirrors the C++ implementation, which uses iter_swap on vectors, so
    both implementations produce identical results.
    """
        candidates = list(prevWinnerCells)
        eligibleEnd = len(candidates) - 1

        for synapse in connections.synapsesForSegment(segment):
            presynapticCell = connections.dataForSynapse(
                synapse).presynapticCell
            try:
                index = candidates[:eligibleEnd + 1].index(presynapticCell)
            except ValueError:
                index = -1
            if index != -1:
                candidates[index] = candidates[eligibleEnd]
                eligibleEnd -= 1

        candidatesLength = eligibleEnd + 1
        nActual = min(nDesiredNewSynapes, candidatesLength)

        for _ in range(nActual):
            rand = random.getUInt32(candidatesLength)
            connections.createSynapse(segment, candidates[rand],
                                      initialPermanence)
            candidates[rand] = candidates[candidatesLength - 1]
            candidatesLength -= 1

    @staticmethod
    def adaptSegment(connections, segment, prevActiveCells,
                     permanenceIncrement, permanenceDecrement):
        """ Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.

    @param connections          (Object) Connections instance for the tm
    @param segment              (Object) Segment to adapt
    @param prevActiveCells      (list)   Active cells in `t-1`
    @param permanenceIncrement  (float)  Amount to increment active synapses
    @param permanenceDecrement  (float)  Amount to decrement inactive synapses
    """

        for synapse in connections.synapsesForSegment(segment):
            synapseData = connections.dataForSynapse(synapse)
            permanence = synapseData.permanence

            if binSearch(prevActiveCells, synapseData.presynapticCell) != -1:
                permanence += permanenceIncrement
            else:
                permanence -= permanenceDecrement

            # Keep permanence within min/max bounds
            permanence = max(0.0, min(1.0, permanence))

            if permanence < EPSILON:
                connections.destroySynapse(synapse)
            else:
                connections.updateSynapsePermanence(synapse, permanence)

        if connections.numSynapses(segment) == 0:
            connections.destroySegment(segment)

    def columnForCell(self, cell):
        """ Returns the index of the column that a cell belongs to.

    @param cell (int) Cell index

    @return (int) Column index
    """
        self._validateCell(cell)

        return int(cell / self.cellsPerColumn)

    def cellsForColumn(self, column):
        """ Returns the indices of cells that belong to a column.

    @param column (int) Column index

    @return (list) Cell indices
    """
        self._validateColumn(column)

        start = self.cellsPerColumn * column
        end = start + self.cellsPerColumn
        return range(start, end)

    def numberOfColumns(self):
        """ Returns the number of columns in this layer.

    @return (int) Number of columns
    """
        return reduce(mul, self.columnDimensions, 1)

    def numberOfCells(self):
        """ Returns the number of cells in this layer.

    @return (int) Number of cells
    """
        return self.numberOfColumns() * self.cellsPerColumn

    def mapCellsToColumns(self, cells):
        """ Maps cells to the columns they belong to

    @param cells (set) Cells

    @return (dict) Mapping from columns to their cells in `cells`
    """
        cellsForColumns = defaultdict(set)

        for cell in cells:
            column = self.columnForCell(cell)
            cellsForColumns[column].add(cell)

        return cellsForColumns

    def getActiveCells(self):
        """ Returns the indices of the active cells.

    @return (list) Indices of active cells.
    """
        return self.getCellIndices(self.activeCells)

    def getPredictiveCells(self):
        """ Returns the indices of the predictive cells.

    @return (list) Indices of predictive cells.
    """
        predictiveCells = set()
        for activeSegment in self.activeSegments:
            cell = activeSegment.segment.cell
            if cell not in predictiveCells:
                predictiveCells.add(cell)

        return sorted(predictiveCells)

    def getWinnerCells(self):
        """ Returns the indices of the winner cells.

    @return (list) Indices of winner cells.
    """
        return self.getCellIndices(self.winnerCells)

    def getCellsPerColumn(self):
        """ Returns the number of cells per column.

    @return (int) The number of cells per column.
    """
        return self.cellsPerColumn

    def getColumnDimensions(self):
        """
    Returns the dimensions of the columns in the region.
    @return (tuple) Column dimensions
    """
        return self.columnDimensions

    def getActivationThreshold(self):
        """
    Returns the activation threshold.
    @return (int) The activation threshold.
    """
        return self.activationThreshold

    def setActivationThreshold(self, activationThreshold):
        """
    Sets the activation threshold.
    @param activationThreshold (int) activation threshold.
    """
        self.activationThreshold = activationThreshold

    def getInitialPermanence(self):
        """
    Get the initial permanence.
    @return (float) The initial permanence.
    """
        return self.initialPermanence

    def setInitialPermanence(self, initialPermanence):
        """
    Sets the initial permanence.
    @param initialPermanence (float) The initial permanence.
    """
        self.initialPermanence = initialPermanence

    def getMinThreshold(self):
        """
    Returns the min threshold.
    @return (int) The min threshold.
    """
        return self.minThreshold

    def setMinThreshold(self, minThreshold):
        """
    Sets the min threshold.
    @param minThreshold (int) min threshold.
    """
        self.minThreshold = minThreshold

    def getMaxNewSynapseCount(self):
        """
    Returns the max new synapse count.
    @return (int) The max new synapse count.
    """
        return self.maxNewSynapseCount

    def setMaxNewSynapseCount(self, maxNewSynapseCount):
        """
    Sets the max new synapse count.
    @param maxNewSynapseCount (int) Max new synapse count.
    """
        self.maxNewSynapseCount = maxNewSynapseCount

    def getPermanenceIncrement(self):
        """
    Get the permanence increment.
    @return (float) The permanence increment.
    """
        return self.permanenceIncrement

    def setPermanenceIncrement(self, permanenceIncrement):
        """
    Sets the permanence increment.
    @param permanenceIncrement (float) The permanence increment.
    """
        self.permanenceIncrement = permanenceIncrement

    def getPermanenceDecrement(self):
        """
    Get the permanence decrement.
    @return (float) The permanence decrement.
    """
        return self.permanenceDecrement

    def setPermanenceDecrement(self, permanenceDecrement):
        """
    Sets the permanence decrement.
    @param permanenceDecrement (float) The permanence decrement.
    """
        self.permanenceDecrement = permanenceDecrement

    def getPredictedSegmentDecrement(self):
        """
    Get the predicted segment decrement.
    @return (float) The predicted segment decrement.
    """
        return self.predictedSegmentDecrement

    def setPredictedSegmentDecrement(self, predictedSegmentDecrement):
        """
    Sets the predicted segment decrement.
    @param predictedSegmentDecrement (float) The predicted segment decrement.
    """
        self.predictedSegmentDecrement = predictedSegmentDecrement

    def getConnectedPermanence(self):
        """
    Get the connected permanence.
    @return (float) The connected permanence.
    """
        return self.connectedPermanence

    def setConnectedPermanence(self, connectedPermanence):
        """
    Sets the connected permanence.
    @param connectedPermanence (float) The connected permanence.
    """
        self.connectedPermanence = connectedPermanence

    def write(self, proto):
        """ Writes serialized data to proto object

    @param proto (DynamicStructBuilder) Proto object
    """
        # capnp fails to save a tuple.  Let's force columnDimensions to list.
        proto.columnDimensions = list(self.columnDimensions)
        proto.cellsPerColumn = self.cellsPerColumn
        proto.activationThreshold = self.activationThreshold
        proto.initialPermanence = self.initialPermanence
        proto.connectedPermanence = self.connectedPermanence
        proto.minThreshold = self.minThreshold
        proto.maxNewSynapseCount = self.maxNewSynapseCount
        proto.permanenceIncrement = self.permanenceIncrement
        proto.permanenceDecrement = self.permanenceDecrement
        proto.predictedSegmentDecrement = self.predictedSegmentDecrement

        self.connections.write(proto.connections)
        self._random.write(proto.random)

        proto.activeCells = list(self.activeCells)
        proto.winnerCells = list(self.winnerCells)
        activeSegmentOverlaps = \
            proto.init('activeSegmentOverlaps', len(self.activeSegments))
        for i, active in enumerate(self.activeSegments):
            activeSegmentOverlaps[i].cell = active.segment.cell
            activeSegmentOverlaps[i].segment = active.segment.idx
            activeSegmentOverlaps[i].overlap = active.overlap

        matchingSegmentOverlaps = \
            proto.init('matchingSegmentOverlaps', len(self.matchingSegments))
        for i, matching in enumerate(self.matchingSegments):
            matchingSegmentOverlaps[i].cell = matching.segment.cell
            matchingSegmentOverlaps[i].segment = matching.segment.idx
            matchingSegmentOverlaps[i].overlap = matching.overlap

    @classmethod
    def read(cls, proto):
        """ Reads deserialized data from proto object

    @param proto (DynamicStructBuilder) Proto object

    @return (TemporalMemory) TemporalMemory instance
    """
        tm = object.__new__(cls)

        # capnp fails to save a tuple, so proto.columnDimensions was forced to
        # serialize as a list.  We prefer a tuple, however, because columnDimensions
        # should be regarded as immutable.
        tm.columnDimensions = tuple(proto.columnDimensions)
        tm.cellsPerColumn = int(proto.cellsPerColumn)
        tm.activationThreshold = int(proto.activationThreshold)
        tm.initialPermanence = proto.initialPermanence
        tm.connectedPermanence = proto.connectedPermanence
        tm.minThreshold = int(proto.minThreshold)
        tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
        tm.permanenceIncrement = proto.permanenceIncrement
        tm.permanenceDecrement = proto.permanenceDecrement
        tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

        tm.connections = Connections.read(proto.connections)
        #pylint: disable=W0212
        tm._random = Random()
        tm._random.read(proto.random)
        #pylint: enable=W0212

        tm.activeCells = [int(x) for x in proto.activeCells]
        tm.winnerCells = [int(x) for x in proto.winnerCells]

        tm.activeSegments = []
        tm.matchingSegments = []

        for i in xrange(len(proto.activeSegmentOverlaps)):
            protoSegmentOverlap = proto.activeSegmentOverlaps[i]
            segment = tm.connections.getSegment(protoSegmentOverlap.segment,
                                                protoSegmentOverlap.cell)
            segmentOverlap = SegmentOverlap(segment,
                                            protoSegmentOverlap.overlap)
            tm.activeSegments.append(segmentOverlap)

        for i in xrange(len(proto.matchingSegmentOverlaps)):
            protoSegmentOverlap = proto.matchingSegmentOverlaps[i]
            segment = tm.connections.getSegment(protoSegmentOverlap.segment,
                                                protoSegmentOverlap.cell)
            segmentOverlap = SegmentOverlap(segment,
                                            protoSegmentOverlap.overlap)
            tm.matchingSegments.append(segmentOverlap)

        return tm
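
    # A write/read round trip follows the usual pycapnp pattern (a sketch; the
    # TemporalMemoryProto_capnp schema module and the tempfile import are
    # assumptions, not shown in this example):
    #
    #   proto1 = TemporalMemoryProto_capnp.TemporalMemoryProto.new_message()
    #   tm1.write(proto1)
    #   with tempfile.TemporaryFile() as f:
    #       proto1.write(f)
    #       f.seek(0)
    #       proto2 = TemporalMemoryProto_capnp.TemporalMemoryProto.read(f)
    #   tm2 = TemporalMemory.read(proto2)
    #   assert tm1 == tm2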

    def __eq__(self, other):
        """ Equality operator for TemporalMemory instances.
    Checks if two instances are functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
        if self.columnDimensions != other.columnDimensions:
            return False
        if self.cellsPerColumn != other.cellsPerColumn:
            return False
        if self.activationThreshold != other.activationThreshold:
            return False
        if abs(self.initialPermanence - other.initialPermanence) > EPSILON:
            return False
        if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON:
            return False
        if self.minThreshold != other.minThreshold:
            return False
        if self.maxNewSynapseCount != other.maxNewSynapseCount:
            return False
        if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON:
            return False
        if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON:
            return False
        if abs(self.predictedSegmentDecrement -
               other.predictedSegmentDecrement) > EPSILON:
            return False

        if self.connections != other.connections:
            return False
        if self.activeCells != other.activeCells:
            return False
        if self.winnerCells != other.winnerCells:
            return False

        if self.matchingSegments != other.matchingSegments:
            return False
        if self.activeSegments != other.activeSegments:
            return False

        return True

    def __ne__(self, other):
        """ Non-equality operator for TemporalMemory instances.
    Checks if two instances are not functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
        return not self.__eq__(other)

    def _validateColumn(self, column):
        """ Raises an error if column index is invalid.

    @param column (int) Column index
    """
        if column >= self.numberOfColumns() or column < 0:
            raise IndexError("Invalid column")

    def _validateCell(self, cell):
        """ Raises an error if cell index is invalid.

    @param cell (int) Cell index
    """
        if cell >= self.numberOfCells() or cell < 0:
            raise IndexError("Invalid cell")

    @classmethod
    def getCellIndices(cls, cells):
        """ Returns the indices of the cells passed in.

    @param cells (list) cells to find the indices of
    """
        return [cls.getCellIndex(c) for c in cells]

    @staticmethod
    def getCellIndex(cell):
        """ Returns the index of the cell

    @param cell (int) cell to find the index of
    """
        return cell
Example #7
0
class TemporalMemory(object):
    """ Class implementing the Temporal Memory algorithm. """
    def __init__(self,
                 columnDimensions=(2048, ),
                 cellsPerColumn=32,
                 activationThreshold=13,
                 initialPermanence=0.21,
                 connectedPermanence=0.50,
                 minThreshold=10,
                 maxNewSynapseCount=20,
                 permanenceIncrement=0.10,
                 permanenceDecrement=0.10,
                 predictedSegmentDecrement=0.0,
                 maxSegmentsPerCell=255,
                 maxSynapsesPerSegment=255,
                 seed=42,
                 **kwargs):
        """
    @param columnDimensions          (list)  Dimensions of the column space
    @param cellsPerColumn            (int)   Number of cells per column
    @param activationThreshold       (int)   If the number of active connected
                                             synapses on a segment is at least
                                             this threshold, the segment is said
                                             to be active.
    @param initialPermanence         (float) Initial permanence of a new synapse
    @param connectedPermanence       (float) If the permanence value for a
                                             synapse is greater than this value,
                                             it is said to be connected.
    @param minThreshold              (int)   If the number of synapses active on
                                             a segment is at least this
                                             threshold, it is selected as the
                                             best matching cell in a bursting
                                             column
    @param maxNewSynapseCount        (int)   The maximum number of synapses
                                             added to a segment during learning
    @param permanenceIncrement       (float) Amount by which permanences of
                                             synapses are incremented during
                                             learning.
    @param permanenceDecrement       (float) Amount by which permanences of
                                             synapses are decremented during
                                             learning.
    @param predictedSegmentDecrement (float) Amount by which active permanences
                                             of synapses of previously predicted
                                             but inactive segments are
                                             decremented.
    @param seed                      (int)   Seed for the random number
                                             generator
    Notes:

    predictedSegmentDecrement: A good value is just a bit larger than
    (the column-level sparsity * permanenceIncrement). So, if column-level
    sparsity is 2% and permanenceIncrement is 0.01, this parameter should be
    something like 4% * 0.01 = 0.0004.
    """
        # Error checking
        if not len(columnDimensions):
            raise ValueError(
                "Number of column dimensions must be greater than 0")

        if cellsPerColumn <= 0:
            raise ValueError(
                "Number of cells per column must be greater than 0")

        # TODO: Validate all parameters (and add validation tests)

        # Save member variables
        self.columnDimensions = columnDimensions
        self.cellsPerColumn = cellsPerColumn
        self.activationThreshold = activationThreshold
        self.initialPermanence = initialPermanence
        self.connectedPermanence = connectedPermanence
        self.minThreshold = minThreshold
        self.maxNewSynapseCount = maxNewSynapseCount
        self.permanenceIncrement = permanenceIncrement
        self.permanenceDecrement = permanenceDecrement
        self.predictedSegmentDecrement = predictedSegmentDecrement
        # Initialize member variables
        self.connections = Connections(
            self.numberOfCells(),
            maxSegmentsPerCell=maxSegmentsPerCell,
            maxSynapsesPerSegment=maxSynapsesPerSegment)
        self._random = Random(seed)

        self.activeCells = []
        self.winnerCells = []
        self.activeSegments = []
        self.matchingSegments = []
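
    # Following the predictedSegmentDecrement note in the docstring above
    # (the 2% sparsity and 0.01 increment are illustrative, not defaults):
    #
    #   sparsity = 0.02
    #   increment = 0.01
    #   tm = TemporalMemory(permanenceIncrement=increment,
    #                       predictedSegmentDecrement=2 * sparsity * increment)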

    # ==============================
    # Main functions
    # ==============================

    def compute(self, activeColumns, learn=True):
        """ Feeds input record through TM, performing inference and learning.

    @param activeColumns (set)  Indices of active columns
    @param learn         (bool) Whether or not learning is enabled

    Updates member variables:
      - `activeCells`     (list)
      - `winnerCells`     (list)
      - `activeSegments`  (list)
      - `matchingSegments`(list)

    Pseudocode:
    for each column
      if column is active and has active distal dendrite segments
        call activatePredictedColumn
      if column is active and doesn't have active distal dendrite segments
        call burstColumn
      if column is inactive and has matching distal dendrite segments
        call punishPredictedColumn
    for each distal dendrite segment with activity >= activationThreshold
      mark the segment as active
    for each distal dendrite segment with unconnected activity >= minThreshold
      mark the segment as matching
    """
        prevActiveCells = self.activeCells
        prevWinnerCells = self.winnerCells

        activeColumns = sorted(activeColumns)

        self.activeCells = []
        self.winnerCells = []

        for excitedColumn in excitedColumnsGenerator(activeColumns,
                                                     self.activeSegments,
                                                     self.matchingSegments,
                                                     self.cellsPerColumn,
                                                     self.connections):
            if excitedColumn["isActiveColumn"]:
                if excitedColumn["activeSegmentsCount"] != 0:
                    cellsToAdd = TemporalMemory.activatePredictedColumn(
                        self.connections, excitedColumn, learn,
                        self.permanenceDecrement, self.permanenceIncrement,
                        prevActiveCells)

                    self.activeCells += cellsToAdd
                    self.winnerCells += cellsToAdd
                else:
                    (cellsToAdd, winnerCell) = TemporalMemory.burstColumn(
                        self.cellsPerColumn, self.connections, excitedColumn,
                        learn, self.initialPermanence, self.maxNewSynapseCount,
                        self.permanenceDecrement, self.permanenceIncrement,
                        prevActiveCells, prevWinnerCells, self._random)

                    self.activeCells += cellsToAdd
                    self.winnerCells.append(winnerCell)
            else:
                if learn:
                    TemporalMemory.punishPredictedColumn(
                        self.connections, excitedColumn,
                        self.predictedSegmentDecrement, prevActiveCells)

        (activeSegments, matchingSegments) = self.connections.computeActivity(
            self.activeCells, self.connectedPermanence,
            self.activationThreshold, 0.0, self.minThreshold)

        self.activeSegments = activeSegments
        self.matchingSegments = matchingSegments

    def reset(self):
        """ Indicates the start of a new sequence and resets the sequence
        state of the TM. """
        self.activeCells = []
        self.winnerCells = []
        self.activeSegments = []
        self.matchingSegments = []

    @staticmethod
    def activatePredictedColumn(connections, excitedColumn, learn,
                                permanenceDecrement, permanenceIncrement,
                                prevActiveCells):
        """ Determines which cells in a predicted column should be added to
    winner cells list and calls adaptSegment on the segments that correctly
    predicted this column.

    @param connections     (Object) Connections instance for the tm
    @param excitedColumn   (dict)   Dict generated by excitedColumnsGenerator
    @param learn           (bool)   Determines if permanences are adjusted
    @param permanenceDecrement (float) Amount by which permanences of synapses
                                       are decremented during learning.
    @param permanenceIncrement (float) Amount by which permanences of synapses
                                       are incremented during learning.
    @param prevActiveCells (list)   Active cells in `t-1`

    @return cellsToAdd (list) A list of predicted cells that will be added to
                              active cells and winner cells.
                              
    Pseudocode:
    for each cell in the column that has an active distal dendrite segment
      mark the cell as active
      mark the cell as a winner cell
      (learning) for each active distal dendrite segment
        strengthen active synapses
        weaken inactive synapses
    """

        cellsToAdd = []
        cell = None
        for active in excitedColumn["activeSegments"]:
            segmentCell = connections.cellForSegment(active)
            if segmentCell != cell:
                cell = segmentCell
                cellsToAdd.append(cell)

            if learn:
                TemporalMemory.adaptSegment(connections, prevActiveCells,
                                            permanenceIncrement,
                                            permanenceDecrement, active)

        return cellsToAdd

    @staticmethod
    def burstColumn(cellsPerColumn, connections, excitedColumn, learn,
                    initialPermanence, maxNewSynapseCount, permanenceDecrement,
                    permanenceIncrement, prevActiveCells, prevWinnerCells,
                    random):
        """ Activates all of the cells in an unpredicted active column,
    chooses a winner cell, and, if learning is turned on, either adapts or
    creates a segment. growSynapses is invoked on this segment.

    @param cellsPerColumn      (int)    Number of cells per column
    @param connections         (Object) Connections instance for the tm
    @param excitedColumn       (dict)   Excited Column instance from
                                        excitedColumnsGenerator
    @param learn               (bool)   Whether or not learning is enabled
    @param initialPermanence   (float)  Initial permanence of a new synapse.
    @param maxNewSynapseCount  (int)    The maximum number of synapses added to
                                        a segment during learning
    @param permanenceDecrement (float)  Amount by which permanences of synapses
                                        are decremented during learning
    @param permanenceIncrement (float)  Amount by which permanences of synapses
                                        are incremented during learning
    @param prevActiveCells     (list)   Active cells in `t-1`
    @param prevWinnerCells     (list)   Winner cells in `t-1`
    @param random              (object) Random number generator

    @return (tuple) Contains:
                      `cells`         (list),
                      `bestCell`      (int),

    Pseudocode:
    mark all cells as active
    if there are any matching distal dendrite segments
      find the most active matching segment
      mark its cell as a winner cell
      (learning)
        grow and reinforce synapses to previous winner cells
    else
      find the cell with the least segments, mark it as a winner cell
      (learning)
        (optimization) if there are prev winner cells
          add a segment to this winner cell
          grow synapses to previous winner cells
    """
        start = cellsPerColumn * excitedColumn["column"]
        cells = range(start, start + cellsPerColumn)

        if excitedColumn["matchingSegmentsCount"] != 0:
            (bestSegment, overlap) = TemporalMemory.bestMatchingSegment(
                connections, excitedColumn, prevActiveCells)
            bestCell = connections.cellForSegment(bestSegment)
            if learn:
                TemporalMemory.adaptSegment(connections, prevActiveCells,
                                            permanenceIncrement,
                                            permanenceDecrement, bestSegment)

                nGrowDesired = maxNewSynapseCount - overlap

                if nGrowDesired > 0:
                    TemporalMemory.growSynapses(connections, initialPermanence,
                                                nGrowDesired, prevWinnerCells,
                                                random, bestSegment)
        else:
            bestCell = TemporalMemory.leastUsedCell(cells, connections, random)
            if learn:
                nGrowExact = min(maxNewSynapseCount, len(prevWinnerCells))
                if nGrowExact > 0:
                    bestSegment = connections.createSegment(bestCell)
                    TemporalMemory.growSynapses(connections, initialPermanence,
                                                nGrowExact, prevWinnerCells,
                                                random, bestSegment)

        return cells, bestCell
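
    # Sketch of the two branches above for a column of 4 cells starting at
    # cell 8 (illustrative numbers): if the column has matching segments, the
    # cell owning the best matching segment becomes bestCell and that segment
    # is reinforced (and grown, when learning); otherwise the least-used of
    # cells 8..11 becomes bestCell and, if there were previous winner cells,
    # receives a new segment with synapses grown to them. Either way all of
    # cells 8..11 are returned in `cells`.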

    @staticmethod
    def punishPredictedColumn(connections, excitedColumn,
                              predictedSegmentDecrement, prevActiveCells):
        """Punishes the Segments that incorrectly predicted a column to be active.

    @param connections         (Object) Connections instance for the tm
    @param excitedColumn       (dict)   Excited Column instance from
                                        excitedColumnsGenerator
    @param predictedSegmentDecrement (float) Amount by which permanences of
                                             active synapses are decremented
                                             during punishment.
    @param prevActiveCells     (list)   Active cells in `t-1`

    Pseudocode:
    for each matching segment in the column
      weaken active synapses
    """
        if predictedSegmentDecrement > 0.0:
            for segment in excitedColumn["matchingSegments"]:
                TemporalMemory.adaptSegment(connections, prevActiveCells,
                                            -predictedSegmentDecrement, 0.0,
                                            segment)

    # ==============================
    # Helper functions
    # ==============================

    @staticmethod
    def bestMatchingSegment(connections, excitedColumn, prevActiveCells):
        """Gets the segment on a cell with the largest number of active synapses.
    Returns an int representing the segment and the number of synapses
    corresponding to it.

    @param connections      (Object) Connections instance for the tm
    @param excitedColumn    (dict)   Excited Column instance from
                                     excitedColumnsGenerator
    @param prevActiveCells  (list)   Active cells in `t-1`

    @return (tuple) Contains:
                      `bestSegment`                 (int),
                      `bestNumActiveSynapses`       (int)
    """
        maxSynapses = 0
        bestSegment = None
        bestNumActiveSynapses = None

        for segment in excitedColumn["matchingSegments"]:
            numActiveSynapses = 0

            for syn in connections.synapsesForSegment(segment):
                synapseData = connections.dataForSynapse(syn)
                if binSearch(prevActiveCells,
                             synapseData.presynapticCell) != -1:
                    numActiveSynapses += 1

            if numActiveSynapses >= maxSynapses:
                maxSynapses = numActiveSynapses
                bestSegment = segment
                bestNumActiveSynapses = numActiveSynapses

        return bestSegment, bestNumActiveSynapses

    @staticmethod
    def leastUsedCell(cells, connections, random):
        """ Gets the cell with the smallest number of segments.
    Break ties randomly.

    @param cells       (list)   Indices of cells
    @param connections (Object) Connections instance for the tm
    @param random      (object) Random number generator

    @return (int) Cell index
    """
        leastUsedCells = []
        minNumSegments = float("inf")
        for cell in cells:
            numSegments = len(connections.segmentsForCell(cell))

            if numSegments < minNumSegments:
                minNumSegments = numSegments
                leastUsedCells = []

            if numSegments == minNumSegments:
                leastUsedCells.append(cell)

        i = random.getUInt32(len(leastUsedCells))
        return leastUsedCells[i]
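
    # Example of the tie-break above: if cells (0, 1, 2) carry (2, 0, 0)
    # segments respectively, minNumSegments ends up 0, leastUsedCells ends up
    # [1, 2], and random.getUInt32(2) picks one of the two at random.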

    @staticmethod
    def growSynapses(connections, initialPermanence, nDesiredNewSynapes,
                     prevWinnerCells, random, segment):
        """ Creates nDesiredNewSynapes synapses on the segment passed in if
    possible, choosing random cells from the previous winner cells that are
    not already on the segment.

    @param  connections        (Object) Connections instance for the tm
    @param  initialPermanence  (float)  Initial permanence of a new synapse.
    @param  nDesiredNewSynapes (int)    Desired number of synapses to grow
    @param  prevWinnerCells    (list)   Winner cells in `t-1`
    @param  random             (object) Tm object used to generate random
                                        numbers
    @param  segment            (int)    Segment to grow synapses on.

    Notes: The process of writing the last value into the index in the array
    that was most recently changed ensures the same results that we get in
    the C++ implementation, which uses iter_swap with vectors.
    """
        candidates = list(prevWinnerCells)
        eligibleEnd = len(candidates) - 1

        for synapse in connections.synapsesForSegment(segment):
            presynapticCell = connections.dataForSynapse(
                synapse).presynapticCell
            index = binSearch(candidates, presynapticCell)
            if index != -1:
                candidates[index] = candidates[eligibleEnd]
                eligibleEnd -= 1

        candidatesLength = eligibleEnd + 1
        nActual = min(nDesiredNewSynapes, candidatesLength)

        for _ in range(nActual):
            rand = random.getUInt32(candidatesLength)
            connections.createSynapse(segment, candidates[rand],
                                      initialPermanence)
            candidates[rand] = candidates[candidatesLength - 1]
            candidatesLength -= 1
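
    # Trace of the swap-to-end sampling above (illustrative): with
    # candidates = [3, 7, 9] and nActual = 2, drawing rand = 0 creates a
    # synapse to cell 3, copies 9 into slot 0 (candidates -> [9, 7, 9]) and
    # shrinks candidatesLength to 2, so cell 3 cannot be drawn again; the
    # second draw then chooses between 9 and 7. This mirrors the C++
    # iter_swap behaviour mentioned in the Notes above.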

    @staticmethod
    def adaptSegment(connections, prevActiveCells, permanenceIncrement,
                     permanenceDecrement, segment):
        """ Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.

    @param  connections        (Object) Connections instance for the tm
    @param prevActiveCells      (list)   Active cells in `t-1`
    @param permanenceIncrement  (float)  Amount to increment active synapses
    @param permanenceDecrement  (float)  Amount to decrement inactive synapses
    @param segment              (int)    Segment to adapt
    """

        # Need to copy synapses for segment set below because it will be modified
        # during iteration by `destroySynapse`
        for synapse in set(connections.synapsesForSegment(segment)):
            synapseData = connections.dataForSynapse(synapse)
            permanence = synapseData.permanence

            if binSearch(prevActiveCells, synapseData.presynapticCell) != -1:
                permanence += permanenceIncrement
            else:
                permanence -= permanenceDecrement

            # Keep permanence within min/max bounds
            permanence = max(0.0, min(1.0, permanence))

            if permanence < EPSILON:
                connections.destroySynapse(synapse)
            else:
                connections.updateSynapsePermanence(synapse, permanence)

        # awaiting change to connections.py to facilitate deleting segments
        # and synapses like the c++ implementation.
        # if (len(self.connections.synapsesForSegment(segment)) == 0):
        #   self.connections.destroySegment(segment)

    def columnForCell(self, cell):
        """ Returns the index of the column that a cell belongs to.

    @param cell (int) Cell index

    @return (int) Column index
    """
        self._validateCell(cell)

        return int(cell / self.cellsPerColumn)

    def cellsForColumn(self, column):
        """ Returns the indices of cells that belong to a column.

    @param column (int) Column index

    @return (list) Cell indices
    """
        self._validateColumn(column)

        start = self.cellsPerColumn * column
        end = start + self.cellsPerColumn
        return range(start, end)

    def numberOfColumns(self):
        """ Returns the number of columns in this layer.

    @return (int) Number of columns
    """
        return reduce(mul, self.columnDimensions, 1)

    def numberOfCells(self):
        """ Returns the number of cells in this layer.

    @return (int) Number of cells
    """
        return self.numberOfColumns() * self.cellsPerColumn

    def mapCellsToColumns(self, cells):
        """ Maps cells to the columns they belong to

    @param cells (set) Cells

    @return (dict) Mapping from columns to their cells in `cells`
    """
        cellsForColumns = defaultdict(set)

        for cell in cells:
            column = self.columnForCell(cell)
            cellsForColumns[column].add(cell)

        return cellsForColumns

    def getActiveCells(self):
        """ Returns the indices of the active cells.

    @return (list) Indices of active cells.
    """
        return self.getCellIndices(self.activeCells)

    def getPredictiveCells(self):
        """ Returns the indices of the predictive cells.

    @return (list) Indices of predictive cells.
    """
        predictiveCells = set()
        for activeSegment in self.activeSegments:
            cell = self.connections.cellForSegment(activeSegment)
            if cell not in predictiveCells:
                predictiveCells.add(cell)

        return sorted(predictiveCells)

    def getWinnerCells(self):
        """ Returns the indices of the winner cells.

    @return (list) Indices of winner cells.
    """
        return self.getCellIndices(self.winnerCells)

    def getCellsPerColumn(self):
        """ Returns the number of cells per column.

    @return (int) The number of cells per column.
    """
        return self.cellsPerColumn

    def write(self, proto):
        """ Writes serialized data to proto object

    @param proto (DynamicStructBuilder) Proto object
    """
        # capnp fails to save a tuple, so force columnDimensions to a list.
        proto.columnDimensions = list(self.columnDimensions)
        proto.cellsPerColumn = self.cellsPerColumn
        proto.activationThreshold = self.activationThreshold
        proto.initialPermanence = self.initialPermanence
        proto.connectedPermanence = self.connectedPermanence
        proto.minThreshold = self.minThreshold
        proto.maxNewSynapseCount = self.maxNewSynapseCount
        proto.permanenceIncrement = self.permanenceIncrement
        proto.permanenceDecrement = self.permanenceDecrement
        proto.predictedSegmentDecrement = self.predictedSegmentDecrement

        self.connections.write(proto.connections)
        self._random.write(proto.random)

        proto.activeCells = list(self.activeCells)
        proto.activeSegments = list(self.activeSegments)
        proto.winnerCells = list(self.winnerCells)
        proto.matchingSegments = list(self.matchingSegments)

    @classmethod
    def read(cls, proto):
        """ Reads deserialized data from proto object

    @param proto (DynamicStructBuilder) Proto object

    @return (TemporalMemory) TemporalMemory instance
    """
        tm = object.__new__(cls)

        tm.columnDimensions = list(proto.columnDimensions)
        tm.cellsPerColumn = int(proto.cellsPerColumn)
        tm.activationThreshold = int(proto.activationThreshold)
        tm.initialPermanence = proto.initialPermanence
        tm.connectedPermanence = proto.connectedPermanence
        tm.minThreshold = int(proto.minThreshold)
        tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
        tm.permanenceIncrement = proto.permanenceIncrement
        tm.permanenceDecrement = proto.permanenceDecrement
        tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

        tm.connections = Connections.read(proto.connections)
        #pylint: disable=W0212
        tm._random = Random()
        tm._random.read(proto.random)
        #pylint: enable=W0212

        tm.activeCells = [int(x) for x in proto.activeCells]
        tm.activeSegments = [int(x) for x in proto.activeSegments]
        tm.winnerCells = [int(x) for x in proto.winnerCells]
        tm.matchingSegments = [int(x) for x in proto.matchingSegments]

        return tm

    def __eq__(self, other):
        """ Equality operator for TemporalMemory instances.
    Checks if two instances are functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
        if self.columnDimensions != other.columnDimensions:
            return False
        if self.cellsPerColumn != other.cellsPerColumn:
            return False
        if self.activationThreshold != other.activationThreshold:
            return False
        if abs(self.initialPermanence - other.initialPermanence) > EPSILON:
            return False
        if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON:
            return False
        if self.minThreshold != other.minThreshold:
            return False
        if self.maxNewSynapseCount != other.maxNewSynapseCount:
            return False
        if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON:
            return False
        if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON:
            return False
        if abs(self.predictedSegmentDecrement -
               other.predictedSegmentDecrement) > EPSILON:
            return False

        if self.connections != other.connections:
            return False
        if self.activeCells != other.activeCells:
            return False
        if self.winnerCells != other.winnerCells:
            return False
        if self.matchingSegments != other.matchingSegments:
            return False
        if self.activeSegments != other.activeSegments:
            return False

        return True

    def __ne__(self, other):
        """ Non-equality operator for TemporalMemory instances.
    Checks if two instances are not functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
        return not self.__eq__(other)

    def _validateColumn(self, column):
        """ Raises an error if column index is invalid.

    @param column (int) Column index
    """
        if column >= self.numberOfColumns() or column < 0:
            raise IndexError("Invalid column")

    def _validateCell(self, cell):
        """ Raises an error if cell index is invalid.

    @param cell (int) Cell index
    """
        if cell >= self.numberOfCells() or cell < 0:
            raise IndexError("Invalid cell")

    @classmethod
    def getCellIndices(cls, cells):
        """ Returns the indices of the cells passed in.

    @param cells (list) cells to find the indices of
    """
        return [cls.getCellIndex(c) for c in cells]

    @staticmethod
    def getCellIndex(cell):
        """ Returns the index of the cell

    @param cell (int) cell to find the index of
    """
        return cell
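

# A minimal end-to-end sketch of the compute() loop described in the class
# above. It is illustrative only: the column SDRs are made up, and it assumes
# the module-level helpers this class relies on (Connections, Random,
# binSearch, excitedColumnsGenerator, EPSILON) are importable exactly as in
# the example above.
if __name__ == "__main__":
    tm = TemporalMemory(columnDimensions=(2048,), cellsPerColumn=32)

    # Three made-up column SDRs, each ~2% of the 2048 columns.
    sequence = [set(range(0, 40)), set(range(40, 80)), set(range(80, 120))]

    # Repeat the sequence so distal segments can form and strengthen; after a
    # few passes the later elements become predictable from the earlier ones.
    for _ in range(10):
        for activeColumns in sequence:
            tm.compute(activeColumns, learn=True)
        tm.reset()  # sequence boundary: clear active/winner/segment state

    # Replay without learning and inspect which columns are predicted next.
    for activeColumns in sequence:
        tm.compute(activeColumns, learn=False)
        predictedColumns = sorted(tm.mapCellsToColumns(
            set(tm.getPredictiveCells())))
        print("after columns %d..%d, predicted columns: %s"
              % (min(activeColumns), max(activeColumns), predictedColumns))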