コード例 #1
0
  def testNupicRandomPickling(self):
    """Test pickling / unpickling of NuPIC randomness."""

    # Round-trip a freshly seeded generator through pickle and confirm the
    # restored copy produces the same stream as the original.
    rng = Random(42)
    serialized = pickle.dumps(rng)

    expected = [rng.getUInt32() for _ in range(10)]
    rng = pickle.loads(serialized)
    actual = [rng.getUInt32() for _ in range(10)]

    self.assertEqual(expected, actual,
                     "Simple NuPIC random pickle/unpickle failed.")

    # Serialize again *after* some numbers have been drawn, to verify the
    # pickle captures the advanced internal state and not just the seed.
    serialized = pickle.dumps(rng)

    laterExpected = [rng.getUInt32() for _ in range(10)]
    rng = pickle.loads(serialized)
    laterActual = [rng.getUInt32() for _ in range(10)]

    self.assertEqual(
        laterExpected, laterActual,
        "NuPIC random pickle/unpickle didn't work for saving later state.")

    self.assertNotEqual(expected, laterExpected,
                        "NuPIC random gave the same result twice?!?")
コード例 #2
0
ファイル: nupic_random_test.py プロジェクト: Aaron-Gao/nupic
  def testNupicRandomPickling(self):
    """Test pickling / unpickling of NuPIC randomness."""

    # Simple test: make sure that dumping / loading works...
    r = Random(42)
    pickledR = pickle.dumps(r)

    # xrange replaced with range for Python 3 compatibility (matches the
    # other tests in this file).
    test1 = [r.getUInt32() for _ in range(10)]
    r = pickle.loads(pickledR)
    test2 = [r.getUInt32() for _ in range(10)]

    self.assertEqual(test1, test2,
                     "Simple NuPIC random pickle/unpickle failed.")

    # A little tricker: dump / load _after_ some numbers have been generated
    # (in the first test).  Things should still work...
    # ...the idea of this test is to make sure that the pickle code isn't just
    # saving the initial seed...
    pickledR = pickle.dumps(r)

    test3 = [r.getUInt32() for _ in range(10)]
    r = pickle.loads(pickledR)
    test4 = [r.getUInt32() for _ in range(10)]

    self.assertEqual(
        test3, test4,
        "NuPIC random pickle/unpickle didn't work for saving later state.")

    self.assertNotEqual(test1, test3,
                        "NuPIC random gave the same result twice?!?")
コード例 #3
0
 def testEquals(self):
   """Two generators created from the same seed must stay identical."""
   first = Random(42)
   firstReal = first.getReal64()
   firstInt = first.getUInt32()
   second = Random(42)
   secondReal = second.getReal64()
   secondInt = second.getUInt32()
   # Identical seeds must yield identical draws and equal generator state.
   self.assertEqual(firstReal, secondReal)
   self.assertEqual(first, second)
   self.assertEqual(firstInt, secondInt)
コード例 #4
0
    def testCapnpSerialization(self):
        """Test capnp serialization of NuPIC randomness."""

        # Serialize a freshly seeded generator to capnp bytes, then confirm
        # that a deserialized copy reproduces the original's output stream.
        rng = Random(99)

        builder = RandomProto.new_message()
        rng.write(builder)
        reader = RandomProto.from_bytes(builder.to_bytes())

        expected = [rng.getUInt32() for _ in range(10)]
        rng = Random(1)
        rng.read(reader)
        self.assertEqual(rng.getSeed(), 99)
        actual = [rng.getUInt32() for _ in range(10)]

        self.assertEqual(
            expected, actual,
            "Simple NuPIC random capnp serialization check failed.")

        # Serialize again *after* drawing some numbers, to prove the proto
        # captures the advanced state rather than just the initial seed.
        builder = RandomProto.new_message()
        rng.write(builder)
        reader = RandomProto.from_bytes(builder.to_bytes())

        laterExpected = [rng.getUInt32() for _ in range(10)]
        rng = Random()
        rng.read(reader)
        self.assertEqual(rng.getSeed(), 99)
        laterActual = [rng.getUInt32() for _ in range(10)]

        self.assertEqual(
            laterExpected, laterActual,
            "NuPIC random capnp serialization check didn't work for saving later "
            "state.")

        self.assertNotEqual(
            expected, laterExpected,
            "NuPIC random serialization test gave the same result twice?!?")
コード例 #5
0
ファイル: coordinate.py プロジェクト: yangzxstar/nupic
    def _bitForCoordinate(cls, coordinate, n):
        """
    Maps the coordinate to a bit in the SDR.

    @param coordinate (numpy.array) Coordinate
    @param n (int) The number of available bits in the SDR
    @return (int) The index to a bit in the SDR
    """
        # Hash the coordinate into a deterministic seed, then draw a single
        # bounded random index from a generator built on that seed.
        return Random(cls._hashCoordinate(coordinate)).getUInt32(n)
コード例 #6
0
ファイル: coordinate.py プロジェクト: alfonsokim/nupic
  def _bitForCoordinate(cls, coordinate, n):
    """
    Maps the coordinate to a bit in the SDR.

    @param coordinate (numpy.array) Coordinate
    @param n (int) The number of available bits in the SDR
    @return (int) The index to a bit in the SDR
    """
    # A coordinate deterministically seeds its own generator, so the same
    # coordinate always maps to the same bit index in [0, n).
    coordinateSeed = cls._hashCoordinate(coordinate)
    generator = Random(coordinateSeed)
    return generator.getUInt32(n)
コード例 #7
0
  def testCapnpSerialization(self):
    """Test capnp serialization of NuPIC randomness."""

    # Simple test: make sure that dumping / loading works...
    r = Random(99)

    builderProto = RandomProto.new_message()
    r.write(builderProto)
    readerProto = RandomProto.from_bytes(builderProto.to_bytes())

    # xrange replaced with range for Python 3 compatibility; stray C-style
    # semicolons removed.
    test1 = [r.getUInt32() for _ in range(10)]
    r = Random(1)
    r.read(readerProto)
    self.assertEqual(r.getSeed(), 99)
    test2 = [r.getUInt32() for _ in range(10)]

    self.assertEqual(test1, test2,
                     "Simple NuPIC random capnp serialization check failed.")

    # A little tricker: dump / load _after_ some numbers have been generated
    # (in the first test).  Things should still work...
    # ...the idea of this test is to make sure that the pickle code isn't just
    # saving the initial seed...
    builderProto = RandomProto.new_message()
    r.write(builderProto)
    readerProto = RandomProto.from_bytes(builderProto.to_bytes())

    test3 = [r.getUInt32() for _ in range(10)]
    r = Random()
    r.read(readerProto)
    self.assertEqual(r.getSeed(), 99)
    test4 = [r.getUInt32() for _ in range(10)]

    self.assertEqual(
      test3, test4,
      "NuPIC random capnp serialization check didn't work for saving later "
      "state.")

    self.assertNotEqual(
      test1, test3,
      "NuPIC random serialization test gave the same result twice?!?")
コード例 #8
0
    def testSerialization(self):
        """Test serialization of NuPIC randomness."""

        path = "RandomSerialization.stream"

        # Round-trip a freshly seeded generator through the stream file and
        # confirm the reloaded copy reproduces the original's output.
        rng = Random(99)

        rng.saveToFile(path)

        expected = [rng.getUInt32() for _ in range(10)]
        rng = Random(1)
        rng.loadFromFile(path)
        self.assertEqual(rng.getSeed(), 99)
        actual = [rng.getUInt32() for _ in range(10)]

        self.assertEqual(expected, actual,
                         "Simple NuPIC random serialization check failed.")

        # Save again *after* some numbers have been drawn, to prove the file
        # captures the advanced internal state and not just the initial seed.
        rng.saveToFile(path)

        laterExpected = [rng.getUInt32() for _ in range(10)]
        rng = Random()
        rng.loadFromFile(path)
        self.assertEqual(rng.getSeed(), 99)
        laterActual = [rng.getUInt32() for _ in range(10)]

        self.assertEqual(
            laterExpected, laterActual,
            "NuPIC random serialization check didn't work for saving later state."
        )

        self.assertNotEqual(
            expected, laterExpected,
            "NuPIC random serialization test gave the same result twice?!?")
コード例 #9
0
  def testSerialization(self):
    """Test serialization of NuPIC randomness."""

    path = "RandomSerialization.stream"

    # Simple test: make sure that dumping / loading works...
    r = Random(99)

    r.saveToFile(path)

    # Stray semicolons and literal tab indentation removed; whitespace
    # normalized to the file's space-based style.
    test1 = [r.getUInt32() for _ in range(10)]
    r = Random(1)
    r.loadFromFile(path)
    self.assertEqual(r.getSeed(), 99)
    test2 = [r.getUInt32() for _ in range(10)]

    self.assertEqual(test1, test2,
                     "Simple NuPIC random serialization check failed.")

    # A little tricker: dump / load _after_ some numbers have been generated
    # (in the first test).  Things should still work...
    # ...the idea of this test is to make sure that the pickle code isn't just
    # saving the initial seed...
    r.saveToFile(path)

    test3 = [r.getUInt32() for _ in range(10)]
    r = Random()
    r.loadFromFile(path)
    self.assertEqual(r.getSeed(), 99)
    test4 = [r.getUInt32() for _ in range(10)]

    self.assertEqual(
        test3, test4,
        "NuPIC random serialization check didn't work for saving later state.")

    self.assertNotEqual(
        test1, test3,
        "NuPIC random serialization test gave the same result twice?!?")
コード例 #10
0
class PatternMachine(object):
    """
  Base pattern machine class.

  Generates and serves a fixed dictionary of random sparse patterns
  (sets of on-bit indices) over an `n`-bit space.
  """
    def __init__(self, n, w, num=100, seed=42):
        """
    @param n    (int)      Number of available bits in pattern
    @param w    (int/list) Number of on bits in pattern
                           If list, each pattern will have a `w` randomly
                           selected from the list.
    @param num  (int)      Number of available patterns
    @param seed (int)      Seed for the random number generator
    """
        # Save member variables
        self._n = n
        self._w = w
        self._num = num

        # Initialize member variables
        self._random = Random(seed)
        self._patterns = dict()

        self._generate()

    def get(self, number):
        """
    Return a pattern for a number.

    @param number (int) Number of pattern

    @return (set) Indices of on bits

    @raises IndexError if `number` has no generated pattern
    """
        if number not in self._patterns:
            raise IndexError("Invalid number")

        return self._patterns[number]

    def addNoise(self, bits, amount):
        """
    Add noise to pattern.

    @param bits   (set)   Indices of on bits
    @param amount (float) Probability of switching an on bit with a random bit

    @return (set) Indices of on bits in noisy pattern
    """
        newBits = set()

        for bit in bits:
            # Each on bit is independently replaced with a random bit with
            # probability `amount`.
            if self._random.getReal64() < amount:
                newBits.add(self._random.getUInt32(self._n))
            else:
                newBits.add(bit)

        return newBits

    def numbersForBit(self, bit):
        """
    Return the set of pattern numbers that match a bit.

    @param bit (int) Index of bit

    @return (set) Indices of numbers

    @raises IndexError if `bit` is outside the pattern space
    """
        if bit >= self._n:
            raise IndexError("Invalid bit")

        numbers = set()

        for index, pattern in self._patterns.items():
            if bit in pattern:
                numbers.add(index)

        return numbers

    def numberMapForBits(self, bits):
        """
    Return a map from number to matching on bits,
    for all numbers that match a set of bits.

    @param bits (set) Indices of bits

    @return (dict) Mapping from number => on bits.
    """
        numberMap = dict()

        for bit in bits:
            numbers = self.numbersForBit(bit)

            for number in numbers:
                if number not in numberMap:
                    numberMap[number] = set()

                numberMap[number].add(bit)

        return numberMap

    def prettyPrintPattern(self, bits, verbosity=1):
        """
    Pretty print a pattern.

    @param bits      (set) Indices of on bits
    @param verbosity (int) Verbosity level

    @return (string) Pretty-printed text
    """
        numberMap = self.numberMapForBits(bits)
        text = ""

        numberList = []
        # Patterns with the most matching bits are listed first.
        numberItems = sorted(iter(numberMap.items()),
                             key=lambda number_bits: len(number_bits[1]),
                             reverse=True)

        for number, bits in numberItems:

            if verbosity > 2:
                strBits = [str(n) for n in bits]
                numberText = "{0} (bits: {1})".format(number,
                                                      ",".join(strBits))
            elif verbosity > 1:
                numberText = "{0} ({1} bits)".format(number, len(bits))
            else:
                numberText = str(number)

            numberList.append(numberText)

        text += "[{0}]".format(", ".join(numberList))

        return text

    def _generate(self):
        """
    Generates set of random patterns.
    """
        # np.arange is equivalent to (and clearer than) wrapping
        # list(range(...)) in np.array.
        candidates = np.arange(self._n, dtype=np.uint32)
        for i in range(self._num):
            self._random.shuffle(candidates)
            pattern = candidates[0:self._getW()]
            self._patterns[i] = set(pattern)

    def _getW(self):
        """
    Gets a value of `w` for use in generating a pattern.
    """
        w = self._w

        if isinstance(w, list):
            return w[self._random.getUInt32(len(w))]
        else:
            return w
コード例 #11
0
class TemporalMemory(object):
    """
  Class implementing the Temporal Memory algorithm.
  """
    def __init__(self,
                 columnDimensions=(2048, ),
                 cellsPerColumn=32,
                 activationThreshold=13,
                 learningRadius=2048,
                 initialPermanence=0.21,
                 connectedPermanence=0.50,
                 minThreshold=10,
                 maxNewSynapseCount=20,
                 permanenceIncrement=0.10,
                 permanenceDecrement=0.10,
                 seed=42):
        """
    @param columnDimensions    (list)   Dimensions of the column space

    @param cellsPerColumn      (int)    Number of cells per column

    @param activationThreshold (int)    If the number of active connected
                                        synapses on a segment is at least
                                        this threshold, the segment is
                                        said to be active.

    @param learningRadius      (int)    Radius around cell from which it can
                                        sample to form distal dendrite
                                        connections.

    @param initialPermanence   (float)  Initial permanence of a new synapse.

    @param connectedPermanence (float)  If the permanence value for a synapse
                                        is greater than this value, it is said
                                        to be connected.

    @param minThreshold        (int)    If the number of synapses active on
                                        a segment is at least this threshold,
                                        it is selected as the best matching
                                        cell in a bursing column.

    @param maxNewSynapseCount  (int)    The maximum number of synapses added
                                        to a segment during learning.

    @param permanenceIncrement (float)  Amount by which permanences of synapses
                                        are incremented during learning.

    @param permanenceDecrement (float)  Amount by which permanences of synapses
                                        are decremented during learning.

    @param seed                (int)    Seed for the random number generator.
    """
        # TODO: Validate all parameters (and add validation tests)

        # Initialize member variables
        self.connections = Connections(columnDimensions, cellsPerColumn)
        self._random = Random(seed)

        self.activeCells = set()
        self.predictiveCells = set()
        self.activeSegments = set()
        self.activeSynapsesForSegment = dict()
        self.winnerCells = set()

        # Save member variables
        self.activationThreshold = activationThreshold
        self.learningRadius = learningRadius
        self.initialPermanence = initialPermanence
        self.connectedPermanence = connectedPermanence
        self.minThreshold = minThreshold
        self.maxNewSynapseCount = maxNewSynapseCount
        self.permanenceIncrement = permanenceIncrement
        self.permanenceDecrement = permanenceDecrement

    # ==============================
    # Main functions
    # ==============================

    def compute(self, activeColumns, learn=True):
        """
    Feeds input record through TM, performing inference and learning.
    Updates member variables with new state.

    @param activeColumns (set) Indices of active columns in `t`
    """
        (activeCells, winnerCells, activeSynapsesForSegment, activeSegments,
         predictiveCells) = self.computeFn(activeColumns,
                                           self.predictiveCells,
                                           self.activeSegments,
                                           self.activeSynapsesForSegment,
                                           self.winnerCells,
                                           self.connections,
                                           learn=learn)

        self.activeCells = activeCells
        self.winnerCells = winnerCells
        self.activeSynapsesForSegment = activeSynapsesForSegment
        self.activeSegments = activeSegments
        self.predictiveCells = predictiveCells

    def computeFn(self,
                  activeColumns,
                  prevPredictiveCells,
                  prevActiveSegments,
                  prevActiveSynapsesForSegment,
                  prevWinnerCells,
                  connections,
                  learn=True):
        """
    'Functional' version of compute.
    Returns new state.

    @param activeColumns                (set)         Indices of active columns
                                                      in `t`
    @param prevPredictiveCells          (set)         Indices of predictive
                                                      cells in `t-1`
    @param prevActiveSegments           (set)         Indices of active segments
                                                      in `t-1`
    @param prevActiveSynapsesForSegment (dict)        Mapping from segments to
                                                      active synapses in `t-1`,
                                                      see
                                                      `TM.computeActiveSynapses`
    @param prevWinnerCells              (set)         Indices of winner cells
                                                      in `t-1`
    @param connections                  (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeCells`               (set),
                      `winnerCells`               (set),
                      `activeSynapsesForSegment`  (dict),
                      `activeSegments`            (set),
                      `predictiveCells`           (set)
    """
        activeCells = set()
        winnerCells = set()

        (_activeCells, _winnerCells,
         predictedColumns) = self.activateCorrectlyPredictiveCells(
             prevPredictiveCells, activeColumns, connections)

        activeCells.update(_activeCells)
        winnerCells.update(_winnerCells)

        (_activeCells, _winnerCells,
         learningSegments) = self.burstColumns(activeColumns, predictedColumns,
                                               prevActiveSynapsesForSegment,
                                               connections)

        activeCells.update(_activeCells)
        winnerCells.update(_winnerCells)

        if learn:
            self.learnOnSegments(prevActiveSegments, learningSegments,
                                 prevActiveSynapsesForSegment, winnerCells,
                                 prevWinnerCells, connections)

        activeSynapsesForSegment = self.computeActiveSynapses(
            activeCells, connections)

        (activeSegments, predictiveCells) = self.computePredictiveCells(
            activeSynapsesForSegment, connections)

        return (activeCells, winnerCells, activeSynapsesForSegment,
                activeSegments, predictiveCells)

    def reset(self):
        """
    Indicates the start of a new sequence. Resets sequence state of the TM.
    """
        self.activeCells = set()
        self.predictiveCells = set()
        self.activeSegments = set()
        self.activeSynapsesForSegment = dict()
        self.winnerCells = set()

    # ==============================
    # Phases
    # ==============================

    @staticmethod
    def activateCorrectlyPredictiveCells(prevPredictiveCells, activeColumns,
                                         connections):
        """
    Phase 1: Activate the correctly predictive cells.

    Pseudocode:

      - for each prev predictive cell
        - if in active column
          - mark it as active
          - mark it as winner cell
          - mark column as predicted

    @param prevPredictiveCells (set) Indices of predictive cells in `t-1`
    @param activeColumns       (set) Indices of active columns in `t`

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `predictedColumns` (set)
    """
        activeCells = set()
        winnerCells = set()
        predictedColumns = set()

        for cell in prevPredictiveCells:
            column = connections.columnForCell(cell)

            if column in activeColumns:
                activeCells.add(cell)
                winnerCells.add(cell)
                predictedColumns.add(column)

        return (activeCells, winnerCells, predictedColumns)

    def burstColumns(self, activeColumns, predictedColumns,
                     prevActiveSynapsesForSegment, connections):
        """
    Phase 2: Burst unpredicted columns.

    Pseudocode:

      - for each unpredicted active column
        - mark all cells as active
        - mark the best matching cell as winner cell
          - (learning)
            - if it has no matching segment
              - (optimization) if there are prev winner cells
                - add a segment to it
            - mark the segment as learning

    @param activeColumns                (set)         Indices of active columns
                                                      in `t`
    @param predictedColumns             (set)         Indices of predicted
                                                      columns in `t`
    @param prevActiveSynapsesForSegment (dict)        Mapping from segments to
                                                      active synapses in `t-1`,
                                                      see
                                                      `TM.computeActiveSynapses`
    @param connections                  (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `learningSegments` (set)
    """
        activeCells = set()
        winnerCells = set()
        learningSegments = set()

        unpredictedColumns = activeColumns - predictedColumns

        for column in unpredictedColumns:
            cells = connections.cellsForColumn(column)
            activeCells.update(cells)

            (bestCell, bestSegment) = self.getBestMatchingCell(
                cells, prevActiveSynapsesForSegment, connections)
            winnerCells.add(bestCell)

            # `is None` rather than `== None`: identity test for the
            # None singleton (PEP 8).
            if bestSegment is None:
                # TODO: (optimization) Only do this if there are prev winner cells
                bestSegment = connections.createSegment(bestCell)

            learningSegments.add(bestSegment)

        return (activeCells, winnerCells, learningSegments)

    def learnOnSegments(self, prevActiveSegments, learningSegments,
                        prevActiveSynapsesForSegment, winnerCells,
                        prevWinnerCells, connections):
        """
    Phase 3: Perform learning by adapting segments.

    Pseudocode:

      - (learning) for each prev active or learning segment
        - if learning segment or from winner cell
          - strengthen active synapses
          - weaken inactive synapses
        - if learning segment
          - add some synapses to the segment
            - subsample from prev winner cells

    @param prevActiveSegments           (set)         Indices of active segments
                                                      in `t-1`
    @param learningSegments             (set)         Indices of learning
                                                      segments in `t`
    @param prevActiveSynapsesForSegment (dict)        Mapping from segments to
                                                      active synapses in `t-1`,
                                                      see
                                                      `TM.computeActiveSynapses`
    @param winnerCells                  (set)         Indices of winner cells
                                                      in `t`
    @param prevWinnerCells              (set)         Indices of winner cells
                                                      in `t-1`
    @param connections                  (Connections) Connectivity of layer
    """
        for segment in prevActiveSegments | learningSegments:
            isLearningSegment = segment in learningSegments
            isFromWinnerCell = connections.cellForSegment(
                segment) in winnerCells

            activeSynapses = self.getConnectedActiveSynapsesForSegment(
                segment, prevActiveSynapsesForSegment, 0, connections)

            if isLearningSegment or isFromWinnerCell:
                self.adaptSegment(segment, activeSynapses, connections)

            if isLearningSegment:
                n = self.maxNewSynapseCount - len(activeSynapses)

                for sourceCell in self.pickCellsToLearnOn(
                        n, segment, prevWinnerCells, connections):
                    connections.createSynapse(segment, sourceCell,
                                              self.initialPermanence)

    def computePredictiveCells(self, activeSynapsesForSegment, connections):
        """
    Phase 4: Compute predictive cells due to lateral input
    on distal dendrites.

    Pseudocode:

      - for each distal dendrite segment with activity >= activationThreshold
        - mark the segment as active
        - mark the cell as predictive

    @param activeSynapsesForSegment (dict)        Mapping from segments to
                                                  active synapses (see
                                                  `TM.computeActiveSynapses`)
    @param connections              (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeSegments`  (set),
                      `predictiveCells` (set)
    """
        activeSegments = set()
        predictiveCells = set()

        # Iterating the dict directly is equivalent to iterating .keys().
        for segment in activeSynapsesForSegment:
            synapses = self.getConnectedActiveSynapsesForSegment(
                segment, activeSynapsesForSegment, self.connectedPermanence,
                connections)

            if len(synapses) >= self.activationThreshold:
                activeSegments.add(segment)
                predictiveCells.add(connections.cellForSegment(segment))

        return (activeSegments, predictiveCells)

    # ==============================
    # Helper functions
    # ==============================

    @staticmethod
    def computeActiveSynapses(activeCells, connections):
        """
    Forward propagates activity from active cells to the synapses that touch
    them, to determine which synapses are active.

    @param activeCells (set)         Indicies of active cells
    @param connections (Connections) Connectivity of layer

    @return (dict) Mapping from segment (int) to indices of
                   active synapses (set)
    """
        activeSynapsesForSegment = dict()

        for cell in activeCells:
            for synapse in connections.synapsesForSourceCell(cell):
                segment, _, _ = connections.dataForSynapse(synapse)

                if segment not in activeSynapsesForSegment:
                    activeSynapsesForSegment[segment] = set()

                activeSynapsesForSegment[segment].add(synapse)

        return activeSynapsesForSegment

    def getBestMatchingCell(self, cells, activeSynapsesForSegment,
                            connections):
        """
    Gets the cell with the best matching segment
    (see `TM.getBestMatchingSegment`) that has the largest number of active
    synapses of all best matching segments.

    If none were found, pick the least used cell (see `TM.getLeastUsedCell`).

    @param cells                    (set)         Indices of cells
    @param activeSynapsesForSegment (dict)        Mapping from segments to
                                                  active synapses (see
                                                  `TM.computeActiveSynapses`)
    @param connections              (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `cell`        (int),
                      `bestSegment` (int)
    """
        maxSynapses = 0
        bestCell = None
        bestSegment = None

        for cell in cells:
            (segment, connectedActiveSynapses) = self.getBestMatchingSegment(
                cell, activeSynapsesForSegment, connections)

            if segment is not None and len(connectedActiveSynapses) > maxSynapses:
                maxSynapses = len(connectedActiveSynapses)
                bestCell = cell
                bestSegment = segment

        if bestCell is None:
            bestCell = self.getLeastUsedCell(cells, connections)

        return (bestCell, bestSegment)

    def getBestMatchingSegment(self, cell, activeSynapsesForSegment,
                               connections):
        """
    Gets the segment on a cell with the largest number of activate synapses,
    including all synapses with non-zero permanences.

    @param cell                     (int)         Cell index
    @param activeSynapsesForSegment (dict)        Mapping from segments to
                                                  active synapses (see
                                                  `TM.computeActiveSynapses`)
    @param connections              (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `segment`                 (int),
                      `connectedActiveSynapses` (set)
    """
        maxSynapses = self.minThreshold
        bestSegment = None
        connectedActiveSynapses = None

        for segment in connections.segmentsForCell(cell):
            synapses = self.getConnectedActiveSynapsesForSegment(
                segment, activeSynapsesForSegment, 0, connections)

            if len(synapses) >= maxSynapses:
                maxSynapses = len(synapses)
                bestSegment = segment
                connectedActiveSynapses = set(synapses)

        return (bestSegment, connectedActiveSynapses)

    def getLeastUsedCell(self, cells, connections):
        """
    Gets the cell with the smallest number of segments.
    Break ties randomly.

    @param cells                    (set)         Indices of cells
    @param connections              (Connections) Connectivity of layer

    @return (int) Cell index
    """
        leastUsedCells = set()
        minNumSegments = float("inf")

        for cell in cells:
            numSegments = len(connections.segmentsForCell(cell))

            if numSegments < minNumSegments:
                minNumSegments = numSegments
                leastUsedCells = set()

            if numSegments == minNumSegments:
                leastUsedCells.add(cell)

        # Sorting makes the random tie-break deterministic for a given seed.
        i = self._random.getUInt32(len(leastUsedCells))
        return sorted(leastUsedCells)[i]

    @staticmethod
    def getConnectedActiveSynapsesForSegment(segment, activeSynapsesForSegment,
                                             permanenceThreshold, connections):
        """
    Returns the synapses on a segment that are active due to lateral input
    from active cells.

    @param segment                   (int)         Segment index
    @param activeSynapsesForSegment  (dict)        Mapping from segments to
                                                   active synapses (see
                                                   `TM.computeActiveSynapses`)
    @param permanenceThreshold       (float)       Minimum threshold for
                                                   permanence for synapse to
                                                   be connected
    @param connections               (Connections) Connectivity of layer

    @return (set) Indices of active synapses on segment
    """
        connectedSynapses = set()

        if segment not in activeSynapsesForSegment:
            return connectedSynapses

        # TODO: (optimization) Can skip this logic if permanenceThreshold = 0
        for synapse in activeSynapsesForSegment[segment]:
            (_, _, permanence) = connections.dataForSynapse(synapse)

            if permanence >= permanenceThreshold:
                connectedSynapses.add(synapse)

        return connectedSynapses

    def adaptSegment(self, segment, activeSynapses, connections):
        """
    Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.

    @param segment        (int)         Segment index
    @param activeSynapses (set)         Indices of active synapses
    @param connections    (Connections) Connectivity of layer
    """
        for synapse in connections.synapsesForSegment(segment):
            (_, _, permanence) = connections.dataForSynapse(synapse)

            if synapse in activeSynapses:
                permanence += self.permanenceIncrement
            else:
                permanence -= self.permanenceDecrement

            # Keep permanence within min/max bounds
            permanence = max(0.0, min(1.0, permanence))

            connections.updateSynapsePermanence(synapse, permanence)

    def pickCellsToLearnOn(self, n, segment, winnerCells, connections):
        """
    Pick cells to form distal connections to.

    TODO: Respect topology and learningRadius

    @param n           (int)         Number of cells to pick
    @param segment     (int)         Segment index
    @param winnerCells (set)         Indices of winner cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of cells picked
    """
        candidates = set(winnerCells)

        # Remove cells that are already synapsed on by this segment
        for synapse in connections.synapsesForSegment(segment):
            (_, sourceCell, _) = connections.dataForSynapse(synapse)
            if sourceCell in candidates:
                candidates.remove(sourceCell)

        n = min(n, len(candidates))
        candidates = sorted(candidates)
        cells = set()

        # Pick n cells randomly
        for _ in range(n):
            i = self._random.getUInt32(len(candidates))
            cells.add(candidates[i])
            del candidates[i]

        return cells
コード例 #12
0
class ColumnPooler(object):
  """
  This class constitutes a temporary implementation for a cross-column pooler.
  The implementation goal of this class is to prove basic properties before
  creating a cleaner implementation.
  """

  def __init__(self,
               inputWidth,
               numActiveColumnsPerInhArea=40,
               synPermProximalInc=0.1,
               synPermProximalDec=0.001,
               initialProximalPermanence=0.6,
               columnDimensions=(2048,),
               activationThreshold=13,
               minThreshold=10,
               initialPermanence=0.41,
               connectedPermanence=0.50,
               maxNewSynapseCount=20,
               permanenceIncrement=0.10,
               permanenceDecrement=0.10,
               predictedSegmentDecrement=0.0,
               maxSegmentsPerCell=255,
               maxSynapsesPerSegment=255,
               seed=42):
    """
    This class uses an ExtendedTemporalMemory internally to keep track of
    distal segments. Please see ExtendedTemporalMemory for descriptions of
    constructor parameters not defined below.

    Parameters:
    ----------------------------
    @param  inputWidth (int)
            The number of proximal inputs into this layer

    @param  numActiveColumnsPerInhArea (int)
            Target number of active cells

    @param  synPermProximalInc (float)
            Permanence increment for proximal synapses

    @param  synPermProximalDec (float)
            Permanence decrement for proximal synapses

    @param  initialProximalPermanence (float)
            Initial permanence value for proximal segments

    """

    self.inputWidth = inputWidth
    self.numActiveColumnsPerInhArea = numActiveColumnsPerInhArea
    self.synPermProximalInc = synPermProximalInc
    self.synPermProximalDec = synPermProximalDec
    self.initialProximalPermanence = initialProximalPermanence
    self.connectedPermanence = connectedPermanence
    self.maxNewSynapseCount = maxNewSynapseCount
    self.minThreshold = minThreshold
    self.activeCells = set()
    # Seeded RNG: makes the random active-cell selection in
    # _computeLearningMode and _pickProximalInputsToLearnOn reproducible.
    self._random = Random(seed)

    # Create our own instance of extended temporal memory to handle distal
    # segments.
    self.tm = createModel(
                      modelName="extendedCPP",
                      columnDimensions=columnDimensions,
                      cellsPerColumn=1,
                      activationThreshold=activationThreshold,
                      initialPermanence=initialPermanence,
                      connectedPermanence=connectedPermanence,
                      minThreshold=minThreshold,
                      maxNewSynapseCount=maxNewSynapseCount,
                      permanenceIncrement=permanenceIncrement,
                      permanenceDecrement=permanenceDecrement,
                      predictedSegmentDecrement=predictedSegmentDecrement,
                      maxSegmentsPerCell=maxSegmentsPerCell,
                      maxSynapsesPerSegment=maxSynapsesPerSegment,
                      seed=seed,
                      learnOnOneCell=False,
    )

    # These sparse matrices will hold the synapses for each proximal segment.
    #
    # proximalPermanences - SparseMatrix with permanence values
    # proximalConnections - SparseBinaryMatrix of connected synapses

    self.proximalPermanences = SparseMatrix(self.numberOfColumns(),
                                               inputWidth)
    self.proximalConnections = SparseBinaryMatrix(inputWidth)
    self.proximalConnections.resize(self.numberOfColumns(), inputWidth)



  def compute(self,
              feedforwardInput=None,
              activeExternalCells=None,
              learn=True):
    """
    Run one timestep, dispatching to learning or inference mode.

    Parameters:
    ----------------------------
    @param  feedforwardInput     (set)
            Indices of active input bits

    @param  activeExternalCells  (set)
            Indices of active cells that will form connections to distal
            segments.

    @param learn                    (bool)
            If True, we are learning a new object

    NOTE(review): feedforwardInput defaults to None, but both code paths
    eventually call len() on it — callers appear expected to pass a set;
    confirm before relying on the default.
    """
    if activeExternalCells is None:
      activeExternalCells = set()

    if learn:
      self._computeLearningMode(feedforwardInput=feedforwardInput,
                               lateralInput=activeExternalCells)

    else:
      self._computeInferenceMode(feedforwardInput=feedforwardInput,
                                 lateralInput=activeExternalCells)


  def _computeLearningMode(self, feedforwardInput, lateralInput):
    """
    Learning mode: we are learning a new object. If there is no prior
    activity, we randomly activate 2% of cells and create connections to
    incoming input. If there was prior activity, we maintain it.

    These cells will represent the object and learn distal connections to
    lateral cortical columns.

    Parameters:
    ----------------------------
    @param  feedforwardInput (set)
            Indices of active input bits

    @param  lateralInput (set)
            Indices of active cells from neighboring columns.
    """
    # If there are no previously active cells, select random subset of cells.
    # A shuffled list of all cell indices is truncated to the target count.
    if len(self.activeCells) == 0:
      self.activeCells = set(self._random.shuffle(
            numpy.array(range(self.numberOfCells()),
                        dtype="uint32"))[0:self.numActiveColumnsPerInhArea])

    # else we maintain previous activity, nothing to do.

    # Those cells that remain active will learn on their proximal and distal
    # dendrites as long as there is some input.  If there are no
    # cells active, no learning happens.  This only happens in the very
    # beginning if there has been no bottom up activity at all.
    if len(self.activeCells) > 0:

      # Learn on proximal dendrite if appropriate
      if len(feedforwardInput) > 0:
        self._learnProximal(feedforwardInput, self.activeCells,
                            self.maxNewSynapseCount, self.proximalPermanences,
                            self.proximalConnections,
                            self.initialProximalPermanence,
                            self.synPermProximalInc, self.synPermProximalDec,
                            self.connectedPermanence)

      # Learn on distal dendrites if appropriate
      self.tm.compute(activeColumns=self.activeCells,
                      activeExternalCells=lateralInput,
                      formInternalConnections=False,
                      learn=True)


  def _computeInferenceMode(self, feedforwardInput, lateralInput):
    """
    Inference mode: if there is some feedforward activity, perform
    spatial pooling on it to recognize previously known objects. If there
    is no feedforward activity, maintain previous activity.

    Parameters:
    ----------------------------
    @param  feedforwardInput (set)
            Indices of active input bits

    @param  lateralInput (list of lists)
            A list of list of active cells from neighboring columns.
            len(lateralInput) == number of connected neighboring cortical
            columns.

    """
    # Figure out which cells are active due to feedforward proximal inputs
    # In order to form unions, we keep all cells that are over threshold
    inputVector = numpy.zeros(self.numberOfInputs(), dtype=realDType)
    inputVector[list(feedforwardInput)] = 1
    overlaps = numpy.zeros(self.numberOfColumns(), dtype=realDType)
    self.proximalConnections.rightVecSumAtNZ_fast(inputVector.astype(realDType),
                                                 overlaps)
    overlaps[overlaps < self.minThreshold] = 0
    bottomUpActivity =  set(overlaps.nonzero()[0])

    # If there is insufficient current bottom up activity, we incorporate all
    # previous activity. We set their overlaps so they are sure to win.
    if len(bottomUpActivity) < self.numActiveColumnsPerInhArea:
      bottomUpActivity = bottomUpActivity.union(self.activeCells)
      maxOverlap = overlaps.max()
      overlaps[self.getActiveCells()] = maxOverlap+1

    # Narrow down list of active cells based on lateral activity.
    # Note: _winnersBasedOnLateralActivity may modify `overlaps` in place.
    self.activeCells = self._winnersBasedOnLateralActivity(
      bottomUpActivity,
      self.getPredictiveCells(),
      overlaps,
      self.numActiveColumnsPerInhArea
    )

    # Update predictive cells for next time step
    self.tm.compute(activeColumns=self.activeCells,
                    activeExternalCells=lateralInput,
                    formInternalConnections=False,
                    learn=False)


  def numberOfInputs(self):
    """
    Returns the number of inputs into this layer
    """
    return self.inputWidth


  def numberOfColumns(self):
    """
    Returns the number of columns in this layer.
    @return (int) Number of columns
    """
    return self.tm.numberOfColumns()


  def numberOfCells(self):
    """
    Returns the number of cells in this layer.
    @return (int) Number of cells
    """
    return self.tm.numberOfCells()


  def getActiveCells(self):
    """
    Returns the indices of the active cells.
    @return (set) Indices of active cells.
    """
    return self.getCellIndices(self.activeCells)


  @classmethod
  def getCellIndices(cls, cells):
    # Map each cell to its index; with getCellIndex being the identity,
    # this is just a list() conversion kept for interface symmetry.
    return [cls.getCellIndex(c) for c in cells]


  @staticmethod
  def getCellIndex(cell):
    # Identity mapping: cells are already plain indices in this layer.
    return cell


  def numberOfConnectedSynapses(self, cells=None):
    """
    Returns the number of proximal connected synapses on these cells.

    Parameters:
    ----------------------------
    @param  cells (set or list)
            Indices of the cells. If None return count for all cells.
    """
    if cells is None:
      cells = xrange(self.numberOfCells())
    n = 0
    for cell in cells:
      n += self.proximalConnections.nNonZerosOnRow(cell)
    return n


  def numberOfSynapses(self, cells=None):
    """
    Returns the number of proximal synapses with permanence>0 on these cells.

    Parameters:
    ----------------------------
    @param  cells (set or list)
            Indices of the cells. If None return count for all cells.
    """
    if cells is None:
      cells = xrange(self.numberOfCells())
    n = 0
    for cell in cells:
      n += self.proximalPermanences.nNonZerosOnRow(cell)
    return n


  def numberOfDistalSegments(self, cells):
    """
    Returns the total number of distal segments for these cells.

    Parameters:
    ----------------------------
    @param  cells (set or list)
            Indices of the cells
    """
    n = 0
    for cell in cells:
      n += len(self.tm.connections.segmentsForCell(cell))
    return n


  def numberOfDistalSynapses(self, cells):
    """
    Returns the total number of distal synapses for these cells.

    Parameters:
    ----------------------------
    @param  cells (set or list)
            Indices of the cells
    """
    n = 0
    for cell in cells:
      segments = self.tm.connections.segmentsForCell(cell)
      for segment in segments:
        n += len(self.tm.connections.synapsesForSegment(segment))
    return n


  def reset(self):
    """
    Reset internal states. When learning this signifies we are to learn a
    unique new object.
    """
    self.activeCells = set()
    self.tm.reset()


  def getPredictiveCells(self):
    """
    Get the set of distally predictive cells as a set.

    @return (list) A list containing indices of the current distally predicted
    cells.
    """
    return self.tm.getPredictiveCells()


  def getPredictedActiveCells(self):
    """
    Get the set of cells that were predicted previously then became active

    @return (set) A set containing indices.
    """
    return self.tm.predictedActiveCellsIndices()


  def getConnections(self):
    """
    Get the Connections structure associated with our TM. Beware of using
    this as it is implementation specific and may change.

    @return (object) A Connections object
    """
    return self.tm.connections


  def _learnProximal(self,
             activeInputs, activeCells, maxNewSynapseCount, proximalPermanences,
             proximalConnections, initialPermanence, synPermProximalInc,
             synPermProximalDec, connectedPermanence):
    """
    Learn on proximal dendrites of active cells.  Updates proximalPermanences
    and proximalConnections in place.
    """
    for cell in activeCells:
      cellPermanencesDense = proximalPermanences.getRow(cell)
      cellNonZeroIndices, _ = proximalPermanences.rowNonZeros(cell)
      cellNonZeroIndices = list(cellNonZeroIndices)

      # Get new and existing connections for this segment
      newInputs, existingInputs = self._pickProximalInputsToLearnOn(
        maxNewSynapseCount, activeInputs, cellNonZeroIndices
      )

      # Adjust existing connections appropriately
      # First we decrement all existing permanences
      if len(cellNonZeroIndices) > 0:
        cellPermanencesDense[cellNonZeroIndices] -= synPermProximalDec

      # Then we add inc + dec to existing active synapses
      # (net effect on active synapses: +synPermProximalInc)
      if len(existingInputs) > 0:
        cellPermanencesDense[existingInputs] += synPermProximalInc + synPermProximalDec

      # Add new connections
      if len(newInputs) > 0:
        cellPermanencesDense[newInputs] += initialPermanence

      # Update proximalPermanences and proximalConnections
      proximalPermanences.setRowFromDense(cell, cellPermanencesDense)
      newConnected = numpy.where(cellPermanencesDense >= connectedPermanence)[0]
      proximalConnections.replaceSparseRow(cell, newConnected)



  def _pickProximalInputsToLearnOn(self, newSynapseCount, activeInputs,
                                  cellNonZeros):
    """
    Pick inputs to form proximal connections to a particular cell. We just
    randomly subsample from activeInputs, regardless of whether they are already
    connected.

    We return a list of up to newSynapseCount input indices from activeInputs
    that are valid new connections for this cell. We also return a list
    containing all inputs in activeInputs that are already connected to this
    cell.

    Parameters:
    ----------------------------
    @param newSynapseCount  (int)        Number of inputs to pick
    @param activeInputs     (set)        Indices of active inputs
    @param cellNonZeros     (list)       Indices of inputs input this cell with
                                         non-zero permanences.

    @return (list, list) Indices of new inputs to connect to, inputs already
                         connected
    """
    candidates = []
    alreadyConnected = []

    # Collect inputs that already have synapses and list of new candidate inputs
    for inputIdx in activeInputs:
      if inputIdx in cellNonZeros:
        alreadyConnected += [inputIdx]
      else:
        candidates += [inputIdx]

    # Select min(newSynapseCount, len(candidates)) new inputs to connect to
    if newSynapseCount >= len(candidates):
      return candidates, alreadyConnected

    else:
      # Pick newSynapseCount cells randomly
      # TODO: we could maybe implement this more efficiently with shuffle.
      inputs = []
      for _ in range(newSynapseCount):
        i = self._random.getUInt32(len(candidates))
        inputs += [candidates[i]]
        candidates.remove(candidates[i])

      return inputs, alreadyConnected


  def _winnersBasedOnLateralActivity(self,
                                     activeCells,
                                     predictiveCells,
                                     overlaps,
                                     targetActiveCells):
    """
    Given the set of cells active due to feedforward input, narrow down the
    list of active cells based on predictions due to previous lateralInput.

    Note: `overlaps` may be modified in place by this method.

    Parameters:
    ----------------------------
    @param    activeCells           (set)
              Indices of cells activated by bottom-up input.

    @param    predictiveCells       (set)
              Indices of cells that are laterally predicted.

    @param    overlaps              (numpy array)
              Bottom up overlap scores for each proximal segment. This is used
              to select additional cells if the narrowed down list contains less
              than targetActiveCells.

    @param    targetActiveCells     (int)
              The number of active cells we want to have active.

    @return (set) List of new winner cell indices

    """
    # No TM accessors that return set so access internal member directly
    predictedActiveCells = activeCells.intersection(predictiveCells)

    # If predicted cells don't intersect at all with active cells, we go with
    # bottom up input. In these cases we can stick with existing active cells
    # and skip the overlap sorting
    if len(predictedActiveCells) == 0:
      predictedActiveCells = activeCells

    # We want to keep all cells that were predicted and currently active due to
    # feedforward input. This set could be larger than our target number of
    # active cells due to unions, which is ok. However if there are insufficient
    # cells active after this intersection, we fill in with those currently
    # active cells that have highest overlap.
    elif len(predictedActiveCells) < targetActiveCells:
      # Don't want to consider cells already chosen
      overlaps[list(predictedActiveCells)] = 0

      # Add in the desired number of cells with highest activity
      numActive = targetActiveCells - len(predictedActiveCells)
      winnerIndices = numpy.argsort(overlaps, kind='mergesort')
      sortedWinnerIndices = winnerIndices[-numActive:][::-1]
      predictedActiveCells = predictedActiveCells.union(set(sortedWinnerIndices))

    return predictedActiveCells
コード例 #13
0
class TemporalMemory(object):
    """
  Class implementing the Temporal Memory algorithm.
  """
    def __init__(self,
                 columnDimensions=(2048, ),
                 cellsPerColumn=32,
                 activationThreshold=13,
                 learningRadius=2048,
                 initialPermanence=0.21,
                 connectedPermanence=0.50,
                 minThreshold=10,
                 maxNewSynapseCount=20,
                 permanenceIncrement=0.10,
                 permanenceDecrement=0.10,
                 seed=42):
        """
        Build a Temporal Memory instance.

    @param columnDimensions    (list)  Dimensions of the column space
    @param cellsPerColumn      (int)   Number of cells per column
    @param activationThreshold (int)   A segment is active when it has at least this many active connected synapses.
    @param learningRadius      (int)   Radius around a cell from which it can sample to form distal dendrite connections.
    @param initialPermanence   (float) Initial permanence of a new synapse.
    @param connectedPermanence (float) A synapse is connected when its permanence is greater than this value.
    @param minThreshold        (int)   Minimum number of active synapses for a segment to be the best matching cell in a bursting column.
    @param maxNewSynapseCount  (int)   Maximum number of synapses added to a segment during learning.
    @param permanenceIncrement (float) Amount synapse permanences are incremented during learning.
    @param permanenceDecrement (float) Amount synapse permanences are decremented during learning.
    @param seed                (int)   Seed for the random number generator.

    @raises ValueError when columnDimensions is empty or cellsPerColumn is
            not positive.
    """
        # Guard clauses: fail fast on structurally invalid parameters.
        if len(columnDimensions) == 0:
            raise ValueError(
                "Number of column dimensions must be greater than 0")

        if cellsPerColumn <= 0:
            raise ValueError(
                "Number of cells per column must be greater than 0")

        # TODO: Validate all parameters (and add validation tests)

        # Configuration.
        self.columnDimensions = columnDimensions
        self.cellsPerColumn = cellsPerColumn
        self.activationThreshold = activationThreshold
        self.learningRadius = learningRadius
        self.initialPermanence = initialPermanence
        self.connectedPermanence = connectedPermanence
        self.minThreshold = minThreshold
        self.maxNewSynapseCount = maxNewSynapseCount
        self.permanenceIncrement = permanenceIncrement
        self.permanenceDecrement = permanenceDecrement

        # Layer connectivity and seeded RNG.
        self.connections = Connections(self.numberOfCells())
        self._random = Random(seed)

        # Per-sequence state; cleared by reset().
        self.activeCells = set()
        self.predictiveCells = set()
        self.activeSegments = set()
        self.winnerCells = set()

    # ==============================
    # Main functions
    # ==============================

    def compute(self, activeColumns, learn=True):
        """
        Feed one input record through the TM, performing inference and
        (optionally) learning, then store the resulting state on `self`.

        @param activeColumns (set)  Indices of active columns in `t`
        @param learn         (bool) Whether learning is enabled
        """
        result = self.computeFn(activeColumns,
                                self.predictiveCells,
                                self.activeSegments,
                                self.activeCells,
                                self.winnerCells,
                                self.connections,
                                learn=learn)

        # Unpack the new state; predictedColumns is computed but not stored.
        (self.activeCells,
         self.winnerCells,
         self.activeSegments,
         self.predictiveCells,
         _predictedColumns) = result

    def computeFn(self, activeColumns, prevPredictiveCells, prevActiveSegments,
                  prevActiveCells, prevWinnerCells, connections, learn=True):
        """
    'Functional' version of compute: takes previous state as arguments and
    returns the new state without mutating `self`.

    @param activeColumns       (set)         Indices of active columns in `t`
    @param prevPredictiveCells (set)         Indices of predictive cells in `t-1`
    @param prevActiveSegments  (set)         Indices of active segments in `t-1`
    @param prevActiveCells     (set)         Indices of active cells in `t-1`
    @param prevWinnerCells     (set)         Indices of winner cells in `t-1`
    @param connections         (Connections) Connectivity of layer
    @param learn               (bool)        Whether learning is enabled

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `activeSegments`   (set),
                      `predictiveCells`  (set),
                      `predictedColumns` (set)
    """
        # Phase 1: activate cells that were correctly predicted.
        (predictedActive, predictedWinners,
         predictedColumns) = self.activateCorrectlyPredictiveCells(
             prevPredictiveCells, activeColumns)

        activeCells = set(predictedActive)
        winnerCells = set(predictedWinners)

        # Phase 2: burst the active columns that were not predicted.
        (burstingActive, burstingWinners,
         learningSegments) = self.burstColumns(activeColumns, predictedColumns,
                                               prevActiveCells,
                                               prevWinnerCells, connections)
        activeCells |= burstingActive
        winnerCells |= burstingWinners

        # Phase 3: adapt segments and grow synapses.
        if learn:
            self.learnOnSegments(prevActiveSegments, learningSegments,
                                 prevActiveCells, winnerCells,
                                 prevWinnerCells, connections)

        # Phase 4: compute predictions for the next timestep.
        activeSegments, predictiveCells = self.computePredictiveCells(
            activeCells, connections)

        return (activeCells, winnerCells, activeSegments, predictiveCells,
                predictedColumns)

    def reset(self):
        """
    Signal the start of a new sequence by clearing all per-sequence state.
    """
        # Every piece of sequence state becomes an empty set.
        for attr in ("activeCells", "predictiveCells",
                     "activeSegments", "winnerCells"):
            setattr(self, attr, set())

    # ==============================
    # Phases
    # ==============================

    def activateCorrectlyPredictiveCells(self, prevPredictiveCells,
                                         activeColumns):
        """
    Phase 1: activate cells that correctly predicted the current input.

    A previously predictive cell whose column is now active becomes both an
    active cell and a winner cell, and its column is marked as predicted.

    @param prevPredictiveCells (set) Indices of predictive cells in `t-1`
    @param activeColumns       (set) Indices of active columns in `t`

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `predictedColumns` (set)
    """
        activeCells = set()
        winnerCells = set()
        predictedColumns = set()

        for cell in prevPredictiveCells:
            col = self.columnForCell(cell)
            if col not in activeColumns:
                continue

            # Correctly predicted: the cell is active, a winner, and its
            # column counts as predicted.
            activeCells.add(cell)
            winnerCells.add(cell)
            predictedColumns.add(col)

        return activeCells, winnerCells, predictedColumns

    def burstColumns(self, activeColumns, predictedColumns, prevActiveCells,
                     prevWinnerCells, connections):
        """
    Phase 2: burst the active columns that were not predicted.

    Every cell in an unpredicted active column becomes active, and the best
    matching cell becomes the winner. When a winner has no matching segment
    and previous winner cells exist, a new segment is created for it; any
    resulting segment is marked for learning.

    @param activeColumns    (set)         Indices of active columns in `t`
    @param predictedColumns (set)         Indices of predicted columns in `t`
    @param prevActiveCells  (set)         Indices of active cells in `t-1`
    @param prevWinnerCells  (set)         Indices of winner cells in `t-1`
    @param connections      (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `learningSegments` (set)
    """
        activeCells = set()
        winnerCells = set()
        learningSegments = set()

        for column in (activeColumns - predictedColumns):
            columnCells = self.cellsForColumn(column)
            activeCells.update(columnCells)

            winner, segment = self.bestMatchingCell(columnCells,
                                                    prevActiveCells,
                                                    connections)
            winnerCells.add(winner)

            # Grow a segment only when there are previous winner cells to
            # later connect to (optimization noted in the original design).
            if segment is None and prevWinnerCells:
                segment = connections.createSegment(winner)

            if segment is not None:
                learningSegments.add(segment)

        return activeCells, winnerCells, learningSegments

    def learnOnSegments(self, prevActiveSegments, learningSegments,
                        prevActiveCells, winnerCells, prevWinnerCells,
                        connections):
        """
    Phase 3: adapt segments and grow new synapses.

    For each segment that was active in `t-1` or is learning in `t`:
    reinforce it (strengthen active synapses, weaken inactive ones) when it
    is a learning segment or belongs to a winner cell; when it is a learning
    segment, also grow new synapses sampled from the previous winner cells.

    @param prevActiveSegments (set)         Indices of active segments in `t-1`
    @param learningSegments   (set)         Indices of learning segments in `t`
    @param prevActiveCells    (set)         Indices of active cells in `t-1`
    @param winnerCells        (set)         Indices of winner cells in `t`
    @param prevWinnerCells    (set)         Indices of winner cells in `t-1`
    @param connections        (Connections) Connectivity of layer
    """
        for segment in prevActiveSegments | learningSegments:
            learning = segment in learningSegments
            onWinner = connections.cellForSegment(segment) in winnerCells

            activeSynapses = self.activeSynapsesForSegment(
                segment, prevActiveCells, connections)

            if learning or onWinner:
                self.adaptSegment(segment, activeSynapses, connections)

            if not learning:
                continue

            # Top the segment up toward maxNewSynapseCount synapses,
            # sampling presynaptic cells from the previous winner cells.
            numNew = self.maxNewSynapseCount - len(activeSynapses)
            for sourceCell in self.pickCellsToLearnOn(numNew, segment,
                                                      prevWinnerCells,
                                                      connections):
                connections.createSynapse(segment, sourceCell,
                                          self.initialPermanence)

    def computePredictiveCells(self, activeCells, connections):
        """
    Phase 4: compute predictive cells due to lateral input on distal
    dendrites.

    Propagates activity from active cells across their outgoing synapses; a
    segment accumulating at least `activationThreshold` active *connected*
    synapses becomes active and marks its cell as predictive.

    @param activeCells (set)         Indices of active cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeSegments`  (set),
                      `predictiveCells` (set)
    """
        activeConnectedCount = defaultdict(int)
        activeSegments = set()
        predictiveCells = set()

        for cell in activeCells:
            synapses = connections.synapsesForPresynapticCell(cell)
            for synapseData in synapses.values():
                # Only connected synapses contribute to segment activity.
                if synapseData.permanence < self.connectedPermanence:
                    continue

                segment = synapseData.segment
                activeConnectedCount[segment] += 1

                if activeConnectedCount[segment] >= self.activationThreshold:
                    activeSegments.add(segment)
                    predictiveCells.add(connections.cellForSegment(segment))

        return activeSegments, predictiveCells

    # ==============================
    # Helper functions
    # ==============================

    def bestMatchingCell(self, cells, activeCells, connections):
        """
    Find the cell whose best matching segment (see `TM.bestMatchingSegment`)
    has the most active synapses. Falls back to the least used cell (see
    `TM.leastUsedCell`) when no cell has a matching segment.

    @param cells       (set)         Indices of cells
    @param activeCells (set)         Indices of active cells
    @param connections (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `cell`        (int),
                      `bestSegment` (int or None)
    """
        bestCell, bestSegment, mostSynapses = None, None, 0

        for cell in cells:
            segment, activeCount = self.bestMatchingSegment(
                cell, activeCells, connections)

            # Keep only a strictly better match.
            if segment is None or activeCount <= mostSynapses:
                continue
            bestCell, bestSegment, mostSynapses = cell, segment, activeCount

        if bestCell is None:
            # No matching segment anywhere: pick the least used cell instead.
            bestCell = self.leastUsedCell(cells, connections)

        return bestCell, bestSegment

    def bestMatchingSegment(self, cell, activeCells, connections):
        """
        Find the segment on `cell` with the largest number of active
        synapses, counting all synapses with non-zero permanence. Only
        segments with at least `minThreshold` active synapses qualify.

        @param cell        (int)         Cell index
        @param activeCells (set)         Indices of active cells
        @param connections (Connections) Connectivity of layer

        @return (tuple) `(segment, numActiveSynapses)`, both None when no
                        segment reaches `minThreshold`
        """
        bestSeg = None
        bestCount = None
        # Start the bar at minThreshold so weaker segments never qualify.
        bar = self.minThreshold

        for seg in connections.segmentsForCell(cell):
            activeCount = sum(
                1 for syn in connections.synapsesForSegment(seg)
                if connections.dataForSynapse(syn).presynapticCell
                in activeCells)

            # `>=` so that a later segment wins ties, matching the
            # original selection behavior.
            if activeCount >= bar:
                bar = activeCount
                bestSeg = seg
                bestCount = activeCount

        return bestSeg, bestCount

    def leastUsedCell(self, cells, connections):
        """
        Return the cell with the fewest segments, breaking ties randomly
        (but reproducibly, via the layer's seeded RNG).

        @param cells       (set)         Indices of cells
        @param connections (Connections) Connectivity of layer

        @return (int) Cell index
        """
        segmentCounts = {
            cell: len(connections.segmentsForCell(cell)) for cell in cells}
        fewest = min(segmentCounts.values())

        # Sort the tied cells so the RNG draw is deterministic for a
        # given seed regardless of set iteration order.
        tied = sorted(
            cell for cell, count in segmentCounts.items() if count == fewest)

        choice = self._random.getUInt32(len(tied))
        return tied[choice]

    @staticmethod
    def activeSynapsesForSegment(segment, activeCells, connections):
        """
    Returns the synapses on a segment that are active due to lateral input
    from active cells.

    @param segment     (int)         Segment index
    @param activeCells (set)         Indices of active cells
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of active synapses on segment
    """
        synapses = set()

        for synapse in connections.synapsesForSegment(segment):
            synapseData = connections.dataForSynapse(synapse)

            if synapseData.presynapticCell in activeCells:
                synapses.add(synapse)

        return synapses

    def adaptSegment(self, segment, activeSynapses, connections):
        """
        Update permanences of synapses on `segment`: strengthen active
        synapses, weaken inactive ones. Permanences are clamped to
        [0.0, 1.0].

        @param segment        (int)         Segment index
        @param activeSynapses (set)         Indices of active synapses
        @param connections    (Connections) Connectivity of layer
        """
        for syn in connections.synapsesForSegment(segment):
            current = connections.dataForSynapse(syn).permanence

            delta = (self.permanenceIncrement if syn in activeSynapses
                     else -self.permanenceDecrement)

            # Clamp the adjusted permanence to the legal [0.0, 1.0] range.
            adjusted = min(1.0, max(0.0, current + delta))

            connections.updateSynapsePermanence(syn, adjusted)

    def pickCellsToLearnOn(self, n, segment, winnerCells, connections):
        """
        Pick up to `n` winner cells to form new distal connections to,
        excluding cells the segment already synapses onto.

        TODO: Respect topology and learningRadius

        @param n           (int)         Number of cells to pick
        @param segment     (int)         Segment index
        @param winnerCells (set)         Indices of winner cells in `t`
        @param connections (Connections) Connectivity of layer

        @return (set) Indices of cells picked
        """
        # Exclude winners this segment is already connected to.
        alreadyConnected = {
            connections.dataForSynapse(syn).presynapticCell
            for syn in connections.synapsesForSegment(segment)}
        # Sort so the RNG draws are deterministic for a given seed.
        eligible = sorted(set(winnerCells) - alreadyConnected)

        picked = set()
        for _ in range(min(n, len(eligible))):
            # Sample without replacement using the layer's RNG.
            idx = self._random.getUInt32(len(eligible))
            picked.add(eligible.pop(idx))

        return picked

    def columnForCell(self, cell):
        """
        Return the index of the column that a cell belongs to.

        @param cell (int) Cell index

        @return (int) Column index

        @raises IndexError When `cell` is out of range.
        """
        self._validateCell(cell)

        # Use floor division instead of int(cell / cellsPerColumn): true
        # division goes through a float and can return the wrong column for
        # very large cell indices (floats have only 53 bits of precision).
        # For the valid (non-negative) range both agree otherwise.
        return cell // self.cellsPerColumn

    def cellsForColumn(self, column):
        """
        Return the indices of the cells that belong to `column`.

        @param column (int) Column index

        @return (set) Cell indices
        """
        self._validateColumn(column)

        # Cells are laid out contiguously, cellsPerColumn per column.
        first = self.cellsPerColumn * column
        return set(range(first, first + self.cellsPerColumn))

    def numberOfColumns(self):
        """
        Return the number of columns in this layer: the product of all
        column dimensions (1 for an empty dimension tuple).

        @return (int) Number of columns
        """
        total = 1
        for dimension in self.columnDimensions:
            total *= dimension
        return total

    def numberOfCells(self):
        """
        Return the total number of cells in this layer
        (columns times cells per column).

        @return (int) Number of cells
        """
        return self.cellsPerColumn * self.numberOfColumns()

    def mapCellsToColumns(self, cells):
        """
        Group cells by the column they belong to.

        @param cells (set) Cells

        @return (dict) Mapping from column index to the subset of `cells`
                       in that column
        """
        grouped = defaultdict(set)

        for cell in cells:
            grouped[self.columnForCell(cell)].add(cell)

        return grouped

    def _validateColumn(self, column):
        """
    Raises an error if column index is invalid.

    @param column (int) Column index
    """
        if column >= self.numberOfColumns() or column < 0:
            raise IndexError("Invalid column")

    def _validateCell(self, cell):
        """
    Raises an error if cell index is invalid.

    @param cell (int) Cell index
    """
        if cell >= self.numberOfCells() or cell < 0:
            raise IndexError("Invalid cell")

    @classmethod
    def getCellIndices(cls, cells):
        """Return the integer index of each cell in `cells`, in order."""
        return list(map(cls.getCellIndex, cells))

    @staticmethod
    def getCellIndex(cell):
        """Return the index stored on a cell object (its `idx` attribute)."""
        return cell.idx
コード例 #14
0
class RandomDistributedScalarEncoder(Encoder):
  """
  A scalar encoder encodes a numeric (floating point) value into an array
  of bits.

  This class maps a scalar value into a random distributed representation that
  is suitable as scalar input into the spatial pooler. The encoding scheme is
  designed to replace a simple ScalarEncoder. It preserves the important
  properties around overlapping representations. Unlike ScalarEncoder the min
  and max range can be dynamically increased without any negative effects. The
  only required parameter is resolution, which determines the resolution of
  input values.

  Scalar values are mapped to a bucket. The class maintains a random distributed
  encoding for each bucket. The following properties are maintained by
  RandomDistributedEncoder:

  1) Similar scalars should have high overlap. Overlap should decrease smoothly
  as scalars become less similar. Specifically, neighboring bucket indices must
  overlap by a linearly decreasing number of bits.

  2) Dissimilar scalars should have very low overlap so that the SP does not
  confuse representations. Specifically, buckets that are more than w indices
  apart should have at most maxOverlap bits of overlap. We arbitrarily (and
  safely) define "very low" to be 2 bits of overlap or lower.

  Properties 1 and 2 lead to the following overlap rules for buckets i and j:

      If abs(i-j) < w then:
        overlap(i,j) = w - abs(i-j)
      else:
        overlap(i,j) <= maxOverlap

  3) The representation for a scalar must not change during the lifetime of
  the object. Specifically, as new buckets are created and the min/max range
  is extended, the representation for previously in-range scalars and
  previously created buckets must not change.

  NOTE(review): this class is written for Python 2 (print statements,
  dict.has_key, sys.maxint, integer division with `/`).
  """


  def __init__(self, resolution, w=21, n=400, name=None, offset=None,
               seed=42, verbosity=0):
    """Constructor

    @param resolution A floating point positive number denoting the resolution
                    of the output representation. Numbers within
                    [offset-resolution/2, offset+resolution/2] will fall into
                    the same bucket and thus have an identical representation.
                    Adjacent buckets will differ in one bit. resolution is a
                    required parameter.

    @param w Number of bits to set in output. w must be odd to avoid centering
                    problems.  w must be large enough that spatial pooler
                    columns will have a sufficiently large overlap to avoid
                    false matches. A value of w=21 is typical.

    @param n Number of bits in the representation (must be > w). n must be
                    large enough such that there is enough room to select
                    new representations as the range grows. With w=21 a value
                    of n=400 is typical. The class enforces n > 6*w.

    @param name An optional string which will become part of the description.

    @param offset A floating point offset used to map scalar inputs to bucket
                    indices. The middle bucket will correspond to numbers in the
                    range [offset - resolution/2, offset + resolution/2). If set
                    to None, the very first input that is encoded will be used
                    to determine the offset.

    @param seed The seed used for numpy's random number generator. If set to -1
                    the generator will be initialized without a fixed seed.

    @param verbosity An integer controlling the level of debugging output. A
                    value of 0 implies no output. verbosity=1 may lead to
                    one-time printouts during construction, serialization or
                    deserialization. verbosity=2 may lead to some output per
                    encode operation. verbosity>2 may lead to significantly
                    more output.

    @raises ValueError When w is even or non-positive, resolution is
                    non-positive, or n is not an int strictly greater than 6*w.
    """
    # Validate inputs
    if (w <= 0) or (w%2 == 0):
      raise ValueError("w must be an odd positive integer")

    if resolution <= 0:
      raise ValueError("resolution must be a positive number")

    if (n <= 6*w) or (not isinstance(n, int)):
      raise ValueError("n must be an int strictly greater than 6*w. For "
                       "good results we recommend n be strictly greater "
                       "than 11*w")

    # encoders is None: this is a flat encoder with no sub-encoders
    self.encoders = None
    self.verbosity = verbosity
    self.w = w
    self.n = n
    self.resolution = float(resolution)

    # The largest overlap we allow for non-adjacent encodings
    self._maxOverlap = 2

    # initialize the random number generators
    self._seed(seed)

    # Internal parameters for bucket mapping
    self.minIndex = None
    self.maxIndex = None
    self._offset = None
    # INITIAL_BUCKETS is a module-level constant defined elsewhere in this
    # file; it sizes the initial bucket index space.
    self._initializeBucketMap(INITIAL_BUCKETS, offset)

    # A name used for debug printouts
    if name is not None:
      self.name = name
    else:
      self.name = "[%s]" % (self.resolution)

    if self.verbosity > 0:
      self.dump()


  def __setstate__(self, state):
    """Restore from a pickle, upgrading a legacy numpy RNG to NupicRandom."""
    self.__dict__.update(state)

    # Initialize self.random as an instance of NupicRandom derived from the
    # previous numpy random state
    randomState = state["random"]
    if isinstance(randomState, numpy.random.mtrand.RandomState):
      # sys.maxint exists only in Python 2; seeds the new RNG from the old one
      self.random = NupicRandom(randomState.randint(sys.maxint))


  def _seed(self, seed=-1):
    """
    Initialize the random seed. A seed of -1 means "no fixed seed"
    (non-reproducible sequence).
    """
    if seed != -1:
      self.random = NupicRandom(seed)
    else:
      self.random = NupicRandom()


  def getDecoderOutputFieldTypes(self):
    """ See method description in base.py """
    return (FieldMetaType.float, )


  def getWidth(self):
    """ See method description in base.py """
    return self.n


  def getDescription(self):
    # Single (name, offset) pair: this encoder produces one field at bit 0.
    return [(self.name, 0)]


  def getBucketIndices(self, x):
    """ See method description in base.py """

    # Missing data (NaN or the sentinel) maps to no bucket at all.
    if ((isinstance(x, float) and math.isnan(x)) or
        x == SENTINEL_VALUE_FOR_MISSING_DATA):
      return [None]

    # The first encoded value fixes the offset for the lifetime of the object.
    if self._offset is None:
      self._offset = x

    bucketIdx = (
        (self._maxBuckets/2) + int(round((x - self._offset) / self.resolution))
    )

    # Clip to the legal bucket index range [0, _maxBuckets).
    if bucketIdx < 0:
      bucketIdx = 0
    elif bucketIdx >= self._maxBuckets:
      bucketIdx = self._maxBuckets-1

    return [bucketIdx]


  def mapBucketIndexToNonZeroBits(self, index):
    """
    Given a bucket index, return the list of non-zero bits. If the bucket
    index does not exist, it is created. If the index falls outside our range
    we clip it.

    @param index The bucket index to get non-zero bits for.
    @returns numpy array of indices of non-zero bits for specified index.
    """
    if index < 0:
      index = 0

    if index >= self._maxBuckets:
      index = self._maxBuckets-1

    # dict.has_key is Python 2 only (use `in` under Python 3)
    if not self.bucketMap.has_key(index):
      if self.verbosity >= 2:
        print "Adding additional buckets to handle index=", index
      self._createBucket(index)
    return self.bucketMap[index]


  def encodeIntoArray(self, x, output):
    """ See method description in base.py """

    if x is not None and not isinstance(x, numbers.Number):
      raise TypeError(
          "Expected a scalar input but got input of type %s" % type(x))

    # Get the bucket index to use
    bucketIdx = self.getBucketIndices(x)[0]

    # None is returned for missing value in which case we return all 0's.
    output[0:self.n] = 0
    if bucketIdx is not None:
      output[self.mapBucketIndexToNonZeroBits(bucketIdx)] = 1


  def _createBucket(self, index):
    """
    Create the given bucket index. Recursively create as many in-between
    bucket indices as necessary.
    """
    if index < self.minIndex:
      if index == self.minIndex - 1:
        # Create a new representation that has exactly w-1 overlapping bits
        # as the min representation
        self.bucketMap[index] = self._newRepresentation(self.minIndex,
                                                        index)
        self.minIndex = index
      else:
        # Recursively create all the indices above and then this index
        self._createBucket(index+1)
        self._createBucket(index)
    else:
      if index == self.maxIndex + 1:
        # Create a new representation that has exactly w-1 overlapping bits
        # as the max representation
        self.bucketMap[index] = self._newRepresentation(self.maxIndex,
                                                        index)
        self.maxIndex = index
      else:
        # Recursively create all the indices below and then this index
        self._createBucket(index-1)
        self._createBucket(index)


  def _newRepresentation(self, index, newIndex):
    """
    Return a new representation for newIndex that overlaps with the
    representation at index by exactly w-1 bits
    """
    newRepresentation = self.bucketMap[index].copy()

    # Choose the bit we will replace in this representation. We need to shift
    # this bit deterministically. If this is always chosen randomly then there
    # is a 1 in w chance of the same bit being replaced in neighboring
    # representations, which is fairly high
    ri = newIndex % self.w

    # Now we choose a bit such that the overlap rules are satisfied.
    newBit = self.random.getUInt32(self.n)
    newRepresentation[ri] = newBit
    # Rejection-sample until the candidate bit is not already used by the
    # neighbor and all overlap rules hold; numTries tracks retry frequency.
    while newBit in self.bucketMap[index] or \
          not self._newRepresentationOK(newRepresentation, newIndex):
      self.numTries += 1
      newBit = self.random.getUInt32(self.n)
      newRepresentation[ri] = newBit

    return newRepresentation


  def _newRepresentationOK(self, newRep, newIndex):
    """
    Return True if this new candidate representation satisfies all our overlap
    rules. Since we know that neighboring representations differ by at most
    one bit, we compute running overlaps.
    """
    # newRep is a numpy array of on-bit indices; it must have exactly w bits.
    if newRep.size != self.w:
      return False
    if (newIndex < self.minIndex-1) or (newIndex > self.maxIndex+1):
      raise ValueError("newIndex must be within one of existing indices")

    # A binary representation of newRep. We will use this to test containment
    newRepBinary = numpy.array([False]*self.n)
    newRepBinary[newRep] = True

    # Midpoint (integer division under Python 2)
    midIdx = self._maxBuckets/2

    # Start by checking the overlap at minIndex
    runningOverlap = self._countOverlap(self.bucketMap[self.minIndex], newRep)
    if not self._overlapOK(self.minIndex, newIndex, overlap=runningOverlap):
      return False

    # Compute running overlaps all the way to the midpoint
    for i in range(self.minIndex+1, midIdx+1):
      # This is the bit that is going to change
      newBit = (i-1)%self.w

      # Update our running overlap
      if newRepBinary[self.bucketMap[i-1][newBit]]:
        runningOverlap -= 1
      if newRepBinary[self.bucketMap[i][newBit]]:
        runningOverlap += 1

      # Verify our rules
      if not self._overlapOK(i, newIndex, overlap=runningOverlap):
        return False

    # At this point, runningOverlap contains the overlap for midIdx
    # Compute running overlaps all the way to maxIndex
    for i in range(midIdx+1, self.maxIndex+1):
      # This is the bit that is going to change
      newBit = i%self.w

      # Update our running overlap
      if newRepBinary[self.bucketMap[i-1][newBit]]:
        runningOverlap -= 1
      if newRepBinary[self.bucketMap[i][newBit]]:
        runningOverlap += 1

      # Verify our rules
      if not self._overlapOK(i, newIndex, overlap=runningOverlap):
        return False

    return True


  def _countOverlapIndices(self, i, j):
    """
    Return the overlap between bucket indices i and j

    @raises ValueError When either bucket index has not been created yet.
    """
    if self.bucketMap.has_key(i) and self.bucketMap.has_key(j):
      iRep = self.bucketMap[i]
      jRep = self.bucketMap[j]
      return self._countOverlap(iRep, jRep)
    else:
      raise ValueError("Either i or j don't exist")


  @staticmethod
  def _countOverlap(rep1, rep2):
    """
    Return the overlap between two representations. rep1 and rep2 are lists of
    non-zero indices.
    """
    overlap = 0
    for e in rep1:
      if e in rep2:
        overlap += 1
    return overlap


  def _overlapOK(self, i, j, overlap=None):
    """
    Return True if the given overlap between bucket indices i and j are
    acceptable. If overlap is not specified, calculate it from the bucketMap
    """
    if overlap is None:
      overlap = self._countOverlapIndices(i, j)
    # Near neighbors must overlap by exactly w - distance bits; far buckets
    # may overlap by at most _maxOverlap bits (see class docstring rules).
    if abs(i-j) < self.w:
      if overlap == (self.w - abs(i-j)):
        return True
      else:
        return False
    else:
      if overlap <= self._maxOverlap:
        return True
      else:
        return False


  def _initializeBucketMap(self, maxBuckets, offset):
    """
    Initialize the bucket map assuming the given number of maxBuckets.
    """
    # The first bucket index will be _maxBuckets / 2 and bucket indices will be
    # allowed to grow lower or higher as long as they don't become negative.
    # _maxBuckets is required because the current CLA Classifier assumes bucket
    # indices must be non-negative. This normally does not need to be changed
    # but if altered, should be set to an even number.
    self._maxBuckets = maxBuckets
    self.minIndex = self._maxBuckets / 2
    self.maxIndex = self._maxBuckets / 2

    # The scalar offset used to map scalar values to bucket indices. The middle
    # bucket will correspond to numbers in the range
    # [offset-resolution/2, offset+resolution/2).
    # The bucket index for a number x will be:
    #     maxBuckets/2 + int( round( (x-offset)/resolution ) )
    self._offset = offset

    # This dictionary maps a bucket index into its bit representation
    # We initialize the class with a single bucket with index 0
    self.bucketMap = {}

    # Local helper: a random permutation of [0, n) drawn from self.random,
    # used to pick the first bucket's w on-bits.
    def _permutation(n):
      r = numpy.arange(n, dtype=numpy.uint32)
      self.random.shuffle(r)
      return r

    self.bucketMap[self.minIndex] = _permutation(self.n)[0:self.w]

    # How often we need to retry when generating valid encodings
    self.numTries = 0


  def dump(self):
    # Debug printout of the encoder's full internal state (Python 2 prints).
    print "RandomDistributedScalarEncoder:"
    print "  minIndex:   %d" % self.minIndex
    print "  maxIndex:   %d" % self.maxIndex
    print "  w:          %d" % self.w
    print "  n:          %d" % self.getWidth()
    print "  resolution: %g" % self.resolution
    print "  offset:     %s" % str(self._offset)
    print "  numTries:   %d" % self.numTries
    print "  name:       %s" % self.name
    if self.verbosity > 2:
      print "  All buckets:     "
      pprint.pprint(self.bucketMap)


  @classmethod
  def read(cls, proto):
    # Deserialize from a serialization message; NOTE(review): `proto` appears
    # to be a Cap'n Proto message mirroring write() below — confirm schema.
    encoder = object.__new__(cls)
    encoder.resolution = proto.resolution
    encoder.w = proto.w
    encoder.n = proto.n
    encoder.name = proto.name
    encoder._offset = proto.offset
    encoder.random = NupicRandom()
    encoder.random.read(proto.random)
    encoder.resolution = proto.resolution
    encoder.verbosity = proto.verbosity
    encoder.minIndex = proto.minIndex
    encoder.maxIndex = proto.maxIndex
    encoder.encoders = None
    encoder._maxBuckets = INITIAL_BUCKETS
    encoder.bucketMap = {x.key: numpy.array(x.value, dtype=numpy.uint32)
                         for x in proto.bucketMap}

    return encoder


  def write(self, proto):
    # Serialize this encoder's state into `proto` (inverse of read()).
    proto.resolution = self.resolution
    proto.w = self.w
    proto.n = self.n
    proto.name = self.name
    proto.offset = self._offset
    self.random.write(proto.random)
    proto.verbosity = self.verbosity
    proto.minIndex = self.minIndex
    proto.maxIndex = self.maxIndex
    proto.bucketMap = [{"key": key, "value": value.tolist()}
                       for key, value in self.bucketMap.items()]
コード例 #15
0
ファイル: pattern_machine.py プロジェクト: 08s011003/nupic
class PatternMachine(object):
  """
  Base pattern machine class.

  Generates and serves a fixed pool of random sparse patterns (sets of on-bit
  indices) for use in tests. NOTE(review): written for Python 2 (xrange,
  iteritems, tuple-unpacking lambda).
  """

  def __init__(self,
               n,
               w,
               num=100,
               seed=42):
    """
    @param n    (int)      Number of available bits in pattern
    @param w    (int/list) Number of on bits in pattern
                           If list, each pattern will have a `w` randomly
                           selected from the list.
    @param num  (int)      Number of available patterns
    @param seed (int)      Seed for the random number generator
    """
    # Save member variables
    self._n = n
    self._w = w
    self._num = num

    # Initialize member variables
    # Random is NuPIC's seeded RNG (imported elsewhere in this file)
    self._random = Random(seed)
    self._patterns = dict()

    self._generate()


  def get(self, number):
    """
    Return a pattern for a number.

    @param number (int) Number of pattern

    @return (set) Indices of on bits

    @raises IndexError When `number` has no generated pattern.
    """
    if not number in self._patterns:
      raise IndexError("Invalid number")

    return self._patterns[number]


  def addNoise(self, bits, amount):
    """
    Add noise to pattern.

    @param bits   (set)   Indices of on bits
    @param amount (float) Probability of switching an on bit with a random bit

    @return (set) Indices of on bits in noisy pattern
    """
    newBits = set()

    for bit in bits:
      # With probability `amount`, replace the bit with a uniformly random one
      if self._random.getReal64() < amount:
        newBits.add(self._random.getUInt32(self._n))
      else:
        newBits.add(bit)

    return newBits


  def numbersForBit(self, bit):
    """
    Return the set of pattern numbers that match a bit.

    @param bit (int) Index of bit

    @return (set) Indices of numbers

    @raises IndexError When `bit` is outside [0, n).
    """
    if bit >= self._n:
      raise IndexError("Invalid bit")

    numbers = set()

    # Linear scan over all patterns (iteritems is Python 2 only)
    for index, pattern in self._patterns.iteritems():
      if bit in pattern:
        numbers.add(index)

    return numbers


  def numberMapForBits(self, bits):
    """
    Return a map from number to matching on bits,
    for all numbers that match a set of bits.

    @param bits (set) Indices of bits

    @return (dict) Mapping from number => on bits.
    """
    numberMap = dict()

    for bit in bits:
      numbers = self.numbersForBit(bit)

      for number in numbers:
        if not number in numberMap:
          numberMap[number] = set()

        numberMap[number].add(bit)

    return numberMap


  def prettyPrintPattern(self, bits, verbosity=1):
    """
    Pretty print a pattern.

    @param bits      (set) Indices of on bits
    @param verbosity (int) Verbosity level

    @return (string) Pretty-printed text
    """
    numberMap = self.numberMapForBits(bits)
    text = ""

    numberList = []
    # Sort matching pattern numbers by how many bits they share, descending.
    # NOTE: the tuple-unpacking lambda below is Python 2 only syntax.
    numberItems = sorted(numberMap.iteritems(),
                         key=lambda (number, bits): len(bits),
                         reverse=True)

    for number, bits in numberItems:

      if verbosity > 2:
        strBits = [str(n) for n in bits]
        numberText = "{0} (bits: {1})".format(number, ",".join(strBits))
      elif verbosity > 1:
        numberText = "{0} ({1} bits)".format(number, len(bits))
      else:
        numberText = str(number)

      numberList.append(numberText)

    text += "[{0}]".format(", ".join(numberList))

    return text


  def _generate(self):
    """
    Generates set of random patterns.
    """
    # Shuffle the full bit range and take a w-sized prefix for each pattern.
    candidates = np.array(range(self._n), np.uint32)
    for i in xrange(self._num):
      self._random.shuffle(candidates)
      pattern = candidates[0:self._getW()]
      self._patterns[i] = set(pattern)


  def _getW(self):
    """
    Gets a value of `w` for use in generating a pattern.

    When `self._w` is a list, a value is drawn from it uniformly at random;
    otherwise `self._w` itself is returned.
    """
    w = self._w

    if type(w) is list:
      return w[self._random.getUInt32(len(w))]
    else:
      return w
コード例 #16
0
ファイル: temporal_memory.py プロジェクト: OspreyX/nupic
class TemporalMemory(object):
  """
  Class implementing the Temporal Memory algorithm.
  """

  def __init__(self,
               columnDimensions=(2048,),
               cellsPerColumn=32,
               activationThreshold=13,
               learningRadius=2048,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               maxNewSynapseCount=20,
               permanenceIncrement=0.10,
               permanenceDecrement=0.10,
               seed=42):
    """
    @param columnDimensions    (list)  Dimensions of the column space
    @param cellsPerColumn      (int)   Number of cells per column
    @param activationThreshold (int)   If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active.
    @param learningRadius      (int)   Radius around cell from which it can sample to form distal dendrite connections.
    @param initialPermanence   (float) Initial permanence of a new synapse.
    @param connectedPermanence (float) If the permanence value for a synapse is greater than this value, it is said to be connected.
    @param minThreshold        (int)   If the number of synapses active on a segment is at least this threshold, it is selected as the best matching cell in a bursting column.
    @param maxNewSynapseCount  (int)   The maximum number of synapses added to a segment during learning.
    @param permanenceIncrement (float) Amount by which permanences of synapses are incremented during learning.
    @param permanenceDecrement (float) Amount by which permanences of synapses are decremented during learning.
    @param seed                (int)   Seed for the random number generator.
    """
    # Error checking
    if not len(columnDimensions):
      raise ValueError("Number of column dimensions must be greater than 0")

    if not cellsPerColumn > 0:
      raise ValueError("Number of cells per column must be greater than 0")

    # TODO: Validate all parameters (and add validation tests)

    # Save member variables
    self.columnDimensions = columnDimensions
    self.cellsPerColumn = cellsPerColumn
    self.activationThreshold = activationThreshold
    self.learningRadius = learningRadius
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.minThreshold = minThreshold
    self.maxNewSynapseCount = maxNewSynapseCount
    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement

    # Initialize member variables
    self.connections = Connections(self.numberOfCells())
    self._random = Random(seed)

    self.activeCells = set()
    self.predictiveCells = set()
    self.activeSegments = set()
    self.winnerCells = set()


  # ==============================
  # Main functions
  # ==============================

  def compute(self, activeColumns, learn=True):
    """
    Feeds input record through TM, performing inference and learning.
    Updates member variables with new state.

    @param activeColumns (set) Indices of active columns in `t`
    """
    (activeCells,
     winnerCells,
     activeSegments,
     predictiveCells,
     predictedColumns) = self.computeFn(activeColumns,
                                        self.predictiveCells,
                                        self.activeSegments,
                                        self.activeCells,
                                        self.winnerCells,
                                        self.connections,
                                        learn=learn)

    self.activeCells = activeCells
    self.winnerCells = winnerCells
    self.activeSegments = activeSegments
    self.predictiveCells = predictiveCells


  def computeFn(self,
                activeColumns,
                prevPredictiveCells,
                prevActiveSegments,
                prevActiveCells,
                prevWinnerCells,
                connections,
                learn=True):
    """
    'Functional' version of compute.
    Returns new state.

    @param activeColumns       (set)         Indices of active columns in `t`
    @param prevPredictiveCells (set)         Indices of predictive cells in `t-1`
    @param prevActiveSegments  (set)         Indices of active segments in `t-1`
    @param prevActiveCells     (set)         Indices of active cells in `t-1`
    @param prevWinnerCells     (set)         Indices of winner cells in `t-1`
    @param connections         (Connections) Connectivity of layer
    @param learn               (bool)        Whether or not learning is enabled

    @return (tuple) Contains:
                      `activeCells`     (set),
                      `winnerCells`     (set),
                      `activeSegments`  (set),
                      `predictiveCells` (set)
    """
    activeCells = set()
    winnerCells = set()

    (_activeCells,
     _winnerCells,
     predictedColumns) = self.activateCorrectlyPredictiveCells(
       prevPredictiveCells,
       activeColumns)

    activeCells.update(_activeCells)
    winnerCells.update(_winnerCells)

    (_activeCells,
     _winnerCells,
     learningSegments) = self.burstColumns(activeColumns,
                                           predictedColumns,
                                           prevActiveCells,
                                           prevWinnerCells,
                                           connections)

    activeCells.update(_activeCells)
    winnerCells.update(_winnerCells)

    if learn:
      self.learnOnSegments(prevActiveSegments,
                           learningSegments,
                           prevActiveCells,
                           winnerCells,
                           prevWinnerCells,
                           connections)

    (activeSegments,
     predictiveCells) = self.computePredictiveCells(activeCells, connections)

    return (activeCells,
            winnerCells,
            activeSegments,
            predictiveCells,
            predictedColumns)


  def reset(self):
    """
    Indicates the start of a new sequence. Resets sequence state of the TM.
    """
    self.activeCells = set()
    self.predictiveCells = set()
    self.activeSegments = set()
    self.winnerCells = set()


  # ==============================
  # Phases
  # ==============================

  def activateCorrectlyPredictiveCells(self,
                                       prevPredictiveCells,
                                       activeColumns):
    """
    Phase 1: Activate the correctly predictive cells.

    Pseudocode:

      - for each prev predictive cell
        - if in active column
          - mark it as active
          - mark it as winner cell
          - mark column as predicted

    @param prevPredictiveCells (set) Indices of predictive cells in `t-1`
    @param activeColumns       (set) Indices of active columns in `t`

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `predictedColumns` (set)
    """
    activeCells = set()
    winnerCells = set()
    predictedColumns = set()

    for cell in prevPredictiveCells:
      column = self.columnForCell(cell)

      if column in activeColumns:
        activeCells.add(cell)
        winnerCells.add(cell)
        predictedColumns.add(column)

    return activeCells, winnerCells, predictedColumns


  def burstColumns(self,
                   activeColumns,
                   predictedColumns,
                   prevActiveCells,
                   prevWinnerCells,
                   connections):
    """
    Phase 2: Burst unpredicted columns.

    Pseudocode:

      - for each unpredicted active column
        - mark all cells as active
        - mark the best matching cell as winner cell
          - (learning)
            - if it has no matching segment
              - (optimization) if there are prev winner cells
                - add a segment to it
            - mark the segment as learning

    @param activeColumns                   (set)         Indices of active columns in `t`
    @param predictedColumns                (set)         Indices of predicted columns in `t`
    @param prevActiveCells                 (set)         Indices of active cells in `t-1`
    @param prevWinnerCells                 (set)         Indices of winner cells in `t-1`
    @param connections                     (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `learningSegments` (set)
    """
    activeCells = set()
    winnerCells = set()
    learningSegments = set()

    unpredictedColumns = activeColumns - predictedColumns

    for column in unpredictedColumns:
      cells = self.cellsForColumn(column)
      activeCells.update(cells)

      (bestCell,
       bestSegment) = self.bestMatchingCell(cells,
                                            prevActiveCells,
                                            connections)
      winnerCells.add(bestCell)

      if bestSegment is None and len(prevWinnerCells):
        bestSegment = connections.createSegment(bestCell)

      if bestSegment is not None:
        learningSegments.add(bestSegment)

    return activeCells, winnerCells, learningSegments


  def learnOnSegments(self,
                      prevActiveSegments,
                      learningSegments,
                      prevActiveCells,
                      winnerCells,
                      prevWinnerCells,
                      connections):
    """
    Phase 3: Perform learning by adapting segments.

    Pseudocode:

      - (learning) for each prev active or learning segment
        - if learning segment or from winner cell
          - strengthen active synapses
          - weaken inactive synapses
        - if learning segment
          - add some synapses to the segment
            - subsample from prev winner cells

    @param prevActiveSegments           (set)         Indices of active segments in `t-1`
    @param learningSegments             (set)         Indices of learning segments in `t`
    @param prevActiveCells              (set)         Indices of active cells in `t-1`
    @param winnerCells                  (set)         Indices of winner cells in `t`
    @param prevWinnerCells              (set)         Indices of winner cells in `t-1`
    @param connections                  (Connections) Connectivity of layer
    """
    for segment in prevActiveSegments | learningSegments:
      isLearningSegment = segment in learningSegments
      isFromWinnerCell = connections.cellForSegment(segment) in winnerCells

      activeSynapses = self.activeSynapsesForSegment(
        segment, prevActiveCells, connections)

      if isLearningSegment or isFromWinnerCell:
        self.adaptSegment(segment, activeSynapses, connections)

      if isLearningSegment:
        n = self.maxNewSynapseCount - len(activeSynapses)

        for presynapticCell in self.pickCellsToLearnOn(n,
                                                       segment,
                                                       prevWinnerCells,
                                                       connections):
          connections.createSynapse(segment,
                                    presynapticCell,
                                    self.initialPermanence)


  def computePredictiveCells(self, activeCells, connections):
    """
    Phase 4: Compute predictive cells due to lateral input
    on distal dendrites.

    Pseudocode:

      - for each distal dendrite segment with activity >= activationThreshold
        - mark the segment as active
        - mark the cell as predictive

    Forward propagates activity from active cells to the synapses that touch
    them, to determine which synapses are active.

    @param activeCells (set)         Indices of active cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeSegments`  (set),
                      `predictiveCells` (set)
    """
    numActiveConnectedSynapsesForSegment = defaultdict(lambda: 0)
    activeSegments = set()
    predictiveCells = set()

    for cell in activeCells:
      for synapseData in connections.synapsesForPresynapticCell(cell).values():
        segment = synapseData.segment
        permanence = synapseData.permanence

        if permanence >= self.connectedPermanence:
          numActiveConnectedSynapsesForSegment[segment] += 1

          if (numActiveConnectedSynapsesForSegment[segment] >=
              self.activationThreshold):
            activeSegments.add(segment)
            predictiveCells.add(connections.cellForSegment(segment))

    return activeSegments, predictiveCells


  # ==============================
  # Helper functions
  # ==============================

  def bestMatchingCell(self, cells, activeCells, connections):
    """
    Gets the cell with the best matching segment
    (see `TM.bestMatchingSegment`) that has the largest number of active
    synapses of all best matching segments.

    If none were found, pick the least used cell (see `TM.leastUsedCell`).

    @param cells                       (set)         Indices of cells
    @param activeCells                 (set)         Indices of active cells
    @param connections                 (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `cell`        (int),
                      `bestSegment` (int)
    """
    maxSynapses = 0
    bestCell = None
    bestSegment = None

    for cell in cells:
      segment, numActiveSynapses = self.bestMatchingSegment(
        cell, activeCells, connections)

      if segment is not None and numActiveSynapses > maxSynapses:
        maxSynapses = numActiveSynapses
        bestCell = cell
        bestSegment = segment

    if bestCell is None:
      bestCell = self.leastUsedCell(cells, connections)

    return bestCell, bestSegment


  def bestMatchingSegment(self, cell, activeCells, connections):
    """
    Gets the segment on a cell with the largest number of activate synapses,
    including all synapses with non-zero permanences.

    @param cell                        (int)         Cell index
    @param activeCells                 (set)         Indices of active cells
    @param connections                 (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `segment`                 (int),
                      `connectedActiveSynapses` (set)
    """
    maxSynapses = self.minThreshold
    bestSegment = None
    bestNumActiveSynapses = None

    for segment in connections.segmentsForCell(cell):
      numActiveSynapses = 0

      for synapse in connections.synapsesForSegment(segment):
        synapseData = connections.dataForSynapse(synapse)
        if synapseData.presynapticCell in activeCells:
          numActiveSynapses += 1

      if numActiveSynapses >= maxSynapses:
        maxSynapses = numActiveSynapses
        bestSegment = segment
        bestNumActiveSynapses = numActiveSynapses

    return bestSegment, bestNumActiveSynapses


  def leastUsedCell(self, cells, connections):
    """
    Gets the cell with the smallest number of segments.
    Break ties randomly.

    @param cells       (set)         Indices of cells
    @param connections (Connections) Connectivity of layer

    @return (int) Cell index
    """
    leastUsedCells = set()
    minNumSegments = float("inf")

    for cell in cells:
      numSegments = len(connections.segmentsForCell(cell))

      if numSegments < minNumSegments:
        minNumSegments = numSegments
        leastUsedCells = set()

      if numSegments == minNumSegments:
        leastUsedCells.add(cell)

    i = self._random.getUInt32(len(leastUsedCells))
    return sorted(leastUsedCells)[i]


  @staticmethod
  def activeSynapsesForSegment(segment, activeCells, connections):
    """
    Returns the synapses on a segment that are active due to lateral input
    from active cells.

    @param segment     (int)         Segment index
    @param activeCells (set)         Indices of active cells
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of active synapses on segment
    """
    synapses = set()

    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)

      if synapseData.presynapticCell in activeCells:
        synapses.add(synapse)

    return synapses


  def adaptSegment(self, segment, activeSynapses, connections):
    """
    Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.

    @param segment        (int)         Segment index
    @param activeSynapses (set)         Indices of active synapses
    @param connections    (Connections) Connectivity of layer
    """
    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)
      permanence = synapseData.permanence

      if synapse in activeSynapses:
        permanence += self.permanenceIncrement
      else:
        permanence -= self.permanenceDecrement

      # Keep permanence within min/max bounds
      permanence = max(0.0, min(1.0, permanence))

      connections.updateSynapsePermanence(synapse, permanence)


  def pickCellsToLearnOn(self, n, segment, winnerCells, connections):
    """
    Pick cells to form distal connections to.

    TODO: Respect topology and learningRadius

    @param n           (int)         Number of cells to pick
    @param segment     (int)         Segment index
    @param winnerCells (set)         Indices of winner cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of cells picked
    """
    candidates = set(winnerCells)

    # Remove cells that are already synapsed on by this segment
    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)
      presynapticCell = synapseData.presynapticCell

      if presynapticCell in candidates:
        candidates.remove(presynapticCell)

    n = min(n, len(candidates))
    candidates = sorted(candidates)
    cells = set()

    # Pick n cells randomly
    for _ in range(n):
      i = self._random.getUInt32(len(candidates))
      cells.add(candidates[i])
      del candidates[i]

    return cells


  def columnForCell(self, cell):
    """
    Returns the index of the column that a cell belongs to.

    @param cell (int) Cell index

    @return (int) Column index
    """
    self._validateCell(cell)

    return int(cell / self.cellsPerColumn)


  def cellsForColumn(self, column):
    """
    Returns the indices of cells that belong to a column.

    @param column (int) Column index

    @return (set) Cell indices
    """
    self._validateColumn(column)

    start = self.cellsPerColumn * column
    end = start + self.cellsPerColumn
    return set([cell for cell in range(start, end)])


  def numberOfColumns(self):
    """
    Returns the number of columns in this layer.

    @return (int) Number of columns
    """
    return reduce(mul, self.columnDimensions, 1)


  def numberOfCells(self):
    """
    Returns the number of cells in this layer.

    @return (int) Number of cells
    """
    return self.numberOfColumns() * self.cellsPerColumn


  def mapCellsToColumns(self, cells):
    """
    Maps cells to the columns they belong to

    @param cells (set) Cells

    @return (dict) Mapping from columns to their cells in `cells`
    """
    cellsForColumns = defaultdict(set)

    for cell in cells:
      column = self.columnForCell(cell)
      cellsForColumns[column].add(cell)

    return cellsForColumns


  def _validateColumn(self, column):
    """
    Raises an error if column index is invalid.

    @param column (int) Column index
    """
    if column >= self.numberOfColumns() or column < 0:
      raise IndexError("Invalid column")


  def _validateCell(self, cell):
    """
    Raises an error if cell index is invalid.

    @param cell (int) Cell index
    """
    if cell >= self.numberOfCells() or cell < 0:
      raise IndexError("Invalid cell")


  @classmethod
  def getCellIndices(cls, cells):
    return [cls.getCellIndex(c) for c in cells]


  @staticmethod
  def getCellIndex(cell):
    return cell.idx
コード例 #17
0
class SMSequences(object):

    """
    Generates coupled sensory and motor input sequences: a simulated eye
    moves over a spatial configuration of elements, producing an encoded
    sensory input plus the encoded motor command (displacement vector)
    for each step.
    """

    def __init__(
        self,
        sensoryInputElements,
        spatialConfig,
        sensoryInputElementsPool=list("ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz0123456789"),
        minDisplacement=1,
        maxDisplacement=1,
        numActiveBitsSensoryInput=9,
        numActiveBitsMotorInput=9,
        seed=42,
        verbosity=False,
        useRandomEncoder=False,
    ):
        """
    @param sensoryInputElements       (list)
        Strings or numbers representing the sensory elements that exist in your
        world. Elements can be repeated if multiple of the same exist.

    @param spatialConfig              (numpy.array)
        Array of size: (1, len(sensoryInputElements), dimension). It has a
        coordinate for every element in sensoryInputElements.

    @param sensoryInputElementsPool   (list)
        List of strings representing a readable version of all possible sensory
        elements in this world. Elements don't need to be in any order and there
        should be no duplicates. By default this contains the set of
        alphanumeric characters.

    @param maxDisplacement            (int)
        Maximum `distance` for a motor command. Distance is defined by the
        largest difference along any coordinate dimension.

    @param minDisplacement            (int)
        Minimum `distance` for a motor command. Distance is defined by the
        largest difference along any coordinate dimension.

    @param numActiveBitsSensoryInput  (int)
        Number of active bits for each sensory input.

    @param numActiveBitsMotorInput    (int)
        Number of active bits for each dimension of the motor input.

    @param seed                       (int)
        Random seed for nupic.bindings.Random.

    @param verbosity                  (int)
        Verbosity

    @param useRandomEncoder           (boolean)
        if True, use the random encoder SDRCategoryEncoder. If False,
        use CategoryEncoder. CategoryEncoder encodes categories using contiguous
        non-overlapping bits for each category, which makes it easier to debug.
    """

        # ---------------------------------------------------------------------------------
        # Store creation parameters
        self.sensoryInputElements = sensoryInputElements
        self.sensoryInputElementsPool = sensoryInputElementsPool
        # Coordinates are kept as ints so they can be used as dict keys
        # (via tuple()) and compared exactly.
        self.spatialConfig = spatialConfig.astype(int)
        self.spatialLength = len(spatialConfig)
        self.maxDisplacement = maxDisplacement
        self.minDisplacement = minDisplacement
        self.numActiveBitsSensoryInput = numActiveBitsSensoryInput
        self.numActiveBitsMotorInput = numActiveBitsMotorInput
        self.verbosity = verbosity
        self.seed = seed

        self.initialize(useRandomEncoder)

    def initialize(self, useRandomEncoder):
        """
    Initialize the various data structures.
    """
        self.setRandomSeed(self.seed)

        # Dimensionality of the space = last axis of spatialConfig.
        self.dim = numpy.shape(self.spatialConfig)[-1]

        # Map each coordinate (as a hashable tuple) to its sensory element.
        self.spatialMap = dict(zip(map(tuple, list(self.spatialConfig)), self.sensoryInputElements))

        # Width of the 1-D motor encoding: (2 * maxDisplacement + 1)
        # displacement values times numActiveBitsMotorInput bits each.
        self.lengthMotorInput1D = (2 * self.maxDisplacement + 1) * self.numActiveBitsMotorInput

        uniqueSensoryElements = list(set(self.sensoryInputElementsPool))

        if useRandomEncoder:
            self.sensoryEncoder = SDRCategoryEncoder(
                n=1024, w=self.numActiveBitsSensoryInput, categoryList=uniqueSensoryElements, forced=True
            )
            self.lengthSensoryInput = self.sensoryEncoder.getWidth()

        else:
            # NOTE(review): "+ 1" presumably accounts for CategoryEncoder's
            # built-in "unknown" category — confirm against the encoder docs.
            self.lengthSensoryInput = (len(self.sensoryInputElementsPool) + 1) * self.numActiveBitsSensoryInput

            self.sensoryEncoder = CategoryEncoder(
                w=self.numActiveBitsSensoryInput, categoryList=uniqueSensoryElements, forced=True
            )

        # Each motor dimension is encoded by the same scalar encoder over
        # [-maxDisplacement, maxDisplacement].
        motorEncoder1D = ScalarEncoder(
            n=self.lengthMotorInput1D,
            w=self.numActiveBitsMotorInput,
            minval=-self.maxDisplacement,
            maxval=self.maxDisplacement,
            clipInput=True,
            forced=True,
        )

        self.motorEncoder = VectorEncoder(length=self.dim, encoder=motorEncoder1D)

    def generateSensorimotorSequence(self, sequenceLength):
        """
    Generate sensorimotor sequences of length sequenceLength.

    @param sequenceLength (int)
        Length of the sensorimotor sequence.

    @return (tuple) Contains:
            sensorySequence       (list)
                Encoded sensory input for whole sequence.

            motorSequence         (list)
                Encoded motor input for whole sequence.

            sensorimotorSequence  (list)
                Encoder sensorimotor input for whole sequence. This is useful
                when you want to give external input to temporal memory.
    """
        motorSequence = []
        sensorySequence = []
        sensorimotorSequence = []
        # Start the eye at a random element of the spatial configuration.
        currentEyeLoc = self.nupicRandomChoice(self.spatialConfig)

        for i in xrange(sequenceLength):

            currentSensoryInput = self.spatialMap[tuple(currentEyeLoc)]

            nextEyeLoc, currentEyeV = self.getNextEyeLocation(currentEyeLoc)

            if self.verbosity:
                print "sensory input = ", currentSensoryInput, "eye location = ", currentEyeLoc, " motor command = ", currentEyeV

            # Sensorimotor input is the concatenation of the sensory and
            # motor encodings for this step.
            sensoryInput = self.encodeSensoryInput(currentSensoryInput)
            motorInput = self.encodeMotorInput(list(currentEyeV))
            sensorimotorInput = numpy.concatenate((sensoryInput, motorInput))

            sensorySequence.append(sensoryInput)
            motorSequence.append(motorInput)
            sensorimotorSequence.append(sensorimotorInput)

            currentEyeLoc = nextEyeLoc

        return (sensorySequence, motorSequence, sensorimotorSequence)

    def encodeSensorimotorSequence(self, eyeLocs):
        """
    Encode sensorimotor sequence given the eye movements. Sequence will have
    length len(eyeLocs) - 1 because only the differences of eye locations can be
    used to encoder motor commands.

    @param eyeLocs  (list)
        Numpy coordinates describing where the eye is looking.

    @return (tuple) Contains:
            sensorySequence       (list)
                Encoded sensory input for whole sequence.

            motorSequence         (list)
                Encoded motor input for whole sequence.

            sensorimotorSequence  (list)
                Encoder sensorimotor input for whole sequence. This is useful
                when you want to give external input to temporal memory.
    """
        # One fewer step than locations: motor commands are differences.
        sequenceLength = len(eyeLocs) - 1

        motorSequence = []
        sensorySequence = []
        sensorimotorSequence = []

        for i in xrange(sequenceLength):
            currentEyeLoc = eyeLocs[i]
            nextEyeLoc = eyeLocs[i + 1]

            currentSensoryInput = self.spatialMap[currentEyeLoc]

            currentEyeV = nextEyeLoc - currentEyeLoc

            if self.verbosity:
                print "sensory input = ", currentSensoryInput, "eye location = ", currentEyeLoc, " motor command = ", currentEyeV

            sensoryInput = self.encodeSensoryInput(currentSensoryInput)
            motorInput = self.encodeMotorInput(list(currentEyeV))
            sensorimotorInput = numpy.concatenate((sensoryInput, motorInput))

            sensorySequence.append(sensoryInput)
            motorSequence.append(motorInput)
            sensorimotorSequence.append(sensorimotorInput)

        return (sensorySequence, motorSequence, sensorimotorSequence)

    def getNextEyeLocation(self, currentEyeLoc):
        """
    Generate next eye location based on current eye location.

    @param currentEyeLoc (numpy.array)
        Current coordinate describing the eye location in the world.

    @return (tuple) Contains:
            nextEyeLoc  (numpy.array)
                Coordinate of the next eye location.

            eyeDiff     (numpy.array)
                Vector describing change from currentEyeLoc to nextEyeLoc.
    """
        possibleEyeLocs = []
        for loc in self.spatialConfig:
            # NOTE(review): abs(max(...)) is not the same as max(abs(...))
            # for negative displacements; the constructor docstring defines
            # distance as the largest difference along any coordinate
            # dimension (Chebyshev), which would be max(abs(...)). Confirm
            # whether this asymmetry is intended.
            shift = abs(max(loc - currentEyeLoc))
            if self.minDisplacement <= shift <= self.maxDisplacement:
                possibleEyeLocs.append(loc)

        nextEyeLoc = self.nupicRandomChoice(possibleEyeLocs)

        eyeDiff = nextEyeLoc - currentEyeLoc

        return nextEyeLoc, eyeDiff

    def setRandomSeed(self, seed):
        """
    Reset the nupic random generator. This is necessary to reset random seed to
    generate new sequences.

    @param seed       (int)
        Seed for nupic.bindings.Random.
    """
        self.seed = seed
        self._random = Random()
        self._random.setSeed(seed)

    def nupicRandomChoice(self, array):
        """
    Chooses a random element from an array using the nupic random number
    generator.

    @param array  (list or numpy.array)
        Array to choose random element from.

    @return       (element)
        Element chosen at random.
    """
        # getUInt32(n) is used as an index, so it is assumed to return a
        # value in [0, n).
        return array[self._random.getUInt32(len(array))]

    def encodeMotorInput(self, motorInput):
        """
    Encode motor command to bit vector.

    @param motorInput (1D numpy.array)
        Motor command to be encoded.

    @return           (1D numpy.array)
        Encoded motor command.
    """
        # Wrap scalars so the vector encoder always receives an iterable.
        if not hasattr(motorInput, "__iter__"):
            motorInput = list([motorInput])

        return self.motorEncoder.encode(motorInput)

    def decodeMotorInput(self, motorInputPattern):
        """
    Decode motor command from bit vector.

    @param motorInputPattern (1D numpy.array)
        Encoded motor command.

    @return                  (1D numpy.array)
        Decoded motor command.

    """
        # NOTE(review): depends on the structure of VectorEncoder.decode()'s
        # return value (first field, first range's lower bound) — verify
        # against the encoder implementation. decode() is also called twice;
        # the result could be cached in a local.
        key = self.motorEncoder.decode(motorInputPattern)[0].keys()[0]
        motorCommand = self.motorEncoder.decode(motorInputPattern)[0][key][1][0]
        return motorCommand

    def encodeSensoryInput(self, sensoryInputElement):
        """
    Encode sensory input to bit vector

    @param sensoryElement (1D numpy.array)
        Sensory element to be encoded.

    @return               (1D numpy.array)
        Encoded sensory element.
    """
        return self.sensoryEncoder.encode(sensoryInputElement)

    def decodeSensoryInput(self, sensoryInputPattern):
        """
    Decode sensory input from bit vector.

    @param sensoryInputPattern  (1D numpy.array)
        Encoded sensory element.

    @return                     (1D numpy.array)
        Decoded sensory element.
    """
        return self.sensoryEncoder.decode(sensoryInputPattern)[0]["category"][1]

    def printSensoryCodingScheme(self):
        """
    Print sensory inputs along with their encoded versions.
    """
        print "\nsensory coding scheme: "
        for loc in self.spatialConfig:
            sensoryElement = self.spatialMap[tuple(loc)]
            print sensoryElement, "%s : " % loc,
            printSequence(self.encodeSensoryInput(sensoryElement))

    def printMotorCodingScheme(self):
        """
    Print motor commands (displacement vector) along with their encoded
    versions.
    """
        print "\nmotor coding scheme: "
        self.build(self.dim, [])

    def build(self, n, vec):
        """
    Recursive function to help print motor coding scheme.

    Depth-first enumeration of every displacement vector in
    [-maxDisplacement, maxDisplacement]^dim, printing each with its
    encoding.
    """
        for i in range(-self.maxDisplacement, self.maxDisplacement + 1):
            # NOTE(review): `next` shadows the builtin; harmless here but
            # worth renaming.
            next = vec + [i]
            if n == 1:
                print "{:>5}\t".format(next), " = ",
                printSequence(self.encodeMotorInput(next))
            else:
                self.build(n - 1, next)
コード例 #18
0
class TemporalMemory(object):
  """
  Class implementing the Temporal Memory algorithm.
  """

  def __init__(self,
               columnDimensions=(2048,),
               cellsPerColumn=32,
               activationThreshold=13,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               maxNewSynapseCount=20,
               permanenceIncrement=0.10,
               permanenceDecrement=0.10,
               predictedSegmentDecrement=0.0,
               maxSegmentsPerCell=255,
               maxSynapsesPerSegment=255,
               seed=42,
               **kwargs):
    """
    @param columnDimensions          (list)  Dimensions of the column space
    @param cellsPerColumn            (int)   Number of cells per column
    @param activationThreshold       (int)   If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active.
    @param initialPermanence         (float) Initial permanence of a new synapse.
    @param connectedPermanence       (float) If the permanence value for a synapse is greater than this value, it is said to be connected.
    @param minThreshold              (int)   If the number of synapses active on a segment is at least this threshold, it is selected as the best matching cell in a bursting column.
    @param maxNewSynapseCount        (int)   The maximum number of synapses added to a segment during learning.
    @param permanenceIncrement       (float) Amount by which permanences of synapses are incremented during learning.
    @param permanenceDecrement       (float) Amount by which permanences of synapses are decremented during learning.
    @param predictedSegmentDecrement (float) Amount by which active permanences of synapses of previously predicted but inactive segments are decremented.
    @param maxSegmentsPerCell        (int)   The maximum number of segments per cell.
    @param maxSynapsesPerSegment     (int)   The maximum number of synapses per segment.
    @param seed                      (int)   Seed for the random number generator.
    Notes:
    predictedSegmentDecrement: A good value is just a bit larger than
    (the column-level sparsity * permanenceIncrement). So, if column-level
    sparsity is 2% and permanenceIncrement is 0.01, this parameter should be
    something like 4% * 0.01 = 0.0004).
    """
    # Error checking
    if not len(columnDimensions):
      raise ValueError("Number of column dimensions must be greater than 0")

    if not cellsPerColumn > 0:
      raise ValueError("Number of cells per column must be greater than 0")

    # TODO: Validate all parameters (and add validation tests)

    # Save member variables
    self.columnDimensions = columnDimensions
    self.cellsPerColumn = cellsPerColumn
    self.activationThreshold = activationThreshold
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.minThreshold = minThreshold
    self.maxNewSynapseCount = maxNewSynapseCount
    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement
    self.predictedSegmentDecrement = predictedSegmentDecrement
    # Initialize member variables
    self.connections = Connections(self.numberOfCells(),
                                   maxSegmentsPerCell=maxSegmentsPerCell,
                                   maxSynapsesPerSegment=maxSynapsesPerSegment)
    self._random = Random(seed)

    self.activeCells = set()
    self.predictiveCells = set()
    self.activeSegments = set()
    self.winnerCells = set()
    self.matchingSegments = set()
    self.matchingCells = set()

  # ==============================
  # Main functions
  # ==============================

  def compute(self, activeColumns, learn=True):
    """
    Feeds input record through TM, performing inference and learning.
    @param activeColumns (set)  Indices of active columns
    @param learn         (bool) Whether or not learning is enabled
    Updates member variables:
      - `activeCells`     (set)
      - `winnerCells`     (set)
      - `activeSegments`  (set)
      - `predictiveCells` (set)
      - `matchingSegments`(set)
      - `matchingCells`   (set)
    """
    prevPredictiveCells = self.predictiveCells
    prevActiveSegments = self.activeSegments
    prevActiveCells = self.activeCells
    prevWinnerCells = self.winnerCells
    prevMatchingSegments = self.matchingSegments
    prevMatchingCells = self.matchingCells

    activeCells = set()
    winnerCells = set()

    (_activeCells,
     _winnerCells,
     predictedActiveColumns,
     predictedInactiveCells) = self.activateCorrectlyPredictiveCells(
       prevPredictiveCells,
       prevMatchingCells,
       activeColumns)

    activeCells.update(_activeCells)
    winnerCells.update(_winnerCells)

    (_activeCells,
     _winnerCells,
     learningSegments) = self.burstColumns(activeColumns,
                                           predictedActiveColumns,
                                           prevActiveCells,
                                           prevWinnerCells,
                                           self.connections)

    activeCells.update(_activeCells)
    winnerCells.update(_winnerCells)

    if learn:
      self.learnOnSegments(prevActiveSegments,
                           learningSegments,
                           prevActiveCells,
                           winnerCells,
                           prevWinnerCells,
                           self.connections,
                           predictedInactiveCells,
                           prevMatchingSegments)

    (activeSegments,
     predictiveCells,
     matchingSegments,
     matchingCells) = self.computePredictiveCells(activeCells, self.connections)

    self.activeCells = activeCells
    self.winnerCells = winnerCells
    self.activeSegments = activeSegments
    self.predictiveCells = predictiveCells
    self.matchingSegments = matchingSegments
    self.matchingCells = matchingCells


  def reset(self):
    """
    Indicates the start of a new sequence. Resets sequence state of the TM.
    Clears all per-timestep state that `compute` maintains.
    """
    self.activeCells = set()
    self.predictiveCells = set()
    self.activeSegments = set()
    self.winnerCells = set()
    # Also clear matching state; leaving it set would let segment matches
    # from the previous sequence leak into the first compute() of the next
    # sequence (via prevMatchingSegments / prevMatchingCells).
    self.matchingSegments = set()
    self.matchingCells = set()


  # ==============================
  # Phases
  # ==============================

  def activateCorrectlyPredictiveCells(self,
                                       prevPredictiveCells,
                                       prevMatchingCells,
                                       activeColumns):
    """
    Phase 1: Activate the correctly predictive cells.
    Pseudocode:
      - for each prev predictive cell
        - if in active column
          - mark it as active
          - mark it as winner cell
          - mark column as predicted => active
        - if not in active column
          - mark it as an predicted but inactive cell
    @param prevPredictiveCells (set) Indices of predictive cells in `t-1`
    @param prevMatchingCells   (set) Indices of matching cells in `t-1`
    @param activeColumns       (set) Indices of active columns in `t`
    @return (tuple) Contains:
                      `activeCells`               (set),
                      `winnerCells`               (set),
                      `predictedActiveColumns`    (set),
                      `predictedInactiveCells`    (set)
    """
    activeCells = set()
    winnerCells = set()
    predictedActiveColumns = set()
    predictedInactiveCells = set()

    for cell in prevPredictiveCells:
      column = self.columnForCell(cell)

      if column in activeColumns:
        activeCells.add(cell)
        winnerCells.add(cell)
        predictedActiveColumns.add(column)

    # Predicted-inactive bookkeeping is only needed when mispredictions are
    # punished.
    if self.predictedSegmentDecrement > 0:
      for cell in prevMatchingCells:
        column = self.columnForCell(cell)

        if column not in activeColumns:
          predictedInactiveCells.add(cell)

    return (activeCells,
            winnerCells,
            predictedActiveColumns,
            predictedInactiveCells)


  def burstColumns(self,
                   activeColumns,
                   predictedActiveColumns,
                   prevActiveCells,
                   prevWinnerCells,
                   connections):
    """
    Phase 2: Burst unpredicted columns.
    Pseudocode:
      - for each unpredicted active column
        - mark all cells as active
        - mark the best matching cell as winner cell
          - (learning)
            - if it has no matching segment
              - (optimization) if there are prev winner cells
                - add a segment to it
            - mark the segment as learning
    @param activeColumns                   (set)         Indices of active columns in `t`
    @param predictedActiveColumns          (set)         Indices of predicted => active columns in `t`
    @param prevActiveCells                 (set)         Indices of active cells in `t-1`
    @param prevWinnerCells                 (set)         Indices of winner cells in `t-1`
    @param connections                     (Connections) Connectivity of layer
    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `learningSegments` (set)
    """
    activeCells = set()
    winnerCells = set()
    learningSegments = set()

    unpredictedActiveColumns = activeColumns - predictedActiveColumns

    # Sort unpredictedActiveColumns before iterating for compatibility with C++
    for column in sorted(unpredictedActiveColumns):
      cells = self.cellsForColumn(column)
      activeCells.update(cells)

      (bestCell,
       bestSegment) = self.bestMatchingCell(cells,
                                            prevActiveCells,
                                            connections)
      winnerCells.add(bestCell)

      # Only grow a new segment if there are prev winner cells to connect to.
      if bestSegment is None and len(prevWinnerCells):
        bestSegment = connections.createSegment(bestCell)

      if bestSegment is not None:
        learningSegments.add(bestSegment)

    return activeCells, winnerCells, learningSegments


  def learnOnSegments(self,
                      prevActiveSegments,
                      learningSegments,
                      prevActiveCells,
                      winnerCells,
                      prevWinnerCells,
                      connections,
                      predictedInactiveCells,
                      prevMatchingSegments):
    """
    Phase 3: Perform learning by adapting segments.
    Pseudocode:
      - (learning) for each prev active or learning segment
        - if learning segment or from winner cell
          - strengthen active synapses
          - weaken inactive synapses
        - if learning segment
          - add some synapses to the segment
            - subsample from prev winner cells
      - if predictedSegmentDecrement > 0
        - for each previously matching segment
          - if cell is a predicted inactive cell
            - weaken active synapses but don't touch inactive synapses
    @param prevActiveSegments           (set)         Indices of active segments in `t-1`
    @param learningSegments             (set)         Indices of learning segments in `t`
    @param prevActiveCells              (set)         Indices of active cells in `t-1`
    @param winnerCells                  (set)         Indices of winner cells in `t`
    @param prevWinnerCells              (set)         Indices of winner cells in `t-1`
    @param connections                  (Connections) Connectivity of layer
    @param predictedInactiveCells       (set)         Indices of predicted inactive cells
    @param prevMatchingSegments         (set)         Indices of matching segments in `t-1`
    """
    segments = prevActiveSegments | learningSegments

    # Sort segments before iterating for compatibility with C++
    # Sort with primary key = cell idx, secondary key = segment idx
    segments = sorted(
      segments,
      key=lambda segment: (connections.cellForSegment(segment), segment))

    for segment in segments:
      isLearningSegment = segment in learningSegments
      isFromWinnerCell = connections.cellForSegment(segment) in winnerCells

      activeSynapses = self.activeSynapsesForSegment(
        segment, prevActiveCells, connections)

      if isLearningSegment or isFromWinnerCell:
        self.adaptSegment(segment, activeSynapses, connections,
                          self.permanenceIncrement,
                          self.permanenceDecrement)

      if isLearningSegment:
        n = self.maxNewSynapseCount - len(activeSynapses)
        # Fix for NUP #3268 is commented out for now until test failures are
        # addressed.
        # n = min(self.maxNewSynapseCount,
        #         connections.maxSynapsesPerSegment
        #         - len(connections.synapsesForSegment(segment)))

        for presynapticCell in self.pickCellsToLearnOn(n,
                                                       segment,
                                                       prevWinnerCells,
                                                       connections):
          connections.createSynapse(segment,
                                    presynapticCell,
                                    self.initialPermanence)

    # Punish segments that predicted a column that did not become active.
    if self.predictedSegmentDecrement > 0:
      for segment in prevMatchingSegments:
        isPredictedInactiveCell = connections.cellForSegment(segment) in predictedInactiveCells
        activeSynapses = self.activeSynapsesForSegment(
          segment, prevActiveCells, connections)

        if isPredictedInactiveCell:
          self.adaptSegment(segment, activeSynapses, connections,
                            -self.predictedSegmentDecrement,
                            0.0)



  def computePredictiveCells(self, activeCells, connections):
    """
    Phase 4: Compute predictive cells due to lateral input
    on distal dendrites.
    Pseudocode:
      - for each distal dendrite segment with activity >= activationThreshold
        - mark the segment as active
        - mark the cell as predictive
      - if predictedSegmentDecrement > 0
        - for each distal dendrite segment with unconnected
          activity >=  minThreshold
          - mark the segment as matching
          - mark the cell as matching
    Forward propagates activity from active cells to the synapses that touch
    them, to determine which synapses are active.
    @param activeCells (set)         Indices of active cells in `t`
    @param connections (Connections) Connectivity of layer
    @return (tuple) Contains:
                      `activeSegments`  (set),
                      `predictiveCells` (set),
                      `matchingSegments` (set),
                      `matchingCells`    (set)
    """
    numActiveConnectedSynapsesForSegment = defaultdict(int)
    numActiveSynapsesForSegment = defaultdict(int)
    activeSegments = set()
    predictiveCells = set()

    matchingSegments = set()
    matchingCells = set()

    for cell in activeCells:
      for synapseData in connections.synapsesForPresynapticCell(cell).values():
        segment = synapseData.segment
        permanence = synapseData.permanence

        # Connected synapses count toward segment activation.
        if permanence >= self.connectedPermanence:
          numActiveConnectedSynapsesForSegment[segment] += 1

          if (numActiveConnectedSynapsesForSegment[segment] >=
              self.activationThreshold):
            activeSegments.add(segment)
            predictiveCells.add(connections.cellForSegment(segment))

        # Any nonzero-permanence synapse counts toward "matching".
        if permanence > 0 and self.predictedSegmentDecrement > 0:
          numActiveSynapsesForSegment[segment] += 1

          if numActiveSynapsesForSegment[segment] >= self.minThreshold:
            matchingSegments.add(segment)
            matchingCells.add(connections.cellForSegment(segment))

    return activeSegments, predictiveCells, matchingSegments, matchingCells


  # ==============================
  # Helper functions
  # ==============================

  def bestMatchingCell(self, cells, activeCells, connections):
    """
    Gets the cell with the best matching segment
    (see `TM.bestMatchingSegment`) that has the largest number of active
    synapses of all best matching segments.
    If none were found, pick the least used cell (see `TM.leastUsedCell`).
    @param cells                       (set)         Indices of cells
    @param activeCells                 (set)         Indices of active cells
    @param connections                 (Connections) Connectivity of layer
    @return (tuple) Contains:
                      `cell`        (int),
                      `bestSegment` (int)
    """
    maxSynapses = 0
    bestCell = None
    bestSegment = None

    for cell in cells:
      segment, numActiveSynapses = self.bestMatchingSegment(
        cell, activeCells, connections)

      if segment is not None and numActiveSynapses > maxSynapses:
        maxSynapses = numActiveSynapses
        bestCell = cell
        bestSegment = segment

    # No cell had a matching segment; fall back to the least used cell.
    if bestCell is None:
      bestCell = self.leastUsedCell(cells, connections)

    return bestCell, bestSegment


  def bestMatchingSegment(self, cell, activeCells, connections):
    """
    Gets the segment on a cell with the largest number of active synapses,
    including all synapses with non-zero permanences.
    @param cell                        (int)         Cell index
    @param activeCells                 (set)         Indices of active cells
    @param connections                 (Connections) Connectivity of layer
    @return (tuple) Contains:
                      `segment`           (int) or None,
                      `numActiveSynapses` (int) or None
    """
    # A segment must have at least minThreshold active synapses to qualify.
    maxSynapses = self.minThreshold
    bestSegment = None
    bestNumActiveSynapses = None

    for segment in connections.segmentsForCell(cell):
      numActiveSynapses = 0

      for synapse in connections.synapsesForSegment(segment):
        synapseData = connections.dataForSynapse(synapse)
        if ( (synapseData.presynapticCell in activeCells) and
            synapseData.permanence > 0):
          numActiveSynapses += 1

      if numActiveSynapses >= maxSynapses:
        maxSynapses = numActiveSynapses
        bestSegment = segment
        bestNumActiveSynapses = numActiveSynapses

    return bestSegment, bestNumActiveSynapses


  def leastUsedCell(self, cells, connections):
    """
    Gets the cell with the smallest number of segments.
    Break ties randomly.
    @param cells       (set)         Indices of cells
    @param connections (Connections) Connectivity of layer
    @return (int) Cell index
    """
    leastUsedCells = set()
    minNumSegments = float("inf")

    for cell in cells:
      numSegments = len(connections.segmentsForCell(cell))

      if numSegments < minNumSegments:
        minNumSegments = numSegments
        leastUsedCells = set()

      if numSegments == minNumSegments:
        leastUsedCells.add(cell)

    # Sort before indexing so the random tie-break is deterministic for a
    # given seed (and matches the C++ implementation).
    i = self._random.getUInt32(len(leastUsedCells))
    return sorted(leastUsedCells)[i]


  @staticmethod
  def activeSynapsesForSegment(segment, activeCells, connections):
    """
    Returns the synapses on a segment that are active due to lateral input
    from active cells.
    @param segment     (int)         Segment index
    @param activeCells (set)         Indices of active cells
    @param connections (Connections) Connectivity of layer
    @return (set) Indices of active synapses on segment
    """
    synapses = set()

    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)

      if synapseData.presynapticCell in activeCells:
        synapses.add(synapse)

    return synapses


  def adaptSegment(self, segment, activeSynapses, connections,
                   permanenceIncrement, permanenceDecrement):
    """
    Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.
    @param segment              (int)         Segment index
    @param activeSynapses       (set)         Indices of active synapses
    @param connections          (Connections) Connectivity of layer
    @param permanenceIncrement  (float)  Amount to increment active synapses
    @param permanenceDecrement  (float)  Amount to decrement inactive synapses
    """
    # Need to copy synapses for segment set below because it will be modified
    # during iteration by `destroySynapse`
    for synapse in set(connections.synapsesForSegment(segment)):
      synapseData = connections.dataForSynapse(synapse)
      permanence = synapseData.permanence

      if synapse in activeSynapses:
        permanence += permanenceIncrement
      else:
        permanence -= permanenceDecrement

      # Keep permanence within min/max bounds
      permanence = max(0.0, min(1.0, permanence))

      # Destroy synapses whose permanence has effectively decayed to zero.
      if (permanence < EPSILON):
        connections.destroySynapse(synapse)
      else:
        connections.updateSynapsePermanence(synapse, permanence)


  def pickCellsToLearnOn(self, n, segment, winnerCells, connections):
    """
    Pick cells to form distal connections to.
    TODO: Respect topology and learningRadius
    @param n           (int)         Number of cells to pick
    @param segment     (int)         Segment index
    @param winnerCells (set)         Indices of winner cells in `t`
    @param connections (Connections) Connectivity of layer
    @return (set) Indices of cells picked
    """
    candidates = set(winnerCells)

    # Remove cells that are already synapsed on by this segment
    for synapse in connections.synapsesForSegment(segment):
      synapseData = connections.dataForSynapse(synapse)
      presynapticCell = synapseData.presynapticCell

      if presynapticCell in candidates:
        candidates.remove(presynapticCell)

    n = min(n, len(candidates))
    # Sort for determinism (matches the C++ implementation).
    candidates = sorted(candidates)
    cells = set()

    # Pick n cells randomly
    for _ in range(n):
      i = self._random.getUInt32(len(candidates))
      cells.add(candidates[i])
      del candidates[i]

    return cells


  def columnForCell(self, cell):
    """
    Returns the index of the column that a cell belongs to.
    @param cell (int) Cell index
    @return (int) Column index
    """
    self._validateCell(cell)

    return int(cell / self.cellsPerColumn)


  def cellsForColumn(self, column):
    """
    Returns the indices of cells that belong to a column.
    @param column (int) Column index
    @return (set) Cell indices
    """
    self._validateColumn(column)

    start = self.cellsPerColumn * self.getCellIndex(column)
    end = start + self.cellsPerColumn
    # `range` instead of `xrange`: identical once materialized into a set,
    # and works on both Python 2 and Python 3.
    return set(range(start, end))


  def numberOfColumns(self):
    """
    Returns the number of columns in this layer.
    @return (int) Number of columns
    """
    # Product of all column dimensions.
    return reduce(mul, self.columnDimensions, 1)


  def numberOfCells(self):
    """
    Returns the number of cells in this layer.
    @return (int) Number of cells
    """
    return self.numberOfColumns() * self.cellsPerColumn


  def getActiveCells(self):
    """
    Returns the indices of the active cells.
    @return (list) Indices of active cells.
    """
    return self.getCellIndices(self.activeCells)


  def getPredictiveCells(self):
    """
    Returns the indices of the predictive cells.
    @return (list) Indices of predictive cells.
    """
    return self.getCellIndices(self.predictiveCells)


  def getWinnerCells(self):
    """
    Returns the indices of the winner cells.
    @return (list) Indices of winner cells.
    """
    return self.getCellIndices(self.winnerCells)


  def getMatchingCells(self):
    """
    Returns the indices of the matching cells.
    @return (list) Indices of matching cells.
    """
    return self.getCellIndices(self.matchingCells)


  def getColumnDimensions(self):
    """
    Returns the dimensions of the columns in the region.
    @return (tuple) Column dimensions
    """
    return self.columnDimensions


  def getCellsPerColumn(self):
    """
    Returns the number of cells per column.
    @return (int) The number of cells per column.
    """
    return self.cellsPerColumn


  def getActivationThreshold(self):
    """
    Returns the activation threshold.
    @return (int) The activation threshold.
    """
    return self.activationThreshold


  def setActivationThreshold(self, activationThreshold):
    """
    Sets the activation threshold.
    @param activationThreshold (int) activation threshold.
    """
    self.activationThreshold = activationThreshold


  def getInitialPermanence(self):
    """
    Get the initial permanence.
    @return (float) The initial permanence.
    """
    return self.initialPermanence


  def setInitialPermanence(self, initialPermanence):
    """
    Sets the initial permanence.
    @param initialPermanence (float) The initial permanence.
    """
    self.initialPermanence = initialPermanence


  def getMinThreshold(self):
    """
    Returns the min threshold.
    @return (int) The min threshold.
    """
    return self.minThreshold


  def setMinThreshold(self, minThreshold):
    """
    Sets the min threshold.
    @param minThreshold (int) min threshold.
    """
    self.minThreshold = minThreshold


  def getMaxNewSynapseCount(self):
    """
    Returns the max new synapse count.
    @return (int) The max new synapse count.
    """
    return self.maxNewSynapseCount


  def setMaxNewSynapseCount(self, maxNewSynapseCount):
    """
    Sets the max new synapse count.
    @param maxNewSynapseCount (int) Max new synapse count.
    """
    self.maxNewSynapseCount = maxNewSynapseCount


  def getPermanenceIncrement(self):
    """
    Get the permanence increment.
    @return (float) The permanence increment.
    """
    return self.permanenceIncrement


  def setPermanenceIncrement(self, permanenceIncrement):
    """
    Sets the permanence increment.
    @param permanenceIncrement (float) The permanence increment.
    """
    self.permanenceIncrement = permanenceIncrement


  def getPermanenceDecrement(self):
    """
    Get the permanence decrement.
    @return (float) The permanence decrement.
    """
    return self.permanenceDecrement


  def setPermanenceDecrement(self, permanenceDecrement):
    """
    Sets the permanence decrement.
    @param permanenceDecrement (float) The permanence decrement.
    """
    self.permanenceDecrement = permanenceDecrement


  def getPredictedSegmentDecrement(self):
    """
    Get the predicted segment decrement.
    @return (float) The predicted segment decrement.
    """
    return self.predictedSegmentDecrement


  def setPredictedSegmentDecrement(self, predictedSegmentDecrement):
    """
    Sets the predicted segment decrement.
    @param predictedSegmentDecrement (float) The predicted segment decrement.
    """
    self.predictedSegmentDecrement = predictedSegmentDecrement


  def getConnectedPermanence(self):
    """
    Get the connected permanence.
    @return (float) The connected permanence.
    """
    return self.connectedPermanence


  def setConnectedPermanence(self, connectedPermanence):
    """
    Sets the connected permanence.
    @param connectedPermanence (float) The connected permanence.
    """
    self.connectedPermanence = connectedPermanence


  def mapCellsToColumns(self, cells):
    """
    Maps cells to the columns they belong to
    @param cells (set) Cells
    @return (dict) Mapping from columns to their cells in `cells`
    """
    cellsForColumns = defaultdict(set)

    for cell in cells:
      column = self.columnForCell(cell)
      cellsForColumns[column].add(cell)

    return cellsForColumns


  def write(self, proto):
    """
    Writes serialized data to proto object
    @param proto (DynamicStructBuilder) Proto object
    """
    proto.columnDimensions = self.columnDimensions
    proto.cellsPerColumn = self.cellsPerColumn
    proto.activationThreshold = self.activationThreshold
    proto.initialPermanence = self.initialPermanence
    proto.connectedPermanence = self.connectedPermanence
    proto.minThreshold = self.minThreshold
    proto.maxNewSynapseCount = self.maxNewSynapseCount
    proto.permanenceIncrement = self.permanenceIncrement
    proto.permanenceDecrement = self.permanenceDecrement
    proto.predictedSegmentDecrement = self.predictedSegmentDecrement

    self.connections.write(proto.connections)
    self._random.write(proto.random)

    proto.activeCells = list(self.activeCells)
    proto.predictiveCells = list(self.predictiveCells)
    proto.activeSegments = list(self.activeSegments)
    proto.winnerCells = list(self.winnerCells)
    proto.matchingSegments = list(self.matchingSegments)
    proto.matchingCells = list(self.matchingCells)


  @classmethod
  def read(cls, proto):
    """
    Reads deserialized data from proto object
    @param proto (DynamicStructBuilder) Proto object
    @return (TemporalMemory) TemporalMemory instance
    """
    # Bypass __init__ -- all state is restored from the proto below.
    tm = object.__new__(cls)

    tm.columnDimensions = list(proto.columnDimensions)
    tm.cellsPerColumn = int(proto.cellsPerColumn)
    tm.activationThreshold = int(proto.activationThreshold)
    tm.initialPermanence = proto.initialPermanence
    tm.connectedPermanence = proto.connectedPermanence
    tm.minThreshold = int(proto.minThreshold)
    tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
    tm.permanenceIncrement = proto.permanenceIncrement
    tm.permanenceDecrement = proto.permanenceDecrement
    tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

    tm.connections = Connections.read(proto.connections)
    tm._random = Random()
    tm._random.read(proto.random)

    tm.activeCells = set([int(x) for x in proto.activeCells])
    tm.predictiveCells = set([int(x) for x in proto.predictiveCells])
    tm.activeSegments = set([int(x) for x in proto.activeSegments])
    tm.winnerCells = set([int(x) for x in proto.winnerCells])
    tm.matchingSegments = set([int(x) for x in proto.matchingSegments])
    tm.matchingCells = set([int(x) for x in proto.matchingCells])

    return tm


  def __eq__(self, other):
    """
    Equality operator for TemporalMemory instances.
    Checks if two instances are functionally identical
    (might have different internal state).
    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
    if self.columnDimensions != other.columnDimensions: return False
    if self.cellsPerColumn != other.cellsPerColumn: return False
    if self.activationThreshold != other.activationThreshold: return False
    if abs(self.initialPermanence - other.initialPermanence) > EPSILON:
      return False
    if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON:
      return False
    if self.minThreshold != other.minThreshold: return False
    if self.maxNewSynapseCount != other.maxNewSynapseCount: return False
    if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON:
      return False
    if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON:
      return False
    if abs(self.predictedSegmentDecrement - other.predictedSegmentDecrement) > EPSILON:
      return False

    if self.connections != other.connections: return False

    # NOTE(review): activeSegments is not compared here even though
    # matchingSegments is -- confirm this asymmetry is intended.
    if self.activeCells != other.activeCells: return False
    if self.predictiveCells != other.predictiveCells: return False
    if self.winnerCells != other.winnerCells: return False
    if self.matchingSegments != other.matchingSegments: return False
    if self.matchingCells != other.matchingCells: return False

    return True


  def __ne__(self, other):
    """
    Non-equality operator for TemporalMemory instances.
    Checks if two instances are not functionally identical
    (might have different internal state).
    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
    return not self.__eq__(other)


  def _validateColumn(self, column):
    """
    Raises an error if column index is invalid.
    @param column (int) Column index
    """
    if column >= self.numberOfColumns() or column < 0:
      raise IndexError("Invalid column")


  def _validateCell(self, cell):
    """
    Raises an error if cell index is invalid.
    @param cell (int) Cell index
    """
    if cell >= self.numberOfCells() or cell < 0:
      raise IndexError("Invalid cell")


  @classmethod
  def getCellIndices(cls, cells):
    """
    Maps each cell to its index (identity mapping in this implementation).
    @param cells (iterable) Cells
    @return (list) Cell indices
    """
    return [cls.getCellIndex(c) for c in cells]


  @staticmethod
  def getCellIndex(cell):
    """
    Returns the index of a cell. Cells are already plain integer indices
    here, so this is the identity; subclasses may override.
    @param cell (int) Cell
    @return (int) Cell index
    """
    return cell
Code example #19
0
File: temporal_memory.py  Project: HuaWenCHEN/nupic
class TemporalMemory(object):
  """
  Class implementing the Temporal Memory algorithm.
  """

  def __init__(self,
               columnDimensions=(2048,),
               cellsPerColumn=32,
               activationThreshold=13,
               learningRadius=2048,
               initialPermanence=0.21,
               connectedPermanence=0.50,
               minThreshold=10,
               maxNewSynapseCount=20,
               permanenceIncrement=0.10,
               permanenceDecrement=0.10,
               seed=42):
    """
    @param columnDimensions    (list)   Dimensions of the column space

    @param cellsPerColumn      (int)    Number of cells per column

    @param activationThreshold (int)    If the number of active connected
                                        synapses on a segment is at least
                                        this threshold, the segment is
                                        said to be active.

    @param learningRadius      (int)    Radius around cell from which it can
                                        sample to form distal dendrite
                                        connections.

    @param initialPermanence   (float)  Initial permanence of a new synapse.

    @param connectedPermanence (float)  If the permanence value for a synapse
                                        is greater than this value, it is said
                                        to be connected.

    @param minThreshold        (int)    If the number of synapses active on
                                        a segment is at least this threshold,
                                        it is selected as the best matching
                                        cell in a bursting column.

    @param maxNewSynapseCount  (int)    The maximum number of synapses added
                                        to a segment during learning.

    @param permanenceIncrement (float)  Amount by which permanences of synapses
                                        are incremented during learning.

    @param permanenceDecrement (float)  Amount by which permanences of synapses
                                        are decremented during learning.

    @param seed                (int)    Seed for the random number generator.
    """
    # TODO: Validate all parameters (and add validation tests)

    # Initialize member variables
    self.connections = Connections(columnDimensions, cellsPerColumn)
    self._random = Random(seed)

    self.activeCells = set()
    self.predictiveCells = set()
    self.activeSegments = set()
    self.activeSynapsesForSegment = dict()
    self.winnerCells = set()

    # Save member variables
    self.activationThreshold = activationThreshold
    self.learningRadius = learningRadius
    self.initialPermanence = initialPermanence
    self.connectedPermanence = connectedPermanence
    self.minThreshold = minThreshold
    self.maxNewSynapseCount = maxNewSynapseCount
    self.permanenceIncrement = permanenceIncrement
    self.permanenceDecrement = permanenceDecrement


  # ==============================
  # Main functions
  # ==============================

  def compute(self, activeColumns, learn=True):
    """
    Feeds input record through TM, performing inference and learning.
    Updates member variables with new state.

    @param activeColumns (set)  Indices of active columns in `t`
    @param learn         (bool) Whether learning (segment adaptation) is
                                performed on this compute cycle
    """
    (activeCells,
     winnerCells,
     activeSynapsesForSegment,
     activeSegments,
     predictiveCells) = self.computeFn(activeColumns,
                                       self.predictiveCells,
                                       self.activeSegments,
                                       self.activeSynapsesForSegment,
                                       self.winnerCells,
                                       self.connections,
                                       learn=learn)

    self.activeCells = activeCells
    self.winnerCells = winnerCells
    self.activeSynapsesForSegment = activeSynapsesForSegment
    self.activeSegments = activeSegments
    self.predictiveCells = predictiveCells


  def computeFn(self,
                activeColumns,
                prevPredictiveCells,
                prevActiveSegments,
                prevActiveSynapsesForSegment,
                prevWinnerCells,
                connections,
                learn=True):
    """
    'Functional' version of compute.
    Returns new state.

    @param activeColumns                (set)         Indices of active columns
                                                      in `t`
    @param prevPredictiveCells          (set)         Indices of predictive
                                                      cells in `t-1`
    @param prevActiveSegments           (set)         Indices of active segments
                                                      in `t-1`
    @param prevActiveSynapsesForSegment (dict)        Mapping from segments to
                                                      active synapses in `t-1`,
                                                      see
                                                      `TM.computeActiveSynapses`
    @param prevWinnerCells              (set)         Indices of winner cells
                                                      in `t-1`
    @param connections                  (Connections) Connectivity of layer
    @param learn                        (bool)        Whether learning is
                                                      performed

    @return (tuple) Contains:
                      `activeCells`               (set),
                      `winnerCells`               (set),
                      `activeSynapsesForSegment`  (dict),
                      `activeSegments`            (set),
                      `predictiveCells`           (set)
    """
    activeCells = set()
    winnerCells = set()

    (_activeCells,
     _winnerCells,
     predictedColumns) = self.activateCorrectlyPredictiveCells(
       prevPredictiveCells,
       activeColumns,
       connections)

    activeCells.update(_activeCells)
    winnerCells.update(_winnerCells)

    (_activeCells,
     _winnerCells,
     learningSegments) = self.burstColumns(activeColumns,
                                           predictedColumns,
                                           prevActiveSynapsesForSegment,
                                           connections)

    activeCells.update(_activeCells)
    winnerCells.update(_winnerCells)

    if learn:
      self.learnOnSegments(prevActiveSegments,
                           learningSegments,
                           prevActiveSynapsesForSegment,
                           winnerCells,
                           prevWinnerCells,
                           connections)

    activeSynapsesForSegment = self.computeActiveSynapses(activeCells,
                                                          connections)

    (activeSegments,
     predictiveCells) = self.computePredictiveCells(activeSynapsesForSegment,
                                                    connections)

    return (activeCells,
            winnerCells,
            activeSynapsesForSegment,
            activeSegments,
            predictiveCells)


  def reset(self):
    """
    Indicates the start of a new sequence. Resets sequence state of the TM.
    """
    self.activeCells = set()
    self.predictiveCells = set()
    self.activeSegments = set()
    self.activeSynapsesForSegment = dict()
    self.winnerCells = set()


  # ==============================
  # Phases
  # ==============================

  @staticmethod
  def activateCorrectlyPredictiveCells(prevPredictiveCells,
                                       activeColumns,
                                       connections):
    """
    Phase 1: Activate the correctly predictive cells.

    Pseudocode:

      - for each prev predictive cell
        - if in active column
          - mark it as active
          - mark it as winner cell
          - mark column as predicted

    @param prevPredictiveCells (set) Indices of predictive cells in `t-1`
    @param activeColumns       (set) Indices of active columns in `t`

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `predictedColumns` (set)
    """
    activeCells = set()
    winnerCells = set()
    predictedColumns = set()

    for cell in prevPredictiveCells:
      column = connections.columnForCell(cell)

      if column in activeColumns:
        activeCells.add(cell)
        winnerCells.add(cell)
        predictedColumns.add(column)

    return (activeCells, winnerCells, predictedColumns)


  def burstColumns(self,
                   activeColumns,
                   predictedColumns,
                   prevActiveSynapsesForSegment,
                   connections):
    """
    Phase 2: Burst unpredicted columns.

    Pseudocode:

      - for each unpredicted active column
        - mark all cells as active
        - mark the best matching cell as winner cell
          - (learning)
            - if it has no matching segment
              - (optimization) if there are prev winner cells
                - add a segment to it
            - mark the segment as learning

    @param activeColumns                (set)         Indices of active columns
                                                      in `t`
    @param predictedColumns             (set)         Indices of predicted
                                                      columns in `t`
    @param prevActiveSynapsesForSegment (dict)        Mapping from segments to
                                                      active synapses in `t-1`,
                                                      see
                                                      `TM.computeActiveSynapses`
    @param connections                  (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `learningSegments` (set)
    """
    activeCells = set()
    winnerCells = set()
    learningSegments = set()

    unpredictedColumns = activeColumns - predictedColumns

    for column in unpredictedColumns:
      cells = connections.cellsForColumn(column)
      activeCells.update(cells)

      (bestCell,
       bestSegment) = self.getBestMatchingCell(column,
                                               prevActiveSynapsesForSegment,
                                               connections)
      winnerCells.add(bestCell)

      # PEP 8: singleton comparison uses `is`, not `==`
      if bestSegment is None:
        # TODO: (optimization) Only do this if there are prev winner cells
        bestSegment = connections.createSegment(bestCell)

      learningSegments.add(bestSegment)

    return (activeCells, winnerCells, learningSegments)


  def learnOnSegments(self,
                      prevActiveSegments,
                      learningSegments,
                      prevActiveSynapsesForSegment,
                      winnerCells,
                      prevWinnerCells,
                      connections):
    """
    Phase 3: Perform learning by adapting segments.

    Pseudocode:

      - (learning) for each prev active or learning segment
        - if learning segment or from winner cell
          - strengthen active synapses
          - weaken inactive synapses
        - if learning segment
          - add some synapses to the segment
            - subsample from prev winner cells

    @param prevActiveSegments           (set)         Indices of active segments
                                                      in `t-1`
    @param learningSegments             (set)         Indices of learning
                                                      segments in `t`
    @param prevActiveSynapsesForSegment (dict)        Mapping from segments to
                                                      active synapses in `t-1`,
                                                      see
                                                      `TM.computeActiveSynapses`
    @param winnerCells                  (set)         Indices of winner cells
                                                      in `t`
    @param prevWinnerCells              (set)         Indices of winner cells
                                                      in `t-1`
    @param connections                  (Connections) Connectivity of layer
    """
    for segment in prevActiveSegments | learningSegments:
      isLearningSegment = segment in learningSegments
      isFromWinnerCell  = connections.cellForSegment(segment) in winnerCells

      activeSynapses = self.getConnectedActiveSynapsesForSegment(
        segment,
        prevActiveSynapsesForSegment,
        0,
        connections)

      if isLearningSegment or isFromWinnerCell:
        self.adaptSegment(segment, activeSynapses, connections)

      if isLearningSegment:
        n = self.maxNewSynapseCount - len(activeSynapses)

        for sourceCell in self.pickCellsToLearnOn(n,
                                                  segment,
                                                  prevWinnerCells,
                                                  connections):
          connections.createSynapse(segment, sourceCell, self.initialPermanence)


  def computePredictiveCells(self, activeSynapsesForSegment, connections):
    """
    Phase 4: Compute predictive cells due to lateral input
    on distal dendrites.

    Pseudocode:

      - for each distal dendrite segment with activity >= activationThreshold
        - mark the segment as active
        - mark the cell as predictive

    @param activeSynapsesForSegment (dict)        Mapping from segments to
                                                  active synapses (see
                                                  `TM.computeActiveSynapses`)
    @param connections              (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeSegments`  (set),
                      `predictiveCells` (set)
    """
    activeSegments = set()
    predictiveCells = set()

    # Iterating the dict directly is equivalent to iterating .keys()
    for segment in activeSynapsesForSegment:
      synapses = self.getConnectedActiveSynapsesForSegment(
        segment,
        activeSynapsesForSegment,
        self.connectedPermanence,
        connections)

      if len(synapses) >= self.activationThreshold:
        activeSegments.add(segment)
        predictiveCells.add(connections.cellForSegment(segment))

    return (activeSegments, predictiveCells)


  # ==============================
  # Helper functions
  # ==============================

  @staticmethod
  def computeActiveSynapses(activeCells, connections):
    """
    Forward propagates activity from active cells to the synapses that touch
    them, to determine which synapses are active.

    @param activeCells (set)         Indices of active cells
    @param connections (Connections) Connectivity of layer

    @return (dict) Mapping from segment (int) to indices of
                   active synapses (set)
    """
    activeSynapsesForSegment = dict()

    for cell in activeCells:
      for synapse in connections.synapsesForSourceCell(cell):
        segment, _, _ = connections.dataForSynapse(synapse)

        if segment not in activeSynapsesForSegment:
          activeSynapsesForSegment[segment] = set()

        activeSynapsesForSegment[segment].add(synapse)

    return activeSynapsesForSegment


  def getBestMatchingCell(self, column, activeSynapsesForSegment, connections):
    """
    Gets the cell with the best matching segment
    (see `TM.getBestMatchingSegment`) that has the largest number of active
    synapses of all best matching segments.

    If none were found, pick the least used cell (see `TM.getLeastUsedCell`).

    @param column                   (int)         Column index
    @param activeSynapsesForSegment (dict)        Mapping from segments to
                                                  active synapses (see
                                                  `TM.computeActiveSynapses`)
    @param connections              (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `cell`        (int),
                      `bestSegment` (int)
    """
    maxSynapses = 0
    bestCell = None
    bestSegment = None

    cells = connections.cellsForColumn(column)

    for cell in cells:
      (
        segment,
        connectedActiveSynapses
      ) = self.getBestMatchingSegment(cell,
                                      activeSynapsesForSegment,
                                      connections)

      if segment is not None and len(connectedActiveSynapses) > maxSynapses:
        maxSynapses = len(connectedActiveSynapses)
        bestCell = cell
        bestSegment = segment

    if bestCell is None:
      bestCell = self.getLeastUsedCell(column, connections)

    return (bestCell, bestSegment)


  def getBestMatchingSegment(self, cell, activeSynapsesForSegment, connections):
    """
    Gets the segment on a cell with the largest number of active synapses,
    including all synapses with non-zero permanences.

    @param cell                     (int)         Cell index
    @param activeSynapsesForSegment (dict)        Mapping from segments to
                                                  active synapses (see
                                                  `TM.computeActiveSynapses`)
    @param connections              (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `segment`                 (int),
                      `connectedActiveSynapses` (set)
    """
    maxSynapses = self.minThreshold
    bestSegment = None
    connectedActiveSynapses = None

    for segment in connections.segmentsForCell(cell):
      synapses = self.getConnectedActiveSynapsesForSegment(
        segment,
        activeSynapsesForSegment,
        0,
        connections)

      if len(synapses) >= maxSynapses:
        maxSynapses = len(synapses)
        bestSegment = segment
        connectedActiveSynapses = set(synapses)

    return (bestSegment, connectedActiveSynapses)


  def getLeastUsedCell(self, column, connections):
    """
    Gets the cell with the smallest number of segments.
    Break ties randomly.

    @param column                   (int)         Column index
    @param connections              (Connections) Connectivity of layer

    @return (int) Cell index
    """
    cells = connections.cellsForColumn(column)
    leastUsedCells = set()
    minNumSegments = float("inf")

    for cell in cells:
      numSegments = len(connections.segmentsForCell(cell))

      if numSegments < minNumSegments:
        minNumSegments = numSegments
        leastUsedCells = set()

      if numSegments == minNumSegments:
        leastUsedCells.add(cell)

    # Sorting makes the random tie-break deterministic for a given seed
    i = self._random.getUInt32(len(leastUsedCells))
    return sorted(leastUsedCells)[i]


  @staticmethod
  def getConnectedActiveSynapsesForSegment(segment,
                                           activeSynapsesForSegment,
                                           permanenceThreshold,
                                           connections):
    """
    Returns the synapses on a segment that are active due to lateral input
    from active cells.

    @param segment                   (int)         Segment index
    @param activeSynapsesForSegment  (dict)        Mapping from segments to
                                                   active synapses (see
                                                   `TM.computeActiveSynapses`)
    @param permanenceThreshold       (float)       Minimum threshold for
                                                   permanence for synapse to
                                                   be connected
    @param connections               (Connections) Connectivity of layer

    @return (set) Indices of active synapses on segment
    """
    connectedSynapses = set()

    if segment not in activeSynapsesForSegment:
      return connectedSynapses

    # TODO: (optimization) Can skip this logic if permanenceThreshold = 0
    for synapse in activeSynapsesForSegment[segment]:
      (_, _, permanence) = connections.dataForSynapse(synapse)

      if permanence >= permanenceThreshold:
        connectedSynapses.add(synapse)

    return connectedSynapses


  def adaptSegment(self, segment, activeSynapses, connections):
    """
    Updates synapses on segment.
    Strengthens active synapses; weakens inactive synapses.

    @param segment        (int)         Segment index
    @param activeSynapses (set)         Indices of active synapses
    @param connections    (Connections) Connectivity of layer
    """
    for synapse in connections.synapsesForSegment(segment):
      (_, _, permanence) = connections.dataForSynapse(synapse)

      if synapse in activeSynapses:
        permanence += self.permanenceIncrement
      else:
        permanence -= self.permanenceDecrement

      # Keep permanence within min/max bounds
      permanence = max(0.0, min(1.0, permanence))

      connections.updateSynapsePermanence(synapse, permanence)


  def pickCellsToLearnOn(self, n, segment, winnerCells, connections):
    """
    Pick cells to form distal connections to.

    TODO: Respect topology and learningRadius

    @param n           (int)         Number of cells to pick
    @param segment     (int)         Segment index
    @param winnerCells (set)         Indices of winner cells in `t`
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of cells picked
    """
    candidates = set(winnerCells)

    # Remove cells that are already synapsed on by this segment
    for synapse in connections.synapsesForSegment(segment):
      (_, sourceCell, _) = connections.dataForSynapse(synapse)
      if sourceCell in candidates:
        candidates.remove(sourceCell)

    n = min(n, len(candidates))
    candidates = sorted(candidates)
    cells = set()

    # Pick n cells randomly
    for _ in range(n):
      i = self._random.getUInt32(len(candidates))
      cells.add(candidates[i])
      del candidates[i]

    return cells
コード例 #20
0
class RandomDistributedScalarEncoder(Encoder):
    """
  A scalar encoder encodes a numeric (floating point) value into an array
  of bits.

  This class maps a scalar value into a random distributed representation that
  is suitable as scalar input into the spatial pooler. The encoding scheme is
  designed to replace a simple ScalarEncoder. It preserves the important
  properties around overlapping representations. Unlike ScalarEncoder the min
  and max range can be dynamically increased without any negative effects. The
  only required parameter is resolution, which determines the resolution of
  input values.

  Scalar values are mapped to a bucket. The class maintains a random distributed
  encoding for each bucket. The following properties are maintained by
  RandomDistributedEncoder:

  1) Similar scalars should have high overlap. Overlap should decrease smoothly
  as scalars become less similar. Specifically, neighboring bucket indices must
  overlap by a linearly decreasing number of bits.

  2) Dissimilar scalars should have very low overlap so that the SP does not
  confuse representations. Specifically, buckets that are more than w indices
  apart should have at most maxOverlap bits of overlap. We arbitrarily (and
  safely) define "very low" to be 2 bits of overlap or lower.

  Properties 1 and 2 lead to the following overlap rules for buckets i and j:

      If abs(i-j) < w then:
        overlap(i,j) = w - abs(i-j)
      else:
        overlap(i,j) <= maxOverlap

  3) The representation for a scalar must not change during the lifetime of
  the object. Specifically, as new buckets are created and the min/max range
  is extended, the representation for previously in-range scalars and
  previously created buckets must not change.
  """
    def __init__(self,
                 resolution,
                 w=21,
                 n=400,
                 name=None,
                 offset=None,
                 seed=42,
                 verbosity=0):
        """Constructor

    @param resolution A floating point positive number denoting the resolution
                    of the output representation. Numbers within
                    [offset-resolution/2, offset+resolution/2] will fall into
                    the same bucket and thus have an identical representation.
                    Adjacent buckets will differ in one bit. resolution is a
                    required parameter.

    @param w Number of bits to set in output. w must be odd to avoid centering
                    problems.  w must be large enough that spatial pooler
                    columns will have a sufficiently large overlap to avoid
                    false matches. A value of w=21 is typical.

    @param n Number of bits in the representation (must be > w). n must be
                    large enough such that there is enough room to select
                    new representations as the range grows. With w=21 a value
                    of n=400 is typical. The class enforces n > 6*w.

    @param name An optional string which will become part of the description.

    @param offset A floating point offset used to map scalar inputs to bucket
                    indices. The middle bucket will correspond to numbers in the
                    range [offset - resolution/2, offset + resolution/2). If set
                    to None, the very first input that is encoded will be used
                    to determine the offset.

    @param seed The seed used for numpy's random number generator. If set to -1
                    the generator will be initialized without a fixed seed.

    @param verbosity An integer controlling the level of debugging output. A
                    value of 0 implies no output. verbosity=1 may lead to
                    one-time printouts during construction, serialization or
                    deserialization. verbosity=2 may lead to some output per
                    encode operation. verbosity>2 may lead to significantly
                    more output.
    """
        # Validate inputs
        if (w <= 0) or (w % 2 == 0):
            raise ValueError("w must be an odd positive integer")

        if resolution <= 0:
            raise ValueError("resolution must be a positive number")

        if (n <= 6 * w) or (not isinstance(n, int)):
            raise ValueError("n must be an int strictly greater than 6*w. For "
                             "good results we recommend n be strictly greater "
                             "than 11*w")

        self.encoders = None
        self.verbosity = verbosity
        self.w = w
        self.n = n
        self.resolution = float(resolution)

        # The largest overlap we allow for non-adjacent encodings
        self._maxOverlap = 2

        # Counts retries while searching for a valid new representation.
        # BUGFIX: _newRepresentation() does `self.numTries += 1`, which raised
        # AttributeError on the first collision because this attribute was
        # never initialized.
        self.numTries = 0

        # initialize the random number generators
        self._seed(seed)

        # Internal parameters for bucket mapping
        self.minIndex = None
        self.maxIndex = None
        self._offset = None
        self._initializeBucketMap(INITIAL_BUCKETS, offset)

        # A name used for debug printouts
        if name is not None:
            self.name = name
        else:
            self.name = "[%s]" % (self.resolution)

        if self.verbosity > 0:
            self.dump()

    def __setstate__(self, state):
        """Restore pickled state, upgrading a legacy numpy RNG if present."""
        self.__dict__.update(state)

        # Older pickles stored a raw numpy RandomState; reseed a NupicRandom
        # from it so that deserialized encoders keep a compatible generator.
        legacyRandom = state["random"]
        if isinstance(legacyRandom, numpy.random.mtrand.RandomState):
            self.random = NupicRandom(legacyRandom.randint(sys.maxint))

    def _seed(self, seed=-1):
        """
    Initialize the random seed
    """
        # A seed of -1 requests an unseeded (nondeterministic) generator.
        if seed == -1:
            self.random = NupicRandom()
        else:
            self.random = NupicRandom(seed)

    def getDecoderOutputFieldTypes(self):
        """ See method description in base.py """
        # This encoder decodes to a single floating-point field.
        return (FieldMetaType.float, )

    def getWidth(self):
        """ See method description in base.py """
        # Total number of bits in the output representation.
        return self.n

    def getDescription(self):
        """Return the (name, offset) pairs describing this encoder's output."""
        # A single field named after the encoder, starting at bit offset 0.
        return [(self.name, 0)]

    def getBucketIndices(self, x):
        """ See method description in base.py """
        # Missing data (NaN or the sentinel) maps to no bucket at all.
        if isinstance(x, float) and math.isnan(x):
            return [None]
        if x == SENTINEL_VALUE_FOR_MISSING_DATA:
            return [None]

        # The very first encoded value anchors the bucket space.
        if self._offset is None:
            self._offset = x

        bucketIdx = ((self._maxBuckets / 2) +
                     int(round((x - self._offset) / self.resolution)))

        # Clip to the valid bucket range [0, _maxBuckets).
        return [max(0, min(bucketIdx, self._maxBuckets - 1))]

    def mapBucketIndexToNonZeroBits(self, index):
        """
    Given a bucket index, return the list of non-zero bits. If the bucket
    index does not exist, it is created. If the index falls outside our range
    we clip it.
    """
        if index < 0:
            index = 0

        if index >= self._maxBuckets:
            index = self._maxBuckets - 1

        # `in` works on both Python 2 and 3; dict.has_key() was removed in 3.
        if index not in self.bucketMap:
            if self.verbosity >= 2:
                # Single parenthesized argument prints identically under the
                # Python 2 print statement and the Python 3 print function.
                print("Adding additional buckets to handle index= %s" % index)
            self._createBucket(index)
        return self.bucketMap[index]

    def encodeIntoArray(self, x, output):
        """ See method description in base.py """
        if x is not None and not isinstance(x, numbers.Number):
            raise TypeError(
                "Expected a scalar input but got input of type %s" % type(x))

        # Map the value to its bucket; None signals missing data.
        bucketIdx = self.getBucketIndices(x)[0]

        # Clear the output first; missing data leaves it all zeros.
        output[0:self.n] = 0
        if bucketIdx is None:
            return

        output[self.mapBucketIndexToNonZeroBits(bucketIdx)] = 1

    def _createBucket(self, index):
        """
    Create the given bucket index. Recursively create as many in-between
    bucket indices as necessary.
    """
        if index < self.minIndex:
            if index != self.minIndex - 1:
                # Fill the gap: create all indices above this one first
                self._createBucket(index + 1)
                self._createBucket(index)
            else:
                # Adjacent to the current minimum: derive a representation
                # overlapping the min representation in exactly w-1 bits
                self.bucketMap[index] = self._newRepresentation(
                    self.minIndex, index)
                self.minIndex = index
        else:
            if index != self.maxIndex + 1:
                # Fill the gap: create all indices below this one first
                self._createBucket(index - 1)
                self._createBucket(index)
            else:
                # Adjacent to the current maximum: derive a representation
                # overlapping the max representation in exactly w-1 bits
                self.bucketMap[index] = self._newRepresentation(
                    self.maxIndex, index)
                self.maxIndex = index

    def _newRepresentation(self, index, newIndex):
        """
    Return a new representation for newIndex that overlaps with the
    representation at index by exactly w-1 bits
    """
        newRepresentation = self.bucketMap[index].copy()

        # Choose the bit we will replace in this representation. We need to shift
        # this bit deterministically. If this is always chosen randomly then there
        # is a 1 in w chance of the same bit being replaced in neighboring
        # representations, which is fairly high
        ri = newIndex % self.w

        # Now we choose a bit such that the overlap rules are satisfied.
        newBit = self.random.getUInt32(self.n)
        newRepresentation[ri] = newBit
        # Retry until the replacement bit is absent from the source bucket and
        # the candidate passes _newRepresentationOK.
        # NOTE(review): self.numTries is not initialized anywhere in the code
        # shown here — confirm it is set (e.g. to 0 in __init__) before this
        # increment runs, otherwise the first retry raises AttributeError.
        while newBit in self.bucketMap[index] or \
              not self._newRepresentationOK(newRepresentation, newIndex):
            self.numTries += 1
            newBit = self.random.getUInt32(self.n)
            newRepresentation[ri] = newBit

        return newRepresentation

    def _newRepresentationOK(self, newRep, newIndex):
        """
    Return True if this new candidate representation satisfies all our overlap
    rules. Since we know that neighboring representations differ by at most
    one bit, we compute running overlaps.

    :param newRep: (numpy.array) candidate representation, expected to hold
        exactly w bit indices
    :param newIndex: (int) bucket index the candidate is intended for; must
        lie within one step of the existing [minIndex, maxIndex] range
    :returns: (bool) True when every existing bucket's overlap with the
        candidate passes _overlapOK
    :raises ValueError: if newIndex is more than one step outside the range
    """
        if newRep.size != self.w:
            return False
        if (newIndex < self.minIndex - 1) or (newIndex > self.maxIndex + 1):
            raise ValueError("newIndex must be within one of existing indices")

        # A binary representation of newRep. We will use this to test containment
        newRepBinary = numpy.array([False] * self.n)
        newRepBinary[newRep] = True

        # Midpoint
        # NOTE(review): `/` is Python 2 integer division here; under Python 3
        # this yields a float index — use `//` if porting.
        midIdx = self._maxBuckets / 2

        # Start by checking the overlap at minIndex
        runningOverlap = self._countOverlap(self.bucketMap[self.minIndex],
                                            newRep)
        if not self._overlapOK(self.minIndex, newIndex,
                               overlap=runningOverlap):
            return False

        # Compute running overlaps all the way to the midpoint
        for i in range(self.minIndex + 1, midIdx + 1):
            # This is the bit that is going to change
            newBit = (i - 1) % self.w

            # Update our running overlap
            if newRepBinary[self.bucketMap[i - 1][newBit]]:
                runningOverlap -= 1
            if newRepBinary[self.bucketMap[i][newBit]]:
                runningOverlap += 1

            # Verify our rules
            if not self._overlapOK(i, newIndex, overlap=runningOverlap):
                return False

        # At this point, runningOverlap contains the overlap for midIdx
        # Compute running overlaps all the way to maxIndex
        for i in range(midIdx + 1, self.maxIndex + 1):
            # This is the bit that is going to change
            newBit = i % self.w

            # Update our running overlap
            if newRepBinary[self.bucketMap[i - 1][newBit]]:
                runningOverlap -= 1
            if newRepBinary[self.bucketMap[i][newBit]]:
                runningOverlap += 1

            # Verify our rules
            if not self._overlapOK(i, newIndex, overlap=runningOverlap):
                return False

        return True

    def _countOverlapIndices(self, i, j):
        """
    Return the overlap between bucket indices i and j

    :param i: (int) first bucket index
    :param j: (int) second bucket index
    :returns: (int) number of bits the two bucket representations share
    :raises ValueError: if either index has no entry in the bucket map
    """
        # `in` replaces the Python-2-only dict.has_key(), which was removed
        # in Python 3; behavior is identical on both.
        if i in self.bucketMap and j in self.bucketMap:
            iRep = self.bucketMap[i]
            jRep = self.bucketMap[j]
            return self._countOverlap(iRep, jRep)
        else:
            raise ValueError("Either i or j don't exist")

    @staticmethod
    def _countOverlap(rep1, rep2):
        """
    Return the overlap between two representations. rep1 and rep2 are lists of
    non-zero indices.
    """
        overlap = 0
        for e in rep1:
            if e in rep2:
                overlap += 1
        return overlap

    def _overlapOK(self, i, j, overlap=None):
        """
    Return True if the given overlap between bucket indices i and j are
    acceptable. If overlap is not specified, calculate it from the bucketMap

    :param i: (int) first bucket index
    :param j: (int) second bucket index
    :param overlap: (int or None) precomputed overlap between the two
        representations; looked up via the bucket map when None
    :returns: (bool) whether the overlap obeys the encoder's overlap rules
    """
        if overlap is None:
            overlap = self._countOverlapIndices(i, j)
        # Buckets closer than w must overlap in exactly w - |i - j| bits;
        # buckets farther apart may overlap in at most _maxOverlap bits.
        if abs(i - j) < self.w:
            return overlap == (self.w - abs(i - j))
        return overlap <= self._maxOverlap

    def _initializeBucketMap(self, maxBuckets, offset):
        """
    Initialize the bucket map assuming the given number of maxBuckets.

    :param maxBuckets: (int) maximum number of buckets; should be even
    :param offset: scalar offset used to map values to bucket indices
    """
        # The first bucket index will be _maxBuckets / 2 and bucket indices will be
        # allowed to grow lower or higher as long as they don't become negative.
        # _maxBuckets is required because the current CLA Classifier assumes bucket
        # indices must be non-negative. This normally does not need to be changed
        # but if altered, should be set to an even number.
        self._maxBuckets = maxBuckets
        # Floor division keeps the midpoint an int under both Python 2 and 3;
        # plain `/` would produce a float index on Python 3.
        self.minIndex = self._maxBuckets // 2
        self.maxIndex = self._maxBuckets // 2

        # The scalar offset used to map scalar values to bucket indices. The middle
        # bucket will correspond to numbers in the range
        # [offset-resolution/2, offset+resolution/2).
        # The bucket index for a number x will be:
        #     maxBuckets/2 + int( round( (x-offset)/resolution ) )
        self._offset = offset

        # This dictionary maps a bucket index into its bit representation
        # We initialize the class with a single bucket with index 0
        self.bucketMap = {}

        def _permutation(n):
            # Random permutation of 0..n-1; the first w entries seed the
            # initial bucket's representation.
            r = numpy.arange(n, dtype=numpy.uint32)
            self.random.shuffle(r)
            return r

        self.bucketMap[self.minIndex] = _permutation(self.n)[0:self.w]

        # How often we need to retry when generating valid encodings
        self.numTries = 0

    def dump(self):
        """
    Print the encoder's diagnostic state to stdout.
    """
        # Single-argument print(...) calls behave identically under Python 2
        # (parenthesized expression) and Python 3 (function call), unlike the
        # statement form used previously.
        print("RandomDistributedScalarEncoder:")
        print("  minIndex:   %d" % self.minIndex)
        print("  maxIndex:   %d" % self.maxIndex)
        print("  w:          %d" % self.w)
        print("  n:          %d" % self.getWidth())
        print("  resolution: %g" % self.resolution)
        print("  offset:     %s" % str(self._offset))
        print("  numTries:   %d" % self.numTries)
        print("  name:       %s" % self.name)
        if self.verbosity > 2:
            print("  All buckets:     ")
            pprint.pprint(self.bucketMap)

    @classmethod
    def read(cls, proto):
        """
    Deserialize an encoder from a capnp proto object.

    :param proto: capnp proto carrying the serialized encoder state
    :returns: reconstructed encoder instance
    """
        # Bypass __init__ so state can be restored directly from the proto.
        encoder = object.__new__(cls)
        encoder.resolution = proto.resolution
        encoder.w = proto.w
        encoder.n = proto.n
        encoder.name = proto.name
        encoder._offset = proto.offset
        encoder.random = NupicRandom()
        encoder.random.read(proto.random)
        # (removed a duplicated `encoder.resolution = proto.resolution`
        # assignment that previously appeared here)
        encoder.verbosity = proto.verbosity
        encoder.minIndex = proto.minIndex
        encoder.maxIndex = proto.maxIndex
        encoder.encoders = None
        encoder._maxBuckets = INITIAL_BUCKETS
        # Rebuild the bucket map with uint32 numpy arrays, the representation
        # used throughout the rest of the class.
        encoder.bucketMap = {
            x.key: numpy.array(x.value, dtype=numpy.uint32)
            for x in proto.bucketMap
        }

        return encoder

    def write(self, proto):
        """
    Serialize this encoder's state into a capnp proto object.
    """
        proto.resolution = self.resolution
        proto.w = self.w
        proto.n = self.n
        proto.name = self.name
        proto.offset = self._offset
        self.random.write(proto.random)
        proto.verbosity = self.verbosity
        proto.minIndex = self.minIndex
        proto.maxIndex = self.maxIndex
        # Flatten the bucket map into a list of key/value records so capnp
        # can store it.
        entries = []
        for bucketIndex, representation in self.bucketMap.items():
            entries.append({
                "key": bucketIndex,
                "value": representation.tolist(),
            })
        proto.bucketMap = entries
コード例 #21
0
class SMSequences(object):
    """
  Class generates sensorimotor sequences
  """
    def __init__(self,
                 sensoryInputElements,
                 spatialConfig,
                 sensoryInputElementsPool=list(
                     "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                     "abcdefghijklmnopqrstuvwxyz0123456789"),
                 minDisplacement=1,
                 maxDisplacement=1,
                 numActiveBitsSensoryInput=9,
                 numActiveBitsMotorInput=9,
                 seed=42,
                 verbosity=False,
                 useRandomEncoder=False):
        """
    @param sensoryInputElements       (list)
        Strings or numbers representing the sensory elements that exist in your
        world. Elements can be repeated if multiple of the same exist.

    @param spatialConfig              (numpy.array)
        Array of size: (1, len(sensoryInputElements), dimension). It has a
        coordinate for every element in sensoryInputElements.

    @param sensoryInputElementsPool   (list)
        List of strings representing a readable version of all possible sensory
        elements in this world. Elements don't need to be in any order and there
        should be no duplicates. By default this contains the set of
        alphanumeric characters.

    @param maxDisplacement            (int)
        Maximum `distance` for a motor command. Distance is defined by the
        largest difference along any coordinate dimension.

    @param minDisplacement            (int)
        Minimum `distance` for a motor command. Distance is defined by the
        largest difference along any coordinate dimension.

    @param numActiveBitsSensoryInput  (int)
        Number of active bits for each sensory input.

    @param numActiveBitsMotorInput    (int)
        Number of active bits for each dimension of the motor input.

    @param seed                       (int)
        Random seed for nupic.bindings.Random.

    @param verbosity                  (int)
        Verbosity

    @param useRandomEncoder           (boolean)
        if True, use the random encoder SDRCategoryEncoder. If False,
        use CategoryEncoder. CategoryEncoder encodes categories using contiguous
        non-overlapping bits for each category, which makes it easier to debug.
    """

        #---------------------------------------------------------------------------------
        # Store creation parameters
        self.sensoryInputElements = sensoryInputElements
        self.sensoryInputElementsPool = sensoryInputElementsPool
        # Coordinates are cast to int so they can serve as dict keys (via
        # tuples) in initialize() below.
        self.spatialConfig = spatialConfig.astype(int)
        self.spatialLength = len(spatialConfig)
        self.maxDisplacement = maxDisplacement
        self.minDisplacement = minDisplacement
        self.numActiveBitsSensoryInput = numActiveBitsSensoryInput
        self.numActiveBitsMotorInput = numActiveBitsMotorInput
        self.verbosity = verbosity
        self.seed = seed

        self.initialize(useRandomEncoder)

    def initialize(self, useRandomEncoder):
        """
    Initialize the various data structures.
    """
        self.setRandomSeed(self.seed)

        # Dimensionality of the coordinate space (last axis of spatialConfig).
        self.dim = numpy.shape(self.spatialConfig)[-1]

        # Map each spatial coordinate (as a tuple) to its sensory element.
        self.spatialMap = dict(
            zip(map(tuple, list(self.spatialConfig)),
                self.sensoryInputElements))

        self.lengthMotorInput1D = (2*self.maxDisplacement + 1) * \
                                                        self.numActiveBitsMotorInput

        uniqueSensoryElements = list(set(self.sensoryInputElementsPool))

        if useRandomEncoder:
            self.sensoryEncoder = SDRCategoryEncoder(
                n=1024,
                w=self.numActiveBitsSensoryInput,
                categoryList=uniqueSensoryElements,
                forced=True)
            self.lengthSensoryInput = self.sensoryEncoder.getWidth()

        else:
            # NOTE(review): the +1 presumably reserves the encoder's extra
            # category slot — confirm against CategoryEncoder's width rules.
            self.lengthSensoryInput = (len(self.sensoryInputElementsPool)+1) * \
                                                self.numActiveBitsSensoryInput

            self.sensoryEncoder = CategoryEncoder(
                w=self.numActiveBitsSensoryInput,
                categoryList=uniqueSensoryElements,
                forced=True)

        motorEncoder1D = ScalarEncoder(n=self.lengthMotorInput1D,
                                       w=self.numActiveBitsMotorInput,
                                       minval=-self.maxDisplacement,
                                       maxval=self.maxDisplacement,
                                       clipInput=True,
                                       forced=True)

        # One scalar encoder per coordinate dimension, bundled together.
        self.motorEncoder = VectorEncoder(length=self.dim,
                                          encoder=motorEncoder1D)

    def generateSensorimotorSequence(self, sequenceLength):
        """
    Generate sensorimotor sequences of length sequenceLength.

    @param sequenceLength (int)
        Length of the sensorimotor sequence.

    @return (tuple) Contains:
            sensorySequence       (list)
                Encoded sensory input for whole sequence.

            motorSequence         (list)
                Encoded motor input for whole sequence.

            sensorimotorSequence  (list)
                Encoder sensorimotor input for whole sequence. This is useful
                when you want to give external input to temporal memory.
    """
        motorSequence = []
        sensorySequence = []
        sensorimotorSequence = []
        # Start from a random location in the spatial configuration.
        currentEyeLoc = self.nupicRandomChoice(self.spatialConfig)

        for i in xrange(sequenceLength):

            currentSensoryInput = self.spatialMap[tuple(currentEyeLoc)]

            nextEyeLoc, currentEyeV = self.getNextEyeLocation(currentEyeLoc)

            if self.verbosity:
                print "sensory input = ", currentSensoryInput, \
                  "eye location = ", currentEyeLoc, \
                  " motor command = ", currentEyeV

            sensoryInput = self.encodeSensoryInput(currentSensoryInput)
            motorInput = self.encodeMotorInput(list(currentEyeV))
            # Sensorimotor input is simply the two encodings concatenated.
            sensorimotorInput = numpy.concatenate((sensoryInput, motorInput))

            sensorySequence.append(sensoryInput)
            motorSequence.append(motorInput)
            sensorimotorSequence.append(sensorimotorInput)

            currentEyeLoc = nextEyeLoc

        return (sensorySequence, motorSequence, sensorimotorSequence)

    def encodeSensorimotorSequence(self, eyeLocs):
        """
    Encode sensorimotor sequence given the eye movements. Sequence will have
    length len(eyeLocs) - 1 because only the differences of eye locations can be
    used to encoder motor commands.

    @param eyeLocs  (list)
        Numpy coordinates describing where the eye is looking.

    @return (tuple) Contains:
            sensorySequence       (list)
                Encoded sensory input for whole sequence.

            motorSequence         (list)
                Encoded motor input for whole sequence.

            sensorimotorSequence  (list)
                Encoder sensorimotor input for whole sequence. This is useful
                when you want to give external input to temporal memory.
    """
        sequenceLength = len(eyeLocs) - 1

        motorSequence = []
        sensorySequence = []
        sensorimotorSequence = []

        for i in xrange(sequenceLength):
            currentEyeLoc = eyeLocs[i]
            nextEyeLoc = eyeLocs[i + 1]

            # NOTE(review): here the eye location is used as a dict key
            # directly (not via tuple(...) as in
            # generateSensorimotorSequence); callers must pass hashable
            # coordinates — confirm.
            currentSensoryInput = self.spatialMap[currentEyeLoc]

            # Motor command is the displacement between consecutive locations.
            currentEyeV = nextEyeLoc - currentEyeLoc

            if self.verbosity:
                print "sensory input = ", currentSensoryInput, \
                    "eye location = ", currentEyeLoc, \
                    " motor command = ", currentEyeV

            sensoryInput = self.encodeSensoryInput(currentSensoryInput)
            motorInput = self.encodeMotorInput(list(currentEyeV))
            sensorimotorInput = numpy.concatenate((sensoryInput, motorInput))

            sensorySequence.append(sensoryInput)
            motorSequence.append(motorInput)
            sensorimotorSequence.append(sensorimotorInput)

        return (sensorySequence, motorSequence, sensorimotorSequence)

    def getNextEyeLocation(self, currentEyeLoc):
        """
    Generate next eye location based on current eye location.

    @param currentEyeLoc (numpy.array)
        Current coordinate describing the eye location in the world.

    @return (tuple) Contains:
            nextEyeLoc  (numpy.array)
                Coordinate of the next eye location.

            eyeDiff     (numpy.array)
                Vector describing change from currentEyeLoc to nextEyeLoc.
    """
        possibleEyeLocs = []
        for loc in self.spatialConfig:
            # NOTE(review): abs(max(...)) is not the maximum absolute
            # difference when the largest component is negative or mixed-sign
            # (e.g. diff [-2, 1] yields shift 1, not 2); confirm whether
            # max(abs(...)) was intended.
            shift = abs(max(loc - currentEyeLoc))
            if self.minDisplacement <= shift <= self.maxDisplacement:
                possibleEyeLocs.append(loc)

        nextEyeLoc = self.nupicRandomChoice(possibleEyeLocs)

        eyeDiff = nextEyeLoc - currentEyeLoc

        return nextEyeLoc, eyeDiff

    def setRandomSeed(self, seed):
        """
    Reset the nupic random generator. This is necessary to reset random seed to
    generate new sequences.

    @param seed       (int)
        Seed for nupic.bindings.Random.
    """
        self.seed = seed
        self._random = Random()
        self._random.setSeed(seed)

    def nupicRandomChoice(self, array):
        """
    Chooses a random element from an array using the nupic random number
    generator.

    @param array  (list or numpy.array)
        Array to choose random element from.

    @return       (element)
        Element chosen at random.
    """
        return array[self._random.getUInt32(len(array))]

    def encodeMotorInput(self, motorInput):
        """
    Encode motor command to bit vector.

    @param motorInput (1D numpy.array)
        Motor command to be encoded.

    @return           (1D numpy.array)
        Encoded motor command.
    """
        # Wrap bare scalars so the vector encoder always receives a sequence.
        if not hasattr(motorInput, "__iter__"):
            motorInput = list([motorInput])

        return self.motorEncoder.encode(motorInput)

    def decodeMotorInput(self, motorInputPattern):
        """
    Decode motor command from bit vector.

    @param motorInputPattern (1D numpy.array)
        Encoded motor command.

    @return                  (1D numpy.array)
        Decoded motor command.

    """
        # NOTE(review): .keys()[0] relies on Python 2's list-returning keys();
        # a Python 3 port needs list(...)[0] or next(iter(...)).
        key = self.motorEncoder.decode(motorInputPattern)[0].keys()[0]
        motorCommand = self.motorEncoder.decode(
            motorInputPattern)[0][key][1][0]
        return motorCommand

    def encodeSensoryInput(self, sensoryInputElement):
        """
    Encode sensory input to bit vector

    @param sensoryElement (1D numpy.array)
        Sensory element to be encoded.

    @return               (1D numpy.array)
        Encoded sensory element.
    """
        return self.sensoryEncoder.encode(sensoryInputElement)

    def decodeSensoryInput(self, sensoryInputPattern):
        """
    Decode sensory input from bit vector.

    @param sensoryInputPattern  (1D numpy.array)
        Encoded sensory element.

    @return                     (1D numpy.array)
        Decoded sensory element.
    """
        return self.sensoryEncoder.decode(
            sensoryInputPattern)[0]['category'][1]

    def printSensoryCodingScheme(self):
        """
    Print sensory inputs along with their encoded versions.
    """
        print "\nsensory coding scheme: "
        for loc in self.spatialConfig:
            sensoryElement = self.spatialMap[tuple(loc)]
            print sensoryElement, "%s : " % loc,
            printSequence(self.encodeSensoryInput(sensoryElement))

    def printMotorCodingScheme(self):
        """
    Print motor commands (displacement vector) along with their encoded
    versions.
    """
        print "\nmotor coding scheme: "
        self.build(self.dim, [])

    def build(self, n, vec):
        """
    Recursive function to help print motor coding scheme.

    @param n   (int)  number of remaining dimensions to enumerate
    @param vec (list) displacement components accumulated so far
    """
        for i in range(-self.maxDisplacement, self.maxDisplacement + 1):
            # NOTE(review): 'next' shadows the builtin of the same name;
            # harmless locally, but a rename would be cleaner.
            next = vec + [i]
            if n == 1:
                print '{:>5}\t'.format(next), " = ",
                printSequence(self.encodeMotorInput(next))
            else:
                self.build(n - 1, next)
コード例 #22
0
 def testPlatformSame(self): 
   """Spot-check that the RNG stream matches a known cross-platform value."""
   r = Random(42)
   # Burn through a fixed number of draws, then compare the next value
   # against the constant recorded on a reference platform.
   for _ in range(80085):
     r.getUInt32()
   self.assertEqual(r.getUInt32(), 1651991554)
コード例 #23
0
class TemporalMemory(object):
    """
  Class implementing the Temporal Memory algorithm.
  """
    def __init__(self,
                 columnDimensions=(2048, ),
                 cellsPerColumn=32,
                 activationThreshold=13,
                 initialPermanence=0.21,
                 connectedPermanence=0.50,
                 minThreshold=10,
                 maxNewSynapseCount=20,
                 permanenceIncrement=0.10,
                 permanenceDecrement=0.10,
                 predictedSegmentDecrement=0.0,
                 maxSegmentsPerCell=255,
                 maxSynapsesPerSegment=255,
                 seed=42,
                 **kwargs):
        """
    @param columnDimensions          (list)  Dimensions of the column space
    @param cellsPerColumn            (int)   Number of cells per column
    @param activationThreshold       (int)   If the number of active connected synapses on a segment is at least this threshold, the segment is said to be active.
    @param initialPermanence         (float) Initial permanence of a new synapse.
    @param connectedPermanence       (float) If the permanence value for a synapse is greater than this value, it is said to be connected.
    @param minThreshold              (int)   If the number of synapses active on a segment is at least this threshold, it is selected as the best matching cell in a bursting column.
    @param maxNewSynapseCount        (int)   The maximum number of synapses added to a segment during learning.
    @param permanenceIncrement       (float) Amount by which permanences of synapses are incremented during learning.
    @param permanenceDecrement       (float) Amount by which permanences of synapses are decremented during learning.
    @param predictedSegmentDecrement (float) Amount by which active permanences of synapses of previously predicted but inactive segments are decremented.
    @param seed                      (int)   Seed for the random number generator.

    Notes:

    predictedSegmentDecrement: A good value is just a bit larger than
    (the column-level sparsity * permanenceIncrement). So, if column-level
    sparsity is 2% and permanenceIncrement is 0.01, this parameter should be
    something like 4% * 0.01 = 0.0004).
    """
        # Error checking
        if not len(columnDimensions):
            raise ValueError(
                "Number of column dimensions must be greater than 0")

        if not cellsPerColumn > 0:
            raise ValueError(
                "Number of cells per column must be greater than 0")

        # TODO: Validate all parameters (and add validation tests)

        # Save member variables
        self.columnDimensions = columnDimensions
        self.cellsPerColumn = cellsPerColumn
        self.activationThreshold = activationThreshold
        self.initialPermanence = initialPermanence
        self.connectedPermanence = connectedPermanence
        self.minThreshold = minThreshold
        self.maxNewSynapseCount = maxNewSynapseCount
        self.permanenceIncrement = permanenceIncrement
        self.permanenceDecrement = permanenceDecrement
        self.predictedSegmentDecrement = predictedSegmentDecrement
        # Initialize member variables
        self.connections = Connections(
            self.numberOfCells(),
            maxSegmentsPerCell=maxSegmentsPerCell,
            maxSynapsesPerSegment=maxSynapsesPerSegment)
        self._random = Random(seed)

        self.activeCells = set()
        self.predictiveCells = set()
        self.activeSegments = set()
        self.winnerCells = set()
        self.matchingSegments = set()
        self.matchingCells = set()

    # ==============================
    # Main functions
    # ==============================

    def compute(self, activeColumns, learn=True):
        """
    Feeds input record through TM, performing inference and learning.

    @param activeColumns (set)  Indices of active columns
    @param learn         (bool) Whether or not learning is enabled

    Updates member variables:
      - `activeCells`     (set)
      - `winnerCells`     (set)
      - `activeSegments`  (set)
      - `predictiveCells` (set)
      - `matchingSegments`(set)
      - `matchingCells`   (set)
    """
        # Bind the t-1 state to locals before the member fields are
        # reassigned at the end of this method.
        prevPredictiveCells = self.predictiveCells
        prevActiveSegments = self.activeSegments
        prevActiveCells = self.activeCells
        prevWinnerCells = self.winnerCells
        prevMatchingSegments = self.matchingSegments
        prevMatchingCells = self.matchingCells

        activeCells = set()
        winnerCells = set()

        # Phase 1: cells that were correctly predicted become active/winners.
        (_activeCells, _winnerCells, predictedActiveColumns,
         predictedInactiveCells) = self.activateCorrectlyPredictiveCells(
             prevPredictiveCells, prevMatchingCells, activeColumns)

        activeCells.update(_activeCells)
        winnerCells.update(_winnerCells)

        # Phase 2: unpredicted active columns burst.
        (_activeCells, _winnerCells, learningSegments) = self.burstColumns(
            activeColumns, predictedActiveColumns, prevActiveCells,
            prevWinnerCells, self.connections)

        activeCells.update(_activeCells)
        winnerCells.update(_winnerCells)

        # Phase 3: adapt segments (only when learning is enabled).
        if learn:
            self.learnOnSegments(prevActiveSegments, learningSegments,
                                 prevActiveCells, winnerCells, prevWinnerCells,
                                 self.connections, predictedInactiveCells,
                                 prevMatchingSegments)

        # Phase 4: compute predictions for the next timestep from the new
        # active cells.
        (activeSegments, predictiveCells, matchingSegments,
         matchingCells) = self.computePredictiveCells(activeCells,
                                                      self.connections)

        # Commit the new t state.
        self.activeCells = activeCells
        self.winnerCells = winnerCells
        self.activeSegments = activeSegments
        self.predictiveCells = predictiveCells
        self.matchingSegments = matchingSegments
        self.matchingCells = matchingCells

    def reset(self):
        """
    Indicates the start of a new sequence. Resets sequence state of the TM.
    """
        self.activeCells = set()
        self.predictiveCells = set()
        self.activeSegments = set()
        self.winnerCells = set()

    # ==============================
    # Phases
    # ==============================

    def activateCorrectlyPredictiveCells(self, prevPredictiveCells,
                                         prevMatchingCells, activeColumns):
        """
    Phase 1: Activate the correctly predictive cells.

    Pseudocode:

      - for each prev predictive cell
        - if in active column
          - mark it as active
          - mark it as winner cell
          - mark column as predicted => active
        - if not in active column
          - mark it as an predicted but inactive cell

    @param prevPredictiveCells (set) Indices of predictive cells in `t-1`
    @param activeColumns       (set) Indices of active columns in `t`

    @return (tuple) Contains:
                      `activeCells`               (set),
                      `winnerCells`               (set),
                      `predictedActiveColumns`    (set),
                      `predictedInactiveCells`    (set)
    """
        activeCells = set()
        winnerCells = set()
        predictedActiveColumns = set()
        predictedInactiveCells = set()

        for cell in prevPredictiveCells:
            column = self.columnForCell(cell)

            if column in activeColumns:
                activeCells.add(cell)
                winnerCells.add(cell)
                predictedActiveColumns.add(column)

        if self.predictedSegmentDecrement > 0:
            for cell in prevMatchingCells:
                column = self.columnForCell(cell)

                if column not in activeColumns:
                    predictedInactiveCells.add(cell)

        return (activeCells, winnerCells, predictedActiveColumns,
                predictedInactiveCells)

    def burstColumns(self, activeColumns, predictedActiveColumns,
                     prevActiveCells, prevWinnerCells, connections):
        """
    Phase 2: Burst unpredicted columns.

    Pseudocode:

      - for each unpredicted active column
        - mark all cells as active
        - mark the best matching cell as winner cell
          - (learning)
            - if it has no matching segment
              - (optimization) if there are prev winner cells
                - add a segment to it
            - mark the segment as learning

    @param activeColumns                   (set)         Indices of active columns in `t`
    @param predictedActiveColumns          (set)         Indices of predicted => active columns in `t`
    @param prevActiveCells                 (set)         Indices of active cells in `t-1`
    @param prevWinnerCells                 (set)         Indices of winner cells in `t-1`
    @param connections                     (Connections) Connectivity of layer

    @return (tuple) Contains:
                      `activeCells`      (set),
                      `winnerCells`      (set),
                      `learningSegments` (set)
    """
        activeCells = set()
        winnerCells = set()
        learningSegments = set()

        unpredictedActiveColumns = activeColumns - predictedActiveColumns

        for column in unpredictedActiveColumns:
            cells = self.cellsForColumn(column)
            activeCells.update(cells)

            (bestCell,
             bestSegment) = self.bestMatchingCell(cells, prevActiveCells,
                                                  connections)
            winnerCells.add(bestCell)

            if bestSegment is None and len(prevWinnerCells):
                bestSegment = connections.createSegment(bestCell)

            if bestSegment is not None:
                learningSegments.add(bestSegment)

        return activeCells, winnerCells, learningSegments

    def learnOnSegments(self, prevActiveSegments, learningSegments,
                        prevActiveCells, winnerCells, prevWinnerCells,
                        connections, predictedInactiveCells,
                        prevMatchingSegments):
        """
    Phase 3: Perform learning by adapting segments.

    Pseudocode:

      - (learning) for each prev active or learning segment
        - if learning segment or from winner cell
          - strengthen active synapses
          - weaken inactive synapses
        - if learning segment
          - add some synapses to the segment
            - subsample from prev winner cells

      - if predictedSegmentDecrement > 0
        - for each previously matching segment
          - if cell is a predicted inactive cell
            - weaken active synapses but don't touch inactive synapses

    @param prevActiveSegments           (set)         Indices of active segments in `t-1`
    @param learningSegments             (set)         Indices of learning segments in `t`
    @param prevActiveCells              (set)         Indices of active cells in `t-1`
    @param winnerCells                  (set)         Indices of winner cells in `t`
    @param prevWinnerCells              (set)         Indices of winner cells in `t-1`
    @param connections                  (Connections) Connectivity of layer
    @param predictedInactiveCells       (set)         Indices of predicted inactive cells
    @param prevMatchingSegments         (set)         Indices of segments with
    """
        for segment in prevActiveSegments | learningSegments:
            isLearningSegment = segment in learningSegments
            isFromWinnerCell = connections.cellForSegment(
                segment) in winnerCells

            activeSynapses = self.activeSynapsesForSegment(
                segment, prevActiveCells, connections)

            # Reinforce segments that are learning or belong to winner cells.
            if isLearningSegment or isFromWinnerCell:
                self.adaptSegment(segment, activeSynapses, connections,
                                  self.permanenceIncrement,
                                  self.permanenceDecrement)

            # Learning segments also grow new synapses, subsampled from the
            # previous winner cells, up to maxNewSynapseCount total.
            if isLearningSegment:
                n = self.maxNewSynapseCount - len(activeSynapses)

                for presynapticCell in self.pickCellsToLearnOn(
                        n, segment, prevWinnerCells, connections):
                    connections.createSynapse(segment, presynapticCell,
                                              self.initialPermanence)

        # Punish segments that predicted activity which never arrived; note
        # the negative increment and zero decrement leave inactive synapses
        # untouched.
        if self.predictedSegmentDecrement > 0:
            for segment in prevMatchingSegments:
                isPredictedInactiveCell = connections.cellForSegment(
                    segment) in predictedInactiveCells
                activeSynapses = self.activeSynapsesForSegment(
                    segment, prevActiveCells, connections)

                if isPredictedInactiveCell:
                    self.adaptSegment(segment, activeSynapses, connections,
                                      -self.predictedSegmentDecrement, 0.0)

    def computePredictiveCells(self, activeCells, connections):
        """
        Phase 4: Compute predictive cells due to lateral input
        on distal dendrites.

        Forward-propagates activity from the active cells to the synapses
        touching them.  A segment whose count of active *connected* synapses
        (permanence >= `connectedPermanence`) reaches `activationThreshold`
        becomes active and marks its cell predictive.  When
        `predictedSegmentDecrement` > 0, a segment whose count of active
        synapses with any nonzero permanence reaches `minThreshold` becomes
        matching and marks its cell matching.

        @param activeCells (set)         Indices of active cells in `t`
        @param connections (Connections) Connectivity of layer

        @return (tuple) Contains:
                          `activeSegments`   (set),
                          `predictiveCells`  (set),
                          `matchingSegments` (set),
                          `matchingCells`    (set)
        """
        # Per-segment tallies of active synapses, split by connectedness.
        connectedCounts = defaultdict(int)
        potentialCounts = defaultdict(int)

        activeSegments = set()
        predictiveCells = set()
        matchingSegments = set()
        matchingCells = set()

        # Matching bookkeeping is only needed when punishment is enabled.
        trackMatching = self.predictedSegmentDecrement > 0

        for presynapticCell in activeCells:
            synapseTable = connections.synapsesForPresynapticCell(
                presynapticCell)

            for synapseData in synapseTable.values():
                segment = synapseData.segment
                permanence = synapseData.permanence

                if permanence >= self.connectedPermanence:
                    connectedCounts[segment] += 1

                    if connectedCounts[segment] >= self.activationThreshold:
                        activeSegments.add(segment)
                        predictiveCells.add(
                            connections.cellForSegment(segment))

                if trackMatching and permanence > 0:
                    potentialCounts[segment] += 1

                    if potentialCounts[segment] >= self.minThreshold:
                        matchingSegments.add(segment)
                        matchingCells.add(
                            connections.cellForSegment(segment))

        return activeSegments, predictiveCells, matchingSegments, matchingCells

    # ==============================
    # Helper functions
    # ==============================

    def bestMatchingCell(self, cells, activeCells, connections):
        """
        Gets the cell with the best matching segment
        (see `TM.bestMatchingSegment`) that has the largest number of active
        synapses of all best matching segments.

        If none were found, pick the least used cell (see `TM.leastUsedCell`).

        @param cells       (set)         Indices of cells
        @param activeCells (set)         Indices of active cells
        @param connections (Connections) Connectivity of layer

        @return (tuple) Contains:
                          `cell`        (int),
                          `bestSegment` (int)
        """
        bestCell = None
        bestSegment = None
        bestCount = 0

        for candidate in cells:
            segment, count = self.bestMatchingSegment(
                candidate, activeCells, connections)

            # Strictly greater: the first candidate seen wins ties.
            if segment is not None and count > bestCount:
                bestCell = candidate
                bestSegment = segment
                bestCount = count

        if bestCell is not None:
            return bestCell, bestSegment

        # No cell had a qualifying segment; fall back to the least used cell
        # (bestSegment is still None in this case).
        return self.leastUsedCell(cells, connections), bestSegment

    def bestMatchingSegment(self, cell, activeCells, connections):
        """
        Gets the segment on a cell with the largest number of active synapses,
        counting every synapse with a nonzero permanence.

        Only segments with at least `minThreshold` active synapses qualify;
        ties go to the segment visited last.

        @param cell        (int)         Cell index
        @param activeCells (set)         Indices of active cells
        @param connections (Connections) Connectivity of layer

        @return (tuple) Contains:
                          `segment`               (int or None),
                          `bestNumActiveSynapses` (int or None)
        """
        bestSegment = None
        bestCount = None
        # Doubles as the qualification bar and the running maximum.
        threshold = self.minThreshold

        for segment in connections.segmentsForCell(cell):
            count = 0

            for synapse in connections.synapsesForSegment(segment):
                data = connections.dataForSynapse(synapse)
                if data.presynapticCell in activeCells and data.permanence > 0:
                    count += 1

            # `>=` keeps the most recently visited segment on ties.
            if count >= threshold:
                threshold = count
                bestSegment = segment
                bestCount = count

        return bestSegment, bestCount

    def leastUsedCell(self, cells, connections):
        """
        Gets the cell with the fewest segments, breaking ties randomly.

        @param cells       (set)         Indices of cells
        @param connections (Connections) Connectivity of layer

        @return (int) Cell index
        """
        fewest = float("inf")
        candidates = set()

        for cell in cells:
            segmentCount = len(connections.segmentsForCell(cell))

            if segmentCount < fewest:
                # New minimum: restart the candidate pool with this cell.
                fewest = segmentCount
                candidates = {cell}
            elif segmentCount == fewest:
                candidates.add(cell)

        # Sort for a deterministic ordering before the random draw.
        choice = self._random.getUInt32(len(candidates))
        return sorted(candidates)[choice]

    @staticmethod
    def activeSynapsesForSegment(segment, activeCells, connections):
        """
    Returns the synapses on a segment that are active due to lateral input
    from active cells.

    @param segment     (int)         Segment index
    @param activeCells (set)         Indices of active cells
    @param connections (Connections) Connectivity of layer

    @return (set) Indices of active synapses on segment
    """
        synapses = set()

        for synapse in connections.synapsesForSegment(segment):
            synapseData = connections.dataForSynapse(synapse)

            if synapseData.presynapticCell in activeCells:
                synapses.add(synapse)

        return synapses

    def adaptSegment(self, segment, activeSynapses, connections,
                     permanenceIncrement, permanenceDecrement):
        """
        Updates synapses on segment.
        Strengthens active synapses; weakens inactive synapses.

        Synapses whose resulting permanence is (effectively) zero are
        destroyed; all others get their new permanence stored.

        @param segment              (int)         Segment index
        @param activeSynapses       (set)         Indices of active synapses
        @param connections          (Connections) Connectivity of layer
        @param permanenceIncrement  (float)  Amount to increment active synapses
        @param permanenceDecrement  (float)  Amount to decrement inactive synapses
        """
        # Snapshot the synapse collection: `destroySynapse` mutates it while
        # we iterate.
        for synapse in set(connections.synapsesForSegment(segment)):
            delta = (permanenceIncrement if synapse in activeSynapses
                     else -permanenceDecrement)
            permanence = connections.dataForSynapse(synapse).permanence + delta

            # Clamp to the legal [0, 1] range.
            permanence = min(1.0, max(0.0, permanence))

            if abs(permanence) < EPSILON:
                connections.destroySynapse(synapse)
            else:
                connections.updateSynapsePermanence(synapse, permanence)

    def pickCellsToLearnOn(self, n, segment, winnerCells, connections):
        """
        Pick cells to form distal connections to.

        TODO: Respect topology and learningRadius

        @param n           (int)         Number of cells to pick
        @param segment     (int)         Segment index
        @param winnerCells (set)         Indices of winner cells in `t`
        @param connections (Connections) Connectivity of layer

        @return (set) Indices of cells picked
        """
        # Cells this segment already synapses onto are not eligible.
        alreadyConnected = {
            connections.dataForSynapse(synapse).presynapticCell
            for synapse in connections.synapsesForSegment(segment)
        }
        # Sort for a deterministic ordering before the random draws.
        eligible = sorted(set(winnerCells) - alreadyConnected)

        # Draw up to n cells at random, without replacement.
        picked = set()
        for _ in range(min(n, len(eligible))):
            index = self._random.getUInt32(len(eligible))
            picked.add(eligible.pop(index))

        return picked

    def columnForCell(self, cell):
        """
        Returns the index of the column that a cell belongs to.

        @param cell (int) Cell index

        @return (int) Column index

        @raises IndexError if `cell` is out of range
        """
        self._validateCell(cell)

        # Floor division instead of `int(cell / n)`: under Python 3 true
        # division goes through a float, which loses precision for very
        # large cell indices.  For valid (non-negative) cells the results
        # are otherwise identical, in both Python 2 and 3.
        return cell // self.cellsPerColumn

    def cellsForColumn(self, column):
        """
        Returns the indices of cells that belong to a column.

        @param column (int) Column index

        @return (set) Cell indices

        @raises IndexError if `column` is out of range
        """
        self._validateColumn(column)

        # `range` (rather than the Python 2-only `xrange`) matches the usage
        # elsewhere in this file and keeps the method portable to Python 3;
        # the resulting set is identical.
        start = self.cellsPerColumn * self.getCellIndex(column)
        end = start + self.cellsPerColumn
        return set(range(start, end))

    def numberOfColumns(self):
        """
        Returns the number of columns in this layer, i.e. the product of
        all column dimensions (1 for an empty dimension list).

        @return (int) Number of columns
        """
        total = 1
        for dimension in self.columnDimensions:
            total *= dimension
        return total

    def numberOfCells(self):
        """
        Returns the number of cells in this layer: one group of
        `cellsPerColumn` cells for every column.

        @return (int) Number of cells
        """
        columns = self.numberOfColumns()
        return columns * self.cellsPerColumn

    def getActiveCells(self):
        """
        Accessor for the current set of active cells.

        @return (list) Indices of active cells.
        """
        cells = self.activeCells
        return self.getCellIndices(cells)

    def getPredictiveCells(self):
        """
        Accessor for the current set of predictive cells.

        @return (list) Indices of predictive cells.
        """
        cells = self.predictiveCells
        return self.getCellIndices(cells)

    def getWinnerCells(self):
        """
        Accessor for the current set of winner cells.

        @return (list) Indices of winner cells.
        """
        cells = self.winnerCells
        return self.getCellIndices(cells)

    def getMatchingCells(self):
        """
        Accessor for the current set of matching cells.

        @return (list) Indices of matching cells.
        """
        cells = self.matchingCells
        return self.getCellIndices(cells)

    def mapCellsToColumns(self, cells):
        """
        Maps cells to the columns they belong to.

        @param cells (set) Cells

        @return (dict) Mapping from columns to their cells in `cells`
        """
        mapping = defaultdict(set)

        for cell in cells:
            mapping[self.columnForCell(cell)].add(cell)

        return mapping

    def write(self, proto):
        """
    Writes serialized data to proto object.

    Field order mirrors the proto schema; every field restored by `read`
    is written here.

    @param proto (DynamicStructBuilder) Proto object
    """
        # Scalar layer parameters.
        proto.columnDimensions = self.columnDimensions
        proto.cellsPerColumn = self.cellsPerColumn
        proto.activationThreshold = self.activationThreshold
        proto.initialPermanence = self.initialPermanence
        proto.connectedPermanence = self.connectedPermanence
        proto.minThreshold = self.minThreshold
        proto.maxNewSynapseCount = self.maxNewSynapseCount
        proto.permanenceIncrement = self.permanenceIncrement
        proto.permanenceDecrement = self.permanenceDecrement
        proto.predictedSegmentDecrement = self.predictedSegmentDecrement

        # Nested objects serialize themselves into their own proto slots.
        self.connections.write(proto.connections)
        self._random.write(proto.random)

        # Per-timestep state; converted from sets to lists for serialization.
        proto.activeCells = list(self.activeCells)
        proto.predictiveCells = list(self.predictiveCells)
        proto.activeSegments = list(self.activeSegments)
        proto.winnerCells = list(self.winnerCells)
        proto.matchingSegments = list(self.matchingSegments)
        proto.matchingCells = list(self.matchingCells)

    @classmethod
    def read(cls, proto):
        """
    Reads deserialized data from proto object.

    Inverse of `write`: restores every field written there.

    @param proto (DynamicStructBuilder) Proto object

    @return (TemporalMemory) TemporalMemory instance
    """
        # Allocate without running __init__; every field is restored below.
        tm = object.__new__(cls)

        # int()/list() normalize proto fields to plain Python types
        # (presumably the proto layer yields its own numeric/sequence
        # wrappers -- TODO confirm against the capnp bindings).
        tm.columnDimensions = list(proto.columnDimensions)
        tm.cellsPerColumn = int(proto.cellsPerColumn)
        tm.activationThreshold = int(proto.activationThreshold)
        tm.initialPermanence = proto.initialPermanence
        tm.connectedPermanence = proto.connectedPermanence
        tm.minThreshold = int(proto.minThreshold)
        tm.maxNewSynapseCount = int(proto.maxNewSynapseCount)
        tm.permanenceIncrement = proto.permanenceIncrement
        tm.permanenceDecrement = proto.permanenceDecrement
        tm.predictedSegmentDecrement = proto.predictedSegmentDecrement

        # Nested objects deserialize from their own proto slots.
        tm.connections = Connections.read(proto.connections)
        tm._random = Random()
        tm._random.read(proto.random)

        # Per-timestep state; serialized as lists, restored to sets of ints.
        tm.activeCells = set([int(x) for x in proto.activeCells])
        tm.predictiveCells = set([int(x) for x in proto.predictiveCells])
        tm.activeSegments = set([int(x) for x in proto.activeSegments])
        tm.winnerCells = set([int(x) for x in proto.winnerCells])
        tm.matchingSegments = set([int(x) for x in proto.matchingSegments])
        tm.matchingCells = set([int(x) for x in proto.matchingCells])

        return tm

    def __eq__(self, other):
        """
    Equality operator for TemporalMemory instances.
    Checks if two instances are functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
        # Integer-valued parameters compare exactly.
        if self.columnDimensions != other.columnDimensions: return False
        if self.cellsPerColumn != other.cellsPerColumn: return False
        if self.activationThreshold != other.activationThreshold: return False
        # Float-valued parameters compare within EPSILON tolerance.
        if abs(self.initialPermanence - other.initialPermanence) > EPSILON:
            return False
        if abs(self.connectedPermanence - other.connectedPermanence) > EPSILON:
            return False
        if self.minThreshold != other.minThreshold: return False
        if self.maxNewSynapseCount != other.maxNewSynapseCount: return False
        if abs(self.permanenceIncrement - other.permanenceIncrement) > EPSILON:
            return False
        if abs(self.permanenceDecrement - other.permanenceDecrement) > EPSILON:
            return False
        if abs(self.predictedSegmentDecrement -
               other.predictedSegmentDecrement) > EPSILON:
            return False

        if self.connections != other.connections: return False

        # Per-timestep state sets.
        # NOTE(review): `activeSegments` is serialized by write/read but is
        # not compared here -- confirm this omission is intentional.
        if self.activeCells != other.activeCells: return False
        if self.predictiveCells != other.predictiveCells: return False
        if self.winnerCells != other.winnerCells: return False
        if self.matchingSegments != other.matchingSegments: return False
        if self.matchingCells != other.matchingCells: return False

        return True

    def __ne__(self, other):
        """
    Non-equality operator for TemporalMemory instances.
    Checks if two instances are not functionally identical
    (might have different internal state).

    @param other (TemporalMemory) TemporalMemory instance to compare to
    """
        # Delegate to the equality operator and negate (this class's
        # __eq__ always returns a bool, never NotImplemented).
        return not (self == other)

    def _validateColumn(self, column):
        """
    Raises an error if column index is invalid.

    @param column (int) Column index

    @raises IndexError when `column` is outside [0, numberOfColumns)
    """
        limit = self.numberOfColumns()
        if column >= limit or column < 0:
            raise IndexError("Invalid column")

    def _validateCell(self, cell):
        """
    Raises an error if cell index is invalid.

    @param cell (int) Cell index

    @raises IndexError when `cell` is outside [0, numberOfCells)
    """
        limit = self.numberOfCells()
        if cell >= limit or cell < 0:
            raise IndexError("Invalid cell")

    @classmethod
    def getCellIndices(cls, cells):
        """Map each cell in `cells` to its index (see `getCellIndex`)."""
        return list(map(cls.getCellIndex, cells))

    @staticmethod
    def getCellIndex(cell):
        return cell