Example #1
  def testRecycleLeastRecentlyActiveSegmentToMakeRoomForNewSegment(self):
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=1,
      activationThreshold=3,
      initialPermanence=.50,
      connectedPermanence=.50,
      minThreshold=2,
      maxNewSynapseCount=3,
      permanenceIncrement=.02,
      permanenceDecrement=.02,
      predictedSegmentDecrement=0.0,
      seed=42,
      maxSegmentsPerCell=2)

    prevActiveColumns1 = [0, 1, 2]
    prevActiveColumns2 = [3, 4, 5]
    prevActiveColumns3 = [6, 7, 8]
    activeColumns = [9]

    tm.compute(prevActiveColumns1)
    tm.compute(activeColumns)

    self.assertEqual(1, tm.connections.numSegments(9))
    oldestSegment = list(tm.connections.segmentsForCell(9))[0]
    tm.reset()
    tm.compute(prevActiveColumns2)
    tm.compute(activeColumns)

    self.assertEqual(2, tm.connections.numSegments(9))

    oldPresynaptic = \
      set(synapse.presynapticCell
          for synapse in tm.connections.synapsesForSegment(oldestSegment))

    tm.reset()
    tm.compute(prevActiveColumns3)
    tm.compute(activeColumns)
    self.assertEqual(2, tm.connections.numSegments(9))

    # Verify none of the segments are connected to the cells the old
    # segment was connected to.

    for segment in tm.connections.segmentsForCell(9):
      newPresynaptic = set(synapse.presynapticCell
                           for synapse
                           in tm.connections.synapsesForSegment(segment))
      self.assertEqual([], list(oldPresynaptic & newPresynaptic))
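The assertions above rest on the recycling policy behind `maxSegmentsPerCell`: once a cell already holds the maximum number of segments, growing another one evicts the segment that was active least recently. As a rough mental model only (this is not the NuPIC implementation; the class and method names below are made up for illustration), the policy boils down to evicting by last-active iteration:

class SegmentPool(object):
  """Hypothetical sketch of 'recycle the least recently active segment'."""

  def __init__(self, maxSegmentsPerCell):
    self.maxSegmentsPerCell = maxSegmentsPerCell
    self.segments = []  # (lastActiveIteration, segmentData) pairs

  def createSegment(self, iteration, segmentData):
    if len(self.segments) >= self.maxSegmentsPerCell:
      # Evict the entry whose last activity lies furthest in the past.
      oldest = min(self.segments, key=lambda entry: entry[0])
      self.segments.remove(oldest)
    self.segments.append((iteration, segmentData))


pool = SegmentPool(maxSegmentsPerCell=2)
pool.createSegment(1, "A")
pool.createSegment(2, "B")
pool.createSegment(3, "C")  # "A" is recycled: it was active least recently
assert [data for _, data in pool.segments] == ["B", "C"]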
Example #3
  def testRecycleLeastRecentlyActiveSegmentToMakeRoomForNewSegment(self):
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=1,
      activationThreshold=3,
      initialPermanence=.50,
      connectedPermanence=.50,
      minThreshold=2,
      maxNewSynapseCount=3,
      permanenceIncrement=.02,
      permanenceDecrement=.02,
      predictedSegmentDecrement=0.0,
      seed=42,
      maxSegmentsPerCell=2)

    prevActiveColumns1 = [0, 1, 2]
    prevActiveColumns2 = [3, 4, 5]
    prevActiveColumns3 = [6, 7, 8]
    activeColumns = [9]

    tm.compute(prevActiveColumns1)
    tm.compute(activeColumns)

    self.assertEqual(1, len(tm.connections.segmentsForCell(9)))
    oldestSegment = sorted(tm.connections.segmentsForCell(9))[0]

    tm.reset()
    tm.compute(prevActiveColumns2)
    tm.compute(activeColumns)

    self.assertEqual(2, len(tm.connections.segmentsForCell(9)))

    tm.reset()
    tm.compute(prevActiveColumns3)
    tm.compute(activeColumns)

    self.assertEqual(2, len(tm.connections.segmentsForCell(9)))

    synapses = tm.connections.synapsesForSegment(oldestSegment)
    self.assertEqual(3, len(synapses))
    presynapticCells = set()

    for synapse in synapses:
      synapseData = tm.connections.dataForSynapse(synapse)
      presynapticCells.add(synapseData.presynapticCell)

    expected = set([6,7,8])
    self.assertEqual(expected, presynapticCells)
Example #4
    def testRecycleLeastRecentlyActiveSegmentToMakeRoomForNewSegment(self):
        tm = TemporalMemory(columnDimensions=[32],
                            cellsPerColumn=1,
                            activationThreshold=3,
                            initialPermanence=.50,
                            connectedPermanence=.50,
                            minThreshold=2,
                            maxNewSynapseCount=3,
                            permanenceIncrement=.02,
                            permanenceDecrement=.02,
                            predictedSegmentDecrement=0.0,
                            seed=42,
                            maxSegmentsPerCell=2)

        prevActiveColumns1 = [0, 1, 2]
        prevActiveColumns2 = [3, 4, 5]
        prevActiveColumns3 = [6, 7, 8]
        activeColumns = [9]

        tm.compute(prevActiveColumns1)
        tm.compute(activeColumns)

        self.assertEqual(1, len(tm.connections.segmentsForCell(9)))
        oldestSegment = sorted(tm.connections.segmentsForCell(9))[0]

        tm.reset()
        tm.compute(prevActiveColumns2)
        tm.compute(activeColumns)

        self.assertEqual(2, len(tm.connections.segmentsForCell(9)))

        tm.reset()
        tm.compute(prevActiveColumns3)
        tm.compute(activeColumns)

        self.assertEqual(2, len(tm.connections.segmentsForCell(9)))

        synapses = tm.connections.synapsesForSegment(oldestSegment)
        self.assertEqual(3, len(synapses))
        presynapticCells = set()

        for synapse in synapses:
            synapseData = tm.connections.dataForSynapse(synapse)
            presynapticCells.add(synapseData.presynapticCell)

        expected = set([6, 7, 8])
        self.assertEqual(expected, presynapticCells)
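Examples #3 and #4 repeat the same synapse-inspection loop. It can be factored into a small helper; this sketch uses only the `connections` calls already shown above (`synapsesForSegment`, `dataForSynapse`, `presynapticCell`), but the helper name itself is not part of the NuPIC API:

def presynapticCellsForSegment(connections, segment):
  """Collect the presynaptic cell of every synapse on `segment`."""
  return set(connections.dataForSynapse(synapse).presynapticCell
             for synapse in connections.synapsesForSegment(segment))

# In the tests above, the final check would reduce to:
#   self.assertEqual(set([6, 7, 8]),
#                    presynapticCellsForSegment(tm.connections, oldestSegment))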
Example #5
        # The compute method performs one step of learning and/or inference. Note:
        # here we just perform learning but you can perform prediction/inference and
        # learning in the same step if you want (online learning).
        tm.compute(activeColumns, learn=True)

        # The following print statements can be ignored.
        # Useful for tracing internal states
        print("active cells " + str(tm.getActiveCells()))
        print("predictive cells " + str(tm.getPredictiveCells()))
        print("winner cells " + str(tm.getWinnerCells()))
        print("# of active segments " + str(tm.connections.numSegments()))

    # The reset command tells the TP that a sequence just ended and essentially
    # zeros out all the states. It is not strictly necessary but it's a bit
    # messier without resets, and the TP learns quicker with resets.
    tm.reset()

#######################################################################
#
# Step 3: send the same sequence of vectors and look at predictions made by
# temporal memory
for j in range(5):
    print "\n\n--------", "ABCDE"[j], "-----------"
    print "Raw input vector : " + formatRow(x[j])
    activeColumns = set([i for i, j in zip(count(), x[j]) if j == 1])
    # Send each vector to the TM, with learning turned off
    tm.compute(activeColumns, learn=False)

    # The following print statements print out the active cells, predictive
    # cells, active segments and winner cells.
    #
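The comprehension that builds `activeColumns` simply collects the indices of the 1-bits in the current input row. A self-contained illustration with clearer variable names (plain Python, no TM instance required):

from itertools import count

row = [0, 1, 1, 0, 1]  # toy binary input vector
activeColumns = set(i for i, bit in zip(count(), row) if bit == 1)
assert activeColumns == set([1, 2, 4])

# enumerate() is the more idiomatic equivalent:
assert activeColumns == set(i for i, bit in enumerate(row) if bit == 1)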
Example #6
    # The compute method performs one step of learning and/or inference. Note:
    # here we just perform learning but you can perform prediction/inference and
    # learning in the same step if you want (online learning).
    tm.compute(activeColumns, learn = True)

    # The following print statements can be ignored.
    # Useful for tracing internal states
    print("active cells " + str(tm.getActiveCells()))
    print("predictive cells " + str(tm.getPredictiveCells()))
    print("winner cells " + str(tm.getWinnerCells()))
    print("# of active segments " + str(tm.connections.numSegments()))

  # The reset command tells the TP that a sequence just ended and essentially
  # zeros out all the states. It is not strictly necessary but it's a bit
  # messier without resets, and the TP learns quicker with resets.
  tm.reset()


#######################################################################
#
# Step 3: send the same sequence of vectors and look at predictions made by
# temporal memory
for j in range(5):
  print "\n\n--------","ABCDE"[j],"-----------"
  print "Raw input vector : " + formatRow(x[j])
  activeColumns = set([i for i, j in zip(count(), x[j]) if j == 1])
  # Send each vector to the TM, with learning turned off
  tm.compute(activeColumns, learn = False)
  
  # The following print statements print out the active cells, predictive
  # cells, active segments and winner cells.
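Step 3 follows a simple pattern: replay the sequence with `learn=False` and compare the columns the TM predicted after step j with the columns that actually become active at step j+1. A condensed sketch of that check; the helper name, the `sequence` argument (a list of active-column sets), and the cell-to-column mapping are assumptions here, while `compute`, `reset`, and `getPredictiveCells` are taken from the examples above:

def checkPredictions(tm, sequence, cellsPerColumn):
  """Hypothetical helper: report predicted vs. actual columns per step."""
  tm.reset()
  predictedColumns = set()
  for activeColumns in sequence:
    correct = predictedColumns & activeColumns
    missed = activeColumns - predictedColumns
    print("correctly predicted columns: " + str(sorted(correct)))
    print("unpredicted (bursting) columns: " + str(sorted(missed)))

    tm.compute(activeColumns, learn=False)

    # Cells are assumed to be numbered consecutively within each column,
    # so integer division recovers a predictive cell's column index.
    predictedColumns = set(cell // cellsPerColumn
                           for cell in tm.getPredictiveCells())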
Example #7
# sorted_cells = sorted(cell_activation_dict.iteritems(), key=lambda (k,v): (v,k), reverse=False)
# pickle.dump(sorted_cells, open( "1Cells.p", "wb" ) )

# for i in range(len(num_data)):
for i in range(2000):
	# print([str(x) for x in num_data_np[i]])
	# print(num_data[i])
	# print_num(num_data[i], inactive_char='-')
	# print 'The answer is ',answers[i] 
	# print(cell_activation_dict)
	learn_num(num_data[i], answers[i], i)
	ret_tp.reset()

print(float(sum(col_guesses)) / float(len(col_guesses)))
# col_guesses = col_guesses[2500:]
performance = []
for i in range(len(col_guesses)):
	# Running accuracy over the first i+1 guesses.
	performance.append(float(sum(col_guesses[:i+1])) / float(i+1))

plt.plot(performance)
plt.show()
# plt.plot(cell_guesses)
# plt.show()
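The running-accuracy loop above recomputes `sum(col_guesses[:i+1])` on every iteration, which is quadratic in the number of guesses. A linear-time sketch that keeps a running total (plain Python; `col_guesses` is assumed to be the same list of 0/1 guesses used above):

def running_mean(values):
	"""Linear-time running mean of a sequence of 0/1 guesses."""
	total = 0.0
	means = []
	for i, v in enumerate(values):
		total += v
		means.append(total / (i + 1))
	return means

# e.g. performance = running_mean(col_guesses)
assert running_mean([1, 0, 1, 1]) == [1.0, 0.5, 2.0 / 3.0, 0.75]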