Example #1
  def testNewSegmentAddSynapsesToSubsetOfWinnerCells(self):
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=4,
      activationThreshold=3,
      initialPermanence=.21,
      connectedPermanence=.50,
      minThreshold=2,
      maxNewSynapseCount=2,
      permanenceIncrement=.10,
      permanenceDecrement=.10,
      predictedSegmentDecrement=0.0,
      seed=42)

    previousActiveColumns = [0, 1, 2]
    activeColumns = [4]

    tm.compute(previousActiveColumns, True)

    prevWinnerCells = tm.getWinnerCells() #[0, 8, 7]
    self.assertEqual(3, len(prevWinnerCells))

    tm.compute(activeColumns, True)

    winnerCells = tm.getWinnerCells() #[18]
    self.assertEqual(1, len(winnerCells))
    segments = list(tm.connections.segmentsForCell(winnerCells[0]))
    self.assertEqual(1, len(segments))
    synapses = list(tm.connections.synapsesForSegment(segments[0]))
    self.assertEqual(2, len(synapses))

    for synapse in synapses:
      synapseData = tm.connections.dataForSynapse(synapse)
      self.assertAlmostEqual(.21, synapseData.permanence)
      self.assertTrue(synapseData.presynapticCell in prevWinnerCells)
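
A small illustrative sketch (not part of the test above): with cellsPerColumn=4, a cell's column is its index divided by 4, which is how the winner-cell indices in the comments map back to columns. Cells 0, 8, 7 lie in columns 0, 2, 1, and cell 18 lies in column 4, the one active column.

cellsPerColumn = 4
for cell in [0, 8, 7, 18]:
    print("cell %d -> column %d" % (cell, cell // cellsPerColumn))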
Example #2
  def testZeroActiveColumns(self):
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=4,
      activationThreshold=3,
      initialPermanence=.21,
      connectedPermanence=.5,
      minThreshold=2,
      maxNewSynapseCount=3,
      permanenceIncrement=.10,
      permanenceDecrement=.10,
      predictedSegmentDecrement=0.0,
      seed=42)

    previousActiveColumns = [0]
    previousActiveCells = [0, 1, 2, 3]
    expectedActiveCells = [4]

    segment = tm.createSegment(expectedActiveCells[0])
    tm.connections.createSynapse(segment, previousActiveCells[0], .5)
    tm.connections.createSynapse(segment, previousActiveCells[1], .5)
    tm.connections.createSynapse(segment, previousActiveCells[2], .5)
    tm.connections.createSynapse(segment, previousActiveCells[3], .5)

    tm.compute(previousActiveColumns, True)
    self.assertFalse(len(tm.getActiveCells()) == 0)
    self.assertFalse(len(tm.getWinnerCells()) == 0)
    self.assertFalse(len(tm.getPredictiveCells()) == 0)

    zeroColumns = []
    tm.compute(zeroColumns, True)

    self.assertTrue(len(tm.getActiveCells()) == 0)
    self.assertTrue(len(tm.getWinnerCells()) == 0)
    self.assertTrue(len(tm.getPredictiveCells()) == 0)
Example #5
  def testMatchingSegmentAddSynapsesToAllWinnerCells(self):
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=1,
      activationThreshold=3,
      initialPermanence=.21,
      connectedPermanence=.50,
      minThreshold=1,
      maxNewSynapseCount=3,
      permanenceIncrement=.10,
      permanenceDecrement=.10,
      predictedSegmentDecrement=0.0,
      seed=42)

    previousActiveColumns = [0, 1]
    prevWinnerCells = [0, 1]
    activeColumns = [4]

    matchingSegment = tm.createSegment(4)
    tm.connections.createSynapse(matchingSegment, 0, .5)

    tm.compute(previousActiveColumns, True)
    self.assertEqual(prevWinnerCells, tm.getWinnerCells())

    tm.compute(activeColumns)

    synapses = tm.connections.synapsesForSegment(matchingSegment)
    self.assertEqual(2, len(synapses))

    for synapse in synapses:
      synapseData = tm.connections.dataForSynapse(synapse)
      if synapseData.presynapticCell != 0:
        self.assertAlmostEqual(.21, synapseData.permanence)
        self.assertEqual(prevWinnerCells[1], synapseData.presynapticCell)
Example #6
  def testPredictedActiveCellsAreAlwaysWinners(self):
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=4,
      activationThreshold=3,
      initialPermanence=.21,
      connectedPermanence=.5,
      minThreshold=2,
      maxNewSynapseCount=3,
      permanenceIncrement=.10,
      permanenceDecrement=.10,
      predictedSegmentDecrement=0.0,
      seed=42)

    previousActiveColumns = [0]
    activeColumns = [1]
    previousActiveCells = [0, 1, 2, 3]
    expectedWinnerCells = [4, 6]

    activeSegment1 = tm.createSegment(expectedWinnerCells[0])
    tm.connections.createSynapse(activeSegment1, previousActiveCells[0], .5)
    tm.connections.createSynapse(activeSegment1, previousActiveCells[1], .5)
    tm.connections.createSynapse(activeSegment1, previousActiveCells[2], .5)

    activeSegment2 = tm.createSegment(expectedWinnerCells[1])
    tm.connections.createSynapse(activeSegment2, previousActiveCells[0], .5)
    tm.connections.createSynapse(activeSegment2, previousActiveCells[1], .5)
    tm.connections.createSynapse(activeSegment2, previousActiveCells[2], .5)

    tm.compute(previousActiveColumns, False)
    tm.compute(activeColumns, False)

    self.assertEqual(expectedWinnerCells, tm.getWinnerCells())
Example #7
    def testRecycleWeakestSynapseToMakeRoomForNewSynapse(self):
        tm = TemporalMemory(columnDimensions=[32],
                            cellsPerColumn=1,
                            activationThreshold=3,
                            initialPermanence=.21,
                            connectedPermanence=.50,
                            minThreshold=1,
                            maxNewSynapseCount=3,
                            permanenceIncrement=.02,
                            permanenceDecrement=.02,
                            predictedSegmentDecrement=0.0,
                            seed=42,
                            maxSynapsesPerSegment=3)

        prevActiveColumns = [0, 1, 2]
        prevWinnerCells = [0, 1, 2]
        activeColumns = [4]

        matchingSegment = tm.connections.createSegment(4)
        tm.connections.createSynapse(matchingSegment, 81, .6)

        weakestSynapse = tm.connections.createSynapse(matchingSegment, 0, .11)

        tm.compute(prevActiveColumns)
        self.assertEqual(prevWinnerCells, tm.getWinnerCells())
        tm.compute(activeColumns)

        synapses = tm.connections.synapsesForSegment(matchingSegment)
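        # With maxSynapsesPerSegment=3, growing synapses to the remaining winner
        # cells (1 and 2) forces the weakest existing synapse, the .11 one to
        # cell 0, to be recycled, so cell 0 should no longer appear below.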
        self.assertEqual(3, len(synapses))
        presynapticCells = set(synapse.presynapticCell for synapse in synapses)
        self.assertFalse(0 in presynapticCells)
Example #10
  def testRecycleWeakestSynapseToMakeRoomForNewSynapse(self):
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=1,
      activationThreshold=3,
      initialPermanence=.21,
      connectedPermanence=.50,
      minThreshold=1,
      maxNewSynapseCount=3,
      permanenceIncrement=.02,
      permanenceDecrement=.02,
      predictedSegmentDecrement=0.0,
      seed=42,
      maxSynapsesPerSegment=3)

    prevActiveColumns = [0, 1, 2]
    prevWinnerCells = [0, 1, 2]
    activeColumns = [4]

    matchingSegment = tm.connections.createSegment(4)
    tm.connections.createSynapse(matchingSegment, 81, .6)

    weakestSynapse = tm.connections.createSynapse(matchingSegment, 0, .11)

    tm.compute(prevActiveColumns)
    self.assertEqual(prevWinnerCells, tm.getWinnerCells())
    tm.compute(activeColumns)

    synapses = tm.connections.synapsesForSegment(matchingSegment)
    self.assertEqual(3, len(synapses))
    presynapticCells = set(synapse.presynapticCell for synapse in synapses)
    self.assertFalse(0 in presynapticCells)
Example #12
  def testRecycleWeakestSynapseToMakeRoomForNewSynapse(self):
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=1,
      activationThreshold=3,
      initialPermanence=.21,
      connectedPermanence=.50,
      minThreshold=1,
      maxNewSynapseCount=3,
      permanenceIncrement=.02,
      permanenceDecrement=.02,
      predictedSegmentDecrement=0.0,
      seed=42,
      maxSynapsesPerSegment=4)

    prevActiveColumns = [1, 2, 3]
    prevWinnerCells = [1, 2, 3]
    activeColumns = [4]

    matchingSegment = tm.createSegment(4)
    tm.connections.createSynapse(matchingSegment, 81, .6)

    # Create a weak synapse. Make sure it's not so weak that permanenceIncrement
    # destroys it.
    tm.connections.createSynapse(matchingSegment, 0, .11)

    # Create a synapse that will match.
    tm.connections.createSynapse(matchingSegment, 1, .20)

    # Create a synapse with a high permanence
    tm.connections.createSynapse(matchingSegment, 31, .60)

    tm.compute(prevActiveColumns)
    self.assertEqual(prevWinnerCells, tm.getWinnerCells())
    tm.compute(activeColumns)

    synapses = tm.connections.synapsesForSegment(matchingSegment)
    self.assertEqual(4, len(synapses))
    presynapticCells = set(synapse.presynapticCell for synapse in synapses)
    self.assertEqual(set([1, 2, 3, 31]), presynapticCells)
Example #13
  def testActiveSegmentGrowSynapsesAccordingToPotentialOverlap(self):
    """
    When a segment becomes active, grow synapses to previous winner cells.

    The number of grown synapses is calculated from the "matching segment"
    overlap, not the "active segment" overlap.
    """
    tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=1,
      activationThreshold=2,
      initialPermanence=.21,
      connectedPermanence=.50,
      minThreshold=1,
      maxNewSynapseCount=4,
      permanenceIncrement=.10,
      permanenceDecrement=.10,
      predictedSegmentDecrement=0.0,
      seed=42)

    # Use 1 cell per column so that we have easy control over the winner cells.
    previousActiveColumns = [0, 1, 2, 3, 4]
    prevWinnerCells = [0, 1, 2, 3, 4]
    activeColumns = [5]

    activeSegment = tm.createSegment(5)
    tm.connections.createSynapse(activeSegment, 0, .5)
    tm.connections.createSynapse(activeSegment, 1, .5)
    tm.connections.createSynapse(activeSegment, 2, .2)

    tm.compute(previousActiveColumns, True)
    self.assertEqual(prevWinnerCells, tm.getWinnerCells())
    tm.compute(activeColumns, True)
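
    # Expected outcome: the segment's potential overlap with the previous winner
    # cells is 3 (synapses to cells 0, 1, 2), so with maxNewSynapseCount=4 only
    # 4 - 3 = 1 new synapse is grown, to either cell 3 or cell 4.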

    presynapticCells = set(synapse.presynapticCell for synapse in
                           tm.connections.synapsesForSegment(activeSegment))
    self.assertTrue(presynapticCells == set([0, 1, 2, 3]) or
                    presynapticCells == set([0, 1, 2, 4]))
Example #15
def main():

    numberImages = 212
    DIR = "./sim_data"

    # Experiments
    #D0 = np.loadtxt(DIR + '/seq_multi_loop_noise01_al0.txt', dtype='i', delimiter=',')
    D1 = np.loadtxt(DIR + '/seq_multi_loop_noise0_al1.txt',
                    dtype='i',
                    delimiter=',')
    D = np.loadtxt(DIR + '/seq_multi_loop_noise05_al5.txt',
                   dtype='i',
                   delimiter=',')

    tm = TemporalMemory(
        # Must be the same dimensions as the SP
        columnDimensions=(2048, ),
        # How many cells in each mini-column.
        cellsPerColumn=4,
        # A segment is active if it has >= activationThreshold connected synapses
        # that are active due to infActiveState
        activationThreshold=13,
        #initialPermanence=0.21,
        connectedPermanence=0.5,
        # Minimum number of active synapses for a segment to be considered during
        # search for the best-matching segments.
        minThreshold=1,
        # The max number of synapses added to a segment during learning
        maxNewSynapseCount=3,
        #permanenceIncrement=0.01,
        #permanenceDecrement=0.01,
        predictedSegmentDecrement=0.0005,
        maxSegmentsPerCell=3,
        maxSynapsesPerSegment=3,
        seed=42)

    # Simple HTM parameters
    params = Params()
    params.maxPredDepth = 0
    params.probAdditionalCon = 0.05  # probability for random connection
    params.nCellPerCol = 32  # number of cells per minicolumn
    params.nInConPerCol = int(round(np.count_nonzero(D) / D.shape[0]))
    params.minColumnActivity = int(round(0.25 * params.nInConPerCol))
    params.nColsPerPattern = 10  # minimum number of active minicolumns k_min
    params.kActiveColumn = 100  # maximum number of active minicolumns k_max
    params.kMin = 1

    # run HTM
    t = time.time()
    print('Simple HTM')
    htm = MCN('htm', params)

    outputSDR = []
    max_index = []
    nCols_MCN = []
    nCols_HTM = []
    '''
    for i in range (min(numberImages,D1.shape[0])):
        loop = 0 
        #print('\n-------- ITERATION %d ---------' %i)
        # skip empty vectors
        if np.count_nonzero(D1[i,:]) == 0:
            print('empty vector, skip\n')
            continue
        loop += 1
        htm.compute(D1[i,:])

        max_index.append(max(htm.winnerCells))
        outputSDR.append(htm.winnerCells)
    elapsed = time.time() - t
    print("Elapsed time: %f seconds\n" %elapsed)
    '''
    for i in range(min(numberImages, D.shape[0])):
        loop = 0
        #print('\n-------- ITERATION %d ---------' %i)
        # skip empty vectors
        if np.count_nonzero(D[i, :]) == 0:
            print('empty vector, skip\n')
            continue
        loop += 1
        htm.compute(D[i, :])
        nCols_MCN.append(htm.nCols)
        nCols_HTM.append(2048)
        max_index.append(max(htm.winnerCells))
        outputSDR.append(htm.winnerCells)
    elapsed = time.time() - t
    print("Elapsed time: %f seconds\n" % elapsed)

    # create output SDR matrix from HTM winner cell output
    M = np.zeros((len(outputSDR), max(max_index) + 1), dtype=int)
    for i in range(len(outputSDR)):
        for j in range(len(outputSDR[i])):
            winner = outputSDR[i][j]
            M[i][winner] = 1

    print('Temporal Pooler descriptors')
    D1_tm = []
    id_max1 = []
    t = time.time()

    for i in range(min(numberImages, D.shape[0])):
        D1_sp = np.nonzero(D[i, :])[0]
        tm.compute(D1_sp, learn=True)
        activeCells = tm.getWinnerCells()
        D1_tm.append(activeCells)
        id_max1.append(max(activeCells))

    elapsed = time.time() - t
    print("Elapsed time: %f seconds\n" % elapsed)

    # create output SDR matrix from HTM winner cell output
    T = np.zeros((len(D1_tm), max(id_max1) + 1), dtype=int)
    for i in range(len(D1_tm)):
        for j in range(len(D1_tm[i])):
            winner = D1_tm[i][j]
            T[i][winner] = 1

    # Create ground truth and show precision-recall curves
    GT_data = np.loadtxt(DIR + '/seq_multi_loop_noNoise_gt.txt',
                         dtype='i',
                         delimiter=',',
                         skiprows=1)
    GT = np.zeros((numberImages, numberImages), dtype=int)
    for i in range(GT.shape[0]):
        for j in range(i, GT.shape[1]):
            GT[i, j] = (np.any(GT_data[i, :] != GT_data[j, :]) == False)

    # Results
    print('Results')
    fig, ax = plt.subplots()

    S0 = evaluateSimilarity(D)
    P, R = createPR(S0, GT)
    ax.plot(R, P, label='InputSDR: (AUC=%f)' % np.trapz(P, R))

    S1 = evaluateSimilarity(M)
    P, R = createPR(S1, GT)
    ax.plot(R, P, label='MCN (AUC=%f)' % np.trapz(P, R))

    S2 = evaluateSimilarity(T)
    P, R = createPR(S2, GT)
    ax.plot(R, P, label='HTM (AUC=%f)' % np.trapz(P, R))

    ax.legend()
    ax.grid(True)
    plt.xlabel("Recall")
    plt.ylabel("Precision")
    plt.show()

    fig2, (ax2, ax3) = plt.subplots(nrows=1, ncols=2)  # two axes on figure

    ax2.imshow(S0, vmin=0, vmax=50, interpolation='nearest', cmap='binary')
    ax2.set_title('Input descriptors')

    ax3.imshow(S2, vmin=0, vmax=30, interpolation='nearest', cmap='binary')
    ax3.set_title('Winner cell outputs')

    plt.show()

    plt.plot(nCols_MCN, 'g', label='MCN')
    plt.plot(nCols_HTM, 'b', label='HTM')
    plt.xlabel('Number of seen images')
    plt.ylabel('Number of MiniColumns')
    plt.legend()
    plt.grid(True)

    #ax4.plot(nCols_MCN,'g',nCols_HTM,'b')
    #ax4.set_title ('Number of MiniColumns (MCN x HTM')
    #ax4.xlabel('Number of seen images')
    #ax4.ylabel('Number of MiniColumns')

    plt.show()
Example #16
def main():

    DIR = "./sim_data"

    # Odom Encoder 
    xSDR = ScalarEncoder(w=21,minval=0,maxval=20,n=256)
    ySDR = ScalarEncoder(w=21,minval=0,maxval=20,n=256)
    xyWidth = xSDR.getWidth() + ySDR.getWidth()

    # Visual input
    D = np.loadtxt(DIR + '/seq_multi_loop_noise05_al5.txt', dtype='i', delimiter=',')
    numberImages = D[:,0].size
    nColumns = D[0,:].size
    #time.sleep(10)
    
    # Odom input
    odom = np.loadtxt(DIR + '/seq_multi_loop_noise05_al5_gt.txt', dtype='f', delimiter=',')
    x = odom[:,0]
    y = odom[:,1]

    # Encoder Odom input
    odomSDR = np.zeros((numberImages,xyWidth), dtype=int)
    for i in range(numberImages):
        _xSDR = np.zeros(xSDR.getWidth(), dtype=int)
        xSDR.encodeIntoArray(x[i], _xSDR)
        _ySDR = np.zeros(ySDR.getWidth(), dtype=int)
        ySDR.encodeIntoArray(y[i], _ySDR)
        odomSDR[i,:] = np.concatenate([_xSDR, _ySDR])
    
    tm0 = TM(
        columnCount=nColumns,
        cellsPerColumn=4,
        initialPermanence=0.21,
        connectedPermanence=0.5,
        permanenceIncrement=0.1,
        permanenceDecrement=0.1,
        minThreshold=15,
        basalInputSize= 512,
        reducedBasalThreshold=1000,
        activationThreshold=1000,
        apicalInputSize=0,
        maxSynapsesPerSegment=-1,
        sampleSize=1,
        seed = 42
        )
        
    tm = TemporalMemory(
        # Must be the same dimensions as the SP
        columnDimensions=(2048,),
        # How many cells in each mini-column.
        cellsPerColumn=4,
        # A segment is active if it has >= activationThreshold connected synapses
        # that are active due to infActiveState
        activationThreshold=13,
        initialPermanence=0.21,
        connectedPermanence=0.5,
        # Minimum number of active synapses for a segment to be considered during
        # search for the best-matching segments.
        minThreshold=1,
        # The max number of synapses added to a segment during learning
        maxNewSynapseCount=3,
        #permanenceIncrement=0.01,
        #permanenceDecrement=0.01,
        predictedSegmentDecrement=0.0005,
        maxSegmentsPerCell=3,
        maxSynapsesPerSegment=3,
        seed=42
    )

    #time.sleep(10)

    # Simple HTM parameters
    params = Params()
    params.maxPredDepth = 0
    params.probAdditionalCon = 0.05 # probability for random connection
    params.nCellPerCol = 32 # number of cells per minicolumn
    params.nInConPerCol = int(round(np.count_nonzero(D) / D.shape[0]))
    #print params.nInConPerCol
    params.minColumnActivity = int(round(0.25*params.nInConPerCol))
    params.nColsPerPattern = 10     # minimum number of active minicolumns k_min
    params.kActiveColumn = 100      # maximum number of active minicolumns k_max
    params.kMin = 1

    # run HTM
    t = time.time()
    print ('Simple HTM')
    htm = MCN('htm',params)

    outputSDR = []
    max_index = []

    for i in range (min(numberImages,D.shape[0])):
        loop = 0 
        #print('\n-------- ITERATION %d ---------' %i)
        # skip empty vectors
        if np.count_nonzero(D[i,:]) == 0:
            print('empty vector, skip\n')
            continue
        loop += 1
        #print D[i,:]
        htm.compute(D[i,:])

        max_index.append(max(htm.winnerCells))
        outputSDR.append(htm.winnerCells)
        
    elapsed = time.time() - t
    print("Elapsed time: %f seconds\n" %elapsed)

    # create output SDR matrix from HTM winner cell output
    M = np.zeros((len(outputSDR),max(max_index)+1), dtype=int)
    for i in range(len(outputSDR)):
        for j in range(len(outputSDR[i])):
            winner = outputSDR[i][j]
            M[i][winner] = 1

    # Temporal Pooler descriptors
    print('Temporal Pooler descriptors')
    D1_tm=[]
    id_max1=[]
    t = time.time()

    for i in range(min(numberImages,D.shape[0])):
        D1_sp = np.nonzero(D[i,:])[0]
        tm.compute(D1_sp, learn=True)
        activeCells = tm.getWinnerCells()
        D1_tm.append(activeCells)
        id_max1.append(max(activeCells))
    
    elapsed = time.time() - t
    print( "Elapsed time: %f seconds\n" %elapsed)

    # create output SDR matrix from HTM winner cell output
    T = np.zeros((len(D1_tm),max(id_max1)+1), dtype=int)
    for i in range(len(D1_tm)):
        for j in range(len(D1_tm[i])):
            winner = D1_tm[i][j]
            T[i][winner] = 1


    # Temporal Pooler - Distal connections
    print('Temporal Pooler - Distal connections')
    D2_tm=[]
    id_max2=[]
    t = time.time()

    for i in range(min(numberImages,D.shape[0])):
        D2_sp = np.nonzero(D[i,:])[0]
        basalInputs = np.nonzero(odomSDR[i,:])[0]
        tm0.compute(sorted(D2_sp), sorted(basalInputs), apicalInput=(), basalGrowthCandidates=None, apicalGrowthCandidates=None, learn=True)
        activeCells2 = tm0.getWinnerCells()
        D2_tm.append(activeCells2)
        id_max2.append(max(activeCells2))
    
    elapsed = time.time() - t
    print( "Elapsed time: %f seconds\n" %elapsed)

    # create output SDR matrix from HTM winner cell output
    T2 = np.zeros((len(D2_tm),max(id_max2)+1), dtype=int)
    for i in range(len(D2_tm)):
        for j in range(len(D2_tm[i])):
            winner = D2_tm[i][j]
            T2[i][winner] = 1

    # Create ground truth and show precision-recall curves
    GT_data = np.loadtxt(DIR + '/seq_multi_loop_noNoise_gt.txt', dtype='i', delimiter=',',skiprows=1)
    GT = np.zeros((numberImages,numberImages), dtype=int)
    for i in range(GT.shape[0]):
        for j in range(i,GT.shape[1]):
            GT[i,j] = (np.any(GT_data[i,:] != GT_data[j,:])==False)

    # Results
    print ('Results')
    fig, ax = plt.subplots()

    S0 = evaluateSimilarity(D)
    P, R = createPR(S0,GT)
    ax.plot(R, P, label='InputSDR: (avgP=%f)' %np.trapz(P,R))

    S1 = evaluateSimilarity(M)
    P, R = createPR(S1,GT)
    ax.plot(R, P, label='MCN (avgP=%f)' %np.trapz(P,R))

    S2 = evaluateSimilarity(T)
    P, R = createPR(S2,GT)
    ax.plot(R, P, label='HTM (avgP=%f)' %np.trapz(P,R))

    S3 = evaluateSimilarity(T2)
    P, R = createPR(S3,GT)
    ax.plot(R, P, label='HTM Distal (avgP=%f)' %np.trapz(P,R))

    ax.legend()
    ax.grid(True)
    plt.xlabel("Recall")
    plt.ylabel("Precision")
    plt.show()

Example #17
for i in range(10):

  # Send each letter in the sequence in order
  for j in range(5):
    activeColumns = set([i for i, j in zip(count(), x[j]) if j == 1])

    # The compute method performs one step of learning and/or inference. Note:
    # here we just perform learning but you can perform prediction/inference and
    # learning in the same step if you want (online learning).
    tm.compute(activeColumns, learn = True)

    # The following print statements can be ignored.
    # Useful for tracing internal states
    print("active cells " + str(tm.getActiveCells()))
    print("predictive cells " + str(tm.getPredictiveCells()))
    print("winner cells " + str(tm.getWinnerCells()))
    print("# of active segments " + str(tm.connections.numSegments()))

  # The reset command tells the TM that a sequence just ended and essentially
  # zeros out all the states. It is not strictly necessary but it's a bit
  # messier without resets, and the TM learns quicker with resets.
  tm.reset()


#######################################################################
#
# Step 3: send the same sequence of vectors and look at predictions made by
# temporal memory
for j in range(5):
  print "\n\n--------","ABCDE"[j],"-----------"
  print "Raw input vector : " + formatRow(x[j])
Example #18
def main():

    tm = TemporalMemory(
        # Must be the same dimensions as the SP
        columnDimensions=(2048, ),
        #columnDimensions=(32768,),
        # How many cells in each mini-column.
        cellsPerColumn=16,
        # A segment is active if it has >= activationThreshold connected synapses
        # that are active due to infActiveState
        activationThreshold=4,  #1,4 (better),
        initialPermanence=0.55,
        connectedPermanence=0.5,
        # Minimum number of active synapses for a segment to be considered during
        # search for the best-matching segments.
        minThreshold=1,  #1
        # The max number of synapses added to a segment during learning
        maxNewSynapseCount=20,  #6
        permanenceIncrement=0.01,
        permanenceDecrement=0.01,
        predictedSegmentDecrement=0.0005,  #0.0001,#0.0005,
        maxSegmentsPerCell=100,  #8 16 (worked)
        maxSynapsesPerSegment=100,  #8 16 (worked)
        seed=42)

    numberImages = 288
    DIR = "/media/cappizzino/OS/Documents and Settings/cappi/Documents/MATLAB/MCN_v0_1"

    # Experiments
    # Ground truth
    GT = np.identity(numberImages, dtype=bool)
    for i in range(GT.shape[0]):
        for j in range(GT.shape[0] - 1):
            if i == j:
                GT[i, j] = 1

    # MCN (MCN descriptors)
    print('MCN')
    id_max1 = []
    id_max2 = []
    with open('outputSDR1.txt', 'r') as f:
        D1_MCN = [[int(entry) for entry in line.split(',')]
                  for line in f.readlines()]
    for i in range(len(D1_MCN)):
        id_max1.append(max(D1_MCN[i]))

    with open('outputSDR2.txt', 'r') as f:
        D2_MCN = [[int(entry) for entry in line.split(',')]
                  for line in f.readlines()]
    for i in range(len(D2_MCN)):
        id_max2.append(max(D2_MCN[i]))

    id_max = max(max(id_max1), max(id_max2))
    '''
    D1_sparse = sparse.lil_matrix((len(D1_MCN), id_max+1), dtype='int8')
    for i in range(len(D1_MCN)):
        D1_sparse[i,D1_MCN[i]] = 1

    D2_sparse = sparse.lil_matrix((len(D2_MCN), id_max+1), dtype='int8')
    for i in range(len(D2_MCN)):
        D2_sparse[i,D2_MCN[i]] = 1
    '''
    D1_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
    for i in range(numberImages):
        D1_sparse[i, D1_MCN[i]] = 1

    D2_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
    for i in range(numberImages):
        D2_sparse[i, D2_MCN[i]] = 1

    S_MCN = pairwiseDescriptors(D1_sparse, D2_sparse)

    # Pairwise (raw descriptors)
    print('Pairwise descriptors')
    D1 = np.loadtxt(DIR + "/D1.txt", dtype='f', delimiter=',')
    D2 = np.loadtxt(DIR + "/D2.txt", dtype='f', delimiter=',')
    S_pairwise = cosine_similarity(D1[:numberImages], D2[:numberImages])

    # Dimension Reduction and binarizarion
    print('Dimension Reduction')
    P = np.random.randn(D1.shape[1], 1024)
    P = normc(P)
    #D1h = np.dot(D1[:numberImages],P)
    #D2h = np.dot(D2[:numberImages],P)
    #S_Dh = cosine_similarity(D1h, D2h)

    # sLSBH (binarized descriptors)
    print('sLSBH')
    D1_slsbh = getLSBH(D1[:numberImages], P, 0.25)  #0.025 0.25
    D2_slsbh = getLSBH(D2[:numberImages], P, 0.25)
    #D1_slsbh = np.loadtxt(DIR + "/D1_slsbh.txt", dtype='i', delimiter=',')
    #D2_slsbh = np.loadtxt(DIR + "/D2_slsbh.txt", dtype='i', delimiter=',')
    Sb_pairwise = pairwiseDescriptors(D1_slsbh[:numberImages],
                                      D2_slsbh[:numberImages])
    '''
    # Binarizarion ans Sparsification
    print 'Binarizarion and Sparsification'
    D1_slsbh = np.zeros((D1h.shape[0],2*D1h.shape[1]), dtype = bool)
    D2_slsbh = np.zeros((D2h.shape[0],2*D2h.shape[1]), dtype = bool)
    for i in range(numberImages):
        D1_slsbh[i,:] = generate_LSBH((D1h[i,:]),(D1h.shape[1]),0.25)
        D2_slsbh[i,:] = generate_LSBH((D2h[i,:]),(D2h.shape[1]),0.25)
    Sb_pairwise = pairwiseDescriptors(D1_slsbh, D2_slsbh)
    '''
    '''
    print 'Spatial Pooler descriptors'
    # Create an array to represent active columns, all initially zero. This
    # will be populated by the compute method below. It must have the same
    # dimensions as the Spatial Pooler.
    activeColumns = np.zeros(2048)
    D1_htm=[]
    D2_htm=[]
    id_max=[]
    id_max1=[]
    id_max2=[]

    for i in range (numberImages):
        # Execute Spatial Pooling algorithm over input space.
        # Feed the examples to the SP
        sp.compute(D1_slsbh[i,:], False, activeColumns)
        activeColumnIndices = np.nonzero(activeColumns)[0]
        D1_htm.append(activeColumnIndices)
        id_max1.append(max(activeColumnIndices))

    for i in range (numberImages):
        # Execute Spatial Pooling algorithm over input space.
        # Feed the examples to the SP
        sp.compute(D2_slsbh[i,:], False, activeColumns)
        activeColumnIndices = np.nonzero(activeColumns)[0]
        D2_htm.append(activeColumnIndices)
        id_max2.append(max(activeColumnIndices))
    
    id_max = max(max(id_max1),max(id_max2))
 
    D1_sparse = sparse.lil_matrix((numberImages, id_max+1), dtype='int8')
    for i in range(numberImages):
        D1_sparse[i,D1_htm[i]] = 1

    D2_sparse = sparse.lil_matrix((numberImages, id_max+1), dtype='int8')
    for i in range(numberImages):
        D2_sparse[i,D2_htm[i]] = 1

    S_SP = pairwiseDescriptors(D1_sparse, D2_sparse)
    '''

    print('Temporal Pooler (1) descriptors')
    D1_tm = []
    D2_tm = []
    id_max = []
    id_max1 = []
    id_max2 = []
    '''
    for _ in range(5):
        for i in range(numberImages):
            activeColumnIndices = np.nonzero(D1_slsbh[i,:])[0]
            tm.compute(activeColumnIndices, learn=True)
            #tm.compute(D1_htm[i], learn=True)
        for i in range(numberImages):
            activeColumnIndices = np.nonzero(D2_slsbh[i,:])[0]
            tm.compute(activeColumnIndices, learn=True)
            #tm.compute(D2_htm[i], learn=True)
        tm.reset()
    '''
    for i in range(numberImages):
        for _ in range(1):
            activeColumnIndices = np.nonzero(D1_slsbh[i, :])[0]
            #print activeColumnIndices
            time.sleep(2)
            tm.compute(activeColumnIndices, learn=True)
            #tm.compute(D1_htm[i], learn=True)
            activeCells = tm.getWinnerCells()
            #print activeCells
            #time.sleep(5)
            D1_tm.append(activeCells)
            id_max1.append(max(activeCells))
            #tm.reset()

    print('Temporal Pooler (2) descriptors')
    '''
    for _ in range(2):
        for i in range(numberImages):
            activeColumnIndices = np.nonzero(D2_slsbh[i,:])[0]
            tm.compute(activeColumnIndices, learn=True)
            #tm.compute(D2_htm[i], learn=True)
    '''

    for i in range(numberImages):
        activeColumnIndices = np.nonzero(D2_slsbh[i, :])[0]
        tm.compute(activeColumnIndices, learn=False)
        #tm.compute(D2_htm[i], learn=True)
        activeCells = tm.getWinnerCells()
        D2_tm.append(activeCells)
        id_max2.append(max(activeCells))

    id_max = max(max(id_max1), max(id_max2))

    D1_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
    for i in range(numberImages):
        D1_sparse[i, D1_tm[i]] = 1

    D2_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
    for i in range(numberImages):
        D2_sparse[i, D2_tm[i]] = 1

    S_TM = pairwiseDescriptors(D1_sparse, D2_sparse)

    # Results
    print('Results')
    fig, ax = plt.subplots()

    P, R = createPR(S_pairwise, GT)
    ax.plot(R, P, label='pairwise / raw (avgP=%f)' % np.trapz(P, R))

    P, R = createPR(S_MCN, GT)
    ax.plot(R, P, label='MCN (avgP=%f)' % np.trapz(P, R))

    #P, R = createPR(S_Dh,GT)
    #ax.plot(R, P, label='pairwise RP / raw (avgP=%f)' %np.trapz(P,R))

    P, R = createPR(Sb_pairwise, GT)
    ax.plot(R, P, label='sLSBH / raw (avgP=%f)' % np.trapz(P, R))

    #P, R = createPR(S_SP,GT)
    #ax.plot(R, P, label='HTM SP (avgP=%f)' %np.trapz(P,R))

    P, R = createPR(S_TM, GT)
    ax.plot(R, P, label='HTM TM (avgP=%f)' % np.trapz(P, R))

    ax.legend()
    ax.grid(True)
    plt.xlabel("Recall")
    plt.ylabel("Precision")
    plt.show()
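
These scripts rely on project helpers (pairwiseDescriptors, createPR, getLSBH, normc, MCN, Params) that are defined elsewhere and not shown here. As a rough, hypothetical sketch of the kind of computation pairwiseDescriptors plausibly performs on the sparse binary descriptor matrices built above (an assumption, not the project's actual implementation):

import numpy as np

def pairwise_overlap(D1, D2):
    # Hypothetical stand-in for pairwiseDescriptors: the number of shared
    # active bits between every descriptor (row) of D1 and every row of D2,
    # for scipy.sparse inputs such as the lil_matrix objects used above.
    return np.asarray(D1.astype(float).dot(D2.astype(float).T).todense())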
Example #20
def main():

    # Load CNN
    original_model = models.alexnet(pretrained=True)
    class AlexNetConv3(nn.Module):
        def __init__(self):
            super(AlexNetConv3, self).__init__()
            self.features = nn.Sequential(
                # stop at conv3
                *list(original_model.features.children())[:7]
            )

        def forward(self, x):
            x = self.features(x)
            return x

    model = AlexNetConv3()
    model.eval()

    tm = TemporalMemory(
        # Must be the same dimensions as the SP
        columnDimensions=(2048,),
        # How many cells in each mini-column.
        cellsPerColumn=32,
        # A segment is active if it has >= activationThreshold connected synapses
        # that are active due to infActiveState
        activationThreshold=4,#1,4 (better),
        initialPermanence=0.55,
        connectedPermanence=0.5,
        # Minimum number of active synapses for a segment to be considered during
        # search for the best-matching segments.
        minThreshold=1, #1
        # The max number of synapses added to a segment during learning
        maxNewSynapseCount=20, #6
        permanenceIncrement=0.01,
        permanenceDecrement=0.01,
        predictedSegmentDecrement=0.0005,#0.0001,#0.0005,
        maxSegmentsPerCell=100, #8 16 (worked)
        maxSynapsesPerSegment=100, #8 16 (worked)
        seed=42
    )

    numberImages = 200
    features = []
    labels = []

    DIR = "/home/cappizzino/Documentos/doutorado/dataset"

    path_im = [os.path.join(DIR,sp) for sp in [
        'fall/',
        'spring/',
        'summer/',
        'winter/']]

    # Seasons to compare.
    # First season is the input one. Second season is the reference season.
    # 0 = fall, 1 = spring, 2 = summer, 3 = winter.
    # simul 1 = 2 and 3
    # simul 2 = 1 and 0
    # simul 3 = 0 and 3
    reference_season = 2
    input_season = 3

    # Extract Features
    reference_features, reference_labels = extractFeatures(numberImages, reference_season, model,path_im)
    input_features, input_labels = extractFeatures(numberImages, input_season, model, path_im)

    #print len(input_features[0])
    #print input_labels[0]
    #print input_features

    # Experiments
    # Ground truth
    print('Ground truth')
    GT = np.identity(numberImages, dtype = bool)
    for i in range(GT.shape[0]):
        for j in range(GT.shape[0]-1):
            if i==j:
                GT[i,j]=1

    # Pairwise (raw descriptors)
    print('Pairwise descriptors')
    t = time.time()
    S_pairwise = cosine_similarity(reference_features[:numberImages], input_features[:numberImages])
    elapsed = time.time() - t
    print("Elapsed time: %f seconds\n" %elapsed)              

    # Dimension Reduction and binarizarion
    print('Dimension Reduction')
    P = np.random.randn(len(input_features[0]), 1024)
    P = normc(P)

    # sLSBH (binarized descriptors)
    print('sLSBH')
    t = time.time()
    D1_slsbh = getLSBH(reference_features[:numberImages],P,0.25)
    D2_slsbh = getLSBH(input_features[:numberImages],P,0.25)
    Sb_pairwise = pairwiseDescriptors(D1_slsbh[:numberImages], D2_slsbh[:numberImages])
    elapsed = time.time() - t
    print("Elapsed time: %f seconds\n" %elapsed)   
    #print len(np.nonzero(D1_slsbh[0])[0])

    D1_tm=[]
    D2_tm=[]
    id_max=[]
    id_max1=[]
    id_max2=[]

    print('Temporal Pooler (1) descriptors')
    t = time.time()
    for i in range(numberImages):
        for _ in range(1):
            activeColumnIndices = np.nonzero(D1_slsbh[i,:])[0]
            tm.compute(activeColumnIndices, learn=True)
            activeCells = tm.getWinnerCells()
            D1_tm.append(activeCells)
            id_max1.append(max(activeCells))

    print('Temporal Pooler (2) descriptors')
    for i in range(numberImages):
        activeColumnIndices = np.nonzero(D2_slsbh[i,:])[0]
        tm.compute(activeColumnIndices, learn=False)
        activeCells = tm.getWinnerCells()
        D2_tm.append(activeCells)
        id_max2.append(max(activeCells))

    id_max = max(max(id_max1),max(id_max2))
 
    D1_sparse = sparse.lil_matrix((numberImages, id_max+1), dtype='int8')
    for i in range(numberImages):
        D1_sparse[i,D1_tm[i]] = 1

    D2_sparse = sparse.lil_matrix((numberImages, id_max+1), dtype='int8')
    for i in range(numberImages):
        D2_sparse[i,D2_tm[i]] = 1

    S_TM = pairwiseDescriptors(D1_sparse, D2_sparse)
    elapsed = time.time() - t
    print("Elapsed time: %f seconds\n" %elapsed)

    D1_mcn=[]
    D2_mcn=[]
    id_max=[]
    id_max1=[]
    id_max2=[]

    # Simple HTM parameters
    params = Params()
    params.probAdditionalCon = 0.05    # probability for random connection
    params.nCellPerCol = 32            # number of cells per minicolumn
    params.nInConPerCol = 200          # number of connections per minicolumn
    params.minColumnActivity = 0.75    # minicolumn activation threshold
    params.nColsPerPattern = 50        # minimum number of active minicolumns k_min
    params.kActiveColumn = 100         # maximum number of active minicolumns k_max

    # conversion of the parameter to a natural number that contains the
    # required number of 1s for activation
    params.minColumnActivity = np.round(params.minColumnActivity*params.nInConPerCol)

    htm = MCN('htm',params)

    nCols_MCN=[]
    nCols_HTM=[]

    print ('Simple HTM (1)')
    t = time.time()
    for i in range(numberImages):
        htm.compute(D1_slsbh[i,:],0)
        nCols_MCN.append(htm.nCols)
        nCols_HTM.append(tm.columnDimensions[0])
        id_max1.append(max(htm.winnerCells))
        D1_mcn.append(htm.winnerCells)

    print ('Simple HTM (2)')
    for i in range(numberImages):
        htm.compute(D2_slsbh[i,:],1)
        #nCols_MCN.append(htm.nCols)
        #nCols_HTM.append(tm.columnDimensions[0])
        id_max2.append(max(htm.winnerCells))
        D2_mcn.append(htm.winnerCells)

    id_max = max(max(id_max1),max(id_max2))

    D1_sparse = sparse.lil_matrix((numberImages, id_max+1), dtype='int8')
    for i in range(numberImages):
        D1_sparse[i,D1_mcn[i]] = 1

    D2_sparse = sparse.lil_matrix((numberImages, id_max+1), dtype='int8')
    for i in range(numberImages):
        D2_sparse[i,D2_mcn[i]] = 1

    S_MCN = pairwiseDescriptors(D1_sparse, D2_sparse)
    elapsed = time.time() - t
    print("Elapsed time: %f seconds\n" %elapsed)

    # Results
    print('Results 1')
    fig, ax = plt.subplots()

    P, R = createPR(S_pairwise,GT)
    ax.plot(R, P, label='pairwise / raw (avgP=%f)' %np.trapz(P,R))

    P, R = createPR(S_MCN,GT)
    ax.plot(R, P, label='MCN (avgP=%f)' %np.trapz(P,R))

    P, R = createPR(Sb_pairwise,GT)
    ax.plot(R, P, label='sLSBH / raw (avgP=%f)' %np.trapz(P,R))

    P, R = createPR(S_TM,GT)
    ax.plot(R, P, label='HTM TM (avgP=%f)' %np.trapz(P,R))

    ax.legend()
    ax.grid(True)
    plt.xlabel("Recall")
    plt.ylabel("Precision")
    plt.show()

    print('Results 2')
    fig2, ax2 = plt.subplots()

    ax2.plot(nCols_MCN,'g',label='MCN = %i cols' %htm.nCols)
    ax2.plot(nCols_HTM,'b',label='HTM TM = %i cols' %tm.columnDimensions[0])

    ax2.legend()
    ax2.grid(True)
    plt.xlabel('Number of seen images')
    plt.ylabel('Number of MiniColumns')
    plt.show()

    print('Results 3')
    fig3, (ax3, ax4) = plt.subplots(nrows=1, ncols=2, gridspec_kw={'width_ratios': [2, 1]}, figsize=(9,4))

    P, R = createPR(S_pairwise,GT)
    ax3.plot(R, P, label='pairwise / raw (AUC=%f)' %np.trapz(P,R))

    P, R = createPR(S_MCN,GT)
    ax3.plot(R, P, label='MCN (AUC=%f)' %np.trapz(P,R))

    P, R = createPR(Sb_pairwise,GT)
    ax3.plot(R, P, label='sLSBH / raw (AUC=%f)' %np.trapz(P,R))

    P, R = createPR(S_TM,GT)
    ax3.plot(R, P, label='HTM TM (AUC=%f)' %np.trapz(P,R))

    ax3.grid(True)

    ax3.set_xlabel("Recall", fontsize = 12.0)
    ax3.set_ylabel("Precision", fontsize = 12.0)
    ax3.legend(fontsize=10)

    ax4.plot(nCols_MCN,'g',label='MCN = %i cols' %htm.nCols)
    ax4.plot(nCols_HTM,'b',label='HTM TM = %i cols' %tm.columnDimensions[0])

    ax4.grid(True)

    ax4.tick_params(axis='both', labelsize=6)
    ax4.set_xlabel('Number of seen images', fontsize = 12.0)
    ax4.set_ylabel('Number of MiniColumns', fontsize = 12.0)
    ax4.legend(fontsize=10)

    fig3.savefig('tes.eps')
    plt.show()