def testZeroActiveColumns(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.5,
    minThreshold=2,
    maxNewSynapseCount=3,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.0,
    seed=42)

  previousActiveColumns = [0]
  previousActiveCells = [0, 1, 2, 3]
  expectedActiveCells = [4]

  segment = tm.createSegment(expectedActiveCells[0])
  tm.connections.createSynapse(segment, previousActiveCells[0], .5)
  tm.connections.createSynapse(segment, previousActiveCells[1], .5)
  tm.connections.createSynapse(segment, previousActiveCells[2], .5)
  tm.connections.createSynapse(segment, previousActiveCells[3], .5)

  tm.compute(previousActiveColumns, True)
  self.assertFalse(len(tm.getActiveCells()) == 0)
  self.assertFalse(len(tm.getWinnerCells()) == 0)
  self.assertFalse(len(tm.getPredictiveCells()) == 0)

  zeroColumns = []
  tm.compute(zeroColumns, True)
  self.assertTrue(len(tm.getActiveCells()) == 0)
  self.assertTrue(len(tm.getWinnerCells()) == 0)
  self.assertTrue(len(tm.getPredictiveCells()) == 0)
def testReinforceCorrectlyActiveSegments(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.2,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=4,
    permanenceIncrement=.10,
    permanenceDecrement=.08,
    predictedSegmentDecrement=0.02,
    seed=42)

  prevActiveColumns = [0]
  prevActiveCells = [0, 1, 2, 3]
  activeColumns = [1]
  activeCell = 5

  activeSegment = tm.createSegment(activeCell)
  as1 = tm.connections.createSynapse(activeSegment, prevActiveCells[0], .5)
  as2 = tm.connections.createSynapse(activeSegment, prevActiveCells[1], .5)
  as3 = tm.connections.createSynapse(activeSegment, prevActiveCells[2], .5)
  is1 = tm.connections.createSynapse(activeSegment, 81, .5)  # inactive synapse

  tm.compute(prevActiveColumns, True)
  tm.compute(activeColumns, True)

  self.assertAlmostEqual(.6, tm.connections.dataForSynapse(as1).permanence)
  self.assertAlmostEqual(.6, tm.connections.dataForSynapse(as2).permanence)
  self.assertAlmostEqual(.6, tm.connections.dataForSynapse(as3).permanence)
  self.assertAlmostEqual(.42, tm.connections.dataForSynapse(is1).permanence)
def testRecycleWeakestSynapseToMakeRoomForNewSynapse(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=1,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.50,
    minThreshold=1,
    maxNewSynapseCount=3,
    permanenceIncrement=.02,
    permanenceDecrement=.02,
    predictedSegmentDecrement=0.0,
    seed=42,
    maxSynapsesPerSegment=3)

  prevActiveColumns = [0, 1, 2]
  prevWinnerCells = [0, 1, 2]
  activeColumns = [4]

  matchingSegment = tm.connections.createSegment(4)
  tm.connections.createSynapse(matchingSegment, 81, .6)
  weakestSynapse = tm.connections.createSynapse(matchingSegment, 0, .11)

  tm.compute(prevActiveColumns)
  self.assertEqual(prevWinnerCells, tm.getWinnerCells())
  tm.compute(activeColumns)

  synapses = tm.connections.synapsesForSegment(matchingSegment)
  self.assertEqual(3, len(synapses))
  presynapticCells = set(synapse.presynapticCell for synapse in synapses)
  self.assertFalse(0 in presynapticCells)
def testMatchingSegmentAddSynapsesToAllWinnerCells(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=1,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.50,
    minThreshold=1,
    maxNewSynapseCount=3,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.0,
    seed=42)

  previousActiveColumns = [0, 1]
  prevWinnerCells = [0, 1]
  activeColumns = [4]

  matchingSegment = tm.createSegment(4)
  tm.connections.createSynapse(matchingSegment, 0, .5)

  tm.compute(previousActiveColumns, True)
  self.assertEqual(prevWinnerCells, tm.getWinnerCells())
  tm.compute(activeColumns)

  synapses = tm.connections.synapsesForSegment(matchingSegment)
  self.assertEqual(2, len(synapses))
  for synapse in synapses:
    synapseData = tm.connections.dataForSynapse(synapse)
    if synapseData.presynapticCell != 0:
      self.assertAlmostEqual(.21, synapseData.permanence)
      self.assertEqual(prevWinnerCells[1], synapseData.presynapticCell)
def testNewSegmentAddSynapsesToSubsetOfWinnerCells(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=2,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.0,
    seed=42)

  previousActiveColumns = [0, 1, 2]
  activeColumns = [4]

  tm.compute(previousActiveColumns, True)

  prevWinnerCells = tm.getWinnerCells()  # [0, 8, 7]
  self.assertEqual(3, len(prevWinnerCells))

  tm.compute(activeColumns, True)

  winnerCells = tm.getWinnerCells()  # [18]
  self.assertEqual(1, len(winnerCells))
  segments = list(tm.connections.segmentsForCell(winnerCells[0]))
  self.assertEqual(1, len(segments))
  synapses = list(tm.connections.synapsesForSegment(segments[0]))
  self.assertEqual(2, len(synapses))

  for synapse in synapses:
    synapseData = tm.connections.dataForSynapse(synapse)
    self.assertAlmostEqual(.21, synapseData.permanence)
    self.assertTrue(synapseData.presynapticCell in prevWinnerCells)
def testDestroySegmentsWithTooFewSynapsesToBeMatching(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.2,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=4,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.02,
    seed=42)

  prevActiveColumns = [0]
  prevActiveCells = [0, 1, 2, 3]
  activeColumns = [2]
  expectedActiveCell = 5

  matchingSegment = tm.createSegment(expectedActiveCell)
  tm.connections.createSynapse(matchingSegment, prevActiveCells[0], .015)
  tm.connections.createSynapse(matchingSegment, prevActiveCells[1], .015)
  tm.connections.createSynapse(matchingSegment, prevActiveCells[2], .015)
  tm.connections.createSynapse(matchingSegment, prevActiveCells[3], .015)

  tm.compute(prevActiveColumns, True)
  tm.compute(activeColumns, True)

  self.assertEqual(0, tm.connections.numSegments(expectedActiveCell))
def testActivateCorrectlyPredictiveCells(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.5,
    minThreshold=2,
    maxNewSynapseCount=3,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.0,
    seed=42)

  previousActiveColumns = [0]
  activeColumns = [1]
  previousActiveCells = [0, 1, 2, 3]
  expectedActiveCells = [4]

  activeSegment = tm.createSegment(expectedActiveCells[0])
  tm.connections.createSynapse(activeSegment, previousActiveCells[0], .5)
  tm.connections.createSynapse(activeSegment, previousActiveCells[1], .5)
  tm.connections.createSynapse(activeSegment, previousActiveCells[2], .5)
  tm.connections.createSynapse(activeSegment, previousActiveCells[3], .5)

  tm.compute(previousActiveColumns, True)
  self.assertEqual(expectedActiveCells, tm.getPredictiveCells())
  tm.compute(activeColumns, True)
  self.assertEqual(expectedActiveCells, tm.getActiveCells())
def testDestroyWeakSynapseOnActiveReinforce(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.2,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=4,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.02,
    seed=42)

  previousActiveColumns = [0]
  previousActiveCells = [0, 1, 2, 3]
  activeColumns = [2]
  activeCell = 5

  activeSegment = tm.createSegment(activeCell)
  tm.connections.createSynapse(activeSegment, previousActiveCells[0], .5)
  tm.connections.createSynapse(activeSegment, previousActiveCells[1], .5)
  tm.connections.createSynapse(activeSegment, previousActiveCells[2], .5)

  # Weak inactive synapse.
  tm.connections.createSynapse(activeSegment, previousActiveCells[3], .009)

  tm.compute(previousActiveColumns, True)
  tm.compute(activeColumns, True)

  self.assertEqual(3, tm.connections.numSynapses(activeSegment))
def testPunishMatchingSegmentsInInactiveColumns(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.2,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=4,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.02,
    seed=42)

  previousActiveColumns = [0]
  previousActiveCells = [0, 1, 2, 3]
  activeColumns = [1]
  previousInactiveCell = 81

  activeSegment = tm.connections.createSegment(42)
  as1 = tm.connections.createSynapse(activeSegment,
                                     previousActiveCells[0], .5)
  as2 = tm.connections.createSynapse(activeSegment,
                                     previousActiveCells[1], .5)
  as3 = tm.connections.createSynapse(activeSegment,
                                     previousActiveCells[2], .5)
  is1 = tm.connections.createSynapse(activeSegment,
                                     previousInactiveCell, .5)

  matchingSegment = tm.connections.createSegment(43)
  as4 = tm.connections.createSynapse(matchingSegment,
                                     previousActiveCells[0], .5)
  as5 = tm.connections.createSynapse(matchingSegment,
                                     previousActiveCells[1], .5)
  is2 = tm.connections.createSynapse(matchingSegment,
                                     previousInactiveCell, .5)

  tm.compute(previousActiveColumns, True)
  tm.compute(activeColumns, True)

  self.assertAlmostEqual(.48, tm.connections.dataForSynapse(as1).permanence)
  self.assertAlmostEqual(.48, tm.connections.dataForSynapse(as2).permanence)
  self.assertAlmostEqual(.48, tm.connections.dataForSynapse(as3).permanence)
  self.assertAlmostEqual(.48, tm.connections.dataForSynapse(as4).permanence)
  self.assertAlmostEqual(.48, tm.connections.dataForSynapse(as5).permanence)
  self.assertAlmostEqual(.50, tm.connections.dataForSynapse(is1).permanence)
  self.assertAlmostEqual(.50, tm.connections.dataForSynapse(is2).permanence)
def testNoChangeToMatchingSegmentsInPredictedActiveColumn(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=3,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.0,
    seed=42)

  previousActiveColumns = [0]
  activeColumns = [1]
  previousActiveCells = [0, 1, 2, 3]
  expectedActiveCells = [4]
  otherburstingCells = [5, 6, 7]

  activeSegment = tm.connections.createSegment(expectedActiveCells[0])
  tm.connections.createSynapse(activeSegment, previousActiveCells[0], .5)
  tm.connections.createSynapse(activeSegment, previousActiveCells[1], .5)
  tm.connections.createSynapse(activeSegment, previousActiveCells[2], .5)
  tm.connections.createSynapse(activeSegment, previousActiveCells[3], .5)

  matchingSegmentOnSameCell = tm.connections.createSegment(
    expectedActiveCells[0])
  s1 = tm.connections.createSynapse(matchingSegmentOnSameCell,
                                    previousActiveCells[0], .3)
  s2 = tm.connections.createSynapse(matchingSegmentOnSameCell,
                                    previousActiveCells[1], .3)

  matchingSegmentOnOtherCell = tm.connections.createSegment(
    otherburstingCells[0])
  s3 = tm.connections.createSynapse(matchingSegmentOnOtherCell,
                                    previousActiveCells[0], .3)
  s4 = tm.connections.createSynapse(matchingSegmentOnOtherCell,
                                    previousActiveCells[1], .3)

  tm.compute(previousActiveColumns, True)
  self.assertEqual(expectedActiveCells, tm.getPredictiveCells())
  tm.compute(activeColumns, True)

  self.assertAlmostEqual(.3, tm.connections.dataForSynapse(s1).permanence)
  self.assertAlmostEqual(.3, tm.connections.dataForSynapse(s2).permanence)
  self.assertAlmostEqual(.3, tm.connections.dataForSynapse(s3).permanence)
  self.assertAlmostEqual(.3, tm.connections.dataForSynapse(s4).permanence)
import sys

from nupic.algorithms.temporal_memory import TemporalMemory


class HTM(object):
  """Class implementing Traditional Temporal Memory."""

  def __init__(self, bottomUpInputSize, bottomUpOnBits, seed):
    self.bottomUpInputSize = bottomUpInputSize
    self.bottomUpOnBits = bottomUpOnBits
    self.seed = seed
    self.trainingIterations = 0

    self.tm = TemporalMemory(
      # Must be the same dimensions as the SP
      columnDimensions=(self.bottomUpInputSize,),
      # How many cells in each mini-column.
      cellsPerColumn=4,
      # A segment is active if it has >= activationThreshold connected
      # synapses that are active due to infActiveState
      activationThreshold=13,
      initialPermanence=0.21,
      connectedPermanence=0.5,
      # Minimum number of active synapses for a segment to be considered
      # during search for the best-matching segments.
      minThreshold=1,
      # The max number of synapses added to a segment during learning
      maxNewSynapseCount=3,
      #permanenceIncrement=0.01,
      #permanenceDecrement=0.01,
      predictedSegmentDecrement=0.0005,
      maxSegmentsPerCell=3,
      maxSynapsesPerSegment=3,
      seed=self.seed)

  def compute(self, bottomUpSDR, learn):
    if learn:
      # During learning we provide the current pose angle as bottom-up input
      self.train(bottomUpSDR)
      self.trainingIterations += 1
    else:
      print >>sys.stderr, "Learn: ", learn

  def train(self, bottomUp):
    #print >>sys.stderr, "Bottom up: ", bottomUp
    self.tm.compute(bottomUp, learn=True)
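# A minimal usage sketch for the HTM wrapper above, assuming the NuPIC
# TemporalMemory import used elsewhere in this section; the input size,
# on-bit count, and example SDR below are hypothetical.
if __name__ == "__main__":
  htm = HTM(bottomUpInputSize=512, bottomUpOnBits=20, seed=42)
  # Active column indices for one timestep (hypothetical SDR).
  htm.compute([1, 7, 42, 100, 200], learn=True)
  # Cells the TM expects to become active on the next timestep.
  print htm.tm.getPredictiveCells()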
def testReinforceSelectedMatchingSegmentInBurstingColumn(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=3,
    permanenceIncrement=.10,
    permanenceDecrement=.08,
    predictedSegmentDecrement=0.0,
    seed=42)

  previousActiveColumns = [0]
  previousActiveCells = [0, 1, 2, 3]
  activeColumns = [1]
  burstingCells = [4, 5, 6, 7]

  selectedMatchingSegment = tm.connections.createSegment(burstingCells[0])
  as1 = tm.connections.createSynapse(selectedMatchingSegment,
                                     previousActiveCells[0], .3)
  as2 = tm.connections.createSynapse(selectedMatchingSegment,
                                     previousActiveCells[1], .3)
  as3 = tm.connections.createSynapse(selectedMatchingSegment,
                                     previousActiveCells[2], .3)
  is1 = tm.connections.createSynapse(selectedMatchingSegment, 81, .3)

  otherMatchingSegment = tm.connections.createSegment(burstingCells[1])
  tm.connections.createSynapse(otherMatchingSegment,
                               previousActiveCells[0], .3)
  tm.connections.createSynapse(otherMatchingSegment,
                               previousActiveCells[1], .3)
  tm.connections.createSynapse(otherMatchingSegment, 81, .3)

  tm.compute(previousActiveColumns, True)
  tm.compute(activeColumns, True)

  self.assertAlmostEqual(.4, tm.connections.dataForSynapse(as1).permanence)
  self.assertAlmostEqual(.4, tm.connections.dataForSynapse(as2).permanence)
  self.assertAlmostEqual(.4, tm.connections.dataForSynapse(as3).permanence)
  self.assertAlmostEqual(.22, tm.connections.dataForSynapse(is1).permanence)
def testBurstUnpredictedColumns(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.5,
    minThreshold=2,
    maxNewSynapseCount=3,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.0,
    seed=42)

  activeColumns = [0]
  burstingCells = [0, 1, 2, 3]

  tm.compute(activeColumns, True)

  self.assertEqual(burstingCells, tm.getActiveCells())
def testConnectionsNeverChangeWhenLearningDisabled(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.2,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=4,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.02,
    seed=42)

  prevActiveColumns = [0]
  prevActiveCells = [0, 1, 2, 3]
  activeColumns = [1, 2]  # 1 is predicted, 2 is bursting
  prevInactiveCell = 81
  expectedActiveCells = [4]

  correctActiveSegment = tm.connections.createSegment(expectedActiveCells[0])
  tm.connections.createSynapse(correctActiveSegment, prevActiveCells[0], .5)
  tm.connections.createSynapse(correctActiveSegment, prevActiveCells[1], .5)
  tm.connections.createSynapse(correctActiveSegment, prevActiveCells[2], .5)

  wrongMatchingSegment = tm.connections.createSegment(43)
  tm.connections.createSynapse(wrongMatchingSegment, prevActiveCells[0], .5)
  tm.connections.createSynapse(wrongMatchingSegment, prevActiveCells[1], .5)
  tm.connections.createSynapse(wrongMatchingSegment, prevInactiveCell, .5)

  before = copy.deepcopy(tm.connections)

  tm.compute(prevActiveColumns, False)
  tm.compute(activeColumns, False)

  self.assertEqual(before, tm.connections)
def testNoNewSegmentIfNotEnoughWinnerCells(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=4,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.50,
    minThreshold=2,
    maxNewSynapseCount=3,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.0,
    seed=42)

  zeroColumns = []
  activeColumns = [0]

  tm.compute(zeroColumns, True)
  tm.compute(activeColumns, True)

  self.assertEqual(0, tm.connections.numSegments())
def testRecycleWeakestSynapseToMakeRoomForNewSynapse(self):
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=1,
    activationThreshold=3,
    initialPermanence=.21,
    connectedPermanence=.50,
    minThreshold=1,
    maxNewSynapseCount=3,
    permanenceIncrement=.02,
    permanenceDecrement=.02,
    predictedSegmentDecrement=0.0,
    seed=42,
    maxSynapsesPerSegment=4)

  prevActiveColumns = [1, 2, 3]
  prevWinnerCells = [1, 2, 3]
  activeColumns = [4]

  matchingSegment = tm.createSegment(4)
  tm.connections.createSynapse(matchingSegment, 81, .6)
  # Create a weak synapse. Make sure it's not so weak that permanenceIncrement
  # destroys it.
  tm.connections.createSynapse(matchingSegment, 0, .11)
  # Create a synapse that will match.
  tm.connections.createSynapse(matchingSegment, 1, .20)
  # Create a synapse with a high permanence.
  tm.connections.createSynapse(matchingSegment, 31, .60)

  tm.compute(prevActiveColumns)
  self.assertEqual(prevWinnerCells, tm.getWinnerCells())
  tm.compute(activeColumns)

  synapses = tm.connections.synapsesForSegment(matchingSegment)
  self.assertEqual(4, len(synapses))
  presynapticCells = set(synapse.presynapticCell for synapse in synapses)
  self.assertEqual(set([1, 2, 3, 31]), presynapticCells)
def testActiveSegmentGrowSynapsesAccordingToPotentialOverlap(self):
  """
  When a segment becomes active, grow synapses to previous winner cells.

  The number of grown synapses is calculated from the "matching segment"
  overlap, not the "active segment" overlap.
  """
  tm = TemporalMemory(
    columnDimensions=[32],
    cellsPerColumn=1,
    activationThreshold=2,
    initialPermanence=.21,
    connectedPermanence=.50,
    minThreshold=1,
    maxNewSynapseCount=4,
    permanenceIncrement=.10,
    permanenceDecrement=.10,
    predictedSegmentDecrement=0.0,
    seed=42)

  # Use 1 cell per column so that we have easy control over the winner cells.
  previousActiveColumns = [0, 1, 2, 3, 4]
  prevWinnerCells = [0, 1, 2, 3, 4]
  activeColumns = [5]

  activeSegment = tm.createSegment(5)
  tm.connections.createSynapse(activeSegment, 0, .5)
  tm.connections.createSynapse(activeSegment, 1, .5)
  tm.connections.createSynapse(activeSegment, 2, .2)

  tm.compute(previousActiveColumns, True)
  self.assertEqual(prevWinnerCells, tm.getWinnerCells())
  tm.compute(activeColumns, True)

  presynapticCells = set(
    synapse.presynapticCell
    for synapse in tm.connections.synapsesForSegment(activeSegment))
  self.assertTrue(presynapticCells == set([0, 1, 2, 3]) or
                  presynapticCells == set([0, 1, 2, 4]))
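# Worked numbers for the test above (assumed reading): the segment starts with
# potential synapses to cells 0, 1, and 2, so its "matching" overlap with the
# previous winner cells is 3. With maxNewSynapseCount=4, the TM grows
# 4 - 3 = 1 new synapse, to either remaining winner cell (3 or 4), which gives
# the two accepted presynaptic sets asserted at the end.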
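# The test methods above assume a unittest.TestCase subclass and the NuPIC
# imports below; this harness is a minimal sketch (the class name is
# hypothetical) showing how they would be run.
import copy
import unittest

from nupic.algorithms.temporal_memory import TemporalMemory


class TemporalMemoryTest(unittest.TestCase):
  # Paste the test methods above into this class body.
  pass


if __name__ == "__main__":
  unittest.main()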
def buttoncb(index, state):
  global mode, tm, leds, speaker, bursting
  if index == 0:
    mode = 0
    tm = TemporalMemory(
      columnDimensions=(200,),
      cellsPerColumn=32,
      initialPermanence=0.5,
      connectedPermanence=0.5,
      minThreshold=10,
      maxNewSynapseCount=32,
      permanenceIncrement=0.1,
      permanenceDecrement=0.1,
      activationThreshold=13,
    )
    tm.compute(inputColumns[0], learn=True)
    for y in range(3):
      for x in range(1, 5):
        leds[x].on(0.14)
      sleep(0.25)
    mode = 1
  elif mode == 1:
    if state:
      leds[index].on()
      speaker.play(tones[index])
      tm.compute(inputColumns[index], learn=True)
      if len(tm.getActiveCells()) > 40:
        bursting = True
    else:
      leds[index].off()
      speaker.stop()
      if bursting:
        bursting = False
        mode = 0
        sleep(1)
        unfold()
def runHotgym(numRecords):
  with open(_PARAMS_PATH, "r") as f:
    modelParams = yaml.safe_load(f)["modelParams"]
  enParams = modelParams["sensorParams"]["encoders"]
  spParams = modelParams["spParams"]
  tmParams = modelParams["tmParams"]

  timeOfDayEncoder = DateEncoder(
    timeOfDay=enParams["timestamp_timeOfDay"]["timeOfDay"])
  weekendEncoder = DateEncoder(
    weekend=enParams["timestamp_weekend"]["weekend"])
  scalarEncoder = RandomDistributedScalarEncoder(
    enParams["consumption"]["resolution"])

  encodingWidth = (timeOfDayEncoder.getWidth() +
                   weekendEncoder.getWidth() +
                   scalarEncoder.getWidth())

  sp = SpatialPooler(
    # How large the input encoding will be.
    inputDimensions=(encodingWidth,),
    # How many mini-columns will be in the Spatial Pooler.
    columnDimensions=(spParams["columnCount"],),
    # What percent of the column's receptive field is available for potential
    # synapses?
    potentialPct=spParams["potentialPct"],
    # Potential radius should be set to the input size if there is global
    # inhibition.
    potentialRadius=encodingWidth,
    # This means that the input space has no topology.
    globalInhibition=spParams["globalInhibition"],
    localAreaDensity=spParams["localAreaDensity"],
    # Roughly 2%, given that there is only one inhibition area because we
    # have turned on globalInhibition (40 / 2048 = 0.0195).
    numActiveColumnsPerInhArea=spParams["numActiveColumnsPerInhArea"],
    # How quickly synapses grow and degrade.
    synPermInactiveDec=spParams["synPermInactiveDec"],
    synPermActiveInc=spParams["synPermActiveInc"],
    synPermConnected=spParams["synPermConnected"],
    # boostStrength controls the strength of boosting. Boosting encourages
    # efficient usage of SP columns.
    boostStrength=spParams["boostStrength"],
    # Random number generator seed.
    seed=spParams["seed"],
    # TODO: is this useful?
    # Determines if inputs at the beginning and end of an input dimension
    # should be considered neighbors when mapping columns to inputs.
    wrapAround=True)

  tm = TemporalMemory(
    # Must be the same dimensions as the SP
    columnDimensions=(tmParams["columnCount"],),
    # How many cells in each mini-column.
    cellsPerColumn=tmParams["cellsPerColumn"],
    # A segment is active if it has >= activationThreshold connected synapses
    # that are active due to infActiveState
    activationThreshold=tmParams["activationThreshold"],
    initialPermanence=tmParams["initialPerm"],
    # TODO: This comes from the SP params, is this normal?
    connectedPermanence=spParams["synPermConnected"],
    # Minimum number of active synapses for a segment to be considered during
    # search for the best-matching segments.
    minThreshold=tmParams["minThreshold"],
    # The max number of synapses added to a segment during learning
    maxNewSynapseCount=tmParams["newSynapseCount"],
    permanenceIncrement=tmParams["permanenceInc"],
    permanenceDecrement=tmParams["permanenceDec"],
    predictedSegmentDecrement=0.0,
    maxSegmentsPerCell=tmParams["maxSegmentsPerCell"],
    maxSynapsesPerSegment=tmParams["maxSynapsesPerSegment"],
    seed=tmParams["seed"])

  classifier = SDRClassifierFactory.create()
  results = []
  with open(_INPUT_FILE_PATH, "r") as fin:
    reader = csv.reader(fin)
    headers = reader.next()
    # Skip the two NuPIC CSV metadata rows (field types and flags).
    reader.next()
    reader.next()

    for count, record in enumerate(reader):
      if count >= numRecords:
        break

      # Convert the timestamp string into a Python datetime object.
      dateString = datetime.datetime.strptime(record[0], "%m/%d/%y %H:%M")
      # Convert the data value string into a float.
      consumption = float(record[1])

      # To encode, we need to provide zero-filled numpy arrays for the
      # encoders to populate.
      timeOfDayBits = numpy.zeros(timeOfDayEncoder.getWidth())
      weekendBits = numpy.zeros(weekendEncoder.getWidth())
      consumptionBits = numpy.zeros(scalarEncoder.getWidth())

      # Now we call the encoders to create bit representations for each value.
      timeOfDayEncoder.encodeIntoArray(dateString, timeOfDayBits)
      weekendEncoder.encodeIntoArray(dateString, weekendBits)
      scalarEncoder.encodeIntoArray(consumption, consumptionBits)

      # Concatenate all these encodings into one large encoding for Spatial
      # Pooling.
      encoding = numpy.concatenate(
        [timeOfDayBits, weekendBits, consumptionBits])

      # Create an array to represent active columns, all initially zero. This
      # will be populated by the compute method below. It must have the same
      # dimensions as the Spatial Pooler.
      activeColumns = numpy.zeros(spParams["columnCount"])

      # Execute Spatial Pooling algorithm over input space.
      sp.compute(encoding, True, activeColumns)
      activeColumnIndices = numpy.nonzero(activeColumns)[0]

      # Execute Temporal Memory algorithm over active mini-columns.
      tm.compute(activeColumnIndices, learn=True)
      activeCells = tm.getActiveCells()

      # Get the bucket info for this input value for classification.
      bucketIdx = scalarEncoder.getBucketIndices(consumption)[0]

      # Run classifier to translate active cells back to scalar value.
      classifierResult = classifier.compute(
        recordNum=count,
        patternNZ=activeCells,
        classification={
          "bucketIdx": bucketIdx,
          "actValue": consumption
        },
        learn=True,
        infer=True)

      # Print the best prediction for 1 step out.
      oneStepConfidence, oneStep = sorted(
        zip(classifierResult[1], classifierResult["actualValues"]),
        reverse=True)[0]
      print("1-step: {:16} ({:4.4}%)".format(oneStep,
                                             oneStepConfidence * 100))
      results.append([oneStep, oneStepConfidence * 100, None, None])

    return results
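# A minimal invocation sketch for runHotgym above, assuming _PARAMS_PATH and
# _INPUT_FILE_PATH point at the hotgym model-params YAML and input CSV; the
# record count is arbitrary.
if __name__ == "__main__":
  runHotgym(100)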
x[3, 30:40] = 1  # Input SDR representing "D", corresponding to columns 30-39
x[4, 40:50] = 1  # Input SDR representing "E", corresponding to columns 40-49

# Step 3: send this simple sequence to the temporal memory for learning.
# We repeat the sequence 10 times.
for i in range(10):

  # Send each letter in the sequence in order
  for j in range(5):
    activeColumns = set([k for k, bit in zip(count(), x[j]) if bit == 1])

    # The compute method performs one step of learning and/or inference. Note:
    # here we just perform learning, but you can perform prediction/inference
    # and learning in the same step if you want (online learning).
    tm.compute(activeColumns, learn=True)

    # The following print statements can be ignored.
    # Useful for tracing internal states
    print("active cells " + str(tm.getActiveCells()))
    print("predictive cells " + str(tm.getPredictiveCells()))
    print("winner cells " + str(tm.getWinnerCells()))
    print("# of active segments " + str(tm.connections.numSegments()))

  # The reset command tells the TM that a sequence just ended and essentially
  # zeros out all the states. It is not strictly necessary, but it's a bit
  # messier without resets, and the TM learns quicker with resets.
  tm.reset()

#######################################################################
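# A minimal sketch: right after a tm.compute(...) step (before reset), a
# predicted cell index maps back to its mini-column by integer division, since
# cell indices are grouped by column. Assumes the `tm` used above;
# getCellsPerColumn() is the standard NuPIC getter.
predictedColumns = set(
  cell // tm.getCellsPerColumn() for cell in tm.getPredictiveCells())
print("predicted columns " + str(sorted(predictedColumns)))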
print(end - start)

print("Temporal pooler learning")
start = time.time()
A_score = np.zeros(len(Data))
for x in range(len(Data)):
  encoder = multiencode(var_encoders, Data, x)
  # e_val = RDSE.encode(Data['value'][x])
  # e_tod = TODE.encode(Data.index[x])
  # e_wend = WENDE.encode(Data.index[x])
  # encoder = np.concatenate([e_val])
  SP.compute(encoder, False, active_columns)
  col_index = active_columns.nonzero()[0]
  TM.compute(col_index, learn=True)
  if x > 0:
    inter = set(col_index).intersection(Prev_pred_col)
    inter_l = len(inter)
    active_l = len(col_index)
    A_score[x] = 1 - (inter_l / active_l)
    Data.iat[x, -2] = A_score[x]
  Prev_pred_col = list(
    set(cell // cell_col for cell in TM.getPredictiveCells()))
end = time.time()
print(end - start)

W = 72
W_prim = 5
eps = 1e-6
  columnDimensions=columnDimensions,
  globalInhibition=True,
  numActiveColumnsPerInhArea=21)

tm = TemporalMemory(columnDimensions=columnDimensions)
c = SDRClassifier(steps=[1], alpha=0.1, actValueAlpha=0.1, verbosity=0)

x_true = x[1:]
x_predict = np.zeros(len(x) - 1)

for i, xi in tqdm(enumerate(x[:-1])):
  encoded = encoder.encode(xi)
  bucketIdx = np.where(encoded > 0)[0][0]
  spd = np.zeros(columnDimensions[0])
  sp.compute(encoded, True, spd)
  active_indices = np.where(spd > 0)[0]
  tm.compute(active_indices)
  active_cell_indices = tm.getActiveCells()
  predictive_cell_indices = tm.getPredictiveCells()
  patternNZ = np.asarray(active_cell_indices)
  patternNZ = np.append(patternNZ, predictive_cell_indices)
  patternNZ = patternNZ.astype(np.int)
  patternNZ = list(set(patternNZ))
  result = c.compute(
    recordNum=i,
    patternNZ=patternNZ,
    classification={
      "bucketIdx": bucketIdx,
      "actValue": xi
    },
    learn=True,
def HTM_AD(
    Data='Test',
    vars={'value': ['num']},
    prec_param=5,
    pooler_out=2024,  # Number of columns of the pooler output
    cell_col=5,  # HTM cells per column
    W=72,  # Window parameter
    W_prim=5,  # Local window for anomaly detection likelihood
    eps=1e-6,  # To avoid divisions by zero
    athreshold=0.95):
  """
  This function performs HTM-based anomaly detection on a provided time
  series.

  :param Data:
  :param vars: Possible values: num, tod, weekend
  :param prec_param: A parameter that defines how much precision the number
      encoder has. The encoder precision depends on the variability of the
      data; the real precision is computed taking into account both the
      precision parameter and the data std. A high precision might mean a
      high error at predicting the variable value in noisy variables.
  :param pooler_out: Number of columns of the pooler output
  :param cell_col: HTM cells per column
  :param W: Window parameter
  :param W_prim: Local window for anomaly detection likelihood
  :param eps: To avoid divisions by zero
  :param athreshold: To classify, based on anomaly likelihood, whether there
      is an anomaly or not
  :return: The Data + 3 columns:
      Anomaly: indicates the error of the value predicted by the HTM network
      Anomaly_likelihood: indicates the likelihood of the data being anomalous
      Anomaly_flag: classifies the data as anomalous vs non-anomalous
  """

  if Data == 'Test':
    # If there is no data available, simply load the temperature benchmark
    # dataset.
    Data = pd.read_csv('anomaly_API/Data/sample.csv',
                       parse_dates=True,
                       index_col='timestamp')
    Data = Data.resample('H').bfill().interpolate()

  TODE = DateEncoder(timeOfDay=(21, 1))
  WENDE = DateEncoder(weekend=21)

  var_encoders = set()
  # Spatial Pooler Parameters
  for x in vars:
    for y in vars[x]:
      if y == 'num':
        exec("RDSE_" + x + " = RandomDistributedScalarEncoder(resolution="
             "Data['" + x + "'].std()/prec_param)", locals(), globals())
        var_encoders.add(Encoder(x, ["RDSE_" + x]))
      elif y == 'weekend':
        var_encoders.add(Encoder(x, ["WENDE"]))
      elif y == 'tod':
        var_encoders.add(Encoder(x, ["TODE"]))
      else:
        return {"error": "Variable encoder type is not recognized "}

  encoder_width = 0  # Computes encoder width
  for x in var_encoders:
    for y in x.encoders:
      exec("s = " + y + ".getWidth()", locals(), globals())
      encoder_width += s

  SP = SpatialPooler(
    inputDimensions=encoder_width,
    columnDimensions=pooler_out,
    potentialPct=0.8,
    globalInhibition=True,
    numActiveColumnsPerInhArea=pooler_out // 50,  # Gets 2% of the total area
    boostStrength=1.0,
    wrapAround=False)
  TM = TemporalMemory(columnDimensions=(pooler_out,),
                      cellsPerColumn=cell_col)

  Data['Anomaly'] = 0.0
  Data['Anomaly_Likelihood'] = 0.0

  # Train Spatial Pooler
  print("Spatial pooler learning")
  start = time.time()
  active_columns = np.zeros(pooler_out)
  for x in range(len(Data)):
    encoder = multiencode(var_encoders, Data, x)
    SP.compute(encoder, True, active_columns)
  end = time.time()
  print(end - start)

  # Temporal pooler
  print("Temporal pooler learning")
  start = time.time()
  A_score = np.zeros(len(Data))
  for x in range(len(Data)):
    encoder = multiencode(var_encoders, Data, x)
    SP.compute(encoder, False, active_columns)
    col_index = active_columns.nonzero()[0]
    TM.compute(col_index, learn=True)
    if x > 0:
      inter = set(col_index).intersection(Prev_pred_col)
      inter_l = len(inter)
      active_l = len(col_index)
      A_score[x] = 1 - (inter_l / active_l)
      Data.iat[x, -2] = A_score[x]
    Prev_pred_col = list(
      set(cell // cell_col for cell in TM.getPredictiveCells()))
  end = time.time()
  print(end - start)

  # Computes the likelihood of the anomaly
  AL_score = np.zeros(len(Data))
  for x in range(len(Data)):
    if x > 0:
      W_vec = A_score[max(0, x - W):x]
      W_prim_vec = A_score[max(0, x - W_prim):x]
      AL_score[x] = 1 - 2 * norm.sf(
        abs(np.mean(W_vec) - np.mean(W_prim_vec)) / max(np.std(W_vec), eps))
      Data.iat[x, -1] = AL_score[x]

  Data['Anomaly_flag'] = athreshold < Data['Anomaly_Likelihood']

  return Data
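# A self-contained sketch (an assumed reading, not part of the original file)
# of the likelihood step above: the short-window mean of the anomaly scores is
# compared to the long-window mean in units of the long-window standard
# deviation, and the two-tailed Gaussian tail probability is mapped into
# [0, 1].
import numpy as np
from scipy.stats import norm

def anomaly_likelihood(scores, W=72, W_prim=5, eps=1e-6):
  z = (abs(np.mean(scores[-W_prim:]) - np.mean(scores[-W:]))
       / max(np.std(scores[-W:]), eps))
  return 1 - 2 * norm.sf(z)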
buttonPins = [10, 9, 11, 5, 6]
buttons = []
for x in range(5):
  buttons.append(Button(x, buttonPins[x], buttoncb))

tm.reset()
tm.compute(inputColumns[0], learn=True)
for y in range(3):
  for x in range(1, 5):
    leds[x].on(0.14)
  sleep(0.25)
mode = 1

try:
  while True:
    pass
finally:
  GPIO.cleanup()
consumeEncoder.encodeIntoArray(consumption, consumptionBits)

encoding = numpy.concatenate(
  [timeOfDayBits, weekendBits, consumptionBits])

#ONE = {'id': count, 'input': consumption, 'bucket': bucketIdx,
#       'output': consumptionBits.tolist()}
#print ONE
#REZ.append(ONE)

# spatial pooling
activeColumns = numpy.zeros(COL_WIDTH, numpy.int8)
sp.compute(encoding, True, activeColumns)
activeColumnIndices = numpy.nonzero(activeColumns)[0]

# temporal memory
tm.compute(activeColumnIndices, learn=True)
activeCells = tm.getActiveCells()

# classification
bucketIdx = consumeEncoder.getBucketIndices(consumption)[0]
###
#hamming[bucketIdx] = consumptionBits
###
classifierResult = classifier.compute(
  recordNum=count,
  patternNZ=activeCells,
  classification={
    "bucketIdx": bucketIdx,
    "actValue": consumption
  },
  learn=True,
  infer=True)
class Entity():

  def __init__(self, columnCount, InputEncoderParams, toL4ConnectorParamsI,
               toL4ConnectorParamsII, toL5ConnectorParams,
               toD1ConnectorParams, toD2ConnectorParams, L4Params, L5Params,
               k, D1Params, D2Params):
    self.columnCount = columnCount
    self.toL4ConnectorParamsI = toL4ConnectorParamsI
    self.toL4ConnectorParamsII = toL4ConnectorParamsII
    self.toL5ConnectorParams = toL5ConnectorParams
    self.toD1ConnectorParams = toD1ConnectorParams
    self.toD2ConnectorParams = toD2ConnectorParams
    self.L4Params = L4Params
    self.L5Params = L5Params
    self.k = k
    self.D1Params = D1Params
    self.D2Params = D2Params
    self.learning = False

    #encoder
    from nupic.encoders import MultiEncoder
    self.InputEncoder = MultiEncoder()
    self.InputEncoder.addMultipleEncoders(InputEncoderParams)
    print "Encoder Online"

    #spatialPoolers
    from nupic.algorithms.spatial_pooler import SpatialPooler
    self.toL4ConnectorI = SpatialPooler(
      inputDimensions=(toL4ConnectorParamsI["inputDimensions"],),
      columnDimensions=(columnCount,),
      potentialPct=toL4ConnectorParamsI["potentialPct"],
      globalInhibition=toL4ConnectorParamsI["globalInhibition"],
      localAreaDensity=toL4ConnectorParamsI["localAreaDensity"],
      numActiveColumnsPerInhArea=toL4ConnectorParamsI[
        "numActiveColumnsPerInhArea"],
      synPermInactiveDec=toL4ConnectorParamsI["synPermInactiveDec"],
      synPermActiveInc=toL4ConnectorParamsI["synPermActiveInc"],
      synPermConnected=toL4ConnectorParamsI["synPermConnected"],
      boostStrength=toL4ConnectorParamsI["boostStrength"],
      seed=toL4ConnectorParamsI["seed"],
      wrapAround=toL4ConnectorParamsI["wrapAround"])
    #this part sucks
    self.toL4ConnectorII = SpatialPooler(
      inputDimensions=(columnCount * 3,),
      columnDimensions=(columnCount,),
      potentialPct=toL4ConnectorParamsII["potentialPct"],
      globalInhibition=toL4ConnectorParamsII["globalInhibition"],
      localAreaDensity=toL4ConnectorParamsII["localAreaDensity"],
      numActiveColumnsPerInhArea=toL4ConnectorParamsII[
        "numActiveColumnsPerInhArea"],
      synPermInactiveDec=toL4ConnectorParamsII["synPermInactiveDec"],
      synPermActiveInc=toL4ConnectorParamsII["synPermActiveInc"],
      synPermConnected=toL4ConnectorParamsII["synPermConnected"],
      boostStrength=toL4ConnectorParamsII["boostStrength"],
      seed=toL4ConnectorParamsII["seed"],
      wrapAround=toL4ConnectorParamsII["wrapAround"])
    print "toL4Connector Online"
    self.toL5Connector = SpatialPooler(
      inputDimensions=(columnCount,),
      columnDimensions=(columnCount,),
      potentialPct=toL5ConnectorParams["potentialPct"],
      globalInhibition=toL5ConnectorParams["globalInhibition"],
      localAreaDensity=toL5ConnectorParams["localAreaDensity"],
      numActiveColumnsPerInhArea=toL5ConnectorParams[
        "numActiveColumnsPerInhArea"],
      synPermInactiveDec=toL5ConnectorParams["synPermInactiveDec"],
      synPermActiveInc=toL5ConnectorParams["synPermActiveInc"],
      synPermConnected=toL5ConnectorParams["synPermConnected"],
      boostStrength=toL5ConnectorParams["boostStrength"],
      seed=toL5ConnectorParams["seed"],
      wrapAround=toL5ConnectorParams["wrapAround"])
    print "toL5Connector Online"
    self.toD1Connector = SpatialPooler(
      inputDimensions=(columnCount,),
      columnDimensions=(columnCount,),
      potentialPct=toD1ConnectorParams["potentialPct"],
      globalInhibition=toD1ConnectorParams["globalInhibition"],
      localAreaDensity=toD1ConnectorParams["localAreaDensity"],
      numActiveColumnsPerInhArea=toD1ConnectorParams[
        "numActiveColumnsPerInhArea"],
      synPermInactiveDec=toD1ConnectorParams["synPermInactiveDec"],
      synPermActiveInc=toD1ConnectorParams["synPermActiveInc"],
      synPermConnected=toD1ConnectorParams["synPermConnected"],
      boostStrength=toD1ConnectorParams["boostStrength"],
      seed=toD1ConnectorParams["seed"],
      wrapAround=toD1ConnectorParams["wrapAround"])
    print "toD1Connector Online"
    self.toD2Connector = SpatialPooler(
      inputDimensions=(columnCount,),
      columnDimensions=(columnCount,),
      potentialPct=toD2ConnectorParams["potentialPct"],
      globalInhibition=toD2ConnectorParams["globalInhibition"],
      localAreaDensity=toD2ConnectorParams["localAreaDensity"],
      numActiveColumnsPerInhArea=toD2ConnectorParams[
        "numActiveColumnsPerInhArea"],
      synPermInactiveDec=toD2ConnectorParams["synPermInactiveDec"],
      synPermActiveInc=toD2ConnectorParams["synPermActiveInc"],
      synPermConnected=toD2ConnectorParams["synPermConnected"],
      boostStrength=toD2ConnectorParams["boostStrength"],
      seed=toD2ConnectorParams["seed"],
      wrapAround=toD2ConnectorParams["wrapAround"])
    print "toD2Connector Online"

    #HTM Layers
    from nupic.algorithms.temporal_memory import TemporalMemory
    self.L4ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
    self.L4 = TemporalMemory(
      columnDimensions=(columnCount,),
      seed=42,
    )
    print "L4 Online"
    self.L5ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
    self.L5 = TemporalMemory(
      columnDimensions=(columnCount,),
      seed=42,
    )
    print "L5 Online"
    self.D1ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
    self.D1 = TemporalMemory(
      columnDimensions=(columnCount,),
      seed=42,
      initialPermanence=0.21,
      connectedPermanence=0.5,
    )
    print "D1 Online"
    self.D2ActiveColumns = numpy.zeros(self.columnCount, dtype=int)
    self.D2 = TemporalMemory(
      columnDimensions=(columnCount,),
      seed=42,
      initialPermanence=0.21,
      connectedPermanence=0.5,
    )
    print "D2 Online"

  def encode_input(self, sine1, sine2, angularSpeed1, angularSpeed2,
                   efferenceCopy):
    return self.InputEncoder.encode({
      "sine1": sine1,
      "sine2": sine2,
      "angularSpeed1": angularSpeed1,
      "angularSpeed2": angularSpeed2,
      "efferenceCopy": efferenceCopy
    })

  def reset(self):
    self.action = 0
    self.L4.reset()
    self.L5.reset()
    self.D1.reset()
    self.D2.reset()

  def mimic(self, observation, action):
    #mimicking only requires remembering the given obs-act pattern,
    #thus the striatum is neglected in this func
    self.learning = True
    self.action = action
    encodedInput = self.encode_input(observation[0], observation[2],
                                     observation[4], observation[5],
                                     str(action))
    self.toL4ConnectorI.compute(encodedInput, self.learning,
                                self.L4ActiveColumns)
    self.L4.compute(self.L4ActiveColumns, learn=self.learning)
    L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]
    L5Temp = numpy.zeros(self.columnCount, dtype=int)
    for column in L4activeColumnIndices:
      L5Temp[column] = 1
    self.toL5Connector.compute(L5Temp, self.learning, self.L5ActiveColumns)
    self.L5.compute(self.L5ActiveColumns, learn=self.learning)
    L5activeColumnIndices = numpy.nonzero(self.L5ActiveColumns)[0]
    #no action generation is needed in this func

  def learn(self, env, observation, expectedReaction):
    #We humans learn by trial and error, and so does an AI agent. Neural
    #networks have BP, but HTM does not have a clear way to do reinforcement
    #learning (where to feed in rewards?). Here I try to do something new.
    self.learning = False
    #...trial
    encodedInput = self.encode_input(observation[0], observation[2],
                                     observation[4], observation[5],
                                     str(self.action))
    self.toL4ConnectorI.compute(encodedInput, self.learning,
                                self.L4ActiveColumns)
    #ready to receive D1's disinhibition and D2's inhibition
    L4Temp = numpy.zeros(self.columnCount * 3, dtype=int)
    L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]
    for column in L4activeColumnIndices:
      L4Temp[int(column) * 3] = 1
    D1ActiveColumnsIndices = numpy.nonzero(self.D1ActiveColumns)[0]
    for column in D1ActiveColumnsIndices:
      L4Temp[int(column) * 3 + 1] = 1
    D2ActiveColumnsIndices = numpy.nonzero(self.D2ActiveColumns)[0]
    for i in range(self.columnCount - 1):
      L4Temp[i * 3 + 2] = 1
    for column in D2ActiveColumnsIndices:
      #achieve inhibition in this way
      L4Temp[int(column) * 3 + 2] = 0
    self.toL4ConnectorII.compute(L4Temp, self.learning,
                                 self.L4ActiveColumns)
    self.L4.compute(self.L4ActiveColumns, learn=self.learning)
    L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]
    L5Temp = numpy.zeros(self.columnCount, dtype=int)
    for column in L4activeColumnIndices:
      L5Temp[column] = 1
    self.toL5Connector.compute(L5Temp, self.learning, self.L5ActiveColumns)
    self.L5.compute(self.L5ActiveColumns, learn=self.learning)
    L5activeColumnIndices = numpy.nonzero(self.L5ActiveColumns)[0]

    #Action Generation
    #there are 84 bits in the SDR representing the action fed to the agent;
    #this is the "Efference Copy"
    p = 84
    count0 = 0
    count1 = 0
    count2 = 0
    for activeIndice in L5activeColumnIndices:
      convertedIndice = (activeIndice + 1) * 1126 / self.columnCount
      if convertedIndice <= 1126 - p / 4 and convertedIndice > 1126 - p / 2:
        count2 = count2 + 1
      if convertedIndice <= 1126 - p / 2 and convertedIndice > 1126 - 3 * p / 4:
        count1 = count1 + 1
      if convertedIndice <= 1126 - 3 * p / 4 and convertedIndice > 1126 - p:
        count0 = count0 + 1
    if count2 == max(count0, count1, count2):
      self.action = 2
    if count1 == max(count0, count1, count2):
      self.action = 1
    if count0 == max(count0, count1, count2):
      self.action = 0

    #...and error
    if self.action == expectedReaction:
      reward = 0.1
    else:
      reward = -0.1
    self.D1.setConnectedPermanence(self.D1.getConnectedPermanence() *
                                   (self.k ** (-reward)))  #reward
    self.D2.setConnectedPermanence(self.D2.getConnectedPermanence() *
                                   (self.k ** reward))  #punishment

    #Learn to correct mistakes (remember what's right and what's wrong)
    self.learning = True
    DTemp = numpy.zeros(self.columnCount, dtype=int)
    for column in L5activeColumnIndices:
      DTemp[column] = 1
    self.toD1Connector.compute(DTemp, self.learning, self.D1ActiveColumns)
    self.toD2Connector.compute(DTemp, self.learning, self.D2ActiveColumns)
    self.D1.compute(self.D1ActiveColumns, learn=self.learning)
    self.D2.compute(self.D2ActiveColumns, learn=self.learning)
    return reward

  def react(self, observation):
    self.learning = False
    encodedInput = self.encode_input(observation[0], observation[2],
                                     observation[4], observation[5],
                                     str(self.action))
    self.toL4ConnectorI.compute(encodedInput, self.learning,
                                self.L4ActiveColumns)
    L4Temp = numpy.zeros(self.columnCount * 3, dtype=int)
    L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]
    for column in L4activeColumnIndices:
      L4Temp[int(column) * 3] = 1
    D1ActiveColumnsIndices = numpy.nonzero(self.D1ActiveColumns)[0]
    for column in D1ActiveColumnsIndices:
      L4Temp[int(column) * 3 + 1] = 1
    D2ActiveColumnsIndices = numpy.nonzero(self.D2ActiveColumns)[0]
    for i in range(self.columnCount - 1):
      L4Temp[i * 3 + 2] = 1
    for column in D2ActiveColumnsIndices:
      L4Temp[int(column) * 3 + 2] = 0
    self.toL4ConnectorII.compute(L4Temp, self.learning,
                                 self.L4ActiveColumns)
    self.L4.compute(self.L4ActiveColumns, learn=self.learning)
    L4activeColumnIndices = numpy.nonzero(self.L4ActiveColumns)[0]
    L5Temp = numpy.zeros(self.columnCount, dtype=int)
    for column in L4activeColumnIndices:
      L5Temp[column] = 1
    self.toL5Connector.compute(L5Temp, self.learning, self.L5ActiveColumns)
    self.L5.compute(self.L5ActiveColumns, learn=self.learning)
    L5activeColumnIndices = numpy.nonzero(self.L5ActiveColumns)[0]

    p = 84
    count0 = 0
    count1 = 0
    count2 = 0
    for activeIndice in L5activeColumnIndices:
      convertedIndice = (activeIndice + 1) * 1126 / self.columnCount
      if convertedIndice <= 1126 - p / 4 and convertedIndice > 1126 - p / 2:
        count2 = count2 + 1
      if convertedIndice <= 1126 - p / 2 and convertedIndice > 1126 - 3 * p / 4:
        count1 = count1 + 1
      if convertedIndice <= 1126 - 3 * p / 4 and convertedIndice > 1126 - p:
        count0 = count0 + 1
    if count2 == max(count0, count1, count2):
      self.action = 2
    if count1 == max(count0, count1, count2):
      self.action = 1
    if count0 == max(count0, count1, count2):
      self.action = 0
    return self.action
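# A minimal sketch of the reward rule used in Entity.learn above: rather than
# changing individual synapse permanences, the connected-permanence threshold
# of each striatal TM is rescaled, so a positive reward (k**(-r) < 1) makes
# more of D1's ("go" pathway) synapses count as connected, while k**r > 1
# makes fewer of D2's ("no-go" pathway) count. The numbers below are
# illustrative only.
k, reward = 2.0, 0.1
d1_threshold = 0.5 * k ** (-reward)  # ~0.467: D1 pathway loosened
d2_threshold = 0.5 * k ** reward     # ~0.536: D2 pathway tightened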
def main():
  DIR = "./sim_data"

  # Odom Encoder
  xSDR = ScalarEncoder(w=21, minval=0, maxval=20, n=256)
  ySDR = ScalarEncoder(w=21, minval=0, maxval=20, n=256)
  xyWidth = xSDR.getWidth() + ySDR.getWidth()

  # Visual input
  D = np.loadtxt(DIR + '/seq_multi_loop_noise05_al5.txt', dtype='i', delimiter=',')
  numberImages = D[:, 0].size
  nColumns = D[0, :].size

  # Odom input
  odom = np.loadtxt(DIR + '/seq_multi_loop_noise05_al5_gt.txt', dtype='f', delimiter=',')
  x = odom[:, 0]
  y = odom[:, 1]

  # Encode odom input. Encode every image's pose, not just the first one
  # (the original looped over range(1), which left rows 1..n-1 all zero
  # even though every row is read in the distal-connections loop below).
  odomSDR = np.zeros((numberImages, xyWidth), dtype=int)
  for i in range(numberImages):
    _xSDR = np.zeros(xSDR.getWidth(), dtype=int)
    xSDR.encodeIntoArray(x[i], _xSDR)
    _ySDR = np.zeros(ySDR.getWidth(), dtype=int)
    ySDR.encodeIntoArray(y[i], _ySDR)
    odomSDR[i, :] = np.concatenate([_xSDR, _ySDR])

  tm0 = TM(
      columnCount=nColumns,
      cellsPerColumn=4,
      initialPermanence=0.21,
      connectedPermanence=0.5,
      permanenceIncrement=0.1,
      permanenceDecrement=0.1,
      minThreshold=15,
      basalInputSize=512,
      reducedBasalThreshold=1000,
      activationThreshold=1000,
      apicalInputSize=0,
      maxSynapsesPerSegment=-1,
      sampleSize=1,
      seed=42)

  tm = TemporalMemory(
      # Must be the same dimensions as the SP
      columnDimensions=(2048,),
      # How many cells in each mini-column.
      cellsPerColumn=4,
      # A segment is active if it has >= activationThreshold connected synapses
      # that are active due to infActiveState
      activationThreshold=13,
      initialPermanence=0.21,
      connectedPermanence=0.5,
      # Minimum number of active synapses for a segment to be considered during
      # search for the best-matching segments.
      minThreshold=1,
      # The max number of synapses added to a segment during learning
      maxNewSynapseCount=3,
      #permanenceIncrement=0.01,
      #permanenceDecrement=0.01,
      predictedSegmentDecrement=0.0005,
      maxSegmentsPerCell=3,
      maxSynapsesPerSegment=3,
      seed=42)

  # Simple HTM parameters
  params = Params()
  params.maxPredDepth = 0
  params.probAdditionalCon = 0.05  # probability for random connection
  params.nCellPerCol = 32  # number of cells per minicolumn
  params.nInConPerCol = int(round(np.count_nonzero(D) / D.shape[0]))
  params.minColumnActivity = int(round(0.25 * params.nInConPerCol))
  params.nColsPerPattern = 10  # minimum number of active minicolumns k_min
  params.kActiveColumn = 100  # maximum number of active minicolumns k_max
  params.kMin = 1

  # run HTM
  t = time.time()
  print('Simple HTM')
  htm = MCN('htm', params)

  outputSDR = []
  max_index = []

  for i in range(min(numberImages, D.shape[0])):
    # skip empty vectors
    if np.count_nonzero(D[i, :]) == 0:
      print('empty vector, skip\n')
      continue
    htm.compute(D[i, :])
    max_index.append(max(htm.winnerCells))
    outputSDR.append(htm.winnerCells)

  elapsed = time.time() - t
  print("Elapsed time: %f seconds\n" % elapsed)

  # create output SDR matrix from MCN winner cell output
  M = np.zeros((len(outputSDR), max(max_index) + 1), dtype=int)
  for i in range(len(outputSDR)):
    for j in range(len(outputSDR[i])):
      winner = outputSDR[i][j]
      M[i][winner] = 1

  # Temporal Pooler descriptors
  print('Temporal Pooler descriptors')
  D1_tm = []
  id_max1 = []
  t = time.time()
  for i in range(min(numberImages, D.shape[0])):
    D1_sp = np.nonzero(D[i, :])[0]
    tm.compute(D1_sp, learn=True)
    activeCells = tm.getWinnerCells()
    D1_tm.append(activeCells)
    id_max1.append(max(activeCells))
  elapsed = time.time() - t
  print("Elapsed time: %f seconds\n" % elapsed)

  # create output SDR matrix from HTM winner cell output
  T = np.zeros((len(D1_tm), max(id_max1) + 1), dtype=int)
  for i in range(len(D1_tm)):
    for j in range(len(D1_tm[i])):
      winner = D1_tm[i][j]
      T[i][winner] = 1

  # Temporal Pooler - Distal connections
  print('Temporal Pooler - Distal connections')
  D2_tm = []
  id_max2 = []
  t = time.time()
  for i in range(min(numberImages, D.shape[0])):
    D2_sp = np.nonzero(D[i, :])[0]
    basalInputs = np.nonzero(odomSDR[i, :])[0]
    tm0.compute(sorted(D2_sp), sorted(basalInputs), apicalInput=(),
                basalGrowthCandidates=None, apicalGrowthCandidates=None,
                learn=True)
    activeCells2 = tm0.getWinnerCells()
    D2_tm.append(activeCells2)
    id_max2.append(max(activeCells2))
  elapsed = time.time() - t
  print("Elapsed time: %f seconds\n" % elapsed)

  # create output SDR matrix from HTM winner cell output
  T2 = np.zeros((len(D2_tm), max(id_max2) + 1), dtype=int)
  for i in range(len(D2_tm)):
    for j in range(len(D2_tm[i])):
      winner = D2_tm[i][j]
      T2[i][winner] = 1

  # Create ground truth and show precision-recall curves
  GT_data = np.loadtxt(DIR + '/seq_multi_loop_noNoise_gt.txt', dtype='i',
                       delimiter=',', skiprows=1)
  GT = np.zeros((numberImages, numberImages), dtype=int)
  for i in range(GT.shape[0]):
    for j in range(i, GT.shape[1]):
      GT[i, j] = (np.any(GT_data[i, :] != GT_data[j, :]) == False)

  # Results
  print('Results')
  fig, ax = plt.subplots()
  S0 = evaluateSimilarity(D)
  P, R = createPR(S0, GT)
  ax.plot(R, P, label='InputSDR (avgP=%f)' % np.trapz(P, R))
  S1 = evaluateSimilarity(M)
  P, R = createPR(S1, GT)
  ax.plot(R, P, label='MCN (avgP=%f)' % np.trapz(P, R))
  S2 = evaluateSimilarity(T)
  P, R = createPR(S2, GT)
  ax.plot(R, P, label='HTM (avgP=%f)' % np.trapz(P, R))
  S3 = evaluateSimilarity(T2)
  P, R = createPR(S3, GT)
  ax.plot(R, P, label='HTM Distal (avgP=%f)' % np.trapz(P, R))
  ax.legend()
  ax.grid(True)
  plt.xlabel("Recall")
  plt.ylabel("Precision")
  plt.show()
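The three blocks above that turn lists of winner cells into the binary matrices M, T and T2 all repeat the same pattern; a small helper capturing it (the helper name is ours, not part of the original code):

import numpy as np

def winnerCellsToMatrix(winnerCellLists):
  """Binary SDR matrix: one row per timestep, one column per cell index,
  with 1 wherever that timestep's winner cells fired (the M/T/T2 pattern)."""
  nRows = len(winnerCellLists)
  nCols = max(max(cells) for cells in winnerCellLists) + 1
  out = np.zeros((nRows, nCols), dtype=int)
  for i, cells in enumerate(winnerCellLists):
    out[i, list(cells)] = 1
  return out

# e.g. M = winnerCellsToMatrix(outputSDR); T = winnerCellsToMatrix(D1_tm)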
def main():
  numberImages = 212
  DIR = "./sim_data"

  # Experiments
  #D0 = np.loadtxt(DIR + '/seq_multi_loop_noise01_al0.txt', dtype='i', delimiter=',')
  D1 = np.loadtxt(DIR + '/seq_multi_loop_noise0_al1.txt', dtype='i', delimiter=',')
  D = np.loadtxt(DIR + '/seq_multi_loop_noise05_al5.txt', dtype='i', delimiter=',')

  tm = TemporalMemory(
      # Must be the same dimensions as the SP
      columnDimensions=(2048,),
      # How many cells in each mini-column.
      cellsPerColumn=4,
      # A segment is active if it has >= activationThreshold connected synapses
      # that are active due to infActiveState
      activationThreshold=13,
      #initialPermanence=0.21,
      connectedPermanence=0.5,
      # Minimum number of active synapses for a segment to be considered during
      # search for the best-matching segments.
      minThreshold=1,
      # The max number of synapses added to a segment during learning
      maxNewSynapseCount=3,
      #permanenceIncrement=0.01,
      #permanenceDecrement=0.01,
      predictedSegmentDecrement=0.0005,
      maxSegmentsPerCell=3,
      maxSynapsesPerSegment=3,
      seed=42)

  # Simple HTM parameters
  params = Params()
  params.maxPredDepth = 0
  params.probAdditionalCon = 0.05  # probability for random connection
  params.nCellPerCol = 32  # number of cells per minicolumn
  params.nInConPerCol = int(round(np.count_nonzero(D) / D.shape[0]))
  params.minColumnActivity = int(round(0.25 * params.nInConPerCol))
  params.nColsPerPattern = 10  # minimum number of active minicolumns k_min
  params.kActiveColumn = 100  # maximum number of active minicolumns k_max
  params.kMin = 1

  # run HTM
  t = time.time()
  print('Simple HTM')
  htm = MCN('htm', params)

  outputSDR = []
  max_index = []
  nCols_MCN = []
  nCols_HTM = []

  '''
  for i in range(min(numberImages, D1.shape[0])):
    # skip empty vectors
    if np.count_nonzero(D1[i, :]) == 0:
      print('empty vector, skip\n')
      continue
    htm.compute(D1[i, :])
    max_index.append(max(htm.winnerCells))
    outputSDR.append(htm.winnerCells)
  elapsed = time.time() - t
  print("Elapsed time: %f seconds\n" % elapsed)
  '''

  for i in range(min(numberImages, D.shape[0])):
    # skip empty vectors
    if np.count_nonzero(D[i, :]) == 0:
      print('empty vector, skip\n')
      continue
    htm.compute(D[i, :])
    nCols_MCN.append(htm.nCols)
    nCols_HTM.append(2048)
    max_index.append(max(htm.winnerCells))
    outputSDR.append(htm.winnerCells)

  elapsed = time.time() - t
  print("Elapsed time: %f seconds\n" % elapsed)

  # create output SDR matrix from MCN winner cell output
  M = np.zeros((len(outputSDR), max(max_index) + 1), dtype=int)
  for i in range(len(outputSDR)):
    for j in range(len(outputSDR[i])):
      winner = outputSDR[i][j]
      M[i][winner] = 1

  print('Temporal Pooler descriptors')
  D1_tm = []
  id_max1 = []
  t = time.time()
  for i in range(min(numberImages, D.shape[0])):
    D1_sp = np.nonzero(D[i, :])[0]
    tm.compute(D1_sp, learn=True)
    activeCells = tm.getWinnerCells()
    D1_tm.append(activeCells)
    id_max1.append(max(activeCells))
  elapsed = time.time() - t
  print("Elapsed time: %f seconds\n" % elapsed)

  # create output SDR matrix from HTM winner cell output
  T = np.zeros((len(D1_tm), max(id_max1) + 1), dtype=int)
  for i in range(len(D1_tm)):
    for j in range(len(D1_tm[i])):
      winner = D1_tm[i][j]
      T[i][winner] = 1

  # Create ground truth and show precision-recall curves
  GT_data = np.loadtxt(DIR + '/seq_multi_loop_noNoise_gt.txt', dtype='i',
                       delimiter=',', skiprows=1)
  GT = np.zeros((numberImages, numberImages), dtype=int)
  for i in range(GT.shape[0]):
    for j in range(i, GT.shape[1]):
      GT[i, j] = (np.any(GT_data[i, :] != GT_data[j, :]) == False)

  # Results
  print('Results')
  fig, ax = plt.subplots()
  S0 = evaluateSimilarity(D)
  P, R = createPR(S0, GT)
  ax.plot(R, P, label='InputSDR (AUC=%f)' % np.trapz(P, R))
  S1 = evaluateSimilarity(M)
  P, R = createPR(S1, GT)
  ax.plot(R, P, label='MCN (AUC=%f)' % np.trapz(P, R))
  S2 = evaluateSimilarity(T)
  P, R = createPR(S2, GT)
  ax.plot(R, P, label='HTM (AUC=%f)' % np.trapz(P, R))
  ax.legend()
  ax.grid(True)
  plt.xlabel("Recall")
  plt.ylabel("Precision")
  plt.show()

  fig2, (ax2, ax3) = plt.subplots(nrows=1, ncols=2)  # two axes on one figure
  ax2.imshow(S0, vmin=0, vmax=50, interpolation='nearest', cmap='binary')
  ax2.set_title('Input descriptors')
  ax3.imshow(S2, vmin=0, vmax=30, interpolation='nearest', cmap='binary')
  ax3.set_title('Winner cell outputs')
  plt.show()

  # Number of minicolumns over time: MCN grows, HTM is fixed at 2048.
  plt.plot(nCols_MCN, 'g', label='MCN')
  plt.plot(nCols_HTM, 'b', label='HTM')
  plt.xlabel('Number of seen images')
  plt.ylabel('Number of MiniColumns')
  plt.legend()
  plt.grid(True)
  plt.show()
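The ground-truth loops in both variants above mark (i, j) as a loop closure exactly when rows i and j of GT_data are identical, filling only the upper triangle. A vectorized equivalent, assuming GT_data has at least numberImages rows (the helper name is ours):

import numpy as np

def buildGroundTruth(GT_data, numberImages):
  """Upper-triangular 0/1 matrix; GT[i, j] == 1 iff pose rows i and j match."""
  rows = GT_data[:numberImages]
  equal = (rows[:, None, :] == rows[None, :, :]).all(axis=2)
  return np.triu(equal).astype(int)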
def testAddSegmentToCellWithFewestSegments(self):
  grewOnCell1 = False
  grewOnCell2 = False
  for seed in xrange(100):
    tm = TemporalMemory(
        columnDimensions=[32],
        cellsPerColumn=4,
        activationThreshold=3,
        initialPermanence=.2,
        connectedPermanence=.50,
        minThreshold=2,
        maxNewSynapseCount=4,
        permanenceIncrement=.10,
        permanenceDecrement=.10,
        predictedSegmentDecrement=0.02,
        seed=seed)

    prevActiveColumns = [1, 2, 3, 4]
    activeColumns = [0]
    prevActiveCells = [4, 5, 6, 7]
    nonMatchingCells = [0, 3]
    activeCells = [0, 1, 2, 3]

    segment1 = tm.createSegment(nonMatchingCells[0])
    tm.connections.createSynapse(segment1, prevActiveCells[0], .5)
    segment2 = tm.createSegment(nonMatchingCells[1])
    tm.connections.createSynapse(segment2, prevActiveCells[1], .5)

    tm.compute(prevActiveColumns, True)
    tm.compute(activeColumns, True)

    self.assertEqual(activeCells, tm.getActiveCells())

    self.assertEqual(3, tm.connections.numSegments())
    self.assertEqual(1, tm.connections.numSegments(0))
    self.assertEqual(1, tm.connections.numSegments(3))
    self.assertEqual(1, tm.connections.numSynapses(segment1))
    self.assertEqual(1, tm.connections.numSynapses(segment2))

    segments = list(tm.connections.segmentsForCell(1))
    if len(segments) == 0:
      segments2 = list(tm.connections.segmentsForCell(2))
      self.assertFalse(len(segments2) == 0)
      grewOnCell2 = True
      segments.append(segments2[0])
    else:
      grewOnCell1 = True

    self.assertEqual(1, len(segments))
    synapses = list(tm.connections.synapsesForSegment(segments[0]))
    self.assertEqual(4, len(synapses))

    columnChecklist = set(prevActiveColumns)
    for synapse in synapses:
      synapseData = tm.connections.dataForSynapse(synapse)
      self.assertAlmostEqual(.2, synapseData.permanence)
      column = tm.columnForCell(synapseData.presynapticCell)
      self.assertTrue(column in columnChecklist)
      columnChecklist.remove(column)
    self.assertTrue(len(columnChecklist) == 0)

  # Across the 100 seeds, growth should have landed on both candidate cells.
  self.assertTrue(grewOnCell1)
  self.assertTrue(grewOnCell2)
def main():
  tm = TemporalMemory(
      # Must be the same dimensions as the SP
      columnDimensions=(2048,),
      #columnDimensions=(32768,),
      # How many cells in each mini-column.
      cellsPerColumn=16,
      # A segment is active if it has >= activationThreshold connected synapses
      # that are active due to infActiveState
      activationThreshold=4,  # also tried 1 (4 worked better)
      initialPermanence=0.55,
      connectedPermanence=0.5,
      # Minimum number of active synapses for a segment to be considered during
      # search for the best-matching segments.
      minThreshold=1,
      # The max number of synapses added to a segment during learning
      maxNewSynapseCount=20,  # also tried 6
      permanenceIncrement=0.01,
      permanenceDecrement=0.01,
      predictedSegmentDecrement=0.0005,  # also tried 0.0001
      maxSegmentsPerCell=100,  # also tried 8 and 16
      maxSynapsesPerSegment=100,  # also tried 8 and 16
      seed=42)

  numberImages = 288
  DIR = "/media/cappizzino/OS/Documents and Settings/cappi/Documents/MATLAB/MCN_v0_1"

  # Ground truth: each frame matches only itself.
  GT = np.identity(numberImages, dtype=bool)

  # MCN (MCN descriptors)
  print('MCN')
  id_max1 = []
  id_max2 = []
  with open('outputSDR1.txt', 'r') as f:
    D1_MCN = [[int(entry) for entry in line.split(',')] for line in f.readlines()]
  for i in range(len(D1_MCN)):
    id_max1.append(max(D1_MCN[i]))
  with open('outputSDR2.txt', 'r') as f:
    D2_MCN = [[int(entry) for entry in line.split(',')] for line in f.readlines()]
  for i in range(len(D2_MCN)):
    id_max2.append(max(D2_MCN[i]))
  id_max = max(max(id_max1), max(id_max2))

  '''
  D1_sparse = sparse.lil_matrix((len(D1_MCN), id_max + 1), dtype='int8')
  for i in range(len(D1_MCN)):
    D1_sparse[i, D1_MCN[i]] = 1
  D2_sparse = sparse.lil_matrix((len(D2_MCN), id_max + 1), dtype='int8')
  for i in range(len(D2_MCN)):
    D2_sparse[i, D2_MCN[i]] = 1
  '''

  D1_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
  for i in range(numberImages):
    D1_sparse[i, D1_MCN[i]] = 1
  D2_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
  for i in range(numberImages):
    D2_sparse[i, D2_MCN[i]] = 1
  S_MCN = pairwiseDescriptors(D1_sparse, D2_sparse)

  # Pairwise (raw descriptors)
  print('Pairwise descriptors')
  D1 = np.loadtxt(DIR + "/D1.txt", dtype='f', delimiter=',')
  D2 = np.loadtxt(DIR + "/D2.txt", dtype='f', delimiter=',')
  S_pairwise = cosine_similarity(D1[:numberImages], D2[:numberImages])

  # Dimension reduction and binarization
  print('Dimension Reduction')
  P = np.random.randn(D1.shape[1], 1024)
  P = normc(P)
  #D1h = np.dot(D1[:numberImages], P)
  #D2h = np.dot(D2[:numberImages], P)
  #S_Dh = cosine_similarity(D1h, D2h)

  # sLSBH (binarized descriptors)
  print('sLSBH')
  D1_slsbh = getLSBH(D1[:numberImages], P, 0.25)  # also tried 0.025
  D2_slsbh = getLSBH(D2[:numberImages], P, 0.25)
  #D1_slsbh = np.loadtxt(DIR + "/D1_slsbh.txt", dtype='i', delimiter=',')
  #D2_slsbh = np.loadtxt(DIR + "/D2_slsbh.txt", dtype='i', delimiter=',')
  Sb_pairwise = pairwiseDescriptors(D1_slsbh[:numberImages], D2_slsbh[:numberImages])

  '''
  # Binarization and sparsification
  print('Binarization and Sparsification')
  D1_slsbh = np.zeros((D1h.shape[0], 2 * D1h.shape[1]), dtype=bool)
  D2_slsbh = np.zeros((D2h.shape[0], 2 * D2h.shape[1]), dtype=bool)
  for i in range(numberImages):
    D1_slsbh[i, :] = generate_LSBH(D1h[i, :], D1h.shape[1], 0.25)
    D2_slsbh[i, :] = generate_LSBH(D2h[i, :], D2h.shape[1], 0.25)
  Sb_pairwise = pairwiseDescriptors(D1_slsbh, D2_slsbh)
  '''

  '''
  print('Spatial Pooler descriptors')
  # Create an array to represent active columns, all initially zero. This
  # will be populated by the compute method below. It must have the same
  # dimensions as the Spatial Pooler.
  activeColumns = np.zeros(2048)
  D1_htm = []
  D2_htm = []
  id_max = []
  id_max1 = []
  id_max2 = []
  for i in range(numberImages):
    # Execute Spatial Pooling algorithm over input space.
    sp.compute(D1_slsbh[i, :], False, activeColumns)
    activeColumnIndices = np.nonzero(activeColumns)[0]
    D1_htm.append(activeColumnIndices)
    id_max1.append(max(activeColumnIndices))
  for i in range(numberImages):
    sp.compute(D2_slsbh[i, :], False, activeColumns)
    activeColumnIndices = np.nonzero(activeColumns)[0]
    D2_htm.append(activeColumnIndices)
    id_max2.append(max(activeColumnIndices))
  id_max = max(max(id_max1), max(id_max2))
  D1_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
  for i in range(numberImages):
    D1_sparse[i, D1_htm[i]] = 1
  D2_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
  for i in range(numberImages):
    D2_sparse[i, D2_htm[i]] = 1
  S_SP = pairwiseDescriptors(D1_sparse, D2_sparse)
  '''

  print('Temporal Pooler (1) descriptors')
  D1_tm = []
  D2_tm = []
  id_max = []
  id_max1 = []
  id_max2 = []

  '''
  for _ in range(5):
    for i in range(numberImages):
      activeColumnIndices = np.nonzero(D1_slsbh[i, :])[0]
      tm.compute(activeColumnIndices, learn=True)
    for i in range(numberImages):
      activeColumnIndices = np.nonzero(D2_slsbh[i, :])[0]
      tm.compute(activeColumnIndices, learn=True)
    tm.reset()
  '''

  for i in range(numberImages):
    activeColumnIndices = np.nonzero(D1_slsbh[i, :])[0]
    tm.compute(activeColumnIndices, learn=True)
    activeCells = tm.getWinnerCells()
    D1_tm.append(activeCells)
    id_max1.append(max(activeCells))

  print('Temporal Pooler (2) descriptors')
  '''
  for _ in range(2):
    for i in range(numberImages):
      activeColumnIndices = np.nonzero(D2_slsbh[i, :])[0]
      tm.compute(activeColumnIndices, learn=True)
  '''
  for i in range(numberImages):
    activeColumnIndices = np.nonzero(D2_slsbh[i, :])[0]
    tm.compute(activeColumnIndices, learn=False)
    activeCells = tm.getWinnerCells()
    D2_tm.append(activeCells)
    id_max2.append(max(activeCells))

  id_max = max(max(id_max1), max(id_max2))
  D1_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
  for i in range(numberImages):
    D1_sparse[i, D1_tm[i]] = 1
  D2_sparse = sparse.lil_matrix((numberImages, id_max + 1), dtype='int8')
  for i in range(numberImages):
    D2_sparse[i, D2_tm[i]] = 1
  S_TM = pairwiseDescriptors(D1_sparse, D2_sparse)

  # Results
  print('Results')
  fig, ax = plt.subplots()
  P, R = createPR(S_pairwise, GT)
  ax.plot(R, P, label='pairwise / raw (avgP=%f)' % np.trapz(P, R))
  P, R = createPR(S_MCN, GT)
  ax.plot(R, P, label='MCN (avgP=%f)' % np.trapz(P, R))
  #P, R = createPR(S_Dh, GT)
  #ax.plot(R, P, label='pairwise RP / raw (avgP=%f)' % np.trapz(P, R))
  P, R = createPR(Sb_pairwise, GT)
  ax.plot(R, P, label='sLSBH / raw (avgP=%f)' % np.trapz(P, R))
  #P, R = createPR(S_SP, GT)
  #ax.plot(R, P, label='HTM SP (avgP=%f)' % np.trapz(P, R))
  P, R = createPR(S_TM, GT)
  ax.plot(R, P, label='HTM TM (avgP=%f)' % np.trapz(P, R))
  ax.legend()
  ax.grid(True)
  plt.xlabel("Recall")
  plt.ylabel("Precision")
  plt.show()
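pairwiseDescriptors is imported from elsewhere in this project and its definition isn't shown here. Below is a plausible minimal sketch, under the stated assumption that it scores each image pair by the overlap (number of shared active bits) of their binary descriptors; the function name is ours:

import numpy as np

def pairwiseDescriptorsSketch(Da, Db):
  """ASSUMPTION: scores each image pair by the number of shared active bits.
  Da, Db: (nImages, nBits) binary matrices (dense numpy or scipy.sparse);
  returns an nImages x nImages similarity matrix."""
  S = Da.astype(int).dot(Db.astype(int).T)
  return S.toarray() if hasattr(S, 'toarray') else S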
def testRecycleLeastRecentlyActiveSegmentToMakeRoomForNewSegment(self):
  tm = TemporalMemory(
      columnDimensions=[32],
      cellsPerColumn=1,
      activationThreshold=3,
      initialPermanence=.50,
      connectedPermanence=.50,
      minThreshold=2,
      maxNewSynapseCount=3,
      permanenceIncrement=.02,
      permanenceDecrement=.02,
      predictedSegmentDecrement=0.0,
      seed=42,
      maxSegmentsPerCell=2)

  prevActiveColumns1 = [0, 1, 2]
  prevActiveColumns2 = [3, 4, 5]
  prevActiveColumns3 = [6, 7, 8]
  activeColumns = [9]

  tm.compute(prevActiveColumns1)
  tm.compute(activeColumns)

  self.assertEqual(1, tm.connections.numSegments(9))
  oldestSegment = list(tm.connections.segmentsForCell(9))[0]

  tm.reset()
  tm.compute(prevActiveColumns2)
  tm.compute(activeColumns)

  self.assertEqual(2, tm.connections.numSegments(9))

  oldPresynaptic = set(synapse.presynapticCell
                       for synapse
                       in tm.connections.synapsesForSegment(oldestSegment))

  tm.reset()
  tm.compute(prevActiveColumns3)
  tm.compute(activeColumns)

  self.assertEqual(2, tm.connections.numSegments(9))

  # Verify none of the segments are connected to the cells the old
  # segment was connected to.
  for segment in tm.connections.segmentsForCell(9):
    newPresynaptic = set(synapse.presynapticCell
                         for synapse
                         in tm.connections.synapsesForSegment(segment))
    self.assertEqual([], list(oldPresynaptic & newPresynaptic))
def runHotgym(numRecords):
  with open(_PARAMS_PATH, "r") as f:
    modelParams = yaml.safe_load(f)["modelParams"]
    enParams = modelParams["sensorParams"]["encoders"]
    spParams = modelParams["spParams"]
    tmParams = modelParams["tmParams"]

  scalarEncoder = RandomDistributedScalarEncoder(
      enParams["consumption"]["resolution"])
  scalarEncoder2 = RandomDistributedScalarEncoder(
      enParams["consumption2"]["resolution"])

  encodingWidth = scalarEncoder.getWidth() + scalarEncoder2.getWidth()

  sp = SpatialPooler(
      inputDimensions=(encodingWidth,),
      columnDimensions=(spParams["columnCount"],),
      potentialPct=spParams["potentialPct"],
      potentialRadius=encodingWidth,
      globalInhibition=spParams["globalInhibition"],
      localAreaDensity=spParams["localAreaDensity"],
      numActiveColumnsPerInhArea=spParams["numActiveColumnsPerInhArea"],
      synPermInactiveDec=spParams["synPermInactiveDec"],
      synPermActiveInc=spParams["synPermActiveInc"],
      synPermConnected=spParams["synPermConnected"],
      boostStrength=spParams["boostStrength"],
      seed=spParams["seed"],
      wrapAround=True)

  tm = TemporalMemory(
      columnDimensions=(tmParams["columnCount"],),
      cellsPerColumn=tmParams["cellsPerColumn"],
      activationThreshold=tmParams["activationThreshold"],
      initialPermanence=tmParams["initialPerm"],
      connectedPermanence=spParams["synPermConnected"],
      minThreshold=tmParams["minThreshold"],
      maxNewSynapseCount=tmParams["newSynapseCount"],
      permanenceIncrement=tmParams["permanenceInc"],
      permanenceDecrement=tmParams["permanenceDec"],
      predictedSegmentDecrement=0.0,
      maxSegmentsPerCell=tmParams["maxSegmentsPerCell"],
      maxSynapsesPerSegment=tmParams["maxSynapsesPerSegment"],
      seed=tmParams["seed"])

  classifier = SDRClassifierFactory.create()
  results = []
  with open(_INPUT_FILE_PATH, "r") as fin:
    reader = csv.reader(fin)
    headers = reader.next()
    reader.next()
    reader.next()

    output = output_anomaly_generic_v1.NuPICFileOutput(_FILE_NAME)

    for count, record in enumerate(reader):
      if count >= numRecords:
        break

      # Convert data string into Python date object.
      # dateString = datetime.datetime.strptime(record[0], "%m/%d/%y %H:%M")
      # Convert data value strings into floats.
      prediction = float(record[1])
      prediction2 = float(record[2])

      # To encode, we need to provide zero-filled numpy arrays for the encoders
      # to populate.
      consumptionBits = numpy.zeros(scalarEncoder.getWidth())
      consumptionBits2 = numpy.zeros(scalarEncoder2.getWidth())

      # Now we call the encoders to create bit representations for each value.
      scalarEncoder.encodeIntoArray(prediction, consumptionBits)
      scalarEncoder2.encodeIntoArray(prediction2, consumptionBits2)

      # Concatenate all these encodings into one large encoding for Spatial
      # Pooling.
      encoding = numpy.concatenate([consumptionBits, consumptionBits2])

      # Create an array to represent active columns, all initially zero. This
      # will be populated by the compute method below. It must have the same
      # dimensions as the Spatial Pooler.
      activeColumns = numpy.zeros(spParams["columnCount"])

      # Execute Spatial Pooling algorithm over input space.
      sp.compute(encoding, True, activeColumns)
      activeColumnIndices = numpy.nonzero(activeColumns)[0]

      # Execute Temporal Memory algorithm over active mini-columns.
      tm.compute(activeColumnIndices, learn=True)
      activeCells = tm.getActiveCells()

      # Get the bucket info for this input value for classification.
      bucketIdx = scalarEncoder.getBucketIndices(prediction)[0]

      # Run classifier to translate active cells back to scalar value.
      classifierResult = classifier.compute(
          recordNum=count,
          patternNZ=activeCells,
          classification={"bucketIdx": bucketIdx, "actValue": prediction},
          learn=True,
          infer=True)

      # Keep the best prediction for 1 step out.
      oneStepConfidence, oneStep = sorted(
          zip(classifierResult[1], classifierResult["actualValues"]),
          reverse=True)[0]
      # print("1-step: {:16} ({:4.4}%)".format(oneStep, oneStepConfidence * 100))
      # results.append([oneStep, oneStepConfidence * 100, None, None])
      results.append([record[0], prediction, oneStep, oneStepConfidence * 100])
      output.write(record[0], prediction, oneStep, oneStepConfidence * 100)

    output.close()

  return results
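A minimal sketch of how this variant might be driven, assuming the module-level constants (_PARAMS_PATH, _INPUT_FILE_PATH, _FILE_NAME) are set as the code above expects; the record count of 100 is illustrative:

if __name__ == "__main__":
  results = runHotgym(100)
  print("processed %d records" % len(results))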
def runHotgym(numRecords):
  with open(_PARAMS_PATH, "r") as f:
    modelParams = yaml.safe_load(f)["modelParams"]
    enParams = modelParams["sensorParams"]["encoders"]
    spParams = modelParams["spParams"]
    tmParams = modelParams["tmParams"]

  timeOfDayEncoder = DateEncoder(
      timeOfDay=enParams["timestamp_timeOfDay"]["timeOfDay"])
  weekendEncoder = DateEncoder(
      weekend=enParams["timestamp_weekend"]["weekend"])
  scalarEncoder = RandomDistributedScalarEncoder(
      enParams["consumption"]["resolution"])

  encodingWidth = (timeOfDayEncoder.getWidth() +
                   weekendEncoder.getWidth() +
                   scalarEncoder.getWidth())

  sp = SpatialPooler(
      # How large the input encoding will be.
      inputDimensions=(encodingWidth,),
      # How many mini-columns will be in the Spatial Pooler.
      columnDimensions=(spParams["columnCount"],),
      # What percent of the column's receptive field is available for potential
      # synapses?
      potentialPct=spParams["potentialPct"],
      # This means that the input space has no topology.
      globalInhibition=spParams["globalInhibition"],
      localAreaDensity=spParams["localAreaDensity"],
      # Roughly 2%, given that there is only one inhibition area because we
      # have turned on globalInhibition (40 / 2048 = 0.0195).
      numActiveColumnsPerInhArea=spParams["numActiveColumnsPerInhArea"],
      # How quickly synapses grow and degrade.
      synPermInactiveDec=spParams["synPermInactiveDec"],
      synPermActiveInc=spParams["synPermActiveInc"],
      synPermConnected=spParams["synPermConnected"],
      # boostStrength controls the strength of boosting. Boosting encourages
      # efficient usage of SP columns.
      boostStrength=spParams["boostStrength"],
      # Random number generator seed.
      seed=spParams["seed"],
      # TODO: is this useful?
      # Determines if inputs at the beginning and end of an input dimension
      # should be considered neighbors when mapping columns to inputs.
      wrapAround=False)

  tm = TemporalMemory(
      # Must be the same dimensions as the SP
      columnDimensions=(tmParams["columnCount"],),
      # How many cells in each mini-column.
      cellsPerColumn=tmParams["cellsPerColumn"],
      # A segment is active if it has >= activationThreshold connected synapses
      # that are active due to infActiveState
      activationThreshold=tmParams["activationThreshold"],
      initialPermanence=tmParams["initialPerm"],
      # TODO: this comes from the SP params; is that normal?
      connectedPermanence=spParams["synPermConnected"],
      # Minimum number of active synapses for a segment to be considered during
      # search for the best-matching segments.
      minThreshold=tmParams["minThreshold"],
      # The max number of synapses added to a segment during learning
      maxNewSynapseCount=tmParams["newSynapseCount"],
      permanenceIncrement=tmParams["permanenceInc"],
      permanenceDecrement=tmParams["permanenceDec"],
      predictedSegmentDecrement=0.0,
      maxSegmentsPerCell=tmParams["maxSegmentsPerCell"],
      maxSynapsesPerSegment=tmParams["maxSynapsesPerSegment"],
      seed=tmParams["seed"])

  classifier = SDRClassifierFactory.create()
  results = []
  with open(_INPUT_FILE_PATH, "r") as fin:
    reader = csv.reader(fin)
    headers = reader.next()
    reader.next()
    reader.next()

    for count, record in enumerate(reader):
      if count >= numRecords:
        break

      # Convert data string into Python date object.
      dateString = datetime.datetime.strptime(record[0], "%m/%d/%y %H:%M")
      # Convert data value string into float.
      consumption = float(record[1])

      # To encode, we need to provide zero-filled numpy arrays for the encoders
      # to populate.
      timeOfDayBits = numpy.zeros(timeOfDayEncoder.getWidth())
      weekendBits = numpy.zeros(weekendEncoder.getWidth())
      consumptionBits = numpy.zeros(scalarEncoder.getWidth())

      # Now we call the encoders to create bit representations for each value.
      timeOfDayEncoder.encodeIntoArray(dateString, timeOfDayBits)
      weekendEncoder.encodeIntoArray(dateString, weekendBits)
      scalarEncoder.encodeIntoArray(consumption, consumptionBits)

      # Concatenate all these encodings into one large encoding for Spatial
      # Pooling.
      encoding = numpy.concatenate(
          [timeOfDayBits, weekendBits, consumptionBits])

      # Create an array to represent active columns, all initially zero. This
      # will be populated by the compute method below. It must have the same
      # dimensions as the Spatial Pooler.
      activeColumns = numpy.zeros(spParams["columnCount"])

      # Execute Spatial Pooling algorithm over input space.
      sp.compute(encoding, True, activeColumns)
      activeColumnIndices = numpy.nonzero(activeColumns)[0]

      # Execute Temporal Memory algorithm over active mini-columns.
      tm.compute(activeColumnIndices, learn=True)
      activeCells = tm.getActiveCells()

      # Get the bucket info for this input value for classification.
      bucketIdx = scalarEncoder.getBucketIndices(consumption)[0]

      # Run classifier to translate active cells back to scalar value.
      classifierResult = classifier.compute(
          recordNum=count,
          patternNZ=activeCells,
          classification={"bucketIdx": bucketIdx, "actValue": consumption},
          learn=True,
          infer=True)

      # Print the best prediction for 1 step out.
      oneStepConfidence, oneStep = sorted(
          zip(classifierResult[1], classifierResult["actualValues"]),
          reverse=True)[0]
      print("1-step: {:16} ({:4.4}%)".format(oneStep, oneStepConfidence * 100))
      results.append([oneStep, oneStepConfidence * 100, None, None])

  return results
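The "best 1-step prediction" above is just the head of the sorted (likelihood, value) pairs; the same zip can yield a top-N list instead. A sketch meant to sit inside the loop after classifier.compute(...); the top-3 cutoff is illustrative:

      # Top-3 candidate predictions for 1 step out, most likely first.
      topN = sorted(zip(classifierResult[1], classifierResult["actualValues"]),
                    reverse=True)[:3]
      for confidence, value in topN:
        print("1-step candidate: %s (%.2f%%)" % (value, confidence * 100))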