def testNupicTemporalMemorySavingToString(self):
    """Test writing to and reading from TemporalMemory."""
    inputs = SDR( 100 ).randomize( .05 )
    tm = TM( inputs.dimensions)
    for _ in range(10):
        tm.compute( inputs, True)

    # Simple test: make sure that writing/reading works...
    s = tm.writeToString()

    tm2 = TM()
    tm2.loadFromString(s)

    self.assertEqual(str(tm), str(tm2),
                     "TemporalMemory write to/read from string failed.")
def testNupicTemporalMemorySerialization(self):
    # Test serializing with each type of interface.
    inputs = SDR( 100 ).randomize( .05 )
    tm = TM( inputs.dimensions)
    for _ in range(10):
        tm.compute( inputs, True)

    #print(str(tm))

    # The TM now has some data in it, try serialization.
    file = "temporalMemory_test_save2.bin"
    tm.saveToFile(file)

    tm3 = TM()
    tm3.loadFromFile(file)

    self.assertEqual(str(tm), str(tm3),
                     "TemporalMemory serialization (using saveToFile/loadFromFile) failed.")
    os.remove(file)
def testPredictiveCells(self):
    """This tests that we don't get empty predictive cells."""
    tm = TM(
        columnDimensions=(parameters1["sp"]["columnCount"], ),
        cellsPerColumn=parameters1["tm"]["cellsPerColumn"],
        activationThreshold=parameters1["tm"]["activationThreshold"],
        initialPermanence=parameters1["tm"]["initialPerm"],
        connectedPermanence=parameters1["sp"]["synPermConnected"],
        minThreshold=parameters1["tm"]["minThreshold"],
        maxNewSynapseCount=parameters1["tm"]["newSynapseCount"],
        permanenceIncrement=parameters1["tm"]["permanenceInc"],
        permanenceDecrement=parameters1["tm"]["permanenceDec"],
        predictedSegmentDecrement=0.0,
        maxSegmentsPerCell=parameters1["tm"]["maxSegmentsPerCell"],
        maxSynapsesPerSegment=parameters1["tm"]["maxSynapsesPerSegment"],
    )

    activeColumnsA = SDR(parameters1["sp"]["columnCount"])
    activeColumnsB = SDR(parameters1["sp"]["columnCount"])
    activeColumnsA.randomize(sparsity=0.4, seed=1)
    activeColumnsB.randomize(sparsity=0.4, seed=1)

    # give pattern A - bursting
    # give pattern B - bursting
    # give pattern A - should be predicting

    tm.activateDendrites(True)
    self.assertTrue(tm.getPredictiveCells().getSum() == 0)
    predictiveCellsSDR = tm.getPredictiveCells()
    tm.activateCells(activeColumnsA, True)
    _print("\nColumnsA")
    _print("activeCols:" + str(len(activeColumnsA.sparse)))
    _print("activeCells:" + str(len(tm.getActiveCells().sparse)))
    _print("predictiveCells:" + str(len(predictiveCellsSDR.sparse)))

    tm.activateDendrites(True)
    self.assertTrue(tm.getPredictiveCells().getSum() == 0)
    predictiveCellsSDR = tm.getPredictiveCells()
    tm.activateCells(activeColumnsB, True)
    _print("\nColumnsB")
    _print("activeCols:" + str(len(activeColumnsB.sparse)))
    _print("activeCells:" + str(len(tm.getActiveCells().sparse)))
    _print("predictiveCells:" + str(len(predictiveCellsSDR.sparse)))

    tm.activateDendrites(True)
    self.assertTrue(tm.getPredictiveCells().getSum() > 0)
    predictiveCellsSDR = tm.getPredictiveCells()
    tm.activateCells(activeColumnsA, True)
    _print("\nColumnsA")
    _print("activeCols:" + str(len(activeColumnsA.sparse)))
    _print("activeCells:" + str(len(tm.getActiveCells().sparse)))
    _print("predictiveCells:" + str(len(predictiveCellsSDR.sparse)))
def testCompute(self):
    """Check that there are no errors in call to compute."""
    inputs = SDR( 100 ).randomize( .05 )
    tm = TM( inputs.dimensions)
    tm.compute( inputs, True )

    active = tm.getActiveCells()
    self.assertTrue( active.getSum() > 0 )
def testNupicTemporalMemoryPickling(self):
    """Test pickling / unpickling of NuPIC TemporalMemory."""
    # Simple test: make sure that dumping / loading works...
    tm = TM(columnDimensions=(16,))

    pickledTm = pickle.dumps(tm)
    tm2 = pickle.loads(pickledTm)

    self.assertEqual(tm.numberOfCells(), tm2.numberOfCells(),
                     "Simple NuPIC TemporalMemory pickle/unpickle failed.")
def testNupicTemporalMemoryPicklingWithData(self):
    """Test pickling / unpickling of a NuPIC TemporalMemory that has seen data."""
    # Simple test: make sure that dumping / loading works...
    inputs = SDR( 100 ).randomize( .05 )
    tm = TM( inputs.dimensions)
    for _ in range(10):
        tm.compute( inputs, True)

    pickledTm = pickle.dumps(tm, 2)
    tm2 = pickle.loads(pickledTm)

    self.assertEqual(tm.numberOfCells(), tm2.numberOfCells(),
                     "Simple NuPIC TemporalMemory pickle/unpickle failed.")
def testGetMethods(self):
    """Check that the getter methods return the correct values."""
    # First, create an instance of TM with some parameters.
    tm = TM(
        columnDimensions=(parameters1["sp"]["columnCount"],),
        cellsPerColumn=parameters1["tm"]["cellsPerColumn"],
        activationThreshold=parameters1["tm"]["activationThreshold"],
        initialPermanence=parameters1["tm"]["initialPerm"],
        connectedPermanence=parameters1["sp"]["synPermConnected"],
        minThreshold=parameters1["tm"]["minThreshold"],
        maxNewSynapseCount=parameters1["tm"]["newSynapseCount"],
        permanenceIncrement=parameters1["tm"]["permanenceInc"],
        permanenceDecrement=parameters1["tm"]["permanenceDec"],
        predictedSegmentDecrement=0.0,
        maxSegmentsPerCell=parameters1["tm"]["maxSegmentsPerCell"],
        maxSynapsesPerSegment=parameters1["tm"]["maxSynapsesPerSegment"],
        checkInputs=True
    )

    # Second, call each getter to retrieve the values.
    columnDimension = tm.getColumnDimensions()
    cellsPerColumn = tm.getCellsPerColumn()
    activationThreshold = tm.getActivationThreshold()
    initialPermanence = tm.getInitialPermanence()
    connectedPermanence = tm.getConnectedPermanence()
    minThreshold = tm.getMinThreshold()
    maxNewSynapseCount = tm.getMaxNewSynapseCount()
    permanenceIncrement = tm.getPermanenceIncrement()
    permanenceDecrement = tm.getPermanenceDecrement()
    predictedSegmentDecrement = tm.getPredictedSegmentDecrement()
    maxSegmentsPerCell = tm.getMaxSegmentsPerCell()
    maxSynapsesPerSegment = tm.getMaxSynapsesPerSegment()
    checkInputs = tm.getCheckInputs()

    # Third and finally, compare the constructor parameters with the values
    # returned by the getters. Floating point values may not be exactly equal,
    # so use assertAlmostEqual for them.
    self.assertEqual([parameters1["sp"]["columnCount"]], columnDimension,
                     "using method (getColumnDimensions) failed")
    self.assertEqual(parameters1["tm"]["cellsPerColumn"], cellsPerColumn,
                     "using method (getCellsPerColumn) failed")
    self.assertEqual(parameters1["tm"]["activationThreshold"], activationThreshold,
                     "using method (getActivationThreshold) failed")
    self.assertAlmostEqual(parameters1["tm"]["initialPerm"], initialPermanence,
                           msg="using method (getInitialPermanence) failed")
    self.assertAlmostEqual(parameters1["sp"]["synPermConnected"], connectedPermanence,
                           msg="using method (getConnectedPermanence) failed")
    self.assertEqual(parameters1["tm"]["minThreshold"], minThreshold,
                     "using method (getMinThreshold) failed")
    self.assertEqual(parameters1["tm"]["newSynapseCount"], maxNewSynapseCount,
                     "using method (getMaxNewSynapseCount) failed")
    self.assertAlmostEqual(parameters1["tm"]["permanenceInc"], permanenceIncrement,
                           msg="using method (getPermanenceIncrement) failed")
    self.assertAlmostEqual(parameters1["tm"]["permanenceDec"], permanenceDecrement,
                           msg="using method (getPermanenceDecrement) failed")
    self.assertAlmostEqual(0.0, predictedSegmentDecrement,
                           msg="using method (getPredictedSegmentDecrement) failed")
    self.assertEqual(parameters1["tm"]["maxSegmentsPerCell"], maxSegmentsPerCell,
                     "using method (getMaxSegmentsPerCell) failed")
    self.assertEqual(parameters1["tm"]["maxSynapsesPerSegment"], maxSynapsesPerSegment,
                     "using method (getMaxSynapsesPerSegment) failed")
    self.assertEqual(True, checkInputs,
                     "using method (getCheckInputs) failed")
def testPerformanceLarge(self):
    LARGE = 9000
    ITERS = 100  # This is lowered for unittest. Try 1000, 5000,...
    from htm.bindings.engine_internal import Timer
    t = Timer()

    inputs = SDR( LARGE ).randomize( .10 )
    tm = TM( inputs.dimensions)

    for i in range(ITERS):
        inputs = inputs.randomize( .10 )
        t.start()
        tm.compute( inputs, True )
        active = tm.getActiveCells()
        t.stop()
        self.assertTrue( active.getSum() > 0 )

    t_total = t.elapsed()
    speed = t_total * 1000 / ITERS  # time in ms per iteration
    self.assertTrue(speed < 40.0)
numSize = 200    # number of bits in the SDR
numMin = 0       # smallest number we're encoding
numMax = 100     # largest number we're encoding
numSpars = 0.02  # SDR sparsity
tries = 50000    # to make sure we've given it enough examples. Mostly works with fewer than this.

# for converting numbers into SDRs as specified above
numEncoder = common.ScalarEncoderGenerator(numMin, numMax, numSize, numSpars)

# used to predict addition
tm = TM(
    # 3 numbers: encodes a + b = c via [a, b, blank] then [blank, blank, c]
    columnDimensions=( numSize * 3, ),
    cellsPerColumn=1,
    initialPermanence=0.5,
    connectedPermanence=0.5,
    minThreshold=8,
    maxNewSynapseCount=20,
    permanenceIncrement=0.1,
    permanenceDecrement=0.0,
    activationThreshold=8,
)

# null SDR for padding
emptyBits = SDR(numSize)

# we store predictions in this
results = []

# generate a bunch of addition examples
for x in range(0, tries):
    # get two random numbers and sum them
    a = random.randint(0, numMax // 2)
def testTMexposesConnections(self):
    """TM exposes internal connections as a read-only object."""
    tm = TM(columnDimensions=[2048], connectedPermanence=0.42)
    self.assertAlmostEqual(tm.connections.connectedThreshold, 0.42, places=3)
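# A hedged sketch (not an original test) of what can be read through the exposed
# connections object once the TM has learned something; it assumes the Python
# bindings expose Connections.numSegments() and Connections.numSynapses().
def _inspectConnectionsSketch():
    inputs = SDR( 100 ).randomize( .05 )
    tm = TM( inputs.dimensions )
    for _ in range(10):
        tm.compute( inputs, True )
    # The counts grow as the TM learns; they are readable here, but the
    # connections object itself cannot be modified from Python.
    print("segments:", tm.connections.numSegments())
    print("synapses:", tm.connections.numSynapses())
    print("connected threshold:", tm.connections.connectedThreshold)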
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

from htm.bindings.sdr import SDR
from htm.algorithms import TemporalMemory as TM

print("--------------------------------------------------")
print(__doc__)
print("--------------------------------------------------")
print("")

print("Creating the Temporal Memory")
tm = TM(
    columnDimensions = (2048,),
    cellsPerColumn=8,
    initialPermanence=0.21,
    connectedPermanence=0.3,
    minThreshold=15,
    maxNewSynapseCount=40,
    permanenceIncrement=0.1,
    permanenceDecrement=0.1,
    activationThreshold=15,
    predictedSegmentDecrement=0.01,
)
tm.printParameters()

print("""
We will create a sparse representation of characters A, B, C, D, X, and Y.
In this particular example we manually construct them, but usually you would
use the spatial pooler to build these.""")
sparsity = 0.02
sparseCols = int(tm.numberOfColumns() * sparsity)
dataset = {inp : SDR( tm.numberOfColumns() ) for inp in "ABCDXY"}
for i, inp in enumerate("ABCDXY"):
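    # Hedged sketch of the loop body (the original body is not shown here):
    # give each input a disjoint block of `sparseCols` active mini-columns so
    # that the six representations do not overlap.
    dataset[inp].dense[i * sparseCols : (i + 1) * sparseCols] = 1
    dataset[inp].dense = dataset[inp].dense  # notify the SDR that its dense data changed in-place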
tm = TM(
    columnDimensions = (sp.getColumnDimensions()[0],),
    # number of cells in a column
    cellsPerColumn=10,
    # the initial level of permanence for a connection
    initialPermanence=0.5,
    # the level of permanence needed for a connection to actually be considered
    # connected; this must be permanenceIncrement away from initialPermanence
    connectedPermanence=0.6,
    # the number of potentially active connections needed for a segment
    # to be eligible for learning
    minThreshold=1,
    # maximum number of new synapses added to a segment per learning cycle
    maxNewSynapseCount=40,
    # how much a permanence per segment is increased during each learning cycle
    permanenceIncrement=0.15,
    # how much a permanence per segment is decreased during each learning cycle
    permanenceDecrement=0.01,
    # number of active connections needed for a segment to be considered active;
    # this may need to be modified if tm.getPredictiveCells() is producing 0's
    activationThreshold=1,
    predictedSegmentDecrement=0.001,
    # maximum segments allowed per cell
    maxSegmentsPerCell = 1,
    # maximum synapses allowed per segment
    maxSynapsesPerSegment = 1
)
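# A minimal usage sketch (not part of the original script) for the note above
# about tm.getPredictiveCells() producing 0's: activate the dendrites first,
# read the predictive cells, then activate the cells for the current input.
# `activeColumns` is a hypothetical SDR with tm.getColumnDimensions() dimensions.
def _countPredictiveCells(tm, activeColumns):
    tm.activateDendrites(True)                      # compute segment activity
    nPredictive = tm.getPredictiveCells().getSum()  # 0 here means no predictions;
                                                    # consider lowering activationThreshold
    tm.activateCells(activeColumns, True)           # learn on the current input
    return nPredictive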
print( "################################################################################" ) print(__doc__) print( "################################################################################" ) print("") print("Creating the Temporal Memory") tm = TM( columnDimensions=(50, ), cellsPerColumn=1, initialPermanence=0.5, connectedPermanence=0.5, minThreshold=8, maxNewSynapseCount=20, permanenceIncrement=0.1, permanenceDecrement=0.0, activationThreshold=8, ) tm.printParameters() print(""" Creating inputs to feed to the temporal memory. Each input is an SDR representing the active mini-columns. Here we create a simple sequence of 5 SDRs representing the sequence A -> B -> C -> D -> E """) dataset = {inp: SDR(tm.numberOfColumns()) for inp in "ABCDE"} dataset['A'].dense[ 0:10] = 1 # Input SDR representing "A", corresponding to mini-columns 0-9 dataset['B'].dense[