Example #1
    def __init__(
            self,
            numberOfCols=500,
            burnIn=2,  # Used for evaluating the prediction score
            collectStats=False,  # If true, collect training and inference stats
            seed=42,
            verbosity=VERBOSITY,
            predictionMethod='random',  # "random" or "zeroth"
            **kwargs):

        # Init the base class
        TP.__init__(self,
                    numberOfCols=numberOfCols,
                    cellsPerColumn=1,
                    burnIn=burnIn,
                    collectStats=collectStats,
                    seed=seed,
                    verbosity=verbosity)

        self.predictionMethod = predictionMethod

        #---------------------------------------------------------------------------------
        # Create basic data structures for keeping track of column statistics

        # Number of times each column has been active during learning
        self.columnCount = numpy.zeros(numberOfCols, dtype="int32")

        # Running average of input density
        self.averageDensity = 0.05
Example #2
  def testCheckpointMiddleOfSequence(self):
    # Create a model and give it some inputs to learn.
    tp1 = TP(numberOfCols=100, cellsPerColumn=12, verbosity=VERBOSITY)
    sequences = [self.generateSequence() for _ in xrange(5)]
    train = list(itertools.chain.from_iterable(sequences[:3] +
                                               [sequences[3][:5]]))
    for bottomUpInput in train:
      if bottomUpInput is None:
        tp1.reset()
      else:
        tp1.compute(bottomUpInput, True, True)

    # Serialize and deserialize the TP.
    checkpointPath = os.path.join(self._tmpDir, 'a')
    tp1.saveToFile(checkpointPath)
    tp2 = pickle.loads(pickle.dumps(tp1))
    tp2.loadFromFile(checkpointPath)

    # Check that the TPs are the same.
    self.assertTPsEqual(tp1, tp2)

    # Feed some data into the models.
    test = list(itertools.chain.from_iterable([sequences[3][5:]] +
                                              sequences[3:]))
    for bottomUpInput in test:
      if bottomUpInput is None:
        tp1.reset()
        tp2.reset()
      else:
        result1 = tp1.compute(bottomUpInput, True, True)
        result2 = tp2.compute(bottomUpInput, True, True)

        self.assertTPsEqual(tp1, tp2)
        self.assertTrue(numpy.array_equal(result1, result2))
Example #3
  def __init__(self,
               numberOfCols=16384, cellsPerColumn=8,
                initialPerm=0.5, connectedPerm=0.5,
                minThreshold=164, newSynapseCount=164,
                permanenceInc=0.1, permanenceDec=0.0,
                activationThreshold=164,
                pamLength=10,
                checkpointDir=None):

    self.tp = TP(numberOfCols=numberOfCols, cellsPerColumn=cellsPerColumn,
                initialPerm=initialPerm, connectedPerm=connectedPerm,
                minThreshold=minThreshold, newSynapseCount=newSynapseCount,
                permanenceInc=permanenceInc, permanenceDec=permanenceDec,
                
                # 1/2 of the on bits = (16384 * .02) / 2
                activationThreshold=activationThreshold,
                globalDecay=0, burnIn=1,
                #verbosity=3,  # who knows what this does...
                checkSynapseConsistency=False,
                pamLength=pamLength)

    self.checkpointDir = checkpointDir
    self.checkpointPklPath = None
    self.checkpointDataPath = None
    self._initCheckpoint()
Example #4
  def __init__(self,
               numberOfCols =500,
               burnIn =2,             # Used for evaluating the prediction score
               collectStats =False,   # If true, collect training and inference stats
               seed =42,
               verbosity =VERBOSITY,
               predictionMethod = 'random',  # "random" or "zeroth"
               **kwargs
               ):

    # Init the base class
    TP.__init__(self,
               numberOfCols = numberOfCols,
               cellsPerColumn = 1,
               burnIn = burnIn,
               collectStats = collectStats,
               seed = seed,
               verbosity = verbosity)

    self.predictionMethod = predictionMethod

    #---------------------------------------------------------------------------------
    # Create basic data structures for keeping track of column statistics

    # Number of times each column has been active during learning
    self.columnCount = numpy.zeros(numberOfCols, dtype="int32")

    # Running average of input density
    self.averageDensity = 0.05
Example #5
 def reset(self):
   """ Reset the state of all cells.
   This is normally used between sequences while training. All internal states
   are reset to 0.
   """
   if self.verbosity >= 3:
     print "TP Reset"
   self._setStatePointers()
   self.cells4.reset()
   TP.reset(self)
Example #6
 def reset(self):
     """ Reset the state of all cells.
 This is normally used between sequences while training. All internal states
 are reset to 0.
 """
     if self.verbosity >= 3:
         print "TP Reset"
     self._setStatePointers()
     self.cells4.reset()
     TP.reset(self)
Example #7
  def testCheckpointMiddleOfSequence2(self):
    """More complex test of checkpointing in the middle of a sequence."""
    tp1 = TP(2048, 32, 0.21, 0.5, 11, 20, 0.1, 0.1, 1.0, 0.0, 14, False, 5, 2,
             False, 1960, 0, False, '', 3, 10, 5, 0, 32, 128, 32, 'normal')
    tp2 = TP(2048, 32, 0.21, 0.5, 11, 20, 0.1, 0.1, 1.0, 0.0, 14, False, 5, 2,
             False, 1960, 0, False, '', 3, 10, 5, 0, 32, 128, 32, 'normal')

    with resource_stream(__name__, 'data/tp_input.csv') as fin:
      reader = csv.reader(fin)
      records = []
      for bottomUpInStr in fin:
        bottomUpIn = numpy.array(eval('[' + bottomUpInStr.strip() + ']'),
                                 dtype='int32')
        records.append(bottomUpIn)

    i = 1
    for r in records[:250]:
      print i
      i += 1
      output1 = tp1.compute(r, True, True)
      output2 = tp2.compute(r, True, True)
      self.assertTrue(numpy.array_equal(output1, output2))

    print 'Serializing and deserializing models.'

    savePath1 = os.path.join(self._tmpDir, 'tp1.bin')
    tp1.saveToFile(savePath1)
    tp3 = pickle.loads(pickle.dumps(tp1))
    tp3.loadFromFile(savePath1)

    savePath2 = os.path.join(self._tmpDir, 'tp2.bin')
    tp2.saveToFile(savePath2)
    tp4 = pickle.loads(pickle.dumps(tp2))
    tp4.loadFromFile(savePath2)

    self.assertTPsEqual(tp1, tp3)
    self.assertTPsEqual(tp2, tp4)

    for r in records[250:]:
      print i
      i += 1
      out1 = tp1.compute(r, True, True)
      out2 = tp2.compute(r, True, True)
      out3 = tp3.compute(r, True, True)
      out4 = tp4.compute(r, True, True)

      self.assertTrue(numpy.array_equal(out1, out2))
      self.assertTrue(numpy.array_equal(out1, out3))
      self.assertTrue(numpy.array_equal(out1, out4))

    self.assertTPsEqual(tp1, tp2)
    self.assertTPsEqual(tp1, tp3)
    self.assertTPsEqual(tp2, tp4)
Example #8
    def _initEphemerals(self):
        """
    Initialize all ephemeral members after being restored to a pickled state.
    """
        TP._initEphemerals(self)
        # ---------------------------------------------------------------------------------
        # cells4 specific initialization

        # If True, let C++ allocate memory for activeState, predictedState, and
        # learnState. In this case we can retrieve copies of these states but can't
        # set them directly from Python. If False, Python can allocate them as
        # numpy arrays and we can pass pointers to the C++ using setStatePointers
        self.allocateStatesInCPP = False

        # Set this to true for debugging or accessing learning states
        self.retrieveLearningStates = False

        if self.makeCells4Ephemeral:
            self.cells4 = Cells4(
                self.numberOfCols,
                self.cellsPerColumn,
                self.activationThreshold,
                self.minThreshold,
                self.newSynapseCount,
                self.segUpdateValidDuration,
                self.initialPerm,
                self.connectedPerm,
                self.permanenceMax,
                self.permanenceDec,
                self.permanenceInc,
                self.globalDecay,
                self.doPooling,
                self.seed,
                self.allocateStatesInCPP,
                self.checkSynapseConsistency,
            )

            self.cells4.setVerbosity(self.verbosity)
            self.cells4.setPamLength(self.pamLength)
            self.cells4.setMaxAge(self.maxAge)
            self.cells4.setMaxInfBacktrack(self.maxInfBacktrack)
            self.cells4.setMaxLrnBacktrack(self.maxLrnBacktrack)
            self.cells4.setMaxSeqLength(self.maxSeqLength)
            self.cells4.setMaxSegmentsPerCell(self.maxSegmentsPerCell)
            self.cells4.setMaxSynapsesPerCell(self.maxSynapsesPerSegment)

            self._setStatePointers()
Example #9
 def _getEphemeralMembers(self):
   """
   List of our member variables that don't need to be saved
   """
   e = TP._getEphemeralMembers(self)
   if self.makeCells4Ephemeral:
     e.extend(['cells4'])
   return e
Example #10
 def _getEphemeralMembers(self):
     """
      List of our member variables that don't need to be saved
      """
     e = TP._getEphemeralMembers(self)
     if self.makeCells4Ephemeral:
         e.extend(['cells4'])
     return e
Example #11
  def _initEphemerals(self):
    """
    Initialize all ephemeral members after being restored to a pickled state.
    """
    TP._initEphemerals(self)
    #---------------------------------------------------------------------------------
    # cells4 specific initialization

    # If True, let C++ allocate memory for activeState, predictedState, and
    # learnState. In this case we can retrieve copies of these states but can't
    # set them directly from Python. If False, Python can allocate them as
    # numpy arrays and we can pass pointers to the C++ using setStatePointers
    self.allocateStatesInCPP = False

    # Set this to true for debugging or accessing learning states
    self.retrieveLearningStates = False

    if self.makeCells4Ephemeral:
      self.cells4 = Cells4(self.numberOfCols,
                 self.cellsPerColumn,
                 self.activationThreshold,
                 self.minThreshold,
                 self.newSynapseCount,
                 self.segUpdateValidDuration,
                 self.initialPerm,
                 self.connectedPerm,
                 self.permanenceMax,
                 self.permanenceDec,
                 self.permanenceInc,
                 self.globalDecay,
                 self.doPooling,
                 self.seed,
                 self.allocateStatesInCPP,
                 self.checkSynapseConsistency)

      self.cells4.setVerbosity(self.verbosity)
      self.cells4.setPamLength(self.pamLength)
      self.cells4.setMaxAge(self.maxAge)
      self.cells4.setMaxInfBacktrack(self.maxInfBacktrack)
      self.cells4.setMaxLrnBacktrack(self.maxLrnBacktrack)
      self.cells4.setMaxSeqLength(self.maxSeqLength)
      self.cells4.setMaxSegmentsPerCell(self.maxSegmentsPerCell)
      self.cells4.setMaxSynapsesPerCell(self.maxSynapsesPerSegment)

      self._setStatePointers()
Example #12
def _createTPs(numCols, cellsPerColumn=4, checkSynapseConsistency=True):
    """Create TP and TP10X instances with identical parameters. """

    # Keep these fixed for both TP's:
    minThreshold = 4
    activationThreshold = 4
    newSynapseCount = 5
    initialPerm = 0.6
    connectedPerm = 0.5
    permanenceInc = 0.1
    permanenceDec = 0.001
    globalDecay = 0.0

    if VERBOSITY > 1:
        print "Creating TP10X instance"

    cppTp = TP10X2(numberOfCols=numCols,
                   cellsPerColumn=cellsPerColumn,
                   initialPerm=initialPerm,
                   connectedPerm=connectedPerm,
                   minThreshold=minThreshold,
                   newSynapseCount=newSynapseCount,
                   permanenceInc=permanenceInc,
                   permanenceDec=permanenceDec,
                   activationThreshold=activationThreshold,
                   globalDecay=globalDecay,
                   burnIn=1,
                   seed=SEED,
                   verbosity=VERBOSITY,
                   checkSynapseConsistency=checkSynapseConsistency,
                   pamLength=1000)

    if VERBOSITY > 1:
        print "Creating PY TP instance"

    pyTp = TP(numberOfCols=numCols,
              cellsPerColumn=cellsPerColumn,
              initialPerm=initialPerm,
              connectedPerm=connectedPerm,
              minThreshold=minThreshold,
              newSynapseCount=newSynapseCount,
              permanenceInc=permanenceInc,
              permanenceDec=permanenceDec,
              activationThreshold=activationThreshold,
              globalDecay=globalDecay,
              burnIn=1,
              seed=SEED,
              verbosity=VERBOSITY,
              pamLength=1000)

    return cppTp, pyTp
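
A hedged sketch of how the two poolers returned by _createTPs might be exercised (my assumption, reusing the learn() and fdrutils.tpDiff2() calls that appear in the later examples; the input vector here is arbitrary):

# Hypothetical check that the C++ and Python TPs stay in sync.
cppTp, pyTp = _createTPs(numCols=100)

x = numpy.zeros(100, dtype='uint32')
x[:5] = 1                      # arbitrary small set of active columns

y1 = cppTp.learn(x)
y2 = pyTp.learn(x)
assert fdrutils.tpDiff2(cppTp, pyTp, VERBOSITY)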
Example #13
    def setUp(self):
        self.tmPy = TemporalMemoryPy(columnDimensions=[2048],
                                     cellsPerColumn=32,
                                     initialPermanence=0.5,
                                     connectedPermanence=0.8,
                                     minThreshold=10,
                                     maxNewSynapseCount=12,
                                     permanenceIncrement=0.1,
                                     permanenceDecrement=0.05,
                                     activationThreshold=15)

        self.tmCPP = TemporalMemoryCPP(columnDimensions=[2048],
                                       cellsPerColumn=32,
                                       initialPermanence=0.5,
                                       connectedPermanence=0.8,
                                       minThreshold=10,
                                       maxNewSynapseCount=12,
                                       permanenceIncrement=0.1,
                                       permanenceDecrement=0.05,
                                       activationThreshold=15)

        self.tp = TP(numberOfCols=2048,
                     cellsPerColumn=32,
                     initialPerm=0.5,
                     connectedPerm=0.8,
                     minThreshold=10,
                     newSynapseCount=12,
                     permanenceInc=0.1,
                     permanenceDec=0.05,
                     activationThreshold=15,
                     globalDecay=0,
                     burnIn=1,
                     checkSynapseConsistency=False,
                     pamLength=1)

        self.tp10x2 = TP10X2(numberOfCols=2048,
                             cellsPerColumn=32,
                             initialPerm=0.5,
                             connectedPerm=0.8,
                             minThreshold=10,
                             newSynapseCount=12,
                             permanenceInc=0.1,
                             permanenceDec=0.05,
                             activationThreshold=15,
                             globalDecay=0,
                             burnIn=1,
                             checkSynapseConsistency=False,
                             pamLength=1)

        self.patternMachine = PatternMachine(2048, 40, num=100)
        self.sequenceMachine = SequenceMachine(self.patternMachine)
Example #14
 def _create_network(self, mean=128):
     """
     :param mean: int, the mean of the frame pixel values; used in BASE_ENCODE.
     """
     # Some rules for creating the network:
     # the product of the shape's two dimensions must equal inputDimensions,
     # and columnDimensions must equal numberOfCols.
     self.enc = MatrixEncoder(shape=self.shape, mean=mean)
     self.sp = SpatialPooler(
         inputDimensions=self.shape[0] * self.shape[1],
         columnDimensions=self.column_dimensions,
         potentialRadius=self.potential_radius,
         numActiveColumnsPerInhArea=self.numActive_columns_perInhArea,
         globalInhibition=self.global_inhibition,
         synPermActiveInc=self.syn_perm_active_inc,
         potentialPct=self.potential_pct,
         synPermInactiveDec=self.synPermInactiveDec,
         synPermConnected=self.synPermConnected,
         seed=self.sp_seed,
         localAreaDensity=self.localAreaDensity,
         stimulusThreshold=self.stimulusThreshold,
         maxBoost=self.maxBoost)
     self.tp = TP(numberOfCols=self.column_dimensions,
                  cellsPerColumn=self.cells_per_column,
                  initialPerm=self.initial_perm,
                  connectedPerm=self.connected_perm,
                  minThreshold=self.min_threshold,
                  newSynapseCount=self.new_synapse_count,
                  permanenceInc=self.permanence_inc,
                  permanenceDec=self.permanence_dec,
                  activationThreshold=self.activation_threshold,
                  globalDecay=self.global_decay,
                  burnIn=self.burn_in,
                  pamLength=self.pam_length,
                  maxSynapsesPerSegment=self.maxSynapsesPerSegment,
                  maxSegmentsPerCell=self.maxSegmentsPerCell,
                  seed=self.tp_seed,
                  maxAge=self.maxAge)
Example #15
    def setUp(self):
        self.tmPy = TemporalMemoryPy(columnDimensions=[2048],
                                     cellsPerColumn=32,
                                     initialPermanence=0.5,
                                     connectedPermanence=0.8,
                                     minThreshold=10,
                                     maxNewSynapseCount=12,
                                     permanenceIncrement=0.1,
                                     permanenceDecrement=0.05,
                                     activationThreshold=15)

        self.tmCPP = TemporalMemoryCPP(columnDimensions=[2048],
                                       cellsPerColumn=32,
                                       initialPermanence=0.5,
                                       connectedPermanence=0.8,
                                       minThreshold=10,
                                       maxNewSynapseCount=12,
                                       permanenceIncrement=0.1,
                                       permanenceDecrement=0.05,
                                       activationThreshold=15)

        self.tp = TP(numberOfCols=2048,
                     cellsPerColumn=32,
                     initialPerm=0.5,
                     connectedPerm=0.8,
                     minThreshold=10,
                     newSynapseCount=12,
                     permanenceInc=0.1,
                     permanenceDec=0.05,
                     activationThreshold=15,
                     globalDecay=0,
                     burnIn=1,
                     checkSynapseConsistency=False,
                     pamLength=1)

        self.tp10x2 = TP10X2(numberOfCols=2048,
                             cellsPerColumn=32,
                             initialPerm=0.5,
                             connectedPerm=0.8,
                             minThreshold=10,
                             newSynapseCount=12,
                             permanenceInc=0.1,
                             permanenceDec=0.05,
                             activationThreshold=15,
                             globalDecay=0,
                             burnIn=1,
                             checkSynapseConsistency=False,
                             pamLength=1)

        self.scalarEncoder = RandomDistributedScalarEncoder(0.88)
Example #16
def _createTps(numCols):
    """Create two instances of temporal poolers (TP.py and TP10X2.py) with
  identical parameter settings."""

    # Keep these fixed:
    minThreshold = 4
    activationThreshold = 5
    newSynapseCount = 7
    initialPerm = 0.3
    connectedPerm = 0.5
    permanenceInc = 0.1
    permanenceDec = 0.05
    globalDecay = 0
    cellsPerColumn = 1

    cppTp = TP10X2(numberOfCols=numCols,
                   cellsPerColumn=cellsPerColumn,
                   initialPerm=initialPerm,
                   connectedPerm=connectedPerm,
                   minThreshold=minThreshold,
                   newSynapseCount=newSynapseCount,
                   permanenceInc=permanenceInc,
                   permanenceDec=permanenceDec,
                   activationThreshold=activationThreshold,
                   globalDecay=globalDecay,
                   burnIn=1,
                   seed=SEED,
                   verbosity=VERBOSITY,
                   checkSynapseConsistency=True,
                   pamLength=1000)

    # Ensure we are copying over learning states for TPDiff
    cppTp.retrieveLearningStates = True

    pyTp = TP(numberOfCols=numCols,
              cellsPerColumn=cellsPerColumn,
              initialPerm=initialPerm,
              connectedPerm=connectedPerm,
              minThreshold=minThreshold,
              newSynapseCount=newSynapseCount,
              permanenceInc=permanenceInc,
              permanenceDec=permanenceDec,
              activationThreshold=activationThreshold,
              globalDecay=globalDecay,
              burnIn=1,
              seed=SEED,
              verbosity=VERBOSITY,
              pamLength=1000)

    return cppTp, pyTp
Example #17
    def testCheckpointMiddleOfSequence2(self):
        """More complex test of checkpointing in the middle of a sequence."""
        tp1 = TP(2048, 32, 0.21, 0.5, 11, 20, 0.1, 0.1, 1.0, 0.0, 14, False, 5,
                 2, False, 1960, 0, False, '', 3, 10, 5, 0, 32, 128, 32,
                 'normal')
        tp2 = TP(2048, 32, 0.21, 0.5, 11, 20, 0.1, 0.1, 1.0, 0.0, 14, False, 5,
                 2, False, 1960, 0, False, '', 3, 10, 5, 0, 32, 128, 32,
                 'normal')

        with resource_stream(__name__, 'data/tp_input.csv') as fin:
            reader = csv.reader(fin)
            records = []
            for bottomUpInStr in fin:
                bottomUpIn = numpy.array(eval('[' + bottomUpInStr.strip() +
                                              ']'),
                                         dtype='int32')
                records.append(bottomUpIn)

        for r in records[:250]:
            output1 = tp1.compute(r, True, True)
            output2 = tp2.compute(r, True, True)
            self.assertTrue(numpy.array_equal(output1, output2))

        tp3 = pickle.loads(pickle.dumps(tp1))
        tp4 = pickle.loads(pickle.dumps(tp2))

        i = 0
        for r in records[250:]:
            print i
            i += 1
            out1 = tp1.compute(r, True, True)
            out2 = tp2.compute(r, True, True)
            out3 = tp3.compute(r, True, True)
            out4 = tp4.compute(r, True, True)

            self.assertTPsEqual(tp1, tp2)

            self.assertTrue(numpy.array_equal(out1, out2))
            self.assertTrue(numpy.array_equal(out1, out3))
            self.assertTrue(numpy.array_equal(out1, out4))
Example #18
    def __init__(
        self,
        numberOfCols=500,
        cellsPerColumn=10,
        initialPerm=0.11,  # TODO: check perm numbers with Ron
        connectedPerm=0.50,
        minThreshold=8,
        newSynapseCount=15,
        permanenceInc=0.10,
        permanenceDec=0.10,
        permanenceMax=1.0,  # never exceed this value
        globalDecay=0.10,
        activationThreshold=12,  # 3/4 of newSynapseCount TODO make fraction
        doPooling=False,  # allows to turn off pooling
        segUpdateValidDuration=5,
        burnIn=2,  # Used for evaluating the prediction score
        collectStats=False,  # If true, collect training and inference stats
        seed=42,
        verbosity=VERBOSITY,
        checkSynapseConsistency=False,

        # List (as string) of trivial predictions to compute alongside
        # the full TP. See TrivialPredictor.py for a list of allowed methods
        trivialPredictionMethods='',
        pamLength=1,
        maxInfBacktrack=10,
        maxLrnBacktrack=5,
        maxAge=100000,
        maxSeqLength=32,

        # Fixed size mode params
        maxSegmentsPerCell=-1,
        maxSynapsesPerSegment=-1,

        # Output control
        outputType='normal',
    ):

        #---------------------------------------------------------------------------------
        # Save our __init__ args for debugging
        self._initArgsDict = _extractCallingMethodArgs()

        #---------------------------------------------------------------------------------
        # These two variables are for testing

        # If set to True, Cells4 will perform (time consuming) invariance checks
        self.checkSynapseConsistency = checkSynapseConsistency

        # If set to False, Cells4 will *not* be treated as an ephemeral member
        # and full TP10X pickling is possible. This is useful for testing
        # pickle/unpickle without saving Cells4 to an external file
        self.makeCells4Ephemeral = True

        #---------------------------------------------------------------------------------
        # Init the base class
        TP.__init__(
            self,
            numberOfCols=numberOfCols,
            cellsPerColumn=cellsPerColumn,
            initialPerm=initialPerm,
            connectedPerm=connectedPerm,
            minThreshold=minThreshold,
            newSynapseCount=newSynapseCount,
            permanenceInc=permanenceInc,
            permanenceDec=permanenceDec,
            permanenceMax=permanenceMax,  # never exceed this value
            globalDecay=globalDecay,
            activationThreshold=activationThreshold,
            doPooling=doPooling,
            segUpdateValidDuration=segUpdateValidDuration,
            burnIn=burnIn,
            collectStats=collectStats,
            seed=seed,
            verbosity=verbosity,
            trivialPredictionMethods=trivialPredictionMethods,
            pamLength=pamLength,
            maxInfBacktrack=maxInfBacktrack,
            maxLrnBacktrack=maxLrnBacktrack,
            maxAge=maxAge,
            maxSeqLength=maxSeqLength,
            maxSegmentsPerCell=maxSegmentsPerCell,
            maxSynapsesPerSegment=maxSynapsesPerSegment,
            outputType=outputType,
        )
Example #19
class Model():


  def __init__(self,
               numberOfCols=16384, cellsPerColumn=8,
                initialPerm=0.5, connectedPerm=0.5,
                minThreshold=164, newSynapseCount=164,
                permanenceInc=0.1, permanenceDec=0.0,
                activationThreshold=164,
                pamLength=10,
                checkpointDir=None):

    self.tp = TP(numberOfCols=numberOfCols, cellsPerColumn=cellsPerColumn,
                initialPerm=initialPerm, connectedPerm=connectedPerm,
                minThreshold=minThreshold, newSynapseCount=newSynapseCount,
                permanenceInc=permanenceInc, permanenceDec=permanenceDec,
                
                # 1/2 of the on bits = (16384 * .02) / 2
                activationThreshold=activationThreshold,
                globalDecay=0, burnIn=1,
                #verbosity=3,  # who knows what this does...
                checkSynapseConsistency=False,
                pamLength=pamLength)

    self.checkpointDir = checkpointDir
    self.checkpointPklPath = None
    self.checkpointDataPath = None
    self._initCheckpoint()


  def _initCheckpoint(self):
    if self.checkpointDir:
      if not os.path.exists(self.checkpointDir):
        os.makedirs(self.checkpointDir)

      self.checkpointPklPath = self.checkpointDir + "/model.pkl"
      self.checkpointDataPath = self.checkpointDir + "/model.data"


  def canCheckpoint(self):
    return self.checkpointDir != None


  def hasCheckpoint(self):
    return (os.path.exists(self.checkpointPklPath) and
            os.path.exists(self.checkpointDataPath))


  def load(self):
    if not self.checkpointDir:
      raise(Exception("No checkpoint directory specified"))

    if not self.hasCheckpoint():
      raise(Exception("Could not find checkpoint file"))
      
    with open(self.checkpointPklPath, 'rb') as f:
      self.tp = pickle.load(f)

    self.tp.loadFromFile(self.checkpointDataPath)


  def save(self):
    if not self.checkpointDir:
      raise(Exception("No checkpoint directory specified"))

    self.tp.saveToFile(self.checkpointDataPath)

    with open(self.checkpointPklPath, 'wb') as f:
      pickle.dump(self.tp, f)


  def feedTerm(self, term, learn=True):
    """ Feed a Term to model, returning next predicted Term """
    tp = self.tp
    array = numpy.array(term.toArray(), dtype="uint32")
    tp.resetStats()
    tp.compute(array, enableLearn = learn, computeInfOutput = True)
    #print "ret:  " + repr(ret)
    #if ret.all() == array.all():
    #  print "EQUAL to input"
    ret = tp.getStats()
    #ret = tp.printStates()

    print "ret: " + repr(ret)
    print
    print
    print "*****************************************"

    predictedCells = tp.getPredictedState()
    predictedColumns = predictedCells.max(axis=1)
    
    predictedBitmap = predictedColumns.nonzero()[0].tolist()
    return Term().createFromBitmap(predictedBitmap)
  

  def resetSequence(self):
    print "RESET"
    self.tp.reset()
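
A minimal, hypothetical usage of the Model class above; it assumes an iterable of Term objects providing the toArray()/createFromBitmap() interface that feedTerm() relies on, and a checkpoint directory of your choosing:

# Hypothetical driver for the Model class (terms and the checkpoint path are assumptions).
model = Model(numberOfCols=16384, cellsPerColumn=8, checkpointDir="/tmp/tp_checkpoint")

for term in terms:                    # terms: an iterable of Term objects (assumed)
    predicted = model.feedTerm(term, learn=True)

model.resetSequence()                 # mark the end of the sequence

if model.canCheckpoint():
    model.save()                      # writes model.pkl and model.data to checkpointDir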
Example #20
class HTMNetwork(object):
    """
    Attributes:
    shape: tuple -- the size of the encoder's output; for matrix_encoder
            it has two int elements.
    """
    def __init__(
            self,
            shape=(32, 32),  # tuple -- two element
            inputDimensions=(1024, ),  # tuple two element or int
            columnDimensions=1024,  # int, tuple is not allowed
            globalInhibition=1,
            sp_seed=1960,
            potentialPct=0.8,
            synPermConnected=0.10,
            synPermActiveInc=0.05,
            synPermInactiveDec=0.0008,
            maxBoost=2.0,
            potentialRadius=16,
            numActiveColumnsPerInhArea=40.0,
            localAreaDensity=-1.0,
            stimulusThreshold=0,
            numberOfCols=1024,  # int
            cellsPerColumn=16,  # 32 is the official setting
            tp_seed=1960,
            newSynapseCount=20,
            maxSynapsesPerSegment=32,
            maxSegmentsPerCell=128,
            initialPerm=0.21,
            permanenceInc=0.1,
            permanenceDec=0.0,  # 0.1 is the official setting
            globalDecay=0,
            maxAge=0,
            minThreshold=12,
            activationThreshold=12,
            pamLength=1,
            connectedPerm=0.5,
            burnIn=2,
            visible=1):

        # size consistency checks
        if type(inputDimensions) == int:
            self._assert_fun(shape, (inputDimensions, ))
        else:
            self._assert_fun(shape, inputDimensions)
        self._assert_fun((columnDimensions, ), (numberOfCols, ))

        self.shape = shape

        # the params of the sp
        self.input_dimensions = inputDimensions
        self.column_dimensions = columnDimensions
        self.potential_radius = potentialRadius
        self.numActive_columns_perInhArea = numActiveColumnsPerInhArea
        self.global_inhibition = globalInhibition
        self.syn_perm_active_inc = synPermActiveInc
        self.potential_pct = potentialPct
        self.synPermInactiveDec = synPermInactiveDec
        self.synPermConnected = synPermConnected
        self.sp_seed = sp_seed
        self.localAreaDensity = localAreaDensity
        self.stimulusThreshold = stimulusThreshold
        self.maxBoost = maxBoost

        # the params of the tp
        self.number_of_cols = numberOfCols
        self.cells_per_column = cellsPerColumn
        self.initial_perm = initialPerm
        self.connected_perm = connectedPerm
        self.min_threshold = minThreshold
        self.new_synapse_count = newSynapseCount
        self.permanence_inc = permanenceInc
        self.permanence_dec = permanenceDec
        self.activation_threshold = activationThreshold
        self.global_decay = globalDecay
        self.burn_in = burnIn
        self.pam_length = pamLength
        self.maxAge = maxAge
        self.maxSynapsesPerSegment = maxSynapsesPerSegment
        self.maxSegmentsPerCell = maxSegmentsPerCell
        self.tp_seed = tp_seed

        self.visible = visible
        self.label = ""

        # network
        self.enc = None
        self.sp = None
        self.tp = None

        self._create_network()

    def set_label(self, label):
        """
        :param label: str -- the tag of the network
        """
        self.label = label

    def get_label(self):
        return self.label

    def _assert_fun(self, param1, param2):
        """
        :param param1, param2: tuple -- contain int elements.
        Make sure the two params describe the same total size.
        """
        product_elements1 = 1
        product_elements2 = 1

        for e in param1:
            product_elements1 = product_elements1 * e
        for i in param2:
            product_elements2 = product_elements2 * i
        assert product_elements1 == product_elements2

    def _check_type(self):
        pass

    def view(self):
        pass

    def _create_network(self, mean=128):
        """
        :param mean: int, the mean of the frame pixel values; used in BASE_ENCODE.
        """
        # Some rules for creating the network:
        # the product of the shape's two dimensions must equal inputDimensions,
        # and columnDimensions must equal numberOfCols.
        self.enc = MatrixEncoder(shape=self.shape, mean=mean)
        self.sp = SpatialPooler(
            inputDimensions=self.shape[0] * self.shape[1],
            columnDimensions=self.column_dimensions,
            potentialRadius=self.potential_radius,
            numActiveColumnsPerInhArea=self.numActive_columns_perInhArea,
            globalInhibition=self.global_inhibition,
            synPermActiveInc=self.syn_perm_active_inc,
            potentialPct=self.potential_pct,
            synPermInactiveDec=self.synPermInactiveDec,
            synPermConnected=self.synPermConnected,
            seed=self.sp_seed,
            localAreaDensity=self.localAreaDensity,
            stimulusThreshold=self.stimulusThreshold,
            maxBoost=self.maxBoost)
        self.tp = TP(numberOfCols=self.column_dimensions,
                     cellsPerColumn=self.cells_per_column,
                     initialPerm=self.initial_perm,
                     connectedPerm=self.connected_perm,
                     minThreshold=self.min_threshold,
                     newSynapseCount=self.new_synapse_count,
                     permanenceInc=self.permanence_inc,
                     permanenceDec=self.permanence_dec,
                     activationThreshold=self.activation_threshold,
                     globalDecay=self.global_decay,
                     burnIn=self.burn_in,
                     pamLength=self.pam_length,
                     maxSynapsesPerSegment=self.maxSynapsesPerSegment,
                     maxSegmentsPerCell=self.maxSegmentsPerCell,
                     seed=self.tp_seed,
                     maxAge=self.maxAge)

    def _compute(self, a_frame, output, sp_enable_learn, tp_enable_learn):
        """
        The core computation step of the network; training and prediction
        iterate over it.
        :param a_frame: Array, a frame of the video.
        :param output: np.ndarray, used to store the output of the sp.
        """
        matrix = self.enc.encodeIntoArray(a_frame,
                                          encoder_model=matrix_encoder.K_MEANS)

        # TODO(kawawa): show the output encoder and sp.
        # image = (np.int16(matrix)-1)*(-255)
        # cv2.imshow("kkk", np.uint8(image))
        # cv2.waitKey(10)
        self.sp.compute(inputVector=matrix,
                        learn=sp_enable_learn,
                        activeArray=output)
        # a = output
        self.tp.compute(bottomUpInput=output,
                        enableLearn=tp_enable_learn,
                        computeInfOutput=None)

    def train(self, frames_matrix, sp_enable_learn=True, tp_enable_learn=True):
        """
        Train the network on a series of frames.
        :param frames_matrix: an array of frames
        :param sp_enable_learn, tp_enable_learn: set the learning mode
        """
        output = np.zeros(self.column_dimensions, dtype=int)

        for i in range(len(frames_matrix)):
            self._compute(frames_matrix[i], output, sp_enable_learn,
                          tp_enable_learn)

    def _formatRow(self, x):
        """make a print format"""
        s = ''
        for c in range(len(x)):
            if c > 0 and c % 10 == 0:
                s += ' '
            s += str(x[c])
        s += ' '
        return s

    def predict_detect(self,
                       frames_matrix,
                       sp_enable_learn=False,
                       tp_enable_learn=False):
        """
        For each frame, predict the next frame, compare the prediction with the
        next input, and score how well they correspond.
        :param frames_matrix: an array of frames
        :param sp_enable_learn, tp_enable_learn: set the learning mode
        :return: float -- the cumulative correspondence score between predicted and input frames
        """
        output = np.zeros(self.column_dimensions, dtype=int)
        score_list = []

        self._compute(frames_matrix[0], output, sp_enable_learn,
                      tp_enable_learn)
        pre_prediction = self.tp.getPredictedState()

        # view the prediction state
        if self.visible > 1:
            self.tp.printStates(printPrevious=False, printLearnState=False)
            self._formatRow(pre_prediction.max(axis=1).nonzero())

        for i in range(len(frames_matrix))[1:]:
            self._compute(frames_matrix[i], output, sp_enable_learn,
                          tp_enable_learn)
            score = self._give_a_mark(sp_output=output,
                                      tp_prediction=pre_prediction)
            score_list.append(score)
            pre_prediction = self.tp.getPredictedState()

            # view the prediction state
            if self.visible > 1:
                self.tp.printStates(printPrevious=False, printLearnState=False)
                self._formatRow(pre_prediction.max(axis=1).nonzero())

        return sum(score_list)

    def getPredictedState(self):
        return self.tp.getPredictedState()

    def get_sp_active_cells_index(self, sp_cells_state):
        """
        :return index of active cells/columns in format:
        (array([0, 2, 4], dtype=int64),)
        """
        return sp_cells_state.nonzero()

    def get_tp_active_cells_index(self, tp_cells_state):
        """
        eg:
        the tp_cells_state = [[1, 0],
                              [0, 0],
                              [0, 1],
                              [0, 0],
                              [1, 0]] is a np.ndarray
        :return: index of active columns in format:
        (array([0, 2, 4], dtype=int64),)
        """
        return tp_cells_state.max(axis=1).nonzero()

    def get_tp_active_columns(self, sp_cells_state):
        """
        eg:
        the tp_cells_state = [[1, 0],
                              [0, 0],
                              [0, 1],
                              [0, 0],
                              [1, 0]] is a np.ndarray
        :return: active columns encoded as [1, 0, 1, 0, 1]
        """
        return sp_cells_state.max(axis=1)

    def _corresponding(self, sp_active_column, tp_active_column):
        """
        compute number of bits where two binary array have the same '1' value.
        sp_active_column and tp_active_column have size 1-d binary array.
        """
        sum = sp_active_column + tp_active_column
        corresponding_elements = sum / 2
        return corresponding_elements.sum()

    def _give_a_mark(self, sp_output, tp_prediction):
        """
        For two frames, the next input and the current prediction:
        (num of shared 1-value bits) / (num of 1-value bits in sp_output)
        :return: a float between 0 and 1; 1 means a good prediction
        """
        tp_active_columns = self.get_tp_active_columns(tp_prediction)
        corresponding_num = self._corresponding(sp_output, tp_active_columns)

        return float(corresponding_num) / float(sum(sp_output))
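
A short, hypothetical driver for HTMNetwork; frames_matrix is assumed to be a sequence of frames whose size matches the shape parameter (32x32 here):

# Hypothetical driver; frames_matrix is an assumed sequence of 32x32 frames.
network = HTMNetwork(shape=(32, 32), inputDimensions=(1024,), columnDimensions=1024)
network.set_label("camera_01")

# Learn the sequence, then score prediction quality with learning disabled.
network.train(frames_matrix, sp_enable_learn=True, tp_enable_learn=True)
score = network.predict_detect(frames_matrix, sp_enable_learn=False,
                               tp_enable_learn=False)
print "prediction score:", score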
Example #21
# Create our NuPIC entities

enc = ScalarEncoder(n=50,
                    w=3,
                    minval=0,
                    maxval=100,
                    clipInput=True,
                    forced=True)

tp = TP(numberOfCols=50,
        cellsPerColumn=4,
        initialPerm=0.5,
        connectedPerm=0.5,
        minThreshold=5,
        newSynapseCount=5,
        permanenceInc=0.1,
        permanenceDec=0.1,
        activationThreshold=3,
        globalDecay=0.1,
        burnIn=1,
        checkSynapseConsistency=False,
        pamLength=3)

# Setup our PyAudio Stream

p = pyaudio.PyAudio()
stream = p.open(format=pyaudio.paInt16,
                channels=1,
                rate=int(p.get_device_info_by_index(0)['defaultSampleRate']),
                input=True,
                frames_per_buffer=1024 * 5)
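
The snippet above only sets up the encoder, temporal pooler, and audio stream; one possible read-encode-compute loop (my sketch, not part of the original, using audioop.rms for loudness and the TP calls shown in the other examples) could look like this:

import audioop
import math
import numpy

while True:
    # Read a buffer of audio and turn it into a rough decibel value in [0, 100].
    data = stream.read(1024 * 5)
    rms = audioop.rms(data, 2)                    # 2 bytes per sample (paInt16)
    db = min(100, max(0, int(20 * math.log10(max(rms, 1)))))

    # Encode the loudness and feed it to the temporal pooler.
    sdr = numpy.array(enc.encode(db), dtype="uint32")
    tp.compute(sdr, enableLearn=True, computeInfOutput=True)

    # Columns predicted for the next time step.
    predictedColumns = tp.getPredictedState().max(axis=1).nonzero()[0]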
Example #22
  def __init__(self,
               numberOfCols = 500,
               cellsPerColumn = 10,
               initialPerm = 0.11, # TODO: check perm numbers with Ron
               connectedPerm = 0.50,
               minThreshold = 8,
               newSynapseCount = 15,
               permanenceInc = 0.10,
               permanenceDec = 0.10,
               permanenceMax = 1.0, # never exceed this value
               globalDecay = 0.10,
               activationThreshold = 12, # 3/4 of newSynapseCount TODO make fraction
               doPooling = False, # allows to turn off pooling
               segUpdateValidDuration = 5,
               burnIn = 2,             # Used for evaluating the prediction score
               collectStats = False,    # If true, collect training and inference stats
               seed = 42,
               verbosity = VERBOSITY,
               checkSynapseConsistency = False,

               # List (as string) of trivial predictions to compute alongside
               # the full TP. See TrivialPredictor.py for a list of allowed methods
               trivialPredictionMethods = '',
               pamLength = 1,
               maxInfBacktrack = 10,
               maxLrnBacktrack = 5,
               maxAge = 100000,
               maxSeqLength = 32,

               # Fixed size mode params
               maxSegmentsPerCell = -1,
               maxSynapsesPerSegment = -1,

               # Output control
               outputType = 'normal',
               ):

    #---------------------------------------------------------------------------------
    # Save our __init__ args for debugging
    self._initArgsDict = _extractCallingMethodArgs()

    #---------------------------------------------------------------------------------
    # These two variables are for testing

    # If set to True, Cells4 will perform (time consuming) invariance checks
    self.checkSynapseConsistency = checkSynapseConsistency

    # If set to False, Cells4 will *not* be treated as an ephemeral member
    # and full TP10X pickling is possible. This is useful for testing
    # pickle/unpickle without saving Cells4 to an external file
    self.makeCells4Ephemeral = True

    #---------------------------------------------------------------------------------
    # Init the base class
    TP.__init__(self,
               numberOfCols = numberOfCols,
               cellsPerColumn = cellsPerColumn,
               initialPerm = initialPerm,
               connectedPerm = connectedPerm,
               minThreshold = minThreshold,
               newSynapseCount = newSynapseCount,
               permanenceInc = permanenceInc,
               permanenceDec = permanenceDec,
               permanenceMax = permanenceMax, # never exceed this value
               globalDecay = globalDecay,
               activationThreshold = activationThreshold,
               doPooling = doPooling,
               segUpdateValidDuration = segUpdateValidDuration,
               burnIn = burnIn,
               collectStats = collectStats,
               seed = seed,
               verbosity = verbosity,
               trivialPredictionMethods = trivialPredictionMethods,
               pamLength = pamLength,
               maxInfBacktrack = maxInfBacktrack,
               maxLrnBacktrack = maxLrnBacktrack,
               maxAge = maxAge,
               maxSeqLength = maxSeqLength,
               maxSegmentsPerCell = maxSegmentsPerCell,
               maxSynapsesPerSegment = maxSynapsesPerSegment,
               outputType = outputType,
               )
Example #23
def create_network():
    enc = MatrixEncoder((64, 64))
    sp = SpatialPooler(inputDimensions=4096, columnDimensions=1024)
    tp = TP(numberOfCols=1024)

    return enc, sp, tp
Example #24
    def basicTest2(self,
                   tp,
                   numPatterns=100,
                   numRepetitions=3,
                   activity=15,
                   testTrimming=False,
                   testRebuild=False):
        """Basic test (basic run of learning and inference)"""
        # Create PY TP object that mirrors the one sent in.
        tpPy = TP(numberOfCols=tp.numberOfCols,
                  cellsPerColumn=tp.cellsPerColumn,
                  initialPerm=tp.initialPerm,
                  connectedPerm=tp.connectedPerm,
                  minThreshold=tp.minThreshold,
                  newSynapseCount=tp.newSynapseCount,
                  permanenceInc=tp.permanenceInc,
                  permanenceDec=tp.permanenceDec,
                  permanenceMax=tp.permanenceMax,
                  globalDecay=tp.globalDecay,
                  activationThreshold=tp.activationThreshold,
                  doPooling=tp.doPooling,
                  segUpdateValidDuration=tp.segUpdateValidDuration,
                  pamLength=tp.pamLength,
                  maxAge=tp.maxAge,
                  maxSeqLength=tp.maxSeqLength,
                  maxSegmentsPerCell=tp.maxSegmentsPerCell,
                  maxSynapsesPerSegment=tp.maxSynapsesPerSegment,
                  seed=tp.seed,
                  verbosity=tp.verbosity)

        # Ensure we are copying over learning states for TPDiff
        tp.retrieveLearningStates = True

        verbosity = VERBOSITY

        # Learn

        # Build up sequences
        sequence = fdrutils.generateCoincMatrix(nCoinc=numPatterns,
                                                length=tp.numberOfCols,
                                                activity=activity)
        for r in xrange(numRepetitions):
            for i in xrange(sequence.nRows()):

                #if i > 11:
                #  setVerbosity(6, tp, tpPy)

                if i % 10 == 0:
                    tp.reset()
                    tpPy.reset()

                if verbosity >= 2:
                    print "\n\n    ===================================\nPattern:",
                    print i, "Round:", r, "input:", sequence.getRow(i)

                y1 = tp.learn(sequence.getRow(i))
                y2 = tpPy.learn(sequence.getRow(i))

                # Ensure everything continues to work well even if we continuously
                # rebuild outSynapses structure
                if testRebuild:
                    tp.cells4.rebuildOutSynapses()

                if testTrimming:
                    tp.trimSegments()
                    tpPy.trimSegments()

                if verbosity > 2:
                    print "\n   ------  CPP states  ------ ",
                    tp.printStates()
                    print "\n   ------  PY states  ------ ",
                    tpPy.printStates()
                    if verbosity > 6:
                        print "C++ cells: "
                        tp.printCells()
                        print "PY cells: "
                        tpPy.printCells()

                if verbosity >= 3:
                    print "Num segments in PY and C++", tpPy.getNumSegments(), \
                        tp.getNumSegments()

                # Check if the two TP's are identical or not. This check is slow so
                # we do it every other iteration. Make it every iteration for debugging
                # as needed.
                self.assertTrue(fdrutils.tpDiff2(tp, tpPy, verbosity, False))

                # Check that outputs are identical
                self.assertLess(abs((y1 - y2).sum()), 3)

        print "Learning completed"

        self.assertTrue(fdrutils.tpDiff2(tp, tpPy, verbosity))

        # TODO: Need to check - currently failing this
        #checkCell0(tpPy)

        # Remove unconnected synapses and check TP's again

        # Test rebuild out synapses
        print "Rebuilding outSynapses"
        tp.cells4.rebuildOutSynapses()
        self.assertTrue(fdrutils.tpDiff2(tp, tpPy, VERBOSITY))

        print "Trimming segments"
        tp.trimSegments()
        tpPy.trimSegments()
        self.assertTrue(fdrutils.tpDiff2(tp, tpPy, VERBOSITY))

        # Save and reload after learning
        print "Pickling and unpickling"
        tp.makeCells4Ephemeral = False
        pickle.dump(tp, open("test_tp10x.pkl", "wb"))
        tp2 = pickle.load(open("test_tp10x.pkl"))
        self.assertTrue(fdrutils.tpDiff2(tp, tp2, VERBOSITY,
                                         checkStates=False))

        # Infer
        print "Testing inference"

        # Setup for inference
        tp.reset()
        tpPy.reset()
        setVerbosity(INFERENCE_VERBOSITY, tp, tpPy)

        patterns = numpy.zeros((40, tp.numberOfCols), dtype='uint32')
        for i in xrange(4):
            _RGEN.initializeUInt32Array(patterns[i], 2)

        for i, x in enumerate(patterns):

            x = numpy.zeros(tp.numberOfCols, dtype='uint32')
            _RGEN.initializeUInt32Array(x, 2)
            y = tp.infer(x)
            yPy = tpPy.infer(x)

            self.assertTrue(
                fdrutils.tpDiff2(tp, tpPy, VERBOSITY, checkLearn=False))
            if abs((y - yPy).sum()) > 0:
                print "C++ output", y
                print "Py output", yPy
                assert False

            if i > 0:
                tp.checkPrediction2(patterns)
                tpPy.checkPrediction2(patterns)

        print "Inference completed"
        print "===================================="

        return tp, tpPy
Example #25
    def testCheckpointMiddleOfSequence2(self):
        """More complex test of checkpointing in the middle of a sequence."""
        tp1 = TP(2048, 32, 0.21, 0.5, 11, 20, 0.1, 0.1, 1.0, 0.0, 14, False, 5,
                 2, False, 1960, 0, False, 3, 10, 5, 0, 32, 128, 32, 'normal')
        tp2 = TP(2048, 32, 0.21, 0.5, 11, 20, 0.1, 0.1, 1.0, 0.0, 14, False, 5,
                 2, False, 1960, 0, False, 3, 10, 5, 0, 32, 128, 32, 'normal')

        with open(resource_filename(__name__, 'data/tp_input.csv'),
                  'r') as fin:
            reader = csv.reader(fin)
            records = []
            for bottomUpInStr in fin:
                bottomUpIn = numpy.array(eval('[' + bottomUpInStr.strip() +
                                              ']'),
                                         dtype='int32')
                records.append(bottomUpIn)

        i = 1
        for r in records[:250]:
            print i
            i += 1
            output1 = tp1.compute(r, True, True)
            output2 = tp2.compute(r, True, True)
            self.assertTrue(numpy.array_equal(output1, output2))

        print 'Serializing and deserializing models.'

        savePath1 = os.path.join(self._tmpDir, 'tp1.bin')
        tp1.saveToFile(savePath1)
        tp3 = pickle.loads(pickle.dumps(tp1))
        tp3.loadFromFile(savePath1)

        savePath2 = os.path.join(self._tmpDir, 'tp2.bin')
        tp2.saveToFile(savePath2)
        tp4 = pickle.loads(pickle.dumps(tp2))
        tp4.loadFromFile(savePath2)

        self.assertTPsEqual(tp1, tp3)
        self.assertTPsEqual(tp2, tp4)

        for r in records[250:]:
            print i
            i += 1
            out1 = tp1.compute(r, True, True)
            out2 = tp2.compute(r, True, True)
            out3 = tp3.compute(r, True, True)
            out4 = tp4.compute(r, True, True)

            self.assertTrue(numpy.array_equal(out1, out2))
            self.assertTrue(numpy.array_equal(out1, out3))
            self.assertTrue(numpy.array_equal(out1, out4))

        self.assertTPsEqual(tp1, tp2)
        self.assertTPsEqual(tp1, tp3)
        self.assertTPsEqual(tp2, tp4)
Example #26
 def testInitDefaultTP(self):
     self.assertTrue(isinstance(TP(), TP))
Example #27
import pyaudio
import audioop
import math
from nupic.encoders import ScalarEncoder
from nupic.research.TP import TP
from termcolor import colored

# Create our NuPIC entities

enc = ScalarEncoder(n=50, w=3, minval=0, maxval=100,
                    clipInput=True, forced=True)

tp = TP(numberOfCols=50, cellsPerColumn=4, initialPerm=0.5,
        connectedPerm=0.5, minThreshold=5, newSynapseCount=5,
        permanenceInc=0.1, permanenceDec=0.1,
        activationThreshold=3, globalDecay=0.1, burnIn=1,
        checkSynapseConsistency=False, pamLength=3)

# Setup our PyAudio Stream

p = pyaudio.PyAudio()
stream = p.open(format=pyaudio.paInt16, channels=1,
                rate=int(p.get_device_info_by_index(0)['defaultSampleRate']),
                input=True, frames_per_buffer=1024 * 5)

print "%-48s %48s" % (colored("DECIBELS", "green"),
                      colored("PREDICTION", "red"))

b = 0
while 1:
Example #28
from nupic.research.TP import TP
import numpy as np

tp = TP(numberOfCols=20,
        cellsPerColumn=3,
        initialPerm=0.5,
        connectedPerm=0.5,
        minThreshold=10,
        newSynapseCount=10,
        permanenceInc=0.1,
        permanenceDec=0.0,
        activationThreshold=6,
        globalDecay=0,
        burnIn=1,
        checkSynapseConsistency=False,
        pamLength=10)

list = np.array([[1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0],
                 [0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0],
                 [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0],
                 [1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0],
                 [0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1]])

list1 = np.array([])

for i in range(30):
    for j in range(len(list)):
        tp.compute(list[j], enableLearn=True, computeInfOutput=False)

    tp.reset()
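
The loop above only trains the TP; a hedged sketch of inference on the learned sequence (reusing the same list of patterns and the compute()/getPredictedState() calls shown in the other examples) might follow:

# Sketch: replay the sequence with learning off and inspect the predicted columns.
tp.reset()
for j in range(len(list)):
    tp.compute(list[j], enableLearn=False, computeInfOutput=True)
    predictedCells = tp.getPredictedState()       # shape: (numberOfCols, cellsPerColumn)
    predictedColumns = predictedCells.max(axis=1).nonzero()[0]
    print "input %d -> predicted columns: %s" % (j, predictedColumns)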
Example #29
    def testCheckpointMiddleOfSequence2(self):
        """More complex test of checkpointing in the middle of a sequence."""
        tp1 = TP(
            2048,
            32,
            0.21,
            0.5,
            11,
            20,
            0.1,
            0.1,
            1.0,
            0.0,
            14,
            False,
            5,
            2,
            False,
            1960,
            0,
            False,
            "",
            3,
            10,
            5,
            0,
            32,
            128,
            32,
            "normal",
        )
        tp2 = TP(
            2048,
            32,
            0.21,
            0.5,
            11,
            20,
            0.1,
            0.1,
            1.0,
            0.0,
            14,
            False,
            5,
            2,
            False,
            1960,
            0,
            False,
            "",
            3,
            10,
            5,
            0,
            32,
            128,
            32,
            "normal",
        )

        with resource_stream(__name__, "data/tp_input.csv") as fin:
            reader = csv.reader(fin)
            records = []
            for bottomUpInStr in fin:
                bottomUpIn = numpy.array(eval("[" + bottomUpInStr.strip() + "]"), dtype="int32")
                records.append(bottomUpIn)

        for r in records[:250]:
            output1 = tp1.compute(r, True, True)
            output2 = tp2.compute(r, True, True)
            self.assertTrue(numpy.array_equal(output1, output2))

        tp3 = pickle.loads(pickle.dumps(tp1))
        tp4 = pickle.loads(pickle.dumps(tp2))

        i = 0
        for r in records[250:]:
            print i
            i += 1
            out1 = tp1.compute(r, True, True)
            out2 = tp2.compute(r, True, True)
            out3 = tp3.compute(r, True, True)
            out4 = tp4.compute(r, True, True)

            self.assertTPsEqual(tp1, tp2)

            self.assertTrue(numpy.array_equal(out1, out2))
            self.assertTrue(numpy.array_equal(out1, out3))
            self.assertTrue(numpy.array_equal(out1, out4))
Beispiel #30
0
        iterationOutput = numpy.zeros(shape=8, dtype="int")
        spatialPooler.compute(inputCategories[category],
                              learn=True,
                              activeArray=iterationOutput)
        print(category + ":").ljust(10), iterationOutput
    print "end iterating some test data\n"

print "initializing temporal pooler"
temporalPooler = TP(
    numberOfCols=15,
    cellsPerColumn=4,
    initialPerm=0.5,
    connectedPerm=0.5,
    minThreshold=10,
    newSynapseCount=10,
    permanenceInc=0.1,
    permanenceDec=0.0,
    activationThreshold=3,
    globalDecay=0,
    burnIn=1,
    checkSynapseConsistency=False,
    pamLength=1  #10
)
print "temporal pooler initialized, temporalPooler.numberOfCols = " + str(
    temporalPooler.numberOfCols) + "\n"

#print "temporal pooler state:"
#temporalPooler.printStates(printPrevious=False, printLearnState=False)
#wait()

trainTemporalPooler = True
# In[20]:

for column in xrange(4):
    connected = np.zeros((24,), dtype="int")
    sp.getConnectedSynapses(column, connected)
    print connected


print 'STARTING TEMPORAL POOLING'

# In[21]:

tp = TP(numberOfCols=50, cellsPerColumn=2,
        initialPerm=0.5, connectedPerm=0.5,
        minThreshold=10, newSynapseCount=10,
        permanenceInc=0.1, permanenceDec=0.0,
        activationThreshold=8,
        globalDecay=0, burnIn=1,
        checkSynapseConsistency=False,
        pamLength=10)


# In[22]:

for i in range(1):
    for note in encoded_list:
        tp.compute(note, enableLearn = True, computeInfOutput = False)
        # This function prints the segments associated with every cell.
        # If you really want to understand the TP, uncomment this line. By following
        # every step you can get an excellent understanding for exactly how the TP
        # learns.
        # tp.printCells()
Beispiel #32
0
    def _createTPs(self,
                   numCols,
                   fixedResources=False,
                   checkSynapseConsistency=True):
        """Create an instance of the appropriate temporal pooler. We isolate
    all parameters as constants specified here."""

        # Keep these fixed:
        minThreshold = 4
        activationThreshold = 8
        newSynapseCount = 15
        initialPerm = 0.3
        connectedPerm = 0.5
        permanenceInc = 0.1
        permanenceDec = 0.05
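        # Note: permanenceDec is reassigned in both branches below.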

        if fixedResources:
            permanenceDec = 0.1
            maxSegmentsPerCell = 5
            maxSynapsesPerSegment = 15
            globalDecay = 0
            maxAge = 0
        else:
            permanenceDec = 0.05
            maxSegmentsPerCell = -1
            maxSynapsesPerSegment = -1
            globalDecay = 0.0001
            maxAge = 1

        if g_testCPPTP:
            if g_options.verbosity > 1:
                print "Creating TP10X2 instance"

            cppTP = TP10X2(
                numberOfCols=numCols,
                cellsPerColumn=4,
                initialPerm=initialPerm,
                connectedPerm=connectedPerm,
                minThreshold=minThreshold,
                newSynapseCount=newSynapseCount,
                permanenceInc=permanenceInc,
                permanenceDec=permanenceDec,
                activationThreshold=activationThreshold,
                globalDecay=globalDecay,
                maxAge=maxAge,
                burnIn=1,
                seed=g_options.seed,
                verbosity=g_options.verbosity,
                checkSynapseConsistency=checkSynapseConsistency,
                pamLength=1000,
                maxSegmentsPerCell=maxSegmentsPerCell,
                maxSynapsesPerSegment=maxSynapsesPerSegment,
            )
            # Ensure we are copying over learning states for TPDiff
            cppTP.retrieveLearningStates = True

        else:
            cppTP = None

        if g_options.verbosity > 1:
            print "Creating PY TP instance"
        pyTP = TP(
            numberOfCols=numCols,
            cellsPerColumn=4,
            initialPerm=initialPerm,
            connectedPerm=connectedPerm,
            minThreshold=minThreshold,
            newSynapseCount=newSynapseCount,
            permanenceInc=permanenceInc,
            permanenceDec=permanenceDec,
            activationThreshold=activationThreshold,
            globalDecay=globalDecay,
            maxAge=maxAge,
            burnIn=1,
            seed=g_options.seed,
            verbosity=g_options.verbosity,
            pamLength=1000,
            maxSegmentsPerCell=maxSegmentsPerCell,
            maxSynapsesPerSegment=maxSynapsesPerSegment,
        )

        return cppTP, pyTP
Beispiel #34
0
  def reset(self):
    """ Reset the state of all cells.
    This is normally used between sequences while training. All internal states
    are reset to 0.
    """
    TP.reset(self)
Beispiel #35
0
def main(SEED, VERBOSITY):
    # Create the TP
    tp = TP(
            numberOfCols          = 100,
            cellsPerColumn        = 1,
            initialPerm           = 0.3,
            connectedPerm         = 0.5,
            minThreshold          = 4,
            newSynapseCount       = 7,
            permanenceInc         = 0.1,
            permanenceDec         = 0.05,
            activationThreshold   = 5,
            globalDecay           = 0,
            burnIn                = 1,
            seed                  = SEED,
            verbosity             = VERBOSITY,
            checkSynapseConsistency  = True,
            pamLength                = 1000
            )

    print
    trainingSet = _getSimplePatterns(10, 10)
    for seq in trainingSet[0:5]:
        _printOneTrainingVector(seq)


    # Train the TP
    print
    print 'Learning 1 ... A->A->A'
    for _ in range(2):
        for seq in trainingSet[0:5]:
            for _ in range(10):
                #tp.learn(seq)
                tp.compute(seq, enableLearn = True, computeInfOutput=False)
            tp.reset()

    print
    print 'Learning 2 ... A->B->C'
    for _ in range(10):
        for seq in trainingSet[0:5]:
            tp.compute(seq, enableLearn = True, computeInfOutput=False)
        tp.reset()


    # TP prediction
    # With Learning 1 alone, the TP only predicts A->A;
    # after Learning 2 is also run, it comes to predict both A->A and A->B.
    print
    print 'Running inference'
    for seq in trainingSet[0:5]:
        # tp.reset()
        # tp.resetStats()
        tp.compute(seq, enableLearn = False, computeInfOutput = True)
        tp.printStates(False, False)
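The helpers _getSimplePatterns and _printOneTrainingVector are not shown in this example. The sketch below is an assumption reconstructed only from how they are used above (10 patterns over the TP's 100 columns, each with 10 consecutive active bits):

import numpy


def _getSimplePatterns(numOnes, numPatterns):
    """Return numPatterns vectors of length numOnes * numPatterns, where
    pattern i has bits [i * numOnes, (i + 1) * numOnes) set."""
    numCols = numOnes * numPatterns
    patterns = []
    for i in xrange(numPatterns):
        x = numpy.zeros(numCols, dtype='int32')
        x[i * numOnes:(i + 1) * numOnes] = 1
        patterns.append(x)
    return patterns


def _printOneTrainingVector(x):
    """Print a pattern as a compact string of 0s and 1s."""
    print ''.join('1' if k else '0' for k in x)
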
def createTPs(includeCPP=True,
              includePy=True,
              numCols=100,
              cellsPerCol=4,
              activationThreshold=3,
              minThreshold=3,
              newSynapseCount=3,
              initialPerm=0.6,
              permanenceInc=0.1,
              permanenceDec=0.0,
              globalDecay=0.0,
              pamLength=0,
              checkSynapseConsistency=True,
              maxInfBacktrack=0,
              maxLrnBacktrack=0,
              **kwargs):
    """Create one or more TP instances, placing each into a dict keyed by
  name.

  Parameters:
  ------------------------------------------------------------------
  retval:   tps - dict of TP instances
  """

    # Keep these fixed:
    connectedPerm = 0.5

    tps = dict()

    if includeCPP:
        if VERBOSITY >= 2:
            print "Creating TP10X2 instance"

        cpp_tp = TP10X2(
            numberOfCols=numCols,
            cellsPerColumn=cellsPerCol,
            initialPerm=initialPerm,
            connectedPerm=connectedPerm,
            minThreshold=minThreshold,
            newSynapseCount=newSynapseCount,
            permanenceInc=permanenceInc,
            permanenceDec=permanenceDec,
            activationThreshold=activationThreshold,
            globalDecay=globalDecay,
            burnIn=1,
            seed=SEED,
            verbosity=VERBOSITY,
            checkSynapseConsistency=checkSynapseConsistency,
            collectStats=True,
            pamLength=pamLength,
            maxInfBacktrack=maxInfBacktrack,
            maxLrnBacktrack=maxLrnBacktrack,
        )

        # Ensure we are copying over learning states for TPDiff
        cpp_tp.retrieveLearningStates = True

        tps['CPP'] = cpp_tp

    if includePy:
        if VERBOSITY >= 2:
            print "Creating PY TP instance"

        py_tp = TP(
            numberOfCols=numCols,
            cellsPerColumn=cellsPerCol,
            initialPerm=initialPerm,
            connectedPerm=connectedPerm,
            minThreshold=minThreshold,
            newSynapseCount=newSynapseCount,
            permanenceInc=permanenceInc,
            permanenceDec=permanenceDec,
            activationThreshold=activationThreshold,
            globalDecay=globalDecay,
            burnIn=1,
            seed=SEED,
            verbosity=VERBOSITY,
            collectStats=True,
            pamLength=pamLength,
            maxInfBacktrack=maxInfBacktrack,
            maxLrnBacktrack=maxLrnBacktrack,
        )

        tps['PY '] = py_tp

    return tps
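A brief usage sketch for createTPs() follows; it is an assumption, not part of the original example, and it presumes the SEED and VERBOSITY globals referenced above are defined. It builds the dict of TPs and pushes one binary input through each returned instance.

# Hypothetical usage of createTPs(): run one input through every TP variant
# returned in the dict and collect their outputs.
import numpy

tps = createTPs(includeCPP=False, includePy=True, numCols=100, pamLength=10)

x = numpy.zeros(100, dtype='uint32')
x[0:10] = 1

outputs = {}
for name, tpInstance in tps.items():
    outputs[name] = tpInstance.compute(x, enableLearn=True,
                                       computeInfOutput=True)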
Beispiel #38
0
  def basicTest2(self, tp, numPatterns=100, numRepetitions=3, activity=15,
                 testTrimming=False, testRebuild=False):
    """Basic test (basic run of learning and inference)"""
    # Create PY TP object that mirrors the one sent in.
    tpPy = TP(numberOfCols=tp.numberOfCols, cellsPerColumn=tp.cellsPerColumn,
              initialPerm=tp.initialPerm, connectedPerm=tp.connectedPerm,
              minThreshold=tp.minThreshold, newSynapseCount=tp.newSynapseCount,
              permanenceInc=tp.permanenceInc, permanenceDec=tp.permanenceDec,
              permanenceMax=tp.permanenceMax, globalDecay=tp.globalDecay,
              activationThreshold=tp.activationThreshold,
              doPooling=tp.doPooling,
              segUpdateValidDuration=tp.segUpdateValidDuration,
              pamLength=tp.pamLength, maxAge=tp.maxAge,
              maxSeqLength=tp.maxSeqLength,
              maxSegmentsPerCell=tp.maxSegmentsPerCell,
              maxSynapsesPerSegment=tp.maxSynapsesPerSegment,
              seed=tp.seed, verbosity=tp.verbosity)

    # Ensure we are copying over learning states for TPDiff
    tp.retrieveLearningStates = True

    verbosity = VERBOSITY

    # Learn

    # Build up sequences
    sequence = fdrutils.generateCoincMatrix(nCoinc=numPatterns,
                                            length=tp.numberOfCols,
                                            activity=activity)
    for r in xrange(numRepetitions):
      for i in xrange(sequence.nRows()):

        #if i > 11:
        #  setVerbosity(6, tp, tpPy)

        if i % 10 == 0:
          tp.reset()
          tpPy.reset()

        if verbosity >= 2:
          print "\n\n    ===================================\nPattern:",
          print i, "Round:", r, "input:", sequence.getRow(i)

        y1 = tp.learn(sequence.getRow(i))
        y2 = tpPy.learn(sequence.getRow(i))

        # Ensure everything continues to work well even if we continuously
        # rebuild outSynapses structure
        if testRebuild:
          tp.cells4.rebuildOutSynapses()

        if testTrimming:
          tp.trimSegments()
          tpPy.trimSegments()

        if verbosity > 2:
          print "\n   ------  CPP states  ------ ",
          tp.printStates()
          print "\n   ------  PY states  ------ ",
          tpPy.printStates()
          if verbosity > 6:
            print "C++ cells: "
            tp.printCells()
            print "PY cells: "
            tpPy.printCells()

        if verbosity >= 3:
          print "Num segments in PY and C++", tpPy.getNumSegments(), \
              tp.getNumSegments()

        # Check if the two TP's are identical or not. This check is slow so
        # we do it every other iteration. Make it every iteration for debugging
        # as needed.
        self.assertTrue(fdrutils.tpDiff2(tp, tpPy, verbosity, False))

        # Check that outputs are identical
        self.assertLess(abs((y1 - y2).sum()), 3)

    print "Learning completed"

    self.assertTrue(fdrutils.tpDiff2(tp, tpPy, verbosity))

    # TODO: Need to check - currently failing this
    #checkCell0(tpPy)

    # Remove unconnected synapses and check TP's again

    # Test rebuild out synapses
    print "Rebuilding outSynapses"
    tp.cells4.rebuildOutSynapses()
    self.assertTrue(fdrutils.tpDiff2(tp, tpPy, VERBOSITY))

    print "Trimming segments"
    tp.trimSegments()
    tpPy.trimSegments()
    self.assertTrue(fdrutils.tpDiff2(tp, tpPy, VERBOSITY))

    # Save and reload after learning
    print "Pickling and unpickling"
    tp.makeCells4Ephemeral = False
    with open("test_tp10x.pkl", "wb") as f:
      pickle.dump(tp, f)
    with open("test_tp10x.pkl", "rb") as f:
      tp2 = pickle.load(f)
    self.assertTrue(fdrutils.tpDiff2(tp, tp2, VERBOSITY, checkStates=False))

    # Infer
    print "Testing inference"

    # Setup for inference
    tp.reset()
    tpPy.reset()
    setVerbosity(INFERENCE_VERBOSITY, tp, tpPy)

    patterns = numpy.zeros((40, tp.numberOfCols), dtype='uint32')
    for i in xrange(4):
      _RGEN.initializeUInt32Array(patterns[i], 2)

    for i, x in enumerate(patterns):

      x = numpy.zeros(tp.numberOfCols, dtype='uint32')
      _RGEN.initializeUInt32Array(x, 2)
      y = tp.infer(x)
      yPy = tpPy.infer(x)

      self.assertTrue(fdrutils.tpDiff2(tp, tpPy, VERBOSITY, checkLearn=False))
      if abs((y - yPy).sum()) > 0:
        print "C++ output", y
        print "Py output", yPy
        assert False

      if i > 0:
        tp.checkPrediction2(patterns)
        tpPy.checkPrediction2(patterns)

    print "Inference completed"
    print "===================================="

    return tp, tpPy
    def setUp(cls):
        tmPy = TemporalMemoryPy(columnDimensions=[2048],
                                cellsPerColumn=32,
                                initialPermanence=0.5,
                                connectedPermanence=0.8,
                                minThreshold=10,
                                maxNewSynapseCount=12,
                                permanenceIncrement=0.1,
                                permanenceDecrement=0.05,
                                activationThreshold=15)

        tmCPP = TemporalMemoryCPP(columnDimensions=[2048],
                                  cellsPerColumn=32,
                                  initialPermanence=0.5,
                                  connectedPermanence=0.8,
                                  minThreshold=10,
                                  maxNewSynapseCount=12,
                                  permanenceIncrement=0.1,
                                  permanenceDecrement=0.05,
                                  activationThreshold=15)

        tp = TP(numberOfCols=2048,
                cellsPerColumn=32,
                initialPerm=0.5,
                connectedPerm=0.8,
                minThreshold=10,
                newSynapseCount=12,
                permanenceInc=0.1,
                permanenceDec=0.05,
                activationThreshold=15,
                globalDecay=0,
                burnIn=1,
                checkSynapseConsistency=False,
                pamLength=1)

        tp10x2 = TP10X2(numberOfCols=2048,
                        cellsPerColumn=32,
                        initialPerm=0.5,
                        connectedPerm=0.8,
                        minThreshold=10,
                        newSynapseCount=12,
                        permanenceInc=0.1,
                        permanenceDec=0.05,
                        activationThreshold=15,
                        globalDecay=0,
                        burnIn=1,
                        checkSynapseConsistency=False,
                        pamLength=1)

        def tmComputeFn(pattern, instance):
            instance.compute(pattern, True)

        def tpComputeFn(pattern, instance):
            array = cls._patternToNumpyArray(pattern)
            instance.compute(array, enableLearn=True, computeInfOutput=True)

        return (
            ("TM (py)", tmPy, tmComputeFn),
            ("TM (C++)", tmCPP, tmComputeFn),
            ("TP", tp, tpComputeFn),
            ("TP10X2", tp10x2, tpComputeFn),
        )
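How the returned (name, instance, computeFn) triples are driven is not shown here. A hedged driver sketch follows, assuming the triples are bound to a variable named instances and that each pattern is a set of active column indices drawn from the 2048 columns configured above:

import random

# `instances` is assumed to hold the (name, instance, computeFn) triples
# returned by the function above.
random.seed(42)
sequence = [set(random.sample(xrange(2048), 40)) for _ in xrange(100)]

for name, instance, computeFn in instances:
    for pattern in sequence:
        computeFn(pattern, instance)
    print name, "processed", len(sequence), "patterns"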