Example No. 1
    def testCheckpointLearned(self):
        # Create a model and give it some inputs to learn.
        tm1 = BacktrackingTM(numberOfCols=100,
                             cellsPerColumn=12,
                             verbosity=VERBOSITY)
        sequences = [self.generateSequence() for _ in range(5)]
        train = list(itertools.chain.from_iterable(sequences[:3]))
        for bottomUpInput in train:
            if bottomUpInput is None:
                tm1.reset()
            else:
                tm1.compute(bottomUpInput, True, True)

        # Serialize and deserialize the TM.
        checkpointPath = os.path.join(self._tmpDir, 'a')
        tm1.saveToFile(checkpointPath)
        tm2 = pickle.loads(pickle.dumps(tm1))
        tm2.loadFromFile(checkpointPath)

        # Check that the TMs are the same.
        self.assertTMsEqual(tm1, tm2)

        # Feed some data into the models.
        test = list(itertools.chain.from_iterable(sequences[3:]))
        for bottomUpInput in test:
            if bottomUpInput is None:
                tm1.reset()
                tm2.reset()
            else:
                result1 = tm1.compute(bottomUpInput, True, True)
                result2 = tm2.compute(bottomUpInput, True, True)

                self.assertTMsEqual(tm1, tm2)
                self.assertTrue(numpy.array_equal(result1, result2))
Example No. 2
  def testCheckpointMiddleOfSequence(self):
    # Create a model and give it some inputs to learn.
    tm1 = BacktrackingTM(numberOfCols=100, cellsPerColumn=12,
                         verbosity=VERBOSITY)
    sequences = [self.generateSequence() for _ in xrange(5)]
    train = list(itertools.chain.from_iterable(sequences[:3] +
                                               [sequences[3][:5]]))
    for bottomUpInput in train:
      if bottomUpInput is None:
        tm1.reset()
      else:
        tm1.compute(bottomUpInput, True, True)

    # Serialize and deserialize the TM.
    checkpointPath = os.path.join(self._tmpDir, 'a')
    tm1.saveToFile(checkpointPath)
    tm2 = pickle.loads(pickle.dumps(tm1))
    tm2.loadFromFile(checkpointPath)

    # Check that the TMs are the same.
    self.assertTMsEqual(tm1, tm2)

    # Feed some data into the models.
    test = list(itertools.chain.from_iterable([sequences[3][5:]] +
                                              sequences[3:]))
    for bottomUpInput in test:
      if bottomUpInput is None:
        tm1.reset()
        tm2.reset()
      else:
        result1 = tm1.compute(bottomUpInput, True, True)
        result2 = tm2.compute(bottomUpInput, True, True)

        self.assertTMsEqual(tm1, tm2)
        self.assertTrue(numpy.array_equal(result1, result2))
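The two examples above exercise the same checkpoint pattern: train the TM, save its state with saveToFile, clone the object through pickle, and restore the saved state with loadFromFile. A minimal sketch of that round trip, assuming nupic is installed; the column/cell counts and the temp directory are illustrative stand-ins for the test fixtures:

import os
import pickle
import tempfile

from nupic.algorithms.backtracking_tm import BacktrackingTM

# Illustrative configuration (not required values).
tm1 = BacktrackingTM(numberOfCols=100, cellsPerColumn=12)

checkpointPath = os.path.join(tempfile.mkdtemp(), 'tm.checkpoint')
tm1.saveToFile(checkpointPath)           # persist the learned state to disk

tm2 = pickle.loads(pickle.dumps(tm1))    # clone the Python object structure
tm2.loadFromFile(checkpointPath)         # then restore the saved state into it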
Example No. 3
 def reset(self):
     """
     Overrides :meth:`nupic.algorithms.backtracking_tm.BacktrackingTM.reset`.
     """
     if self.verbosity >= 3:
         print "TM Reset"
     self._setStatePointers()
     self.cells4.reset()
     BacktrackingTM.reset(self)
Example No. 4
 def reset(self):
   """
   Overrides :meth:`nupic.algorithms.backtracking_tm.BacktrackingTM.reset`.
   """
   if self.verbosity >= 3:
     print "TM Reset"
   self._setStatePointers()
   self.cells4.reset()
   BacktrackingTM.reset(self)
Example No. 5
 def reset(self):
   """ Reset the state of all cells.
   This is normally used between sequences while training. All internal states
   are reset to 0.
   """
   if self.verbosity >= 3:
     print "TM Reset"
   self._setStatePointers()
   self.cells4.reset()
   BacktrackingTM.reset(self)
Example No. 6
 def reset(self):
     """ Reset the state of all cells.
     This is normally used between sequences while training. All internal states
     are reset to 0.
     """
     if self.verbosity >= 3:
         print "TM Reset"
     self._setStatePointers()
     self.cells4.reset()
     BacktrackingTM.reset(self)
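The training loops in the checkpoint tests above show the calling pattern this reset method supports: a None entry marks a sequence boundary, and reset() clears the TM's internal state before the next sequence begins. A small runnable sketch of that pattern; the randomPattern helper and all parameter values are hypothetical:

import numpy

from nupic.algorithms.backtracking_tm import BacktrackingTM

tm = BacktrackingTM(numberOfCols=100, cellsPerColumn=12)

def randomPattern(numCols=100, numActive=5):
    # Hypothetical helper: dense binary vector with numActive active bits.
    pattern = numpy.zeros(numCols, dtype='uint32')
    pattern[numpy.random.choice(numCols, numActive, replace=False)] = 1
    return pattern

# Three sequences of ten patterns each, separated by None sentinels.
sequences = [[randomPattern() for _ in range(10)] + [None] for _ in range(3)]
for bottomUpInput in [item for seq in sequences for item in seq]:
    if bottomUpInput is None:
        tm.reset()                             # clear state between sequences
    else:
        tm.compute(bottomUpInput, True, True)  # learn and compute inference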
Example No. 7
    def testSerializationMiddleOfSequence(self):
        # Create a model and give it some inputs to learn.
        tm1 = BacktrackingTM(numberOfCols=100,
                             cellsPerColumn=12,
                             verbosity=VERBOSITY)
        sequences = [self.generateSequence() for _ in range(5)]
        train = list(
            itertools.chain.from_iterable(sequences[:3] + [sequences[3][:5]]))
        for bottomUpInput in train:
            if bottomUpInput is None:
                tm1.reset()
            else:
                tm1.compute(bottomUpInput, True, True)

        # Serialize and deserialize the TM.
        tmProto = BacktrackingTM.getSchema().new_message()
        tm1.write(tmProto)
        checkpointPath = os.path.join(self._tmpDir, 'a')
        with open(checkpointPath, "wb") as f:
            tmProto.write(f)
        with open(checkpointPath, "rb") as f:
            tmProto = BacktrackingTM.getSchema().read(f)
        tm2 = BacktrackingTM.read(tmProto)

        # Check that the TMs are the same.
        self.assertTMsEqual(tm1, tm2)

        # Feed some data into the models.
        test = list(
            itertools.chain.from_iterable([sequences[3][5:]] + sequences[3:]))
        for bottomUpInput in test:
            if bottomUpInput is None:
                tm1.reset()
                tm2.reset()
            else:
                result1 = tm1.compute(bottomUpInput, True, True)
                result2 = tm2.compute(bottomUpInput, True, True)

                self.assertTMsEqual(tm1, tm2)
                self.assertTrue(numpy.array_equal(result1, result2))
Example No. 8
  def testSerializationMiddleOfSequence(self):
    # Create a model and give it some inputs to learn.
    tm1 = BacktrackingTM(numberOfCols=100, cellsPerColumn=12,
                         verbosity=VERBOSITY)
    sequences = [self.generateSequence() for _ in xrange(5)]
    train = list(itertools.chain.from_iterable(sequences[:3] +
                                               [sequences[3][:5]]))
    for bottomUpInput in train:
      if bottomUpInput is None:
        tm1.reset()
      else:
        tm1.compute(bottomUpInput, True, True)

    # Serialize and deserialize the TM.
    tmProto = BacktrackingTM.getSchema().new_message()
    tm1.write(tmProto)
    checkpointPath = os.path.join(self._tmpDir, 'a')
    with open(checkpointPath, "wb") as f:
      tmProto.write(f)
    with open(checkpointPath, "rb") as f:
      tmProto = BacktrackingTM.getSchema().read(f)
    tm2 = BacktrackingTM.read(tmProto)

    # Check that the TMs are the same.
    self.assertTMsEqual(tm1, tm2)

    # Feed some data into the models.
    test = list(itertools.chain.from_iterable([sequences[3][5:]] +
                                              sequences[3:]))
    for bottomUpInput in test:
      if bottomUpInput is None:
        tm1.reset()
        tm2.reset()
      else:
        result1 = tm1.compute(bottomUpInput, True, True)
        result2 = tm2.compute(bottomUpInput, True, True)

        self.assertTMsEqual(tm1, tm2)
        self.assertTrue(numpy.array_equal(result1, result2))
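The two serialization tests above use the Cap'n Proto path rather than pickle: write the TM into a schema message, persist the message to disk, read it back, and rebuild an equivalent TM. A condensed sketch of that round trip, assuming nupic is built with capnp support; the parameters and file name are illustrative:

import os
import tempfile

from nupic.algorithms.backtracking_tm import BacktrackingTM

tm1 = BacktrackingTM(numberOfCols=100, cellsPerColumn=12)

proto = BacktrackingTM.getSchema().new_message()
tm1.write(proto)                              # fill the message from tm1

checkpointPath = os.path.join(tempfile.mkdtemp(), 'tm.bin')
with open(checkpointPath, 'wb') as f:
    proto.write(f)                            # persist the serialized message

with open(checkpointPath, 'rb') as f:
    proto = BacktrackingTM.getSchema().read(f)
tm2 = BacktrackingTM.read(proto)              # rebuild an equivalent TM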
Example No. 9
    def basicTest2(self,
                   tm,
                   numPatterns=100,
                   numRepetitions=3,
                   activity=15,
                   testTrimming=False,
                   testRebuild=False):
        """Basic test (basic run of learning and inference)"""
        # Create PY TM object that mirrors the one sent in.
        tmPy = BacktrackingTM(numberOfCols=tm.numberOfCols,
                              cellsPerColumn=tm.cellsPerColumn,
                              initialPerm=tm.initialPerm,
                              connectedPerm=tm.connectedPerm,
                              minThreshold=tm.minThreshold,
                              newSynapseCount=tm.newSynapseCount,
                              permanenceInc=tm.permanenceInc,
                              permanenceDec=tm.permanenceDec,
                              permanenceMax=tm.permanenceMax,
                              globalDecay=tm.globalDecay,
                              activationThreshold=tm.activationThreshold,
                              doPooling=tm.doPooling,
                              segUpdateValidDuration=tm.segUpdateValidDuration,
                              pamLength=tm.pamLength,
                              maxAge=tm.maxAge,
                              maxSeqLength=tm.maxSeqLength,
                              maxSegmentsPerCell=tm.maxSegmentsPerCell,
                              maxSynapsesPerSegment=tm.maxSynapsesPerSegment,
                              seed=tm.seed,
                              verbosity=tm.verbosity)

        # Ensure we are copying over learning states for TMDiff
        tm.retrieveLearningStates = True

        verbosity = VERBOSITY

        # Learn

        # Build up sequences
        sequence = fdrutils.generateCoincMatrix(nCoinc=numPatterns,
                                                length=tm.numberOfCols,
                                                activity=activity)
        for r in xrange(numRepetitions):
            for i in xrange(sequence.nRows()):

                #if i > 11:
                #  setVerbosity(6, tm, tmPy)

                if i % 10 == 0:
                    tm.reset()
                    tmPy.reset()

                if verbosity >= 2:
                    print "\n\n    ===================================\nPattern:",
                    print i, "Round:", r, "input:", sequence.getRow(i)

                y1 = tm.learn(sequence.getRow(i))
                y2 = tmPy.learn(sequence.getRow(i))

                # Ensure everything continues to work well even if we continuously
                # rebuild the outSynapses structure
                if testRebuild:
                    tm.cells4.rebuildOutSynapses()

                if testTrimming:
                    tm.trimSegments()
                    tmPy.trimSegments()

                if verbosity > 2:
                    print "\n   ------  CPP states  ------ ",
                    tm.printStates()
                    print "\n   ------  PY states  ------ ",
                    tmPy.printStates()
                    if verbosity > 6:
                        print "C++ cells: "
                        tm.printCells()
                        print "PY cells: "
                        tmPy.printCells()

                if verbosity >= 3:
                    print "Num segments in PY and C++", tmPy.getNumSegments(), \
                        tm.getNumSegments()

                # Check that the two TMs are identical. This check is slow, so it
                # could be run every other iteration; here it runs every iteration
                # so any divergence is caught immediately.
                self.assertTrue(fdrutils.tmDiff2(tm, tmPy, verbosity, False))

                # Check that outputs match to within a small tolerance
                self.assertLess(abs((y1 - y2).sum()), 3)

        print "Learning completed"

        self.assertTrue(fdrutils.tmDiff2(tm, tmPy, verbosity))

        # TODO: Need to check - currently failing this
        #checkCell0(tmPy)

        # Remove unconnected synapses and check the TMs again

        # Test rebuild out synapses
        print "Rebuilding outSynapses"
        tm.cells4.rebuildOutSynapses()
        self.assertTrue(fdrutils.tmDiff2(tm, tmPy, VERBOSITY))

        print "Trimming segments"
        tm.trimSegments()
        tmPy.trimSegments()
        self.assertTrue(fdrutils.tmDiff2(tm, tmPy, VERBOSITY))

        # Save and reload after learning
        print "Pickling and unpickling"
        tm.makeCells4Ephemeral = False
        with open("test_tm_cpp.pkl", "wb") as f:
            pickle.dump(tm, f)
        with open("test_tm_cpp.pkl", "rb") as f:
            tm2 = pickle.load(f)
        self.assertTrue(fdrutils.tmDiff2(tm, tm2, VERBOSITY,
                                         checkStates=False))

        # Infer
        print "Testing inference"

        # Setup for inference
        tm.reset()
        tmPy.reset()
        setVerbosity(INFERENCE_VERBOSITY, tm, tmPy)

        patterns = numpy.zeros((40, tm.numberOfCols), dtype='uint32')
        for i in xrange(4):
            _RGEN.initializeUInt32Array(patterns[i], 2)

        for i, x in enumerate(patterns):

            x = numpy.zeros(tm.numberOfCols, dtype='uint32')
            _RGEN.initializeUInt32Array(x, 2)
            y = tm.infer(x)
            yPy = tmPy.infer(x)

            self.assertTrue(
                fdrutils.tmDiff2(tm, tmPy, VERBOSITY, checkLearn=False))
            if abs((y - yPy).sum()) > 0:
                print "C++ output", y
                print "Py output", yPy
                assert False

            if i > 0:
                tm._checkPrediction(patterns)
                tmPy._checkPrediction(patterns)

        print "Inference completed"
        print "===================================="

        return tm, tmPy
Example No. 10
  def basicTest2(self, tm, numPatterns=100, numRepetitions=3, activity=15,
                 testTrimming=False, testRebuild=False):
    """Basic test (basic run of learning and inference)"""
    # Create PY TM object that mirrors the one sent in.
    tmPy = BacktrackingTM(numberOfCols=tm.numberOfCols,
                          cellsPerColumn=tm.cellsPerColumn,
                          initialPerm=tm.initialPerm,
                          connectedPerm=tm.connectedPerm,
                          minThreshold=tm.minThreshold,
                          newSynapseCount=tm.newSynapseCount,
                          permanenceInc=tm.permanenceInc,
                          permanenceDec=tm.permanenceDec,
                          permanenceMax=tm.permanenceMax,
                          globalDecay=tm.globalDecay,
                          activationThreshold=tm.activationThreshold,
                          doPooling=tm.doPooling,
                          segUpdateValidDuration=tm.segUpdateValidDuration,
                          pamLength=tm.pamLength, maxAge=tm.maxAge,
                          maxSeqLength=tm.maxSeqLength,
                          maxSegmentsPerCell=tm.maxSegmentsPerCell,
                          maxSynapsesPerSegment=tm.maxSynapsesPerSegment,
                          seed=tm.seed, verbosity=tm.verbosity)

    # Ensure we are copying over learning states for TMDiff
    tm.retrieveLearningStates = True

    verbosity = VERBOSITY

    # Learn

    # Build up sequences
    sequence = fdrutils.generateCoincMatrix(nCoinc=numPatterns,
                                            length=tm.numberOfCols,
                                            activity=activity)
    for r in xrange(numRepetitions):
      for i in xrange(sequence.nRows()):

        #if i > 11:
        #  setVerbosity(6, tm, tmPy)

        if i % 10 == 0:
          tm.reset()
          tmPy.reset()

        if verbosity >= 2:
          print "\n\n    ===================================\nPattern:",
          print i, "Round:", r, "input:", sequence.getRow(i)

        y1 = tm.learn(sequence.getRow(i))
        y2 = tmPy.learn(sequence.getRow(i))

        # Ensure everything continues to work well even if we continuously
        # rebuild the outSynapses structure
        if testRebuild:
          tm.cells4.rebuildOutSynapses()

        if testTrimming:
          tm.trimSegments()
          tmPy.trimSegments()

        if verbosity > 2:
          print "\n   ------  CPP states  ------ ",
          tm.printStates()
          print "\n   ------  PY states  ------ ",
          tmPy.printStates()
          if verbosity > 6:
            print "C++ cells: "
            tm.printCells()
            print "PY cells: "
            tmPy.printCells()

        if verbosity >= 3:
          print "Num segments in PY and C++", tmPy.getNumSegments(), \
              tm.getNumSegments()

        # Check that the two TMs are identical. This check is slow, so it
        # could be run every other iteration; here it runs every iteration
        # so any divergence is caught immediately.
        self.assertTrue(fdrutils.tmDiff2(tm, tmPy, verbosity, False))

        # Check that outputs match to within a small tolerance
        self.assertLess(abs((y1 - y2).sum()), 3)

    print "Learning completed"

    self.assertTrue(fdrutils.tmDiff2(tm, tmPy, verbosity))

    # TODO: Need to check - currently failing this
    #checkCell0(tmPy)

    # Remove unconnected synapses and check the TMs again

    # Test rebuild out synapses
    print "Rebuilding outSynapses"
    tm.cells4.rebuildOutSynapses()
    self.assertTrue(fdrutils.tmDiff2(tm, tmPy, VERBOSITY))

    print "Trimming segments"
    tm.trimSegments()
    tmPy.trimSegments()
    self.assertTrue(fdrutils.tmDiff2(tm, tmPy, VERBOSITY))

    # Save and reload after learning
    print "Pickling and unpickling"
    tm.makeCells4Ephemeral = False
    with open("test_tm_cpp.pkl", "wb") as f:
      pickle.dump(tm, f)
    with open("test_tm_cpp.pkl", "rb") as f:
      tm2 = pickle.load(f)
    self.assertTrue(fdrutils.tmDiff2(tm, tm2, VERBOSITY, checkStates=False))

    # Infer
    print "Testing inference"

    # Setup for inference
    tm.reset()
    tmPy.reset()
    setVerbosity(INFERENCE_VERBOSITY, tm, tmPy)

    patterns = numpy.zeros((40, tm.numberOfCols), dtype='uint32')
    for i in xrange(4):
      _RGEN.initializeUInt32Array(patterns[i], 2)

    for i, x in enumerate(patterns):

      x = numpy.zeros(tm.numberOfCols, dtype='uint32')
      _RGEN.initializeUInt32Array(x, 2)
      y = tm.infer(x)
      yPy = tmPy.infer(x)

      self.assertTrue(fdrutils.tmDiff2(tm, tmPy, VERBOSITY, checkLearn=False))
      if abs((y - yPy).sum()) > 0:
        print "C++ output", y
        print "Py output", yPy
        assert False

      if i > 0:
        tm.checkPrediction2(patterns)
        tmPy.checkPrediction2(patterns)

    print "Inference completed"
    print "===================================="

    return tm, tmPy
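The pickling step near the end of basicTest2 sets makeCells4Ephemeral to False before dumping, so that the underlying Cells4 state is carried along in the pickle. A short sketch of that round trip in isolation; the import path for the C++-backed class and the parameter values are assumptions, not taken from the code above:

import pickle

# Assumed module path for the C++-backed TM; adjust to your nupic install.
from nupic.algorithms.backtracking_tm_cpp import BacktrackingTMCPP

tm = BacktrackingTMCPP(numberOfCols=100, cellsPerColumn=12)

tm.makeCells4Ephemeral = False               # include Cells4 state in the pickle
with open("test_tm_cpp.pkl", "wb") as f:
    pickle.dump(tm, f)
with open("test_tm_cpp.pkl", "rb") as f:
    tm2 = pickle.load(f)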