def __init__(self, world, parameters=default_parameters):
     self.world = world
     self.area_size = parameters['num_cells']
     self.num_areas = len(self.world.coordinates)
     # Make an RDSE for every location.
     self.enc = np.zeros(self.world.dims, dtype=object)
     enc_parameters = RDSE_Parameters()
     enc_parameters.size = self.area_size
     enc_parameters.sparsity = parameters['local_sparsity']
     enc_parameters.category = True
     for coords in self.world.coordinates:
         self.enc[coords] = RDSE(enc_parameters)
     # Make empty buffers for the working data.
     self.local = np.zeros(self.world.dims, dtype=object)
     self.gnw = np.zeros(self.world.dims, dtype=object)
     for coords in self.world.coordinates:
         self.local[coords] = SDR((self.area_size, ))
         self.gnw[coords] = SDR((self.area_size, ))
     # Make an instance of the model at every location.
     self.apical_denrites = np.zeros(self.world.dims, dtype=object)
     self.gnw_size = self.num_areas * self.area_size
     for coords in self.world.coordinates:
         self.apical_denrites[coords] = TemporalMemory(
             [self.area_size],  # column_dimensions
             cellsPerColumn=1,
             externalPredictiveInputs=self.gnw_size,
             seed=0,
             **parameters['apical_denrites'])
 def advance(self, learn=True):
     self.world.advance()
     world_data = self.world.state()
     # Compute the local activity by encoding the sensory data into an SDR.
     self.local = np.zeros(self.world.dims, dtype=object)
      for coords in self.world.coordinates:
          character = world_data[coords]
         enc = self.enc[coords]
         if character == BACKGROUND:
             self.local[coords] = SDR((self.area_size, ))
         else:
             self.local[coords] = enc.encode(ord(character))
     # Compute the apical dendrites.
     prev_gnw = SDR((self.gnw_size, )).concatenate(list(self.gnw.flat))
     self.gnw = np.zeros(self.world.dims, dtype=object)
     for coords in self.world.coordinates:
          self.apical_denrites[coords].reset()
          self.apical_denrites[coords].activateDendrites(
              learn, prev_gnw, prev_gnw)
         apical_activity = self.apical_denrites[coords].getPredictiveCells(
         ).reshape((self.area_size, ))
         self.gnw[coords] = SDR(
             (self.area_size, )).intersection(self.local[coords],
                                              apical_activity)
          self.apical_denrites[coords].activateCells(self.local[coords],
                                                     learn)
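A driver for this model would simply step the world and the areas together. A hypothetical sketch (the `Model` and `World` names are assumptions; this excerpt shows only the two methods):

# Hypothetical usage, assuming the methods above belong to a class `Model`
# and that `World` supplies dims, coordinates, advance() and state().
world = World()
model = Model(world)
for step in range(100):
    model.advance(learn=True)  # encode -> apical prediction -> GNW update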
Example #3
    def _runGetPermanenceTrial(self, float_type):
        """ 
    Check that getPermanence() returns values for a given float_type. 
    These tests are sensitive to the data type. This because if you pass a 
    numpy array of the type matching the C++ argument then PyBind11 does not
    convert the data, and the C++ code can modify the data in-place
    If you pass the wrong data type, then PyBind11 does a conversion so 
    your C++ function only gets a converted copy of a numpy array, and any changes 
    are lost after returning
    """
        inputs = SDR(100).randomize(.05)
        active = SDR(100)
        sp = SP(inputs.dimensions, active.dimensions, stimulusThreshold=1)

        # Make sure that the perms start off zero.
        perms_in = np.zeros(sp.getNumInputs(), dtype=float_type)
        sp.setPermanence(0, perms_in)
        perms = np.zeros(sp.getNumInputs(), dtype=float_type)
        sp.getPermanence(0, perms)
        assert (perms.sum() == 0.0)

        for i in range(10):
            sp.compute(inputs, True, active)

        # There should be at least one nonzero permanence.
        total = np.zeros(sp.getNumInputs(), dtype=float_type)
        for i in range(100):
            perms = np.zeros(sp.getNumInputs(), dtype=float_type)
            sp.getPermanence(i, perms)
            total = total + perms
        assert (total.sum() > 0.0)
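The dtype sensitivity described in the docstring can be demonstrated directly. A minimal sketch, assuming the library was built with 32-bit permanences so that only a float32 buffer is filled in place (per the docstring, a mismatched dtype gets a converted copy and the changes are lost):

import numpy as np
from htm.bindings.sdr import SDR
from htm.bindings.algorithms import SpatialPooler as SP

inputs = SDR(100).randomize(.05)
active = SDR(100)
sp = SP(inputs.dimensions, active.dimensions, stimulusThreshold=1)
sp.compute(inputs, True, active)

# Matching dtype: PyBind11 passes the buffer through and it is filled in place.
matching = np.zeros(sp.getNumInputs(), dtype=np.float32)
sp.getPermanence(0, matching)
assert matching.sum() > 0.0

# Mismatched dtype: PyBind11 hands C++ a converted copy, so this buffer
# stays all zeros after the call.
mismatched = np.zeros(sp.getNumInputs(), dtype=np.float64)
sp.getPermanence(0, mismatched)
assert mismatched.sum() == 0.0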
Example #4
 def testCategories(self):
     # Test two categories.
     p = ScalarEncoderParameters()
     p.minimum    = 0
     p.maximum    = 1
     p.activeBits = 3
     p.radius     = 1
     enc = ScalarEncoder(p)
     sdr = SDR( enc.dimensions )
     zero = enc.encode( 0 )
     one  = enc.encode( 1 )
     assert( zero.getOverlap( one ) == 0 )
     # Test three categories.
     p = ScalarEncoderParameters()
     p.minimum    = 0
     p.maximum    = 2
     p.activeBits = 3
     p.radius     = 1
     enc = ScalarEncoder(p)
     sdr = SDR( enc.dimensions )
     zero = enc.encode( 0 )
     one  = enc.encode( 1 )
     two  = enc.encode( 2 )
     assert( zero.getOverlap( one ) == 0 )
     assert( one.getOverlap( two ) == 0 )
     assert( two.getSum() == 3 )
Example #5
 def testReshape(self):
     # Convert SDR dimensions from (4 x 4) to (8 x 2)
     A = SDR([4, 4])
     A.coordinates = ([1, 1, 2], [0, 1, 2])
     B = A.reshape([8, 2])
     assert ((np.array(B.coordinates) == ([2, 2, 5], [0, 1, 0])).all())
     assert (A is B)
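reshape keeps the underlying flat indices and only changes the coordinate view, which is why A and B are the same object. A quick check (not part of the original test):

from htm.bindings.sdr import SDR

A = SDR([4, 4])
A.coordinates = ([1, 1, 2], [0, 1, 2])
flat_before = list(A.sparse)          # flat indices in the 4x4 layout: [4, 5, 10]
B = A.reshape([8, 2])
assert list(B.sparse) == flat_before  # same bits, reinterpreted as 8x2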
Example #6
    def testAdaptShouldRemoveSegments(self):
        """
    Test that connections are generated on predefined segments.
    """
        random = Random(1981)
        active_cells = np.array(random.sample(
            np.arange(0, NUM_CELLS, 1, dtype="uint32"), 40),
                                dtype="uint32")
        active_cells.sort()

        presynaptic_input = list(range(0, 10))
        inputSDR = SDR(1024)
        inputSDR.sparse = presynaptic_input

        connections = Connections(NUM_CELLS, 0.51)
        for i in range(NUM_CELLS):
            seg = connections.createSegment(i, 1)

        for cell in active_cells:
            segments = connections.segmentsForCell(cell)
            self.assertEqual(len(segments), 1,
                             "Segments were prematurely destroyed.")
            segment = segments[0]
            connections.adaptSegment(segment, inputSDR, 0.1, 0.001, True)
            segments = connections.segmentsForCell(cell)
            self.assertEqual(len(segments), 0, "Segments were not destroyed.")
Example #7
 def testAdaptShouldRemoveSegments(self):
   """
   Test that adaptSegment with pruneZeroSynapses=True and segmentThreshold=1
   destroys the adapted segment even when the cell has other segments.
   """
   random = Random(1981)
   active_cells = np.array(random.sample(np.arange(0, NUM_CELLS, 1, dtype="uint32"), 40), dtype="uint32")
   active_cells.sort()
   
   presynaptic_input = list(range(0, 10))
   inputSDR = SDR(1024)
   inputSDR.sparse = presynaptic_input
   
   connections = Connections(NUM_CELLS, 0.51) 
   for i in range(NUM_CELLS):
     seg = connections.createSegment(i, 2)
     seg = connections.createSegment(i, 2) #create 2 segments on each cell
   
   for cell in active_cells:
       segments = connections.segmentsForCell(cell)
       self.assertEqual(len(segments), 2, "Segments were prematurely destroyed.")
       segment = segments[0]
        numSynapsesOnSegment = connections.numSynapses(segment)
       connections.adaptSegment(segment, inputSDR, 0.1, 0.001, pruneZeroSynapses=True, segmentThreshold=1) #set to =1 so that segments get always deleted in this test
       segments = connections.segmentsForCell(cell)
       self.assertEqual(len(segments), 1, "Segments were not destroyed.")
Example #8
 def _trainThalamus(self, t):
     # Learn
     L6Pattern = SDR(t.l6CellCount)
     L6Pattern.sparse = [0, 1, 2, 3, 4, 5]
     t.learnL6Pattern(L6Pattern, [(0, 0), (2, 3)])
     L6Pattern.sparse = [6, 7, 8, 9, 10]
     t.learnL6Pattern(L6Pattern, [(1, 1), (3, 4)])
Example #9
    def testNumSynapses(self):
        """
    Test that connections are generated on predefined segments.
    """
        random = Random(1981)
        active_cells = np.array(random.sample(
            np.arange(0, NUM_CELLS, 1, dtype="uint32"), 40),
                                dtype="uint32")
        active_cells.sort()

        presynaptic_input = list(range(0, 10))
        presynaptic_input_set = set(presynaptic_input)
        inputSDR = SDR(1024)
        inputSDR.sparse = presynaptic_input

        connections = Connections(NUM_CELLS, 0.3)
        for i in range(NUM_CELLS):
            seg = connections.createSegment(i, 1)

        for cell in active_cells:
            segments = connections.segmentsForCell(cell)
            segment = segments[0]
            for c in presynaptic_input:
                connections.createSynapse(segment, c, 0.1)

            connections.adaptSegment(segment, inputSDR, 0.1, 0.0, False)

            num_synapses = connections.numSynapses(segment)
            self.assertEqual(num_synapses, len(presynaptic_input),
                             "Missing synapses")

        self.assertEqual(connections.numSynapses(),
                         len(presynaptic_input) * 40, "Missing synapses")
Example #10
    def testThalamusBursting(self, verbose=False):
        """
        Test that thalamus relays around the trained locations,
        and also does busrts.
        """
        t = Thalamus(trnThreshold=6)

        self._trainThalamus(t)
        ff = np.zeros((32, 32))
        ff.reshape(-1)[[8, 9, 98, 99]] = 1.0

        L6Pattern = SDR(t.l6CellCount)
        L6Pattern.sparse = [0, 1, 2, 3, 4, 5]

        result = self._inferThalamus(t, L6Pattern, ff)

        non_bursting = result[result >= 0.4].nonzero()[0].tolist()
        bursting = result[result >= 1.4].nonzero()[0].tolist()
        self.assertEqual([
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
            19
        ], non_bursting, "Non-bursting not correct")
        self.assertEqual([0, 1, 2, 3, 4, 5, 6, 7, 8], bursting,
                         "Bursting not correct")

        if verbose:
            print(non_bursting)
            print(bursting)
Example #11
    def RunExperiment1(self):
        global fig_expect

        # put agent in the environment
        self.agent.set_env(self.env, 1, 1, 1,
                           1)  # is on [1,1] and will go to [1,1]

        agentDir = Direction.RIGHT

        self.iterationNo = 0

        for i in range(3):
            for x in range(2, 18):
                for y in range(2, 18):
                    print("Iteration:" + str(self.iterationNo))
                    self.agent.move(x, y)
                    self.SystemCalculate(self.agent.get_feature(Direction.UP),
                                         learning=True)

        expectedObject = [x[:] for x in [[0] * 20] * 20]

        A = [x[:] for x in [[0] * 20] * 20]
        B = [x[:] for x in [[0] * 20] * 20]

        predSDR1 = SDR(self.predictiveCellsSDR)
        predSDR2 = SDR(self.predictiveCellsSDR)

        # calculate what kind of object will system expect
        for x in range(2, 18):
            for y in range(2, 18):  # for sensor UP !
                self.agent.move(x, y)

                self.SystemCalculate("X", learning=False)
                scoreWithFeature = self.rawAnomaly

                self.SystemCalculate(" ", learning=False)
                scoreWithoutFeature = self.rawAnomaly

                # y -1 because we are using sensor UP
                A[x][y - 1] = scoreWithFeature
                B[x][y - 1] = scoreWithoutFeature
                expectedObject[x][
                    y - 1] = 1 if scoreWithFeature < scoreWithoutFeature else 0

        print(A)
        print(B)
        print(expectedObject)

        # Plotting and visualising environment-------------------------------------------
        # Create the figure only if it doesn't exist yet or we are in an
        # interactive console.
        if fig_expect is None or isNotebook():
            fig_expect, _ = plt.subplots(nrows=1, ncols=1, figsize=(6, 4))
        else:
            fig_expect.axes[0].clear()

        plotBinaryMap(fig_expect.axes[0], "Expectation", expectedObject)
        fig_expect.canvas.draw()

        plt.show(block=True)
Example #12
    def testEncoding(self):
        params = SimHashDocumentEncoderParameters()
        params.size = 400
        params.activeBits = 20

        # main call style - list
        encoder = SimHashDocumentEncoder(params)
        output = encoder.encode(testDoc1)
        assert(encoder.size == params.size)
        assert(output.size == params.size)
        assert(output.getSum() == params.activeBits)

        # simple alternate calling style - string
        encoder2 = SimHashDocumentEncoder(params)
        value2 = "abcde fghij klmno pqrst uvwxy"
        output2 = encoder2.encode(value2)
        assert(output == output2)

        # encoding empty values leads to output of zeros
        outputZ = SDR(params.size)
        outputZ.zero()
        output3 = encoder.encode([])
        output4 = encoder.encode("")
        assert(output3 == outputZ)
        assert(output4 == outputZ)
Example #13
    def testUnicode(self):
        testDocUni1 = [
          "\u0395\u0396\u0397\u0398\u0399",
          "\u0400\u0401\u0402\u0403\u0404",
          "\u0405\u0406\u0407\u0408\u0409"]
        testDocUni2 = [
          "\u0395\u0396\u0397\u0398\u0399\u0410",
          "\u0400\u0401\u0402\u0403\u0404\u0410",
          "\u0405\u0406\u0407\u0408\u0409\u0410"]

        params = SimHashDocumentEncoderParameters()
        params.size = 400
        params.sparsity = 0.33

        # unicode 'tokenSimilarity' ON
        params.tokenSimilarity = True
        encoder1 = SimHashDocumentEncoder(params)
        output1 = SDR(params.size)
        output2 = SDR(params.size)
        encoder1.encode(testDocUni1, output1)
        encoder1.encode(testDocUni2, output2)
        assert(output1.getOverlap(output2) > 65)

        # unicode 'tokenSimilarity' OFF
        params.tokenSimilarity = False
        encoder2 = SimHashDocumentEncoder(params)
        output1.zero()
        output2.zero()
        encoder2.encode(testDocUni1, output1)
        encoder2.encode(testDocUni2, output2)
        assert(output1.getOverlap(output2) < 65)
Example #14
 def testPickle(self):
     for sparsity in (0, .3, 1):
         A = SDR((103, ))
         A.randomize(sparsity)
         P = pickle.dumps(A)
         B = pickle.loads(P)
         assert (A == B)
Example #15
    def testNupicSpatialPoolerPickling(self):
        """Test pickling / unpickling of HTM SpatialPooler."""
        inputs = SDR(100).randomize(.05)
        active = SDR(100)
        sp = SP(inputs.dimensions, active.dimensions, stimulusThreshold=1)

        for _ in range(10):
            sp.compute(inputs, True, active)

        if sys.version_info[0] >= 3:
            proto = 3
        else:
            proto = 2

        # Simple test: make sure that dumping / loading works...
        pickledSp = pickle.dumps(sp, proto)
        sp2 = pickle.loads(pickledSp)
        self.assertEqual(str(sp), str(sp2),
                         "Simple SpatialPooler pickle/unpickle failed.")

        # or using File I/O
        f = tempfile.TemporaryFile()  # simulates opening a file ('wb')
        pickle.dump(sp, f, proto)
        f.seek(0)
        sp3 = pickle.load(f)
        #print(str(sp3))
        f.close()
        self.assertEqual(str(sp), str(sp3),
                         "File I/O SpatialPooler pickle/unpickle failed.")
Example #16
    def deInactivateCells(self, l6Input):
        """
        Activate trnCells according to the l6Input. These in turn will impact 
        bursting mode in relay cells that are connected to these trnCells.
        Given the feedForwardInput, compute which cells will be silent, tonic,
        or bursting.
        
        :param l6Input:
            An SDR from L6. List of indices corresponding to L6 cells.

        :return: nothing
        """
        # Figure out which TRN cells recognize the L6 pattern.
        self.trnOverlaps = self.trnConnections.computeActivity(l6Input, False)
        self.activeTRNSegments = np.flatnonzero(
            self.trnOverlaps >= self.trnActivationThreshold)
        self.activeTRNCellIndices = self.trnConnections.mapSegmentsToCells(
            self.activeTRNSegments)

        # for s, idx in zip(self.activeTRNSegments, self.activeTRNCellIndices):
        #     print(self.trnOverlaps[s], idx, self.trnIndextoCoord(idx))

        # Figure out which relay cells have dendrites in de-inactivated state
        activeTRNCells = SDR(self.trnCellCount)
        activeTRNCells.sparse = self.activeTRNCellIndices
        self.relayOverlaps = self.relayConnections.computeActivity(
            activeTRNCells, False)
        self.activeRelaySegments = np.flatnonzero(
            self.relayOverlaps >= self.relayThreshold)
        self.burstReadyCellIndices = self.relayConnections.mapSegmentsToCells(
            self.activeRelaySegments)

        self.burstReadyCells.reshape(-1)[self.burstReadyCellIndices] = 1
Example #17
    def testAdaptShouldIncrementSynapses(self):
        """
    Test that connections are generated on predefined segments.
    """
        random = Random(1981)
        active_cells = np.array(random.sample(
            np.arange(0, NUM_CELLS, 1, dtype="uint32"), 40),
                                dtype="uint32")
        active_cells.sort()

        presynaptic_input = list(range(0, 10))
        presynaptic_input_set = set(presynaptic_input)
        inputSDR = SDR(1024)
        inputSDR.sparse = presynaptic_input

        connections = Connections(NUM_CELLS, 0.51)
        for i in range(NUM_CELLS):
            seg = connections.createSegment(i, 1)

        for cell in active_cells:
            segments = connections.segmentsForCell(cell)
            segment = segments[0]
            for c in presynaptic_input:
                connections.createSynapse(segment, c, 0.1)
            connections.adaptSegment(segment, inputSDR, 0.1, 0.001, True)

            presynaptic_cells = self._getPresynapticCells(
                connections, segment, 0.2)
            self.assertEqual(presynaptic_cells, presynaptic_input_set,
                             "Missing synapses")

            presynaptic_cells = self._getPresynapticCells(
                connections, segment, 0.3)
            self.assertEqual(presynaptic_cells, set(), "Too many synapses")
Example #18
 def testCompute(self):
     """ Check that there are no errors in call to compute. """
     inputs = SDR(100).randomize(.05)
     active = SDR(100)
     sp = SP(inputs.dimensions, active.dimensions, stimulusThreshold=1)
     sp.compute(inputs, True, active)
     assert (active.getSum() > 0)
Example #19
def spatial_pooler_encoder(pooler_data):
    sp1 = SpatialPooler(inputDimensions=(8000, ),
                        columnDimensions=(8000, ),
                        potentialPct=0.85,
                        globalInhibition=True,
                        localAreaDensity=0.0335,
                        synPermInactiveDec=0.006,
                        synPermActiveInc=0.04,
                        synPermConnected=0.13999999999999999,
                        boostStrength=4.0,
                        wrapAround=True)
    sdr_array = []
    # We run the SP over three epochs and collect the results in the third
    # epoch; this technique yields better results than a single epoch.
    for encoding in pooler_data:
        activeColumns1 = SDR(sp1.getColumnDimensions())
        sp1.compute(encoding, True, activeColumns1)
    for encoding in pooler_data:
        activeColumns2 = SDR(sp1.getColumnDimensions())
        sp1.compute(encoding, True, activeColumns2)
    for encoding in pooler_data:
        activeColumns3 = SDR(sp1.getColumnDimensions())
        sp1.compute(encoding, True, activeColumns3)
        sdr_array.append(activeColumns3)
    # To make sure we can relate the SP output to real images,
    # we take out specific SDRs which correspond to known images.
    hold_out = sdr_array[10000]
    hold_out1 = sdr_array[10001]
    hold_out2 = sdr_array[10002]
    (x_train, _), (x_test, _) = mnist.load_data()
    anm = np.asarray(PIL.Image.open('images/anm.jpg').convert('L')).reshape(
        (1, 28, 28, 1)) / (255 - 1)
    anm1 = np.asarray(PIL.Image.open('images/anm1.jpg').convert('L')).reshape(
        (1, 28, 28, 1)) / (255 - 1)
    anm2 = np.asarray(PIL.Image.open('images/anm2.jpg').convert('L')).reshape(
        (1, 28, 28, 1)) / (255 - 1)
    x_train = x_train.astype('float32') / (255 - 1)
    x_test = x_test.astype('float32') / (255 - 1)
    x_train = np.reshape(
        x_train, (len(x_train), 28, 28,
                  1))  # adapt this if using `channels_first` image data format
    x_test = np.reshape(
        x_test, (len(x_test), 28, 28,
                 1))  # adapt this if using `channels_first` image data format
    x_test = np.concatenate((x_test, anm, anm1, anm2))
    counter = 0
    # Finally we loop over the SP SDRs and their related images to find the
    # ones which have a greater overlap with the image we are searching for.
    for _s, _img in zip(sdr_array, x_test):
        _x_ = hold_out2.getOverlap(_s)
        if _x_ > 138:  # Adjust as required.
            _img_ = _img.reshape((28, 28))
            _img_ = (_img_ * 254).astype(np.uint8)
            im = Image.fromarray(_img_).convert('RGB')
            im.save('test_results/' + str(counter) + 'outfile.jpg')
            print('Sparsity - ' + str(_s.getSparsity()))
            print(_x_)
            print(str('counter - ') + str(counter))
            counter += 1
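The three near-identical passes above could be collapsed into one loop. A sketch of the same three-epoch schedule, reusing the surrounding names:

NUM_EPOCHS = 3  # matches the three explicit passes above
sdr_array = []
for epoch in range(NUM_EPOCHS):
    for encoding in pooler_data:
        active_columns = SDR(sp1.getColumnDimensions())
        sp1.compute(encoding, True, active_columns)
        if epoch == NUM_EPOCHS - 1:  # collect results only on the last epoch
            sdr_array.append(active_columns)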
Example #20
 def testExampleUsage(self):
     A = SDR(10)
     B = SDR(10)
     C = SDR(20)
     A.sparse = [0, 1, 2]
     B.sparse = [0, 1, 2]
     C.concatenate(A, B)
     assert (set(C.sparse) == set([0, 1, 2, 10, 11, 12]))
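concatenate fills the calling SDR and returns it, so it can be chained at construction time (the same pattern appears in modelRun further below). A minimal sketch:

from htm.bindings.sdr import SDR

A = SDR(10); A.sparse = [0, 1]
B = SDR(10); B.sparse = [5]
# The destination's size must equal the sum of the inputs' sizes;
# B's indices are offset by A's size.
flat = SDR(20).concatenate(A, B)
assert set(flat.sparse) == {0, 1, 15}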
Example #21
 def testExampleUsage(self):
     A = SDR(10)
     B = SDR(10)
     X = SDR(A.dimensions)
     A.sparse = [0, 1, 2, 3]
     B.sparse = [2, 3, 4, 5]
     X.intersection(A, B)
     assert (set(X.sparse) == set([2, 3]))
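For independent random SDRs, the expected sparsity of the intersection is the product of the input sparsities. A quick statistical check (approximate; not from the original test):

from htm.bindings.sdr import SDR

A = SDR(10000); A.randomize(0.10)
B = SDR(10000); B.randomize(0.10)
X = SDR(A.dimensions)
X.intersection(A, B)
# Expected sparsity ~ 0.10 * 0.10 = 0.01, within sampling noise.
assert abs(X.getSparsity() - 0.01) < 0.005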
Example #22
 def testExampleUsage(self):
     A = SDR(10)
     B = SDR(10)
     U = SDR(A.dimensions)
     A.sparse = [0, 1, 2, 3]
     B.sparse = [2, 3, 4, 5]
     U.union(A, B)
     assert (set(U.sparse) == set([0, 1, 2, 3, 4, 5]))
Example #23
def main(parameters=default_parameters, argv=None, verbose=True):

    # Load data.
    train_labels, train_images, test_labels, test_images = load_ds(
        'mnist_784', 10000, shape=[28, 28])  # HTM: ~95.6%
    #train_labels, train_images, test_labels, test_images = load_ds('Fashion-MNIST', 10000, shape=[28,28]) # HTM baseline: ~83%

    training_data = list(zip(train_images, train_labels))
    test_data = list(zip(test_images, test_labels))
    random.shuffle(training_data)

    # Setup the AI.
    enc = SDR(train_images[0].shape)
    sp = SpatialPooler(
        inputDimensions=enc.dimensions,
        columnDimensions=parameters['columnDimensions'],
        potentialRadius=parameters['potentialRadius'],
        potentialPct=parameters['potentialPct'],
        globalInhibition=True,
        localAreaDensity=parameters['localAreaDensity'],
        stimulusThreshold=int(round(parameters['stimulusThreshold'])),
        synPermInactiveDec=parameters['synPermInactiveDec'],
        synPermActiveInc=parameters['synPermActiveInc'],
        synPermConnected=parameters['synPermConnected'],
        minPctOverlapDutyCycle=parameters['minPctOverlapDutyCycle'],
        dutyCyclePeriod=int(round(parameters['dutyCyclePeriod'])),
        boostStrength=parameters['boostStrength'],
        seed=0,  # this is important: 0 = "random" seed, which changes on each invocation
        spVerbosity=99,
        wrapAround=False)
    columns = SDR(sp.getColumnDimensions())
    columns_stats = Metrics(columns, 99999999)
    sdrc = Classifier()

    # Training Loop
    for i in range(len(train_images)):
        img, lbl = training_data[i]
        encode(img, enc)
        sp.compute(enc, True, columns)
        sdrc.learn(columns, lbl)  # TODO SDRClassifier could accept string as a label, currently must be int

    print(str(sp))
    print(str(columns_stats))

    # Testing Loop
    score = 0
    for img, lbl in test_data:
        encode(img, enc)
        sp.compute(enc, False, columns)
        if lbl == np.argmax(sdrc.infer(columns)):
            score += 1
    score = score / len(test_data)

    print('Score:', 100 * score, '%')
    return score
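The encode() helper used above is not shown in this excerpt. A plausible minimal sketch, assuming simple binarization of the grayscale image against its mean (the thresholding choice is illustrative, not from the original):

import numpy as np

def encode(img, out):
    """Hypothetical helper: binarize a grayscale image into the SDR `out`."""
    out.dense = (np.asarray(img) >= np.mean(img)).astype(np.uint8)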
Example #24
    def modelRun(self, ts, val):
        """
           Run a single pass through HTM model
           @params ts - Timestamp
           @params val - float input value
           @return rawAnomalyScore computed for the `val` in this step
        """
        ## run data through our model pipeline: enc -> SP -> TM -> Anomaly
        self.inputs_.append(val)
        self.iteration_ += 1

        # 1. Encoding
        # Call the encoders to create bit representations for each value.  These are SDR objects.
        dateBits = self.encTimestamp.encode(ts)
        valueBits = self.encValue.encode(float(val))
        # Concatenate all these encodings into one large encoding for Spatial Pooling.
        encoding = SDR(self.encTimestamp.size +
                       self.encValue.size).concatenate([valueBits, dateBits])
        self.enc_info.addData(encoding)

        # 2. Spatial Pooler
        # Create an SDR to represent active columns. This will be populated by
        # the compute method below. It must have the same dimensions as the Spatial Pooler.
        activeColumns = SDR(self.sp.getColumnDimensions())
        # Execute Spatial Pooling algorithm over input space.
        self.sp.compute(encoding, True, activeColumns)
        self.sp_info.addData(activeColumns)

        # 3. Temporal Memory
        # Execute Temporal Memory algorithm over active mini-columns.
        self.tm.compute(activeColumns, learn=True)
        self.tm_info.addData(self.tm.getActiveCells().flatten())

        # 4.1 (optional) Predictor #TODO optional
        # TODO optional: also return an error metric on predictions (RMSE, R2,...)

        # 4.2 Anomaly
        # handle contextual (raw, likelihood) anomalies
        # -temporal (raw)
        raw = self.tm.anomaly
        temporalAnomaly = raw

        if self.useLikelihood:
            # Compute log(anomaly likelihood)
            like = self.anomalyLikelihood.anomalyProbability(val, raw, ts)
            logScore = self.anomalyLikelihood.computeLogLikelihood(like)
            temporalAnomaly = logScore  # TODO optional: TM to provide anomaly {none, raw, likelihood}, compare correctness with the py anomaly_likelihood

        anomalyScore = temporalAnomaly  # this is the "main" anomaly, compared in NAB

        # 5. print stats
        if self.verbose and self.iteration_ % 1000 == 0:
            print(self.enc_info)
            print(self.sp_info)
            print(self.tm_info)
            pass

        return anomalyScore, raw
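For the optional predictor step (4.1), htm.core provides a Predictor class. A minimal sketch of how it could be wired in, assuming a bucket `resolution` for the scalar values (the names and the resolution value are illustrative):

from htm.bindings.algorithms import Predictor

# Setup (e.g. in __init__): predict 1 and 5 steps ahead.
predictor = Predictor(steps=[1, 5], alpha=0.1)
resolution = 0.1  # assumption: bucket width for the scalar values

# Per step (e.g. in modelRun, after tm.compute):
#   predictor.learn(self.iteration_, self.tm.getActiveCells().flatten(),
#                   int(val / resolution))
#   pdf = predictor.infer(self.tm.getActiveCells().flatten())
#   prediction_one_step = np.argmax(pdf[1]) * resolution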
Example #25
 def testNan(self):
     gc = GridCellEncoder(size=200,
                          sparsity=.25,
                          periods=[6, 8.5, 12, 17, 24],
                          seed=42)
     zero = SDR(gc.dimensions)
     zero.randomize(.25)
     gc.encode([3, float('nan')], zero)
     assert (zero.getSum() == 0)
Example #26
    def testWrongDim(self):
        """
    Tests given wrong dimensions of SDRs
    """

        activeCols = SDR(50)
        predictiveCols = SDR(40)

        self.assertRaises(ValueError, an.calculateRawAnomaly, activeCols,
                          predictiveCols)
Example #27
 def testNaNs(self):
     p = ScalarEncoderParameters()
     p.size       = 100
     p.activeBits =  10
     p.minimum    =   0
     p.maximum    = 100
     enc = ScalarEncoder(p)
     sdr = SDR( 100 )
     enc.encode( float("nan"), sdr )
     assert( sdr.getSum() == 0 )
Example #28
    def CellsToColumns(self, cells, cellsPerColumn, columnsCount):
        array = []
        for cell in cells.sparse:
            col = int(cell / cellsPerColumn)
            if col not in array:  # each column at most once
                array += [col]

        columns = SDR(columnsCount)
        columns.sparse = array
        return columns
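The same cell-to-column mapping can be written without the explicit loop. A numpy sketch equivalent to CellsToColumns:

import numpy as np
from htm.bindings.sdr import SDR

def cells_to_columns(cells, cellsPerColumn, columnsCount):
    # Integer-divide each active cell index by cellsPerColumn, then
    # deduplicate; np.unique also returns the indices sorted.
    columns = SDR(columnsCount)
    columns.sparse = np.unique(np.asarray(cells.sparse) // cellsPerColumn).tolist()
    return columns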
Example #29
 def testNoTopology(self):
     rng = Random(42)
     presyn = SDR([5, 6, 7, 8])
     postsyn = SDR([6, 5, 4, 3, 2, 1])
     postsyn.randomize(1. / postsyn.size)
     for sparsity in (0., 1., 1. / presyn.size):
         function = NoTopology(sparsity)
         pp = function(postsyn, presyn.dimensions, rng)
         assert (pp.dimensions == presyn.dimensions)
         assert (abs(pp.getSparsity() - sparsity) < (.25 / presyn.size))
Example #30
 def testSparsity(self):
     test_cases = [
         (0.5, 0.5),
         (0.1, 0.9),
         (0.25, 0.3),
         (0.5, 0.5, 0.5),
         (0.95, 0.95, 0.95),
         (0.10, 0.10, 0.60),
         (0.0, 1.0, 1.0),
         (0.5, 0.5, 0.5, 0.5),
         (0.11, 0.20, 0.05, 0.04, 0.03, 0.01, 0.01, 0.02, 0.02, 0.02),
     ]
     size = 10000
     seed = 99
     X = SDR(size)
     for sparsities in test_cases:
         sdrs = []
         for S in sparsities:
             inp = SDR(size)
             inp.randomize(S, seed)
             seed += 1
             sdrs.append(inp)
         X.union(sdrs)
      mean_sparsity = np.prod([1 - s for s in sparsities])
         assert (X.getSparsity() >= (2. / 3.) * (1 - mean_sparsity))
         assert (X.getSparsity() <= (4. / 3.) * (1 - mean_sparsity))
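The bounds follow from basic probability: for independent inputs, a bit is off in the union only if it is off in every SDR, which happens with probability prod(1 - s_i), so the expected union sparsity is 1 - prod(1 - s_i). For example, for sparsities (0.5, 0.5) that is 1 - 0.25 = 0.75, and the test accepts anything within a 2/3 to 4/3 band around that value.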