Example #1
    def testGetOverlap(self):
        A = SDR((103, ))
        B = SDR((103, ))
        assert (A.getOverlap(B) == 0)

        A.dense[:10] = 1
        B.dense[:20] = 1
        A.dense = A.dense
        B.dense = B.dense
        assert (A.getOverlap(B) == 10)

        A.dense[:20] = 1
        A.dense = A.dense
        assert (A.getOverlap(B) == 20)

        A.dense[50:60] = 1
        B.dense[0] = 0
        A.dense = A.dense
        B.dense = B.dense
        assert (A.getOverlap(B) == 19)

        # Test wrong dimensions
        C = SDR((1, 1, 1, 1, 103))
        C.randomize(.5)
        try:
            A.getOverlap(C)
        except RuntimeError:
            pass
        else:
            self.fail()
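The overlap value of 19 can be checked by hand: getOverlap counts the bits that are active in both SDRs. A small illustration of that arithmetic using plain Python sets rather than the library call:

# Illustration only: getOverlap(B) equals the size of the intersection of the
# two SDRs' active-bit index sets at the end of the test above.
a_active = set(range(20)) | set(range(50, 60))   # A: bits 0-19 and 50-59
b_active = set(range(1, 20))                     # B: bits 1-19 (bit 0 cleared)
print(len(a_active & b_active))                  # 19, matching the assert above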
Example #2
    def testExampleUsage(self):
        # Make an SDR with 9 values, arranged in a (3 x 3) grid.
        X = SDR(dimensions=(3, 3))

        # These three statements are equivalent.
        X.dense = [[0, 1, 0], [0, 1, 0], [0, 0, 1]]
        assert (X.dense.tolist() == [[0, 1, 0], [0, 1, 0], [0, 0, 1]])
        assert ([list(v) for v in X.coordinates] == [[0, 1, 2], [1, 1, 2]])
        assert (list(X.sparse) == [1, 4, 8])
        X.coordinates = [[0, 1, 2], [1, 1, 2]]
        assert (X.dense.tolist() == [[0, 1, 0], [0, 1, 0], [0, 0, 1]])
        assert ([list(v) for v in X.coordinates] == [[0, 1, 2], [1, 1, 2]])
        assert (list(X.sparse) == [1, 4, 8])
        X.sparse = [1, 4, 8]

        # Access data in any format, SDR will automatically convert data formats,
        # even if it was not the format used by the most recent assignment to the
        # SDR.
        assert (X.dense.tolist() == [[0, 1, 0], [0, 1, 0], [0, 0, 1]])
        assert ([list(v) for v in X.coordinates] == [[0, 1, 2], [1, 1, 2]])
        assert (list(X.sparse) == [1, 4, 8])

        # Data format conversions are cached, and when an SDR value changes the
        # cache is cleared.
        X.sparse = [1, 2, 3]  # Assign new data to the SDR, clearing the cache.
        X.dense  # This line will convert formats.
        X.dense  # This line will reuse the result of the previous line.

        X = SDR((1000, 1000))
        data = X.dense
        data[0, 4] = 1
        data[444, 444] = 1
        X.dense = data
        assert (list(X.sparse) == [4, 444444])
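The assignment pattern at the end of this example (read X.dense, modify the array, write it back) recurs throughout these tests. A minimal sketch of that idiom, assuming the htm.core import path from htm.bindings.sdr:

from htm.bindings.sdr import SDR  # assumed import path for these examples

# Modify the SDR's dense buffer in place, then assign it back so the SDR
# knows its value changed and can refresh the cached sparse/coordinates views.
Y = SDR(10)
buf = Y.dense          # numpy array backing the dense representation
buf[3] = 1
buf[7] = 1
Y.dense = buf          # reassignment signals the change
print(list(Y.sparse))  # [3, 7]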
Example #3
    def testStr(self):
        A = SDR((103, ))
        B = SDR((100, 100, 1))
        A.dense[0] = 1
        A.dense[9] = 1
        A.dense[102] = 1
        A.dense = A.dense
        assert (str(A) == "SDR( 103 ) 0, 9, 102")
        A.zero()
        assert (str(A) == "SDR( 103 )")
        B.dense[0, 0, 0] = 1
        B.dense[99, 99, 0] = 1
        B.dense = B.dense
        assert (str(B) == "SDR( 100, 100, 1 ) 0, 9999")
Example #4
    def testDenseInplace(self):
        # Check that assigning dense data to itself (i.e. sdr.dense = sdr.dense)
        # is significantly faster than copying the data on assignment.

        # It should also not be *too* much faster, because this test case is
        # tuned to be very fast in both situations.
        A = SDR(100 * 1000)
        B = np.copy(A.dense)

        copy_time = time.perf_counter()
        for i in range(100):
            A.dense = B
        copy_time = time.perf_counter() - copy_time

        inplace_time = time.perf_counter()
        for i in range(100):
            A.dense = A.dense
        inplace_time = time.perf_counter() - inplace_time

        assert (inplace_time < copy_time / 3)
Example #5
    def testDefaultTopology(self):
        rng = Random(42)
        presyn = SDR([10, 10])
        postsyn = SDR([10, 10])
        postsyn.dense[0, 4] = 1
        postsyn.dense = postsyn.dense
        noWrap = DefaultTopology(1.0, 1.1, False)
        noWrapPP = noWrap(postsyn, presyn.dimensions, rng)
        assert (noWrapPP.dimensions == presyn.dimensions)
        assert (set(noWrapPP.sparse) == set([3, 4, 5, 13, 14, 15]))

        wrap = DefaultTopology(1.0, 1.1, True)
        wrapPP = wrap(postsyn, presyn.dimensions, rng)
        assert (set(wrapPP.sparse) == set([3, 4, 5, 13, 14, 15, 93, 94, 95]))

        potentialPct = DefaultTopology(0.5, 1.1, False)
        assert (potentialPct(postsyn, presyn.dimensions, rng).getSum() == 3)
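The expected index sets can be reproduced by hand. A rough sketch (not the library's implementation) of the no-wrap pool around the post-synaptic cell at (0, 4), treating the 1.1 radius as reaching one cell in each direction on the 10 x 10 pre-synaptic grid:

# Neighbours of (0, 4) within one step, clipped at the grid edge (no wrap),
# converted to flat row-major indices on the 10 x 10 grid.
rows, cols = 10, 10
r0, c0 = 0, 4
pool = [r * cols + c
        for r in range(r0 - 1, r0 + 2)
        for c in range(c0 - 1, c0 + 2)
        if 0 <= r < rows and 0 <= c < cols]
print(sorted(pool))   # [3, 4, 5, 13, 14, 15], matching noWrapPP.sparse
# With wrap-around, row -1 maps to row 9, which adds 93, 94, 95 to the pool.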
Example #6
    def testDense(self):
        A = SDR((103,))
        B = SDR((100, 100, 1))

        A.dense
        # Test that it is the same buffer every time
        A.dense[0] = 1
        A.dense[99] = 1
        assert(A.dense[0] + A.dense[99] == 2)
        # Test modify in-place
        A.dense = A.dense
        assert(set(A.sparse) == set((0, 99)))
        # Test dense dimensions
        assert(B.dense.shape == (100, 100, 1))
        # Indexing with the full dimensions should not crash
        B.dense[0, 0, 0] += 1
        B.dense[66, 2, 0] += 1
        B.dense[99, 99, 0] += 1
        B.dense = B.dense
        # Test wrong dimensions assigned
        C = SDR(( A.size + 1 ))
        C.randomize( .5 )
        test_cases = [
            (SDR(1), SDR(2)),
            (SDR(100),      SDR((100, 1))),
            (SDR((1, 100)), SDR((100, 1))),
        ]
        for left, right in test_cases:
            try:
                left.dense = right.dense
            except RuntimeError:
                pass
            else:
                self.fail()
        # Test assign data.
        A.dense = np.zeros( A.size, dtype=np.int16 )
        A.dense = np.ones(  A.size, dtype=np.uint64 )
        A.dense = np.zeros( A.size, dtype=np.int8 )
        A.dense = [1] * A.size
        B.dense = [[[1]] * 100 for _ in range(100)]
Example #7
    def testSetSDR(self):
        A = SDR((103, ))
        B = SDR((103, ))
        A.sparse = [66]
        B.setSDR(A)
        assert (B.dense[66] == 1)
        assert (B.getSum() == 1)
        B.dense[77] = 1
        B.dense = B.dense
        A.setSDR(B)
        assert (set(A.sparse) == set((66, 77)))

        # Test wrong dimensions assigned
        C = SDR((2, 4, 5, 1, 1, 1, 1, 3))
        C.randomize(.5)
        try:
            A.setSDR(C)
        except RuntimeError:
            pass
        else:
            self.fail()
        # Check return value.
        D = A.setSDR(B)
        assert (D is A)
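For contrast with plain Python assignment, a short sketch of the copy semantics this test relies on (the SDR import path is assumed, and the names are illustrative only):

from htm.bindings.sdr import SDR  # assumed import path

src = SDR(10)
src.sparse = [2, 7]

alias = src            # plain assignment: both names refer to the same SDR
copy = SDR(10)
copy.setSDR(src)       # copies the current value of src into copy

src.sparse = [1]
print(list(alias.sparse))  # [1]    -- the alias follows the change
print(list(copy.sparse))   # [2, 7] -- the copy keeps the earlier value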
Example #8
# Working on a more elegant way to do this. Very messy.
for result in results:
    # ground truth
    val = result[0]
    # bit array of our prediction
    raw_prediction = result[1].coordinates[0]

    if len(raw_prediction) > 0:
        # make blank SDR big enough for one number
        prediction = SDR(numSize)
        # remove prompt blank space from answer
        for value in raw_prediction:
            prediction.dense[value - numSize * 2] = 1
        # reassign .dense to tell the SDR that its dense buffer was modified
        # in place (the same idiom used in the SDR tests above)
        prediction.dense = prediction.dense
        # convert prediction into a number!
        prediction = common.decode(numDecoder, prediction)
    else:
        prediction = None  # no prediction
    # is prediction correct?
    agreement = (val == prediction)
    print("truth:", val, "prediction:", prediction, "agree?", agreement)
    if not agreement:
        errors += 1
        err_list.append((val, prediction))

print(err_list)

# should be 0 at default settings.
print("errors:", errors, "/", int(tries / 2))
Example #9
    def testGetSparsity(self):
        A = SDR((103, ))
        assert (A.getSparsity() == 0)
        A.dense = np.ones(A.size)
        assert (A.getSparsity() == 1)
Example #10
    def testGetSum(self):
        A = SDR((103, ))
        assert (A.getSum() == 0)
        A.dense = np.ones(A.size)
        assert (A.getSum() == 103)
Example #11
def main(parameters=default_parameters, argv=None, verbose=True):
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir',
                        type=str,
                        default=os.path.join(os.path.dirname(__file__), '..',
                                             '..', '..', 'build', 'ThirdParty',
                                             'mnist_data', 'mnist-src'))
    args = parser.parse_args(args=argv)

    # Load data.
    train_labels, train_images, test_labels, test_images = load_mnist(
        args.data_dir)
    training_data = list(zip(train_images, train_labels))
    test_data = list(zip(test_images, test_labels))
    random.shuffle(training_data)
    random.shuffle(test_data)

    # Setup the AI.
    enc = SDR((train_images[0].shape))
    sp = SpatialPooler(
        inputDimensions=enc.dimensions,
        columnDimensions=parameters['columnDimensions'],
        potentialRadius=parameters['potentialRadius'],
        potentialPct=parameters['potentialPct'],
        globalInhibition=True,
        localAreaDensity=parameters['localAreaDensity'],
        stimulusThreshold=int(round(parameters['stimulusThreshold'])),
        synPermInactiveDec=parameters['synPermInactiveDec'],
        synPermActiveInc=parameters['synPermActiveInc'],
        synPermConnected=parameters['synPermConnected'],
        minPctOverlapDutyCycle=parameters['minPctOverlapDutyCycle'],
        dutyCyclePeriod=int(round(parameters['dutyCyclePeriod'])),
        boostStrength=parameters['boostStrength'],
        seed=0,
        spVerbosity=99,
        wrapAround=False)
    columns = SDR(sp.getColumnDimensions())
    columns_stats = Metrics(columns, 99999999)
    sdrc = Classifier()

    # Training Loop
    for i in range(len(train_images)):
        img, lbl = random.choice(training_data)
        enc.dense = img >= np.mean(img)  # Convert greyscale image to binary.
        sp.compute(enc, True, columns)
        sdrc.learn(columns, lbl)

    print(str(sp))
    print(str(columns_stats))

    # Testing Loop
    score = 0
    for img, lbl in test_data:
        enc.dense = img >= np.mean(img)  # Convert greyscale image to binary.
        sp.compute(enc, False, columns)
        if lbl == np.argmax(sdrc.infer(columns)):
            score += 1
    score = score / len(test_data)

    print('Score:', 100 * score, '%')
    return score
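The only image preprocessing in the training and testing loops is the line enc.dense = img >= np.mean(img). A toy illustration of that thresholding, with pixel values chosen arbitrarily for the example:

import numpy as np

# Pixels at or above the image mean become active bits; the rest stay off.
img = np.array([[  0,  50, 200],
                [255,  10,  30],
                [  5, 180,  90]], dtype=np.uint8)
binary = img >= np.mean(img)      # mean is about 91, so three pixels pass
print(binary.astype(int))
# [[0 0 1]
#  [1 0 0]
#  [0 1 0]]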