Example #1
    def testConstructor(self):
        params1 = RDSE_Parameters()
        params1.size = 100
        params1.sparsity = .10
        params1.radius = 10
        R1 = RDSE(params1)

        params2 = R1.parameters
        params2.sparsity = 0  # Remove duplicate arguments
        params2.radius = 0  # Remove duplicate arguments
        R2 = RDSE(params2)

        A = SDR(R1.parameters.size)
        R1.encode(66, A)

        B = R2.encode(66)
        assert (A == B)
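
The two commented lines matter because R1.parameters comes back fully resolved: sparsity and activeBits, radius and resolution are all populated, and the constructor rejects such duplicate arguments (see Example #6). A small sketch of that failure mode, assuming the usual htm.core import path:

from htm.bindings.encoders import RDSE, RDSE_Parameters

params = RDSE_Parameters()
params.size = 100
params.sparsity = .10
params.radius = 10
resolved = RDSE(params).parameters   # sparsity & activeBits, radius & resolution all set

try:
    RDSE(resolved)                   # expected to raise: duplicate arguments
except RuntimeError as error:
    print("expected:", error)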
Example #2
 def testRandomOverlap(self):
     """ Verify that distant values have little to no semantic similarity.
     Also measure sparsity & activation frequency. """
     P = RDSE_Parameters()
     P.size = 2000
     P.sparsity = .08
     P.radius = 12
     P.seed = 42
     R = RDSE(P)
     num_samples = 1000
     A = SDR(R.parameters.size)
     M = Metrics(A, num_samples + 1)
     for i in range(num_samples):
         X = i * R.parameters.radius
         R.encode(X, A)
     print(M)
     assert (M.overlap.max() < .15)
     assert (M.overlap.mean() < .10)
     assert (M.sparsity.min() > R.parameters.sparsity - .01)
     assert (M.sparsity.max() < R.parameters.sparsity + .01)
     assert (M.sparsity.mean() > R.parameters.sparsity - .005)
     assert (M.sparsity.mean() < R.parameters.sparsity + .005)
     assert (M.activationFrequency.min() > R.parameters.sparsity - .05)
     assert (M.activationFrequency.max() < R.parameters.sparsity + .05)
     assert (M.activationFrequency.mean() > R.parameters.sparsity - .005)
     assert (M.activationFrequency.mean() < R.parameters.sparsity + .005)
     assert (M.activationFrequency.entropy() > .99)
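
The same property can be checked directly on a pair of encodings rather than through Metrics. A minimal sketch, assuming the htm.core classes used above: values separated by many radii should share few active bits.

from htm.bindings.encoders import RDSE, RDSE_Parameters

P = RDSE_Parameters()
P.size = 2000
P.sparsity = .08
P.radius = 12
P.seed = 42
R = RDSE(P)

near_zero = R.encode(0)
far_away = R.encode(100 * R.parameters.radius)
print(near_zero.getOverlap(far_away), "of", near_zero.getSum(), "active bits shared")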
Example #3
 def testAverageOverlap(self):
     """ Verify that nearby values have the correct amount of semantic
     similarity. Also measure sparsity & activation frequency. """
     P = RDSE_Parameters()
     P.size = 2000
     P.sparsity = .08
     P.radius = 12
     P.seed = 42
     R = RDSE(P)
     A = SDR(R.parameters.size)
     num_samples = 10000
     M = Metrics(A, num_samples + 1)
     for i in range(num_samples):
         R.encode(i, A)
     print(M)
     assert (M.overlap.min() > (1 - 1. / R.parameters.radius) - .04)
     assert (M.overlap.max() < (1 - 1. / R.parameters.radius) + .04)
     assert (M.overlap.mean() > (1 - 1. / R.parameters.radius) - .001)
     assert (M.overlap.mean() < (1 - 1. / R.parameters.radius) + .001)
     assert (M.sparsity.min() > R.parameters.sparsity - .01)
     assert (M.sparsity.max() < R.parameters.sparsity + .01)
     assert (M.sparsity.mean() > R.parameters.sparsity - .005)
     assert (M.sparsity.mean() < R.parameters.sparsity + .005)
     assert (M.activationFrequency.min() > R.parameters.sparsity - .05)
     assert (M.activationFrequency.max() < R.parameters.sparsity + .05)
     assert (M.activationFrequency.mean() > R.parameters.sparsity - .005)
     assert (M.activationFrequency.mean() < R.parameters.sparsity + .005)
     assert (M.activationFrequency.entropy() > .99)
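
The expected value in the overlap bounds follows from how the radius is defined: inputs a full radius apart share no active bits, so consecutive integers, which differ by 1/radius of a radius, should keep roughly a 1 - 1/radius fraction of their bits in common. A quick check of that arithmetic for the parameters above:

radius = 12
expected_overlap = 1 - 1. / radius
print(round(expected_overlap, 4))   # ~0.9167, the center of the asserted bounds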
Example #4
    def testSeed(self):
        P = RDSE_Parameters()
        P.size = 1000
        P.sparsity = .08
        P.radius = 12
        P.seed = 98
        R = RDSE(P)
        A = R.encode(987654)

        P.seed = 99
        R = RDSE(P)
        B = R.encode(987654)
        assert (A != B)
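
The complementary property, that identical seeds reproduce identical encodings, is what Example #5 pins down against a hard-coded SDR; a short sketch of the same idea, assuming the usual htm.core imports:

from htm.bindings.encoders import RDSE, RDSE_Parameters

P = RDSE_Parameters()
P.size = 1000
P.sparsity = .08
P.radius = 12
P.seed = 98
assert RDSE(P).encode(987654) == RDSE(P).encode(987654)   # same seed, same encoding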
Example #5
    def testDeterminism(self):
        """ Verify that the same seed always gets the same results. """
        GOLD = SDR(1000)
        GOLD.sparse = [
            28, 47, 63, 93, 123, 124, 129, 131, 136, 140, 196, 205, 213, 239,
            258, 275, 276, 286, 305, 339, 345, 350, 372, 394, 395, 443, 449,
            462, 468, 471, 484, 514, 525, 557, 565, 570, 576, 585, 600, 609,
            631, 632, 635, 642, 651, 683, 693, 694, 696, 699, 721, 734, 772,
            790, 792, 795, 805, 806, 833, 836, 842, 846, 892, 896, 911, 914,
            927, 936, 947, 953, 955, 962, 965, 989, 990, 996
        ]

        P = RDSE_Parameters()
        P.size = GOLD.size
        P.sparsity = .08
        P.radius = 12
        P.seed = 42
        R = RDSE(P)
        A = R.encode(987654)
        print(A)
        assert (A == GOLD)
Example #6
    def testErrorChecks(self):
        params1 = RDSE_Parameters()
        params1.size = 100
        params1.sparsity = .10
        params1.radius = 10
        R1 = RDSE(params1)
        A = SDR([10, 10])
        R1.encode(33, A)

        # Test wrong input dimensions
        B = SDR(1)
        with self.assertRaises(RuntimeError):
            R1.encode(3, B)

        # Test invalid parameters, size == 0
        params1.size = 0
        with self.assertRaises(RuntimeError):
            RDSE(params1)
        params1.size = 100

        # Test invalid parameters, activeBits == 0
        params1.activeBits = 0
        params1.sparsity = 0.00001  # Rounds to zero!
        with self.assertRaises(RuntimeError):
            RDSE(params1)

        # Test missing activeBits
        params2 = RDSE_Parameters()
        params2.size = 100
        params2.radius = 10
        with self.assertRaises(RuntimeError):
            RDSE(params2)
        # Test missing resolution/radius
        params3 = RDSE_Parameters()
        params3.size = 100
        params3.activeBits = 10
        with self.assertRaises(RuntimeError):
            RDSE(params3)

        # Test too many parameters: activeBits & sparsity
        params4 = RDSE_Parameters()
        params4.size = 100
        params4.sparsity = .6
        params4.activeBits = 10
        params4.radius = 4
        with self.assertRaises(RuntimeError):
            RDSE(params4)
        # Test too many parameters: resolution & radius
        params5 = RDSE_Parameters()
        params5.size = 100
        params5.activeBits = 10
        params5.radius = 4
        params5.resolution = 4
        with self.assertRaises(RuntimeError):
            RDSE(params5)
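
Taken together, these checks say a valid parameter set needs a nonzero size, exactly one of sparsity or activeBits, and exactly one of radius or resolution. For contrast, a minimal configuration that should construct cleanly (essentially params5 without the extra radius):

from htm.bindings.encoders import RDSE, RDSE_Parameters

ok = RDSE_Parameters()
ok.size = 100
ok.activeBits = 10
ok.resolution = 4
RDSE(ok)   # no RuntimeError expected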
Example #7
    def testRadiusResolution(self):
        """ Check that these arguments are equivalent. """
        # radius -> resolution
        P = RDSE_Parameters()
        P.size = 2000
        P.activeBits = 100
        P.radius = 12
        R = RDSE(P)
        self.assertAlmostEqual(R.parameters.resolution, 12. / 100, places=5)

        # resolution -> radius
        P = RDSE_Parameters()
        P.size = 2000
        P.activeBits = 100
        P.resolution = 12
        R = RDSE(P)
        self.assertAlmostEqual(R.parameters.radius, 12 * 100, places=5)

        # Moving 1 resolution moves 1 bit (usually)
        P = RDSE_Parameters()
        P.size = 2000
        P.activeBits = 100
        P.resolution = 3.33
        P.seed = 42
        R = RDSE(P)
        sdrs = []
        for i in range(100):
            X = i * (R.parameters.resolution)
            sdrs.append(R.encode(X))
            print("X", X, sdrs[-1])
        moved_one = 0
        moved_one_samples = 0
        for A, B in zip(sdrs, sdrs[1:]):
            delta = A.getSum() - A.getOverlap(B)
            if A.getSum() == B.getSum():
                assert (delta < 2)
                moved_one += delta
                moved_one_samples += 1
        assert (moved_one >= .9 * moved_one_samples)
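
Both conversions asserted above come from the same relationship, radius = activeBits * resolution; the arithmetic in isolation:

activeBits = 100
radius = 12
resolution = radius / activeBits
print(resolution)            # 0.12, as asserted for radius -> resolution
print(12 * activeBits)       # 1200, as asserted for resolution -> radius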
Example #8
 def testSparsityActiveBits(self):
     """ Check that these arguments are equivalent. """
     # Round sparsity up
     P = RDSE_Parameters()
     P.size = 100
     P.sparsity = .0251
     P.radius = 10
     R = RDSE(P)
     assert (R.parameters.activeBits == 3)
     # Round sparsity down
     P = RDSE_Parameters()
     P.size = 100
     P.sparsity = .0349
     P.radius = 10
     R = RDSE(P)
     assert (R.parameters.activeBits == 3)
     # Check activeBits
     P = RDSE_Parameters()
     P.size = 100
     P.activeBits = 50  # No floating point issues here.
     P.radius = 10
     R = RDSE(P)
     assert (R.parameters.sparsity == .5)
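
The rounding behavior being tested is, presumably, nearest-integer rounding of size * sparsity; the same arithmetic in plain Python:

size = 100
print(round(size * .0251))   # 3, matching the first assert
print(round(size * .0349))   # 3, matching the second assert
print(50 / size)             # 0.5, the sparsity reported for activeBits = 50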
Example #9
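This example is excerpted from a larger hotgym script, so its imports and the two path constants are not shown. A plausible preamble is sketched below; the path values are hypothetical placeholders, and since the sp.compute(..., True, array) and tm.compute(indices, learn=True) calls follow the classic nupic-style algorithm API while the encoder and SDR come from htm.core, the exact SpatialPooler/TemporalMemory imports depend on which package the original script targeted.

import csv
import datetime

import numpy
import yaml

from htm.bindings.sdr import SDR
from htm.bindings.encoders import RDSE, RDSE_Parameters
# SpatialPooler and TemporalMemory come from whichever algorithms package the
# original script used; they are not imported here.

_PARAMS_PATH = "model.yaml"                  # hypothetical path to the modelParams YAML
_INPUT_FILE_PATH = "rec-center-hourly.csv"   # hypothetical path to the input CSV
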
def runHotgym(numRecords):
    with open(_PARAMS_PATH, "r") as f:
        modelParams = yaml.safe_load(f)["modelParams"]
        enParams = modelParams["sensorParams"]["encoders"]
        spParams = modelParams["spParams"]
        tmParams = modelParams["tmParams"]

    # timeOfDayEncoder = DateEncoder(
    #   timeOfDay=enParams["timestamp_timeOfDay"]["timeOfDay"])
    # weekendEncoder = DateEncoder(
    #   weekend=enParams["timestamp_weekend"]["weekend"])
    # scalarEncoder = RandomDistributedScalarEncoder(
    #   enParams["consumption"]["resolution"])

    rdseParams = RDSE_Parameters()
    rdseParams.size = 100
    rdseParams.sparsity = .10
    rdseParams.radius = 10
    scalarEncoder = RDSE(rdseParams)

    # encodingWidth = (timeOfDayEncoder.getWidth()
    #                  + weekendEncoder.getWidth()
    #                  + scalarEncoder.getWidth())

    encodingWidth = scalarEncoder.size

    sp = SpatialPooler(
        inputDimensions=(encodingWidth, ),
        columnDimensions=(spParams["columnCount"], ),
        potentialPct=spParams["potentialPct"],
        potentialRadius=encodingWidth,
        globalInhibition=spParams["globalInhibition"],
        localAreaDensity=spParams["localAreaDensity"],
        numActiveColumnsPerInhArea=spParams["numActiveColumnsPerInhArea"],
        synPermInactiveDec=spParams["synPermInactiveDec"],
        synPermActiveInc=spParams["synPermActiveInc"],
        synPermConnected=spParams["synPermConnected"],
        boostStrength=spParams["boostStrength"],
        seed=spParams["seed"],
        wrapAround=True)

    tm = TemporalMemory(
        columnDimensions=(tmParams["columnCount"], ),
        cellsPerColumn=tmParams["cellsPerColumn"],
        activationThreshold=tmParams["activationThreshold"],
        initialPermanence=tmParams["initialPerm"],
        connectedPermanence=spParams["synPermConnected"],
        minThreshold=tmParams["minThreshold"],
        maxNewSynapseCount=tmParams["newSynapseCount"],
        permanenceIncrement=tmParams["permanenceInc"],
        permanenceDecrement=tmParams["permanenceDec"],
        predictedSegmentDecrement=0.0,
        maxSegmentsPerCell=tmParams["maxSegmentsPerCell"],
        maxSynapsesPerSegment=tmParams["maxSynapsesPerSegment"],
        seed=tmParams["seed"])

    results = []
    with open(_INPUT_FILE_PATH, "r") as fin:
        reader = csv.reader(fin)
        headers = next(reader)
        next(reader)  # skip the two metadata rows below the header
        next(reader)

        for count, record in enumerate(reader):

            if count >= numRecords: break

            # Convert data string into Python date object.
            dateString = datetime.datetime.strptime(record[0],
                                                    "%m/%d/%y %H:%M")
            # Convert data value string into float.
            consumption = float(record[1])

            # To encode, we need to provide zero-filled numpy arrays for the encoders
            # to populate.
            # timeOfDayBits = numpy.zeros(timeOfDayEncoder.getWidth())
            # weekendBits = numpy.zeros(weekendEncoder.getWidth())
            # consumptionBits = numpy.zeros(scalarEncoder.size)
            consumptionBits = SDR(scalarEncoder.size)

            # Now we call the encoders to create bit representations for each value.
            # timeOfDayEncoder.encodeIntoArray(dateString, timeOfDayBits)
            # weekendEncoder.encodeIntoArray(dateString, weekendBits)
            scalarEncoder.encode(consumption, consumptionBits)

            # Concatenate all these encodings into one large encoding for Spatial
            # Pooling.
            # encoding = numpy.concatenate(
            #   [timeOfDayBits, weekendBits, consumptionBits]
            # )
            encoding = consumptionBits

            # Create an array to represent active columns, all initially zero. This
            # will be populated by the compute method below. It must have the same
            # dimensions as the Spatial Pooler.
            # activeColumns = numpy.zeros(spParams["columnCount"])
            activeColumns = SDR(spParams["columnCount"])

            encodingIn = numpy.uint32(encoding.dense)
            minicolumnsOut = numpy.uint32(activeColumns.dense)
            # Execute Spatial Pooling algorithm over input space.
            sp.compute(encodingIn, True, minicolumnsOut)
            activeColumnIndices = numpy.nonzero(minicolumnsOut)[0]

            # Execute Temporal Memory algorithm over active mini-columns.
            tm.compute(activeColumnIndices, learn=True)

            activeCells = tm.getActiveCells()
            print(len(activeCells))
            results.append(activeCells)

        return results
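
A hypothetical invocation, given the preamble sketched above: the function returns one list of active cells per processed record.

if __name__ == "__main__":
    results = runHotgym(100)
    print("processed", len(results), "records")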