Example 1
def test_am_basic(Simulator, plt, seed, rng):
    """Basic associative memory test."""

    d = 64
    vocab = Vocabulary(d, pointer_gen=rng)
    vocab.populate("A; B; C; D")

    with spa.Network("model", seed=seed) as m:
        m.am = ThresholdingAssocMem(
            threshold=0.3,
            input_vocab=vocab,
            mapping=vocab.keys(),
            function=filtered_step_fn,
        )
        spa.sym.A >> m.am

        in_p = nengo.Probe(m.am.input)
        out_p = nengo.Probe(m.am.output, synapse=0.03)

    with Simulator(m) as sim:
        sim.run(0.2)
    t = sim.trange()

    plt.subplot(3, 1, 1)
    plt.plot(t, similarity(sim.data[in_p], vocab))
    plt.ylabel("Input")
    plt.ylim(top=1.1)
    plt.subplot(3, 1, 2)
    plt.plot(t, similarity(sim.data[out_p], vocab))
    plt.plot(t[t > 0.15], np.ones(t.shape)[t > 0.15] * 0.95, c="g", lw=2)
    plt.ylabel("Output")

    assert_sp_close(t, sim.data[in_p], vocab["A"], skip=0.15, atol=0.05)
    assert_sp_close(t, sim.data[out_p], vocab["A"], skip=0.15)
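Like the rest of this listing, the snippet above omits the module-level imports and the pytest fixtures (Simulator, plt, seed, rng) that the nengo_spa test suite provides. A hedged sketch of the imports these snippets typically rely on is given below; the paths marked as assumed are guesses and may differ between nengo_spa versions.

import numpy as np
import pytest
from numpy.testing import assert_equal

import nengo
from nengo.exceptions import NengoWarning, ValidationError

import nengo_spa as spa
from nengo_spa import (
    IAAssocMem,
    SemanticPointer,
    ThresholdingAssocMem,
    Vocabulary,
    WTAAssocMem,
    similarity,
)
from nengo_spa.algebras import HrrAlgebra, VtbAlgebra
from nengo_spa.exceptions import SpaParseError
from nengo_spa.vector_generation import AxisAlignedVectors

# Assumed import paths (not confirmed for every nengo_spa version):
from nengo_spa.testing import assert_sp_close
from nengo_spa.vocabulary import (
    VocabularyMap,
    VocabularyMapParam,
    VocabularyOrDimParam,
)
# filtered_step_fn is the thresholding helper that ships with nengo_spa's
# associative-memory code; its exact import path is not shown in these excerpts.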
Example 2
def test_invalid_dimensions():
    with pytest.raises(ValidationError):
        Vocabulary(1.5)
    with pytest.raises(ValidationError):
        Vocabulary(0)
    with pytest.raises(ValidationError):
        Vocabulary(-1)
Example 3
def test_added_algebra_match(rng):
    v = Vocabulary(4, algebra=VtbAlgebra())
    sp = v.create_pointer()
    assert sp.algebra is VtbAlgebra()
    v.add("V", sp)
    assert v["V"].vocab is v
    assert v["V"].algebra is VtbAlgebra()
    assert v["V"].name == "V"
Example 4
def test_subset(rng, algebra):
    v1 = Vocabulary(32, pointer_gen=rng, algebra=algebra)
    v1.populate("A; B; C; D; E; F; G")

    # Test creating a vocabulary subset
    v2 = v1.create_subset(["A", "C", "E"])
    assert list(v2.keys()) == ["A", "C", "E"]
    assert_equal(v2["A"].v, v1["A"].v)
    assert_equal(v2["C"].v, v1["C"].v)
    assert_equal(v2["E"].v, v1["E"].v)

    assert v1.algebra is v2.algebra
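Several snippets (test_subset above, and later test_reserved_names, test_special_sps, test_invalid_pointer_gen, and test_transform) receive extra arguments such as algebra, name, sp, pointer_gen, and solver. These are injected by pytest fixtures or parametrization defined elsewhere in the test suite and not shown here. The fixture below is only an illustration of how the algebra argument could be supplied; the actual nengo_spa conftest may define it differently.

import pytest
from nengo_spa.algebras import HrrAlgebra, VtbAlgebra

# Illustrative fixture only (assumed, not copied from nengo_spa's conftest):
# each test requesting `algebra` runs once per algebra implementation.
@pytest.fixture(params=[HrrAlgebra(), VtbAlgebra()])
def algebra(request):
    return request.param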
Example 5
def test_am_wta(Simulator, plt, seed, rng):
    """Test the winner-take-all ability of the associative memory."""

    d = 64
    vocab = Vocabulary(d, pointer_gen=rng)
    vocab.populate("A; B; C; D")

    def input_func(t):
        if t < 0.2:
            return "A + 0.8 * B"
        elif t < 0.3:
            return "0"
        else:
            return "0.8 * A + B"

    with spa.Network("model", seed=seed) as m:
        m.am = WTAAssocMem(
            threshold=0.3,
            input_vocab=vocab,
            mapping=vocab.keys(),
            function=filtered_step_fn,
        )
        m.stimulus = spa.Transcode(input_func, output_vocab=vocab)
        m.stimulus >> m.am

        in_p = nengo.Probe(m.am.input)
        out_p = nengo.Probe(m.am.output, synapse=0.03)

    with Simulator(m) as sim:
        sim.run(0.5)
    t = sim.trange()
    more_a = (t > 0.15) & (t < 0.2)
    more_b = t > 0.45

    plt.subplot(2, 1, 1)
    plt.plot(t, similarity(sim.data[in_p], vocab))
    plt.ylabel("Input")
    plt.ylim(top=1.1)
    plt.subplot(2, 1, 2)
    plt.plot(t, similarity(sim.data[out_p], vocab))
    plt.plot(t[more_a], np.ones(t.shape)[more_a] * 0.9, c="g", lw=2)
    plt.plot(t[more_b], np.ones(t.shape)[more_b] * 0.9, c="g", lw=2)
    plt.ylabel("Output")

    assert_sp_close(t, sim.data[out_p], vocab["A"], skip=0.15, duration=0.05)
    assert_sp_close(t, sim.data[out_p], vocab["B"], skip=0.45, duration=0.05)
Example 6
def test_vocabulary_map_param():
    class Test(object):
        vocab_map = VocabularyMapParam('vocab_map', readonly=False)

    obj = Test()
    vm = VocabularyMap()
    v16 = Vocabulary(16)
    v32 = Vocabulary(32)

    obj.vocab_map = vm
    assert obj.vocab_map is vm

    obj.vocab_map = [v16, v32]
    assert obj.vocab_map[16] is v16
    assert obj.vocab_map[32] is v32

    with pytest.raises(ValidationError):
        obj.vocab_map = 'incompatible'
Example 7
def test_am_ia(Simulator, plt, seed, rng):
    """Test the winner-take-all ability of the IA memory."""

    d = 64
    vocab = Vocabulary(d, pointer_gen=rng)
    vocab.populate("A; B; C; D")

    def input_func(t):
        if t < 0.2:
            return "A + 0.8 * B"
        else:
            return "0.6 * A + B"

    with spa.Network("model", seed=seed) as m:
        m.am = IAAssocMem(input_vocab=vocab, mapping=vocab.keys())
        m.stimulus = spa.Transcode(input_func, output_vocab=vocab)
        m.reset = nengo.Node(lambda t: 0.2 < t < 0.4)

        m.stimulus >> m.am
        nengo.Connection(m.reset, m.am.input_reset, synapse=0.1)

        in_p = nengo.Probe(m.am.input)
        reset_p = nengo.Probe(m.reset)
        out_p = nengo.Probe(m.am.output, synapse=0.03)

    with Simulator(m) as sim:
        sim.run(0.7)
    t = sim.trange()
    more_a = (t > 0.15) & (t < 0.2)
    more_b = t > 0.65

    plt.subplot(2, 1, 1)
    plt.plot(t, similarity(sim.data[in_p], vocab))
    plt.plot(t, sim.data[reset_p], c="k", linestyle="--")
    plt.ylabel("Input")
    plt.ylim(top=1.1)
    plt.subplot(2, 1, 2)
    plt.plot(t, similarity(sim.data[out_p], vocab))
    plt.plot(t[more_a], np.ones(t.shape)[more_a] * 0.9, c="tab:blue", lw=2)
    plt.plot(t[more_b], np.ones(t.shape)[more_b] * 0.9, c="tab:orange", lw=2)
    plt.ylabel("Output")

    assert_sp_close(t, sim.data[out_p], vocab["A"], skip=0.15, duration=0.05)
    assert_sp_close(t, sim.data[out_p], vocab["B"], skip=0.65, duration=0.05)
Example 8
def test_readonly(rng):
    v1 = Vocabulary(32, pointer_gen=rng)
    v1.populate('A;B;C')

    v1.readonly = True

    with pytest.raises(ValueError):
        v1.parse('D')
Example 9
def test_vocabulary_set(rng):
    v8 = Vocabulary(8)
    v16 = Vocabulary(16)
    v32 = Vocabulary(32)
    vs = VocabularyMap([v8, v16], rng=rng)

    # Behaviour common to set and dict
    assert len(vs) == 2
    assert 8 in vs
    assert 16 in vs
    assert 32 not in vs

    assert v8 in vs
    assert v16 in vs
    assert v32 not in vs
    assert Vocabulary(8) not in vs

    # dict behaviour
    assert vs[8] is v8
    assert vs[16] is v16

    del vs[8]
    assert 8 not in vs

    # set behaviour
    vs.add(v32)
    assert vs[32] is v32
    with pytest.warns(UserWarning):
        vs.add(v32)

    vs.discard(32)
    assert 32 not in vs
    vs.discard(v16)
    assert 16 not in vs

    # creating new vocabs if non-existent
    vs.add(v8)
    assert vs.get_or_create(8) is v8
    new = vs.get_or_create(16)
    assert vs[16] is new
    assert new.dimensions == 16
    assert new.pointer_gen.rng is rng
Example 10
def test_am_default_output(Simulator, plt, seed, rng):
    d = 64
    vocab = Vocabulary(d, pointer_gen=rng)
    vocab.populate("A; B; C; D")

    def input_func(t):
        return "0.2 * A" if t < 0.25 else "A"

    with spa.Network("model", seed=seed) as m:
        m.am = ThresholdingAssocMem(
            threshold=0.5,
            input_vocab=vocab,
            mapping=vocab.keys(),
            function=filtered_step_fn,
        )
        m.am.add_default_output("D", 0.5)
        m.stimulus = spa.Transcode(input_func, output_vocab=vocab)
        m.stimulus >> m.am

        in_p = nengo.Probe(m.am.input)
        out_p = nengo.Probe(m.am.output, synapse=0.03)

    with Simulator(m) as sim:
        sim.run(0.5)
    t = sim.trange()
    below_th = (t > 0.15) & (t < 0.25)
    above_th = t > 0.4

    plt.subplot(2, 1, 1)
    plt.plot(t, similarity(sim.data[in_p], vocab))
    plt.ylabel("Input")
    plt.subplot(2, 1, 2)
    plt.plot(t, similarity(sim.data[out_p], vocab))
    plt.plot(t[below_th], np.ones(t.shape)[below_th] * 0.9, c="c", lw=2)
    plt.plot(t[above_th], np.ones(t.shape)[above_th] * 0.9, c="b", lw=2)
    plt.plot(t[above_th], np.ones(t.shape)[above_th] * 0.1, c="c", lw=2)
    plt.ylabel("Output")

    assert np.all(similarity(sim.data[out_p][below_th], [vocab["D"].v]) > 0.9)
    assert np.all(similarity(sim.data[out_p][above_th], [vocab["D"].v]) < 0.15)
    assert np.all(similarity(sim.data[out_p][above_th], [vocab["A"].v]) > 0.9)
Example 11
def test_vocabulary_or_dim_param():
    v16 = Vocabulary(16)
    v32 = Vocabulary(32)

    class Test(object):
        vocabs = VocabularyMap([v16])
        vocab = VocabularyOrDimParam('vocab', readonly=False)

    obj = Test()

    obj.vocab = v32
    assert obj.vocab is v32

    obj.vocab = 16
    assert obj.vocab is v16

    with pytest.raises(ValidationError):
        obj.vocab = 'incompatible'

    with pytest.raises(ValidationError):
        obj.vocab = 0
Example 12
def test_add_raises_exception_for_algebra_mismatch():
    v = Vocabulary(4, algebra=HrrAlgebra())
    with pytest.raises(ValidationError,
                       match="different vocabulary or algebra"):
        v.add("V", SemanticPointer(np.ones(4), algebra=VtbAlgebra()))
    v.add("V",
          SemanticPointer(np.ones(4), algebra=VtbAlgebra()).reinterpret(v))
Example 13
def test_parse_n(rng):
    v = Vocabulary(64, pointer_gen=rng)
    v.populate('A; B; C')
    A = v.parse('A')
    B = v.parse('B')

    parsed = v.parse_n('A', 'A*B', 'A+B', '3')
    assert np.allclose(parsed[0].v, A.v)
    assert np.allclose(parsed[1].v, (A * B).v)
    assert np.allclose(parsed[2].v, (A + B).v)
    # FIXME should give an exception?
    assert np.allclose(parsed[3].v, 3 * v['Identity'].v)
Example 14
def test_am_threshold(Simulator, plt, seed, rng):
    """Associative memory thresholding with differing input/output vocabs."""
    d = 64
    vocab = Vocabulary(d, pointer_gen=rng)
    vocab.populate("A; B; C; D")

    d2 = int(d / 2)
    vocab2 = Vocabulary(d2, pointer_gen=rng)
    vocab2.populate("A; B; C; D")

    def input_func(t):
        return "0.49 * A" if t < 0.1 else "0.8 * B"

    with spa.Network("model", seed=seed) as m:
        m.am = ThresholdingAssocMem(
            threshold=0.5,
            input_vocab=vocab,
            output_vocab=vocab2,
            function=filtered_step_fn,
            mapping="by-key",
        )
        m.stimulus = spa.Transcode(input_func, output_vocab=vocab)
        m.stimulus >> m.am

        in_p = nengo.Probe(m.am.input)
        out_p = nengo.Probe(m.am.output, synapse=0.03)

    with Simulator(m) as sim:
        sim.run(0.3)
    t = sim.trange()
    below_th = t < 0.1
    above_th = t > 0.25

    plt.subplot(2, 1, 1)
    plt.plot(t, similarity(sim.data[in_p], vocab))
    plt.ylabel("Input")
    plt.subplot(2, 1, 2)
    plt.plot(t, similarity(sim.data[out_p], vocab2))
    plt.plot(t[above_th], np.ones(t.shape)[above_th] * 0.9, c="g", lw=2)
    plt.ylabel("Output")

    assert np.mean(sim.data[out_p][below_th]) < 0.01
    assert_sp_close(t, sim.data[out_p], vocab2["B"], skip=0.25, duration=0.05)
Example 15
def test_vocabulary_tracking(rng):
    v = Vocabulary(32, pointer_gen=rng)
    v.populate('A')

    assert v['A'].vocab is v
    assert v.parse('2 * A').vocab is v
Example 16
def test_populate_with_transform_on_nonstrict_vocab(rng):
    v = Vocabulary(64, pointer_gen=rng, strict=False)

    v.populate('A.unitary()')
    assert 'A' in v
    assert np.allclose(v['A'].v, v['A'].unitary().v)
Example 17
def test_am_spa_keys_as_expressions(Simulator, plt, seed, rng):
    """Provide semantic pointer expressions as input and output keys."""
    d = 64

    vocab_in = Vocabulary(d, pointer_gen=rng)
    vocab_out = Vocabulary(d, pointer_gen=rng)

    vocab_in.populate("A; B")
    vocab_out.populate("C; D")

    in_keys = ["A", "A*B"]
    out_keys = ["C*D", "C+D"]
    mapping = dict(zip(in_keys, out_keys))

    with spa.Network(seed=seed) as m:
        m.am = ThresholdingAssocMem(threshold=0.3,
                                    input_vocab=vocab_in,
                                    output_vocab=vocab_out,
                                    mapping=mapping)

        m.inp = spa.Transcode(lambda t: "A" if t < 0.1 else "A*B",
                              output_vocab=vocab_in)
        m.inp >> m.am

        in_p = nengo.Probe(m.am.input)
        out_p = nengo.Probe(m.am.output, synapse=0.03)

    with Simulator(m) as sim:
        sim.run(0.2)

    # Specify t ranges
    t = sim.trange()
    t_item1 = (t > 0.075) & (t < 0.1)
    t_item2 = (t > 0.175) & (t < 0.2)

    # Modify vocabularies (for plotting purposes)
    vocab_in.add("AxB", vocab_in.parse(in_keys[1]).v)
    vocab_out.add("CxD", vocab_out.parse(out_keys[0]).v)

    plt.subplot(2, 1, 1)
    plt.plot(t, similarity(sim.data[in_p], vocab_in))
    plt.ylabel("Input: " + ", ".join(in_keys))
    plt.legend(vocab_in.keys(), loc="best")
    plt.ylim(top=1.1)
    plt.subplot(2, 1, 2)
    for t_item, c, k in zip([t_item1, t_item2], ["b", "g"], out_keys):
        plt.plot(
            t,
            similarity(sim.data[out_p], [vocab_out.parse(k).v],
                       normalize=True),
            label=k,
            c=c,
        )
        plt.plot(t[t_item], np.ones(t.shape)[t_item] * 0.9, c=c, lw=2)
    plt.ylabel("Output: " + ", ".join(out_keys))
    plt.legend(loc="best")

    assert (np.mean(
        similarity(sim.data[out_p][t_item1],
                   vocab_out.parse(out_keys[0]).v,
                   normalize=True)) > 0.9)
    assert (np.mean(
        similarity(sim.data[out_p][t_item2],
                   vocab_out.parse(out_keys[1]).v,
                   normalize=True)) > 0.9)
Example 18
def test_add(rng):
    v = Vocabulary(3, pointer_gen=rng)
    v.add('A', [1, 2, 3])
    v.add('B', [4, 5, 6])
    v.add('C', [7, 8, 9])
    assert np.allclose(v.vectors, [[1, 2, 3], [4, 5, 6], [7, 8, 9]])
Example 19
def test_parse(rng):
    v = Vocabulary(64, pointer_gen=rng)
    v.populate('A; B; C')
    A = v.parse('A')
    B = v.parse('B')
    C = v.parse('C')
    assert np.allclose((A * B).v, v.parse('A * B').v)
    assert np.allclose((A * ~B).v, v.parse('A * ~B').v)
    assert np.allclose((A + B).v, v.parse('A + B').v)
    assert np.allclose((A - (B * C) * 3 + ~C).v, v.parse('A-(B*C)*3+~C').v)

    assert np.allclose(v.parse('0').v, np.zeros(64))
    assert np.allclose(v.parse('1').v, np.eye(64)[0])
    assert np.allclose(v.parse('1.7').v, np.eye(64)[0] * 1.7)

    with pytest.raises(SyntaxError):
        v.parse('A((')
    with pytest.raises(SpaParseError):
        v.parse('"hello"')
Example 20
def test_reserved_names(name):
    v = Vocabulary(16)
    with pytest.raises(SpaParseError):
        v.populate(name)
Example 21
def test_special_sps(name, sp):
    v = Vocabulary(16)
    assert name in v
    assert np.allclose(v[name].v, sp(16).v)
    assert np.allclose(v.parse(name).v, sp(16).v)
Example 22
def test_pointer_gen():
    v = Vocabulary(32, pointer_gen=AxisAlignedVectors(32))
    v.populate('A; B; C')
    assert np.all(v.vectors == np.eye(32)[:3])
Example 23
def test_invalid_pointer_gen(pointer_gen):
    with pytest.raises(ValidationError):
        Vocabulary(32, pointer_gen=pointer_gen)
Example 24
def test_pointer_names():
    v = Vocabulary(16)
    v.populate('A; B')

    assert v['A'].name == 'A'
    assert v.parse('A*B').name == '(A)*(B)'
Example 25
def test_capital(rng):
    v = Vocabulary(16, pointer_gen=rng)
    with pytest.raises(SpaParseError):
        v.parse('a')
    with pytest.raises(SpaParseError):
        v.parse('A+B+C+a')
Example 26
def test_transform(recwarn, rng, solver):
    v1 = Vocabulary(32, strict=False, pointer_gen=rng)
    v2 = Vocabulary(64, strict=False, pointer_gen=rng)
    v1.populate('A; B; C')
    v2.populate('A; B; C')
    A = v1['A']
    B = v1['B']
    C = v1['C']

    # Test transform from v1 to v2 (full vocabulary)
    # Expected: np.dot(t, A.v) ~= v2.parse('A')
    # Expected: np.dot(t, B.v) ~= v2.parse('B')
    # Expected: np.dot(t, C.v) ~= v2.parse('C')
    t = v1.transform_to(v2, solver=solver)

    assert v2.parse('A').compare(np.dot(t, A.v)) > 0.85
    assert v2.parse('C+B').compare(np.dot(t, C.v + B.v)) > 0.85

    # Test transform from v1 to v2 (only 'A' and 'B')
    t = v1.transform_to(v2, keys=['A', 'B'], solver=solver)

    assert v2.parse('A').compare(np.dot(t, A.v)) > 0.85
    assert v2.parse('B').compare(np.dot(t, C.v + B.v)) > 0.85

    # Test warns on missing keys
    v1.populate('D')
    D = v1['D']
    with pytest.warns(NengoWarning):
        v1.transform_to(v2, solver=solver)

    # Test populating missing keys
    t = v1.transform_to(v2, populate=True, solver=solver)
    assert v2.parse('D').compare(np.dot(t, D.v)) > 0.85

    # Test ignores missing keys in source vocab
    v2.populate('E')
    v1.transform_to(v2, populate=True, solver=solver)
    assert 'E' not in v1
Example 27
def test_create_pointer_warning(rng):
    v = Vocabulary(2, pointer_gen=rng)

    # five pointers shouldn't fit
    with pytest.warns(UserWarning):
        v.populate('A; B; C; D; E')
Example 28
def test_populate(rng):
    v = Vocabulary(64, pointer_gen=rng)

    v.populate('')
    v.populate(' \r\n\t')
    assert len(v) == 0

    v.populate('A')
    assert 'A' in v

    v.populate('B; C')
    assert 'B' in v
    assert 'C' in v

    v.populate('D.unitary()')
    assert 'D' in v
    np.testing.assert_almost_equal(np.linalg.norm(v['D'].v), 1.)
    np.testing.assert_almost_equal(np.linalg.norm((v['D'] * v['D']).v), 1.)

    v.populate('E = A + 2 * B')
    assert np.allclose(v['E'].v, v.parse('A + 2 * B').v)
    assert np.linalg.norm(v['E'].v) > 2.

    v.populate('F = (A + 2 * B).normalized()')
    assert np.allclose(v['F'].v, v.parse('A + 2 * B').normalized().v)
    np.testing.assert_almost_equal(np.linalg.norm(v['F'].v), 1.)

    v.populate('G = A; H')
    assert np.allclose(v['G'].v, v['A'].v)
    assert 'H' in v

    # Assigning non-existing pointer
    with pytest.raises(NameError):
        v.populate('I = J')

    # Redefining
    with pytest.raises(ValidationError):
        v.populate('H = A')

    # Calling a non-existent function
    with pytest.raises(AttributeError):
        v.populate('I = H.invalid()')

    # invalid names: lowercase, unicode
    with pytest.raises(SpaParseError):
        v.populate('x = A')
    with pytest.raises(SpaParseError):
        v.populate(u'Aα = A')
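To run any of the above outside pytest, the fixtures map onto ordinary objects: Simulator is nengo.Simulator, rng is a numpy RandomState, seed is an int, and plt is matplotlib.pyplot. Below is a minimal, hedged standalone sketch that combines the Vocabulary and associative-memory pieces shown in these examples; it is an illustration, not part of the test suite.

import numpy as np
import nengo
import nengo_spa as spa

rng = np.random.RandomState(0)
vocab = spa.Vocabulary(64, pointer_gen=rng)
vocab.populate("A; B; C; D")

with spa.Network(seed=0) as model:
    # Thresholded associative memory that cleans up its input to the
    # closest stored pointer; the constant symbol A drives its input.
    model.am = spa.ThresholdingAssocMem(
        threshold=0.3, input_vocab=vocab, mapping=vocab.keys()
    )
    spa.sym.A >> model.am
    out_p = nengo.Probe(model.am.output, synapse=0.03)

with nengo.Simulator(model) as sim:
    sim.run(0.2)

# Similarity of the final output to each stored pointer; A should dominate.
print(dict(zip(vocab.keys(), spa.similarity(sim.data[out_p][-1], vocab))))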