Example #1
import theano
from theano.updates import Updates  # assumed location; renamed OrderedUpdates in later Theano releases

def test_updates_add():

    up1 = Updates()
    up2 = Updates()

    a = theano.shared('a')
    b = theano.shared('b')

    assert not up1 + up2

    up1[a] = 5

    # test that addition works
    assert up1
    assert up1 + up2
    assert not up2

    assert len(up1 + up2) == 1
    assert (up1 + up2)[a] == 5

    up2[b] = 7
    assert up1
    assert up1 + up2
    assert up2

    assert len(up1 + up2) == 2
    assert (up1 + up2)[a] == 5
    assert (up1 + up2)[b] == 7

    assert a in (up1 + up2)
    assert b in (up1 + up2)

    # this works even though there is a collision
    # because values all match
    assert len(up1 + up1 + up1) == 1

    up2[a] = 8  # a gets different value in up1 and up2
    try:
        up1 + up2
        assert 0
    except KeyError:
        pass

    # reassigning to a key works fine right?
    up2[a] = 10
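
For context, a minimal sketch of how such an update mapping is consumed: theano.function takes a dict-like updates argument mapping each shared variable to the expression for its next value, and an Updates instance is just such a dict (the names below are illustrative).

import theano
import theano.tensor as T

counter = theano.shared(0, name='counter')
inc = T.iscalar('inc')

# updates maps each shared variable to its new value; an Updates instance
# can be passed here directly in place of the plain dict
step = theano.function([inc], counter, updates={counter: counter + inc})

step(3)                      # returns the old value, then applies the update
print(counter.get_value())   # 3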
Example #2
    # test method from a unittest.TestCase subclass (hence self.assertRaises)
    def test_updates_setitem(self):
        ok = True

        up = Updates()
        sv = theano.shared('asdf')

        # keys have to be SharedVariables
        self.assertRaises(TypeError, up.__setitem__, 5, 7)
        self.assertRaises(TypeError, up.__setitem__, T.vector(), 7)

        up[theano.shared(88)] = 7
Example #3
import theano
from theano import tensor as T
from theano.updates import Updates  # assumed location; renamed OrderedUpdates in later Theano releases

def test_updates_setitem():
    ok = True

    up = Updates()
    sv = theano.shared('asdf')

    # keys have to be SharedVariables
    try:
        up[5] = 7
        ok = False
    except TypeError:
        ok = True
    assert ok

    # keys have to be SharedVariables
    try:
        up[T.vector()] = 7
        ok = False
    except TypeError:
        ok = True
    assert ok

    # a SharedVariable key, however, is accepted
    up[theano.shared(88)] = 7
Example #4
        try:
            arg = tensor.as_tensor_variable(arg)
        except TypeError:
            # This happens e.g. for RandomState objects, but it is a good way
            # to make sure no input is a CUDA ndarray
            pass
        scan_inputs += [arg]
    scan_outs = local_op(*scan_inputs)
    if type(scan_outs) not in (list, tuple):
        scan_outs = [scan_outs]
    ##
    ### Step 9. Figure out which outs are update rules for shared variables
    ###         and so on ...
    ##

    update_map = Updates()

    def remove_dimensions(outs, steps_return, offsets=None):
        # Trim each scan output: keep only the last `steps_return[idx]` steps
        # (or just the final step) for outputs listed in `steps_return`, and
        # drop the leading `offsets[idx]` initial-state entries otherwise.
        out_ls = []
        for idx, out in enumerate(outs):
            if idx in steps_return:
                if steps_return[idx] > 1:
                    out_ls.append(out[-steps_return[idx]:])
                else:
                    out_ls.append(out[-1])
            else:
                if offsets is None:
                    out_ls.append(out)
                else:
                    out_ls.append(out[offsets[idx]:])
        return out_ls
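
For context, a hedged sketch of how the update rules collected in update_map reach the caller: theano.scan returns a pair (outputs, updates), and that updates mapping has to be forwarded to theano.function. The example below uses the standard repeated-multiplication recipe; the mapping happens to be empty here, but the same contract applies when shared variables (e.g. random streams) acquire update rules.

import numpy
import theano
import theano.tensor as T

A = T.vector('A')
k = T.iscalar('k')

# scan returns (outputs, updates); `updates` is the dict-like update map
# assembled in step 9 above and must be passed on to theano.function
result, updates = theano.scan(fn=lambda prior, A: prior * A,
                              outputs_info=T.ones_like(A),
                              non_sequences=A,
                              n_steps=k)

power = theano.function([A, k], result[-1], updates=updates)
print(power(numpy.arange(1, 5, dtype=theano.config.floatX), 2))   # [ 1.  4.  9. 16.]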
Example #5
    # test method from a unittest.TestCase subclass (hence self.assertRaises)
    def test_updates_init(self):
        self.assertRaises(TypeError, Updates, dict(d=3))

        sv = theano.shared('asdf')
        Updates({sv: 3})
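
For reference, a minimal sketch (an illustrative stand-in, not Theano's actual implementation) of the behaviour these tests exercise: keys are restricted to SharedVariable instances, and addition rejects a key that appears on both sides with different values.

import theano
from theano.compile.sharedvalue import SharedVariable


class UpdatesSketch(dict):
    # hypothetical stand-in for theano.updates.Updates / OrderedUpdates
    def __init__(self, mapping=None):
        super(UpdatesSketch, self).__init__()
        for key, value in (mapping or {}).items():
            self[key] = value          # route through __setitem__ for key checking

    def __setitem__(self, key, value):
        if not isinstance(key, SharedVariable):
            raise TypeError('Updates keys must be SharedVariables', key)
        super(UpdatesSketch, self).__setitem__(key, value)

    def __add__(self, other):
        merged = UpdatesSketch(self)
        for key, value in other.items():
            # plain != is enough for the constant values used in the tests above
            if key in merged and merged[key] != value:
                raise KeyError('Collision with a different update value', key)
            merged[key] = value
        return merged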