Example 1
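The snippets below are extracted from a larger test module and omit their imports. A plausible header they assume is sketched here (not the original file's exact imports; in particular, the module path and name of MRG_RandomStream depend on the Theano version):

import sys

import numpy as np
import pytest

import theano
from theano import config, tensor
from theano.sandbox.rng_mrg import MRG_RandomStream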
    def test_bad_size(self):
        R = MRG_RandomStream(234)

        for size in [
            (0, 100),
            (-1, 100),
            (1, 0),
        ]:
            with pytest.raises(ValueError):
                R.uniform(size)
            with pytest.raises(ValueError):
                R.binomial(size)
            with pytest.raises(ValueError):
                R.multinomial(size, 1, [])
            with pytest.raises(ValueError):
                R.normal(size)
            with pytest.raises(ValueError):
                R.truncated_normal(size)
Example 2
def test_undefined_grad_opt():
    # Make sure that undefined grads get removed from the optimized graph.
    random = MRG_RandomStream(np.random.randint(1, 2147462579))
    pvals = theano.shared(np.random.rand(10, 20).astype(theano.config.floatX))
    pvals = pvals / pvals.sum(axis=1)
    pvals = theano.gradient.zero_grad(pvals)
    samples = random.multinomial(pvals=pvals, n=1)
    samples = theano.tensor.cast(samples, pvals.dtype)
    samples = theano.gradient.zero_grad(samples)
    cost = theano.tensor.sum(samples + pvals)
    grad = theano.tensor.grad(cost, samples)
    f = theano.function([], grad)
    assert not any([
        isinstance(node.op, theano.gradient.UndefinedGrad)
        for node in f.maker.fgraph.apply_nodes
    ])
Example 3
def test_target_parameter():
    srng = MRG_RandomStream()
    pvals = np.array([[0.98, 0.01, 0.01], [0.01, 0.49, 0.50]])

    def basic_target_parameter_test(x):
        f = theano.function([], x)
        assert isinstance(f(), np.ndarray)

    basic_target_parameter_test(srng.uniform((3, 2), target="cpu"))
    basic_target_parameter_test(srng.normal((3, 2), target="cpu"))
    basic_target_parameter_test(srng.truncated_normal((3, 2), target="cpu"))
    basic_target_parameter_test(srng.binomial((3, 2), target="cpu"))
    basic_target_parameter_test(
        srng.multinomial(pvals=pvals.astype("float32"), target="cpu"))
    basic_target_parameter_test(
        srng.choice(p=pvals.astype("float32"), replace=False, target="cpu"))
    basic_target_parameter_test(
        srng.multinomial_wo_replacement(pvals=pvals.astype("float32"),
                                        target="cpu"))
Example 4
def test_multinomial():
    steps = 100

    if (config.mode in ["DEBUG_MODE", "DebugMode", "FAST_COMPILE"]
            or (config.mode == "Mode" and config.linker in ["py"])):
        sample_size = (49, 5)
    else:
        sample_size = (450, 6)

    pvals = np.asarray(np.random.uniform(size=sample_size))
    pvals = np.apply_along_axis(lambda row: row / np.sum(row), 1, pvals)
    R = MRG_RandomStream(234)
    # Note: we specify `nstreams` to avoid a warning.
    m = R.multinomial(pvals=pvals, dtype=config.floatX, nstreams=30 * 256)
    f = theano.function([], m)
    f()
    basic_multinomialtest(f,
                          steps,
                          sample_size,
                          pvals,
                          n_samples=1,
                          prefix="mrg ")
Example 5
def test_multinomial_n_samples():
    if (config.mode in ["DEBUG_MODE", "DebugMode", "FAST_COMPILE"]
            or (config.mode == "Mode" and config.linker in ["py"])):
        sample_size = (49, 5)
    else:
        sample_size = (450, 6)

    pvals = np.asarray(np.random.uniform(size=sample_size))
    pvals = np.apply_along_axis(lambda row: row / np.sum(row), 1, pvals)
    R = MRG_RandomStream(234)

    for n_samples, steps in zip([5, 10, 100, 1000], [20, 10, 1, 1]):
        m = R.multinomial(pvals=pvals,
                          n=n_samples,
                          dtype=config.floatX,
                          nstreams=30 * 256)
        f = theano.function([], m)
        basic_multinomialtest(f,
                              steps,
                              sample_size,
                              pvals,
                              n_samples,
                              prefix="mrg ")
        sys.stdout.flush()
Example 6
def test_undefined_grad():
    srng = MRG_RandomStream(seed=1234)

    # checking uniform distribution
    low = tensor.scalar()
    out = srng.uniform((), low=low)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, low)

    high = tensor.scalar()
    out = srng.uniform((), low=0, high=high)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, high)

    out = srng.uniform((), low=low, high=high)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, (low, high))

    # checking binomial distribution
    prob = tensor.scalar()
    out = srng.binomial((), p=prob)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, prob)

    # checking multinomial distribution
    prob1 = tensor.scalar()
    prob2 = tensor.scalar()
    p = [theano.tensor.as_tensor_variable([prob1, 0.5, 0.25])]
    out = srng.multinomial(size=None, pvals=p, n=4)[0]
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(theano.tensor.sum(out), prob1)

    p = [theano.tensor.as_tensor_variable([prob1, prob2])]
    out = srng.multinomial(size=None, pvals=p, n=4)[0]
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(theano.tensor.sum(out), (prob1, prob2))

    # checking choice
    p = [theano.tensor.as_tensor_variable([prob1, prob2, 0.1, 0.2])]
    out = srng.choice(a=None, size=1, p=p, replace=False)[0]
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out[0], (prob1, prob2))

    p = [theano.tensor.as_tensor_variable([prob1, prob2])]
    out = srng.choice(a=None, size=1, p=p, replace=False)[0]
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out[0], (prob1, prob2))

    p = [theano.tensor.as_tensor_variable([prob1, 0.2, 0.3])]
    out = srng.choice(a=None, size=1, p=p, replace=False)[0]
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out[0], prob1)

    # checking normal distribution
    avg = tensor.scalar()
    out = srng.normal((), avg=avg)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, avg)

    std = tensor.scalar()
    out = srng.normal((), avg=0, std=std)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, std)

    out = srng.normal((), avg=avg, std=std)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, (avg, std))

    # checking truncated normal distribution
    avg = tensor.scalar()
    out = srng.truncated_normal((), avg=avg)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, avg)

    std = tensor.scalar()
    out = srng.truncated_normal((), avg=0, std=std)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, std)

    out = srng.truncated_normal((), avg=avg, std=std)
    with pytest.raises(theano.gradient.NullTypeGradError):
        theano.grad(out, (avg, std))