Example #1
File: 1.py  Project: xcgfth/InferPy
def qmodel(k):
    with inf.datamodel():
        qz_loc = inf.Parameter(tf.ones([k]) * 0.5, name="qz_loc")
        qz_scale = tf.math.softplus(
            inf.Parameter(tf.ones([k]), name="qz_scale"))

        qz = inf.Normal(qz_loc, qz_scale, name="z")
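Note: the snippets on this page assume the usual aliases import inferpy as inf, import tensorflow as tf and import numpy as np from their original projects. They all share the pattern of wrapping a raw inf.Parameter in tf.math.softplus; a minimal standalone sketch of why that works (the values here are purely illustrative):

import tensorflow as tf

# softplus(x) = log(1 + exp(x)) maps any real number to a strictly positive one,
# so the raw scale parameters can be trained without an explicit positivity constraint
raw = tf.constant([-3.0, 0.0, 2.0])
positive_scale = tf.math.softplus(raw)   # approx. [0.049, 0.693, 2.127]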
Example #2
def qmodel(d):
    qw0_loc = inf.Parameter(0., name="qw0_loc")
    qw0_scale = tf.math.softplus(inf.Parameter(1., name="qw0_scale"))
    qw0 = inf.Normal(qw0_loc, qw0_scale, name="w0")

    qw_loc = inf.Parameter(tf.zeros([d, 1]), name="qw_loc")
    qw_scale = tf.math.softplus(inf.Parameter(tf.ones([d, 1]), name="qw_scale"))
    qw = inf.Normal(qw_loc, qw_scale, name="w")
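This snippet defines only the q-model for a Bayesian linear regression; the generative model it approximates is not shown. A hypothetical counterpart with matching variable names ("w0", "w") and shapes, in the style of InferPy's linear-regression example, might look like the following (the function name linear_reg, the unit-variance priors and the input variable "x" are assumptions):

import tensorflow as tf
import inferpy as inf

@inf.probmodel
def linear_reg(d):
    # hypothetical priors over intercept and weights, matching the q-model names
    w0 = inf.Normal(0., 1., name="w0")
    w = inf.Normal(tf.zeros([d, 1]), 1., name="w")

    with inf.datamodel():
        x = inf.Normal(tf.ones([d]), 2., name="x")
        y = inf.Normal(w0 + x @ w, 1., name="y")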
Example #3
def Q_nlpca(k, l, d):
    """
    Variational model for Probabilistic non-linear PCA model (nlpca(k,l,d) function).
    Arguments:
    - k: hidden space dimension.
    - l: neural network hidden layer dimension.
    - d: observed space dimension.

    """

    # First layer

    # beta0's mean parameter
    qbeta0_loc = inf.Parameter(tf.zeros([k, l]), name="qbeta0_loc")
    # beta0's deviation parameter
    qbeta0_scale = tf.math.softplus(
        inf.Parameter(tf.ones([k, l]), name="qbeta0_scale"))
    # beta0 ~ N(qbeta0_loc, qbeta0_scale)
    qbeta0 = inf.Normal(qbeta0_loc, qbeta0_scale, name="beta0")

    # alpha0's mean parameter
    qalpha0_loc = inf.Parameter(tf.zeros([l]), name="qalpha0_loc")
    # alpha0's deviation parameter
    qalpha0_scale = tf.math.softplus(
        inf.Parameter(tf.ones([l]), name="qalpha0_scale"))
    # alpha0 ~ N(qalpha0_loc , qalpha0_scale)
    qalpha0 = inf.Normal(qalpha0_loc, qalpha0_scale, name="alpha0")

    # Second layer

    # beta1's mean parameter
    qbeta1_loc = inf.Parameter(tf.zeros([l, d]), name="qbeta1_loc")
    # beta1's deviation parameter
    qbeta1_scale = tf.math.softplus(
        inf.Parameter(tf.ones([l, d]), name="qbeta1_scale"))
    # beta1 ~ N(qbeta1_loc, qbeta1_scale)
    qbeta1 = inf.Normal(qbeta1_loc, qbeta1_scale, name="beta1")

    # alpha1's mean parameter
    qalpha1_loc = inf.Parameter(tf.zeros([d]), name="qalpha1_loc")
    # alpha1's deviation parameter
    qalpha1_scale = tf.math.softplus(
        inf.Parameter(tf.ones([d]), name="qalpha1_scale"))
    # alpha1 ~ N(qalpha1_loc , qalpha1_scale)
    qalpha1 = inf.Normal(qalpha1_loc, qalpha1_scale, name="alpha1")

    with inf.datamodel():
        # z's mean parameter
        qz_loc = inf.Parameter(tf.zeros([k]), name="qz_loc")
        # z's deviation parameter
        qz_scale = tf.math.softplus(
            inf.Parameter(tf.ones([k]), name="qz_scale"))
        # z ~ N(qz_loc, qz_scale)
        qz = inf.Normal(loc=qz_loc, scale=qz_scale, name="z")
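The nlpca(k, l, d) generative model referenced in the docstring is not included in this excerpt. A hypothetical one-hidden-layer Bayesian decoder consistent with the declared shapes could be sketched as follows (the ReLU non-linearity and the unit-variance priors and likelihood are assumptions):

import tensorflow as tf
import inferpy as inf

@inf.probmodel
def nlpca(k, l, d):
    # hypothetical priors over the decoder weights, matching the q-model names
    beta0 = inf.Normal(tf.zeros([k, l]), 1., name="beta0")
    alpha0 = inf.Normal(tf.zeros([l]), 1., name="alpha0")
    beta1 = inf.Normal(tf.zeros([l, d]), 1., name="beta1")
    alpha1 = inf.Normal(tf.zeros([d]), 1., name="alpha1")

    with inf.datamodel():
        z = inf.Normal(tf.zeros([k]), 1., name="z")
        h = tf.nn.relu(z @ beta0 + alpha0)          # non-linear hidden layer
        x = inf.Normal(h @ beta1 + alpha1, 1., name="x")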
Example #4
File: 1.py  Project: saintland/InferPy
def qmodel(k, d):
    qw_loc = inf.Parameter(tf.ones([k, d]), name="qw_loc")
    qw_scale = tf.math.softplus(inf.Parameter(tf.ones([k, d]),
                                              name="qw_scale"))
    qw = inf.Normal(qw_loc, qw_scale, name="w")

    with inf.datamodel():
        qz_loc = inf.Parameter(tf.ones([k]), name="qz_loc")
        qz_scale = tf.math.softplus(
            inf.Parameter(tf.ones([k]), name="qz_scale"))
        qz = inf.Normal(qz_loc, qz_scale, name="z")
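For context, a minimal end-to-end sketch of how a q-model like this is typically wired up in InferPy 1.x: both the generative model and the q-model are decorated with @inf.probmodel, variable names are matched, and the q-model instance is passed to inf.inference.VI. The pca p-model below and the random x_train data are illustrative assumptions, not part of the original project:

import numpy as np
import tensorflow as tf
import inferpy as inf

@inf.probmodel
def pca(k, d):
    # hypothetical generative model whose variable names ("w", "z", "x") match the q-model
    w = inf.Normal(loc=tf.zeros([k, d]), scale=1., name="w")
    with inf.datamodel():
        z = inf.Normal(tf.ones([k]), 1., name="z")
        x = inf.Normal(z @ w, 1., name="x")

@inf.probmodel
def qmodel(k, d):
    qw_loc = inf.Parameter(tf.ones([k, d]), name="qw_loc")
    qw_scale = tf.math.softplus(inf.Parameter(tf.ones([k, d]), name="qw_scale"))
    qw = inf.Normal(qw_loc, qw_scale, name="w")

    with inf.datamodel():
        qz_loc = inf.Parameter(tf.ones([k]), name="qz_loc")
        qz_scale = tf.math.softplus(inf.Parameter(tf.ones([k]), name="qz_scale"))
        qz = inf.Normal(qz_loc, qz_scale, name="z")

# illustrative data: 1000 observations in a d=2 observed space
x_train = np.random.normal(size=(1000, 2)).astype(np.float32)

m = pca(k=1, d=2)
vi = inf.inference.VI(qmodel(k=1, d=2), epochs=2000)
m.fit({"x": x_train}, vi)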
Example #5
def qmodel(k, d):
    qbeta_loc = inf.Parameter(tf.zeros([k, d]), name="qbeta_loc")
    qbeta_scale = tf.math.softplus(inf.Parameter(tf.ones([k, d]),
                                                 name="qbeta_scale"))

    qbeta = inf.Normal(qbeta_loc, qbeta_scale, name="beta")

    with inf.datamodel():
        qz_loc = inf.Parameter(np.ones(k), name="qz_loc")
        qz_scale = tf.math.softplus(inf.Parameter(tf.ones(k),
                                                  name="qz_scale"))

        qz = inf.Normal(qz_loc, qz_scale, name="z")
Example #6
def Q_pca(k, d):
    """
    Variational model for Probabilistic PCA model (pca(k,d) function).
    Arguments:
    - k: hidden space dimension.
    - d: observed space dimension.

    """
    # W's mean parameter
    qw_loc = inf.Parameter(tf.zeros([k, d]), name="qw_loc")
    # W's deviation parameter
    qw_scale = tf.math.softplus(inf.Parameter(tf.ones([k, d]),
                                              name="qw_scale"))
    # W ~ N(qw_loc, qw_scale)
    qw = inf.Normal(qw_loc, qw_scale, name="w")

    # delta's mean parameter
    qd_loc = inf.Parameter(tf.ones([d]), name="qd_loc")
    # delta's deviation parameter
    qd_scale = tf.math.softplus(inf.Parameter(tf.ones([d]), name="qd_scale"))
    # delta ~ N(qd_loc, qd_scale)
    qd = inf.Normal(qd_loc, qd_scale, name="delta")

    with inf.datamodel():
        # Z's mean parameter
        qz_loc = inf.Parameter(np.zeros([k]), name="qz_loc")
        # Z's deviation parameter
        qz_scale = tf.math.softplus(
            inf.Parameter(tf.ones([k]), name="qz_scale"))
        # Z ~ N(qz_loc, qz_scale)
        qz = inf.Normal(qz_loc, qz_scale, name="z")
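The pca(k, d) function mentioned in the docstring is not part of this excerpt. A hypothetical generative counterpart in which "delta" acts as an observation-space bias could look like this (shapes follow the q-model above; it would be fitted with inf.inference.VI exactly as in the sketch after Example #4):

import tensorflow as tf
import inferpy as inf

@inf.probmodel
def pca(k, d):
    # hypothetical priors matching the Q_pca variable names and shapes
    w = inf.Normal(loc=tf.zeros([k, d]), scale=1., name="w")
    delta = inf.Normal(loc=tf.zeros([d]), scale=1., name="delta")

    with inf.datamodel():
        z = inf.Normal(tf.zeros([k]), 1., name="z")
        x = inf.Normal(z @ w + delta, 1., name="x")   # "delta" shifts the reconstruction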
Example #7
def Q_mixture(k, d):
    """
    Gaussian mixture variational model. Validation arguments (validate_args=True, allow_nan_stats=False) are passed to the distributions to catch invalid parameters during learning.
    Arguments:
    - k: number of components.
    - d: observed space dimensionality.

    """
    # Dirichlet distribution for each component probability.
    qpi_param = inf.Parameter(tf.ones(k) / k, name="qpi_param")
    qpi = inf.Dirichlet(qpi_param,
                        allow_nan_stats=False,
                        validate_args=True,
                        name="pi")

    # InverseGamma parameters and distribution.
    qLambda_w = tf.math.softplus(
        inf.Parameter(tf.ones([d, k]), name="qLambda_w"))
    qLambda_v = tf.math.softplus(
        inf.Parameter(tf.ones([d, k]), name="qLambda_v"))
    qLambda = inf.InverseGamma(
        concentration=qLambda_w,
        scale=qLambda_v,
        validate_args=True,
        allow_nan_stats=False,
        name="Lambda",
    )

    # Gaussian parameters and distribution.
    qmu_m = inf.Parameter(tf.zeros([d, k]), name="qmu_m")
    qmu_b = tf.math.softplus(inf.Parameter(tf.ones([d, k]), name="qmu_b"))
    qmu = inf.Normal(qmu_m,
                     qmu_b,
                     allow_nan_stats=False,
                     validate_args=True,
                     name="mu")
Example #8
def test_register_parameter(init_context):
    is_default = init_context['is_default']
    name = 'x'

    # the parameter does not exist in the context
    assert len(randvar_registry.get_var_parameters()) == 0
    assert randvar_registry.get_variable_or_parameter(name) is None
    p = inf.Parameter(0, name=name)

    # the parameter exists in the context
    # randvar_registry.register_variable(x) has been automatically called
    assert len(randvar_registry.get_var_parameters()) == 1
    assert randvar_registry.get_var_parameters()[name] == p
    assert randvar_registry.get_variable_or_parameter(name) == p

    # creating a new parameter with the same name fails only if is_default is False
    if is_default:
        p = inf.Parameter(0, name=name)
        assert len(randvar_registry.get_var_parameters()) == 1
        assert randvar_registry.get_var_parameters()[name] == p
        assert randvar_registry.get_variable_or_parameter(name) == p
    else:
        with pytest.raises(ValueError):
            inf.Parameter(0, name=name)
Example #9
def model():
    p = inf.Parameter(0., name='p')
    with inf.datamodel():
        x = inf.Normal(p, 1., name='x')
        inf.Normal(x, 1., name='y')
Example #10
def model():
    # batch_shape is assumed to be defined by the enclosing test
    p = inf.Parameter(np.zeros(batch_shape, dtype=np.float32), name='p')
    with inf.datamodel():
        x = inf.Normal(p, 1., name='x')
        inf.Normal(x, 1., name='y')
Example #11
def model():
    inf.Parameter(0)
Example #12
def test_run_in_session():
    x = inf.Parameter(0)
    assert inf.get_session().run(x) == 0
Example #13
def test_parameter_in_datamodel():
    with inf.datamodel(10):
        x = inf.Parameter(0)

    # assert that is_datamodel is true
    assert x.is_datamodel