def _each_loss(X, Y, Num, Model_prior, D, F, U):
    #U=[tf.contrib.distributions.Uniform().sample(sample_shape=(1,1)) for j in range (D)]
    _W = [f(u) for f, u in zip(F, U)]

    #variable transformation
    W = [minus_inf_inf_to_zero_one(__w) for __w in _W]
    raw_params = _W
    #Variable transformation's Log_det_Jacobian, summed over the parameter axis
    # (the helper already returns log-det terms, so no extra tf.log here)
    Log_det_J = 0
    for param in raw_params:
        Log_det_J += tf.reduce_sum(
            minus_inf_inf_to_zero_one_Log_det_J(param), 1)
    #likelihood and prior calculation
    #to use a model other than logistic regression, swap out Model_prior
    Log_likelihood, Prior, _ = Model_prior(X, Y, W)

    Posteri_numerator = Log_likelihood[:, None] + Prior + Log_det_J[:, None]

    dp_du = [tf.gradients(Posteri_numerator, u)[0] for u in U]

    # d/du log|dw/du| for each parameter (despite the name, not a second derivative)
    d2w_du2 = [
        tf.gradients(tf.log(tf.abs(tf.gradients(_p, __u)[0])), __u)[0]
        for _p, __u in zip(raw_params, U)
    ]

    _G = [(i + j)**2 for i, j in zip(d2w_du2, dp_du)]
    L_0 = tf.reduce_sum(_G)
    return L_0
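
Every variant in this listing calls the same pair of squashing helpers, whose definitions are not shown. A minimal sketch of plausible implementations, assuming the transform is the logistic sigmoid from (-inf, inf) to (0, 1); the real definitions live elsewhere in the repository:

import tensorflow as tf  # TensorFlow 1.x API (tf.log, tf.gradients)

def minus_inf_inf_to_zero_one(w):
    # squash an unconstrained tensor into (0, 1) with the logistic sigmoid
    return tf.nn.sigmoid(w)

def minus_inf_inf_to_zero_one_Log_det_J(w):
    # elementwise log|d sigmoid(w)/dw| = log s + log(1 - s), with s = sigmoid(w)
    s = tf.nn.sigmoid(w)
    return tf.log(s) + tf.log(1.0 - s)
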
Example #2
def _each_loss(X, Y, Num, Model_prior, D, F, U):
    _W = [f(u) for f, u in zip(F, U)]

    #variable transformation
    W = [minus_inf_inf_to_zero_one(__w) for __w in _W]
    raw_params = _W
    #Variable transformation's Log_det_Jacobian
    minus_Log_det_J = 0
    for param in raw_params:
        minus_Log_det_J += -tf.reduce_sum(
            minus_inf_inf_to_zero_one_Log_det_J(param), 1)
    #likelihood and prior calculation
    #to use a model other than logistic regression, swap out Model_prior

    # b, data_dim and Num_of_hidden are assumed to be defined at module scope
    params = parameter_reshaper(W, b, data_dim, Num_of_hidden)
    Minus_Log_likelihood, Minus_Prior = Model_prior(X, Y, params)

    Posteri_numerator = (Minus_Log_likelihood[:, None] + Minus_Prior
                         + minus_Log_det_J[:, None])

    dp_du = tf.gradients(Posteri_numerator, U)

    d2w_du2 = [
        tf.gradients(tf.log(tf.abs(tf.gradients(_p, __u)[0])), __u)[0]
        for _p, __u in zip(raw_params, U)
    ]

    _G = [(i - j)**2 for i, j in zip(d2w_du2, dp_du)]
    L_0 = tf.reduce_sum(_G)
    return L_0
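
Both `_each_loss` variants minimize the same first-order optimality residual. Writing w_d = f_d(u_d) for the trial maps with u_d ~ Uniform(0,1), the pushforward density matches the target exactly when log|f_d'(u_d)| plus the log posterior numerator (the Posteri_numerator tensor, which already includes the squashing transformation's log-det-Jacobian) is constant in u_d. The loss is the squared u-derivative of that sum over all parameters:

    L_0 = \sum_{d=1}^{D} \left( \frac{\partial}{\partial u_d} \log\lvert f_d'(u_d)\rvert + \frac{\partial}{\partial u_d} \log \tilde{p}(w \mid X, Y) \right)^{2}

with the signs flipped accordingly when the minus-log quantities are used, as in the second variant.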
Example #3
def construct_loss(X, Y, dic, Num, Model_prior, D):
    '''
    This function constructs the loss.

    X,Y : data placeholders
    dic : ordered dictionary containing the placeholders of U
    Num : how many groups of D placeholders dic contains
    Model_prior : which model/prior pair to use
    D : how many parameters are in the model
    '''
    F = make_trials(D)
    loss = 0
    for i in range(0, Num * D, D):

        U = [dic["U_" + str(i + j)] for j in range(D)]
        _W = [f(u) for f, u in zip(F, U)]

        #variable transformation
        W = [minus_inf_inf_to_zero_one(__w) for __w in _W]
        raw_params = _W
        #Variable transformation's Log_det_Jacobian
        minus_Log_det_J = 0
        for param in raw_params:  # renamed: the old name shadowed the outer loop's i
            minus_Log_det_J += -tf.reduce_sum(
                minus_inf_inf_to_zero_one_Log_det_J(param), 1)
        #likelihood and prior calculation
        #to use a model other than logistic regression, swap out Model_prior
        # b, data_dim and Num_of_hidden are assumed to be defined at module scope
        params = parameter_reshaper(W, b, data_dim, Num_of_hidden)
        Minus_Log_likelihood, Minus_Prior = Model_prior(X, Y, params)

        Posteri_numerator = (Minus_Log_likelihood[:, None] + Minus_Prior
                             + minus_Log_det_J[:, None])

        dp_du = [tf.gradients(Posteri_numerator, u)[0] for u in U]

        d2w_du2 = [
            tf.gradients(tf.log(tf.abs(tf.gradients(_p, __u)[0])), __u)[0]
            for _p, __u in zip(raw_params, U)
        ]

        _G = [(i - j)**2 for i, j in zip(d2w_du2, dp_du)]
        L_0 = tf.reduce_sum(_G)
        # accumulate inside the loop so every group of U's contributes
        loss += L_0
    return loss, F
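
A hypothetical wiring of construct_loss into a TF1 training graph. The sizes and placeholder shapes below are illustrative, and Model_prior is assumed to be defined elsewhere:

import collections
import tensorflow as tf  # TensorFlow 1.x

Num, D, data_dim = 4, 3, 2        # illustrative sizes
X = tf.placeholder(tf.float32, [None, data_dim])
Y = tf.placeholder(tf.float32, [None, 1])
U_dic = collections.OrderedDict(
    ("U_" + str(k), tf.placeholder(tf.float32, [None, 1]))
    for k in range(Num * D))       # one uniform-noise placeholder per parameter

loss, F = construct_loss(X, Y, U_dic, Num, Model_prior, D)
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)
# at each step, feed the data plus fresh Uniform(0,1) draws for every U_k
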
Example #4
def construct_loss(X, Y, F, dic, Num, Model_prior, D):
    '''
    This function constructs the loss.

    X,Y : data placeholders
    F : the trial map; F(U) returns the mapped parameters together with the
        diagonal scales of its Jacobian
    dic : ordered dictionary containing the placeholders of U
    Num : how many placeholders are included in dic
    Model_prior : which model/prior pair to use
    D : how many parameters are in the model
    '''

    loss = 0
    for i in range(0, Num):

        U = dic["U_" + str(i)]

        _W, sigma_list = F(U)
        _W = tf.nn.sigmoid(_W)
        #variable transformation
        W = minus_inf_inf_to_zero_one(_W)

        #Variable transformation's Log_det_Jacobian, summed over the parameter axis
        # (the helper already returns log-det terms, so no extra tf.log here)
        Log_det_J = tf.reduce_sum(minus_inf_inf_to_zero_one_Log_det_J(_W), 1)
        #likelihood and prior calculation
        #to use a model other than logistic regression, swap out Model_prior
        Log_likelihood, Prior = Model_prior(X, Y, W)

        Posteri_numerator = Log_likelihood[:, None] + Prior + Log_det_J[:, None]

        dp_du = tf.gradients(Posteri_numerator, U)[0]

        # log|det dF/dU|: F's Jacobian is diagonal with entries sigma_list
        log_det_J = tf.reduce_sum(tf.log(sigma_list))

        d2w_du2 = tf.gradients(log_det_J, U)[0]

        L_0 = tf.reduce_sum((d2w_du2 + dp_du)**2)

        loss += L_0
    return loss
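
This variant expects F(U) to return both the mapped parameters and the diagonal of its Jacobian, so that reduce_sum(log(sigma_list)) is the map's log-det. A hypothetical shape for such a trial map; make_trial and its internals are assumptions, not the repository's actual constructor:

def make_trial(D):
    # hypothetical elementwise map _W = a*U + b*tanh(U); its Jacobian with
    # respect to U is diagonal, so sigma_list is just the elementwise dW/dU
    a = tf.Variable(tf.ones([D]))
    b = tf.Variable(0.1 * tf.ones([D]))
    def F(U):                                    # U: [batch, D]
        _W = a * U + b * tf.tanh(U)
        sigma_list = a + b * (1.0 - tf.tanh(U)**2)
        return _W, sigma_list
    return F
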
Example #5
def construct_loss(dic, Num, Model_prior, D):
    '''
    This function constructs the loss.

    dic : ordered dictionary containing the placeholders of U
    Num : how many groups of D placeholders dic contains
    Model_prior : unused here; the target density is hard-coded below
    D : how many parameters are in the model
    '''
    F = make_trials(D)
    loss = 0
    for i in range(0, Num * D, D):

        U = [dic["U_" + str(i + j)] for j in range(D)]
        _W = [f(u) for f, u in zip(F, U)]

        #variable transformation
        W = [minus_inf_inf_to_zero_one(__w) for __w in _W]
        raw_params = _W
        #Variable transformation's Log_det_Jacobian
        # (the helper already returns log-det terms, so no extra tf.log here)
        Log_det_J = 0
        for param in raw_params:  # renamed: the old name shadowed the outer loop's i
            Log_det_J += tf.reduce_sum(
                minus_inf_inf_to_zero_one_Log_det_J(param), 1)
        #likelihood and prior calculation
        #the target density is hard-coded here as a standard normal on W[0]
        Log_joint = -0.5 * (W[0]**2)

        Posteri_numerator = (Log_joint + Log_det_J[:, None]
                             + np.log(1.0 / np.sqrt(2 * np.pi * np.square(1.0))))

        # log|dw/du| for each parameter (not its u-derivative: unlike the
        # other examples, this variant matches log densities pointwise)
        dw_du = [
            tf.log(tf.abs(tf.gradients(_p, __u)[0]))
            for _p, __u in zip(raw_params, U)
        ]

        #_G = [(i + j - 1)**2 for i, j in zip(dw_du, Posteri_numerator)]
        _G = (dw_du[0] + Posteri_numerator)**2
        L_0 = tf.reduce_sum(_G)

        # accumulate inside the loop so every group of U's contributes
        loss += L_0
    return loss, F
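
A NumPy-only sanity check of the pointwise identity this last variant drives to zero, independent of the repository code: for the exact map w = norm.ppf(u) pushing Uniform(0,1) onto a standard normal, log|dw/du| + log p(w) vanishes identically:

import numpy as np
from scipy.stats import norm

u = np.linspace(0.1, 0.9, 9)
h = 1e-6
dw_du = (norm.ppf(u + h) - norm.ppf(u - h)) / (2 * h)   # finite-difference f'(u)
identity = np.log(dw_du) + norm.logpdf(norm.ppf(u))
print(np.max(np.abs(identity)))   # ~0: the pushforward density equals the target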