Code example #1
import theano.tensor as T  # for T.log / T.sum used below; HiddenLayer, DecisionLayer and MixedLogisticRegression are defined elsewhere in this project


class MixedLogisticRegressionEM(MixedLogisticRegression):
    """
    Mixed logistic regression fitted with the EM algorithm.

    hiddenLayer2 and decisionLayer2 hold the parameters updated in the M-step,
    while the inherited layers provide the responsibilities used in the E-step.
    """
    def __init__(self, input_for_decision_layer, c, n_in_for_decision_layer, n_out,
                 p_membership=None, input_for_hidden_layer=None, n_in_for_hidden_layer=None):
        MixedLogisticRegression.__init__(self, input_for_decision_layer, c, n_in_for_decision_layer, n_out,
                                         p_membership, input_for_hidden_layer, n_in_for_hidden_layer)

        self.hiddenLayer2 = HiddenLayer(c, p_membership, input_for_hidden_layer, n_in_for_hidden_layer)
        self.decisionLayer2 = DecisionLayer(c, input_for_decision_layer, n_in_for_decision_layer, n_out)

        self.params = self.hiddenLayer2.params + self.decisionLayer2.params  # parameters to be updated in the M-step

    def negative_expected_log_likelihood(self, y):
        """

        :param y:
        :return:
        """
        lp_membership = T.log(self.hiddenLayer2.p_membership)
        lp_decision = T.log(self.decisionLayer2.conditionalLikelihood(y))
        p_conditionalMembership = self.conditionalMembershipProbability(y)  # inherited from MixedLogisticRegression
        return -T.sum(p_conditionalMembership * (lp_membership + lp_decision))

    def m_step(self):
        """M-step: minimize negative_expected_log_likelihood w.r.t. self.params (not implemented here)."""
        # Pylearn2 may be helpful
        # Reference: http://deeplearning.net/software/pylearn2/index.html#
        pass
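
The m_step above is left as a stub. Below is a minimal sketch of how it could be wired up with plain Theano gradient updates instead of Pylearn2; the helper name make_m_step_function, the learning rate, and the assumption that the model's layer inputs and parameters are Theano shared variables are illustrative, not part of the original code.

import theano
import theano.tensor as T

def make_m_step_function(clf, learning_rate=0.1):
    # clf is assumed to be a MixedLogisticRegressionEM instance whose layer
    # inputs and parameters are Theano shared variables.
    y = T.ivector('y')  # observed class labels
    cost = clf.negative_expected_log_likelihood(y)
    gparams = [T.grad(cost, param) for param in clf.params]
    updates = [(param, param - learning_rate * gparam)
               for param, gparam in zip(clf.params, gparams)]
    # Each call performs one gradient step on the M-step objective;
    # a full M-step would repeat the call until the cost stops improving.
    return theano.function(inputs=[y], outputs=cost, updates=updates)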
Code example #2
    def __init__(self, input_for_decision_layer, c, n_in_for_decision_layer, n_out,
                 p_membership=None, input_for_hidden_layer=None, n_in_for_hidden_layer=None):
        self.hiddenLayer = HiddenLayer(c, p_membership,
                                       input_for_hidden_layer, n_in_for_hidden_layer)

        self.decisionLayer = DecisionLayer(c, input_for_decision_layer,
                                           n_in_for_decision_layer, n_out)

        self.params = self.hiddenLayer.params + self.decisionLayer.params
Code example #3
    def __init__(self, input_for_decision_layer, c, n_in_for_decision_layer, n_out,
                 p_membership=None, input_for_hidden_layer=None, n_in_for_hidden_layer=None):
        MixedLogisticRegression.__init__(self, input_for_decision_layer, c, n_in_for_decision_layer, n_out,
                                         p_membership, input_for_hidden_layer, n_in_for_hidden_layer)

        self.hiddenLayer2 = HiddenLayer(c, p_membership, input_for_hidden_layer, n_in_for_hidden_layer)
        self.decisionLayer2 = DecisionLayer(c, input_for_decision_layer, n_in_for_decision_layer, n_out)

        self.params = self.hiddenLayer2.params + self.decisionLayer2.params  # parameters to be updated in the M-step
Code example #4
import theano.tensor as T  # for T.log / T.sum used below; HiddenLayer and DecisionLayer are defined elsewhere in this project


class MixedLogisticRegression(object):
    def __init__(self, input_for_decision_layer, c, n_in_for_decision_layer, n_out,
                 p_membership=None, input_for_hidden_layer=None, n_in_for_hidden_layer=None):
        self.hiddenLayer = HiddenLayer(c, p_membership,
                                       input_for_hidden_layer, n_in_for_hidden_layer)

        self.decisionLayer = DecisionLayer(c, input_for_decision_layer,
                                           n_in_for_decision_layer, n_out)

        self.params = self.hiddenLayer.params + self.decisionLayer.params

    def conditionalMembershipProbability(self, y):
        """
        E-step: posterior probability of each mixture component given the
        observed label y, obtained by Bayes' rule from the membership prior
        and the per-component conditional likelihood.
        """
        conditionalLikelihood = self.decisionLayer.conditionalLikelihood(y)
        p_membership = self.hiddenLayer.p_membership
        # if self.hiddenLayer.isHierarchical:
        #     p_membership = self.hiddenLayer.p_membership
        # else:
        #     p_membership = self.hiddenLayer.params

        jointProbabilities = conditionalLikelihood * p_membership
        # normalize row-wise so each sample's component probabilities sum to one
        return (jointProbabilities.T / T.sum(jointProbabilities, axis=1)).T

    def negative_expected_log_likelihood(self, y):
        """
        Minimization target for M-step

        :param y:
        :return:
        """
        lp_membership = T.log(self.hiddenLayer.p_membership)
        lp_decision = T.log(self.decisionLayer.conditionalLikelihood(y))
        p_conditionalMembership = self.conditionalMembershipProbability(y)
        return -T.sum(p_conditionalMembership * (lp_membership + lp_decision))

    def negative_log_likelihood(self, y):
        """Observed-data negative log-likelihood, marginalized over the mixture components."""
        p_membership = self.hiddenLayer.p_membership
        p_decision = self.decisionLayer.conditionalLikelihood(y)
        return -T.sum(T.log(T.sum(p_decision * p_membership, axis=1)))
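
For orientation, a hedged usage sketch follows. The symbolic variables x, z and y, the dimensions, and the assumption that HiddenLayer and DecisionLayer build their Theano graphs directly from the inputs passed to the constructor are illustrative guesses, not taken from the original code.

import theano
import theano.tensor as T

x = T.matrix('x')    # covariates for the decision (classification) layer
z = T.matrix('z')    # covariates for the hidden (membership) layer
y = T.ivector('y')   # observed class labels

# 3 mixture components, 10-dimensional decision input, binary outcome,
# 5-dimensional membership input -- all dimensions are made up for the example
clf = MixedLogisticRegression(input_for_decision_layer=x, c=3,
                              n_in_for_decision_layer=10, n_out=2,
                              input_for_hidden_layer=z, n_in_for_hidden_layer=5)

# observed-data negative log-likelihood: -sum_i log( sum_c p(c | z_i) * p(y_i | x_i, c) ),
# useful for monitoring EM convergence
nll = theano.function(inputs=[x, z, y], outputs=clf.negative_log_likelihood(y))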