    def __init__(self, input_for_decision_layer, c, n_in_for_decision_layer, n_out,
                 p_membership=None, input_for_hidden_layer=None, n_in_for_hidden_layer=None):
        MixedLogisticRegression.__init__(self, input_for_decision_layer, c, n_in_for_decision_layer, n_out,
                                         p_membership, input_for_hidden_layer, n_in_for_hidden_layer)

        # A second set of hidden and decision layers; the trainable parameters
        # exposed for updating come from these copies.
        self.hiddenLayer2 = HiddenLayer(c, p_membership, input_for_hidden_layer, n_in_for_hidden_layer)
        self.decisionLayer2 = DecisionLayer(c, input_for_decision_layer, n_in_for_decision_layer, n_out)

        self.params = self.hiddenLayer2.params + self.decisionLayer2.params  # parameters updated during training
# Split the non-training examples: hold out 1000 for validation and use the
# rest as the test set.
remainingIds = list(set(range(n)) - set(trainIds))
validateIds = random.sample(remainingIds, 1000)
testIds = list(set(remainingIds) - set(validateIds))

# Convert the splits to Theano shared variables; labels are cast to int32 so
# they can be used as indices inside the likelihood computation.
trainX = theano.shared(dataX[trainIds, :])
trainY = T.cast(theano.shared(dataY[trainIds]), 'int32')
validateX = theano.shared(dataX[validateIds, :])
validateY = T.cast(theano.shared(dataY[validateIds]), 'int32')
testX = theano.shared(dataX[testIds, :])
testY = T.cast(theano.shared(dataY[testIds]), 'int32')
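# Note (an optional variant, not in the original script): to let Theano move
# these arrays onto a GPU, the usual idiom stores values as floatX and casts
# labels only at use time, e.g.
#   sharedX = theano.shared(numpy.asarray(dataX, dtype=theano.config.floatX))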

# === Build the model ===
print '--- Building the model ---'

xm = T.dmatrix('xm')  # covariates for the hidden (membership) layer
xr = T.dmatrix('xr')  # covariates for the decision layer
y = T.ivector('y')    # integer class labels

c = 3  # number of mixture components (latent groups)

# dxr and dxm, defined earlier in the script, are the numbers of input
# features for the decision layer and the hidden layer, respectively.
classifier = MixedLogisticRegression(input_for_decision_layer=xr, c=c, n_in_for_decision_layer=dxr,
                                     n_out=2, input_for_hidden_layer=xm, n_in_for_hidden_layer=dxm)

# The cost (the model's negative log-likelihood) is monitored to check whether
# EM has converged: once it stabilizes across iterations, training terminates.
cost = classifier.negative_log_likelihood(y)
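# For reference, and as an assumption based on the class interface rather than
# on code shown in this excerpt, the per-example likelihood being maximized
# has the usual mixture form
#   P(y | xm, xr) = sum_k pi_k(xm) * P(y | xr, component k),
# where pi_k(xm) are the membership probabilities produced by the hidden layer.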

# Training a mixed logistic regression model uses EM; a sketch of the driver
# loop follows below.
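# === Train the model (EM) ===
# A minimal sketch of the training driver, not the original implementation.
# Assumptions flagged here: `learning_rate`, `tol`, and `n_em_steps` are
# illustrative names; `classifier.params` is taken to be a list of Theano
# shared variables; the same covariate matrix is assumed to feed both layers
# in this script; and each iteration takes a single gradient step on the
# negative log-likelihood (direct gradient descent on the marginal
# likelihood) rather than an exact M-step maximizer.
learning_rate = 0.01
tol = 1e-6        # terminate once the cost change drops below this
n_em_steps = 100  # hard cap on the number of iterations

# One update rule per parameter: plain gradient descent on the cost.
gparams = [T.grad(cost, param) for param in classifier.params]
updates = [(param, param - learning_rate * gparam)
           for param, gparam in zip(classifier.params, gparams)]

em_step = theano.function(
    inputs=[],
    outputs=cost,
    updates=updates,
    givens={xm: trainX, xr: trainX, y: trainY},
)

prev_cost = float('inf')
for i in xrange(n_em_steps):
    current_cost = em_step()
    print 'EM iteration %d, cost %f' % (i, current_cost)
    if abs(prev_cost - current_cost) < tol:  # cost has stabilized
        break
    prev_cost = current_cost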