def __init__(self, input, n_in, n_out):
    """Build the symbolic graph for a logistic-regression layer.

    :type input: my_theano.tensor.TensorType
    :param input: symbolic variable describing one minibatch of inputs

    :type n_in: int
    :param n_in: number of input units (dimensionality of the data space)

    :type n_out: int
    :param n_out: number of output units (dimensionality of the label space)
    """
    # Weights W: an (n_in, n_out) matrix of zeros held in a shared variable.
    # Column k of W is the separating hyperplane for class k.
    self.W = my_theano.shared(
        value=numpy.zeros((n_in, n_out), dtype=my_theano.config.floatX),
        name='W',
        borrow=True,
    )

    # Biases b: a length-n_out vector of zeros; b[k] is the free
    # parameter of hyperplane k.
    self.b = my_theano.shared(
        value=numpy.zeros((n_out,), dtype=my_theano.config.floatX),
        name='b',
        borrow=True,
    )

    # Class-membership probabilities: softmax of the affine map, where
    # row j of `input` is training sample j.
    activation = T.dot(input, self.W) + self.b
    self.p_y_given_x = T.nnet.softmax(activation)

    # The prediction is the class with maximal probability per row.
    self.y_pred = T.argmax(self.p_y_given_x, axis=1)

    # Trainable parameters of the model.
    self.params = [self.W, self.b]

    # Keep a handle on the symbolic model input.
    self.input = input
def zero_one(p_y, y):
    """Return the mean zero-one loss over a minibatch.

    The predicted class for each example is the argmax over that
    example's row of ``p_y``; the loss is the fraction of examples
    whose prediction differs from the true label.

    :type p_y: my_theano.tensor.TensorType
    :param p_y: matrix of class-membership probabilities; row i is the
        predicted distribution over labels for example i

    :type y: my_theano.tensor.TensorType
    :param y: vector giving the correct label for each example

    :return: symbolic scalar with the mean misclassification rate
    """
    return T.mean(T.neq(T.argmax(p_y, axis=1), y))