class SoftmaxRegression(Objective):
    """Softmax (multinomial logistic) regression: linear model + softmax loss.

    Predictions are ``X @ theta.T``; the objective is the mean softmax
    loss plus an L2 penalty ``reg/2 * ||theta||^2``.
    """

    def __init__(self, theta_df, X_df, y_df, reg):
        """ Combine Softmax loss with a linear model for Softmax regression. """
        # Keep the dataframes and the regularization strength around.
        self.X_df = X_df
        self.y_df = y_df
        self.reg = reg
        # Data matrices: X and targets are read-only; theta stays writable.
        self.theta = theta_df.get_matrix()
        self.X = X_df.get_matrix(readonly=True)
        self.yt = y_df.get_matrix(readonly=True)
        # Linear scores, computed once at construction.
        self.yp = self.X.dot(self.theta.T)
        self.loss = Softmax(self.yp, self.yt)

    def f(self):
        """Mean loss over samples plus the L2 regularization term."""
        penalty = self.reg / 2 * np.sum(self.theta ** 2)
        return np.mean(self.loss.f()) + penalty

    def f_vec(self):
        """Per-sample losses, each shifted by the scalar L2 term."""
        penalty = self.reg / 2 * np.sum(self.theta ** 2)
        return self.loss.f() + penalty

    def g(self):
        """Gradient of ``f`` w.r.t. theta, averaged over the n samples."""
        n = self.X.shape[0]
        return self.loss.g().T.dot(self.X) / n + self.reg * self.theta

    def h(self):
        # NOTE(review): unlike g(), this Hessian is neither averaged over n
        # nor regularized — the original carries the same TODO; fixing it
        # needs the shape contract of Softmax.h(), not visible here.
        return self.loss.h()

    def err(self):
        """Boolean vector: True where the predicted class is wrong."""
        axis = self.X.ndim - 1
        return self.yp.argmax(axis=axis) != self.yt.argmax(axis=axis)

    @staticmethod
    def structure(X_df, y_df, reg):
        """Shape of theta: (#classes, #features). `reg` is unused here."""
        return y_df.cols(), X_df.cols()
class SoftmaxRegression(Objective):
    def __init__(self, theta_df, X_df, y_df, reg):
        """ Combine Softmax loss with a linear model for Softmax regression. """
        # Data matrices: X and targets are read-only views; theta is the
        # mutable parameter matrix being optimized.
        self.X = X_df.get_matrix(readonly=True)
        self.theta = theta_df.get_matrix()
        self.yt = y_df.get_matrix(readonly=True)
        # Linear predictions (scores), computed once here.
        self.yp = self.X.dot(self.theta.T)
        self.loss = Softmax(self.yp, self.yt)
        self.X_df = X_df
        self.y_df = y_df
        self.reg = reg

    def f(self):
        # Scalar objective: mean softmax loss plus L2 penalty reg/2*||theta||^2.
        return np.mean(self.loss.f()) + self.reg / 2 * np.sum(self.theta**2)

    def f_vec(self):
        # Per-sample losses; the scalar L2 term is broadcast onto every entry.
        return self.loss.f() + self.reg / 2 * np.sum(self.theta**2)

    def g(self):
        # Gradient averaged over the n samples, plus the L2 gradient reg*theta.
        n = self.X.shape[0]
        return self.loss.g().T.dot(self.X) / n + self.reg * self.theta

    def h(self):
        # Needs to be averaged and also regularized
        return self.loss.h()

    def err(self):
        # Misclassification indicator: compare argmax over the class axis
        # (last axis of X; presumably X is 2-D so this is axis 1).
        d = self.X.ndim - 1
        return (self.yt.argmax(axis=d) != self.yp.argmax(axis=d))

    @staticmethod
    def structure(X_df, y_df, reg):
        # Parameter shape for theta: (#classes, #features). `reg` unused here.
        return y_df.cols(), X_df.cols()
def __init__(self, theta_df, X_df, y_df, reg):
    """Combine Softmax loss with a linear model for Softmax regression.

    Caches the matrices backing the dataframes, forms the linear
    predictions ``X @ theta.T``, and wraps them in a Softmax loss.
    """
    # Keep the dataframes and regularization strength for later use.
    self.X_df = X_df
    self.y_df = y_df
    self.reg = reg
    # Parameter matrix stays writable; data and targets are read-only views.
    self.theta = theta_df.get_matrix()
    self.X = X_df.get_matrix(readonly=True)
    self.yt = y_df.get_matrix(readonly=True)
    # Linear scores, computed once here and shared with the loss object.
    self.yp = self.X.dot(self.theta.T)
    self.loss = Softmax(self.yp, self.yt)
def __init__(self,theta_df,X_df,y_df,reg):
    """ Combine Softmax loss with a linear model for Softmax regression. """
    # Data and target matrices are read-only views; theta is the mutable
    # parameter matrix being optimized.
    self.X = X_df.get_matrix(readonly=True)
    self.theta = theta_df.get_matrix()
    self.yt = y_df.get_matrix(readonly=True)
    # Linear predictions (scores), computed once at construction.
    self.yp = self.X.dot(self.theta.T)
    self.loss = Softmax(self.yp,self.yt)
    # Keep the dataframes and the L2 regularization strength around.
    self.X_df = X_df
    self.y_df = y_df
    self.reg = reg