def fit(self, X, y, max_iter=MAX_ITER):
    """Trains one lasso-regularized model per task with the supplied data.

    Args:
        X: list where each position contains the data of a task as an
            (m, n) np.array (rows are samples, cols are features).
        y: list where each position contains the labels of a task as an
            (m,) np.array.

    Returns:
        W (np.array): (n, T) array with the estimated parameters of all tasks.
        cost (np.array): final training cost (not tracked here; returned as 0).
        time (float): number of seconds spent in training.
    """
    # Normalize and add the bias column before sizing W, so the number of
    # features matches the augmented design matrices.
    X = self.normalize_data(X)
    X = self.add_bias(X)
    n_tasks = len(X)
    n_feats = X[0].shape[1]
    W = np.random.randn(n_feats, n_tasks)
    cost_function = 0
    start = time.time()
    for t in range(n_tasks):
        # Solve the independent lasso subproblem of task t with FISTA.
        lasso = Fista(self, self.lambda_1)
        w = lasso.fit(xk=W[:, t], A=X[t], b=y[t], ind=self.groups,
                      max_iter=max_iter)
        W[:, t] = w
    stop = time.time() - start
    self.W = W
    return W, np.array([cost_function]), stop
def fit(self, X, y):
    """Trains one group-lasso-regularized model per task with the supplied data.

    Uses optimization/solve_fista.py.

    Args:
        X: list where each position contains the data of a task as an
            (m, n) np.array (rows are samples, cols are features).
        y: list where each position contains the labels of a task as an
            (m,) np.array.

    Returns:
        W (np.array): (n, T) array with the estimated parameters of all tasks.
        cost (np.array): final training cost summed over tasks.
        time (float): number of seconds spent in training.
    """
    # Normalize and add the bias column before sizing W, so the number of
    # features matches the augmented design matrices.
    X = self.normalize_data(X)
    X = self.add_bias(X)
    n_tasks = len(X)
    n_feats = X[0].shape[1]
    W = np.random.randn(n_feats, n_tasks)
    cost_function = 0
    start = time.time()
    for t in range(n_tasks):
        # Solve the group-lasso subproblem of task t with FISTA.
        fista = Fista(self, self.lambda_1)
        w_opt = fista.fit(W[:, t], X[t], y[t], self.groups,
                          max_iter=self.max_iter)
        W[:, t] = w_opt
        cost_function += self.cost(X[t], y[t], W[:, t])
    stop = time.time() - start
    self.W = W
    return W, np.array([cost_function]), stop
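A minimal usage sketch for the group-lasso trainer above, assuming it is a method of a class named GroupLassoMTL whose constructor takes lambda_1, groups, and max_iter; the class name, constructor signature, and group encoding are illustrative assumptions, not taken from the repository.

import numpy as np

# Hypothetical example: class name, constructor arguments, and group encoding are assumed.
rng = np.random.default_rng(0)
n_tasks, n_samples, n_feats = 3, 50, 10

# One (m, n) design matrix and one (m,) label vector per task.
X = [rng.standard_normal((n_samples, n_feats)) for _ in range(n_tasks)]
y = [rng.standard_normal(n_samples) for _ in range(n_tasks)]

model = GroupLassoMTL(lambda_1=0.1,               # regularization strength (assumed name)
                      groups=np.arange(n_feats),  # one group per feature (assumed encoding)
                      max_iter=100)
W, cost, elapsed = model.fit(X, y)

# W has one column per task; add_bias appends a bias column, so W.shape == (n_feats + 1, n_tasks).
print(W.shape, cost, elapsed)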
def fit(self, w, X, y):
    """Optimizes the weight vector of a single task (Wt) with FISTA.

    Uses optimization/fista.py; see that file for details.
    """
    fista = Fista(self, self.lambda_3)
    w_opt = fista.fit(w, X, y, self.inds, max_iter=self.max_iter)
    return w_opt