def fit(self, X_train, Y_train, user_playlist_indices=None, batch_size=256, w0=None, njobs=1, verbose=0, fnpy='_'):
    """Fit model weights by minimising `objective` with L-BFGS (wolfe line search).

    Args:
        X_train: feature matrix, shape (N, D).
        Y_train: label matrix, shape (N, K); must have the same number of rows as X_train.
        user_playlist_indices: optional grouping passed through to `objective`;
            also switches which hyper-parameters are printed.
        batch_size: batch size for the DataHelper built over Y_train.
        w0: optional initial weights of shape (K * (D + 1),); if None, an attempt
            is made to restore from `fnpy`, falling back to zeros.
        njobs: parallelism hint forwarded to `objective`.
        verbose: 0 silent; >0 prints hyper-parameters; >1 also times training.
        fnpy: checkpoint path for warm-starting (np.load); the default '_' never
            loads successfully, so it effectively means "start from zeros".

    Side effects: sets self.b (1, K), self.W (K, D) and self.trained on success;
    sets self.trained = False and writes to stderr if LBFGS fails.
    """
    assert X_train.shape[0] == Y_train.shape[0]
    N, D = X_train.shape
    K = Y_train.shape[1]
    if verbose > 1:
        t0 = time.time()
    if verbose > 0:
        if user_playlist_indices is None:
            print('\nC: %g, p: %g' % (self.C1, self.p))
        else:
            print('\nC1: %g, C3: %g, p: %g' % (self.C1, self.C3, self.p))
    if w0 is not None:
        assert w0.shape[0] == K * (D + 1)
    else:
        if fnpy is not None:
            try:
                w0 = np.load(fnpy, allow_pickle=False)
                # BUG FIX: was a bare assert — AssertionError escaped the
                # except clause below (and asserts vanish under -O). Raise
                # ValueError instead so a stale checkpoint falls back to zeros.
                if w0.shape[0] != K * (D + 1):
                    raise ValueError('checkpoint shape mismatch')
                print('Restore from %s' % fnpy)
            except (IOError, ValueError):
                w0 = np.zeros(K * (D + 1))
        else:
            # BUG FIX: previously w0 stayed None here and None was handed to
            # optim.minimize(); initialise to zeros like the load-failure path.
            w0 = np.zeros(K * (D + 1))
    data_helper = DataHelper(Y_train, ax=1, batch_size=batch_size)
    try:
        # f: callable(x, g, *args)
        # LBFGS().minimize(f, x0, progress=progress, args=args)
        optim = LBFGS()
        optim.linesearch = 'wolfe'
        res = optim.minimize(objective, w0, progress,
                             args=(X_train, Y_train, self.C1, self.C3, self.p,
                                   user_playlist_indices, data_helper, njobs, verbose, fnpy))
        self.b = res[:K].reshape(1, K)   # first K entries are the biases
        self.W = res[K:].reshape(K, D)   # remaining K*D entries are the weights
        self.trained = True
    except (LBFGSError, MemoryError) as err:
        self.trained = False
        sys.stderr.write('LBFGS failed: {0}\n'.format(err))
        sys.stderr.flush()
    if verbose > 1:
        print('Training finished in %.1f seconds' % (time.time() - t0))
def fit(self, X_train, Y_train, user_playlist_indices=None, batch_size=256, verbose=0, w0=None, fnpy=None):
    """Fit model weights (scalar bias + K x D matrix) with L-BFGS.

    Args:
        X_train: feature matrix, shape (N, D).
        Y_train: label matrix, shape (N, K); row count must match X_train.
        user_playlist_indices: optional grouping forwarded to `objective`.
        batch_size: batch size for the DataHelper(s) built over Y_train.
        verbose: >0 prints training time.
        w0: optional initial weights of shape (K * D + 1,); if None, an attempt
            is made to restore from `fnpy`, falling back to zeros.
        fnpy: optional checkpoint path for warm-starting (np.load).

    Side effects: sets self.b (scalar), self.W (K, D) and self.trained on
    success; sets self.trained = False and writes to stderr if LBFGS fails.
    """
    assert X_train.shape[0] == Y_train.shape[0]
    N, D = X_train.shape
    K = Y_train.shape[1]
    VERBOSE = verbose  # local alias so the nested scopes below read one name
    if VERBOSE > 0:
        t0 = time.time()
    if w0 is None:
        if fnpy is not None:
            try:
                w0 = np.load(fnpy, allow_pickle=False)
                # BUG FIX: was a bare assert — AssertionError escaped the
                # except clause below (and asserts vanish under -O). Raise
                # ValueError instead so a stale checkpoint falls back to zeros.
                if w0.shape[0] != K * D + 1:
                    raise ValueError('checkpoint shape mismatch')
                print('Restore from %s' % fnpy)
            except (IOError, ValueError):
                w0 = np.zeros(K * D + 1)
        else:
            # BUG FIX: previously w0 stayed None here and None was handed to
            # optim.minimize(); initialise to zeros like the load-failure path.
            w0 = np.zeros(K * D + 1)
    else:
        assert w0.shape[0] == K * D + 1
    # Only build the helper(s) the selected loss actually needs.
    data_helper_example = None if self.loss_type == 'label' else DataHelper(
        Y_train, ax=0, batch_size=batch_size)
    data_helper_label = None if self.loss_type == 'example' else DataHelper(
        Y_train, ax=1, batch_size=batch_size)
    try:
        # f: callable(x, g, *args)
        # LBFGS().minimize(f, x0, progress=progress, args=args)
        optim = LBFGS()
        optim.linesearch = 'wolfe'
        res = optim.minimize(
            objective, w0, progress,
            args=(X_train, Y_train, self.C1, self.C2, self.C3, self.p, self.loss_type,
                  user_playlist_indices, data_helper_example, data_helper_label, fnpy))
        self.b = res[0]                   # single shared bias
        self.W = res[1:].reshape(K, D)    # remaining K*D entries are the weights
        self.trained = True
    except (LBFGSError, MemoryError) as err:
        self.trained = False
        sys.stderr.write('LBFGS failed: {0}\n'.format(err))
        sys.stderr.flush()
    if VERBOSE > 0:
        print('Training finished in %.1f seconds' % (time.time() - t0))
def fit(self, w0=None, verbose=0, fnpy='_'):
    """Fit the (V, W, mu) parameters with OWL-QN (L-BFGS + L1 on W and mu).

    Args:
        w0: optional flat initial weights of shape ((U + N + 1) * D,);
            if None, restore from `fnpy` or draw small Gaussian noise (seed 0).
        verbose: >0 prints hyper-parameters and training time.
        fnpy: checkpoint path tried with np.load; the default '_' never loads,
            so it effectively means "random initialisation".

    Raises:
        ValueError: if the resulting w0 has the wrong shape.

    Side effects: sets self.V (U, D), self.W (N, D), self.mu (D,) and
    self.trained on success; self.trained = False plus a stderr message on
    LBFGS failure.
    """
    N, U, D = self.N, self.U, self.D
    if verbose > 0:
        t0 = time.time()
        print('\nC: %g, %g, p: %g' % (self.C1, self.C2, self.p))
    n_params = (U + N + 1) * D
    if w0 is None:
        np.random.seed(0)  # deterministic random init
        restored = False
        if fnpy is not None:
            try:
                w0 = np.load(fnpy, allow_pickle=False)
                print('Restore from %s' % fnpy)
                restored = True
            except (IOError, ValueError):
                pass
        if not restored:
            w0 = 1e-3 * np.random.randn(n_params)
    if w0.shape != (n_params, ):
        raise ValueError('ERROR: incorrect dimention for initial weights.')
    try:
        # f: callable(x, g, *args)
        # LBFGS().minimize(f, x0, progress=progress, args=args)
        optim = LBFGS()
        optim.linesearch = 'wolfe'
        optim.orthantwise_c = self.C2
        # L1 regularisation covers [U * D, len(w0)): W and mu, but not V.
        optim.orthantwise_start = U * D
        optim.orthantwise_end = w0.shape[0]
        res = optim.minimize(objective, w0, progress,
                             args=(self.X, self.Y, self.C1, self.p,
                                   self.cliques, self.data_helper, verbose, fnpy))
        # Unpack the flat solution: V | W | mu.
        split_v, split_w = U * D, (U + N) * D
        self.V = res[:split_v].reshape(U, D)
        self.W = res[split_v:split_w].reshape(N, D)
        self.mu = res[split_w:]
        assert self.mu.shape == (D, )
        self.trained = True
    except (LBFGSError, MemoryError) as err:
        self.trained = False
        sys.stderr.write('LBFGS failed: {0}\n'.format(err))
        sys.stderr.flush()
    if verbose > 0:
        print('Training finished in %.1f seconds' % (time.time() - t0))
def fit(self, w0=None, verbose=0, fnpy=None):
    """Fit the (mu, V, W) parameters with L-BFGS (wolfe line search).

    Args:
        w0: optional flat initial weights of shape ((U + N + 1) * D,);
            if None, restore from `fnpy` when it is a usable '.npy' path,
            otherwise initialise via self._init_vars().
        verbose: >0 prints hyper-parameters and training time.
        fnpy: optional checkpoint path ('.npy' string) for warm-starting.

    Side effects: sets self.mu (D,), self.V (U, D), self.W (N, D) and
    self.trained on success; self.trained = False plus a stderr message on
    LBFGS failure.
    """
    N, U, D = self.N, self.U, self.D
    if verbose > 0:
        t0 = time.time()
        print('\nC: %g, %g, %g' % (self.C1, self.C2, self.C3))
    num_vars = (U + N + 1) * D
    if w0 is None:
        # BUG FIX: previously used `assert type(fnpy) == str` and
        # `assert fnpy.endswith('.npy')` inside the try — an AssertionError is
        # NOT caught by `except (IOError, ValueError)`, so an unusable fnpy
        # crashed instead of falling back to _init_vars() (and asserts are
        # stripped under -O). Guard explicitly instead.
        if isinstance(fnpy, str) and fnpy.endswith('.npy'):
            try:
                w0 = np.load(fnpy, allow_pickle=False)
                print('Restore from %s' % fnpy)
            except (IOError, ValueError):
                w0 = self._init_vars()
        else:
            w0 = self._init_vars()
    assert w0.shape == (num_vars, )
    try:
        # f: callable(x, g, *args)
        # LBFGS().minimize(f, x0, progress=progress, args=args)
        optim = LBFGS()
        optim.linesearch = 'wolfe'
        optim.max_linesearch = 100
        param_dict = {'N': self.N, 'C': (self.C1, self.C2, self.C3)}
        res = optim.minimize(objective, w0, progress,
                             args=(self.X, self.cliques, self.data_helper,
                                   param_dict, verbose, fnpy))
        # Unpack the flat solution: mu | V | W.
        self.mu = res[:D]
        self.V = res[D:(U + 1) * D].reshape(U, D)
        self.W = res[(U + 1) * D:].reshape(N, D)
        self.trained = True
    except (LBFGSError, MemoryError) as err:
        self.trained = False
        sys.stderr.write('LBFGS failed: {0}\n'.format(err))
        sys.stderr.flush()
    if verbose > 0:
        print('Training finished in %.1f seconds' % (time.time() - t0))
def train_loglin(trainX, trainY, reg_const):
    """Train a binary log-linear model with OWL-QN (L1-regularised L-BFGS).

    Args:
        trainX: feature matrix of shape (D, V) — D examples over a V-word vocabulary.
        trainY: labels, one per example (len(trainY) == D).
        reg_const: L1 strength exponent; the regulariser weight is 2 ** reg_const.

    Returns:
        The per-class word distributions from loglinmm.prob_w_given_k.
    """
    D, V = trainX.shape
    K = 2  # two classes
    assert trainX.shape[0] == len(trainY)
    beta = loglinmm.get_beta(trainX)
    gamma0 = np.random.rand(K, V)
    # Configure PyLBFGS: orthantwise_c turns on OWL-QN (L1), with loose
    # convergence tolerances.
    optimiser = LBFGS()
    optimiser.orthantwise_c = 2.0**reg_const
    optimiser.linesearch = 'wolfe'
    optimiser.epsilon = 1e-01
    optimiser.delta = 1e-01
    # Minimise the negative log-likelihood from the random start.
    gamma_opt = optimiser.minimize(loglinmm.negll, gamma0, None, [beta, trainX, trainY])
    return loglinmm.prob_w_given_k(beta, gamma_opt)
def fit(self, w0=None, verbose=0, fnpy='_'):
    """Fit the (mu, V, W) parameters with OWL-QN (L1 on W only).

    Args:
        w0: optional flat initial weights of shape ((U + N + 1) * D,);
            if None, an attempt is made to restore from `fnpy`, falling back
            to zeros.
        verbose: >0 prints hyper-parameters and training time.
        fnpy: checkpoint path tried with np.load; the default '_' never loads,
            so it effectively means "start from zeros".

    Side effects: sets self.mu (D,), self.V (U, D), self.W (N, D) and
    self.trained on success; self.trained = False plus a stderr message on
    LBFGS failure.
    """
    N, U, D = self.N, self.U, self.D
    if verbose > 0:
        t0 = time.time()
        print('\nC: %g, p: %g' % (self.C, self.p))
    num_vars = (U + N + 1) * D
    if w0 is not None:
        assert w0.shape[0] == num_vars
    else:
        if fnpy is not None:
            try:
                w0 = np.load(fnpy, allow_pickle=False)
                # BUG FIX: was a bare assert — AssertionError escaped the
                # except clause below (and asserts vanish under -O). Raise
                # ValueError instead so a stale checkpoint falls back to zeros.
                if w0.shape[0] != num_vars:
                    raise ValueError('checkpoint shape mismatch')
                print('Restore from %s' % fnpy)
            except (IOError, ValueError):
                w0 = np.zeros(num_vars)
        else:
            # BUG FIX: previously w0 stayed None here, crashing on
            # w0.shape[0] below; initialise to zeros like the failure path.
            w0 = np.zeros(num_vars)
    try:
        # f: callable(x, g, *args)
        # LBFGS().minimize(f, x0, progress=progress, args=args)
        optim = LBFGS()
        optim.linesearch = 'wolfe'
        optim.orthantwise_c = self.C
        # L1 regularisation covers [(U + 1) * D, len(w0)): W only, not mu or V.
        optim.orthantwise_start = (U + 1) * D
        optim.orthantwise_end = w0.shape[0]
        res = optim.minimize(objective_L1, w0, progress,
                             args=(self.X, self.Y, self.C, self.p,
                                   self.cliques, self.data_helper, verbose, fnpy))
        # Unpack the flat solution: mu | V | W.
        self.mu = res[:D]
        self.V = res[D:(U + 1) * D].reshape(U, D)
        self.W = res[(U + 1) * D:].reshape(N, D)
        self.trained = True
    except (LBFGSError, MemoryError) as err:
        self.trained = False
        sys.stderr.write('LBFGS failed: {0}\n'.format(err))
        sys.stderr.flush()
    if verbose > 0:
        print('Training finished in %.1f seconds' % (time.time() - t0))