def _log_global(self, vk, Akxk, xk, i_iter, solver, intercept=0.0):
    record = {}
    record['i_iter'] = i_iter
    record['time'] = self.running_time

    # v := A x
    v = comm.all_reduce(np.array(Akxk), op='SUM')
    w = self.solver.grad_f(v)
    record['res'] = norm(v - self.solver.y) / norm(self.solver.y)

    # Compute squared norm of consensus violation
    record['cv2'] = norm(vk - v, 2) ** 2
    if self.mode == 'all':
        self.records_l[-1]['cv2'] = record['cv2']

    # Compute the value of minimizer objective
    val_gk = self.solver.gk(xk)
    record['g'] = comm.all_reduce(val_gk, op='SUM')
    record['f'] = self.solver.f(v)

    # Compute the value of conjugate objective
    val_gk_conj = self.solver.gk_conj(w)
    record['f_conj'] = self.solver.f_conj(w)
    record['g_conj'] = comm.all_reduce(val_gk_conj, op='SUM')

    if self.split_by_samples:
        n_samples = comm.all_reduce(len(solver.y), op='SUM')
    else:
        n_samples = len(solver.y)

    record['g'] /= n_samples
    record['g_conj'] /= n_samples
    record['f'] /= n_samples
    record['f_conj'] /= n_samples

    # The primal should be monotonically decreasing
    record['P'] = record['f'] + record['g']
    record['D'] = record['f_conj'] + record['g_conj']

    # Duality gap of the global problem; 'D' collects the conjugate terms
    # (the negated dual objective), so the gap is the sum P + D.
    record['gap'] = record['D'] + record['P']

    if self.do_prediction_tests:
        y_predict = self.model.predict(self.Ak_test)
        y_test_avg = np.average(self.y_test)
        record['n_train'] = self.solver.y.shape[0]
        record['n_test'] = self.y_test.shape[0]
        record['rmse'] = np.sqrt(np.average((y_predict - self.y_test) ** 2))
        record['r2'] = 1.0 - np.sum((y_predict - self.y_test) ** 2) \
            / np.sum((self.y_test - y_test_avg) ** 2)
        # Use the absolute value of y_test in the denominator so that
        # negative targets do not flip the sign of the relative error.
        record['max_rel'] = np.amax(np.abs(y_predict - self.y_test) / np.abs(self.y_test))
        record['l1_rel'] = np.linalg.norm(self.y_test - y_predict, 1) / np.linalg.norm(self.y_test, 1)
        record['l2_rel'] = np.linalg.norm(self.y_test - y_predict, 2) / np.linalg.norm(self.y_test, 2)

    self.records.append(record)

    if self.rank == 0 and self.verbose >= 2:
        print("Iter {i_iter:5}, Time {time:10.5e}: gap={gap:10.3e}, P={P:10.3e}, D={D:10.3e}, "
              "f={f:10.3e}, g={g:10.3e}, f_conj={f_conj:10.3e}, g_conj={g_conj:10.3e}".format(**record))
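# Illustrative sketch (not part of the original module): why the SUM
# all-reduce of the local products Akxk recovers v = A @ x when features
# are split by columns. Each node k holds a column block A_k and the
# matching slice x_k, and A @ x == sum_k A_k @ x_k. The shapes and the
# two-node split below are assumptions chosen for the demonstration.
import numpy as np

A = np.random.rand(6, 4)
x = np.random.rand(4)
blocks = [(A[:, :2], x[:2]), (A[:, 2:], x[2:])]   # two "nodes"
v = sum(Ak @ xk for Ak, xk in blocks)             # what all_reduce(op='SUM') computes
assert np.allclose(v, A @ x)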
def log(self, vk, Akxk, xk, i_iter, solver, delta_xk=None, delta_vk=None, intercept=0.0, cert_cv=0.0):
    # Exclude the time spent on logging from the running time
    self.running_time += time.time() - self.previous_time

    if self.mode == 'local':
        self._log_local(vk, Akxk, xk, i_iter, solver, delta_xk, delta_vk, cert_cv=cert_cv)
    elif self.mode == 'global':
        self._log_global(vk, Akxk, xk, i_iter, solver)
    elif self.mode is None:
        pass
    elif self.mode == 'all':
        # Log both locally and globally, swapping the active record list in turn
        self.records = self.records_l
        self._log_local(vk, Akxk, xk, i_iter, solver, delta_xk, delta_vk, cert_cv=cert_cv)
        self.records_l = self.records

        self.records = self.records_g
        self._log_global(vk, Akxk, xk, i_iter, solver, intercept=intercept)
        self.records_g = self.records

        if self.verbose >= 2:
            print(f"[{comm.get_rank()}] Certificate, Iter {self.records[-1]['i_iter']}: "
                  f"global_gap={self.records[-1]['gap']:10.5e}; "
                  f"local_gap={self.records_l[-1]['cert_gap']:10.5e}, "
                  f"local_cv={self.records_l[-1]['cv2']:10.5e}")
    else:
        raise NotImplementedError(
            "Expected mode in [local, global, all, None], got {}".format(self.mode))

    self.previous_time = time.time()
    max_running_time = comm.all_reduce(self.running_time, op='MAX')

    # Stop once the slowest node exceeds the time budget or the global
    # duality gap (tracked in mode 'all') is sufficiently small.
    gap = 100
    if self.mode == 'all':
        gap = self.records_g[-1]['gap']
    return max_running_time > self.exit_time or abs(gap) < 1e-6
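# Hypothetical driver loop (the solver/monitor names and the step API
# below are illustrative assumptions, not part of this module): `log`
# doubles as the stopping test, returning True once the slowest node
# exceeds `exit_time` or the global duality gap drops below 1e-6.
#
#     for i_iter in range(max_iter):
#         vk, Akxk, xk = solver.step(vk, Akxk, xk)
#         if monitor.log(vk, Akxk, xk, i_iter, solver):
#             break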
def save(self, Akxk, xk, weightname=None, logname=None):
    rank = self.rank
    if rank == 0 and logname:
        logfile = os.path.join(self.output_dir, logname)
        pd.DataFrame(self.records).to_csv(logfile)
        print("Data has been saved to {} on node 0".format(logfile))

    if weightname:
        if self.split_by_samples:
            # If samples are split, recover the weight by summing Akxk across nodes
            Akxk = comm.reduce(Akxk, root=0, op='SUM')
            weight = Akxk
        else:
            # If features are split, concatenate the local xk's
            size = [0] * self.world_size
            size[rank] = len(xk)
            size = comm.all_reduce(size, op='SUM')
            # Now size is [len(x_0), len(x_1), ..., len(x_{K-1})]
            weight = np.zeros(sum(size))
            weight[sum(size[:rank]): sum(size[:rank]) + len(xk)] = np.array(xk)
            weight = comm.reduce(weight, root=0, op='SUM')
        if rank == 0:
            weightfile = os.path.join(self.output_dir, weightname)
            weight.dump(weightfile)
            print("Weight has been saved to {} on node 0".format(weightfile))
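# Illustrative sketch (not part of the original module): the feature-split
# branch of `save`, simulated on a single process. Each "node" writes its
# slice into a zero-padded vector at offset sum(size[:rank]); summing the
# padded copies (the role of comm.reduce with op='SUM') reconstructs the
# full weight vector. The three slices below are assumptions.
import numpy as np

x_parts = [np.array([1.0, 2.0]), np.array([3.0]), np.array([4.0, 5.0])]
size = [len(p) for p in x_parts]        # gathered via all_reduce in the real code
weight = np.zeros(sum(size))
for rank, xk in enumerate(x_parts):
    padded = np.zeros(sum(size))
    padded[sum(size[:rank]): sum(size[:rank]) + len(xk)] = xk
    weight += padded                    # the SUM reduction
assert np.allclose(weight, [1.0, 2.0, 3.0, 4.0, 5.0])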
def _log_global(self, vk, Akxk, xk, i_iter, solver):
    record = {}
    record['i_iter'] = i_iter
    record['time'] = self.running_time

    # v := A x
    v = comm.all_reduce(Akxk, op='SUM')
    w = self.solver.grad_f(v)

    # Compute squared norm of consensus violation
    record['cv2'] = float(np.linalg.norm(vk - v, 2) ** 2)

    # Compute the value of minimizer objective
    val_gk = self.solver.gk(xk)
    record['g'] = comm.all_reduce(val_gk, op='SUM')
    record['f'] = self.solver.f(v)

    # Compute the value of conjugate objective
    val_gk_conj = self.solver.gk_conj(w)
    record['f_conj'] = self.solver.f_conj(w)
    record['g_conj'] = comm.all_reduce(val_gk_conj, op='SUM')

    if self.split_by_samples:
        n_samples = comm.all_reduce(len(solver.y), op='SUM')
    else:
        n_samples = len(solver.y)

    record['g'] /= n_samples
    record['g_conj'] /= n_samples
    record['f'] /= n_samples
    record['f_conj'] /= n_samples

    # The dual should be monotonically decreasing
    record['D'] = record['f'] + record['g']
    record['P'] = record['f_conj'] + record['g_conj']

    # Duality gap of the global problem
    record['gap'] = record['D'] + record['P']

    self.records.append(record)

    if self.rank == 0:
        print("Iter {i_iter:5}, Time {time:10.5e}: gap={gap:10.3e}, P={P:10.3e}, D={D:10.3e}, "
              "f={f:10.3e}, g={g:10.3e}, f_conj={f_conj:10.3e}, g_conj={g_conj:10.3e}".format(**record))
def log(self, vk, Akxk, xk, i_iter, solver):
    # Exclude the time spent on logging from the running time
    self.running_time += time.time() - self.previous_time

    if self.mode == 'local':
        self._log_local(vk, Akxk, xk, i_iter, solver)
    elif self.mode == 'global':
        self._log_global(vk, Akxk, xk, i_iter, solver)
    elif self.mode is None:
        pass
    else:
        raise NotImplementedError(
            "Expected mode in [local, global, None], got {}".format(self.mode))

    self.previous_time = time.time()
    max_running_time = comm.all_reduce(self.running_time, op='MAX')
    return max_running_time > self.exit_time
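# Illustrative note (not part of the original module): the stopping test
# reduces with op='MAX' so every rank evaluates the same boolean and all
# nodes leave the training loop in the same iteration; a node-local test
# could let ranks diverge and deadlock later collective calls. The
# numbers below are assumptions.
times = [12.1, 9.8, 15.3]               # per-node self.running_time
stop = max(times) > 14.0                # what all_reduce(op='MAX') yields
assert stop                             # identical outcome on every rank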