def save(self):
    """Persist the model to disk: summary text, pickled model state, and —
    when autobackup is enabled — a backup at most once per wall-clock hour.
    """
    # Human-readable training summary next to the model files.
    Path(self.get_summary_path()).write_text(self.get_summary_text())

    # Let the concrete model flush its own weights first.
    self.onSave()

    state = {
        'iter': self.iter,
        'options': self.options,
        'loss_history': self.loss_history,
        'sample_for_preview': self.sample_for_preview,
        'choosed_gpu_indexes': self.choosed_gpu_indexes,
    }
    pathex.write_bytes_safe(self.model_data_path, pickle.dumps(state))

    if not self.autobackup:
        return

    # Trigger a backup only when the wall-clock hour has changed since
    # the last one.
    hour_now = time.localtime().tm_hour
    if self.autobackup_current_hour != hour_now:
        self.autobackup_current_hour = hour_now
        self.create_backup()
def save_weights(self, filename, force_dtype=None):
    """Serialize this Saveable's weights to *filename* with pickle.

    The pickled object is a dict mapping each variable's name (with the
    leading Saveable name stripped) to its evaluated numpy value,
    optionally cast to *force_dtype*.
    """
    tensors = self.get_weights()

    if self.name is None:
        raise Exception("name must be defined.")
    own_name = self.name

    # Evaluate all variables in a single session call.
    values = nn.tf_sess.run(tensors)

    weight_dict = {}
    for tensor, value in zip(tensors, values):
        # Split off the leading scope component; it must match our name.
        parts = tensor.name.split('/', 1)
        if own_name != parts[0]:
            raise Exception("weight first name != Saveable.name")
        if force_dtype is not None:
            value = value.astype(force_dtype)
        weight_dict[parts[1]] = value

    # Protocol 4 keeps the file readable across supported Python versions.
    pathex.write_bytes_safe(Path(filename), pickle.dumps(weight_dict, 4))
def save(self):
    """Persist summary text and pickled model state; create a backup once
    every `autobackup_hour` hours of elapsed wall time (0 disables it).
    """
    Path(self.get_summary_path()).write_text(self.get_summary_text())

    # Concrete model writes its own weight files here.
    self.onSave()

    state = {
        'iter': self.iter,
        'options': self.options,
        'loss_history': self.loss_history,
        'sample_for_preview': self.sample_for_preview,
        'choosed_gpu_indexes': self.choosed_gpu_indexes,
    }
    pathex.write_bytes_safe(self.model_data_path, pickle.dumps(state))

    if self.autobackup_hour == 0:
        return  # autobackup disabled

    elapsed_hours = int((time.time() - self.autobackup_start_time) // 3600)
    if elapsed_hours > 0 and elapsed_hours % self.autobackup_hour == 0:
        # Advance the reference time by one interval so the next backup
        # becomes due autobackup_hour hours after this one.
        self.autobackup_start_time += self.autobackup_hour * 3600
        self.create_backup()
def save(self):
    """Persist the model: summary text, pickled state, and — when autobackup
    is enabled and the wall-clock hour has changed — a rotated backup
    (slots 01..15) including preview images with the loss-history graph.
    """
    # Human-readable training summary, stored alongside the model files.
    summary_path = self.get_strpath_storage_for_file('summary.txt')
    Path(summary_path).write_text(self.get_summary_text())

    # Let the concrete model flush its own weight files first.
    self.onSave()

    model_data = {
        'iter': self.iter,
        'options': self.options,
        'loss_history': self.loss_history,
        'sample_for_preview': self.sample_for_preview,
        'choosed_gpu_indexes': self.choosed_gpu_indexes,
    }
    pathex.write_bytes_safe(self.model_data_path, pickle.dumps(model_data))

    if not self.autobackup:
        return

    # Files that form a complete backup: every model file plus the summary
    # and the pickled model data written above.
    bckp_filename_list = [self.get_strpath_storage_for_file(filename)
                          for _, filename in self.get_model_filename_list()]
    bckp_filename_list += [str(summary_path), str(self.model_data_path)]

    # Back up at most once per wall-clock hour.
    current_hour = time.localtime().tm_hour
    if self.autobackup_current_hour == current_hour:
        return
    self.autobackup_current_hour = current_hour

    # Rotate backup slots 01..15, oldest last: slot 15 is purged, every
    # other occupied slot shifts one step up, and slot 01 receives a
    # fresh copy of the current files.
    for i in range(15, 0, -1):
        idx_backup_path = self.autobackups_path / ('%.2d' % i)
        # fixed typo: was `next_idx_packup_path`
        next_idx_backup_path = self.autobackups_path / ('%.2d' % (i + 1))

        if idx_backup_path.exists():
            if i == 15:
                pathex.delete_all_files(idx_backup_path)
            else:
                next_idx_backup_path.mkdir(exist_ok=True)
                pathex.move_all_files(idx_backup_path, next_idx_backup_path)

        if i == 1:
            idx_backup_path.mkdir(exist_ok=True)
            for filename in bckp_filename_list:
                shutil.copy(str(filename),
                            str(idx_backup_path / Path(filename).name))

    # Write the current previews into the newest slot; after the loop
    # idx_backup_path is slot 01.  Each image gets the loss-history graph
    # stacked on top before being saved.
    for name, bgr in self.get_previews():
        filepath = idx_backup_path / ('preview_%s.jpg' % name)
        # NOTE(review): shape[1]/shape[2] are passed as (width, channels)
        # assuming an HWC image — confirm against get_loss_history_preview.
        preview_lh = ModelBase.get_loss_history_preview(
            self.loss_history, self.iter, bgr.shape[1], bgr.shape[2])
        img = (np.concatenate([preview_lh, bgr], axis=0) * 255).astype(np.uint8)
        cv2_imwrite(filepath, img)