def prepare_for_training(self, optimizer=None):
    """Prepare for neural network training.

    Compiles the model and creates
    `Keras Callbacks <https://keras.io/callbacks/>`_ to be used for training.

    Note that this method will be implicitly called once by :func:`train`
    (with default arguments) if not done so explicitly beforehand.

    Parameters
    ----------
    optimizer : obj or None
        Instance of a `Keras Optimizer <https://keras.io/optimizers/>`_ to be used for training.
        If ``None`` (default), uses ``Adam`` with the learning rate specified in ``config``.

    """
    if optimizer is None:
        optimizer = Adam(lr=self.config.train_learning_rate)

    masked_dist_loss = {'mse': masked_loss_mse,
                        'mae': masked_loss_mae}[self.config.train_dist_loss]
    prob_loss = 'binary_crossentropy'

    def split_dist_true_mask(dist_true_mask):
        return tf.split(dist_true_mask, num_or_size_splits=[self.config.n_rays, -1], axis=-1)

    def dist_loss(dist_true_mask, dist_pred):
        dist_true, dist_mask = split_dist_true_mask(dist_true_mask)
        return masked_dist_loss(dist_mask, reg_weight=self.config.train_background_reg)(dist_true, dist_pred)

    def relevant_mae(dist_true_mask, dist_pred):
        dist_true, dist_mask = split_dist_true_mask(dist_true_mask)
        return masked_metric_mae(dist_mask)(dist_true, dist_pred)

    def relevant_mse(dist_true_mask, dist_pred):
        dist_true, dist_mask = split_dist_true_mask(dist_true_mask)
        return masked_metric_mse(dist_mask)(dist_true, dist_pred)

    self.keras_model.compile(optimizer,
                             loss=[prob_loss, dist_loss],
                             loss_weights=list(self.config.train_loss_weights),
                             metrics={'prob': kld,
                                      'dist': [relevant_mae, relevant_mse]})

    self.callbacks = []
    if self.basedir is not None:
        self.callbacks += self._checkpoint_callbacks()

        if self.config.train_tensorboard:
            if IS_TF_1:
                self.callbacks.append(CARETensorBoard(log_dir=str(self.logdir), prefix_with_timestamp=False,
                                                      n_images=3, write_images=True, prob_out=False))
            else:
                self.callbacks.append(TensorBoard(log_dir=str(self.logdir/'logs'), write_graph=False, profile_batch=0))

    if self.config.train_reduce_lr is not None:
        rlrop_params = self.config.train_reduce_lr
        if 'verbose' not in rlrop_params:
            rlrop_params['verbose'] = True
        # TF2: add as first callback to put 'lr' in the logs for TensorBoard
        self.callbacks.insert(0, ReduceLROnPlateau(**rlrop_params))

    self._model_prepared = True
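# Minimal usage sketch for the method above. The import path, `Config2D` arguments, and data
# variables below are illustrative assumptions (StarDist-style API), not taken from this file.
# Calling `prepare_for_training` explicitly lets you pass a non-default optimizer; otherwise
# `train` calls it once with default arguments.
from keras.optimizers import Adam
from stardist.models import Config2D, StarDist2D

model = StarDist2D(Config2D(n_rays=32), name='demo_model', basedir='models')
model.prepare_for_training(optimizer=Adam(lr=1e-4))
# model.train(X, Y, validation_data=(X_val, Y_val))  # would not trigger the implicit call again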
def prepare_for_training(self, optimizer=None):
    """Prepare for neural network training.

    Compiles the model and creates
    `Keras Callbacks <https://keras.io/callbacks/>`_ to be used for training.

    Note that this method will be implicitly called once by :func:`train`
    (with default arguments) if not done so explicitly beforehand.

    Parameters
    ----------
    optimizer : obj or None
        Instance of a `Keras Optimizer <https://keras.io/optimizers/>`_ to be used for training.
        If ``None`` (default), uses ``Adam`` with the learning rate specified in ``config``.

    """
    if optimizer is None:
        optimizer = Adam(lr=self.config.train_learning_rate)

    input_mask = self.keras_model.inputs[1]  # second input layer is mask for dist loss

    dist_loss = {'mse': masked_loss_mse,
                 'mae': masked_loss_mae}[self.config.train_dist_loss](input_mask,
                                                                      reg_weight=self.config.train_background_reg)
    prob_loss = 'binary_crossentropy'

    self.keras_model.compile(optimizer,
                             loss=[prob_loss, dist_loss],
                             loss_weights=list(self.config.train_loss_weights),
                             metrics={'prob': kld,
                                      'dist': [masked_metric_mae(input_mask), masked_metric_mse(input_mask)]})

    self.callbacks = []
    if self.basedir is not None:
        self.callbacks += self._checkpoint_callbacks()

        if self.config.train_tensorboard:
            # self.callbacks.append(TensorBoard(log_dir=str(self.logdir), write_graph=False))
            self.callbacks.append(CARETensorBoard(log_dir=str(self.logdir), prefix_with_timestamp=False,
                                                  n_images=3, write_images=True, prob_out=False))

    if self.config.train_reduce_lr is not None:
        rlrop_params = self.config.train_reduce_lr
        if 'verbose' not in rlrop_params:
            rlrop_params['verbose'] = True
        self.callbacks.append(ReduceLROnPlateau(**rlrop_params))

    self._model_prepared = True
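# Hedged illustration of the masking idea used above (a simplified sketch, not the library's
# actual `masked_loss_mae` implementation): `input_mask` is the second model input and is
# non-zero only at annotated object pixels, so the distance loss is evaluated only there,
# while `reg_weight` adds a small penalty that pushes predicted distances towards zero on
# background pixels.
from keras import backend as K

def masked_loss_mae_sketch(mask, reg_weight=0.0):
    def _loss(y_true, y_pred):
        # absolute error counts only where the mask is set, normalized by the mask area
        masked_err = K.mean(mask * K.abs(y_true - y_pred), axis=-1)
        norm = K.mean(mask) + K.epsilon()
        # background regularizer: discourage large distance predictions outside the mask
        reg = K.mean((1 - mask) * K.abs(y_pred), axis=-1)
        return masked_err / norm + reg_weight * reg
    return _loss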
def prepare_for_training(self, optimizer=None):
    """Prepare for neural network training.

    Compiles the model and creates
    `Keras Callbacks <https://keras.io/callbacks/>`_ to be used for training.

    Note that this method will be implicitly called once by :func:`train`
    (with default arguments) if not done so explicitly beforehand.

    Parameters
    ----------
    optimizer : obj or None
        Instance of a `Keras Optimizer <https://keras.io/optimizers/>`_ to be used for training.
        If ``None`` (default), uses ``Adam`` with the learning rate specified in ``config``.

    """
    if optimizer is None:
        optimizer = Adam(lr=self.config.train_learning_rate)

    input_mask = self.keras_model.inputs[1]  # second input layer is mask for dist loss

    ## do a max pooling (maximum filter) on the distance map (input_mask) to further emphasize
    ## pixel regions where we need to put more weight
    # mask_pool = K.pool2d(input_mask, (5, 5), strides=(1, 1), padding='same', pool_mode='max', data_format="channels_last")

    dist_loss = {'mse': masked_loss_mse,
                 'mae': masked_loss_mae}[self.config.train_dist_loss](input_mask,
                                                                      reg_weight=self.config.train_background_reg,
                                                                      norm_by_mask=self.config.norm_by_mask)
    # prob_loss = {'huber': huber_loss(delta=self.config.train_huber_delta), 'mae': 'mean_absolute_error',
    #              'bce': 'binary_crossentropy', 'mse': 'mean_squared_error'}[self.config.train_prob_loss]

    # NOTE: the assignment below overrides the config-selected distance loss above with MAPE
    dist_loss = masked_loss_mape(input_mask,
                                 reg_weight=self.config.train_background_reg,
                                 norm_by_mask=self.config.norm_by_mask)

    # prob_loss = 'binary_crossentropy'
    # prob_loss = weighted_mse_loss(input_mask, weight=1)
    # prob_loss = cosine_bce_loss(input_mask, offset=0.5)
    prob_loss = cosine_bce_clipped_loss(input_mask, offset=0.5)
    # prob_loss = weighted_bce_loss(input_mask, weight=1)

    self.keras_model.compile(optimizer,
                             loss=[prob_loss, dist_loss],
                             loss_weights=list(self.config.train_loss_weights),
                             metrics={'prob': kld,
                                      'dist': [masked_metric_mae(input_mask), masked_metric_mse(input_mask)]})

    self.callbacks = []
    if self.basedir is not None:
        self.callbacks += self._checkpoint_callbacks()

        if self.config.train_tensorboard:
            # self.callbacks.append(TensorBoard(log_dir=str(self.logdir), write_graph=False))
            self.callbacks.append(CARETensorBoard(log_dir=str(self.logdir), prefix_with_timestamp=True,
                                                  n_images=3, write_images=True, prob_out=False,
                                                  write_graph=True, compute_histograms=True))

    if self.config.train_reduce_lr is not None:
        rlrop_params = self.config.train_reduce_lr
        if 'verbose' not in rlrop_params:
            rlrop_params['verbose'] = True
        self.callbacks.append(ReduceLROnPlateau(**rlrop_params))

    if self.config.train_one_cycle_lr_max is not None:
        lrmax = self.config.train_one_cycle_lr_max
        self.callbacks.append(OneCycleScheduler(max_lr=lrmax))

    self._model_prepared = True
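# Hedged sketch of the one-cycle learning-rate idea behind the `OneCycleScheduler` callback
# used above (that callback is not part of stock Keras, so its exact behaviour and signature
# are assumptions here). This minimal callback ramps the learning rate linearly up to
# `max_lr` over the first half of training and back down over the second half, using only
# standard Keras facilities.
from keras import backend as K
from keras.callbacks import Callback

class SimpleOneCycle(Callback):
    def __init__(self, max_lr, total_steps, min_lr=None):
        super().__init__()
        self.max_lr = max_lr
        self.total_steps = total_steps
        self.min_lr = min_lr if min_lr is not None else max_lr / 25.0
        self._step = 0

    def on_batch_begin(self, batch, logs=None):
        # piecewise-linear schedule: ramp up for the first half of training, down for the second
        frac = min(self._step / max(1, self.total_steps - 1), 1.0)
        if frac < 0.5:
            lr = self.min_lr + (self.max_lr - self.min_lr) * (frac / 0.5)
        else:
            lr = self.max_lr - (self.max_lr - self.min_lr) * ((frac - 0.5) / 0.5)
        K.set_value(self.model.optimizer.lr, lr)
        self._step += 1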