def _setup_costs(self, log):
    """Assemble the total training cost tensor and store it on the trainer.

    Sums the enabled data-loss terms (cross-entropy, IoU, Dice), each scaled
    by its configured weight from self._losses_and_weights, then adds the
    L1/L2 regularization costs obtained from the network. The result is
    stored in self._total_cost. Must be called exactly once per trainer;
    if self._total_cost is already set, logs an error and exits.

    Args:
        log: Logger object providing print3() for progress/error messages.
    """
    if self._total_cost is not None:  # idiomatic form of `not ... is None`
        log.print3("ERROR: Problem in Trainer. It was called to setup the total cost, but it was not None." +
                   "\n\t This should not happen. Setup should be called only once.\n Exiting!")
        exit(1)

    # Cost functions
    cost = 0
    # NOTE(review): presumably a tensor placeholder for ground-truth labels
    # of the training output — confirm against the network's feed setup.
    y_gt = self._net._output_gt_tensor_feeds['train']['y_gt']

    # dict.get() replaces the duplicated `"k" in d and d["k"] is not None`
    # test followed by two more d["k"] lookups in each branch.
    w_xentr = self._losses_and_weights.get("xentr")
    if w_xentr is not None:
        log.print3("COST: Using cross entropy with weight: " + str(w_xentr))
        # Per-class weighting vector for the cross-entropy term.
        w_per_cl_vec = self._compute_w_per_class_vector_for_xentr(
            self._net.num_classes, y_gt)
        cost += w_xentr * cfs.x_entr(
            self._net.finalTargetLayer.p_y_given_x_train, y_gt, w_per_cl_vec)

    w_iou = self._losses_and_weights.get("iou")
    if w_iou is not None:
        log.print3("COST: Using iou loss with weight: " + str(w_iou))
        cost += w_iou * cfs.iou(
            self._net.finalTargetLayer.p_y_given_x_train, y_gt)

    w_dsc = self._losses_and_weights.get("dsc")
    if w_dsc is not None:
        log.print3("COST: Using dsc loss with weight: " + str(w_dsc))
        cost += w_dsc * cfs.dsc(
            self._net.finalTargetLayer.p_y_given_x_train, y_gt)

    # Regularization is added on top of the weighted data losses.
    cost_L1_reg = self._L1_reg_weight * self._net._get_L1_cost()
    cost_L2_reg = self._L2_reg_weight * self._net._get_L2_cost()
    self._total_cost = cost + cost_L1_reg + cost_L2_reg
def _setup_costs(self, log):
    """Assemble the total training cost for the mixup-enabled trainer.

    Like the plain cost setup, but the network feeds two ground-truth
    tensors (y_gt0 / y_gt1, the two mixed samples) plus the mixup lambda.
    The xentr/iou/dsc terms use the first ground truth (y_gt0); the
    one-sided focal loss term consumes both ground truths, the mixup
    lambda, and the configured gamma/margin settings. L1/L2 regularization
    is added and the result is stored in self._total_cost. Must be called
    exactly once; exits if self._total_cost is already set.

    Args:
        log: Logger object providing print3() for progress/error messages.
    """
    if self._total_cost is not None:  # idiomatic form of `not ... is None`
        log.print3("ERROR: Problem in Trainer. It was called to setup the total cost, but it was not None." +
                   "\n\t This should not happen. Setup should be called only once.\n Exiting!")
        exit(1)

    # Cost functions
    cost = 0
    y_gtmix0 = self._net._output_gt_tensor_feeds['train']['y_gt0']
    y_gtmix1 = self._net._output_gt_tensor_feeds['train']['y_gt1']
    mixup_lambda = self._net._output_gt_tensor_feeds['train']['mixup_lambda']
    # Non-focal losses are computed against the first (primary) ground truth.
    y_gt = y_gtmix0

    # dict.get() replaces the duplicated `"k" in d and d["k"] is not None`
    # test followed by two more d["k"] lookups in each branch.
    w_xentr = self._losses_and_weights.get("xentr")
    if w_xentr is not None:
        log.print3("COST: Using cross entropy with weight: " + str(w_xentr))
        w_per_cl_vec = self._compute_w_per_class_vector_for_xentr(
            self._net.num_classes, y_gt)
        cost += w_xentr * cfs.x_entr(
            self._net.finalTargetLayer.p_y_given_x_train, y_gt, w_per_cl_vec)

    w_iou = self._losses_and_weights.get("iou")
    if w_iou is not None:
        log.print3("COST: Using iou loss with weight: " + str(w_iou))
        cost += w_iou * cfs.iou(
            self._net.finalTargetLayer.p_y_given_x_train, y_gt)

    w_dsc = self._losses_and_weights.get("dsc")
    if w_dsc is not None:
        log.print3("COST: Using dsc loss with weight: " + str(w_dsc))
        cost += w_dsc * cfs.dsc(
            self._net.finalTargetLayer.p_y_given_x_train, y_gt)

    w_focal = self._losses_and_weights.get("focaloneside")
    if w_focal is not None:
        log.print3(
            "COST: Using focal loss one side cross entropy with weight: " +
            str(w_focal))
        # NOTE(review): "focalonesidegama" is accessed directly (KeyError if
        # absent when focaloneside is set) — presumably guaranteed by config
        # validation; verify.
        log.print3(
            "COST: Using focal loss one side cross entropy with gama: " +
            str(self._losses_and_weights["focalonesidegama"]))
        # Here the per-class weight vector is derived from BOTH mixed ground
        # truths, unlike the xentr branch above.
        w_per_cl_vec = self._compute_w_per_class_vector_for_xentr(
            self._net.num_classes, y_gtmix0, y_gtmix1)
        cost += w_focal * cfs.focaloneside(
            self._net.finalTargetLayer.network_output, y_gtmix0, y_gtmix1,
            self._losses_and_weights["focalonesidegama"], w_per_cl_vec,
            self._mixup_biasmargin, self._marginm, mixup_lambda)

    # Regularization is added on top of the weighted data losses.
    cost_L1_reg = self._L1_reg_weight * self._net._get_L1_cost()
    cost_L2_reg = self._L2_reg_weight * self._net._get_L2_cost()
    self._total_cost = cost + cost_L1_reg + cost_L2_reg
def compute_costs(self, log, p_y_given_x):
    """Build the total training cost from an externally supplied prediction.

    Variant of the cost setup that takes the predicted class-probability
    tensor as an argument instead of reading it from the network's final
    target layer. Sums the enabled weighted losses (cross-entropy, IoU,
    Dice), adds L1/L2 regularization computed via cfs.cost_L1/cost_L2 over
    the network's regularizable parameters, and stores the result in
    self._total_cost. Must be called exactly once; exits if already set.

    Args:
        log: Logger object providing print3() for progress/error messages.
        p_y_given_x: Predicted class-probability tensor for the training
            output (replaces finalTargetLayer.p_y_given_x_train).
    """
    # Needs to be run with initialized self._num_epochs_trained_tfv
    if self._total_cost is not None:  # idiomatic form of `not ... is None`
        log.print3("ERROR: Problem in Trainer. It was called to setup the total cost, but it was not None." +
                   "\n\t This should not happen. Setup should be called only once.\n Exiting!")
        exit(1)

    # Cost functions
    cost = 0
    y_gt = self._net._output_gt_tensor_feeds['train']['y_gt']

    # dict.get() replaces the duplicated `"k" in d and d["k"] is not None`
    # test followed by two more d["k"] lookups in each branch.
    w_xentr = self._losses_and_weights.get("xentr")
    if w_xentr is not None:
        log.print3("COST: Using cross entropy with weight: " + str(w_xentr))
        w_per_cl_vec = self._compute_w_per_class_vector_for_xentr(
            self._net.num_classes, y_gt)
        cost += w_xentr * cfs.x_entr(p_y_given_x, y_gt, w_per_cl_vec)

    w_iou = self._losses_and_weights.get("iou")
    if w_iou is not None:
        log.print3("COST: Using iou loss with weight: " + str(w_iou))
        cost += w_iou * cfs.iou(p_y_given_x, y_gt)

    w_dsc = self._losses_and_weights.get("dsc")
    if w_dsc is not None:
        log.print3("COST: Using dsc loss with weight: " + str(w_dsc))
        cost += w_dsc * cfs.dsc(p_y_given_x, y_gt)

    # Regularization here is computed by the cost-function helpers over the
    # network's parameter list (unlike the net._get_L*_cost() variant).
    cost_L1_reg = self._L1_reg_weight * cfs.cost_L1(
        self._net.params_for_L1_L2_reg())
    cost_L2_reg = self._L2_reg_weight * cfs.cost_L2(
        self._net.params_for_L1_L2_reg())
    self._total_cost = cost + cost_L1_reg + cost_L2_reg