Example #1
    def __init__(self, net_param, action_param, is_training):
        SegmentationApplication.__init__(self, net_param, action_param,
                                         is_training)
        tf.logging.info('starting decay learning segmentation application')
        self.learning_rate = None
        self.momentum = None
        max_lr = action_param.lr
        self.max = action_param.max_iter
        # spend a fraction `pct` of training ramping up, the rest decaying
        pct = 1 / max_lr if max_lr > 3 else 0.3
        a = int(self.max * pct)
        b = self.max - a
        phases = (a, b)

        # phase 1 ramps from max_lr/20 up to max_lr;
        # phase 2 decays from max_lr down to max_lr/20000
        div_factor = 20
        final_div = div_factor * 1e3
        low_lr = max_lr / div_factor
        min_lr = max_lr / final_div
        lr_cfg = ((low_lr, max_lr), (max_lr, min_lr))
        # momentum is cycled in the opposite direction to the learning rate
        moms = (action_param.mom, action_param.mom_end)
        mom_cfg = (moms, (moms[1], moms[0]))

        self.lr_prop = steps({'steps_cfg': lr_cfg, 'phases': phases})
        self.mom_prop = steps({'steps_cfg': mom_cfg, 'phases': phases})
        self.current_lr = self.lr_prop[0].start
        self.mom = self.mom_prop[0].start
        self.res = {}
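The `steps` helper used above is not shown in the example. A minimal sketch of what it could look like, assuming each phase linearly interpolates between a start and an end value (the `Phase` tuple, the `steps` signature, and the linear rule are all assumptions inferred from how the snippet indexes `self.lr_prop[0].start`):

import collections

Phase = collections.namedtuple('Phase', ['start', 'end', 'n_iter'])

def steps(cfg):
    # pair each (start, end) range with its iteration budget
    return [Phase(start=s, end=e, n_iter=n)
            for (s, e), n in zip(cfg['steps_cfg'], cfg['phases'])]

def value_at(phases, it):
    # linear interpolation inside the phase containing iteration `it`
    for phase in phases:
        if it < phase.n_iter:
            frac = it / float(phase.n_iter)
            return phase.start + frac * (phase.end - phase.start)
        it -= phase.n_iter
    return phases[-1].end

With, say, lr=0.01 and max_iter=1000, `pct` falls back to 0.3, so the phases are (300, 700): the learning rate ramps from 5e-4 to 1e-2 over the first 300 iterations, then decays to 5e-7 over the remaining 700.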
Example #2
    def __init__(self, net_param, action_param, is_training):
        SegmentationApplication.__init__(self, net_param, action_param,
                                         is_training)
        tf.logging.info('starting decay learning segmentation application')
        self.learning_rate = None
        self.current_lr = action_param.lr
        if self.action_param.validation_every_n > 0:
            raise NotImplementedError("validation process is not implemented "
                                      "in this demo.")
Example #3
    def __init__(self, net_param, action_param, is_training):
        SegmentationApplication.__init__(
            self, net_param, action_param, is_training)
        tf.logging.info('starting decay learning segmentation application')
        self.learning_rate = None
        self.current_lr = action_param.lr
        if self.action_param.validation_every_n > 0:
            raise NotImplementedError("validation process is not implemented "
                                      "in this demo.")
Example #4
    def connect_data_and_network(self,
                                 outputs_collector=None,
                                 gradients_collector=None):
        data_dict = self.get_sampler()[0][0].pop_batch_op()
        image = tf.cast(data_dict['image'], tf.float32)
        net_out = self.net(image, self.is_training)

        if self.is_training:
            with tf.name_scope('Optimiser'):
                self.learning_rate = tf.placeholder(tf.float64, shape=[])
                optimiser_class = OptimiserFactory.create(
                    name=self.action_param.optimiser)
                self.optimiser = optimiser_class.get_instance(
                    learning_rate=self.learning_rate)
            loss_func = LossFunction(
                n_class=self.segmentation_param.num_classes,
                loss_type=self.action_param.loss_type)
            data_loss = loss_func(prediction=net_out,
                                  ground_truth=data_dict.get('label', None),
                                  weight_map=data_dict.get('weight', None))

            self.current_loss = data_loss
            loss = data_loss
            reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)

            if self.net_param.decay > 0.0 and reg_losses:
                reg_loss = tf.reduce_mean(
                    [tf.reduce_mean(reg_loss) for reg_loss in reg_losses])
                loss = data_loss + reg_loss
            grads = self.optimiser.compute_gradients(loss)
            # collecting gradient variables
            gradients_collector.add_to_collection([grads])
            # collecting output variables
            outputs_collector.add_to_collection(var=self.current_loss,
                                                name='loss',
                                                average_over_devices=False,
                                                collection=CONSOLE)
            outputs_collector.add_to_collection(var=self.learning_rate,
                                                name='lr',
                                                average_over_devices=False,
                                                collection=CONSOLE)
            outputs_collector.add_to_collection(var=data_loss,
                                                name='dice_loss',
                                                average_over_devices=True,
                                                summary_type='scalar',
                                                collection=TF_SUMMARIES)
        else:
            # converting logits into final output for
            # classification probabilities or argmax classification labels
            SegmentationApplication.connect_data_and_network(
                self, outputs_collector, gradients_collector)
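The `learning_rate` placeholder above only takes effect if a value is fed into it on every iteration. In NiftyNet applications this is usually done by overriding `set_iteration_update`; a sketch along those lines, where the halve-every-1000-iterations rule is purely an illustrative assumption, not the schedule used by any of these examples:

    def set_iteration_update(self, iteration_message):
        # runs once per iteration, before the session call;
        # illustrative rule: halve the rate every 1000 iterations
        if iteration_message.current_iter > 0 and \
                iteration_message.current_iter % 1000 == 0:
            self.current_lr = self.current_lr / 2.0
        # feed the Python-side value into the graph placeholder
        # built in connect_data_and_network
        iteration_message.data_feed_dict[self.learning_rate] = self.current_lr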
Example #5
    def __init__(self, net_param, action_param, is_training):
        SegmentationApplication.__init__(self, net_param, action_param,
                                         is_training)
        tf.logging.info('starting decay learning segmentation application')
        self.learning_rate = None
        self.current_lr = action_param.lr
        self.init_lr = action_param.lr
        self.prec_loss = 10.0
        self.current_loss = None
        self.count = 0
        self.tx = 0.2
        self.cpt = 0
        self.theta = float(action_param.max_iter)
        self.beta = float(self.theta * 3)
        self.avg = 0
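The counters initialised here (`prec_loss`, `count`, `tx`, ...) point at a loss-driven decay, but the update rule itself is not part of the snippet. A speculative sketch of how such a rule might look in `set_iteration_update`, assuming `self.current_loss` is refreshed with the latest numeric loss value each iteration:

    def set_iteration_update(self, iteration_message):
        # speculative plateau rule: if the loss improved by less than
        # `tx`, count a stall; several stalls in a row halve the rate
        if self.current_loss is not None:
            if self.prec_loss - self.current_loss < self.tx:
                self.count += 1
            else:
                self.count = 0
            if self.count >= 5:
                self.current_lr *= 0.5
                self.count = 0
            self.prec_loss = self.current_loss
        iteration_message.data_feed_dict[self.learning_rate] = self.current_lr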
Example #6
    def connect_data_and_network(self,
                                 outputs_collector=None,
                                 gradients_collector=None):
        data_dict = self.get_sampler()[0][0].pop_batch_op()
        image = tf.cast(data_dict['image'], tf.float32)
        net_out = self.net(image, self.is_training)

        if self.is_training:
            with tf.name_scope('Optimiser'):
                self.learning_rate = tf.placeholder(tf.float32, shape=[])
                optimiser_class = OptimiserFactory.create(
                    name=self.action_param.optimiser)
                self.optimiser = optimiser_class.get_instance(
                    learning_rate=self.learning_rate)
            loss_func = LossFunction(
                n_class=self.segmentation_param.num_classes,
                loss_type=self.action_param.loss_type)
            data_loss = loss_func(
                prediction=net_out,
                ground_truth=data_dict.get('label', None),
                weight_map=data_dict.get('weight', None))

            loss = data_loss
            reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)

            if self.net_param.decay > 0.0 and reg_losses:
                reg_loss = tf.reduce_mean(
                    [tf.reduce_mean(reg_loss) for reg_loss in reg_losses])
                loss = data_loss + reg_loss
            grads = self.optimiser.compute_gradients(loss)
            # collecting gradient variables
            gradients_collector.add_to_collection([grads])
            # collecting output variables
            outputs_collector.add_to_collection(
                var=data_loss, name='dice_loss',
                average_over_devices=False, collection=CONSOLE)
            outputs_collector.add_to_collection(
                var=self.learning_rate, name='lr',
                average_over_devices=False, collection=CONSOLE)
            outputs_collector.add_to_collection(
                var=data_loss, name='dice_loss',
                average_over_devices=True, summary_type='scalar',
                collection=TF_SUMMARIES)
        else:
            # converting logits into final output for
            # classification probabilities or argmax classification labels
            SegmentationApplication.connect_data_and_network(
                self, outputs_collector, gradients_collector)
Example #7
    def __init__(self, net_param, action_param, is_training):
        SegmentationApplication.__init__(self, net_param, action_param,
                                         is_training)
        tf.logging.info('starting decay learning segmentation application')
        self.learning_rate = None
        max_lr = action_param.lr
        self.max = action_param.max_iter
        pct = 1 / max_lr if max_lr > 3 else 0.3
        a = int(self.max * pct)
        b = self.max - a
        phases = (a, b)

        div_factor = 20
        final_div = div_factor * 1e3
        low_lr = max_lr / div_factor
        min_lr = max_lr / final_div
        step_cfg = ((low_lr, max_lr), (max_lr, min_lr))

        self.lr_prop = steps({'steps_cfg': step_cfg, 'phases': phases})
        self.current_lr = self.lr_prop[0].start
        self.res = {}
        print("\n\nThe maximum learning rate should be greater than 1e-3\n\n")
Example #8
    def __init__(self, net_param, action_param, is_training):
        SegmentationApplication.__init__(self, net_param, action_param,
                                         is_training)
        tf.logging.info('starting segmentation application')
Example #9
    def __init__(self, net_param, action_param, is_training):
        SegmentationApplication.__init__(
            self, net_param, action_param, is_training)
        tf.logging.info('starting decay learning segmentation application')
        self.learning_rate = None