Code Example #1
    def _create_accumulators(self, block, parameters):
        assert isinstance(block, framework.Block)

        main_block = block.program.global_block()
        # Create beta1 and beta2 power tensors
        beta_shape = [1]
        self._beta1_pow_acc = self.helper.create_global_variable(
            name=unique_name('beta1_pow_acc'),
            dtype='float32',
            shape=beta_shape,
            lod_level=0,
            persistable=True)
        self.helper.set_variable_initializer(self._beta1_pow_acc,
                                             initializer=Constant(self._beta1))

        self._beta2_pow_acc = self.helper.create_global_variable(
            name=unique_name('beta2_pow_acc'),
            dtype='float32',
            shape=beta_shape,
            lod_level=0,
            persistable=True)

        self.helper.set_variable_initializer(self._beta2_pow_acc,
                                             initializer=Constant(self._beta2))

        # Create accumulator tensors for first and second moments
        for p in parameters:
            self._add_accumulator(self._moment1_acc_str, p)
            self._add_accumulator(self._moment2_acc_str, p)
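
The two power accumulators persist beta1^t and beta2^t across steps; the Adam op reads them for bias correction, which is why they are initialized to beta1 and beta2 and marked persistable. A framework-free sketch of the update they support (illustrative only, not the Paddle kernel; param, grad, and the hyperparameter values are assumptions):

# Plain-Python sketch of the Adam step backed by the accumulators above.
beta1, beta2, eps, lr = 0.9, 0.999, 1e-8, 0.001
m = v = 0.0          # first/second moment accumulators (per parameter)
beta1_pow = beta1    # matches Constant(self._beta1) initialization
beta2_pow = beta2    # matches Constant(self._beta2) initialization

def adam_step(param, grad):
    global m, v, beta1_pow, beta2_pow
    m = beta1 * m + (1 - beta1) * grad
    v = beta2 * v + (1 - beta2) * grad * grad
    m_hat = m / (1 - beta1_pow)   # bias correction via the beta powers
    v_hat = v / (1 - beta2_pow)
    beta1_pow *= beta1            # the framework rescales these each step
    beta2_pow *= beta2
    return param - lr * m_hat / (v_hat ** 0.5 + eps)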
Code Example #2
    def _add_accumulator(self, name, param, dtype=None, fill_value=0.0):
        """Utility function to add an accumulator for a parameter

        Args:
            name: name of the accumulator
            param: parameter variable for which the accumulator is to be added
            dtype: data type of the accumulator variable; defaults to the
                parameter's dtype
            fill_value: value used to initialize the accumulator variable
        """
        if (name in self._accumulators
                and param.name in self._accumulators[name]):
            raise Exception(
                "Accumulator {} already exists for parameter {}".format(
                    name, param.name))

        assert isinstance(self.helper, LayerHelper)
        var = self.helper.create_global_variable(name=unique_name(name),
                                                 persistable=True,
                                                 dtype=dtype or param.dtype,
                                                 type=param.type,
                                                 shape=param.shape)
        self.helper.set_variable_initializer(
            var, initializer=Constant(value=float(fill_value)))
        self._accumulators[name][param.name] = var
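
The registry built here is naturally paired with a lookup. A minimal sketch of such a companion getter, mirroring the dict layout above (hypothetical code, not copied from the project):

    def _get_accumulator(self, name, param):
        """Illustrative counterpart to _add_accumulator: fetch the
        accumulator variable registered for a parameter."""
        if (name not in self._accumulators
                or param.name not in self._accumulators[name]):
            raise Exception(
                "Accumulator {} does not exist for parameter {}".format(
                    name, param.name))
        return self._accumulators[name][param.name]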
Code Example #3
File: evaluator.py  Project: guochaorong/paddle_v3
    def __init__(self,
                 input,
                 gt_label,
                 gt_box,
                 gt_difficult,
                 class_num,
                 background_label=0,
                 overlap_threshold=0.5,
                 evaluate_difficult=True,
                 ap_version='integral'):
        super(DetectionMAP, self).__init__("map_eval")

        gt_label = layers.cast(x=gt_label, dtype=gt_box.dtype)
        gt_difficult = layers.cast(x=gt_difficult, dtype=gt_box.dtype)
        label = layers.concat([gt_label, gt_difficult, gt_box], axis=1)

        # calculate mean average precision (mAP) of current mini-batch
        map = layers.detection_map(input,
                                   label,
                                   class_num,
                                   background_label,
                                   overlap_threshold=overlap_threshold,
                                   evaluate_difficult=evaluate_difficult,
                                   ap_version=ap_version)

        self.create_state(dtype='int32', shape=None, suffix='accum_pos_count')
        self.create_state(dtype='float32', shape=None, suffix='accum_true_pos')
        self.create_state(dtype='float32',
                          shape=None,
                          suffix='accum_false_pos')

        self.has_state = None
        var = self.helper.create_variable(persistable=True,
                                          dtype='int32',
                                          shape=[1])
        self.helper.set_variable_initializer(
            var, initializer=Constant(value=int(0)))
        self.has_state = var

        # calculate accumulative mAP
        accum_map = layers.detection_map(input,
                                         label,
                                         class_num,
                                         background_label,
                                         overlap_threshold=overlap_threshold,
                                         evaluate_difficult=evaluate_difficult,
                                         has_state=self.has_state,
                                         input_states=self.states,
                                         out_states=self.states,
                                         ap_version=ap_version)

        layers.fill_constant(shape=self.has_state.shape,
                             value=1,
                             dtype=self.has_state.dtype,
                             out=self.has_state)

        self.cur_map = map
        self.accum_map = accum_map
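
A hedged usage sketch of this evaluator in a training loop; get_map_var and reset come from the evaluator classes shown here, while nmsed_out, difficult, exe, feeder, train_reader, and num_passes are assumed to be defined elsewhere:

map_eval = fluid.evaluator.DetectionMAP(
    input=nmsed_out, gt_label=gt_label, gt_box=gt_box,
    gt_difficult=difficult, class_num=21)
cur_map_var, accum_map_var = map_eval.get_map_var()

for pass_id in range(num_passes):
    map_eval.reset(exe)  # clear accumulative state at the start of each pass
    for data in train_reader():
        cur_map, accum_map = exe.run(fluid.default_main_program(),
                                     feed=feeder.feed(data),
                                     fetch_list=[cur_map_var, accum_map_var])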
Code Example #4
File: param_attr.py  Project: zhaoxiaoze/lac_chiniese
    def set_default_bias_initializer(self):
        """
        Set the default bias initializer to Constant(0.0).

        Args:
            None.

        Returns:
            None.
        """
        self.set_default_initializer(Constant(0.0))
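
The weight-side counterpart follows the same pattern; a sketch assuming a Xavier default (the actual default initializer may differ by version):

    def set_default_param_initializer(self):
        """
        Set the default parameter initializer to Xavier.
        """
        self.set_default_initializer(Xavier())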
Code Example #5
    def _create_param_lr(self, param_and_grad):
        # create a learning-rate variable for every parameter
        param = param_and_grad[0]
        param_lr = param.optimize_attr['learning_rate']
        param_lr_shape = [1]
        param_lr_var = self.helper.create_global_variable(
            name=unique_name("learning_rate"),
            dtype='float32',
            shape=param_lr_shape,
            lod_level=1,
            persistable=True)
        # fold the per-parameter multiplier into the global learning rate
        param_lr = param_lr * self._learning_rate
        self.helper.set_variable_initializer(var=param_lr_var,
                                             initializer=Constant(param_lr))
        return param_lr_var
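
The multiplier read from optimize_attr['learning_rate'] is usually set through ParamAttr; a hedged usage sketch giving one layer's bias twice the global rate (x is an assumed input variable):

import paddle.fluid as fluid

# bias_attr's learning_rate is the per-parameter multiplier that
# _create_param_lr folds into the global rate above.
fc = fluid.layers.fc(input=x,
                     size=128,
                     bias_attr=fluid.ParamAttr(learning_rate=2.0))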
Code Example #6
    def _create_accumulators(self, block, parameters):
        # Create beta1 power accumulator tensor
        beta_shape = [1]
        self._beta1_pow_acc = self.helper.create_global_variable(
            name=unique_name('beta1_pow_acc'),
            dtype='float32',
            shape=beta_shape,
            lod_level=0,
            persistable=True)
        self.helper.set_variable_initializer(self._beta1_pow_acc,
                                             initializer=Constant(self._beta1))

        # Create accumulator tensors for first moment and infinity norm
        for p in parameters:
            self._add_accumulator(self._moment_acc_str, p)
            self._add_accumulator(self._inf_norm_acc_str, p)
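
Unlike Adam in example #1, Adamax keeps only the beta1 power: the infinity-norm accumulator needs no bias correction. A framework-free sketch of the step these accumulators back (illustrative values and names):

beta1, beta2, eps, lr = 0.9, 0.999, 1e-8, 0.002
m = u = 0.0        # first moment and infinity norm (per parameter)
beta1_pow = beta1  # matches Constant(self._beta1) initialization

def adamax_step(param, grad):
    global m, u, beta1_pow
    m = beta1 * m + (1 - beta1) * grad
    u = max(beta2 * u, abs(grad) + eps)  # eps keeps the denominator nonzero
    step = lr * m / ((1 - beta1_pow) * u)
    beta1_pow *= beta1
    return param - step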
Code Example #7
File: param_attr.py  Project: wobushihuair/Paddle-1
    def set_default_bias_initializer(self):
        self.set_default_initializer(Constant(0.0))