Code Example #1
File: evaluator.py   Project: zhaoxiaoze/lac_chiniese
    def __init__(self, input, label, ignored_tokens=None, **kwargs):
        super(EditDistance, self).__init__("edit_distance", **kwargs)
        main_program = self.helper.main_program
        if main_program.current_block().idx != 0:
            raise ValueError("You can only invoke Evaluator in root block")

        self.total_distance = self._create_state(dtype='float32',
                                                 shape=[1],
                                                 suffix='total_distance')
        self.seq_num = self._create_state(dtype='int64',
                                          shape=[1],
                                          suffix='seq_num')
        self.instance_error = self._create_state(dtype='int64',
                                                 shape=[1],
                                                 suffix='instance_error')
        # Per-batch edit distances (one per sequence) and the sequence count.
        distances, seq_num = layers.edit_distance(
            input=input, label=label, ignored_tokens=ignored_tokens)

        # A sequence counts as an instance error unless its distance is exactly 0.
        zero = layers.fill_constant(shape=[1], value=0.0, dtype='float32')
        compare_result = layers.equal(distances, zero)
        compare_result_int = layers.cast(x=compare_result, dtype='int')
        seq_right_count = layers.reduce_sum(compare_result_int)
        instance_error_count = layers.elementwise_sub(x=seq_num,
                                                      y=seq_right_count)
        total_distance = layers.reduce_sum(distances)

        # Accumulate the per-batch results into the persistent evaluator states.
        layers.sums(input=[self.total_distance, total_distance],
                    out=self.total_distance)
        layers.sums(input=[self.seq_num, seq_num], out=self.seq_num)
        layers.sums(input=[self.instance_error, instance_error_count],
                    out=self.instance_error)

        # Expose the per-batch metrics so they can be fetched during training.
        self.metrics.append(total_distance)
        self.metrics.append(instance_error_count)
Code Example #2
File: evaluator.py   Project: absorbguo/Paddle
    def __init__(self, input, label, ignored_tokens=None, **kwargs):
        super(EditDistance, self).__init__("edit_distance", **kwargs)
        main_program = self.helper.main_program
        if main_program.current_block().idx != 0:
            raise ValueError("You can only invoke Evaluator in root block")

        self.total_distance = self.create_state(
            dtype='float32', shape=[1], suffix='total_distance')
        self.seq_num = self.create_state(
            dtype='int64', shape=[1], suffix='seq_num')
        self.instance_error = self.create_state(
            dtype='int64', shape=[1], suffix='instance_error')
        distances, seq_num = layers.edit_distance(
            input=input, label=label, ignored_tokens=ignored_tokens)

        zero = layers.fill_constant(shape=[1], value=0.0, dtype='float32')
        compare_result = layers.equal(distances, zero)
        compare_result_int = layers.cast(x=compare_result, dtype='int')
        seq_right_count = layers.reduce_sum(compare_result_int)
        instance_error_count = layers.elementwise_sub(
            x=seq_num, y=seq_right_count)
        total_distance = layers.reduce_sum(distances)
        layers.sums(
            input=[self.total_distance, total_distance],
            out=self.total_distance)
        layers.sums(input=[self.seq_num, seq_num], out=self.seq_num)
        layers.sums(
            input=[self.instance_error, instance_error_count],
            out=self.instance_error)
        self.metrics.append(total_distance)
        self.metrics.append(instance_error_count)
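
Both constructors above wire an accumulating edit-distance evaluator into the program: per-batch distances are summed into persistent state variables, while the raw per-batch values are exposed through self.metrics. Below is a minimal usage sketch for an old (pre-2.0) PaddlePaddle Fluid script, not taken from either project; the data layers, the CPU executor, and the assumption that eval() returns the averaged distance and instance-error rate are illustrative.

import paddle.fluid as fluid

# Integer token sequences: decoded predictions and ground-truth labels
# (illustrative shapes; both are LoD tensors with one sequence level).
predict = fluid.layers.data(name='predict', shape=[1], dtype='int64', lod_level=1)
label = fluid.layers.data(name='label', shape=[1], dtype='int64', lod_level=1)

# Constructing the evaluator appends the edit-distance ops and the persistent
# accumulators (total_distance, seq_num, instance_error) to the default program.
distance_evaluator = fluid.evaluator.EditDistance(input=predict, label=label)

exe = fluid.Executor(fluid.CPUPlace())
exe.run(fluid.default_startup_program())

distance_evaluator.reset(exe)  # zero the accumulated states before an epoch
# for data in train_reader():
#     exe.run(fluid.default_main_program(), feed=feeder.feed(data),
#             fetch_list=distance_evaluator.metrics)
avg_distance, avg_instance_error = distance_evaluator.eval(exe)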
Code Example #3
File: clip.py   Project: absorbguo/Paddle
    def process_context(self, context, param, grad):
        # On first use, create the per-group state: a list that collects each
        # parameter's local squared norm, plus the group's clip threshold.
        if self.group_name not in context:
            context[self.group_name] = []
            context[self.group_name + "_clip_value"] = self.clip_norm
            context[self.group_name + "_clip"] = layers.fill_constant(
                shape=[1], dtype="float32", value=self.clip_norm)
        else:
            if not self.clip_norm == context[self.group_name + "_clip_value"]:
                raise ValueError(
                    "All parameters' 'clip_norm' of a same group should be the same"
                )

        # Accumulate sum(grad ** 2) for this parameter; the group's global norm
        # is computed later from all the entries collected here.
        local_norm_var = layers.reduce_sum(input=layers.pow(x=grad, factor=2.0))
        context[self.group_name].append(local_norm_var)

        self.context = context
Code Example #4
    def process_context(self, context, param, grad):
        if self.group_name not in context:
            context[self.group_name] = []
            context[self.group_name + "_clip_value"] = self.clip_norm
            context[self.group_name + "_clip"] = layers.fill_constant(
                shape=[1], dtype="float32", value=self.clip_norm)
        else:
            if not self.clip_norm == context[self.group_name + "_clip_value"]:
                raise ValueError(
                    "All parameters' 'clip_norm' of a same group should be the same"
                )

        local_norm_var = layers.reduce_sum(
            input=layers.pow(x=grad, factor=2.0))
        context[self.group_name].append(local_norm_var)

        self.context = context
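
Both process_context implementations above belong to a GradientClipByGlobalNorm attribute: each call records one parameter's squared gradient norm so the group's global norm can be formed afterwards. The sketch below shows one plausible way such clipping was registered in old PaddlePaddle Fluid; the toy regression network, the clip_norm of 5.0, and the SGD optimizer are illustrative assumptions, not part of the snippets above.

import paddle.fluid as fluid

# Toy network: a single fully-connected regression layer.
x = fluid.layers.data(name='x', shape=[13], dtype='float32')
y = fluid.layers.data(name='y', shape=[1], dtype='float32')
pred = fluid.layers.fc(input=x, size=1)
cost = fluid.layers.square_error_cost(input=pred, label=y)
avg_loss = fluid.layers.mean(x=cost)

# Register global-norm clipping for every trainable parameter. During
# minimize() below, process_context() is called once per (param, grad) pair,
# collecting sum(grad ** 2) into context[group_name] before the shared
# scaling factor is built from those entries.
fluid.clip.set_gradient_clip(
    clip=fluid.clip.GradientClipByGlobalNorm(clip_norm=5.0))

sgd = fluid.optimizer.SGD(learning_rate=0.01)
sgd.minimize(avg_loss)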