Code example #1
from bert4keras.backend import keras, get_all_attributes


class OptimizerWrapper(keras.optimizers.Optimizer):  # base class assumed; the snippet only shows __init__
    def __init__(self, optimizer, **kwargs):
        super(OptimizerWrapper, self).__init__(**kwargs)
        self.optimizer = optimizer
        self._optimizer_attributes = []
        # Copy every attribute of the wrapped optimizer that the wrapper
        # does not already define, and record which ones were copied.
        for k, v in get_all_attributes(self.optimizer).items():
            if k not in dir(self):
                setattr(self, k, v)
                self._optimizer_attributes.append(k)
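This __init__ exposes every attribute of the wrapped optimizer on the wrapper itself, so code that expects a plain Keras optimizer (for example, one that reads optimizer.lr) keeps working against the wrapper. Below is a minimal sketch of the same attribute-forwarding idea; the Wrapper and SGDLike classes are hypothetical, and vars() stands in for bert4keras's get_all_attributes:

# Hypothetical illustration of attribute forwarding (not bert4keras code).
class Wrapper:
    def __init__(self, wrapped):
        self.wrapped = wrapped
        self._forwarded = []
        for k, v in vars(wrapped).items():
            if k not in dir(self):           # never shadow the wrapper's own attributes
                setattr(self, k, v)          # expose the wrapped object's attribute directly
                self._forwarded.append(k)    # remember what was copied


class SGDLike:  # stand-in for a real optimizer
    def __init__(self):
        self.lr = 0.01
        self.momentum = 0.9


w = Wrapper(SGDLike())
print(w.lr, w.momentum)  # 0.01 0.9, forwarded from the wrapped object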
Code example #2
#! -*- coding: utf-8 -*-
# Custom layers

import tensorflow as tf
from bert4keras.backend import keras, K, get_all_attributes


# Equivalent to: from keras.layers import *
locals().update(get_all_attributes(keras.layers))
initializers = keras.initializers
activations = keras.activations


def sequence_masking(x, mask, mode=0, axis=None, heads=1):
    """Conditionally mask a sequence tensor.
    mask: a 0/1 matrix of shape (batch_size, seq_len);
    mode: if 0, multiply x by the mask directly;
          if 1, subtract a large positive number at the padded positions.
    axis: the axis of the sequence dimension, 1 by default;
    heads: how many times the batch dimension has to be repeated.
    """
    if mask is None or mode not in [0, 1]:
        return x
    else:
        if heads != 1:
            # Repeat the mask for every attention head, then fold the head
            # dimension back into the batch dimension.
            mask = K.expand_dims(mask, 1)
            mask = K.tile(mask, (1, heads, 1))
            mask = K.reshape(mask, (-1, K.shape(mask)[2]))
        if axis is None:
            axis = 1
        if axis == -1: