Example #1
    def __init__(self):
        self.fc1 = layers.fc(size=512, act='relu', name='fc1')
        self.fc2 = layers.fc(size=512, act='relu', name='fc2')
        self.fc3 = layers.fc(size=512, act='relu', name='fc3')
        self.fc4 = layers.fc(size=512, act='relu', name='fc4')
        self.fc5 = layers.fc(size=512, act='relu', name='fc5')
        self.fc6 = layers.fc(size=500, act='relu', name='fc6')

        self.month_embedding = layers.embedding(size=[12, 64],
                                                name='emb_month')
        self.hour_embedding = layers.embedding(size=[24, 64], name='emb_hour')
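
In PARL 1.x, each layers.fc / layers.embedding call above returns a callable layer object, so the network is wired up later by applying those objects to input variables. A minimal sketch of a forward pass over this stack (assumed usage, not from the source; obs, month_id, and hour_id are hypothetical input variables):

    def predict(self, obs, month_id, hour_id):
        # hypothetical forward pass: look up the time embeddings and
        # join them with the dense observation features
        month_vec = self.month_embedding(month_id)
        hour_vec = self.hour_embedding(hour_id)
        h = fluid.layers.concat([obs, month_vec, hour_vec], axis=1)
        for fc in (self.fc1, self.fc2, self.fc3, self.fc4, self.fc5):
            h = fc(h)
        return self.fc6(h)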
Example #2
    def __init__(self, label_dim):
        self.fc1 = layers.fc(size=1024, act='relu', name='fc1')
        self.fc2 = layers.fc(size=900, act='relu', name='fc2')
        self.fc3 = layers.fc(size=800, act='relu', name='fc3')
        self.fc4 = layers.fc(size=700, act='relu', name='fc4')
        self.fc5 = layers.fc(size=512, act='relu', name='fc5')
        self.fc6 = layers.fc(size=label_dim, act='relu', name='fc6')

        self.month_embedding = layers.embedding(size=[13, 256],
                                                name='emb_month')
        self.hour_embedding = layers.embedding(size=[24, 256], name='emb_hour')
        self.line_status_embedding = layers.embedding(size=[400, 256],
                                                      name='emb_line_status')
Example #3
    def __init__(self, act_dim):
        hid1_size = 256
        hid2_size = 256
        # 3-layer fully connected network
        self.emb_1 = layers.embedding(size=[128, 64])
        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=act_dim, act=None)
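
A forward method for this head would embed the discrete ids, run the two hidden layers, and emit one raw score per action. A minimal sketch under the same assumptions (obs_ids is a hypothetical int64 input variable):

    def value(self, obs_ids):
        # hypothetical Q-style forward pass
        h = self.emb_1(obs_ids)
        h = self.fc1(h)
        h = self.fc2(h)
        return self.fc3(h)  # no activation on the output layer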
Example #4
    def __init__(self):
        self.fc1 = layers.fc(100)
        self.fc2 = layers.fc(100)
        self.fc3 = layers.fc(100, bias_attr=False)
        self.fc4 = layers.fc(100, param_attr=False)
        self.fc5 = layers.fc(100, name="fc", bias_attr=False)
        self.fc6 = layers.fc(100, param_attr=fluid.initializer.Xavier())
        self.embedding = layers.embedding((100, 128))
        self.embedding_custom = layers.embedding((100, 128),
                                                 name="embedding_custom")
        # Although self.conv2d shares its parameter with self.embedding here,
        # the sharing may be invalid because the parameter shapes do not match.
        self.conv2d = layers.conv2d(
            num_filters=64,
            filter_size=3,
            param_attr=self.embedding.attr_holder.param_attr,
            name="my_conv2d")

        self.batch_norm = layers.batch_norm()
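
The attr_holder pattern above hands one layer's ParamAttr to another layer to share its weights, and the inline comment warns that this is only valid when the parameter shapes agree. A minimal sketch of a shape-compatible case (assumed usage; fc_a and fc_b are hypothetical names):

        # hypothetical: two fc layers with the same output size, applied to
        # inputs of the same width, can validly share one weight matrix
        self.fc_a = layers.fc(100, name="fc_a")
        self.fc_b = layers.fc(100, param_attr=self.fc_a.attr_holder.param_attr)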
Example #5
# Assumed imports for this snippet (PARL 1.x / PaddlePaddle fluid);
# default_param_clip() is a helper defined elsewhere in the source module.
import paddle.fluid as fluid
from paddle.fluid import ParamAttr
from parl import layers


def default_embedding(size, name):
    gradient_clip = default_param_clip()
    reg = fluid.regularizer.L2Decay(1e-5)  # IMPORTANT: prevents overfitting.
    embed = layers.embedding(name=name,
                             size=size,
                             param_attr=ParamAttr(
                                 initializer=fluid.initializer.Xavier(),
                                 gradient_clip=gradient_clip,
                                 regularizer=reg),
                             is_sparse=False)
    return embed
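
A usage sketch for this helper (assumed; the vocabulary size 10000, the dimension 128, and the word_ids variable are illustrative):

word_ids = fluid.layers.data(name='word_ids', shape=[1], dtype='int64')
word_embed = default_embedding(size=[10000, 128], name='emb_word')
word_vec = word_embed(word_ids)  # embedding lookup, one 128-dim row per id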
Example #6
def default_embedding(size, name, embed_clip, regularizer=None):
    gradient_clip = default_param_clip() if embed_clip else None
    embed = layers.embedding(name=name,
                             size=size,
                             param_attr=ParamAttr(
                                 initializer=fluid.initializer.Xavier(),
                                 gradient_clip=gradient_clip,
                                 regularizer=regularizer),
                             is_sparse=False,  # turn on lazy_mode when using Adam
                             is_distributed=False,  # TODO: https://github.com/PaddlePaddle/Paddle/issues/15133
                             )
    return embed
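
A note on the two flags: with is_sparse=True the embedding emits sparse gradient rows, which is what the lazy_mode comment refers to, since Adam's lazy_mode then updates only the rows that were actually looked up. Both variants keep is_sparse=False, and is_distributed stays off pending the linked Paddle issue.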
Example #7
    def __init__(self):
        self.fc1 = layers.fc(64, bias_attr=False)
        self.fc2 = layers.fc(64, bias_attr=False)
        self.fc3 = layers.fc(64, name="fc")
        self.fc4 = layers.fc(64, name="fc")
        self.embedding = layers.embedding(
            (100, 64), param_attr=self.fc1.attr_holder.param_attr)
        self.created_param = layers.create_parameter(
            shape=[100],
            dtype='float32',
            default_initializer=fluid.initializer.Uniform(low=-1.0, high=1.0))
        self.batch_norm = layers.batch_norm()
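
This last example collects the naming and sharing mechanics in one place: fc1 and fc2 rely on auto-generated parameter names, fc3 and fc4 pass an explicit name, the embedding reuses fc1's ParamAttr through attr_holder (the same mechanism as in Example #4), and layers.create_parameter yields a free-standing trainable parameter outside any layer.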