Example #1
    def __init__(self, act_dim, max_action):
        hidden_dim_1, hidden_dim_2 = 64, 64
        self.fc1 = layers.fc(size=hidden_dim_1, act='tanh')
        self.fc2 = layers.fc(size=hidden_dim_2, act='tanh')
        self.fc3 = layers.fc(size=act_dim, act='tanh')

        self.max_action = max_action
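
The snippet above only declares layers; the forward pass that uses them is not shown. A minimal sketch of a matching actor method, assuming the PaddlePaddle fluid / PARL style where each layers.fc wrapper is callable on a tensor, and assuming the method name policy (not part of the original):

    def policy(self, obs):
        hid1 = self.fc1(obs)             # tanh hidden layer
        hid2 = self.fc2(hid1)            # tanh hidden layer
        means = self.fc3(hid2)           # tanh output in [-1, 1]
        return means * self.max_action   # rescale to the environment's action bounds
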
Example #2
    def __init__(self, act_dim):
        hid1_size = act_dim * 50
        hid2_size = act_dim * 50

        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=act_dim, act=None)
Example #3
    def __init__(self, act_dim):
        hidden_size = 256
        self.net1 = layers.fc(size=hidden_size, act='relu')
        self.net2 = layers.fc(size=hidden_size, act='relu')
        self.net3 = layers.fc(size=hidden_size, act='relu')
        self.net4 = layers.fc(size=hidden_size, act='relu')
        self.net5 = layers.fc(size=act_dim, act=None)
Example #4
    def __init__(self, act_dim):
        self.act_dim = act_dim

        self.conv1 = layers.conv2d(num_filters=32,
                                   filter_size=2,
                                   stride=1,
                                   act='relu')
        self.conv2 = layers.conv2d(num_filters=8,
                                   filter_size=4,
                                   stride=1,
                                   padding=2,
                                   act='relu')
        self.conv3 = layers.conv2d(num_filters=8,
                                   filter_size=4,
                                   stride=1,
                                   padding=2,
                                   act='relu')
        self.conv4 = layers.conv2d(num_filters=8,
                                   filter_size=4,
                                   stride=1,
                                   padding=2,
                                   act='relu')
        self.fc1 = layers.fc(size=act_dim, act=None)
        self.fc2 = layers.fc(size=act_dim, act=None)
        self.fc3 = layers.fc(size=1, act='sigmoid')
Example #5
    def __init__(self, act_dim):
        hid1_size = 128
        hid2_size = 128

        self.fc1 = layers.fc(size=hid1_size, act='tanh')
        self.fc2 = layers.fc(size=hid2_size, act='tanh')
        self.fc3 = layers.fc(size=act_dim, act=None)
Example #6
    def __init__(self, act_dim):
        hid1_size = 256
        hid2_size = 256

        self.fc1 = layers.fc(size=hid1_size, act="relu")
        self.fc2 = layers.fc(size=hid2_size, act="relu")
        self.fc3 = layers.fc(size=act_dim, act=None)
Example #7
    def __init__(self, act_dim):
        hid1_size = 128
        hid2_size = 128
        # The network has three layers in total: fc(128) + fc(128) + fc(act_dim)
        self.fc1 = layers.fc(size=hid1_size, act="relu")
        self.fc2 = layers.fc(size=hid2_size, act="relu")
        self.fc3 = layers.fc(size=act_dim, act=None)
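
Q-networks of this shape usually expose a method that maps a batch of observations to one Q-value per action; the snippet stops at the constructor. A hedged sketch, with the method name value assumed:

    def value(self, obs):
        h1 = self.fc1(obs)   # fc(128), relu
        h2 = self.fc2(h1)    # fc(128), relu
        Q = self.fc3(h2)     # linear head: one Q-value per action
        return Q
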
Example #8
    def __init__(self, act_dim):
        self.act_dim = act_dim
        hid1_size = 32
        hid2_size = 32
        self.fc1 = layers.fc(size=hid1_size, act='tanh')
        self.fc2 = layers.fc(size=hid2_size, act='tanh')
        self.fcOut = layers.fc(size=act_dim, act='softmax')
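
The softmax on fcOut marks this as a discrete stochastic policy: the forward pass yields a probability distribution over the act_dim actions. A sketch under the same assumptions (the method name policy is not from the original):

    def policy(self, obs):
        h1 = self.fc1(obs)
        h2 = self.fc2(h1)
        act_prob = self.fcOut(h2)   # shape [batch, act_dim]; each row sums to 1
        return act_prob
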
Example #9
    def __init__(self, config):
        self.n_actions = config['n_actions']
        self.rnn_hidden_dim = config['rnn_hidden_dim']

        self.fc1 = layers.fc(size=self.rnn_hidden_dim, act=None, name='fc1')
        self.gru = layers.GRUCell(hidden_size=self.rnn_hidden_dim, name='gru')
        self.fc2 = layers.fc(size=self.n_actions, act=None, name='fc2')
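
This fc -> GRU -> fc layout is the usual recurrent agent network in value-decomposition methods; the hidden state has to be threaded through the forward pass. The sketch below assumes fluid's GRUCell convention of returning the new hidden state (everything here, including the method name and the explicit relu, is an assumption):

    def forward(self, obs, hidden_state):
        hid = layers.relu(self.fc1(obs))    # fc1 is linear, so apply relu explicitly
        h, _ = self.gru(hid, hidden_state)  # one recurrent step; h is the new hidden state
        q = self.fc2(h)                     # per-action Q-values for this timestep
        return q, h
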
Example #10
    def __init__(self, act_dim):
        hid1_size = 256
        hid2_size = 256

        self.fc1 = layers.fc(size=hid1_size, act='tanh')
        self.fc2 = layers.fc(size=hid2_size, act='tanh')
        self.fc3 = layers.fc(size=act_dim)
Example #11
    def __init__(self, act_dim):
        hidden_size_1 = 128
        hidden_size_2 = 128

        self.fc1 = layers.fc(size=hidden_size_1, act='relu')
        self.fc2 = layers.fc(size=hidden_size_2, act='relu')
        self.fc3 = layers.fc(size=act_dim, act=None)
Example #12
    def __init__(self, act_dim):
        hid1_size = 128
        hid2_size = 128
        # 3-layer fully connected network
        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=act_dim, act=None)
Example #13
    def __init__(self, act_dim):
        act_dim = act_dim
        hidden_dim_1 = hidden_dim_2 = 128

        self.fc1 = layers.fc(size=hidden_dim_1, act='tanh')
        self.fc2 = layers.fc(size=hidden_dim_2, act='tanh')
        self.fc3 = layers.fc(size=act_dim, act="softmax")
Example #14
    def __init__(self):
        # A plain fc layer stored directly as an attribute.
        self.fc1 = layers.fc(size=256,
                             act=None,
                             param_attr=ParamAttr(name='fc1.w'),
                             bias_attr=ParamAttr(name='fc1.b'))
        # fc layers nested inside a tuple, alongside plain integers.
        self.fc_tuple = (layers.fc(size=128,
                                   act=None,
                                   param_attr=ParamAttr(name='fc2.w'),
                                   bias_attr=ParamAttr(name='fc2.b')),
                         (layers.fc(size=1,
                                    act=None,
                                    param_attr=ParamAttr(name='fc3.w'),
                                    bias_attr=ParamAttr(name='fc3.b')),
                          10), 10)
        # fc layers nested inside a dict, including a nested sub-dict.
        self.fc_dict = {
            'k1':
            layers.fc(size=128,
                      act=None,
                      param_attr=ParamAttr(name='fc4.w'),
                      bias_attr=ParamAttr(name='fc4.b')),
            'k2': {
                'k22':
                layers.fc(size=1,
                          act=None,
                          param_attr=ParamAttr(name='fc5.w'),
                          bias_attr=ParamAttr(name='fc5.b'))
            },
            'k3':
            1,
        }
Example #15
    def __init__(self, act_dim):
        act_dim = act_dim
        hid1_size = act_dim * 10

        self.fc1 = layers.fc(size=hid1_size, act='tanh')
        #self.fc2 = layers.fc(size=hid1_size, act='tanh')
        self.fc3 = layers.fc(size=act_dim, act='softmax')
Example #16
    def __init__(self, act_dim, algo='DQN'):
        self.act_dim = act_dim

        self.conv1 = layers.conv2d(num_filters=32,
                                   filter_size=5,
                                   stride=1,
                                   padding=2,
                                   act='relu')
        self.conv2 = layers.conv2d(num_filters=32,
                                   filter_size=5,
                                   stride=1,
                                   padding=2,
                                   act='relu')
        self.conv3 = layers.conv2d(num_filters=64,
                                   filter_size=4,
                                   stride=1,
                                   padding=1,
                                   act='relu')
        self.conv4 = layers.conv2d(num_filters=64,
                                   filter_size=3,
                                   stride=1,
                                   padding=1,
                                   act='relu')

        self.algo = algo
        if algo == 'Dueling':
            self.fc1_adv = layers.fc(size=512, act='relu')
            self.fc2_adv = layers.fc(size=act_dim)
            self.fc1_val = layers.fc(size=512, act='relu')
            self.fc2_val = layers.fc(size=1)
        else:
            self.fc1 = layers.fc(size=act_dim)
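
When algo == 'Dueling', the two heads are combined by recentring the advantage stream around its mean and adding the state-value stream. A sketch of such a value method, assuming layers.flatten and layers.reduce_mean pass through from fluid and omitting any pooling between the conv layers:

    def value(self, obs):
        out = self.conv1(obs)
        out = self.conv2(out)
        out = self.conv3(out)
        out = self.conv4(out)
        flat = layers.flatten(out, axis=1)               # [batch, C*H*W]
        if self.algo == 'Dueling':
            adv = self.fc2_adv(self.fc1_adv(flat))       # advantage A(s, a)
            val = self.fc2_val(self.fc1_val(flat))       # state value V(s)
            # Q(s, a) = V(s) + A(s, a) - mean_a A(s, a)
            Q = adv + (val - layers.reduce_mean(adv, dim=1, keep_dim=True))
        else:
            Q = self.fc1(flat)
        return Q
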
Example #17
    def __init__(self, act_dim):
        hid1_size = 400
        hid2_size = 300

        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=act_dim, act='tanh')
Example #18
    def __init__(self, act_dim):
        hid1_size = 256
        hid2_size = 128
        # 3 fully connected layers
        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=act_dim, act=None)
Example #19
    def __init__(self, act_dim):
        super(DecoderGeneratorModel, self).__init__()
        self.conv1 = self.conv2d_helper(num_filters=32,
                                        filter_size=8,
                                        stride=4,
                                        padding=1,
                                        act='relu')
        self.conv2 = self.conv2d_helper(num_filters=64,
                                        filter_size=4,
                                        stride=2,
                                        padding=2,
                                        act='relu')
        self.conv3 = self.conv2d_helper(num_filters=64,
                                        filter_size=3,
                                        stride=1,
                                        padding=0,
                                        act='relu')
        self.flat = self.flatten_helper(axis=1)
        self.fc = self.fc_helper(size=512, act='relu')

        self.policy_fc = layers.fc(size=act_dim)
        self.value_fc = layers.fc(size=1)

        # self.encoder = [self.conv1, self.conv2, self.conv3, self.flat, self.fc]
        self.decoder = self.decoder_generator()
Example #20
    def __init__(self):
        hid1_size = 400
        hid2_size = 300

        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=1, act=None)
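
The 400/300 hidden sizes with a single linear output are the classic shape of a DDPG/TD3 critic, whose forward pass takes both observation and action. A sketch assuming the action is concatenated to the observation at the input (some implementations concatenate after the first layer instead):

    def value(self, obs, act):
        concat = layers.concat([obs, act], axis=1)   # join state and action features
        hid1 = self.fc1(concat)
        hid2 = self.fc2(hid1)
        Q = self.fc3(hid2)                           # scalar Q(s, a) per sample
        return Q
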
Example #21
    def __init__(self, act_dim):

        # Configure the model structure

        hid_size = 100

        self.fc1 = layers.fc(size=hid_size, act='relu')
        self.fc2 = layers.fc(size=act_dim, act='tanh')
Example #22
    def __init__(self, act_dim):

        hid1_size = 32
        hid2_size = 64
        # 3-layer fully connected network
        self.fc1 = layers.fc(size=hid1_size, act='relu', name="fc1")
        self.fc2 = layers.fc(size=hid2_size, act='relu', name="fc2")
        self.fc3 = layers.fc(size=act_dim, act=None, name="fc3")
Example #23
    def __init__(self, act_dim):
        act_dim = act_dim
        hid1_size = 512
        hid2_size = 128

        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=act_dim, act='softmax')
Example #24
    def __init__(self, act_dim):
        self.act_dim = act_dim

        hid1_size = 128
        hid2_size = 128
        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=self.act_dim, act=None)
Example #25
    def __init__(self, act_dim):
        self._act_dim = act_dim
        self._fc_1 = layers.fc(size=512, act='relu')
        self._fc_2 = layers.fc(size=256, act='relu')
        self._fc_3 = layers.fc(size=128, act='tanh')

        self.value_fc = layers.fc(size=1)
        self.policy_fc = layers.fc(size=act_dim)
Example #26
    def __init__(self, act_num):
        """Build a three-layer fully connected network.

        :param act_num: output size of the final layer.
        """
        self.fc1 = layers.fc(size=64, act='relu')
        self.fc2 = layers.fc(size=64, act='relu')
        self.fc3 = layers.fc(size=act_num, act=None)
Example #27
    def __init__(self, act_dim):
        hid1_size = 400
        hid2_size = 300

        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.mean_linear = layers.fc(size=act_dim)
        self.log_std_linear = layers.fc(size=act_dim)
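
The parallel mean_linear and log_std_linear heads identify this as a Gaussian actor (SAC-style): the network outputs the mean and log standard deviation of the action distribution, and the log-std is usually clipped for numerical stability. A sketch with an assumed clipping range:

    def policy(self, obs):
        hid1 = self.fc1(obs)
        hid2 = self.fc2(hid1)
        means = self.mean_linear(hid2)
        log_std = self.log_std_linear(hid2)
        # Assumed range; typical values are around [-20, 2].
        log_std = layers.clip(log_std, min=-20.0, max=2.0)
        return means, log_std
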
Example #28
    def __init__(self, act_dim):

        hid1_size = 32
        hid2_size = 32
        # 3-layer fully connected network
        self.fc1 = layers.fc(size=hid1_size, act='relu')
        self.fc2 = layers.fc(size=hid2_size, act='relu')
        self.fc3 = layers.fc(size=act_dim, act='tanh')
Example #29
    def __init__(self, act_dim):
        hid0_size = 64
        hid1_size = 32
        hid2_size = 16

        self.fc0 = layers.fc(size=hid0_size, act='relu', name="catfc0")
        self.fc1 = layers.fc(size=hid1_size, act='relu', name="catfc1")
        self.fc2 = layers.fc(size=hid2_size, act='relu', name="catfc2")
        self.fc3 = layers.fc(size=act_dim, act=None, name="catfc3")
Example #30
    def __init__(self):
        ######################################################################
        ######################################################################
        #
        # 4. Configure the model structure
        hid_size = 128

        self.fc1 = layers.fc(size=hid_size, act='relu')
        self.fc2 = layers.fc(size=1, act=None)
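
This final model is a simple state-value baseline: one hidden layer and a scalar output. A sketch of the corresponding forward method; squeezing the trailing unit dimension is an assumption (some codebases keep the [batch, 1] shape):

    def value(self, obs):
        hid = self.fc1(obs)              # fc(128), relu
        V = self.fc2(hid)                # [batch, 1]
        V = layers.squeeze(V, axes=[1])  # -> [batch]
        return V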