Code example #1
def deconv_relu_drop(x, kernalshape, scope=None):
    # Transposed-convolution block: Xavier-initialized weights, bias, ReLU.
    # Relies on helpers defined elsewhere in the repository
    # (weight_xavier_init, bias_variable, deconv2d).
    # Note: despite the '_drop' suffix, no dropout is applied in this block.
    with tf.name_scope(scope):
        # For a deconvolution kernel [h, w, out_channels, in_channels],
        # fan-in uses the last dimension and fan-out the second-to-last.
        W = weight_xavier_init(shape=kernalshape,
                               n_inputs=kernalshape[0] * kernalshape[1] *
                               kernalshape[-1],
                               n_outputs=kernalshape[-2],
                               activefuncation='relu',
                               variable_name=scope + 'W')
        B = bias_variable([kernalshape[-2]], variable_name=scope + 'B')
        dconv = tf.nn.relu(deconv2d(x, W) + B)
        return dconv
Code example #2
def conv_sigmod(x, kernalshape, scope=None):
    # Convolution block followed by a sigmoid activation (e.g. an output
    # layer producing per-pixel probabilities). Relies on helpers defined
    # elsewhere in the repository (weight_xavier_init, bias_variable, conv2d).
    with tf.name_scope(scope):
        # For a convolution kernel [h, w, in_channels, out_channels],
        # fan-in uses the third dimension and fan-out the last.
        W = weight_xavier_init(shape=kernalshape,
                               n_inputs=kernalshape[0] * kernalshape[1] *
                               kernalshape[2],
                               n_outputs=kernalshape[-1],
                               activefuncation='sigmoid',
                               variable_name=scope + 'W')
        B = bias_variable([kernalshape[-1]], variable_name=scope + 'B')
        conv = conv2d(x, W) + B
        conv = tf.nn.sigmoid(conv)
        return conv
Code example #3
def down_sampling(x,
                  kernalshape,
                  phase,
                  drop_conv,
                  height=None,
                  width=None,
                  scope=None):
    # Down-sampling block: stride-2 convolution, group normalization, ReLU,
    # and dropout. Relies on helpers defined elsewhere in the repository
    # (weight_xavier_init, bias_variable, conv2d, normalizationlayer).
    with tf.name_scope(scope):
        W = weight_xavier_init(shape=kernalshape,
                               n_inputs=kernalshape[0] * kernalshape[1] *
                               kernalshape[2],
                               n_outputs=kernalshape[-1],
                               activefuncation='relu',
                               variable_name=scope + 'W')
        B = bias_variable([kernalshape[-1]], variable_name=scope + 'B')
        # A stride of 2 halves the spatial resolution instead of using pooling.
        conv = conv2d(x, W, 2) + B
        conv = normalizationlayer(conv,
                                  phase,
                                  height=height,
                                  width=width,
                                  norm_type='group',
                                  scope=scope)
        # TF 1.x dropout: drop_conv is the keep probability.
        conv = tf.nn.dropout(tf.nn.relu(conv), drop_conv)
        return conv
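
Usage sketch (not part of the source): assuming the helper functions used above (weight_xavier_init, bias_variable, conv2d, deconv2d, normalizationlayer) are defined in the same repository and a TensorFlow 1.x graph is being built, the three blocks could be chained roughly as follows. The tensor shapes, scope names, and placeholders are illustrative only.

import tensorflow as tf

# Hypothetical inputs; shapes chosen purely for illustration.
x = tf.placeholder(tf.float32, shape=[None, 96, 96, 1], name='x')
phase = tf.placeholder(tf.bool, name='phase')         # train / inference flag
drop_conv = tf.placeholder(tf.float32, name='drop')   # keep probability

# 96x96x1 -> 48x48x32 via the stride-2 convolution block.
down1 = down_sampling(x, (3, 3, 1, 32), phase, drop_conv,
                      height=48, width=48, scope='down1')
# 48x48x32 -> 96x96x16 via the transposed-convolution block
# (deconv kernel layout is [h, w, out_channels, in_channels]).
up1 = deconv_relu_drop(down1, (3, 3, 16, 32), scope='up1')
# 96x96x16 -> 96x96x1 probability map via a 1x1 convolution + sigmoid.
out = conv_sigmod(up1, (1, 1, 16, 1), scope='output')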