import tensorflow as tf

# Helper ops (weight_xavier_init, bias_variable, conv2d, normalizationlayer,
# resnet_Add) are assumed to be defined elsewhere in this repository.


def AGModel(x, signal, kernalshape, phase, height=None, width=None, scope=None):
    with tf.name_scope(scope):
        # gating-signal branch
        Wg = weight_xavier_init(shape=kernalshape, n_inputs=kernalshape[0] * kernalshape[1] * kernalshape[2],
                                n_outputs=kernalshape[-1], activefunction='relu', variable_name=str(scope) + 'Wg')
        Bg = bias_variable([kernalshape[-1]], variable_name=str(scope) + 'Bg')
        convg = conv2d(signal, Wg) + Bg
        convg = normalizationlayer(convg, phase, height=height, width=width, norm_type='group',
                                   scope=str(scope) + 'normg')
        # input-feature branch (the tensor to be gated)
        Wf = weight_xavier_init(shape=kernalshape, n_inputs=kernalshape[0] * kernalshape[1] * kernalshape[2],
                                n_outputs=kernalshape[-1], activefunction='relu', variable_name=str(scope) + 'Wf')
        Bf = bias_variable([kernalshape[-1]], variable_name=str(scope) + 'Bf')
        convf = conv2d(x, Wf) + Bf
        convf = normalizationlayer(convf, phase, height=height, width=width, norm_type='group',
                                   scope=str(scope) + 'normf')
        # fuse the two branches (element-wise add) and apply ReLU
        convadd = resnet_Add(x1=convg, x2=convf)
        convadd = tf.nn.relu(convadd)

        # generate the attention gate coefficients (1x1 conv + sigmoid)
        attencoekernalshape = (1, 1, kernalshape[-1], 1)
        Wpsi = weight_xavier_init(shape=attencoekernalshape,
                                  n_inputs=attencoekernalshape[0] * attencoekernalshape[1] * attencoekernalshape[2],
                                  n_outputs=attencoekernalshape[-1], activefunction='sigmoid',
                                  variable_name=str(scope) + 'Wpsi')
        Bpsi = bias_variable([attencoekernalshape[-1]], variable_name=str(scope) + 'Bpsi')
        convpsi = conv2d(convadd, Wpsi) + Bpsi
        convpsi = normalizationlayer(convpsi, phase, height=height, width=width, norm_type='group',
                                     scope=str(scope) + 'normpsi')
        convpsi = tf.nn.sigmoid(convpsi)
        # apply the attention coefficients to the input features
        attengatx = tf.multiply(x, convpsi)
        return attengatx
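
# A minimal usage sketch (not from the source repository): gate a U-Net skip
# connection with AGModel. Tensor names, shapes, and the `is_train` flag are
# illustrative assumptions.
def _agmodel_usage_example():
    encoder_feat = tf.placeholder(tf.float32, shape=[None, 128, 128, 64])  # skip-connection features
    decoder_feat = tf.placeholder(tf.float32, shape=[None, 128, 128, 64])  # gating signal from the decoder
    is_train = tf.placeholder(tf.bool)  # phase flag for the normalization layers
    return AGModel(x=encoder_feat, signal=decoder_feat, kernalshape=(1, 1, 64, 64),
                   phase=is_train, height=128, width=128, scope='attgate1')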
def down_sampling(x, kernalshape, phase, drop_conv, height=None, width=None, scope=None):
    with tf.name_scope(scope):
        W = weight_xavier_init(shape=kernalshape, n_inputs=kernalshape[0] * kernalshape[1] * kernalshape[2],
                               n_outputs=kernalshape[-1], activefunction='relu', variable_name=str(scope) + 'W')
        B = bias_variable([kernalshape[-1]], variable_name=str(scope) + 'B')
        conv = conv2d(x, W, 2) + B  # stride-2 convolution halves the spatial resolution
        conv = normalizationlayer(conv, phase, height=height, width=width, norm_type='group', scope=scope)
        conv = tf.nn.dropout(tf.nn.relu(conv), drop_conv)  # drop_conv is the keep probability
        return conv
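
# A minimal usage sketch (assumption, not from the source repository): halve a
# 128x128 feature map with the stride-2 convolution above. Shapes and the
# dropout keep probability are illustrative.
def _down_sampling_usage_example():
    features = tf.placeholder(tf.float32, shape=[None, 128, 128, 64])
    is_train = tf.placeholder(tf.bool)  # phase flag for the normalization layer
    return down_sampling(features, kernalshape=(3, 3, 64, 128), phase=is_train,
                         drop_conv=0.8, height=64, width=64, scope='down1')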