def stage_shuffle(input_data, stage, repeat_num, groups):
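    """Run one ShuffleNet stage on input_data (assumed batch size 1).

    Unit 0 is the strided unit: a 3x3 stride-2 average pool forms the
    shortcut branch, whose output is concatenated channel-wise with the
    main branch. The following repeat_num units use stride 1 with an
    elementwise-add shortcut. All weights and batch-norm statistics are
    loaded from .npy files named with the given stage prefix.
    """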
    avgpool_layer = AVGPooling(3, 3, 2, 1)
    residual = avgpool_layer.forward(input_data)
    #savetxt('./dump/' + 'avg_pool.txt', residual)

    w = np.load(stage + '0.g_conv_1x1_compress.conv1x1.weight.npy')
    b = np.load(stage + '0.g_conv_1x1_compress.conv1x1.bias.npy')

    # Per the ShuffleNet paper, Stage2's first 1x1 compression conv is not
    # grouped because its input channel count is small.
    if 'Stage2' in stage:
        conv_layer = Convolution(w, b, stride=1, pad=0)
    else:
        conv_layer = GroupConvolution(w, b, stride=1, pad=0, groups=groups)
    out = conv_layer.forward(input_data)
    out_N, out_C, out_H, out_W = out.shape

    gamma = np.load(stage +
                    '0.g_conv_1x1_compress.batch_norm.weight.npy').reshape(
                        (-1, 1))
    beta = np.load(stage +
                   '0.g_conv_1x1_compress.batch_norm.bias.npy').reshape(
                       (-1, 1))
    mean = np.load(
        stage + '0.g_conv_1x1_compress.batch_norm.running_mean.npy').reshape(
            (-1, 1))
    var = np.load(stage +
                  '0.g_conv_1x1_compress.batch_norm.running_var.npy').reshape(
                      (-1, 1))
    bn_layer = BatchNormalization(gamma,
                                  beta,
                                  running_mean=mean,
                                  running_var=var)
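    # Note: flattening (N, C, H, W) to (C, -1) maps rows to channels only
    # when N == 1; this inference code assumes batch size 1 throughout.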
    out = bn_layer.forward(out.reshape(out_C, -1), train_flg=False)
    relu_layer = Relu()
    out = relu_layer.forward(out).reshape(out_N, out_C, out_H, out_W)
    #savetxt('./dump/' + '1x1_comp.txt', out)

    # Shuffle channels across groups so information can mix between the
    # otherwise isolated group convolutions.
    out = channel_shuffle(out, groups)
    #savetxt('./dump/' + 'channel_shuffle.txt', out)

    w = np.load(stage + '0.depthwise_conv3x3.weight.npy').transpose(1, 0, 2, 3)
    b = np.load(stage + '0.depthwise_conv3x3.bias.npy')
    # The stride-2 depthwise 3x3 conv halves the spatial size to match the
    # average-pooled shortcut.
    dwconv_layer = DWConvolution(w, b, stride=2, pad=1)
    out = dwconv_layer.forward(out)
    #savetxt('./dump/' + 'dwconv.txt', out)

    gamma = np.load(stage + '0.bn_after_depthwise.weight.npy').reshape((-1, 1))
    beta = np.load(stage + '0.bn_after_depthwise.bias.npy').reshape((-1, 1))
    mean = np.load(stage + '0.bn_after_depthwise.running_mean.npy').reshape(
        (-1, 1))
    var = np.load(stage + '0.bn_after_depthwise.running_var.npy').reshape(
        (-1, 1))
    bn_layer = BatchNormalization(gamma,
                                  beta,
                                  running_mean=mean,
                                  running_var=var)
    out_N, out_C, out_H, out_W = out.shape
    out = bn_layer.forward(out.reshape(out_C, -1),
                           train_flg=False).reshape(out_N, out_C, out_H, out_W)
    #savetxt('./dump/' + 'after_bn.txt', out)

    w = np.load(stage + '0.g_conv_1x1_expand.conv1x1.weight.npy')
    b = np.load(stage + '0.g_conv_1x1_expand.conv1x1.bias.npy')
    groupconv_layer = GroupConvolution(w, b, stride=1, pad=0, groups=groups)
    out = groupconv_layer.forward(out)

    gamma = np.load(stage +
                    '0.g_conv_1x1_expand.batch_norm.weight.npy').reshape(
                        (-1, 1))
    beta = np.load(stage + '0.g_conv_1x1_expand.batch_norm.bias.npy').reshape(
        (-1, 1))
    mean = np.load(stage +
                   '0.g_conv_1x1_expand.batch_norm.running_mean.npy').reshape(
                       (-1, 1))
    var = np.load(stage +
                  '0.g_conv_1x1_expand.batch_norm.running_var.npy').reshape(
                      (-1, 1))
    bn_layer = BatchNormalization(gamma,
                                  beta,
                                  running_mean=mean,
                                  running_var=var)
    out_N, out_C, out_H, out_W = out.shape
    out = bn_layer.forward(out.reshape(out_C, -1),
                           train_flg=False).reshape(out_N, out_C, out_H, out_W)
    #savetxt('./dump/' + 'gconv.txt', out)

    # Strided unit: concatenate the pooled shortcut and the branch output
    # along the channel axis, growing the stage's channel count.
    out = np.concatenate((residual, out), 1)
    #savetxt('./dump/' + 'combine.txt', out)
    relu_layer = Relu()
    out_N, out_C, out_H, out_W = out.shape
    out = relu_layer.forward(out).reshape(out_N, out_C, out_H, out_W)
    #savetxt('./dump/' + 'stage2.txt', out)

    # Remaining units of the stage: the same block with stride 1 and an
    # elementwise-add shortcut instead of pool-and-concat.
    for i in range(1, repeat_num + 1):
        residual = out
        w = np.load(stage + str(i) + '.g_conv_1x1_compress.conv1x1.weight.npy')
        b = np.load(stage + str(i) + '.g_conv_1x1_compress.conv1x1.bias.npy')
        groupconv_layer = GroupConvolution(w,
                                           b,
                                           stride=1,
                                           pad=0,
                                           groups=groups)
        out = groupconv_layer.forward(out)
        out_N, out_C, out_H, out_W = out.shape

        gamma = np.load(stage + str(i) +
                        '.g_conv_1x1_compress.batch_norm.weight.npy').reshape(
                            (-1, 1))
        beta = np.load(stage + str(i) +
                       '.g_conv_1x1_compress.batch_norm.bias.npy').reshape(
                           (-1, 1))
        mean = np.load(
            stage + str(i) +
            '.g_conv_1x1_compress.batch_norm.running_mean.npy').reshape(
                (-1, 1))
        var = np.load(
            stage + str(i) +
            '.g_conv_1x1_compress.batch_norm.running_var.npy').reshape((-1, 1))
        bn_layer = BatchNormalization(gamma,
                                      beta,
                                      running_mean=mean,
                                      running_var=var)
        out = bn_layer.forward(out.reshape(out_C, -1), train_flg=False)
        relu_layer = Relu()
        out = relu_layer.forward(out).reshape(out_N, out_C, out_H, out_W)
        #savetxt('./dump/' + str(i) + '_1x1_comp.txt', out)

        out = channel_shuffle(out, groups)
        #savetxt('./dump/' + 'channel_shuffle.txt', out)

        w = np.load(stage + str(i) +
                    '.depthwise_conv3x3.weight.npy').transpose(1, 0, 2, 3)
        b = np.load(stage + str(i) + '.depthwise_conv3x3.bias.npy')
        dwconv_layer = DWConvolution(w, b, stride=1, pad=1)
        out = dwconv_layer.forward(out)
        #savetxt('./dump/' + 'dwconv.txt', out)

        gamma = np.load(stage + str(i) +
                        '.bn_after_depthwise.weight.npy').reshape((-1, 1))
        beta = np.load(stage + str(i) +
                       '.bn_after_depthwise.bias.npy').reshape((-1, 1))
        mean = np.load(stage + str(i) +
                       '.bn_after_depthwise.running_mean.npy').reshape((-1, 1))
        var = np.load(stage + str(i) +
                      '.bn_after_depthwise.running_var.npy').reshape((-1, 1))
        bn_layer = BatchNormalization(gamma,
                                      beta,
                                      running_mean=mean,
                                      running_var=var)
        out_N, out_C, out_H, out_W = out.shape
        out = bn_layer.forward(out.reshape(out_C, -1),
                               train_flg=False).reshape(
                                   out_N, out_C, out_H, out_W)
        #savetxt('./dump/' + 'after_bn.txt', out)

        w = np.load(stage + str(i) + '.g_conv_1x1_expand.conv1x1.weight.npy')
        b = np.load(stage + str(i) + '.g_conv_1x1_expand.conv1x1.bias.npy')
        groupconv_layer = GroupConvolution(w,
                                           b,
                                           stride=1,
                                           pad=0,
                                           groups=groups)
        out = groupconv_layer.forward(out)

        gamma = np.load(stage + str(i) +
                        '.g_conv_1x1_expand.batch_norm.weight.npy').reshape(
                            (-1, 1))
        beta = np.load(stage + str(i) +
                       '.g_conv_1x1_expand.batch_norm.bias.npy').reshape(
                           (-1, 1))
        mean = np.load(
            stage + str(i) +
            '.g_conv_1x1_expand.batch_norm.running_mean.npy').reshape((-1, 1))
        var = np.load(stage + str(i) +
                      '.g_conv_1x1_expand.batch_norm.running_var.npy').reshape(
                          (-1, 1))
        bn_layer = BatchNormalization(gamma,
                                      beta,
                                      running_mean=mean,
                                      running_var=var)
        out_N, out_C, out_H, out_W = out.shape
        out = bn_layer.forward(out.reshape(out_C, -1),
                               train_flg=False).reshape(
                                   out_N, out_C, out_H, out_W)
        #savetxt('./dump/' + 'gconv.txt', out)

        out = np.add(residual, out)
        #savetxt('./dump/' + str(i) + '_combine.txt', out)
        relu_layer = Relu()
        out_N, out_C, out_H, out_W = out.shape
        out = relu_layer.forward(out).reshape(out_N, out_C, out_H, out_W)
        #savetxt('./dump/' + str(i) + '_stage.txt', out)
    return out
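
channel_shuffle is called above but not defined in this snippet. For reference, here is a minimal NumPy sketch of the standard reshape-transpose shuffle, together with a hypothetical call of stage_shuffle (the 'Stage2.' prefix, shapes, and hyperparameters below are illustrative assumptions, not taken from this source):

def channel_shuffle_sketch(x, groups):
    # Split the channel axis into (groups, channels_per_group), swap the two
    # axes so channels from different groups interleave, then flatten back.
    n, c, h, w = x.shape
    x = x.reshape(n, groups, c // groups, h, w)
    x = x.transpose(0, 2, 1, 3, 4)
    return x.reshape(n, c, h, w)

# Hypothetical usage (all values are assumptions):
#   x = np.random.randn(1, 24, 28, 28).astype(np.float32)
#   out = stage_shuffle(x, 'Stage2.', repeat_num=3, groups=3)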
Example 2
# coding: utf-8

import numpy as np
import sys
sys.path.append('../../')
from common.layers import Relu

# Relu caches a boolean mask of non-positive inputs in forward() and reuses
# it in backward().
relu = Relu()

#---------------------------------------
# forward
x = np.array([[1.0, -0.5], [-2.0, 3.0]])
print(x)
y = relu.forward(x)
print(y)
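# Expected: negatives are clamped to zero, so y == [[1., 0.], [0., 3.]].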

#---------------------------------------
# backward
dy = np.array([[5, 5], [5, 5]])
dx = relu.backward(dy)
print(dx)
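# Expected: dx == [[5, 0], [0, 5]] -- the gradient flows only where the
# forward input was positive.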