Example #1
    def generate(self, model_len=None, model_width=None):
        """Generates a Multi-Layer Perceptron.
        Args:
            model_len: An integer. Number of hidden layers.
            model_width: An integer or a list of integers of length `model_len`. If it is a list, it gives the
                number of nodes in each hidden layer; if it is an integer, every hidden layer has that many nodes.
        Returns:
            An instance of the class Graph. Represents the neural architecture graph of the generated model.
        """
        if model_len is None:
            model_len = Constant.MODEL_LEN
        if model_width is None:
            model_width = Constant.MODEL_WIDTH
        if isinstance(model_width, list) and len(model_width) != model_len:
            raise ValueError(
                "The length of 'model_width' does not match 'model_len'")
        elif isinstance(model_width, int):
            model_width = [model_width] * model_len

        graph = Graph(self.input_shape, False)
        output_node_id = 0
        n_nodes_prev_layer = self.input_shape[0]
        for width in model_width:
            output_node_id = graph.add_layer(
                StubDense(n_nodes_prev_layer, width), output_node_id)
            output_node_id = graph.add_layer(
                StubDropout1d(Constant.MLP_DROPOUT_RATE), output_node_id)
            output_node_id = graph.add_layer(StubReLU(), output_node_id)
            n_nodes_prev_layer = width

        graph.add_layer(StubDense(n_nodes_prev_layer, self.n_output_node),
                        output_node_id)
        return graph
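
A minimal usage sketch, assuming this method belongs to AutoKeras' `MlpGenerator`, constructed with `n_output_node` and `input_shape` (names assumed from that project; they are not shown in the snippet):

# Hypothetical usage; class name and constructor are assumptions.
generator = MlpGenerator(n_output_node=10, input_shape=(784,))
# Three hidden layers of 128, 64, and 32 nodes, each followed by dropout and ReLU.
graph = generator.generate(model_len=3, model_width=[128, 64, 32])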
Example #2
def wider_pre_dense(layer, n_add, weighted=True):
    '''Widen the preceding dense layer by adding `n_add` output units (Net2WiderNet-style).
    '''
    if not weighted:
        return StubDense(layer.input_units, layer.units + n_add)

    n_units2 = layer.units

    teacher_w, teacher_b = layer.get_weights()
    rand = np.random.randint(n_units2, size=n_add)
    student_w = teacher_w.copy()
    student_b = teacher_b.copy()

    # Append n_add new units, each a noisy copy of a randomly chosen teacher unit.
    for i in range(n_add):
        teacher_index = rand[i]
        new_weight = teacher_w[teacher_index, :]
        new_weight = new_weight[np.newaxis, :]
        student_w = np.concatenate(
            (student_w, add_noise(new_weight, student_w)), axis=0)
        student_b = np.append(student_b,
                              add_noise(teacher_b[teacher_index], student_b))

    new_pre_layer = StubDense(layer.input_units, n_units2 + n_add)
    new_pre_layer.set_weights((student_w, student_b))

    return new_pre_layer
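
The row-wise copy above implies that `StubDense` stores its weights as a `(units, input_units)` matrix. A minimal sketch of the resulting shape change, assuming NumPy, `StubDense`, and `add_noise` are in scope as in the snippet:

import numpy as np

# Hypothetical usage: widen a 4-unit dense layer to 6 units.
layer = StubDense(3, 4)                              # 3 inputs -> 4 units
layer.set_weights((np.random.rand(4, 3), np.random.rand(4)))
wider = wider_pre_dense(layer, n_add=2)
w, b = wider.get_weights()
print(w.shape, b.shape)                              # (6, 3) (6,)

Each new unit is a noisy copy of a randomly chosen existing unit, so the widened layer starts out computing nearly the same features as the teacher.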
Example #3
def dense_to_deeper_block(dense_layer, weighted=True):
    '''Return a block that deepens the network after `dense_layer`: a ReLU followed by
    an identity-initialized dense layer (Net2DeeperNet-style).
    '''
    units = dense_layer.units
    weight = np.eye(units)
    bias = np.zeros(units)
    new_dense_layer = StubDense(units, units)
    if weighted:
        new_dense_layer.set_weights(
            (add_noise(weight, np.array([0, 1])),
             add_noise(bias, np.array([0, 1]))))
    return [StubReLU(), new_dense_layer]
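
Because the inserted layer starts as a (noisy) identity with near-zero bias, the deepened network initially computes almost the same function as before. A minimal sketch, assuming `StubDense` is in scope as above:

# Hypothetical usage: build a deepening block for a 16-unit dense layer.
relu, identity_dense = dense_to_deeper_block(StubDense(8, 16))
w, b = identity_dense.get_weights()
print(w.shape, b.shape)   # (16, 16) near-identity, (16,) near-zero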
Example #4
    def generate(self, model_len=None, model_width=None):
        """Generates a CNN.
        Args:
            model_len: An integer. Number of convolutional layers.
            model_width: An integer. Number of filters for the convolutional layers.
        Returns:
            An instance of the class Graph. Represents the neural architecture graph of the generated model.
        """

        if model_len is None:
            model_len = Constant.MODEL_LEN
        if model_width is None:
            model_width = Constant.MODEL_WIDTH
        pooling_len = int(model_len / 4)
        graph = Graph(self.input_shape, False)
        temp_input_channel = self.input_shape[-1]
        output_node_id = 0
        stride = 1
        for i in range(model_len):
            output_node_id = graph.add_layer(StubReLU(), output_node_id)
            output_node_id = graph.add_layer(
                self.batch_norm(graph.node_list[output_node_id].shape[-1]),
                output_node_id)
            output_node_id = graph.add_layer(
                self.conv(temp_input_channel,
                          model_width,
                          kernel_size=3,
                          stride=stride),
                output_node_id,
            )
            temp_input_channel = model_width
            # Pool after every `pooling_len`-th conv layer, but not after the last;
            # when model_len < 4, pooling_len is 0 and pooling follows every layer.
            if pooling_len == 0 or ((i + 1) % pooling_len == 0
                                    and i != model_len - 1):
                output_node_id = graph.add_layer(self.pooling(),
                                                 output_node_id)

        output_node_id = graph.add_layer(self.global_avg_pooling(),
                                         output_node_id)
        output_node_id = graph.add_layer(
            self.dropout(Constant.CONV_DROPOUT_RATE), output_node_id)
        output_node_id = graph.add_layer(
            StubDense(graph.node_list[output_node_id].shape[0], model_width),
            output_node_id,
        )
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        graph.add_layer(StubDense(model_width, self.n_output_node),
                        output_node_id)
        return graph
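
A minimal usage sketch, assuming this method belongs to AutoKeras' `CnnGenerator`, whose subclasses pick the conv/pooling dimensionality (names assumed; they are not shown in the snippet):

# Hypothetical usage; class name and constructor are assumptions.
generator = CnnGenerator(n_output_node=10, input_shape=(32, 32, 3))
# Eight ReLU-BatchNorm-Conv triples with 64 filters each; pooling_len = 2,
# so a pooling layer follows every second conv except the last.
graph = generator.generate(model_len=8, model_width=64)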
Example #5
def create_new_layer(layer, n_dim):
    '''Create a new layer to insert into the graph, with its class sampled to fit
    the context of `layer`.
    '''

    input_shape = layer.output.shape
    dense_deeper_classes = [StubDense, get_dropout_class(n_dim), StubReLU]
    conv_deeper_classes = [
        get_conv_class(n_dim),
        get_batch_norm_class(n_dim), StubReLU
    ]
    if is_layer(layer, "ReLU"):
        conv_deeper_classes = [
            get_conv_class(n_dim),
            get_batch_norm_class(n_dim)
        ]
        dense_deeper_classes = [StubDense, get_dropout_class(n_dim)]
    elif is_layer(layer, "Dropout"):
        dense_deeper_classes = [StubDense, StubReLU]
    elif is_layer(layer, "BatchNormalization"):
        conv_deeper_classes = [get_conv_class(n_dim), StubReLU]

    if len(input_shape) == 1:
        # It is in the dense layer part.
        layer_class = sample(dense_deeper_classes, 1)[0]
    else:
        # It is in the conv layer part.
        layer_class = sample(conv_deeper_classes, 1)[0]

    if layer_class == StubDense:
        new_layer = StubDense(input_shape[0], input_shape[0])

    elif layer_class == get_dropout_class(n_dim):
        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)

    elif layer_class == get_conv_class(n_dim):
        new_layer = layer_class(input_shape[-1],
                                input_shape[-1],
                                sample((1, 3, 5), 1)[0],
                                stride=1)

    elif layer_class == get_batch_norm_class(n_dim):
        new_layer = layer_class(input_shape[-1])

    # Unreachable with the candidate lists above; kept for completeness.
    elif layer_class == get_pooling_class(n_dim):
        new_layer = layer_class(sample((1, 3, 5), 1)[0])

    else:
        new_layer = layer_class()

    return new_layer
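
`sample` here is `random.sample`, drawing one class from the context-dependent candidate list; the concrete constructor call then depends on which class was drawn. A sketch of a single mutation step (the `relu_layer` object below is hypothetical):

from random import sample  # the `sample` used in the snippet

# Hypothetical usage: propose a layer to insert after an existing ReLU whose
# output is 3-D, so a conv-side class (conv or batch-norm) is drawn.
new_layer = create_new_layer(relu_layer, n_dim=2)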
Example #6
def wider_next_dense(layer, start_dim, total_dim, n_add, weighted=True):
    '''Widen the input side of the dense layer that follows a widened layer.
    '''
    if not weighted:
        return StubDense(layer.input_units + n_add, layer.units)
    teacher_w, teacher_b = layer.get_weights()
    student_w = teacher_w.copy()
    # Number of input columns that each pre-widening channel occupies.
    n_units_each_channel = int(teacher_w.shape[1] / total_dim)

    # Near-zero columns for the n_add new channels, spliced in at start_dim.
    new_weight = np.zeros((teacher_w.shape[0], n_add * n_units_each_channel))
    student_w = np.concatenate(
        (
            student_w[:, :start_dim * n_units_each_channel],
            add_noise(new_weight, student_w),
            student_w[:, start_dim * n_units_each_channel:total_dim *
                      n_units_each_channel],
        ),
        axis=1,
    )

    new_layer = StubDense(layer.input_units + n_add, layer.units)
    new_layer.set_weights((student_w, teacher_b))
    return new_layer
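
When the widened previous layer is dense, `total_dim` equals its old unit count and `n_units_each_channel == 1`, so exactly `n_add` near-zero input columns are spliced in at `start_dim`. A minimal sketch of that case, under the same `(units, input_units)` weight-layout assumption as above:

import numpy as np

# Hypothetical usage: the previous layer grew from 4 to 5 units.
layer = StubDense(4, 3)
layer.set_weights((np.random.rand(3, 4), np.zeros(3)))
new_layer = wider_next_dense(layer, start_dim=2, total_dim=4, n_add=1)
w, _ = new_layer.get_weights()
print(w.shape)   # (3, 5): one near-zero column spliced in at index 2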
Example #7
File: nn.py  Project: ouyanghaley/AAH
    def generate(self, model_len=None, model_width=None):
        """Generates a CNN.
        Args:
            model_len: An integer. Number of convolutional layers.
            model_width: An integer. Number of filters for the convolutional layers.
        Returns:
            An instance of the class Graph. Represents the neural architecture graph of the generated model.
        """

        if model_len is None:
            model_len = Constant.MODEL_LEN
        if model_width is None:
            model_width = Constant.MODEL_WIDTH
        pooling_len = int(model_len / 4)  # unused: the architecture below is fixed
        graph = Graph(self.input_shape, False)
        temp_input_channel = self.input_shape[-1]
        output_node_id = 0
        stride = 1  # unused: strides are set explicitly per block
        # conv_1: 7x7 conv, stride 2, then BN, ReLU, and 3x3 pooling, stride 2.
        output_node_id = graph.add_layer(
            self.conv(temp_input_channel, 64, kernel_size=7, stride=2),
            output_node_id)
        output_node_id = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id].shape[-1]),
            output_node_id)
        output_node_id = graph.add_layer(StubReLU(), output_node_id)

        output_node_id = graph.add_layer(self.pooling(kernel_size=3, stride=2),
                                         output_node_id)

        # conv_2: 3 bottleneck blocks (1x1/3x3/1x1 convs with 64, 64, 256 filters);
        # the first block uses a 1x1 projection shortcut.
        output_node_id1 = graph.add_layer(
            self.conv(64, 64, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(64, 64, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(64, 256, kernel_size=1, stride=1), output_node_id1)

        output_node_id00 = graph.add_layer(
            self.conv(64, 256, kernel_size=1, stride=1), output_node_id)

        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id00 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id00].shape[-1]),
            output_node_id00)
        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(256, 64, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(64, 64, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(64, 256, kernel_size=1, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(256, 64, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(64, 64, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(64, 256, kernel_size=1, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # conv_3: 4 bottleneck blocks (128, 128, 512 filters); the first block
        # downsamples with stride 2 and uses a projection shortcut.

        output_node_id1 = graph.add_layer(
            self.conv(256, 128, kernel_size=1, stride=2), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(128, 128, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(128, 512, kernel_size=1, stride=1), output_node_id1)
        output_node_id00 = graph.add_layer(
            self.conv(256, 512, kernel_size=1, stride=2), output_node_id)

        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id00 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id00].shape[-1]),
            output_node_id00)
        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)

        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(512, 128, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(128, 128, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(128, 512, kernel_size=1, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(512, 128, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(128, 128, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(128, 512, kernel_size=1, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(512, 128, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(128, 128, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(128, 512, kernel_size=1, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)

        # conv_4: 6 bottleneck blocks (256, 256, 1024 filters); the first block
        # downsamples with stride 2 and uses a projection shortcut.

        output_node_id1 = graph.add_layer(
            self.conv(512, 256, kernel_size=1, stride=2), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 256, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 1024, kernel_size=1, stride=1), output_node_id1)
        output_node_id00 = graph.add_layer(
            self.conv(512, 1024, kernel_size=1, stride=2), output_node_id)

        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id00 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id00].shape[-1]),
            output_node_id00)
        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(1024, 256, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 256, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 1024, kernel_size=1, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(1024, 256, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 256, kernel_size=3, stride=1), output_node_id1)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 1024, kernel_size=1, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(1024, 256, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 256, kernel_size=3, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 1024, kernel_size=1, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(1024, 256, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 256, kernel_size=3, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 1024, kernel_size=1, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(1024, 256, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 256, kernel_size=3, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(256, 1024, kernel_size=1, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # conv_5: 3 bottleneck blocks (512, 512, 2048 filters); the first block
        # downsamples with stride 2 and uses a projection shortcut.

        output_node_id1 = graph.add_layer(
            self.conv(1024, 512, kernel_size=1, stride=2), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(512, 512, kernel_size=3, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(512, 2048, kernel_size=1, stride=1),
            output_node_id1,
        )
        output_node_id00 = graph.add_layer(
            self.conv(1024, 2048, kernel_size=1, stride=2), output_node_id)

        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id00 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id00].shape[-1]),
            output_node_id00)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(2048, 512, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(512, 512, kernel_size=3, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(512, 2048, kernel_size=1, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)
        # identity block
        output_node_id00 = output_node_id

        output_node_id1 = graph.add_layer(
            self.conv(2048, 512, kernel_size=1, stride=1), output_node_id)
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(512, 512, kernel_size=3, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)
        output_node_id1 = graph.add_layer(StubReLU(), output_node_id1)

        output_node_id1 = graph.add_layer(
            self.conv(512, 2048, kernel_size=1, stride=1),
            output_node_id1,
        )
        output_node_id1 = graph.add_layer(
            self.batch_norm(graph.node_list[output_node_id1].shape[-1]),
            output_node_id1)

        output_node_id = graph.add_layer(StubAdd(),
                                         [output_node_id1, output_node_id00])
        output_node_id = graph.add_layer(StubReLU(), output_node_id)

        # classifier head: global average pooling, then the final dense layer.
        output_node_id = graph.add_layer(self.global_avg_pooling(),
                                         output_node_id)
        # output_node_id = graph.add_layer(
        #     self.dropout(Constant.CONV_DROPOUT_RATE), output_node_id
        # )
        # output_node_id = graph.add_layer(
        #     StubDense(graph.node_list[output_node_id].shape[0], model_width),
        #     output_node_id
        # )
        # output_node_id = graph.add_layer(StubReLU(), output_node_id)
        graph.add_layer(StubDense(2048, self.n_output_node), output_node_id)
        return graph
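
The body above hand-unrolls ResNet-50's (3, 4, 6, 3) bottleneck layout. A hypothetical helper, not part of the original project, sketching the repeated block under the same `self.conv` / `self.batch_norm` / `StubAdd` assumptions as the snippet:

    def _bottleneck(self, graph, node_id, in_ch, mid_ch, out_ch,
                    stride=1, project=False):
        """One 1x1 -> 3x3 -> 1x1 residual bottleneck, as hand-unrolled above."""
        branch = graph.add_layer(
            self.conv(in_ch, mid_ch, kernel_size=1, stride=stride), node_id)
        branch = graph.add_layer(
            self.batch_norm(graph.node_list[branch].shape[-1]), branch)
        branch = graph.add_layer(StubReLU(), branch)
        branch = graph.add_layer(
            self.conv(mid_ch, mid_ch, kernel_size=3, stride=1), branch)
        branch = graph.add_layer(
            self.batch_norm(graph.node_list[branch].shape[-1]), branch)
        branch = graph.add_layer(StubReLU(), branch)
        branch = graph.add_layer(
            self.conv(mid_ch, out_ch, kernel_size=1, stride=1), branch)
        branch = graph.add_layer(
            self.batch_norm(graph.node_list[branch].shape[-1]), branch)
        shortcut = node_id
        if project:
            # Projection shortcut for the first block of a stage, where the
            # channel count (and possibly the spatial size) changes.
            shortcut = graph.add_layer(
                self.conv(in_ch, out_ch, kernel_size=1, stride=stride), node_id)
            shortcut = graph.add_layer(
                self.batch_norm(graph.node_list[shortcut].shape[-1]), shortcut)
        node_id = graph.add_layer(StubAdd(), [branch, shortcut])
        return graph.add_layer(StubReLU(), node_id)

With such a helper, conv_2 through conv_5 collapse to loops of 3, 4, 6, and 3 blocks, passing project=True (and stride=2 for every stage after conv_2) to each stage's first block.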