def generate(self, model_len=None, model_width=None):
    """Generates a CNN.

    Args:
        model_len: An integer. Number of convolutional layers.
        model_width: An integer. Number of filters for the convolutional
            layers.

    Returns:
        An instance of the class Graph. Represents the neural architecture
        graph of the generated model.
    """
    model_len = Constant.MODEL_LEN if model_len is None else model_len
    model_width = Constant.MODEL_WIDTH if model_width is None else model_width
    pooling_len = int(model_len / 4)
    graph = Graph(self.input_shape, False)
    in_channels = self.input_shape[-1]
    node_id = 0
    for layer_idx in range(model_len):
        # Pre-activation ordering: ReLU -> batch norm -> conv.
        node_id = graph.add_layer(StubReLU(), node_id)
        node_id = graph.add_layer(
            self.batch_norm(graph.node_list[node_id].shape[-1]), node_id)
        node_id = graph.add_layer(
            self.conv(in_channels, model_width, kernel_size=3, stride=1),
            node_id)
        in_channels = model_width
        # Insert pooling every `pooling_len` conv layers, but never right
        # after the final conv layer.
        is_last = layer_idx == model_len - 1
        if pooling_len == 0 or ((layer_idx + 1) % pooling_len == 0 and not is_last):
            node_id = graph.add_layer(self.pooling(), node_id)
    node_id = graph.add_layer(self.global_avg_pooling(), node_id)
    node_id = graph.add_layer(
        self.dropout(Constant.CONV_DROPOUT_RATE), node_id)
    node_id = graph.add_layer(
        StubDense(graph.node_list[node_id].shape[0], model_width), node_id)
    node_id = graph.add_layer(StubReLU(), node_id)
    graph.add_layer(StubDense(model_width, self.n_output_node), node_id)
    return graph
def generate(self, model_len=None, model_width=None):
    """Generates a Multi-Layer Perceptron.

    Args:
        model_len: An integer. Number of hidden layers.
        model_width: An integer or a list of integers of length `model_len`.
            If it is a list, it represents the number of nodes in each hidden
            layer. If it is an integer, all hidden layers have nodes equal to
            this value.

    Returns:
        An instance of the class Graph. Represents the neural architecture
        graph of the generated model.

    Raises:
        ValueError: If `model_width` is a list whose length differs from
            `model_len`.
    """
    if model_len is None:
        model_len = Constant.MODEL_LEN
    if model_width is None:
        model_width = Constant.MODEL_WIDTH
    if isinstance(model_width, list):
        if len(model_width) != model_len:
            raise ValueError(
                "The length of 'model_width' does not match 'model_len'")
    elif isinstance(model_width, int):
        # Uniform width for every hidden layer.
        model_width = [model_width] * model_len
    graph = Graph(self.input_shape, False)
    node_id = 0
    prev_width = self.input_shape[0]
    for width in model_width:
        # Each hidden layer: dense -> dropout -> ReLU.
        node_id = graph.add_layer(StubDense(prev_width, width), node_id)
        node_id = graph.add_layer(
            StubDropout1d(Constant.MLP_DROPOUT_RATE), node_id)
        node_id = graph.add_layer(StubReLU(), node_id)
        prev_width = width
    graph.add_layer(StubDense(prev_width, self.n_output_node), node_id)
    return graph
def deeper_conv_block(conv_layer, kernel_size, weighted=True):
    """Build an identity-initialized [ReLU, conv, batch-norm] block.

    Net2DeeperNet-style deepening: the new conv layer maps channel i only to
    channel i through the kernel's center tap, and the batch-norm layer is set
    to identity statistics, so (up to added noise) the block initially
    preserves the trained network's function.

    Args:
        conv_layer: The existing conv layer after which the block is inserted.
        kernel_size: An integer. Kernel size of the new conv layer.
        weighted: A boolean. If True, initialize the new layers with
            identity-plus-noise weights.

    Returns:
        A list [StubReLU, conv, batch_norm] of new stub layers.
    """
    n_dim = get_n_dim(conv_layer)
    # One kernel-size entry per spatial dimension. (Previously hard-coded to
    # 2 dims, which produced wrong-shaped weights for 1-D / 3-D convolutions.)
    filter_shape = (kernel_size,) * n_dim
    n_filters = conv_layer.filters
    center = tuple(int((size - 1) / 2) for size in filter_shape)
    # Identity kernel: weight[i, i, center...] = 1, all else 0.
    weight = np.zeros((n_filters, n_filters) + filter_shape)
    for i in range(n_filters):
        weight[(i, i) + center] = 1
    bias = np.zeros(n_filters)
    new_conv_layer = get_conv_class(n_dim)(
        conv_layer.filters, n_filters, kernel_size=kernel_size)
    bn = get_batch_norm_class(n_dim)(n_filters)
    if weighted:
        new_conv_layer.set_weights(
            (add_noise(weight, np.array([0, 1])),
             add_noise(bias, np.array([0, 1]))))
        # Batch-norm weights: (gamma, beta, running_mean, running_var) set to
        # the identity transform (1, 0, 0, 1) plus noise.
        new_weights = [
            add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])),
            add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])),
            add_noise(np.zeros(n_filters, dtype=np.float32), np.array([0, 1])),
            add_noise(np.ones(n_filters, dtype=np.float32), np.array([0, 1])),
        ]
        bn.set_weights(new_weights)
    return [StubReLU(), new_conv_layer, bn]
def to_deeper_graph2(graph):
    """Deepen `graph` by inserting a conv -> batch-norm -> ReLU chain.

    A layer is sampled from the deepenable candidates and the three new
    layers are inserted after it, one at a time.

    Returns:
        The mutated graph, or None when the graph already reached the
        maximum layer count.
    """
    candidate_ids = graph.deep_layer_ids2()
    if len(candidate_ids) >= Constant.MAX_LAYERS:
        return None
    # Pick one layer at random to deepen.
    for layer_id in sample(candidate_ids, 1):
        target = graph.layer_list[layer_id]
        channels = target.output.shape[-1]
        # Insert a 3x3 channel-preserving conv after the sampled layer.
        conv = get_conv_class(graph.n_dim)(channels, channels, 3, stride=1)
        conv_output_id = graph.to_deeper_model(layer_id, conv)
        # Follow with a batch-norm sized to the conv's output channels.
        conv_layer_id = graph.get_layers_id(conv_output_id)
        bn_channels = graph.layer_list[conv_layer_id].output.shape[-1]
        bn = get_batch_norm_class(graph.n_dim)(bn_channels)
        bn_output_id = graph.to_deeper_model(conv_layer_id, bn)
        # Finish the chain with a ReLU.
        bn_layer_id = graph.get_layers_id(bn_output_id)
        graph.to_deeper_model(bn_layer_id, StubReLU())
    return graph
def dense_to_deeper_block(dense_layer, weighted=True):
    """Build an identity-initialized [ReLU, dense] block to deepen a network.

    Args:
        dense_layer: The existing dense layer after which the block is
            inserted.
        weighted: A boolean. If True, initialize the new dense layer with an
            identity matrix (plus noise) and zero bias so the block starts
            as a near no-op.

    Returns:
        A list [StubReLU, StubDense] of new stub layers.
    """
    units = dense_layer.units
    new_dense_layer = StubDense(units, units)
    if weighted:
        identity_weight = add_noise(np.eye(units), np.array([0, 1]))
        zero_bias = add_noise(np.zeros(units), np.array([0, 1]))
        new_dense_layer.set_weights((identity_weight, zero_bias))
    return [StubReLU(), new_dense_layer]
def _insert_pooling_layer_chain(self, start_node_id, end_node_id):
    """Build a skip chain mirroring the pooling layers between two nodes.

    Copies every pooling-type layer found between `start_node_id` and
    `end_node_id` onto a new chain starting at `start_node_id`, so the skip
    branch downsamples the same way as the main branch. Conv layers on the
    path are replaced by 1x1 convs that keep the start node's channel count
    but reproduce the original stride.

    Args:
        start_node_id: Node id where the skip chain starts.
        end_node_id: Node id where the mirrored span of the main path ends.

    Returns:
        The node id at the end of the new chain (after a trailing ReLU).
    """
    skip_output_id = start_node_id
    for layer in self._get_pooling_layers(start_node_id, end_node_id):
        if is_layer(layer, "Conv"):
            # 1x1 conv preserving channels but matching the original stride
            # (the downsampling effect of the mirrored conv).
            filters = self.node_list[start_node_id].shape[-1]
            new_layer = get_conv_class(self.n_dim)(filters, filters, 1,
                                                   layer.stride)
            if self.weighted:
                init_conv_weight(new_layer)
        else:
            # Fix: deepcopy only when the copy is actually used. The original
            # copied every layer up front and then copied non-conv layers a
            # second time in this branch.
            new_layer = deepcopy(layer)
        skip_output_id = self.add_layer(new_layer, skip_output_id)
    skip_output_id = self.add_layer(StubReLU(), skip_output_id)
    return skip_output_id
def generate(self, model_len=None, model_width=None):
    """Generates a ResNet-50-style CNN.

    Architecture: a 7x7/2 conv stem (conv -> BN -> ReLU -> 3x3/2 pooling),
    then four stages of bottleneck residual blocks -- (3, 4, 6, 3) blocks
    with bottleneck widths (64, 128, 256, 512) and 4x channel expansion --
    followed by global average pooling and a dense classifier.

    Args:
        model_len: Ignored; accepted only for interface compatibility with
            the other generators.
        model_width: Ignored; accepted only for interface compatibility.

    Returns:
        An instance of the class Graph. Represents the neural architecture
        graph of the generated model.
    """
    graph = Graph(self.input_shape, False)
    temp_input_channel = self.input_shape[-1]
    output_node_id = 0

    def bottleneck(in_ch, mid_ch, stride, node_id, project):
        # One bottleneck residual block: 1x1 -> 3x3 -> 1x1 conv branch plus
        # a skip connection. When `project` is True (first block of a stage)
        # the skip path gets a strided 1x1 conv + BN projection; otherwise
        # the block input is added unchanged (identity block).
        out_ch = mid_ch * 4
        branch = graph.add_layer(
            self.conv(in_ch, mid_ch, kernel_size=1, stride=stride), node_id)
        branch = graph.add_layer(
            self.batch_norm(graph.node_list[branch].shape[-1]), branch)
        branch = graph.add_layer(StubReLU(), branch)
        branch = graph.add_layer(
            self.conv(mid_ch, mid_ch, kernel_size=3, stride=1), branch)
        branch = graph.add_layer(
            self.batch_norm(graph.node_list[branch].shape[-1]), branch)
        branch = graph.add_layer(StubReLU(), branch)
        branch = graph.add_layer(
            self.conv(mid_ch, out_ch, kernel_size=1, stride=1), branch)
        if project:
            shortcut = graph.add_layer(
                self.conv(in_ch, out_ch, kernel_size=1, stride=stride),
                node_id)
            branch = graph.add_layer(
                self.batch_norm(graph.node_list[branch].shape[-1]), branch)
            shortcut = graph.add_layer(
                self.batch_norm(graph.node_list[shortcut].shape[-1]),
                shortcut)
        else:
            branch = graph.add_layer(
                self.batch_norm(graph.node_list[branch].shape[-1]), branch)
            shortcut = node_id
        node_id = graph.add_layer(StubAdd(), [branch, shortcut])
        return graph.add_layer(StubReLU(), node_id)

    # Stem (conv_1): 7x7/2 conv -> BN -> ReLU -> 3x3/2 pooling.
    output_node_id = graph.add_layer(
        self.conv(temp_input_channel, 64, kernel_size=7, stride=2),
        output_node_id)
    output_node_id = graph.add_layer(
        self.batch_norm(graph.node_list[output_node_id].shape[-1]),
        output_node_id)
    output_node_id = graph.add_layer(StubReLU(), output_node_id)
    output_node_id = graph.add_layer(
        self.pooling(kernel_size=3, stride=2), output_node_id)

    # Residual stages conv_2 .. conv_5. Each stage opens with a projection
    # block; conv_2 keeps stride 1 (the stem already downsampled), the
    # later stages downsample with stride 2.
    in_ch = 64
    for mid_ch, n_blocks, stride in ((64, 3, 1), (128, 4, 2),
                                     (256, 6, 2), (512, 3, 2)):
        output_node_id = bottleneck(in_ch, mid_ch, stride, output_node_id,
                                    project=True)
        in_ch = mid_ch * 4
        for _ in range(n_blocks - 1):
            output_node_id = bottleneck(in_ch, mid_ch, 1, output_node_id,
                                        project=False)

    # Head: global average pooling into the final classifier.
    output_node_id = graph.add_layer(self.global_avg_pooling(),
                                     output_node_id)
    graph.add_layer(StubDense(2048, self.n_output_node), output_node_id)
    return graph