from keras.layers import (Activation, BatchNormalization, Concatenate,
                          Dense, Dropout, Flatten)
from keras.regularizers import l2

# is_layer, layer_width, WeightedAdd, and the wider_* helpers are assumed to
# be provided by this package's own modules.


def to_real_layer(layer):
    """Map an internal stub layer description to a concrete Keras layer."""
    if is_layer(layer, 'Dense'):
        return Dense(layer.units, activation=layer.activation)
    if is_layer(layer, 'Conv'):
        return layer.func(layer.filters,
                          kernel_size=layer.kernel_size,
                          padding='same',
                          kernel_initializer='he_normal',
                          kernel_regularizer=l2(1e-4))
    if is_layer(layer, 'Pooling'):
        return layer.func(padding='same')
    if is_layer(layer, 'BatchNormalization'):
        return BatchNormalization()
    if is_layer(layer, 'Concatenate'):
        return Concatenate()
    if is_layer(layer, 'WeightedAdd'):
        return WeightedAdd()
    if is_layer(layer, 'Dropout'):
        return Dropout(layer.rate)
    if is_layer(layer, 'Activation'):
        return Activation(layer.func)
    if is_layer(layer, 'Flatten'):
        return Flatten()
    if is_layer(layer, 'GlobalAveragePooling'):
        return layer.func()
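
# Illustration (hypothetical, not from the library): for a stub layer whose
# attributes are units=128 and activation='relu', to_real_layer would return
# Dense(128, activation='relu'). The stub objects themselves are assumed to
# be created elsewhere in this package.
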
def _upper_layer_width(self, u):
    """Return the output width of the nearest Conv/Dense layer above node u.

    Falls back to the last dimension of the input shape when no such layer
    exists above u.
    """
    for v, layer_id in self.reverse_adj_list[u]:
        layer = self.layer_list[layer_id]
        if is_layer(layer, 'Conv') or is_layer(layer, 'Dense'):
            return layer_width(layer)
        elif is_layer(layer, 'Concatenate'):
            a = self.layer_id_to_input_node_ids[layer_id][0]
            b = self.layer_id_to_input_node_ids[layer_id][1]
            return self._upper_layer_width(a) + self._upper_layer_width(b)
        else:
            return self._upper_layer_width(v)
    return self.input_shape[-1]
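
# Worked example (hypothetical widths): if node u sits directly below a
# Concatenate whose left branch ends in a 32-wide Conv layer and whose right
# branch ends in a 64-wide Conv layer, _upper_layer_width(u) returns
# 32 + 64 = 96.
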
def _search_next(self, u, start_dim, total_dim, n_add):
    """Search downward through the graph for widening the layers.

    Args:
        u: The starting node identifier.
        start_dim: The index at which to insert the additional dimensions.
        total_dim: The total number of dimensions the layer had before
            widening.
        n_add: The number of dimensions to add.
    """
    for v, layer_id in self.adj_list[u]:
        layer = self.layer_list[layer_id]
        if is_layer(layer, 'Conv'):
            new_layer = wider_next_conv(layer, start_dim, total_dim, n_add,
                                        self.weighted)
            self._replace_layer(layer_id, new_layer)
        elif is_layer(layer, 'Dense'):
            new_layer = wider_next_dense(layer, start_dim, total_dim, n_add,
                                         self.weighted)
            self._replace_layer(layer_id, new_layer)
        elif is_layer(layer, 'BatchNormalization'):
            new_layer = wider_bn(layer, start_dim, total_dim, n_add,
                                 self.weighted)
            self._replace_layer(layer_id, new_layer)
            self._search_next(v, start_dim, total_dim, n_add)
        elif is_layer(layer, 'WeightedAdd'):
            new_layer = wider_weighted_add(layer, n_add, self.weighted)
            self._replace_layer(layer_id, new_layer)
            self._search_next(v, start_dim, total_dim, n_add)
        elif is_layer(layer, 'Concatenate'):
            if self.layer_id_to_input_node_ids[layer_id][1] == u:
                # u feeds the right input of the Concatenate, so the new
                # dimensions are offset by the width of the left branch.
                left_dim = self._upper_layer_width(
                    self.layer_id_to_input_node_ids[layer_id][0])
                next_start_dim = start_dim + left_dim
                next_total_dim = total_dim + left_dim
            else:
                next_start_dim = start_dim
                next_total_dim = total_dim + self._upper_layer_width(
                    self.layer_id_to_input_node_ids[layer_id][1])
            self._search_next(v, next_start_dim, next_total_dim, n_add)
        else:
            self._search_next(v, start_dim, total_dim, n_add)
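
# Sketch of the Concatenate bookkeeping above (hypothetical numbers): if the
# widening arrives at the right input of a Concatenate whose left branch is
# 32 channels wide, with start_dim=16 and total_dim=64, then past the
# Concatenate the search continues with next_start_dim = 16 + 32 = 48 and
# next_total_dim = 64 + 32 = 96, while n_add is unchanged.
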
def _get_pooling_layers(self, start_node_id, end_node_id):
    """Return the ids of the pooling layers between the two given nodes."""
    layer_list = []
    node_list = [start_node_id]
    self._depth_first_search(end_node_id, layer_list, node_list)
    return list(filter(
        lambda layer_id: is_layer(self.layer_list[layer_id], 'Pooling'),
        layer_list))
def extract_descriptor(self):
    """Extract a NetworkDescriptor of the layer widths and skip connections."""
    ret = NetworkDescriptor()
    topological_node_list = self._topological_order()
    for u in topological_node_list:
        for v, layer_id in self.adj_list[u]:
            layer = self.layer_list[layer_id]
            if is_layer(layer, 'Conv'):
                ret.add_conv_width(layer_width(layer))
            if is_layer(layer, 'Dense'):
                ret.add_dense_width(layer_width(layer))

    # The position of each node: how many Conv and Dense layers appear
    # before it in topological order.
    layer_count = 0
    pos = [0] * len(topological_node_list)
    for u in topological_node_list:
        pos[u] = layer_count
        for v, layer_id in self.adj_list[u]:
            layer = self.layer_list[layer_id]
            if is_layer(layer, 'Conv') or is_layer(layer, 'Dense'):
                layer_count += 1

    for u in topological_node_list:
        for v, layer_id in self.adj_list[u]:
            if pos[u] == pos[v]:
                continue
            layer = self.layer_list[layer_id]
            if is_layer(layer, 'Concatenate'):
                ret.add_skip_connection(pos[u], pos[v],
                                        NetworkDescriptor.CONCAT_CONNECT)
            if is_layer(layer, 'WeightedAdd'):
                ret.add_skip_connection(pos[u], pos[v],
                                        NetworkDescriptor.ADD_CONNECT)
    return ret
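
# Example (hypothetical graph): in a chain Conv -> Conv -> Dense, the four
# nodes get positions 0, 1, 2, 3. A WeightedAdd edge from the node after the
# first Conv to the node after the second Conv would then be recorded as
# add_skip_connection(1, 2, NetworkDescriptor.ADD_CONNECT); edges with
# pos[u] == pos[v] cross no Conv/Dense layer and are skipped.
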
def _search_pre(self, u, start_dim, total_dim, n_add):
    """Search upward through the graph for widening the layers.

    Args:
        u: The starting node identifier.
        start_dim: The index at which to insert the additional dimensions.
        total_dim: The total number of dimensions the layer had before
            widening.
        n_add: The number of dimensions to add.
    """
    if self.pre_vis[u]:
        return
    self.pre_vis[u] = True
    self._search_next(u, start_dim, total_dim, n_add)
    for v, layer_id in self.reverse_adj_list[u]:
        layer = self.layer_list[layer_id]
        if is_layer(layer, 'Conv'):
            new_layer = wider_pre_conv(layer, n_add, self.weighted)
            self._replace_layer(layer_id, new_layer)
        elif is_layer(layer, 'Dense'):
            new_layer = wider_pre_dense(layer, n_add, self.weighted)
            self._replace_layer(layer_id, new_layer)
        elif is_layer(layer, 'BatchNormalization'):
            self._search_pre(v, start_dim, total_dim, n_add)
        elif is_layer(layer, 'Concatenate'):
            if self.layer_id_to_input_node_ids[layer_id][1] == v:
                # v feeds the right input of the Concatenate.
                other_branch_v = self.layer_id_to_input_node_ids[layer_id][0]
                if self.pre_vis[other_branch_v]:
                    # The other branch has already been widened, so the part
                    # of the graph above this Concatenate layer is done.
                    continue
                pre_total_dim = self._upper_layer_width(v)
                pre_start_dim = start_dim - (total_dim - pre_total_dim)
                self._search_pre(v, pre_start_dim, pre_total_dim, n_add)
        else:
            self._search_pre(v, start_dim, total_dim, n_add)
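
# Note on the Concatenate case above (hypothetical numbers): moving from
# below a Concatenate into its right branch translates the coordinates into
# that branch's frame. With total_dim=96, start_dim=80, and a 64-wide right
# branch, the recursion continues with pre_total_dim = 64 and
# pre_start_dim = 80 - (96 - 64) = 48.
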
def to_wider_model(self, pre_layer_id, n_add):
    """Widen the last dimension of the output of the layer pre_layer_id.

    The graph is modified in place.

    Args:
        pre_layer_id: The identifier of a convolutional or dense layer.
        n_add: The number of dimensions to add.
    """
    self.operation_history.append(('to_wider_model', pre_layer_id, n_add))
    pre_layer = self.layer_list[pre_layer_id]
    output_id = self.layer_id_to_output_node_ids[pre_layer_id][0]
    dim = layer_width(pre_layer)
    if is_layer(pre_layer, 'Conv'):
        new_layer = wider_pre_conv(pre_layer, n_add, self.weighted)
    else:
        new_layer = wider_pre_dense(pre_layer, n_add, self.weighted)
    self._replace_layer(pre_layer_id, new_layer)
    self._search_next(output_id, dim, dim, n_add)
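
# Example (hypothetical usage; assumes `graph` is a populated instance of
# this class whose layer with id 3 is a Conv or Dense layer):
#
#     graph.to_wider_model(pre_layer_id=3, n_add=16)
#
# This widens layer 3's output by 16 units and then propagates the change
# through _search_next so every affected downstream layer stays consistent.
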
def _layer_ids_by_type(self, type_str):
    return list(
        filter(
            lambda layer_id: is_layer(self.layer_list[layer_id], type_str),
            range(self.n_layers)))
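
# Example (hypothetical): self._layer_ids_by_type('Conv') returns the ids of
# all convolutional layers in the graph.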