Example #1
def deep_layer_ids2(self):
    """Return the IDs of ReLU layers on the main chain, up to global average pooling."""
    ret = []
    for layer_id in self.get_main_chain_layers():
        layer = self.layer_list[layer_id]
        if is_layer(layer, "GlobalAveragePooling"):
            break
        if is_layer(layer, "ReLU"):
            ret.append(layer_id)
    return ret
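
All of these snippets test layer types through `is_layer`, whose definition is not part of the listing. A minimal sketch of the idea, assuming stub layer classes such as `StubReLU` and `StubConv` (the real helper covers many more types):

class StubLayer:
    """Base class for the graph's stub layers (assumed)."""

class StubReLU(StubLayer):
    pass

class StubConv(StubLayer):
    pass

def is_layer(layer, layer_type):
    """Check whether `layer` is an instance of the stub class named by `layer_type`."""
    type_map = {
        "ReLU": StubReLU,
        "Conv": StubConv,
        # ... the full helper also maps "Dense", "Add", "Concatenate", "Pooling", etc.
    }
    return isinstance(layer, type_map[layer_type])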
Example #2
def deep_layer_ids(self):
    """Return the IDs of main-chain layers up to global average pooling, skipping Add/Concatenate."""
    ret = []
    for layer_id in self.get_main_chain_layers():
        layer = self.layer_list[layer_id]
        if is_layer(layer, "GlobalAveragePooling"):
            break
        if is_layer(layer, "Add") or is_layer(layer, "Concatenate"):
            continue
        ret.append(layer_id)
    return ret
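
Examples #1, #2 and #7 all iterate `get_main_chain_layers()`. Assuming the graph API these snippets imply (`self.get_main_chain()` returns the ordered node IDs of the longest input-to-output path, and `self.adj_list` maps a node to its outgoing `(next_node, layer_id)` pairs), a sketch of that helper could look like:

def get_main_chain_layers(self):
    """Collect the IDs of layers whose endpoints both lie on the main chain."""
    main_chain = self.get_main_chain()   # ordered node IDs (assumed)
    on_chain = set(main_chain)
    ret = []
    for u in main_chain:
        for v, layer_id in self.adj_list[u]:
            if v in on_chain:
                ret.append(layer_id)
    return ret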
Example #3
def _upper_layer_width(self, u):
    """Return the output width of the closest Conv/Dense layer above node u."""
    for v, layer_id in self.reverse_adj_list[u]:
        layer = self.layer_list[layer_id]
        if is_layer(layer, "Conv") or is_layer(layer, "Dense"):
            return layer_width(layer)
        elif is_layer(layer, "Concatenate"):
            # A concat's width is the sum of the widths of its two inputs.
            a = self.layer_id_to_input_node_ids[layer_id][0]
            b = self.layer_id_to_input_node_ids[layer_id][1]
            return self._upper_layer_width(a) + self._upper_layer_width(b)
        else:
            return self._upper_layer_width(v)
    # No incoming layer at all: fall back to the input node's width.
    return self.node_list[0].shape[-1]
Example #4
def _get_pooling_layers(self, start_node_id, end_node_id):
    """Given two node IDs, return all pooling layers (and strided convolutions) between them."""
    layer_list = []
    node_list = [start_node_id]
    # The DFS fills layer_list/node_list with the path; the assert guards reachability.
    assert self._depth_first_search(end_node_id, layer_list, node_list)
    ret = []
    for layer_id in layer_list:
        layer = self.layer_list[layer_id]
        if is_layer(layer, "Pooling"):
            ret.append(layer)
        elif is_layer(layer, "Conv") and layer.stride != 1:
            # Strided convs also shrink the spatial size, so treat them like pooling.
            ret.append(layer)
    return ret
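
The `assert` relies on `_depth_first_search` filling `layer_list` and `node_list` with the path from the start node to `end_node_id` and returning `True` when the target is reachable. A backtracking sketch consistent with that contract (an assumption, not necessarily the verbatim upstream code):

def _depth_first_search(self, target_id, layer_id_list, node_list):
    """Backtracking DFS; on success the two lists hold the path to target_id."""
    u = node_list[-1]
    if u == target_id:
        return True
    for v, layer_id in self.adj_list[u]:
        layer_id_list.append(layer_id)
        node_list.append(v)
        if self._depth_first_search(target_id, layer_id_list, node_list):
            return True
        layer_id_list.pop()   # dead end: backtrack
        node_list.pop()
    return False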
Example #5
def create_new_layer(layer, n_dim):
    ''' Create a new layer to insert into the graph.
    '''

    input_shape = layer.output.shape
    dense_deeper_classes = [StubDense, get_dropout_class(n_dim), StubReLU]
    conv_deeper_classes = [
        get_conv_class(n_dim),
        get_batch_norm_class(n_dim), StubReLU
    ]
    if is_layer(layer, "ReLU"):
        conv_deeper_classes = [
            get_conv_class(n_dim),
            get_batch_norm_class(n_dim)
        ]
        dense_deeper_classes = [StubDense, get_dropout_class(n_dim)]
    elif is_layer(layer, "Dropout"):
        dense_deeper_classes = [StubDense, StubReLU]
    elif is_layer(layer, "BatchNormalization"):
        conv_deeper_classes = [get_conv_class(n_dim), StubReLU]

    layer_class = None
    if len(input_shape) == 1:
        # It is in the dense layer part.
        layer_class = sample(dense_deeper_classes, 1)[0]
    else:
        # It is in the conv layer part.
        layer_class = sample(conv_deeper_classes, 1)[0]

    if layer_class == StubDense:
        new_layer = StubDense(input_shape[0], input_shape[0])

    elif layer_class == get_dropout_class(n_dim):
        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)

    elif layer_class == get_conv_class(n_dim):
        new_layer = layer_class(input_shape[-1],
                                input_shape[-1],
                                sample((1, 3, 5), 1)[0],
                                stride=1)

    elif layer_class == get_batch_norm_class(n_dim):
        new_layer = layer_class(input_shape[-1])

    elif layer_class == get_pooling_class(n_dim):
        new_layer = layer_class(sample((1, 3, 5), 1)[0])

    else:
        new_layer = layer_class()

    return new_layer
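
`sample` here is `random.sample`, so each candidate list is drawn from uniformly. For instance, the kernel size of a new conv layer is chosen like this:

from random import sample

kernel_size = sample((1, 3, 5), 1)[0]   # one of 1, 3 or 5, uniformly at random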
Example #6
    def extract_descriptor(self):
        """Extract the the description of the Graph as an instance of NetworkDescriptor."""
        main_chain = self.get_main_chain()
        index_in_main_chain = {}
        for index, u in enumerate(main_chain):
            index_in_main_chain[u] = index

        ret = NetworkDescriptor()
        for u in main_chain:
            for v, layer_id in self.adj_list[u]:
                if v not in index_in_main_chain:
                    continue
                layer = self.layer_list[layer_id]
                copied_layer = copy(layer)
                copied_layer.weights = None
                ret.add_layer(deepcopy(copied_layer))

        for u in index_in_main_chain:
            for v, layer_id in self.adj_list[u]:
                if v not in index_in_main_chain:
                    temp_u = u
                    temp_v = v
                    temp_layer_id = layer_id
                    skip_type = None
                    while not (
                            temp_v in index_in_main_chain and temp_u in index_in_main_chain):
                        if is_layer(
                                self.layer_list[temp_layer_id], "Concatenate"):
                            skip_type = NetworkDescriptor.CONCAT_CONNECT
                        if is_layer(self.layer_list[temp_layer_id], "Add"):
                            skip_type = NetworkDescriptor.ADD_CONNECT
                        temp_u = temp_v
                        temp_v, temp_layer_id = self.adj_list[temp_v][0]
                    ret.add_skip_connection(
                        index_in_main_chain[u], index_in_main_chain[temp_u], skip_type
                    )

                elif index_in_main_chain[v] - index_in_main_chain[u] != 1:
                    skip_type = None
                    if is_layer(self.layer_list[layer_id], "Concatenate"):
                        skip_type = NetworkDescriptor.CONCAT_CONNECT
                    if is_layer(self.layer_list[layer_id], "Add"):
                        skip_type = NetworkDescriptor.ADD_CONNECT
                    ret.add_skip_connection(
                        index_in_main_chain[u], index_in_main_chain[v], skip_type
                    )

        return ret
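
`NetworkDescriptor` is the container being filled here. A rough sketch of the interface this code requires (inferred from usage, not the verbatim class):

class NetworkDescriptor:
    CONCAT_CONNECT = "concat"
    ADD_CONNECT = "add"

    def __init__(self):
        self.skip_connections = []   # (from_index, to_index, connection_type) triples
        self.layers = []

    def add_skip_connection(self, u, v, connection_type):
        self.skip_connections.append((u, v, connection_type))

    def add_layer(self, layer):
        self.layers.append(layer)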
Example #7
def _conv_layer_ids_in_order(self):
    """Return the IDs of the Conv layers along the main chain, in order."""
    return list(
        filter(
            lambda layer_id: is_layer(self.layer_list[layer_id], "Conv"),
            self.get_main_chain_layers(),
        )
    )
Example #8
def _layer_ids_by_type(self, type_str):
    """Return the IDs of all layers whose type matches type_str."""
    return list(
        filter(
            lambda layer_id: is_layer(self.layer_list[layer_id], type_str),
            range(self.n_layers),
        )
    )
Example #9
    def to_deeper_model(self, target_id, new_layer):
        """Insert a relu-conv-bn block after the target block.
        Args:
            target_id: A convolutional layer ID. The new block should be inserted after the block.
            new_layer: An instance of StubLayer subclasses.
        """
        self.operation_history.append(
            ("to_deeper_model", target_id, new_layer))
        input_id = self.layer_id_to_input_node_ids[target_id][0]
        output_id = self.layer_id_to_output_node_ids[target_id][0]
        if self.weighted:
            if is_layer(new_layer, "Dense"):
                init_dense_weight(new_layer)
            elif is_layer(new_layer, "Conv"):
                init_conv_weight(new_layer)
            elif is_layer(new_layer, "BatchNormalization"):
                init_bn_weight(new_layer)

        self._insert_new_layers([new_layer], input_id, output_id)
Example #10
def layer_distance(a, b):
    """The distance between two layers."""
    # pylint: disable=unidiomatic-typecheck
    if type(a) != type(b):
        return 1.0
    if is_layer(a, "Conv"):
        att_diff = [
            (a.filters, b.filters),
            (a.kernel_size, b.kernel_size),
            (a.stride, b.stride),
        ]
        return attribute_difference(att_diff)
    if is_layer(a, "Pooling"):
        att_diff = [
            (a.padding, b.padding),
            (a.kernel_size, b.kernel_size),
            (a.stride, b.stride),
        ]
        return attribute_difference(att_diff)
    return 0.0
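
`attribute_difference` is not shown in this listing; from its use it must map a list of `(a_attr, b_attr)` pairs to a score in [0, 1]. A plausible sketch (an assumption about the helper): the average of the normalized absolute differences.

def attribute_difference(att_diff):
    """Average the normalized |a - b| / max(a, b) over all attribute pairs."""
    ret = 0.0
    for a_value, b_value in att_diff:
        if max(a_value, b_value) == 0:
            continue   # both attributes are zero: identical pair contributes nothing
        ret += abs(a_value - b_value) / max(a_value, b_value)
    return ret / len(att_diff)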
Example #11
def to_skip_connection_graph2(graph):
    ''' Add a random skip connection to the graph.
    '''
    # The last conv layer cannot be widened, since the wider operator
    # cannot cross the two sides of the flatten layer.
    weighted_layer_ids = graph.skip_connection_layer_ids()
    valid_connection = []
    for index_a in range(4, len(weighted_layer_ids)):
        a_id = weighted_layer_ids[index_a]
        layer = graph.layer_list[a_id]
        if is_layer(layer, "ReLU"):
            for index_b in range(index_a + 1, len(weighted_layer_ids)):
                b_id = weighted_layer_ids[index_b]
                layer = graph.layer_list[b_id]
                if is_layer(layer, "BatchNormalization"):
                    valid_connection.append((index_a, index_b, NetworkDescriptor.ADD_CONNECT))
    # Disabled variant: pair ReLU layers for concat-type skip connections instead.
    # for index_a in range(4, len(weighted_layer_ids)):
    #     a_id = weighted_layer_ids[index_a]
    #     layer = graph.layer_list[a_id]
    #     if is_layer(layer, "ReLU"):
    #         for index_b in range(index_a + 1, len(weighted_layer_ids)):
    #             b_id = weighted_layer_ids[index_b]
    #             layer = graph.layer_list[b_id]
    #             if is_layer(layer, "ReLU"):
    #                 valid_connection.append((index_a, index_b, NetworkDescriptor.CONCAT_CONNECT))

    if not valid_connection:
        return graph
    for index_a, index_b, skip_type in sample(valid_connection, 1):
        a_id = weighted_layer_ids[index_a]
        b_id = weighted_layer_ids[index_b]

        if skip_type == NetworkDescriptor.ADD_CONNECT:
            graph.to_add_skip_model(a_id, b_id)
        else:
            graph.to_concat_skip_model(a_id, b_id)

    return graph
Example #12
def _insert_pooling_layer_chain(self, start_node_id, end_node_id):
    """Copy the pooling layers between the two nodes onto the skip path."""
    skip_output_id = start_node_id
    for layer in self._get_pooling_layers(start_node_id, end_node_id):
        if is_layer(layer, "Conv"):
            # Replace a strided conv with a 1x1 conv of the same stride.
            filters = self.node_list[start_node_id].shape[-1]
            new_layer = get_conv_class(self.n_dim)(filters, filters, 1,
                                                   layer.stride)
            if self.weighted:
                init_conv_weight(new_layer)
        else:
            new_layer = deepcopy(layer)
        skip_output_id = self.add_layer(new_layer, skip_output_id)
    skip_output_id = self.add_layer(StubReLU(), skip_output_id)
    return skip_output_id
Example #13
def to_wider_graph(graph):
    ''' Widen a randomly chosen Conv/Dense layer in the graph.
    '''
    weighted_layer_ids = graph.wide_layer_ids()
    # Keep only layers with a nonzero output width.
    weighted_layer_ids = list(
        filter(lambda x: graph.layer_list[x].output.shape[-1],
               weighted_layer_ids))
    wider_layers = sample(weighted_layer_ids, 1)

    for layer_id in wider_layers:
        layer = graph.layer_list[layer_id]
        if is_layer(layer, "Conv"):
            n_add = layer.filters
        else:
            n_add = layer.units

        graph.to_wider_model(layer_id, n_add)
    return graph
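
A hedged sketch of how mutation functions like this one might be combined in a search loop; the `deepcopy` and the candidate list are assumptions from context, not code shown in the listing:

from copy import deepcopy
from random import choice

def mutate(graph):
    """Apply one randomly chosen morphism to a copy of the graph."""
    op = choice([to_wider_graph, to_skip_connection_graph2])
    return op(deepcopy(graph))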
Example #14
    def _search(self, u, start_dim, total_dim, n_add):
        """Search the graph for all the layers to be widened caused by an operation.
        It is an recursive function with duplication check to avoid deadlock.
        It searches from a starting node u until the corresponding layers has been widened.
        Args:
            u: The starting node ID.
            start_dim: The position to insert the additional dimensions.
            total_dim: The total number of dimensions the layer has before widening.
            n_add: The number of dimensions to add.
        """
        if (u, start_dim, total_dim, n_add) in self.vis:
            return
        self.vis[(u, start_dim, total_dim, n_add)] = True
        for v, layer_id in self.adj_list[u]:
            layer = self.layer_list[layer_id]

            if is_layer(layer, "Conv"):
                new_layer = wider_next_conv(layer, start_dim, total_dim, n_add,
                                            self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, "Dense"):
                new_layer = wider_next_dense(layer, start_dim, total_dim,
                                             n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, "BatchNormalization"):
                new_layer = wider_bn(layer, start_dim, total_dim, n_add,
                                     self.weighted)
                self._replace_layer(layer_id, new_layer)
                self._search(v, start_dim, total_dim, n_add)

            elif is_layer(layer, "Concatenate"):
                if self.layer_id_to_input_node_ids[layer_id][1] == u:
                    # u feeds the right-hand input of the concat, so the added
                    # dimensions sit after the left input's width.
                    left_dim = self._upper_layer_width(
                        self.layer_id_to_input_node_ids[layer_id][0])
                    next_start_dim = start_dim + left_dim
                    next_total_dim = total_dim + left_dim
                else:
                    next_start_dim = start_dim
                    next_total_dim = total_dim + self._upper_layer_width(
                        self.layer_id_to_input_node_ids[layer_id][1])
                self._search(v, next_start_dim, next_total_dim, n_add)

            else:
                self._search(v, start_dim, total_dim, n_add)

        for v, layer_id in self.reverse_adj_list[u]:
            layer = self.layer_list[layer_id]
            if is_layer(layer, "Conv"):
                new_layer = wider_pre_conv(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, "Dense"):
                new_layer = wider_pre_dense(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, "Concatenate"):
                continue
            else:
                self._search(v, start_dim, total_dim, n_add)