Example #1
 def deep_layer_ids(self):
     ret = []
     for layer_id in self.get_main_chain_layers():
         layer = self.layer_list[layer_id]
         if is_layer(layer, LayerType.GLOBAL_POOL):
             break
         if is_layer(layer, LayerType.ADD) or is_layer(layer, LayerType.CONCAT):
             continue
         ret.append(layer_id)
     return ret
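These snippets all depend on a helper `is_layer(layer, kind)` and, in newer revisions, a `LayerType` enumeration, neither of which is shown on this page. A minimal sketch of what they might look like, assuming each stub layer class embeds its kind in its class name (e.g. `StubConv2d`, `StubDense`); this is illustrative, not the library's actual implementation:

from enum import Enum

class LayerType(Enum):
    CONV = 'Conv'
    DENSE = 'Dense'
    POOL = 'Pooling'
    GLOBAL_POOL = 'GlobalAveragePooling'
    ADD = 'Add'
    CONCAT = 'Concatenate'
    RELU = 'ReLU'
    DROPOUT = 'Dropout'
    BATCH_NORM = 'BatchNormalization'

def is_layer(layer, kind):
    # Accept either a LayerType member or a plain type-name string,
    # matching the two calling styles seen across these examples.
    name = kind.value if isinstance(kind, LayerType) else kind
    # A bare substring check is naive (e.g. 'Pooling' also matches a
    # global pooling class); the real helper disambiguates.
    return name in type(layer).__name__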
Example #3
 def deep_layer_ids(self):
     ret = []
     for layer_id in self.get_main_chain_layers():
         layer = self.layer_list[layer_id]
         if is_layer(layer, 'GlobalAveragePooling'):
             break
         if is_layer(layer, 'Add') or is_layer(layer, 'Concatenate'):
             continue
         ret.append(layer_id)
     return ret
Example #4
 def _upper_layer_width(self, u):
     for v, layer_id in self.reverse_adj_list[u]:
         layer = self.layer_list[layer_id]
         if is_layer(layer, LayerType.CONV) or is_layer(layer, LayerType.DENSE):
             return layer_width(layer)
         elif is_layer(layer, LayerType.CONCAT):
             a = self.layer_id_to_input_node_ids[layer_id][0]
             b = self.layer_id_to_input_node_ids[layer_id][1]
             return self._upper_layer_width(a) + self._upper_layer_width(b)
         else:
             return self._upper_layer_width(v)
     return self.node_list[0].shape[-1]
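`layer_width` is assumed to report the output width of a weighted layer. A plausible sketch, assuming Conv stubs expose `filters` and Dense stubs expose `units`:

def layer_width(layer):
    if is_layer(layer, LayerType.CONV):
        return layer.filters
    if is_layer(layer, LayerType.DENSE):
        return layer.units
    raise TypeError('The layer should be either Conv or Dense.')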
Example #6
 def _upper_layer_width(self, u):
     for v, layer_id in self.reverse_adj_list[u]:
         layer = self.layer_list[layer_id]
         if is_layer(layer, 'Conv') or is_layer(layer, 'Dense'):
             return layer_width(layer)
         elif is_layer(layer, 'Concatenate'):
             a = self.layer_id_to_input_node_ids[layer_id][0]
             b = self.layer_id_to_input_node_ids[layer_id][1]
             return self._upper_layer_width(a) + self._upper_layer_width(b)
         else:
             return self._upper_layer_width(v)
     return self.node_list[0].shape[-1]
Example #7
 def _get_pooling_layers(self, start_node_id, end_node_id):
     """Given two node IDs, return all the pooling layers between them."""
     layer_list = []
     node_list = [start_node_id]
     assert self._depth_first_search(end_node_id, layer_list, node_list)
     ret = []
     for layer_id in layer_list:
         layer = self.layer_list[layer_id]
         if is_layer(layer, 'Pooling'):
             ret.append(layer)
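         # A conv with stride != 1 reduces the spatial size just like a
         # pooling layer, so it is collected here as well.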
         elif is_layer(layer, 'Conv') and layer.stride != 1:
             ret.append(layer)
     return ret
Example #8
def layer_distance(a, b):
    """The distance between two layers."""
    if type(a) != type(b):
        return 1.0
    if is_layer(a, LayerType.CONV):
        att_diff = [(a.filters, b.filters), (a.kernel_size, b.kernel_size),
                    (a.stride, b.stride)]
        return attribute_difference(att_diff)
    if is_layer(a, LayerType.POOL):
        att_diff = [(a.padding, b.padding), (a.kernel_size, b.kernel_size),
                    (a.stride, b.stride)]
        return attribute_difference(att_diff)
    return 0.0
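`attribute_difference` is not shown here. Since `layer_distance` must stay within [0, 1], a reasonable sketch is the mean of per-attribute normalized differences, assuming scalar attribute values; illustrative only:

def attribute_difference(att_diff):
    # att_diff is a list of (value_a, value_b) attribute pairs.
    ret = 0
    for a_value, b_value in att_diff:
        if max(a_value, b_value) == 0:
            diff = 0
        else:
            diff = abs(a_value - b_value) / max(a_value, b_value)
        ret += diff
    return ret / len(att_diff)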
Example #9
    def _get_pooling_layers(self, start_node_id, end_node_id):
        """Given two node IDs, return all the pooling layers between them."""
        layer_list = []
        node_list = [start_node_id]
        assert self._depth_first_search(end_node_id, layer_list, node_list)
        ret = []
        for layer_id in layer_list:
            layer = self.layer_list[layer_id]
            if is_layer(layer, LayerType.POOL):
                ret.append(layer)
            elif is_layer(layer, LayerType.CONV) and (layer.stride != 1 or layer.padding != int(layer.kernel_size / 2)):
                ret.append(layer)

        return ret
Example #12
    def extract_descriptor(self):
        """Extract the the description of the Graph as an instance of NetworkDescriptor."""
        main_chain = self.get_main_chain()
        index_in_main_chain = {}
        for index, u in enumerate(main_chain):
            index_in_main_chain[u] = index

        ret = NetworkDescriptor()
        for u in main_chain:
            for v, layer_id in self.adj_list[u]:
                if v not in index_in_main_chain:
                    continue
                layer = self.layer_list[layer_id]
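                # Null the weights on a shallow copy first so the deepcopy
                # below does not clone large weight tensors.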
                copied_layer = copy(layer)
                copied_layer.weights = None
                ret.add_layer(deepcopy(copied_layer))

        for u in index_in_main_chain:
            for v, layer_id in self.adj_list[u]:
                if v not in index_in_main_chain:
                    temp_u = u
                    temp_v = v
                    temp_layer_id = layer_id
                    skip_type = None
                    while not (temp_v in index_in_main_chain
                               and temp_u in index_in_main_chain):
                        if is_layer(self.layer_list[temp_layer_id],
                                    LayerType.CONCAT):
                            skip_type = NetworkDescriptor.CONCAT_CONNECT
                        if is_layer(self.layer_list[temp_layer_id],
                                    LayerType.ADD):
                            skip_type = NetworkDescriptor.ADD_CONNECT
                        temp_u = temp_v
                        temp_v, temp_layer_id = self.adj_list[temp_v][0]
                    ret.add_skip_connection(index_in_main_chain[u],
                                            index_in_main_chain[temp_u],
                                            skip_type)

                elif index_in_main_chain[v] - index_in_main_chain[u] != 1:
                    skip_type = None
                    if is_layer(self.layer_list[layer_id], LayerType.CONCAT):
                        skip_type = NetworkDescriptor.CONCAT_CONNECT
                    if is_layer(self.layer_list[layer_id], LayerType.ADD):
                        skip_type = NetworkDescriptor.ADD_CONNECT
                    ret.add_skip_connection(index_in_main_chain[u],
                                            index_in_main_chain[v], skip_type)

        return ret
Example #13
 def _get_pooling_layers(self, start_node_id, end_node_id):
     layer_list = []
     node_list = [start_node_id]
     self._depth_first_search(end_node_id, layer_list, node_list)
     return filter(
         lambda layer_id: is_layer(self.layer_list[layer_id], 'Pooling'),
         layer_list)
Example #14
def create_new_layer(layer, n_dim):
    input_shape = layer.output.shape
    dense_deeper_classes = [StubDense, get_dropout_class(n_dim), StubReLU]
    conv_deeper_classes = [
        get_conv_class(n_dim),
        get_batch_norm_class(n_dim), StubReLU
    ]
    if is_layer(layer, LayerType.RELU):
        conv_deeper_classes = [
            get_conv_class(n_dim),
            get_batch_norm_class(n_dim)
        ]
        dense_deeper_classes = [StubDense, get_dropout_class(n_dim)]
    elif is_layer(layer, LayerType.DROPOUT):
        dense_deeper_classes = [StubDense, StubReLU]
    elif is_layer(layer, LayerType.BATCH_NORM):
        conv_deeper_classes = [get_conv_class(n_dim), StubReLU]

    if len(input_shape) == 1:
        # It is in the dense layer part.
        layer_class = sample(dense_deeper_classes, 1)[0]
    else:
        # It is in the conv layer part.
        layer_class = sample(conv_deeper_classes, 1)[0]

    if layer_class == StubDense:
        new_layer = StubDense(input_shape[0], input_shape[0])

    elif layer_class == get_dropout_class(n_dim):
        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)

    elif layer_class == get_conv_class(n_dim):
        new_layer = layer_class(input_shape[-1],
                                input_shape[-1],
                                sample((1, 3, 5), 1)[0],
                                stride=1)

    elif layer_class == get_batch_norm_class(n_dim):
        new_layer = layer_class(input_shape[-1])

    elif layer_class == get_pooling_class(n_dim):
        new_layer = layer_class(sample((1, 3, 5), 1)[0])

    else:
        new_layer = layer_class()

    return new_layer
Example #15
 def _get_pooling_layers(self, start_node_id, end_node_id):
     """Given two node IDs, return all the pooling layers between them."""
     layer_list = []
     node_list = [start_node_id]
     self._depth_first_search(end_node_id, layer_list, node_list)
     return filter(
         lambda layer_id: is_layer(self.layer_list[layer_id], 'Pooling'),
         layer_list)
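Both variants return a lazy `filter` iterator under Python 3, so the result can only be traversed once. An eager equivalent, sketched with a hypothetical name to make clear it is not part of the library:

def _get_pooling_layer_ids(self, start_node_id, end_node_id):
    # Materialize the matching layer IDs so callers can reuse the list.
    layer_list = []
    node_list = [start_node_id]
    self._depth_first_search(end_node_id, layer_list, node_list)
    return [layer_id for layer_id in layer_list
            if is_layer(self.layer_list[layer_id], 'Pooling')]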
Example #16
    def _search(self, u, start_dim, total_dim, n_add):
        """Search the graph for widening the layers.

        Args:
            u: The starting node identifier.
            start_dim: The position to insert the additional dimensions.
            total_dim: The total number of dimensions the layer has before widening.
            n_add: The number of dimensions to add.
        """
        if (u, start_dim, total_dim, n_add) in self.vis:
            return
        self.vis[(u, start_dim, total_dim, n_add)] = True
        for v, layer_id in self.adj_list[u]:
            layer = self.layer_list[layer_id]

            if is_layer(layer, 'Conv'):
                new_layer = wider_next_conv(layer, start_dim, total_dim, n_add,
                                            self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, 'Dense'):
                new_layer = wider_next_dense(layer, start_dim, total_dim,
                                             n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, 'BatchNormalization'):
                new_layer = wider_bn(layer, start_dim, total_dim, n_add,
                                     self.weighted)
                self._replace_layer(layer_id, new_layer)
                self._search(v, start_dim, total_dim, n_add)

            elif is_layer(layer, 'Concatenate'):
                if self.layer_id_to_input_node_ids[layer_id][1] == u:
                    # u is on the right of the concat
                    # next_start_dim += next_total_dim - total_dim
                    left_dim = self._upper_layer_width(
                        self.layer_id_to_input_node_ids[layer_id][0])
                    next_start_dim = start_dim + left_dim
                    next_total_dim = total_dim + left_dim
                else:
                    next_start_dim = start_dim
                    next_total_dim = total_dim + self._upper_layer_width(
                        self.layer_id_to_input_node_ids[layer_id][1])
                self._search(v, next_start_dim, next_total_dim, n_add)

            else:
                self._search(v, start_dim, total_dim, n_add)

        for v, layer_id in self.reverse_adj_list[u]:
            layer = self.layer_list[layer_id]
            if is_layer(layer, 'Conv'):
                new_layer = wider_pre_conv(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, 'Dense'):
                new_layer = wider_pre_dense(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, 'Concatenate'):
                continue
            else:
                self._search(v, start_dim, total_dim, n_add)
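The `Concatenate` branch is the subtle part: when `u` feeds the right input of a concat, the dimensions being widened shift right by the width of the left input, and the total width grows by the same amount. A worked example with illustrative numbers:

# u feeds the right input of a concat whose left input is 32 channels wide.
left_dim = 32
start_dim, total_dim, n_add = 16, 64, 8

next_start_dim = start_dim + left_dim  # 48: the widened dims shift right
next_total_dim = total_dim + left_dim  # 96: width of the concat output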
Example #17
 def deep_layer_ids(self):
     ret = []
     pre_layer = None
     pre_layer_id = None
     for layer_id in self.get_main_chain_layers():
         layer = self.layer_list[layer_id]
         if is_layer(layer, LayerType.GLOBAL_POOL):
             break
         if is_layer(layer, LayerType.ADD) or is_layer(layer, LayerType.CONCAT):
             continue
         if (is_layer(layer, LayerType.CONV) and pre_layer is not None
                 and is_layer(pre_layer, LayerType.BATCH_NORM)):
             ret.append(pre_layer_id)
         pre_layer = layer
         pre_layer_id = layer_id
     return ret
Example #18
    def _search(self, u, start_dim, total_dim, n_add):
        """Search the graph for all the layers to be widened caused by an operation.

        It is a recursive function with a duplication check to avoid infinite recursion.
        It searches from a starting node u until the corresponding layers have been widened.

        Args:
            u: The starting node ID.
            start_dim: The position to insert the additional dimensions.
            total_dim: The total number of dimensions the layer has before widening.
            n_add: The number of dimensions to add.
        """
        if (u, start_dim, total_dim, n_add) in self.vis:
            return
        self.vis[(u, start_dim, total_dim, n_add)] = True
        for v, layer_id in self.adj_list[u]:
            layer = self.layer_list[layer_id]

            if is_layer(layer, LayerType.CONV):
                new_layer = wider_next_conv(layer, start_dim, total_dim, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, LayerType.DENSE):
                new_layer = wider_next_dense(layer, start_dim, total_dim, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, LayerType.BATCH_NORM):
                new_layer = wider_bn(layer, start_dim, total_dim, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
                self._search(v, start_dim, total_dim, n_add)

            elif is_layer(layer, LayerType.CONCAT):
                if self.layer_id_to_input_node_ids[layer_id][1] == u:
                    # u is on the right of the concat
                    # next_start_dim += next_total_dim - total_dim
                    left_dim = self._upper_layer_width(self.layer_id_to_input_node_ids[layer_id][0])
                    next_start_dim = start_dim + left_dim
                    next_total_dim = total_dim + left_dim
                else:
                    next_start_dim = start_dim
                    next_total_dim = total_dim + self._upper_layer_width(self.layer_id_to_input_node_ids[layer_id][1])
                self._search(v, next_start_dim, next_total_dim, n_add)

            else:
                self._search(v, start_dim, total_dim, n_add)

        for v, layer_id in self.reverse_adj_list[u]:
            layer = self.layer_list[layer_id]
            if is_layer(layer, LayerType.CONV):
                new_layer = wider_pre_conv(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, LayerType.DENSE):
                new_layer = wider_pre_dense(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, LayerType.CONCAT):
                continue
            else:
                self._search(v, start_dim, total_dim, n_add)
Example #19
    def _search(self, u, start_dim, total_dim, n_add):
        """Search the graph for all the layers to be widened caused by an operation.

        It is a recursive function with a duplication check to avoid infinite recursion.
        It searches from a starting node u until the corresponding layers have been widened.

        Args:
            u: The starting node ID.
            start_dim: The position to insert the additional dimensions.
            total_dim: The total number of dimensions the layer has before widening.
            n_add: The number of dimensions to add.
        """
        if (u, start_dim, total_dim, n_add) in self.vis:
            return
        self.vis[(u, start_dim, total_dim, n_add)] = True
        for v, layer_id in self.adj_list[u]:
            layer = self.layer_list[layer_id]

            if is_layer(layer, 'Conv'):
                new_layer = wider_next_conv(layer, start_dim, total_dim, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, 'Dense'):
                new_layer = wider_next_dense(layer, start_dim, total_dim, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, 'BatchNormalization'):
                new_layer = wider_bn(layer, start_dim, total_dim, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
                self._search(v, start_dim, total_dim, n_add)

            elif is_layer(layer, 'Concatenate'):
                if self.layer_id_to_input_node_ids[layer_id][1] == u:
                    # u is on the right of the concat
                    # next_start_dim += next_total_dim - total_dim
                    left_dim = self._upper_layer_width(self.layer_id_to_input_node_ids[layer_id][0])
                    next_start_dim = start_dim + left_dim
                    next_total_dim = total_dim + left_dim
                else:
                    next_start_dim = start_dim
                    next_total_dim = total_dim + self._upper_layer_width(self.layer_id_to_input_node_ids[layer_id][1])
                self._search(v, next_start_dim, next_total_dim, n_add)

            else:
                self._search(v, start_dim, total_dim, n_add)

        for v, layer_id in self.reverse_adj_list[u]:
            layer = self.layer_list[layer_id]
            if is_layer(layer, 'Conv'):
                new_layer = wider_pre_conv(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, 'Dense'):
                new_layer = wider_pre_dense(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, 'Concatenate'):
                continue
            else:
                self._search(v, start_dim, total_dim, n_add)
Example #20
    def to_deeper_model(self, target_id, new_layer):
        """Insert a relu-conv-bn block after the target block.

        Args:
            target_id: A convolutional layer ID. The new block should be inserted after the block.
            new_layer: An instance of StubLayer subclasses.
        """
        self.operation_history.append(('to_deeper_model', target_id, new_layer))
        input_id = self.layer_id_to_input_node_ids[target_id][0]
        output_id = self.layer_id_to_output_node_ids[target_id][0]
        if self.weighted:
            if is_layer(new_layer, LayerType.DENSE):
                init_dense_weight(new_layer)
            elif is_layer(new_layer, LayerType.CONV):
                init_conv_weight(new_layer)
            elif is_layer(new_layer, LayerType.BATCH_NORM):
                init_bn_weight(new_layer)

        self._insert_new_layers([new_layer], input_id, output_id)
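The `init_*` helpers are assumed to produce function-preserving weights in the Net2Net style, so the inserted layer initially leaves the network's output unchanged. A hedged sketch for the dense case, assuming the stub exposes `units` and `set_weights`:

import numpy as np

def init_dense_weight(layer):
    # An identity weight matrix plus a zero bias makes the inserted
    # dense layer a no-op until training perturbs it.
    units = layer.units
    layer.set_weights((np.eye(units), np.zeros(units)))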
Example #21
    def to_deeper_model(self, target_id, new_layer):
        """Insert a relu-conv-bn block after the target block.

        Args:
            target_id: A convolutional layer ID. The new block should be inserted after the block.
            new_layer: An instance of StubLayer subclasses.
        """
        self.operation_history.append(('to_deeper_model', target_id, new_layer))
        input_id = self.layer_id_to_input_node_ids[target_id][0]
        output_id = self.layer_id_to_output_node_ids[target_id][0]
        if self.weighted:
            if is_layer(new_layer, 'Dense'):
                init_dense_weight(new_layer)
            elif is_layer(new_layer, 'Conv'):
                init_conv_weight(new_layer)
            elif is_layer(new_layer, 'BatchNormalization'):
                init_bn_weight(new_layer)

        self._insert_new_layers([new_layer], input_id, output_id)
Example #23
 def _insert_pooling_layer_chain(self, start_node_id, end_node_id):
     skip_output_id = start_node_id
     for layer in self._get_pooling_layers(start_node_id, end_node_id):
         new_layer = deepcopy(layer)
         if is_layer(new_layer, 'Conv'):
             filters = self.node_list[start_node_id].shape[-1]
             new_layer = get_conv_class(self.n_dim)(filters, filters, 1, layer.stride)
         skip_output_id = self.add_layer(new_layer, skip_output_id)
     skip_output_id = self.add_layer(StubReLU(), skip_output_id)
     return skip_output_id
Example #24
def to_deeper_graph(graph):
    weighted_layer_ids = graph.deep_layer_ids()

    deeper_layer_ids = sample(weighted_layer_ids, 1)

    for layer_id in deeper_layer_ids:
        layer = graph.layer_list[layer_id]
        if is_layer(layer, 'Conv'):
            graph.to_conv_deeper_model(layer_id, 3)
        else:
            graph.to_dense_deeper_model(layer_id)
    return graph
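`sample` here is presumably `random.sample`, which raises `ValueError` on an empty population, so a graph with no eligible layers would crash the mutation. A defensive wrapper, sketched:

from random import sample

def pick_one(candidates):
    # Guard against an empty candidate list before sampling.
    return sample(candidates, 1)[0] if candidates else None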
Example #25
    def extract_descriptor(self):
        ret = NetworkDescriptor()
        topological_node_list = self.topological_order
        for u in topological_node_list:
            for v, layer_id in self.adj_list[u]:
                layer = self.layer_list[layer_id]
                if is_layer(layer, 'Conv') and layer.kernel_size not in [1, (1,), (1, 1), (1, 1, 1)]:
                    ret.add_conv_width(layer_width(layer))
                if is_layer(layer, 'Dense'):
                    ret.add_dense_width(layer_width(layer))

        # The position of each node: how many Conv and Dense layers come before it.
        pos = [0] * len(topological_node_list)
        for v in topological_node_list:
            layer_count = 0
            for u, layer_id in self.reverse_adj_list[v]:
                layer = self.layer_list[layer_id]
                weighted = 0
                if (is_layer(layer, 'Conv') and layer.kernel_size not in [1, (1,), (1, 1), (1, 1, 1)]) \
                        or is_layer(layer, 'Dense'):
                    weighted = 1
                layer_count = max(pos[u] + weighted, layer_count)
            pos[v] = layer_count

        for u in topological_node_list:
            for v, layer_id in self.adj_list[u]:
                if pos[u] == pos[v]:
                    continue
                layer = self.layer_list[layer_id]
                if is_layer(layer, 'Concatenate'):
                    ret.add_skip_connection(pos[u], pos[v], NetworkDescriptor.CONCAT_CONNECT)
                if is_layer(layer, 'Add'):
                    ret.add_skip_connection(pos[u], pos[v], NetworkDescriptor.ADD_CONNECT)

        return ret
Example #26
def to_wider_graph(graph):
    weighted_layer_ids = graph.wide_layer_ids()
    wider_layers = sample(weighted_layer_ids, 1)

    for layer_id in wider_layers:
        layer = graph.layer_list[layer_id]
        if is_layer(layer, 'Conv'):
            n_add = layer.filters
        else:
            n_add = layer.units

        graph.to_wider_model(layer_id, n_add)
    return graph
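Because `n_add` is set to the layer's current `filters` or `units`, each call doubles the chosen layer's width; with illustrative numbers:

filters = 64
n_add = filters                         # the choice made in to_wider_graph
assert filters + n_add == 2 * filters   # the widened layer is twice as wide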
Example #28
def to_wider_graph(graph):
    weighted_layer_ids = graph.wide_layer_ids()
    weighted_layer_ids = list(filter(lambda x: graph.layer_list[x].output.shape[-1], weighted_layer_ids))
    wider_layers = sample(weighted_layer_ids, 1)

    for layer_id in wider_layers:
        layer = graph.layer_list[layer_id]
        if is_layer(layer, LayerType.CONV):
            n_add = layer.filters
        else:
            n_add = layer.units

        graph.to_wider_model(layer_id, n_add)
    return graph
Example #29
def to_deeper_graph(graph):
    weighted_layer_ids = graph.deep_layer_ids()
    if len(weighted_layer_ids) >= Constant.MAX_LAYERS:
        return None

    deeper_layer_ids = sample(weighted_layer_ids, 1)

    for layer_id in deeper_layer_ids:
        layer = graph.layer_list[layer_id]
        if is_layer(layer, 'Conv'):
            graph.to_conv_deeper_model(layer_id, 3)
        else:
            graph.to_dense_deeper_model(layer_id)
    return graph
Example #30
def to_wider_graph(graph):
    weighted_layer_ids = graph.wide_layer_ids()
    weighted_layer_ids = list(
        filter(lambda x: graph.layer_list[x].output.shape[-1],
               weighted_layer_ids))
    wider_layers = sample(weighted_layer_ids, 1)

    for layer_id in wider_layers:
        layer = graph.layer_list[layer_id]
        if is_layer(layer, 'Conv'):
            n_add = layer.filters
        else:
            n_add = layer.units

        graph.to_wider_model(layer_id, n_add)
    return graph
Example #31
 def _insert_pooling_layer_chain(self, start_node_id, end_node_id):
     skip_output_id = start_node_id
     for layer in self._get_pooling_layers(start_node_id, end_node_id):
         new_layer = deepcopy(layer)
         if is_layer(new_layer, LayerType.CONV):
             filters = self.node_list[start_node_id].shape[-1]
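             # Keep the original kernel only when the conv changes the
             # spatial size (non-'same' padding or stride != 1); otherwise
             # a cheap 1x1 conv on the skip path suffices.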
             kernel_size = layer.kernel_size if layer.padding != int(
                 layer.kernel_size / 2) or layer.stride != 1 else 1
             new_layer = get_conv_class(self.n_dim)(filters, filters, kernel_size, layer.stride,
                                                    padding=layer.padding)
             if self.weighted:
                 init_conv_weight(new_layer)
         skip_output_id = self.add_layer(new_layer, skip_output_id)
     skip_output_id = self.add_layer(StubReLU(), skip_output_id)
     return skip_output_id
Example #32
 def _insert_pooling_layer_chain(self, start_node_id, end_node_id):
     skip_output_id = start_node_id
     for layer in self._get_pooling_layers(start_node_id, end_node_id):
         new_layer = deepcopy(layer)
         if is_layer(new_layer, 'Conv'):
             filters = self.node_list[start_node_id].shape[-1]
             kernel_size = layer.kernel_size if layer.padding != int(
                 layer.kernel_size / 2) or layer.stride != 1 else 1
             new_layer = get_conv_class(self.n_dim)(filters, filters, kernel_size, layer.stride,
                                                    padding=layer.padding)
             if self.weighted:
                 init_conv_weight(new_layer)
         skip_output_id = self.add_layer(new_layer, skip_output_id)
     skip_output_id = self.add_layer(StubReLU(), skip_output_id)
     return skip_output_id
Example #33
    def _search(self, u, start_dim, total_dim, n_add):
        """Search the graph for all the layers to be widened caused by an operation.

        It is a recursive function with a duplication check to avoid infinite recursion.
        It searches from a starting node u until the corresponding layers have been widened.

        Args:
            u: The starting node ID.
            start_dim: The position to insert the additional dimensions.
            total_dim: The total number of dimensions the layer has before widening.
            n_add: The number of dimensions to add.
        """
        if (u, start_dim, total_dim, n_add) in self.vis:
            return
        self.vis[(u, start_dim, total_dim, n_add)] = True
        for v, layer_id in self.adj_list[u]:
            layer = self.layer_list[layer_id]

            if is_layer(layer, LayerType.CONV):
                new_layer = wider_next_conv(layer, start_dim, total_dim, n_add,
                                            self.weighted)
                self._replace_layer(layer_id, new_layer)
                if layer.groups > 1:
                    self._search(v, start_dim, total_dim, n_add)

            elif is_layer(layer, LayerType.DENSE):
                new_layer = wider_next_dense(layer, start_dim, total_dim,
                                             n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)

            elif is_layer(layer, LayerType.BATCH_NORM):
                new_layer = wider_bn(layer, start_dim, total_dim, n_add,
                                     self.weighted)
                self._replace_layer(layer_id, new_layer)
                self._search(v, start_dim, total_dim, n_add)

            elif is_layer(layer, LayerType.CONCAT):
                if self.layer_id_to_input_node_ids[layer_id][1] == u:
                    # u is on the right of the concat
                    # next_start_dim += next_total_dim - total_dim
                    left_dim = self._upper_layer_width(
                        self.layer_id_to_input_node_ids[layer_id][0])
                    next_start_dim = start_dim + left_dim
                    next_total_dim = total_dim + left_dim
                else:
                    next_start_dim = start_dim
                    next_total_dim = total_dim + self._upper_layer_width(
                        self.layer_id_to_input_node_ids[layer_id][1])
                self._search(v, next_start_dim, next_total_dim, n_add)

            else:
                self._search(v, start_dim, total_dim, n_add)

        for v, layer_id in self.reverse_adj_list[u]:
            layer = self.layer_list[layer_id]
            if is_layer(layer, LayerType.CONV):
                new_layer = wider_pre_conv(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
                if layer.groups > 1:
                    self._search(v, start_dim, total_dim, n_add)

            elif is_layer(layer, LayerType.DENSE):
                new_layer = wider_pre_dense(layer, n_add, self.weighted)
                self._replace_layer(layer_id, new_layer)
            elif is_layer(layer, LayerType.CONCAT):
                continue
            else:
                self._search(v, start_dim, total_dim, n_add)
Example #34
 def _layer_ids_by_type(self, type_str):
     return list(filter(lambda layer_id: is_layer(self.layer_list[layer_id], type_str), range(self.n_layers)))
Example #35
 def _conv_layer_ids_in_order(self):
     return list(filter(lambda layer_id: is_layer(self.layer_list[layer_id], LayerType.CONV),
                        self.get_main_chain_layers()))
Example #38
def create_new_layer(layer, n_dim):
    input_shape = layer.output.shape
    dense_deeper_classes = [StubDense, get_dropout_class(n_dim), StubReLU]
    conv_deeper_classes = [
        get_conv_class(n_dim),
        get_batch_norm_class(n_dim), StubReLU
    ]
    if is_layer(layer, LayerType.RELU):
        conv_deeper_classes = [
            get_conv_class(n_dim),
            get_batch_norm_class(n_dim)
        ]
        dense_deeper_classes = [StubDense, get_dropout_class(n_dim)]
    elif is_layer(layer, LayerType.DROPOUT):
        dense_deeper_classes = [StubDense, StubReLU]
    elif is_layer(layer, LayerType.BATCH_NORM):
        conv_deeper_classes = [get_conv_class(n_dim)]  #, StubReLU]

    new_layers = []
    if len(input_shape) == 1:
        # It is in the dense layer part.
        layer_class = sample(dense_deeper_classes, 1)[0]
    else:
        # It is in the conv layer part.
        layer_class = sample(conv_deeper_classes, 1)[0]

    if layer_class == StubDense:
        new_layer = StubDense(input_shape[0], input_shape[0])
        new_layers.append(new_layer)

    elif layer_class == get_dropout_class(n_dim):
        new_layer = layer_class(Constant.DENSE_DROPOUT_RATE)
        new_layers.append(new_layer)

    elif layer_class == get_conv_class(n_dim):
        # add conv layer
        # new_layer = layer_class(input_shape[-1], input_shape[-1], sample((1, 3, 5), 1)[0], stride=1)

        # add mobilenet block
        in_planes = input_shape[-1]
        expansion = sample((1, 6), 1)[0]
        stride = sample((1, 2), 1)[0]
        planes = expansion * in_planes

        new_layer = layer_class(in_planes, planes, 1, stride=1, padding=0)
        new_layers.append(new_layer)

        new_layer = get_batch_norm_class(n_dim)(planes)
        new_layers.append(new_layer)

        new_layer = StubReLU()
        new_layers.append(new_layer)

        new_layer = layer_class(planes,
                                planes,
                                3,
                                stride=stride,
                                padding=1,
                                groups=planes)
        new_layers.append(new_layer)

        new_layer = get_batch_norm_class(n_dim)(planes)
        new_layers.append(new_layer)

        new_layer = StubReLU()
        new_layers.append(new_layer)

        new_layer = layer_class(planes, in_planes, 1, stride=1, padding=0)
        new_layers.append(new_layer)

        new_layer = get_batch_norm_class(n_dim)(in_planes)
        new_layers.append(new_layer)

    elif layer_class == get_batch_norm_class(n_dim):
        new_layer = layer_class(input_shape[-1])
        new_layers.append(new_layer)

    elif layer_class == get_pooling_class(n_dim):
        new_layer = layer_class(sample((1, 3, 5), 1)[0])
        new_layers.append(new_layer)

    else:
        new_layer = layer_class()
        new_layers.append(new_layer)

    return new_layers
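The conv branch in this last variant no longer inserts a single layer: it assembles a MobileNetV2-style inverted residual, namely a 1x1 expansion, a 3x3 depthwise conv (`groups=planes`), and a 1x1 projection back to the input width, each followed by batch norm. A shape walk-through with illustrative numbers:

# With in_planes = 16 and a sampled expansion of 6, planes = 96:
#   1x1 conv:       16 -> 96   (expansion)
#   3x3 depthwise:  96 -> 96   (groups = 96, sampled stride)
#   1x1 conv:       96 -> 16   (projection back to in_planes)
# Note the sampled stride is applied only to the depthwise stage.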