Esempio n. 1
0
    def extract_descriptor(self):
        """Summarize the graph as a NetworkDescriptor.

        Records the width of every "weighted" layer (Conv layers whose kernel
        is not a pointwise 1/1x1 kernel, and Dense layers) and every skip
        connection (Concatenate / Add edges joining nodes at different depths).

        Returns:
            A populated NetworkDescriptor instance.
        """
        # Kernel sizes that make a Conv act as a pointwise projection; such
        # layers do not advance a node's depth position.
        unit_kernels = [1, (1,), (1, 1), (1, 1, 1)]

        def is_weighted(layer):
            # True for layers that count toward a node's depth position.
            return (is_layer(layer, 'Conv') and layer.kernel_size not in unit_kernels) \
                or is_layer(layer, 'Dense')

        ret = NetworkDescriptor()
        topological_node_list = self.topological_order

        # Record layer widths in topological order.
        for u in topological_node_list:
            for v, layer_id in self.adj_list[u]:
                layer = self.layer_list[layer_id]
                if is_layer(layer, 'Conv') and layer.kernel_size not in unit_kernels:
                    ret.add_conv_width(layer_width(layer))
                if is_layer(layer, 'Dense'):
                    ret.add_dense_width(layer_width(layer))

        # The position of each node: how many Conv and Dense layers before it.
        # NOTE(review): assumes node ids are valid indices into this list —
        # i.e. node ids are 0..len-1; confirm against the graph builder.
        pos = [0] * len(topological_node_list)
        for v in topological_node_list:
            layer_count = 0
            for u, layer_id in self.reverse_adj_list[v]:
                weighted = 1 if is_weighted(self.layer_list[layer_id]) else 0
                # Depth is the longest weighted path from any input.
                layer_count = max(pos[u] + weighted, layer_count)
            pos[v] = layer_count

        # Edges joining nodes at different depths are skip connections.
        for u in topological_node_list:
            for v, layer_id in self.adj_list[u]:
                if pos[u] == pos[v]:
                    continue
                layer = self.layer_list[layer_id]
                if is_layer(layer, 'Concatenate'):
                    ret.add_skip_connection(pos[u], pos[v], NetworkDescriptor.CONCAT_CONNECT)
                if is_layer(layer, 'Add'):
                    ret.add_skip_connection(pos[u], pos[v], NetworkDescriptor.ADD_CONNECT)

        return ret
Esempio n. 2
0
 def _upper_layer_width(self, u):
     """Return the width of the closest weighted layer above node *u*.

     Walks backwards through the first incoming edge of each node until a
     Conv or Dense layer is found; Concatenate layers combine the widths of
     their first two inputs. Falls back to the input node's last dimension
     when *u* has no incoming edges.
     """
     for prev_node, edge_layer_id in self.reverse_adj_list[u]:
         edge_layer = self.layer_list[edge_layer_id]
         if is_layer(edge_layer, 'Conv') or is_layer(edge_layer, 'Dense'):
             return layer_width(edge_layer)
         if is_layer(edge_layer, 'Concatenate'):
             # Concatenated width is the sum of the first two input widths.
             input_ids = self.layer_id_to_input_node_ids[edge_layer_id]
             return sum(self._upper_layer_width(nid) for nid in input_ids[:2])
         # Pass-through layer: keep climbing.
         return self._upper_layer_width(prev_node)
     # No incoming edges: use the model input's channel dimension.
     return self.node_list[0].shape[-1]
Esempio n. 3
0
 def _upper_layer_width(self, u):
     """Return the width of the closest weighted layer above node *u*.

     Walks backwards through the first incoming edge of each node until a
     CONV or DENSE layer is found; CONCAT layers combine the widths of
     their first two inputs. Falls back to the input node's last dimension
     when *u* has no incoming edges.
     """
     for prev_node, edge_layer_id in self.reverse_adj_list[u]:
         edge_layer = self.layer_list[edge_layer_id]
         if is_layer(edge_layer, LayerType.CONV) or is_layer(edge_layer, LayerType.DENSE):
             return layer_width(edge_layer)
         if is_layer(edge_layer, LayerType.CONCAT):
             # Concatenated width is the sum of the first two input widths.
             input_ids = self.layer_id_to_input_node_ids[edge_layer_id]
             return sum(self._upper_layer_width(nid) for nid in input_ids[:2])
         # Pass-through layer: keep climbing.
         return self._upper_layer_width(prev_node)
     # No incoming edges: use the model input's channel dimension.
     return self.node_list[0].shape[-1]
Esempio n. 4
0
 def _upper_layer_width(self, u):
     """Return the width of the closest weighted layer above node *u*.

     Walks backwards through the first incoming edge of each node until a
     CONV or DENSE layer is found; CONCAT layers combine the widths of
     their first two inputs. Falls back to the input node's last dimension
     when *u* has no incoming edges.
     """
     for prev_node, edge_layer_id in self.reverse_adj_list[u]:
         edge_layer = self.layer_list[edge_layer_id]
         if is_layer(edge_layer, LayerType.CONV) or is_layer(edge_layer, LayerType.DENSE):
             return layer_width(edge_layer)
         if is_layer(edge_layer, LayerType.CONCAT):
             # Concatenated width is the sum of the first two input widths.
             input_ids = self.layer_id_to_input_node_ids[edge_layer_id]
             return sum(self._upper_layer_width(nid) for nid in input_ids[:2])
         # Pass-through layer: keep climbing.
         return self._upper_layer_width(prev_node)
     # No incoming edges: use the model input's channel dimension.
     return self.node_list[0].shape[-1]
Esempio n. 5
0
    def to_wider_model(self, pre_layer_id, n_add):
        """Widen the last dimension of the output of the pre_layer.

        Args:
            pre_layer_id: The ID of a convolutional layer or dense layer.
            n_add: The number of dimensions to add.
        """
        self.operation_history.append(('to_wider_model', pre_layer_id, n_add))
        widened_layer = self.layer_list[pre_layer_id]
        out_node_id = self.layer_id_to_output_node_ids[pre_layer_id][0]
        old_width = layer_width(widened_layer)
        # Reset the visited set before propagating the width change.
        self.vis = {}
        self._search(out_node_id, old_width, old_width, n_add)
        # Refresh every node's cached shape from its producing layer.
        for node_id in self.topological_order:
            for target_id, edge_layer_id in self.adj_list[node_id]:
                self.node_list[target_id].shape = self.layer_list[edge_layer_id].output_shape
Esempio n. 6
0
    def to_wider_model(self, pre_layer_id, n_add):
        """Widen the last dimension of the output of the pre_layer.

        Args:
            pre_layer_id: The ID of a convolutional layer or dense layer.
            n_add: The number of dimensions to add.
        """
        self.operation_history.append(('to_wider_model', pre_layer_id, n_add))
        target_layer = self.layer_list[pre_layer_id]
        output_node = self.layer_id_to_output_node_ids[pre_layer_id][0]
        width = layer_width(target_layer)
        # Clear the visited set before propagating the width change.
        self.vis = {}
        self._search(output_node, width, width, n_add)
        # Update the tensor shapes.
        for src in self.topological_order:
            for dst, lid in self.adj_list[src]:
                self.node_list[dst].shape = self.layer_list[lid].output_shape
Esempio n. 7
0
    def to_wider_model(self, pre_layer_id, n_add):
        """Widen the last dimension of the output of the pre_layer.

        Args:
            pre_layer_id: The ID of a convolutional layer or dense layer.
            n_add: The number of dimensions to add.
        """
        self.operation_history.append(('to_wider_model', pre_layer_id, n_add))
        pre_layer = self.layer_list[pre_layer_id]
        output_id = self.layer_id_to_output_node_ids[pre_layer_id][0]
        dim = layer_width(pre_layer)
        # Reset the visited set before propagating the width change.
        self.vis = {}
        self._search(output_id, dim, dim, n_add)
        # Update the tensor shapes.
        for u in self.topological_order:
            for v, layer_id in self.adj_list[u]:
                self.node_list[v].shape = self.layer_list[layer_id].output_shape