Example #1
0
 def _upper_layer_width(self, u):
     """Return the width of the nearest width-defining layer above node *u*.

     Walks the reverse adjacency list upward from node *u*: a Conv or
     Dense layer defines the width directly via ``layer_width``; a
     Concatenate layer's width is the sum of the widths of all of its
     input branches; any other layer is treated as width-transparent and
     the search continues past it.

     Args:
         u: The ID of the node whose upstream width is requested.

     Returns:
         The integer width contributed to node *u* by its upstream
         layers, or the last dimension of the first node's shape when
         *u* has no incoming edges.
     """
     for v, layer_id in self.reverse_adj_list[u]:
         layer = self.layer_list[layer_id]
         if is_layer(layer, "Conv") or is_layer(layer, "Dense"):
             return layer_width(layer)
         if is_layer(layer, "Concatenate"):
             # Generalized: sum the widths of ALL inputs of the
             # concatenation, not just the first two, so that
             # 3-way (or wider) concatenates are handled correctly.
             input_ids = self.layer_id_to_input_node_ids[layer_id]
             return sum(self._upper_layer_width(n) for n in input_ids)
         # Width-transparent layer: keep searching upward.
         return self._upper_layer_width(v)
     # No predecessors at all: fall back to the input node's channel count.
     return self.node_list[0].shape[-1]
Example #2
0
 def to_wider_model(self, pre_layer_id, n_add):
     """Widen the last dimension of the output of the pre_layer.

     Records the operation in the history, propagates the widening
     through the graph starting at the pre_layer's output node, then
     refreshes the cached tensor shape of every node downstream.

     Args:
         pre_layer_id: The ID of a convolutional layer or dense layer.
         n_add: The number of dimensions to add.
     """
     self.operation_history.append(("to_wider_model", pre_layer_id, n_add))
     widened_layer = self.layer_list[pre_layer_id]
     start_node = self.layer_id_to_output_node_ids[pre_layer_id][0]
     old_width = layer_width(widened_layer)
     # Reset the visited set before propagating the change.
     self.vis = {}
     self._search(start_node, old_width, old_width, n_add)
     # Update the tensor shapes along every edge, in topological order.
     for src in self.topological_order:
         for dst, edge_layer_id in self.adj_list[src]:
             self.node_list[dst].shape = self.layer_list[edge_layer_id].output_shape