Example 1
 def load_using_pb(self):
     # Parse the .caffemodel directly with the protobuf bindings (no pycaffe needed).
     self.caffemodel = get_caffe_resolver().NetParameter()
     with open(self.data_path, 'rb') as f:
         self.caffemodel.MergeFromString(f.read())
     # Old-style protos populate 'layers'; newer ones populate 'layer'.
     layers = self.caffemodel.layers or self.caffemodel.layer
     # Keep only layers that carry learned parameters (non-empty blobs).
     self.params = [(layer.name, self.normalize_pb_data(layer))
                    for layer in layers if layer.blobs]
     self.did_use_pb = True
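The example above parses the .caffemodel with the protobuf bindings alone, without instantiating Caffe. As a rough standalone sketch of that path (assuming the compiled caffe_pb2 bindings are importable; the helper name and weights path are placeholders):

 from caffe.proto import caffe_pb2  # assumption: compiled protobuf bindings are on the path

 def list_parameterized_layers(weights_path):
     """Print the layers of a .caffemodel that carry learned blobs, without pycaffe."""
     net = caffe_pb2.NetParameter()
     with open(weights_path, 'rb') as f:
         net.MergeFromString(f.read())
     # Old-style protos populate 'layers'; newer ones populate 'layer'.
     for layer in (net.layers or net.layer):
         if layer.blobs:
             print(layer.name, 'parameter blobs:', len(layer.blobs))

 list_parameterized_layers('model.caffemodel')  # placeholder file name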
Example 2
 def compute_output_shapes(self, model):
     sorted_nodes = self.topologically_sorted()
     # Serialize the model to a temporary prototxt so Caffe can load it from disk.
     (tmp_handle, tmp_prototxt) = tempfile.mkstemp(suffix=".prototxt")
     with open(tmp_prototxt, 'w') as f:
         f.write(text_format.MessageToString(model))
     os.close(tmp_handle)  # The descriptor from mkstemp is not needed once the file is written.
     self.prototxt = tmp_prototxt
     if has_pycaffe():
         # Let Caffe infer blob shapes by instantiating the network.
         caffe = get_caffe_resolver().caffe
         net = caffe.Net(tmp_prototxt, caffe.TEST)
         for key, value in net.blobs.items():
             try:
                 node = self.get_node(key)
                 # Pad the shape out to four dimensions (N, C, H, W).
                 dims = list(value.shape)
                 dims = dims + [1] * (4 - len(dims))
                 node.output_shape = TensorShape(*dims)
             except Exception:
                 # The blob has no matching node in the graph; skip it.
                 continue
         # Compute any remaining shapes statically.
         for node in sorted_nodes:
             if node.output_shape is None:
                 node.output_shape = TensorShape(
                     *NodeKind.compute_output_shape(node))
     else:
         # Without pycaffe, compute every shape statically.
         for node in sorted_nodes:
             node.output_shape = TensorShape(
                 *NodeKind.compute_output_shape(node))
Example 3
 def compute_output_shapes(self, model):
     sorted_nodes = self.topologically_sorted()
     # Serialize the model to a temporary prototxt so Caffe can load it from disk.
     (tmp_handle, tmp_prototxt) = tempfile.mkstemp(suffix=".prototxt")
     with open(tmp_prototxt, 'w') as f:
         f.write(text_format.MessageToString(model))
     os.close(tmp_handle)  # The descriptor from mkstemp is not needed once the file is written.
     self.prototxt = tmp_prototxt
     if has_pycaffe():
         # Let Caffe infer blob shapes by instantiating the network.
         caffe = get_caffe_resolver().caffe
         net = caffe.Net(tmp_prototxt, caffe.TEST)
         for key, value in net.blobs.items():
             try:
                 node = self.get_node(key)
                 # Pad the shape out to four dimensions (N, C, H, W).
                 dims = list(value.shape)
                 dims = dims + [1] * (4 - len(dims))
                 node.output_shape = TensorShape(*dims)
             except Exception:
                 # The blob has no matching node in the graph; skip it.
                 continue
         # Compute any remaining shapes statically.
         for node in sorted_nodes:
             if node.output_shape is None:
                 node.output_shape = TensorShape(*NodeKind.compute_output_shape(node))
         os.remove(tmp_prototxt)  # This variant also deletes the temporary prototxt.
     else:
         # Without pycaffe, compute every shape statically.
         for node in sorted_nodes:
             node.output_shape = TensorShape(*NodeKind.compute_output_shape(node))
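Both shape-computation variants above funnel the model through a temporary prototxt created with tempfile.mkstemp. A minimal sketch of that write-and-clean-up pattern in isolation (the helper name is illustrative, not from the project):

 import os
 import tempfile

 def write_temp_prototxt(prototxt_text):
     """Write prototxt text to a temporary file and return its path; the caller removes it."""
     handle, path = tempfile.mkstemp(suffix=".prototxt")
     try:
         # Reuse the descriptor returned by mkstemp instead of reopening by path.
         with os.fdopen(handle, 'w') as f:
             f.write(prototxt_text)
     except Exception:
         os.remove(path)
         raise
     return path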
Example 4
 def __init__(self, def_path, data_path, target_toolkit, input_shape=None, phase='test'):
     self.layer_name_map = {}
     self.data_injector = None
     self.is_train_proto = False
     self.input_shape = input_shape
     if def_path is None:
         if self.input_shape is None:
             raise ConversionError('if the graph prototxt is not provided, the input shape should be provided')
         self.input_shape = [1] + self.input_shape
         def_path, self.data_injector = self.gen_prototxt_from_caffemodel(data_path, self.input_shape)
         self.is_train_proto = True
     else:
         model = get_caffe_resolver().NetParameter()
         with open(def_path, 'r') as f:
             text_format.Merge(f.read(), model)
         layers = model.layers or model.layer
         if any(NodeKind.map_raw_kind(layer.type) in LAYER_IN_TRAIN_PROTO
                for layer in layers):
             if self.input_shape is None:
                 raise ConversionError('the train_val.prototxt should be provided with the input shape')
             self.input_shape = [1] + self.input_shape
             self.is_train_proto = True
     graph = GraphBuilder(def_path, self.input_shape, self.is_train_proto, phase).build()
     if self.is_train_proto:
         def_path = graph.prototxt
     if data_path is not None:
         graph = graph.transformed([
             self.data_injector if self.data_injector else DataInjector(def_path, data_path), # Load and associate learned parameters
             BatchNormScaleBiasFuser(),
             BatchNormPreprocessor() # Pre-process batch normalization data
         ])
         target_toolkit = target_toolkit.lower()
         if target_toolkit not in ('caffe', 'caffe2'):
             graph = graph.transformed([DataReshaper({ # Reshape the parameters to TensorFlow's ordering
                 NodeKind.Convolution: (2, 3, 1, 0), # (c_o, c_i, h, w) -> (h, w, c_i, c_o)
                 NodeKind.Deconvolution: (2, 3, 1, 0), # (c_o, c_i, h, w) -> (h, w, c_i, c_o)
                 NodeKind.InnerProduct: (1, 0) # (c_o, c_i) -> (c_i, c_o)
             }),
                 ParameterNamer() # Convert parameters to dictionaries
             ])
     self.graph = graph
     #  self.graph = NodeRenamer()(graph)
     print_stderr(self.graph)
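The axis tuples handed to DataReshaper are ordinary transpose orders; a small numpy illustration of the two mappings (the shapes below are examples only):

 import numpy as np

 # Convolution weights: Caffe stores (c_o, c_i, h, w); TensorFlow expects (h, w, c_i, c_o).
 caffe_conv = np.zeros((64, 3, 7, 7))        # example shape only
 tf_conv = caffe_conv.transpose(2, 3, 1, 0)  # -> (7, 7, 3, 64)

 # Fully connected weights: (c_o, c_i) -> (c_i, c_o).
 caffe_fc = np.zeros((1000, 4096))
 tf_fc = caffe_fc.transpose(1, 0)            # -> (4096, 1000)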
Example 5
 def load(self):
     self.model = get_caffe_resolver().NetParameter()
     with open(self.model_path, 'r') as f:
         text_format.Merge(f.read(), self.model)
     if self.is_train_proto:
         self.process_train_proto()
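Here text_format.Merge parses the human-readable prototxt into the same NetParameter message used elsewhere. A tiny standalone sketch of that parsing step (assuming caffe_pb2 is importable; the file name is a placeholder):

 from caffe.proto import caffe_pb2
 from google.protobuf import text_format

 net = caffe_pb2.NetParameter()
 with open('deploy.prototxt', 'r') as f:  # placeholder path
     text_format.Merge(f.read(), net)
 print(net.name, 'layers:', len(net.layer) or len(net.layers))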
Example 6
 def load_using_caffe(self):
     # Instantiate the network through pycaffe and copy out each layer's parameter blobs.
     caffe = get_caffe_resolver().caffe
     net = caffe.Net(str(self.def_path), str(self.data_path), caffe.TEST)
     self.params = [(name, [blob.data for blob in blobs])
                    for name, blobs in net.params.items()]
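For context, pycaffe's net.params is an ordered mapping from layer name to that layer's parameter blobs (typically [weights, bias]), each exposing a numpy array via .data. A short sketch of inspecting what load_using_caffe collects (assuming pycaffe is installed; file names are placeholders):

 import caffe  # assumption: pycaffe is installed

 net = caffe.Net('deploy.prototxt', 'weights.caffemodel', caffe.TEST)  # placeholder paths
 for name, blobs in net.params.items():
     print(name, [blob.data.shape for blob in blobs])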
Example 7
 def __init__(self,
              def_path,
              data_path,
              target_toolkit,
              input_shape=None,
              phase='test'):
     self.layer_name_map = {}
     self.data_injector = None
     self.is_train_proto = False
     self.input_shape = input_shape
     if def_path is None:
         if self.input_shape is None:
             raise ConversionError(
                 'if the graph prototxt is not provided, the input shape should be provided'
             )
         self.input_shape = [1] + self.input_shape
         def_path, self.data_injector = self.gen_prototxt_from_caffemodel(
             data_path, self.input_shape)
         self.is_train_proto = True
     else:
         model = get_caffe_resolver().NetParameter()
         with open(def_path, 'r') as f:
             text_format.Merge(f.read(), model)
         layers = model.layers or model.layer
         if any(NodeKind.map_raw_kind(layer.type) in LAYER_IN_TRAIN_PROTO
                for layer in layers):
             if self.input_shape is None:
                 raise ConversionError(
                     'the train_val.prototxt should be provided with the input shape'
                 )
             self.input_shape = [1] + self.input_shape
             self.is_train_proto = True
     graph = GraphBuilder(def_path, self.input_shape, self.is_train_proto,
                          phase).build()
     if self.is_train_proto:
         def_path = graph.prototxt
     if data_path is not None:
         graph = graph.transformed([
             # Load and associate the learned parameters.
             self.data_injector if self.data_injector else DataInjector(def_path, data_path),
             BatchNormScaleBiasFuser(),
             BatchNormPreprocessor()  # Pre-process batch normalization data
         ])
         target_toolkit = target_toolkit.lower()
         if target_toolkit not in ('caffe', 'caffe2'):
             graph = graph.transformed([
                 DataReshaper({
                     # Reshape the parameters to TensorFlow's ordering.
                     NodeKind.Convolution: (2, 3, 1, 0),    # (c_o, c_i, h, w) -> (h, w, c_i, c_o)
                     NodeKind.Deconvolution: (2, 3, 1, 0),  # (c_o, c_i, h, w) -> (h, w, c_i, c_o)
                     NodeKind.InnerProduct: (1, 0)          # (c_o, c_i) -> (c_i, c_o)
                 }),
                 ParameterNamer()  # Convert parameters to dictionaries
             ])
     self.graph = graph
     #  self.graph = NodeRenamer()(graph)
     print(self.graph)