Example #1
 def compute_output_shape(node):
     try:
         return LAYER_DESCRIPTORS[node.kind](node)
     except NotImplementedError:
         raise ConversionError(
             'Output shape computation not implemented for type: %s' %
             node.kind)
Example #2
 def map_eltwise(cls, node):
     operations = {0: 'Mul', 1: 'Add', 2: 'Max'}
     op_code = node.parameters.operation
     try:
         return Node.create(operations[op_code])
     except KeyError:
         raise ConversionError('Unknown elementwise operation: {}'.format(op_code))
Example #3
 def transformed(self, transformers):
     graph = self
     for transformer in transformers:
         graph = transformer(graph)
         if graph is None:
             raise ConversionError('Transformer failed: {}'.format(transformer))
         assert isinstance(graph, CaffeGraph)
     return graph
Example #4
 def get_handler(self, node_kind, prefix):
     name = get_handler_name(node_kind)
     name = '_'.join((prefix, name))
     try:
         return getattr(NodeMapper, name)
     except AttributeError:
         raise ConversionError(
             'No handler found for node kind: %s (expected: %s)' %
             (node_kind, name))
Example #5
 def map_eltwise(cls, node):
     operations = {0: 'Mul', 1: 'Add', 2: 'Max'}
     op_code = node.parameters.operation
     try:
         kwargs = {}
         cls._convert_output_shape(kwargs, node)
         return Node.create(operations[op_code], **kwargs)
     except KeyError:
         raise ConversionError(
             'Unknown elementwise operation: {}'.format(op_code))
Example #6
 def parameters(self):
     if self.layer is not None:
         params = get_handler_name(self.kind)
         if params == 'deconvolution':
             params = 'convolution'
         params = '_'.join((params, 'param'))
         try:
             return getattr(self.layer, params)
         except AttributeError:
             raise ConversionError(
                 'Caffe parameters not found for layer kind: %s' % self.kind)
     return None
Example #7
 def map_pooling(cls, node):
     parent, _ = node.get_only_parent()
     kwargs = cls.get_kernel_params(node, parent.output_shape)
     if node.parameters.pool == 0:
         kwargs['pooling_type'] = 'MAX'
     elif node.parameters.pool == 1:
         kwargs['pooling_type'] = 'AVG'
     else:
         # Stochastic pooling, for instance.
         raise ConversionError('Unsupported pooling type.')
     cls._convert_output_shape(kwargs, node)
     return Node.create('Pool', **kwargs)
Example #8
 def __init__(self, graph, data, target):
     self.graph = graph
     self.data = data
     self.tab = ' ' * 4
     self.prefix = ''
     target = target.lower()
     if target == 'tensorflow':
         self.target = target
         self.net = 'TensorFlowNetwork'
     elif target == 'keras':
         self.target = target
         self.net = 'KerasNetwork'
     elif target == 'caffe':
         self.target = target
         self.net = 'CaffeNetwork'
     else:
         raise ConversionError('Target %s is not supported yet.' % target)
Example #9
    def map_pooling(cls, node):
        kwargs, padding = cls.get_kernel_params(node)
        if node.parameters.pool == 0:
            kwargs['pooling_type'] = 'MAX'
        elif node.parameters.pool == 1:
            kwargs['pooling_type'] = 'AVG'
        else:
            # Stochastic pooling, for instance.
            raise ConversionError('Unsupported pooling type.')
        kwargs['window_shape'] = [
            1, node.kernel_parameters.k_h, node.kernel_parameters.k_w, 1
        ]
        cls._convert_output_shape(kwargs, node)

        if padding['paddings'] is not None:
            return [
                Node.create('Pad', **padding),
                Node.create('Pool', **kwargs)
            ]
        else:
            return Node.create('Pool', **kwargs)
Example #10
 def get_node(self, name):
     try:
         return self.node_lut[name]
     except KeyError:
         raise ConversionError('Layer not found: %s' % name)
Example #11
 def get_only_parent(self):
     if len(self.parents) != 1:
         raise ConversionError(
             'Node (%s) expected to have 1 parent. Found %s.' %
             (self, len(self.parents)))
     return self.parents[0]
Example #12
 def __init__(self,
              def_path,
              data_path,
              target_toolkit,
              input_shape=None,
              phase='test'):
     self.layer_name_map = {}
     self.data_injector = None
     self.is_train_proto = False
     self.input_shape = input_shape
     if def_path is None:
         if self.input_shape is None:
             raise ConversionError(
                 'The input shape must be provided when the graph prototxt is not given.')
         self.input_shape = [1] + self.input_shape
         def_path, self.data_injector = self.gen_prototxt_from_caffemodel(
             data_path, self.input_shape)
         self.is_train_proto = True
     else:
         model = get_caffe_resolver().NetParameter()
         with open(def_path, 'r') as f:
             text_format.Merge(f.read(), model)
         layers = model.layers or model.layer
         # Detect layer kinds that only appear in a train_val prototxt
         if any(NodeKind.map_raw_kind(layer.type) in LAYER_IN_TRAIN_PROTO
                for layer in layers):
             if self.input_shape is None:
                 raise ConversionError(
                     'An input shape must be provided along with a train_val.prototxt.')
             self.input_shape = [1] + self.input_shape
             self.is_train_proto = True
     graph = GraphBuilder(def_path, self.input_shape, self.is_train_proto,
                          phase).build()
     if self.is_train_proto:
         def_path = graph.prototxt
     if data_path is not None:
         graph = graph.transformed([
             # Load and associate the learned parameters
             self.data_injector or DataInjector(def_path, data_path),
             BatchNormScaleBiasFuser(),
             BatchNormPreprocessor()  # Pre-process batch normalization data
         ])
         target_toolkit = target_toolkit.lower()
         if target_toolkit not in ('caffe', 'caffe2'):
             graph = graph.transformed([
                 # Reshape the parameters to TensorFlow's ordering
                 DataReshaper({
                     NodeKind.Convolution: (2, 3, 1, 0),    # (c_o, c_i, h, w) -> (h, w, c_i, c_o)
                     NodeKind.Deconvolution: (2, 3, 1, 0),  # (c_o, c_i, h, w) -> (h, w, c_i, c_o)
                     NodeKind.InnerProduct: (1, 0),         # (c_o, c_i) -> (c_i, c_o)
                 }),
                 ParameterNamer()  # Convert parameters to dictionaries
             ])
     self.graph = graph
     #  self.graph = NodeRenamer()(graph)
     print(self.graph)
Example #13
 def map(self, node_kind):
     try:
         return self.mapping[node_kind]
     except KeyError:
         raise ConversionError(
             'Ordering not found for node kind: {}'.format(node_kind))
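
Note: every example above raises or propagates ConversionError, but its definition does not appear on this page. A minimal sketch of what the snippets assume, namely a plain Exception subclass (the docstring below is illustrative and not taken from any of the projects), would be:

 class ConversionError(Exception):
     """Raised when a Caffe graph, layer, or parameter cannot be converted."""

A conversion front end can then catch ConversionError around its entry point and report the message instead of surfacing a raw traceback.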