Example #1
    def init_modules(self):
        # Instantiate one of each supported legacy Torch nn module with dummy
        # size-1 arguments; every assignment overwrites n, so this only
        # exercises the constructors
        n = nn.Sequential()
        n = nn.SpatialConvolution(1, 1, 1, 1, 1, 1, 1, 1)
        n = nn.SpatialBatchNormalization(1)
        n = nn.ReLU()
        n = nn.Sigmoid()
        n = nn.SpatialMaxPooling(1, 1, 1, 1, 1, 1)
        n = nn.SpatialAveragePooling(1, 1, 1, 1, 1, 1)
        n = nn.Linear(1, 1)
        n = nn.Dropout(0.1)
        n = nn.SoftMax()
        n = nn.Identity()
        n = nn.Reshape(1)
        n = nn.BCECriterion()
        n = nn.MSECriterion()
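
When these legacy-nn modules are actually wired into a network, they are added to the Sequential container and run with forward. A minimal sketch under assumed toy shapes (the layer sizes and the torch.legacy import path are assumptions of this sketch, not from the original):

import torch
from torch.legacy import nn  # assumed: old Torch-style bindings shipped with early PyTorch releases

net = nn.Sequential()
net.add(nn.SpatialConvolution(1, 8, 3, 3, 1, 1, 1, 1))   # 1 -> 8 feature maps, 3x3 kernel, stride 1, pad 1
net.add(nn.SpatialBatchNormalization(8))
net.add(nn.ReLU())
net.add(nn.SpatialMaxPooling(2, 2, 2, 2, 0, 0))           # halve the spatial resolution
net.add(nn.Reshape(8 * 14 * 14))                          # flatten (placeholder: assumes 28x28 inputs)
net.add(nn.Linear(8 * 14 * 14, 10))
net.add(nn.SoftMax())

out = net.forward(torch.randn(1, 1, 28, 28))              # dummy forward pass
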
# 1x1 convolution producing RPN objectness scores (18 = 2 scores x 9 anchors)
tnet["rpn_cls_score"] = nn.SpatialConvolutionMM(512, 18, 1, 1, 1, 1, 0, 0)
transferW(tnet["rpn_cls_score"].weight,
          net.params['rpn_cls_score'][0].data.reshape(18, 512))
transferB(tnet["rpn_cls_score"].bias, net.params['rpn_cls_score'][1].data)

# 1x1 convolution producing RPN box-regression deltas (36 = 4 coordinates x 9 anchors)
tnet["rpn_bbox_pred"] = nn.SpatialConvolutionMM(512, 36, 1, 1, 1, 1, 0, 0)
transferW(tnet["rpn_bbox_pred"].weight,
          net.params['rpn_bbox_pred'][0].data.reshape(36, 512))
transferB(tnet["rpn_bbox_pred"].bias, net.params['rpn_bbox_pred'][1].data)

# fc6 of the VGG16 head: input is the flattened 512x7x7 RoI-pooled feature map (25088)
tnet["fc6"] = nn.Linear(25088, 4096)
transferW(tnet["fc6"].weight, net.params['fc6'][0].data)
transferB(tnet["fc6"].bias, net.params['fc6'][1].data)
tnet["relu6"] = nn.ReLU()
tnet["drop6"] = nn.Dropout(0.5)

# fc7 of the VGG16 head
tnet["fc7"] = nn.Linear(4096, 4096)
transferW(tnet["fc7"].weight, net.params['fc7'][0].data)
transferB(tnet["fc7"].bias, net.params['fc7'][1].data)
tnet["relu7"] = nn.ReLU()
tnet["drop7"] = nn.Dropout(0.5)

# per-class classification scores (21 = 20 Pascal VOC classes + background)
tnet["cls_score"] = nn.Linear(4096, 21)
transferW(tnet["cls_score"].weight, net.params['cls_score'][0].data)
transferB(tnet["cls_score"].bias, net.params['cls_score'][1].data)

# per-class box regression (84 = 4 coordinates x 21 classes)
tnet["bbox_pred"] = nn.Linear(4096, 84)
transferW(tnet["bbox_pred"].weight, net.params['bbox_pred'][0].data)
transferB(tnet["bbox_pred"].bias, net.params['bbox_pred'][1].data)
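
The transfer blocks above rely on transferW and transferB helpers that are not shown on this page. A minimal sketch of what they might look like, assuming the Caffe blobs (net.params[...][i].data) are numpy arrays and the Torch-side parameters are PyTorch tensors; the names match the calls above, but the bodies are this sketch's assumption:

import torch

def transferW(dst, src):
    # sketch: copy a Caffe weight blob into the Torch weight tensor, matching the destination layout
    dst.copy_(torch.from_numpy(src).view_as(dst))

def transferB(dst, src):
    # sketch: bias blobs are 1-D, so the same element-wise copy applies
    dst.copy_(torch.from_numpy(src).view_as(dst))
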
Example #3
    def exportModel(self, graph):
        # Walk the graph in topological order and rebuild it as a legacy Torch
        # nn.Sequential, copying any learned parameters along the way
        order, expanded_order = graph.topologicalSort()
        net = nn.Sequential()
        for id in order:
            node = graph.nodes.get(id)
            name = node.type            # layer type string, e.g. 'Conv2d'
            m = node.params             # layer hyper-parameters
            l = node.learned_params     # trained weights, biases, BN statistics

            # Pull layer hyper-parameters from the node, falling back to defaults
            # when a key is missing or empty (truthiness check, as in the original)
            nInputPlane = m.get('in_channels') or 1
            nOutputPlane = m.get('out_channels') or 1
            nFeatures = m.get('num_features') or 1
            inputDimension = m.get('in_features') or 1
            outputDimension = m.get('out_features') or 1
            p = m.get('p') or 0.1
            subgraph = m.get('subgraph')

            # kernel_size / stride / padding may be an int or a (width, height) tuple
            kernel_size = m.get('kernel_size') or 1
            kW, kH = kernel_size if isinstance(kernel_size, tuple) else (kernel_size, kernel_size)
            stride = m.get('stride') or 1
            dW, dH = stride if isinstance(stride, tuple) else (stride, stride)
            padding = m.get('padding') or 0
            padW, padH = padding if isinstance(padding, tuple) else (padding, padding)

            # copy the network architecture
            if name == 'Conv2d':
                n = nn.SpatialConvolution(nInputPlane, nOutputPlane, kW, kH, dW, dH, padW, padH)
            elif name == 'BatchNorm2d':
                n = nn.SpatialBatchNormalization(nFeatures)
            elif name == 'ReLU':
                n = nn.ReLU()
            elif name == 'Sigmoid':
                n = nn.Sigmoid()
            elif name == 'MaxPool2d':
                n = nn.SpatialMaxPooling(kW, kH, dW, dH, padW, padH)
            elif name == 'AvgPool2d':
                n = nn.SpatialAveragePooling(kW, kH, dW, dH, padW, padH)
            elif name == 'Linear':
                n = nn.Linear(inputDimension, outputDimension)
            elif name == 'Dropout':
                n = nn.Dropout(p)
            elif name == 'Softmax':
                n = nn.SoftMax()
            elif name == 'Identity':
                n = nn.Identity()
            elif name == 'Reshape':
                # target dimensions are not mapped yet; nn.Reshape normally takes the output size(s)
                n = nn.Reshape()
            elif name == 'BCELoss':
                n = nn.BCECriterion()
            elif name == 'MSELoss':
                n = nn.MSECriterion()
            elif name == 'Sequential':
                n = self.exportModel(subgraph)
            elif name == 'ResNet':
                # hard-coded 128-channel residual block body (stride-2 downsample, then 3x3 conv)
                n, core, _ = self.createResidualLayer()
                core.add(nn.SpatialConvolution(128,128, 3,3, 2,2, 1,1))
                core.add(nn.SpatialBatchNormalization(128))
                core.add(nn.ReLU())
                core.add(nn.SpatialConvolution(128,128, 3,3, 1,1, 1,1))
                core.add(nn.SpatialBatchNormalization(128))
            else:
                # Group nodes wrap a subgraph; anything else is not supported yet
                if node.group:
                    n = self.exportModel(subgraph)
                else:
                    print('Not implemented:', name)

            # copy the network weights
            weight = l.get('weight')
            bias = l.get('bias')
            running_mean = l.get('running_mean')
            running_var = l.get('running_var')
            # copy over the learned params if they exist; parameter-free modules
            # (ReLU, pooling, Dropout) expose none of these attributes, hence getattr
            self.copyWeights(getattr(n, 'weight', None), weight)
            self.copyWeights(getattr(n, 'bias', None), bias)
            self.copyWeights(getattr(n, 'running_mean', None), running_mean)
            self.copyWeights(getattr(n, 'running_var', None), running_var)

            net.add(n)

        return net
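
exportModel also leans on copyWeights and createResidualLayer, which are not shown here. A minimal sketch of what copyWeights might look like, assuming the stored parameters are numpy arrays and the destination is a Torch tensor; the guard and the conversion are this sketch's assumptions, not the original helper:

    # assumes `import torch` and `import numpy as np` at module level
    def copyWeights(self, dst, src):
        # sketch: copy a stored parameter into the module's tensor,
        # skipping modules or nodes that have nothing to copy
        if dst is None or src is None:
            return
        dst.copy_(torch.from_numpy(np.asarray(src)).view_as(dst))
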