Example #1
    def create_network(self, blocks):
        hyperparams = blocks[0]
        output_filters = [int(hyperparams['channels'])]
        module_list = nn.ModuleList()
        routs = []  # list of layers which route to deeper layers
        ind = -2  # start at -2 so the first block after the skipped [net] header gets index 0
        filters = -1

        for mdef in blocks:
            ind += 1
            modules = nn.Sequential()

            if mdef['type'] in ['net', 'learnet']:
                continue
            if mdef['type'] == 'convolutional':
                bn = int(mdef['batch_normalize'])
                filters = int(mdef['filters'])
                size = int(mdef['size'])
                stride = int(mdef['stride']) if 'stride' in mdef else (int(mdef['stride_y']), int(mdef['stride_x']))
                pad = (size - 1) // 2 if int(mdef['pad']) else 0
                dynamic = 'dynamic' in mdef and int(mdef['dynamic']) == 1

                if dynamic:
                    partial = int(mdef['partial']) if 'partial' in mdef else None
                    Conv2d = dynamic_conv2d(is_first=True, partial=partial)
                else:
                    Conv2d = nn.Conv2d

                modules.add_module('Conv2d', Conv2d(in_channels=output_filters[-1],
                                                    out_channels=filters,
                                                    kernel_size=size,
                                                    stride=stride,
                                                    padding=pad,
                                                    groups=int(mdef['groups']) if 'groups' in mdef else 1,
                                                    bias=not bn))
                if bn:
                    modules.add_module('BatchNorm2d', nn.BatchNorm2d(filters, momentum=0.1))
                if mdef['activation'] == 'leaky':
                    # TODO: activation study https://github.com/ultralytics/yolov3/issues/441
                    modules.add_module('activation', nn.LeakyReLU(0.1, inplace=True))
                    # modules.add_module('activation', nn.PReLU(num_parameters=1, init=0.10))
                elif mdef['activation'] == 'swish':
                    modules.add_module('activation', Swish())

            elif mdef['type'] == 'maxpool':
                size = int(mdef['size'])
                stride = int(mdef['stride'])
                maxpool = nn.MaxPool2d(kernel_size=size, stride=stride, padding=int((size - 1) // 2))
                if size == 2 and stride == 1:  # yolov3-tiny
                    modules.add_module('ZeroPad2d', nn.ZeroPad2d((0, 1, 0, 1)))
                    modules.add_module('MaxPool2d', maxpool)
                else:
                    modules = maxpool

            elif mdef['type'] == 'upsample':
                modules = nn.Upsample(scale_factor=int(mdef['stride']), mode='nearest')

            elif mdef['type'] == 'route':  # nn.Sequential() placeholder for 'route' layer
                layers = [int(x) for x in mdef['layers'].split(',')]
                filters = sum([output_filters[i + 1 if i > 0 else i] for i in layers])
                routs.extend([l if l > 0 else l + ind for l in layers])
                modules = EmptyModule()
                # if mdef[i+1]['type'] == 'reorg3d':
                #     modules = nn.Upsample(scale_factor=1/float(mdef[i+1]['stride']), mode='nearest')  # reorg3d

            elif mdef['type'] == 'shortcut':  # nn.Sequential() placeholder for 'shortcut' layer
                filters = output_filters[int(mdef['from'])]
                layer = int(mdef['from'])
                routs.extend([ind + layer if layer < 0 else layer])
                modules = EmptyModule()

            elif mdef['type'] == 'region':
                loss = RegionLossV2()
                anchors = mdef['anchors'].split(',')
                loss.anchors = [float(i) for i in anchors]
                loss.num_classes = int(mdef['classes'])
                loss.num_anchors = int(mdef['num'])
                loss.object_scale = float(mdef['object_scale'])
                loss.noobject_scale = float(mdef['noobject_scale'])
                loss.class_scale = float(mdef['class_scale'])
                loss.coord_scale = float(mdef['coord_scale'])
                loss.input_size = (self.height, self.width)
                modules = loss

            elif mdef['type'] == 'globalmax':
                modules = GlobalMaxPool2d()

            elif mdef['type'] == 'reorg3d':  # yolov3-spp-pan-scale
                # torch.Size([16, 128, 104, 104])
                # torch.Size([16, 64, 208, 208]) <-- # stride 2 interpolate dimensions 2 and 3 to cat with prior layer
                pass

            else:
                print('Warning: Unrecognized Layer Type: ' + mdef['type'])

            # Register module list and number of output filters
            module_list.append(modules)
            output_filters.append(filters)

        return module_list, routs
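
# The variant above references a few helper modules that are defined elsewhere in the
# repository (EmptyModule, Swish, GlobalMaxPool2d). The sketch below shows one common
# way such helpers are written; it is an assumption for illustration, not the
# repository's actual implementation.
import torch
import torch.nn as nn
import torch.nn.functional as F

class EmptyModule(nn.Module):
    # Identity placeholder for 'route'/'shortcut' entries; the actual concatenation
    # or addition is expected to happen in the parent network's forward pass.
    def forward(self, x):
        return x

class Swish(nn.Module):
    # Swish activation: x * sigmoid(x).
    def forward(self, x):
        return x * torch.sigmoid(x)

class GlobalMaxPool2d(nn.Module):
    # Global max pooling over the spatial dimensions, yielding an (N, C, 1, 1) tensor.
    def forward(self, x):
        return F.adaptive_max_pool2d(x, 1)
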
    def create_network(self, blocks):
        models = nn.ModuleList()

        prev_filters = 3
        out_filters = []
        conv_id = 0
        dynamic_count = 0
        for block in blocks:
            if block['type'] == 'net' or block['type'] == 'learnet':
                prev_filters = int(block['channels'])
                continue
            elif block['type'] == 'convolutional':
                conv_id = conv_id + 1
                batch_normalize = int(block['batch_normalize'])
                filters = int(block['filters'])
                kernel_size = int(block['size'])
                stride = int(block['stride'])
                is_pad = int(block['pad'])
                # pad = (kernel_size-1)/2 if is_pad else 0  # for python2
                pad = (kernel_size - 1) // 2 if is_pad else 0  # for python3
                activation = block['activation']
                groups = 1
                bias = bool(int(block['bias'])) if 'bias' in block else True

                if self.is_dynamic(block):
                    partial = int(
                        block['partial']) if 'partial' in block else None
                    Conv2d = dynamic_conv2d(dynamic_count == 0,
                                            partial=partial)
                    dynamic_count += 1
                else:
                    Conv2d = nn.Conv2d
                if 'groups' in block:
                    groups = int(block['groups'])

                model = nn.Sequential()
                if batch_normalize:
                    model.add_module(
                        'conv{0}'.format(conv_id),
                        Conv2d(prev_filters,
                               filters,
                               kernel_size,
                               stride,
                               pad,
                               groups=groups,
                               bias=False))
                    model.add_module('bn{0}'.format(conv_id),
                                     nn.BatchNorm2d(filters))
                    #model.add_module('bn{0}'.format(conv_id), BN2d(filters))
                else:
                    model.add_module(
                        'conv{0}'.format(conv_id),
                        Conv2d(prev_filters,
                               filters,
                               kernel_size,
                               stride,
                               pad,
                               groups=groups,
                               bias=bias))
                if activation == 'leaky':
                    model.add_module('leaky{0}'.format(conv_id),
                                     nn.LeakyReLU(0.1, inplace=True))
                elif activation == 'relu':
                    model.add_module('relu{0}'.format(conv_id),
                                     nn.ReLU(inplace=True))
                prev_filters = filters
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'maxpool':
                pool_size = int(block['size'])
                stride = int(block['stride'])
                if stride > 1:
                    model = nn.MaxPool2d(pool_size, stride)
                else:
                    model = MaxPoolStride1()
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'avgpool':
                model = GlobalAvgPool2d()
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'softmax':
                model = nn.Softmax(dim=1)
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'cost':
                if block['_type'] == 'sse':
                    model = nn.MSELoss(reduction='mean')
                elif block['_type'] == 'L1':
                    model = nn.L1Loss(reduction='mean')
                elif block['_type'] == 'smooth':
                    model = nn.SmoothL1Loss(reduction='mean')
                out_filters.append(1)
                models.append(model)
            elif block['type'] == 'reorg':
                stride = int(block['stride'])
                prev_filters = stride * stride * prev_filters
                out_filters.append(prev_filters)
                models.append(Reorg(stride))
            elif block['type'] == 'route':
                layers = block['layers'].split(',')
                ind = len(models)
                layers = [
                    int(i) if int(i) > 0 else int(i) + ind for i in layers
                ]
                if len(layers) == 1:
                    prev_filters = out_filters[layers[0]]
                elif len(layers) == 2:
                    assert (layers[0] == ind - 1)
                    prev_filters = out_filters[layers[0]] + out_filters[
                        layers[1]]
                out_filters.append(prev_filters)
                models.append(EmptyModule())
            elif block['type'] == 'shortcut':
                ind = len(models)
                prev_filters = out_filters[ind - 1]
                out_filters.append(prev_filters)
                models.append(EmptyModule())
            elif block['type'] == 'connected':
                filters = int(block['output'])
                if block['activation'] == 'linear':
                    model = nn.Linear(prev_filters, filters)
                elif block['activation'] == 'leaky':
                    model = nn.Sequential(nn.Linear(prev_filters, filters),
                                          nn.LeakyReLU(0.1, inplace=True))
                elif block['activation'] == 'relu':
                    model = nn.Sequential(nn.Linear(prev_filters, filters),
                                          nn.ReLU(inplace=True))
                prev_filters = filters
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'region':
                loss = RegionLossV2()
                anchors = block['anchors'].split(',')
                loss.anchors = [float(i) for i in anchors]
                loss.num_classes = int(block['classes'])
                loss.num_anchors = int(block['num'])
                loss.anchor_step = len(loss.anchors) // loss.num_anchors
                loss.object_scale = float(block['object_scale'])
                loss.noobject_scale = float(block['noobject_scale'])
                loss.class_scale = float(block['class_scale'])
                loss.coord_scale = float(block['coord_scale'])
                out_filters.append(prev_filters)
                models.append(loss)
            elif block['type'] == 'globalmax':
                model = GlobalMaxPool2d()
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'globalavg':
                model = GlobalAvgPool2d()
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'split':
                splits = [int(sz) for sz in block['splits'].split(',')]
                model = Split(splits)
                prev_filters = splits[-1]
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'dynamic_routing':
                model = DynamicRouting()
                out_filters.append(prev_filters)
                models.append(model)
            else:
                print('unknown type %s' % (block['type']))

        return models
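
# For context, this variant (like the others) expects blocks to be a list of dicts
# parsed from a Darknet-style .cfg file, with every value kept as a string and with
# the keys indexed above ('type', 'channels', 'filters', 'size', ...). The snippet
# below is only a hypothetical illustration of that input shape; the surrounding
# class and its parser are not shown here.
blocks = [
    {'type': 'net', 'channels': '3'},
    {'type': 'convolutional', 'batch_normalize': '1', 'filters': '16',
     'size': '3', 'stride': '1', 'pad': '1', 'activation': 'leaky'},
    {'type': 'maxpool', 'size': '2', 'stride': '2'},
]
# models = self.create_network(blocks)  # one nn.Module per non-header block
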
    def create_network(self, blocks):
        models = nn.ModuleList()

        prev_filters = 3
        out_filters = []
        conv_id = 0
        dynamic_count = 0

        for block in blocks:
            if block['type'] == 'net' or block['type'] == 'learnet':
                prev_filters = int(block['channels'])
                continue
            elif block['type'] == 'convolutional':
                conv_id = conv_id + 1
                batch_normalize = int(block['batch_normalize'])
                filters = int(block['filters'])
                kernel_size = int(block['size'])
                stride = int(block['stride'])
                is_pad = int(block['pad'])
                pad = (kernel_size - 1) // 2 if is_pad else 0
                activation = block['activation']
                groups = 1
                bias = bool(int(block['bias'])) if 'bias' in block else True

                if self.is_dynamic(block):
                    # don't know what partial parameter is doing, seems partial is always set to None
                    # partial = int(block['partial']) if 'partial' in block else None
                    # Conv2d = dynamic_conv2d(dynamic_count == 0, partial=partial)
                    # Conv2d = dynamic_conv2d(dynamic_count == 0)
                    Conv2d = EmbeddedConv
                    dynamic_count += 1
                else:
                    Conv2d = nn.Conv2d
                if 'groups' in block:
                    groups = int(block['groups'])

                model = nn.Sequential()
                if batch_normalize:
                    model.add_module(
                        'conv{0}'.format(conv_id),
                        Conv2d(prev_filters,
                               filters,
                               kernel_size,
                               stride,
                               pad,
                               groups=groups,
                               bias=False))
                    model.add_module('bn{0}'.format(conv_id),
                                     nn.BatchNorm2d(filters))
                else:
                    model.add_module(
                        'conv{0}'.format(conv_id),
                        Conv2d(prev_filters,
                               filters,
                               kernel_size,
                               stride,
                               pad,
                               groups=groups,
                               bias=bias))
                if activation == 'leaky':
                    model.add_module('leaky{0}'.format(conv_id),
                                     nn.LeakyReLU(0.1, inplace=True))
                elif activation == 'relu':
                    model.add_module('relu{0}'.format(conv_id),
                                     nn.ReLU(inplace=True))
                prev_filters = filters
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'maxpool':
                pool_size = int(block['size'])
                stride = int(block['stride'])
                # if stride > 1:
                #     model = nn.MaxPool2d(pool_size, stride)
                # else:
                #     model = MaxPoolStride1()
                model = nn.MaxPool2d(pool_size, stride)
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'globalavg':
                model = nn.AdaptiveAvgPool2d(1)
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'globalmax':
                model = nn.AdaptiveMaxPool2d(1)
                out_filters.append(prev_filters)
                models.append(model)
            elif block['type'] == 'softmax':
                model = nn.Softmax(dim=1)
                out_filters.append(prev_filters)
                models.append(model)
            # elif block['type'] == 'cost':
            #     if block['_type'] == 'sse':
            #         model = nn.MSELoss(size_average=True)
            #     elif block['_type'] == 'L1':
            #         model = nn.L1Loss(size_average=True)
            #     elif block['_type'] == 'smooth':
            #         model = nn.SmoothL1Loss(size_average=True)
            #     out_filters.append(1)
            #     models.append(model)
            elif block['type'] == 'reorg':
                stride = int(block['stride'])
                prev_filters = stride * stride * prev_filters
                out_filters.append(prev_filters)
                models.append(Reorg(stride))
            elif block['type'] == 'route':
                layers = block['layers'].split(',')
                ind = len(models)
                layers = [
                    int(i) if int(i) > 0 else int(i) + ind for i in layers
                ]
                if len(layers) == 1:
                    prev_filters = out_filters[layers[0]]
                elif len(layers) == 2:
                    assert (layers[0] == ind - 1)
                    prev_filters = out_filters[layers[0]] + out_filters[
                        layers[1]]
                out_filters.append(prev_filters)
                models.append(EmptyModule())
            elif block['type'] == 'shortcut':
                ind = len(models)
                prev_filters = out_filters[ind - 1]
                out_filters.append(prev_filters)
                models.append(EmptyModule())
            elif block['type'] == 'connected':
                filters = int(block['output'])
                model = None
                if block['activation'] == 'linear':
                    model = nn.Linear(prev_filters, filters)
                elif block['activation'] == 'leaky':
                    model = nn.Sequential(nn.Linear(prev_filters, filters),
                                          nn.LeakyReLU(0.1, inplace=True))
                elif block['activation'] == 'relu':
                    model = nn.Sequential(nn.Linear(prev_filters, filters),
                                          nn.ReLU(inplace=True))
                prev_filters = filters
                out_filters.append(prev_filters)
                assert model is not None
                models.append(model)
            elif block['type'] == 'region':
                anchors = block['anchors'].split(',')
                anchors = [float(i) for i in anchors]
                num_classes = int(block['classes'])
                num_anchors = int(block['num'])
                object_scale = float(block['object_scale'])
                noobject_scale = float(block['noobject_scale'])
                class_scale = float(block['class_scale'])
                coord_scale = float(block['coord_scale'])
                loss = RegionLossV2(num_classes, anchors, num_anchors,
                                    coord_scale, noobject_scale, object_scale,
                                    class_scale)
                out_filters.append(prev_filters)
                models.append(loss)
            else:
                print('unknown type %s' % (block['type']))
        return models
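
# All three variants assume the same input format: one dict per [section] of a
# Darknet-style .cfg file. A minimal parser producing that structure might look like
# the sketch below. parse_cfg is a hypothetical helper named here for illustration;
# the repository's own parser may handle more cases (typed values, inline comments).
def parse_cfg(path):
    # Each [section] becomes a dict whose 'type' key is the section name and whose
    # remaining keys are the raw key=value options, all kept as strings
    # (create_network casts them to int/float itself).
    blocks, block = [], None
    with open(path) as f:
        for raw in f:
            line = raw.strip()
            if not line or line.startswith('#'):
                continue  # skip blank lines and comments
            if line.startswith('['):
                if block is not None:
                    blocks.append(block)
                block = {'type': line[1:-1].strip()}
            else:
                key, value = line.split('=', 1)
                block[key.strip()] = value.strip()
    if block is not None:
        blocks.append(block)
    return blocks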