def get_layers(model, normalize_input):
    """Flatten *model* into an ``nn.Sequential`` of its leaf modules.

    Args:
        model: a ``torch.nn.Module`` whose leaf submodules are collected
            in ``model.modules()`` order.
        normalize_input: when truthy, prepend a ``models.Normalize()``
            layer before the model's own layers.

    Returns:
        ``nn.Sequential`` containing the (optionally normalize-prefixed)
        leaf modules, with a ``models.Flatten()`` inserted after every
        ``nn.AdaptiveAvgPool2d`` that is not already followed by one.
    """
    layers = []
    if normalize_input:
        layers.append(models.Normalize())
    # A leaf module is one whose .modules() iterator yields only itself.
    for m in model.modules():
        if len(list(m.modules())) == 1:
            layers.append(m)
    # FIX: the original inserted into `layers` while iterating it with
    # enumerate (mutate-while-iterating anti-pattern). Build a fresh list
    # instead; the resulting sequence is identical.
    patched = []
    for i, l in enumerate(layers):
        patched.append(l)
        if (isinstance(l, nn.AdaptiveAvgPool2d)
                and i < len(layers) - 1
                and not isinstance(layers[i + 1], models.Flatten)):
            patched.append(models.Flatten())
    return nn.Sequential(*patched)
Example #2
0
 def __init__(self, model, logger, args):
     """Wrap *model* as a flat ``nn.Sequential`` of its leaf modules.

     Args:
         model: the torch module to flatten; its own class is treated as
             a container so it is excluded from the collected layers.
         logger: stored on ``self.logger`` (used elsewhere in the class).
         args: stored on ``self.args``.
     """
     super(StudentModelWrapper2, self).__init__()
     self.logger = logger
     self.args = args
     # Containers hold other modules and are skipped; everything else in
     # model.modules() is kept as a layer.
     containers = [nn.Sequential, nn.ModuleList, nn.ModuleDict, type(model)]
     # FIX: `1 in [isinstance(m, t) for t in containers]` replaced with
     # the idiomatic, short-circuiting any(...).
     is_container = lambda m: any(isinstance(m, t) for t in containers)
     layers = [m for m in model.modules() if not is_container(m)]
     # FIX: the original inserted into `layers` while iterating it with
     # enumerate. Build a fresh list; a Flatten is appended after every
     # AdaptiveAvgPool2d (except a trailing one) not already followed by
     # a models.Flatten — same result as before.
     patched = []
     for i, l in enumerate(layers):
         patched.append(l)
         if (isinstance(l, nn.AdaptiveAvgPool2d) and i < len(layers) - 1
                 and not isinstance(layers[i + 1], models.Flatten)):
             patched.append(models.Flatten())
     self.layers = nn.Sequential(*patched)
        opt.dataset = 'Artificial'

    # NOTE(review): this fragment begins mid-function; `opt`, `args`, and
    # the helpers (Architecture, gr, datasets, Record, compression, ...)
    # are defined above the visible region -- confirm against full file.
    opt.device = args.device

    # Paths for the pretrained teacher checkpoint, the save directory,
    # and the TensorBoard run, all keyed by network/dataset/suffix.
    opt.model = './models/pretrained/%s_%s.pth' % (args.network, args.dataset)
    opt.savedir = './save/%s_%s_%s' % (args.network, args.dataset, args.suffix)
    opt.writer = SummaryWriter('./runs/%s_%s_%s' %
                               (args.network, args.dataset, args.suffix))
    # Refuse to overwrite a previous run's outputs.
    # NOTE(review): `assert` is stripped under `python -O`; an explicit
    # `raise` would be safer for this validation.
    assert not (os.path.exists(opt.savedir)), 'Overwriting existing files!'

    print(
        'Start compression. Please check the TensorBoard log in the folder ./runs/%s_%s_%s.'
        % (args.network, args.dataset, args.suffix))

    # Loads a full pickled model object (not a state_dict) onto the target
    # device. NOTE(review): torch.load unpickles arbitrary objects -- only
    # safe for trusted checkpoints.
    model = torch.load(opt.model).to(opt.device)
    # Architecture-specific patch-ups applied before building the teacher.
    if args.network == 'alexnet':
        model.flatten = models.Flatten()
    elif args.network != 'sample7':
        model.avgpool = nn.AvgPool2d(4, stride=1)
    teacher = Architecture(*(getattr(gr, opt.co_graph_gen)(model)))
    #print(teacher)
    dataset = getattr(datasets, opt.dataset)()
    record = Record()
    # Run BO-driven compression when opt.bo is set, otherwise random
    # search (the literal 80 is presumably the trial budget -- confirm
    # against random_compression's signature).
    if opt.bo:
        compression(teacher, dataset, record, args.objective, args.constype,
                    args.consval)
    else:
        random_compression(teacher, dataset, args.objective, args.constype,
                           args.consval, 80)
    fully_train(teacher, dataset=opt.dataset)