Code example #1
import torch
import torch.nn as nn
# models, util, Probes, and args are module-level in the source repo.

def model_with_probes(model_path=None, which='Imagenet'):
    # Class count for the linear probes' target dataset.
    if which == 'Imagenet':
        nc = 1000
    elif which == 'Places':
        nc = 205
    else:
        raise ValueError('unknown dataset: %s' % which)
    state_dict = torch.load(model_path)  # some checkpoints nest the weights under ['state_dict'] (see code example #2)
    # Recover the output sizes of the pretrained top (FC) heads from the
    # checkpoint keys so the backbone can be rebuilt to match.
    ncls = []
    for q in state_dict.keys():
        if 'top_layer' in q and 'weight' in q:
            ncls.append(state_dict[q].shape[0])
    outs = ncls
    model = models.__dict__[args.arch](num_classes=outs)
    model.load_state_dict(state_dict)
    layers = [1, 4, 7, 9, 11]  # probe positions, valid once BN has been absorbed
    util.search_absorb_bn(model)
    model = util.sequential_skipping_bn_cut(model)
    # Use out-of-place ReLUs so the probes see activations before modification.
    for relu in filter(lambda x: isinstance(x, nn.ReLU), model.children()):
        relu.inplace = False
    model = Probes(model, layers, num_classes=nc)
    return model
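
A minimal usage sketch for the function above, assuming the repo-local modules are importable; the checkpoint path here is a placeholder, not a real file from the source:

# Hypothetical invocation; 'checkpoint.pth.tar' is a placeholder path.
probed = model_with_probes(model_path='checkpoint.pth.tar', which='Imagenet')
probed.eval()  # switch to eval mode before evaluating the probes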
Code example #2
import torch
import torch.nn as nn
# alexnet, util, and Probes are module-level in the source repo.

def model_with_probes(model_path=None, which='Imagenet'):
    # Class count for the linear probes' target dataset.
    if which == 'Imagenet':
        nc = 1000
    elif which == 'Places':
        nc = 205
    else:
        raise ValueError('unknown dataset: %s' % which)
    state_dict = torch.load(model_path)['state_dict']
    # Automatically determine the sizes of the pretrained top (FC) heads,
    # which are discarded for probing but needed to rebuild the architecture.
    ncls = []
    for q in state_dict.keys():
        if 'top_layer' in q and 'weight' in q:
            ncls.append(state_dict[q].shape[0])
    outs = ncls
    model = alexnet.alexnet(out=outs)
    model.load_state_dict(state_dict)
    layers = [1, 4, 7, 9, 11]  # probe positions, valid once BN has been absorbed
    util.search_absorb_bn(model)
    model = util.sequential_skipping_bn_cut(model)
    # Use out-of-place ReLUs so the probes see activations before modification.
    for relu in filter(lambda x: isinstance(x, nn.ReLU),
                       model.children()):
        relu.inplace = False
    model = Probes(model, layers, num_classes=nc)
    return model
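
As a sketch of how the returned model might be driven, assuming the repo-local Probes class forwards a batch through the backbone and returns one set of logits per probed layer (an assumption about its interface, not confirmed by the source); the checkpoint path is again a placeholder:

# Hypothetical forward pass with a dummy ImageNet-sized batch.
import torch
probed = model_with_probes(model_path='checkpoint.pth.tar', which='Places')
probed.eval()
with torch.no_grad():
    probe_outputs = probed(torch.randn(2, 3, 224, 224))  # assumed: one output per probe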