Example #1
import torch

import network  # project-local module defining DecoderCell2
import util     # project-local module providing get_models


def read_models_from_file(model_path, hierarchy, args):
    """
    Initialize the models and load their trained weights from checkpoint files.
    """
    encoder, binarizer, decoder, unet = util.get_models(
        args=args, v_compress=args.v_compress, 
        bits=args.bits,
        encoder_fuse_level=args.encoder_fuse_level,
        decoder_fuse_level=args.decoder_fuse_level)

    d2 = network.DecoderCell2(v_compress=args.v_compress,
                              shrink=args.shrink,
                              bits=args.bits,
                              fuse_level=args.decoder_fuse_level,
                              itrs=args.iterations).cuda()
    print(d2)
    nets = [d2]
    if unet is not None:
        nets.append(unet)

    # Keep the name order aligned with `nets` so each checkpoint file
    # is loaded into its matching network.
    names = ['d2', 'unet']

    for net_idx, net in enumerate(nets):
        if net is not None:
            name = names[net_idx]
            checkpoint_path = '{}/{}_{}_{:08d}.pth'.format(
                model_path + "/h" + str(hierarchy), 'demo',
                name, 100000)

            print('Loading %s from %s...' % (name, checkpoint_path))
            net.load_state_dict(torch.load(checkpoint_path))

    return nets
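
A minimal usage sketch, assuming checkpoints saved under model/h1/ with the demo naming scheme used above; the args values below are hypothetical placeholders:

import argparse

args = argparse.Namespace(
    v_compress=True, shrink=2, bits=8, iterations=10,
    encoder_fuse_level=1, decoder_fuse_level=1)

# Expects e.g. model/h1/demo_d2_00100000.pth (and demo_unet_00100000.pth if a UNet is built).
nets = read_models_from_file('model', hierarchy=1, args=args)
for net in nets:
    net.eval()  # switch to inference mode before decoding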
Example #2
                   args=args),
    }
    return eval_loaders


############### Model ###############
encoder, binarizer, decoder, unet = get_models(
    args=args,
    v_compress=args.v_compress,
    bits=args.bits,
    encoder_fuse_level=args.encoder_fuse_level,
    decoder_fuse_level=args.decoder_fuse_level)

d2 = network.DecoderCell2(v_compress=args.v_compress,
                          shrink=args.shrink,
                          bits=args.bits,
                          fuse_level=args.decoder_fuse_level,
                          itrs=args.iterations).cuda()

nets = [encoder, binarizer, decoder, d2]
if unet is not None:
    nets.append(unet)

print(nets)

gpus = [int(gpu) for gpu in args.gpus.split(',')]
if len(gpus) > 1:
    print("Using GPUs {}.".format(gpus))
    # Rebinding the loop variable would not change the list contents,
    # so wrap each network in place.
    for i, net in enumerate(nets):
        nets[i] = nn.DataParallel(net, device_ids=gpus)
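
The GPU handling above assumes args.gpus arrives as a comma-separated string; a minimal argparse flag consistent with that parsing (hypothetical name and default) could be:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--gpus', default='0',
                    help='comma-separated GPU ids, e.g. "0,1"')
args = parser.parse_args()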