Code Example #1
File: SVS.py  Project: qinzhang2016/SVSP
"""




##init weights

def weights_init(m):
    classname=m.__class__.__name__
    if classname.find('Conv') != -1:
        xavier(m.weight.data)
        xavier(m.bias.data)
"""

# Build the CNN and move it to the GPU
SVS = NET.SVS()
SVS.cuda()
#print(SVS)
#SVS.apply(weights_init)
#torch.save(SVS,'/home/lisa/SVSP/SVS.pkl')

optimizer = torch.optim.Adam(SVS.parameters(),
                             lr=args.LR)  # optimize all parameters
loss_func = nn.L1Loss().cuda()  # L1 (mean absolute error) loss
lossM = nn.MSELoss().cuda()     # MSE (mean squared error) loss

if __name__ == '__main__':

    for epoch in range(args.epochs):
        ##training------------------------------------------
        for step, (batch_x, batch_y) in enumerate(trainloader):
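            # --- Sketch only: the excerpt cuts off here, so this loop body is an
            # assumption, not part of the original file. A typical step with the Adam
            # optimizer and the L1 loss defined above might look like:
            batch_x, batch_y = batch_x.cuda(), batch_y.cuda()
            prediction = SVS(batch_x)
            loss = loss_func(prediction, batch_y)  # L1 reconstruction loss
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()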
Code Example #2
            # (the excerpt begins mid-function) pin this process to the first GPU in args.gpus
            torch.cuda.set_device(args.gpus[0])

    # Infer the dataset from the model name
    args.dataset = 'cifar10' if 'cifar' in args.arch else 'imagenet'
    args.num_classes = 10 if args.dataset == 'cifar10' else 1000
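    # e.g. (illustration of the rule above) any arch name containing 'cifar' selects
    # cifar10 with 10 classes; any other name falls back to imagenet with 1000 classes.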

    if args.earlyexit_thresholds:
        args.num_exits = len(args.earlyexit_thresholds) + 1
        args.loss_exits = [0] * args.num_exits
        args.losses_exits = []
        args.exiterrors = []
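        # e.g. (assumption) --earlyexit_thresholds 0.9 1.2 defines two early exits plus
        # the final exit, so num_exits == 3 and three per-exit loss/error trackers are kept.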

    # Create the model
    #model = create_model(args.pretrained, args.dataset, args.arch,
    #                     parallel=not args.load_serialized, device_ids=args.gpus)
    model = NET.SVS()
    compression_scheduler = None
    # Create two logging backends.  TensorBoardLogger writes log files in a format
    # that TensorBoard can read.  PythonLogger writes to the Python logger.
    tflogger = TensorBoardLogger(msglogger.logdir)
    pylogger = PythonLogger(msglogger)

    # capture thresholds for early-exit training
    if args.earlyexit_thresholds:
        msglogger.info('=> using early-exit threshold values of %s', args.earlyexit_thresholds)

    # TODO(barrh): args.deprecated_resume is deprecated since v0.3.1
    if args.deprecated_resume:
        msglogger.warning('The "--resume" flag is deprecated. Please use "--resume-from=YOUR_PATH" instead.')
        if not args.reset_optimizer:
            msglogger.warning('If you wish to also reset the optimizer, call with: --reset-optimizer')