def getNetwork(args):
    """Construct the network selected by ``args.net_type`` plus its checkpoint base name.

    Reads the module-level globals ``num_classes`` and ``series_length``
    (defined elsewhere in this file) and the imported model modules
    (``lenet``, ``vggnet``, ``resnet``, ``resnet2``, ``wide_resnet``,
    ``resnet1d``, ``lstm``).

    Args:
        args: parsed command-line namespace; this function reads
            ``net_type``, ``depth``, ``widen_factor``, ``dropout``,
            ``dataset`` and ``loss_fn``.

    Returns:
        tuple: ``(net, file_name)`` — the instantiated model and the base
        name used for its checkpoint files.

    Exits:
        Terminates the process with status 1 for an unsupported
        ``net_type``, or a ResNet-2 depth other than 18/34.
    """
    # One extra output unit is appended when a custom loss function is in use.
    extra_class = 0 if args.loss_fn is None else 1

    if args.net_type == 'lenet':
        net = lenet.LeNet(num_classes + extra_class)
        file_name = 'lenet'
        net.apply(lenet.conv_init)
    elif args.net_type == 'vggnet':
        net = vggnet.VGG(args.depth, num_classes + extra_class)
        file_name = 'vgg-' + str(args.depth)
        net.apply(vggnet.conv_init)
    elif args.net_type == 'resnet':
        net = resnet.ResNet(args.depth, num_classes + extra_class)
        file_name = 'resnet-' + str(args.depth)
        net.apply(resnet.conv_init)
    elif args.net_type == 'resnet2':
        # Grayscale datasets feed a single input channel; everything else is RGB.
        num_channels = 1 if args.dataset in ('mnist', 'fashion') else 3
        if args.depth == 34:
            net = resnet2.ResNet34(num_classes=num_classes + extra_class,
                                   num_input_channels=num_channels)
            file_name = 'resnet2-34'
        elif args.depth == 18:
            net = resnet2.ResNet18(num_classes=num_classes + extra_class,
                                   num_input_channels=num_channels)
            file_name = 'resnet2-18'
        else:
            print('Error : Resnet-2 Network depth should either be 18 or 34')
            sys.exit(1)  # fixed: was exit(0) — errors must report non-zero status
        net.apply(resnet2.conv_init)
    elif args.net_type == 'wide-resnet':
        net = wide_resnet.Wide_ResNet(args.depth, args.widen_factor,
                                      args.dropout, num_classes + extra_class)
        file_name = 'wide-resnet-' + str(args.depth) + 'x' + str(args.widen_factor)
        net.apply(wide_resnet.conv_init)
    elif args.net_type == 'tsc-resnet':
        # NOTE(review): unlike the CNN branches above, no conv_init is applied
        # here — presumably resnet1d initializes its own weights; confirm.
        net = resnet1d.ResNet(series_length, num_classes + extra_class)
        file_name = 'tsc-resnet-' + str(args.depth) + 'x' + str(args.widen_factor)
    elif args.net_type == 'tsc-lstm':
        # The time-series input has a feature dimension of 1.
        net = lstm.TSCLSTM(1, series_length, args.depth, num_classes + extra_class)
        file_name = 'tsc-lstm-' + str(args.depth) + 'x' + str(args.widen_factor)
    else:
        # fixed: closing bracket was missing from the message
        print('Error : Network should be either [LeNet / VGGNet / ResNet / Wide_ResNet / ResNet 1d]')
        sys.exit(1)  # fixed: was exit(0) — errors must report non-zero status

    return net, file_name
# generate teacher if model_name == 'WideResNet': teacher = wide_resnet.WideResNet(depth=40, width=2, number_of_classes=number_of_classes, dropout_rate=0.3) if flag_gpu: if len(devices) != 1: teacher = torch.nn.DataParallel(teacher, device_ids=devices) teacher.load_state_dict(torch.load(teacher_model_file_path)) teacher = teacher.cuda(devices[0]) else: teacher.load_state_dict( torch.load(teacher_model_file_path, map_location='cpu')) elif model_name == 'ResNet': teacher = resnet.ResNet(depth=110, number_of_classes=number_of_classes) if flag_gpu: if len(devices) != 1: teacher = torch.nn.DataParallel(teacher, device_ids=devices) teacher.load_state_dict(torch.load(teacher_model_file_path)) teacher = teacher.cuda(devices[0]) else: teacher.load_state_dict( torch.load(teacher_model_file_path, map_location='cpu')) elif model_name == 'MobileNet': # teacher = resnet.ResNet(depth = 110, number_of_classes = number_of_classes) teacher = mobile_net.MobileNet(number_of_classes=number_of_classes, ca=1) if flag_gpu: if len(devices) != 1: teacher = torch.nn.DataParallel(teacher, device_ids=devices)