def getModel(self, modelType, dataset="CIFAR100"):
    """Instantiate a classification model by name.

    Args:
        modelType: one of "densenet", "resnet32", "resnet20",
            "resnet44", "test".
        dataset: "CIFAR100" (default) or "MNIST". Only resnet20 and
            resnet32 have MNIST-specific constructors.

    Returns:
        A freshly constructed model instance.

    Raises:
        ValueError: if the model type is unknown, or the model does not
            support the requested dataset.  (Previously this printed a
            message and did ``assert(False)``, which is silently stripped
            under ``python -O``.)
    """
    # Shared message for architectures without an MNIST variant.
    mnist_unsupported = ("MNIST dataset not supported in this model. "
                         "Try resnet20 or 32")
    if modelType == "densenet":
        if dataset == "MNIST":
            raise ValueError(mnist_unsupported)
        return dn.DenseNet(growthRate=12, depth=40, reduction=0.5,
                           bottleneck=True, nClasses=100)
    elif modelType == "resnet32":
        if dataset == "MNIST":
            return res.resnet32mnist(10)
        return res.resnet32(100)
    elif modelType == "resnet20":
        if dataset == "MNIST":
            return res.resnet20mnist(10)
        return res.resnet20(100)
    elif modelType == "resnet44":
        if dataset == "MNIST":
            raise ValueError(mnist_unsupported)
        return res.resnet44(100)
    elif modelType == "test":
        if dataset == "MNIST":
            raise ValueError(mnist_unsupported)
        return tm.Net(100)
    else:
        raise ValueError(
            "Unsupported model; either implement the model in "
            "model/modelFactory or choose a different model")
def build_model(cfg):
    """Build the 100-class classifier selected by ``cfg.model.type``.

    Supported types: "resnet" (fine-tuned pretrained resnet50),
    "wide-resnet", "densenet", "EfficientNet-B0".

    Returns:
        The constructed model.

    Raises:
        ValueError: for an unrecognized ``cfg.model.type``.  (Previously
            the function silently fell through and returned ``None``.)
    """
    model_type = cfg.model.type
    if model_type == "resnet":
        model_ft = resnet50(pretrained=True)
        # Freeze the pretrained backbone; only the new head trains.
        set_parameter_requires_grad(model_ft, False)
        num_ftrs = model_ft.fc.in_features
        # Replace the ImageNet head with a fresh 100-class classifier.
        model_ft.fc = nn.Linear(num_ftrs, 100)
        return model_ft
    if model_type == "wide-resnet":
        return wide_resnet50_2(pretrained=True, num_classes=100)
    if model_type == "densenet":
        return densenet.DenseNet(growthRate=12, depth=100, reduction=0.5,
                                 bottleneck=True, nClasses=100)
    if model_type == "EfficientNet-B0":
        return EfficientNet.from_pretrained('efficientnet-b0',
                                            num_classes=100,
                                            batch_norm_momentum=0.9)
    raise ValueError(f"Unsupported model type: {model_type!r}")
Trying to run on CPU will then trigger errors (too time-consuming anyway)! """
# Time how long it takes to construct the teacher network and load its
# checkpoint weights.
teacher_model_load_start = time.time()
if params.teacher == "resnet18":
    teacher_model = resnet.ResNet18()
    teacher_checkpoint = 'experiments/base_resnet18/best.pth.tar'
    # NOTE(review): only this branch honors params.cuda; the branches below
    # unconditionally wrap in DataParallel and call .cuda() — confirm intended.
    teacher_model = teacher_model.cuda() if params.cuda else teacher_model
elif params.teacher == "wrn":
    teacher_model = wrn.WideResNet(depth=28, num_classes=10, widen_factor=10,
                                   dropRate=0.3)
    teacher_checkpoint = 'experiments/base_wrn/best.pth.tar'
    teacher_model = nn.DataParallel(teacher_model).cuda()
elif params.teacher == "densenet":
    teacher_model = densenet.DenseNet(depth=100, growthRate=12)
    teacher_checkpoint = 'experiments/base_densenet/best.pth.tar'
    teacher_model = nn.DataParallel(teacher_model).cuda()
elif params.teacher == "resnext29":
    teacher_model = resnext.CifarResNeXt(cardinality=8, depth=29,
                                         num_classes=10)
    teacher_checkpoint = 'experiments/base_resnext29/best.pth.tar'
    teacher_model = nn.DataParallel(teacher_model).cuda()
elif params.teacher == "preresnet110":
    teacher_model = preresnet.PreResNet(depth=110, num_classes=10)
    teacher_checkpoint = 'experiments/base_preresnet110/best.pth.tar'
    teacher_model = nn.DataParallel(teacher_model).cuda()
# NOTE(review): an unrecognized params.teacher leaves teacher_checkpoint and
# teacher_model unbound, so the next line raises NameError — consider adding
# an explicit `else: raise ValueError(...)` above.
utils.load_checkpoint(teacher_checkpoint, teacher_model)
teacher_model_load_time = time.time() - teacher_model_load_start
def model_confirm(self, choosed_model):
    """Build and return the network architecture named by ``choosed_model``.

    The input shape is derived from ``self.normal_size`` (height/width) and
    ``self.channles`` (channels).  The classic architectures use
    ``self.classNumber`` output classes; several of the newer branches
    (SENet, EfficientNet*, MobileNetV3*, NASNet*, ShuffleNet*) are
    hard-coded to 4 classes, as in the original code.

    Returns:
        The constructed (uncompiled) Keras model.

    Raises:
        ValueError: for an unknown ``choosed_model``.  (Previously an
            unknown name fell through and raised NameError on the unbound
            ``model`` variable.)
    """
    # Shared (H, W, C) input shape used by all the channels-last builders.
    input_shape = (self.normal_size, self.normal_size, self.channles)

    if choosed_model == 'VGG16':
        model = MODEL(self.config).VGG16()
    elif choosed_model == 'VGG19':
        model = MODEL(self.config).VGG19()
    elif choosed_model == 'AlexNet':
        model = MODEL(self.config).AlexNet()
    elif choosed_model == 'LeNet':
        model = MODEL(self.config).LeNet()
    elif choosed_model == 'ZF_Net':
        model = MODEL(self.config).ZF_Net()
    elif choosed_model == 'ResNet18':
        model = ResnetBuilder().build_resnet18(self.config)
    elif choosed_model == 'ResNet34':
        model = ResnetBuilder().build_resnet34(self.config)
    elif choosed_model == 'ResNet101':
        model = ResnetBuilder().build_resnet101(self.config)
    elif choosed_model == 'ResNet152':
        model = ResnetBuilder().build_resnet152(self.config)
    elif choosed_model == 'mnist_net':
        model = MODEL(self.config).mnist_net()
    elif choosed_model == 'TSL16':
        model = MODEL(self.config).TSL16()
    elif choosed_model in ('ResNet50', 'InceptionV3', 'Xception',
                           'MobileNet', 'InceptionResNetV2'):
        # All five keras.applications builders share the same argument set.
        builders = {
            'ResNet50': keras.applications.ResNet50,
            'InceptionV3': keras.applications.InceptionV3,
            'Xception': keras.applications.Xception,
            'MobileNet': keras.applications.MobileNet,
            'InceptionResNetV2': keras.applications.InceptionResNetV2,
        }
        model = builders[choosed_model](include_top=True, weights=None,
                                        input_tensor=None,
                                        input_shape=input_shape,
                                        pooling='max',
                                        classes=self.classNumber)
    elif choosed_model == 'SEResNetXt':
        model = SEResNetXt(self.config).model
    elif choosed_model == 'DenseNet':
        depth = 40
        nb_dense_block = 3
        growth_rate = 12
        nb_filter = 12
        bottleneck = False
        reduction = 0.0
        dropout_rate = 0.0
        # BUG FIX: the original compared the *function object*
        # K.image_data_format to a string (always False) and the True arm
        # built a malformed 2-tuple.  Call the function, and build
        # (H, W, C) for channels_last — the shape the original code
        # effectively always used — and (C, H, W) for channels_first.
        if K.image_data_format() == 'channels_last':
            img_dim = (self.normal_size, self.normal_size, self.channles)
        else:
            img_dim = (self.channles, self.normal_size, self.normal_size)
        model = densenet.DenseNet(img_dim, classNumber=self.classNumber,
                                  depth=depth,
                                  nb_dense_block=nb_dense_block,
                                  growth_rate=growth_rate,
                                  nb_filter=nb_filter,
                                  dropout_rate=dropout_rate,
                                  bottleneck=bottleneck,
                                  reduction=reduction, weights=None)
    elif choosed_model == 'SENet':
        # Note: builds a segmentation U-Net with a senet154 encoder.
        model = sm.Unet('senet154', input_shape=input_shape, classes=4,
                        activation='softmax', encoder_weights=None)
    elif choosed_model in ('EfficientNetB5', 'EfficientNetB4',
                           'EfficientNetB3', 'EfficientNetB2',
                           'EfficientNetB1', 'EfficientNetB0'):
        builders = {
            'EfficientNetB5': EfficientNetB5,
            'EfficientNetB4': EfficientNetB4,
            'EfficientNetB3': EfficientNetB3,
            'EfficientNetB2': EfficientNetB2,
            'EfficientNetB1': EfficientNetB1,
            'EfficientNetB0': EfficientNetB0,
        }
        model = builders[choosed_model](input_shape=input_shape, classes=4,
                                        weights=None)
    elif choosed_model == 'MobileNetV3_Large':
        model = MobileNetV3_Large(shape=input_shape, n_class=4).build()
    elif choosed_model == 'MobileNetV3_Small':
        model = MobileNetV3_Small(shape=input_shape, n_class=4).build()
    elif choosed_model in ('NASNetLarge', 'NASNetMobile', 'NASNetMiddle'):
        builders = {
            'NASNetLarge': NASNetLarge,
            'NASNetMobile': NASNetMobile,
            'NASNetMiddle': NASNetMiddle,
        }
        model = builders[choosed_model](input_shape=input_shape,
                                        weights=None,
                                        use_auxiliary_branch=False,
                                        classes=4)
    elif choosed_model == 'ShuffleNet':
        model = ShuffleNet(input_shape=input_shape, classes=4)
    elif choosed_model == 'ShuffleNetV2':
        model = ShuffleNetV2(input_shape=input_shape, classes=4)
    else:
        raise ValueError(f"Unsupported model: {choosed_model!r}")
    return model
def get_model(self, model_type, dataset="CIFAR100", use_mbd=False, d=64):
    """Build a classifier, or a (generator, discriminator) pair for GANs.

    Args:
        model_type: "densenet", "resnet32", "resnet20", "resnet44",
            "test", or a GAN family: "cdcgan", "dcgan", "wgan", "acgan".
        dataset: "CIFAR100" (default), "CIFAR10", or "MNIST".
        use_mbd: whether the cdcgan/acgan discriminator uses mini-batch
            discrimination.
        d: base feature-map width for the GAN models.

    Returns:
        A single model for classifier types, or a ``(G, D)`` tuple for
        the GAN types.

    Raises:
        ValueError: for an unsupported model type, an unsupported
            model/dataset combination, or ``d < 16`` with "acgan".
            (Previously these paths printed a message and used
            ``assert False``, which vanishes under ``python -O``.)
    """
    mnist_unsupported = ("MNIST dataset not supported in this model. "
                         "Try resnet20 or 32")
    if model_type == "densenet":
        if dataset == "MNIST":
            raise ValueError(mnist_unsupported)
        return dn.DenseNet(growth_rate=12, depth=40, reduction=0.5,
                           bottleneck=True, n_classes=100)
    elif model_type == "resnet32":
        if dataset == "MNIST":
            return res.resnet32mnist(10)
        elif dataset == "CIFAR10":
            return res.resnet32(10)
        return res.resnet32(100)
    elif model_type == "resnet20":
        if dataset == "MNIST":
            return res.resnet20mnist(10)
        return res.resnet20(100)
    elif model_type == "resnet44":
        if dataset == "MNIST":
            raise ValueError(mnist_unsupported)
        return res.resnet44(100)
    elif model_type == "test":
        if dataset == "MNIST":
            raise ValueError(mnist_unsupported)
        return tm.Net(100)
    elif model_type == "cdcgan":
        if dataset == "CIFAR100":
            G = cdcgan.Generator(d, 3, 100)
            D = cdcgan.Discriminator(d, 3, 100, use_mbd)
        elif dataset == "CIFAR10":
            G = cdcgan.Generator(d, 3, 10)
            D = cdcgan.Discriminator(d, 3, 10, use_mbd)
        else:
            # Fallback (e.g. MNIST): single-channel, 10-class setup.
            G = cdcgan.Generator(d)
            D = cdcgan.Discriminator(d, 1, 10, use_mbd)
        G.init_weights(mean=0.0, std=0.02)
        D.init_weights(mean=0.0, std=0.02)
        return G, D
    elif model_type == "dcgan":
        if dataset == "CIFAR100" or dataset == "CIFAR10":
            G = dcgan.Generator(d, 3)
            D = dcgan.Discriminator(d, 3)
        else:
            G = dcgan.Generator(d)
            D = dcgan.Discriminator(d)
        G.init_weights(mean=0.0, std=0.02)
        D.init_weights(mean=0.0, std=0.02)
        return G, D
    elif model_type == "wgan":
        if dataset == "CIFAR100" or dataset == "CIFAR10":
            G = wgan.Generator(d, 3)
            D = wgan.Discriminator(d, 3)
        else:
            G = wgan.Generator(d)
            D = wgan.Discriminator(d)
        G.init_weights(mean=0.0, std=0.02)
        D.init_weights(mean=0.0, std=0.02)
        return G, D
    elif model_type == "acgan":
        num_classes = 100 if dataset == "CIFAR100" else 10
        if d < 16:
            raise ValueError("[!!!] d<16, You sure??")
        # Generator width is fixed at 384 except for the d==32 preset.
        gen_d = 768 if d == 32 else 384
        channels = 3 if dataset in ("CIFAR100", "CIFAR10") else 1
        G = acgan.Generator(gen_d, channels, num_classes)
        D = acgan.Discriminator(d, channels, num_classes)
        G.init_weights(mean=0.0, std=0.02)
        D.init_weights(mean=0.0, std=0.02)
        return G, D
    else:
        raise ValueError(
            "Unsupported model; either implement the model in "
            "model/ModelFactory or choose a different model")