Example #1
def train(epoch, lr):
    print('\nEpoch: %d' % epoch)
    net.train()
    lr = lr_schedule(lr, epoch, decay, 0.1)
    # a fresh Adam optimizer is created each epoch at the scheduled rate,
    # which also resets Adam's running moment estimates
    optimizer = optim.Adam(net.parameters(), lr=lr)
    train_loss = 0
    correct = 0
    total = 0
    for batch_idx, batch_data in enumerate(trainloader):
        inputs = batch_data['data'].to(device)
        targets = batch_data['label'].to(device)
        optimizer.zero_grad()
        outputs = net(inputs)
        loss = criterion(outputs, targets)
        loss.backward()
        optimizer.step()

        train_loss += loss.item()
        _, predicted = outputs.max(1)
        total += targets.size(0)
        correct += predicted.eq(targets).sum().item()

        progress_bar(
            batch_idx, len(trainloader), 'Loss: %.2f | Acc: %.2f%% (%d/%d)' %
            (train_loss /
             (batch_idx + 1), 100. * correct / total, correct, total))
    return train_loss / (batch_idx + 1), 100. * correct / total, lr
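Every example on this page leans on an externally defined lr_schedule helper. The PyTorch snippet above calls it as lr_schedule(lr, epoch, decay, 0.1), which suggests a step decay driven by a list of decay epochs. A minimal sketch under that assumption (the meaning of decay and factor is a guess, not the original code):

# Hypothetical sketch of the lr_schedule assumed by Example #1.
# Assumption: `decay` is an iterable of epoch indices at which the
# learning rate is multiplied by `factor` (0.1 in the call above).
def lr_schedule(lr, epoch, decay, factor):
    if epoch in decay:
        lr *= factor
    return lr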
Example #2
def get_model(version, input_shape, depth):
    if version == 2:
        model = resnet_v2(input_shape=input_shape, depth=depth)
    else:
        model = resnet_v1(input_shape=input_shape, depth=depth)

    model.compile(loss='categorical_crossentropy',
                  optimizer=Adam(lr=lr_schedule(0)),
                  metrics=['accuracy'])
    model.summary()
    return model
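The Keras examples call lr_schedule(0) to seed the optimizer, and the later snippets hand the same function to LearningRateScheduler, so here it maps an epoch index to a learning rate. A sketch in the style of the stock Keras CIFAR-10 ResNet example (the exact breakpoints and rates in the original project are assumptions):

# Hypothetical epoch-indexed schedule in the style of the Keras
# CIFAR-10 ResNet example; the original's breakpoints may differ.
def lr_schedule(epoch):
    lr = 1e-3
    if epoch > 180:
        lr *= 0.5e-3
    elif epoch > 160:
        lr *= 1e-3
    elif epoch > 120:
        lr *= 1e-2
    elif epoch > 80:
        lr *= 1e-1
    print('Learning rate: ', lr)
    return lr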
Example #3
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
print('y_train shape:', y_train.shape)

# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

depth = 20 # For ResNet, specify the depth (e.g. ResNet50: depth=50)
model = resnet_v1.resnet_v1(input_shape=input_shape, depth=depth, activation=activation)
# model = resnet_v2.resnet_v2(input_shape=input_shape, depth=depth, activation=activation)   

model.compile(loss='categorical_crossentropy',
              optimizer=Adam(lr=lr_schedule(0)),
              metrics=['accuracy'])
model.summary()
print(model_type)

# Prepare the model saving directory.
save_dir = os.path.join(os.getcwd(), 'saved_models')
model_name = 'cifar10_%s_model.{epoch:03d}.h5' % model_type
if not os.path.isdir(save_dir):
    os.makedirs(save_dir)
filepath = os.path.join(save_dir, model_name)

# Prepare callbacks for model saving and for learning rate adjustment.
checkpoint = ModelCheckpoint(filepath=filepath,
                             monitor='val_acc',
                             verbose=1,
                             save_best_only=True)  # assumed; the snippet was cut off mid-call
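The snippet breaks off while building its callbacks; in this pattern the checkpoint is usually paired with a LearningRateScheduler and passed to fit. A sketch of the continuation (batch size, epoch count, and using the test set for validation are assumptions):

# Hypothetical continuation of the truncated snippet above.
lr_scheduler = LearningRateScheduler(lr_schedule)
callbacks = [checkpoint, lr_scheduler]

model.fit(x_train, y_train,
          batch_size=32,   # assumed value
          epochs=200,      # assumed value
          validation_data=(x_test, y_test),
          shuffle=True,
          callbacks=callbacks)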
Example #4
    # model.layers.pop()    # pop the model's last layer
    #
    # Freezing a layer excludes it from training, i.e. its weights are never updated.
    for layer in model.layers:
        layer.trainable = False
    #
    last = model.layers[-1].output  # output of the last layer
    # fully connected layer: number of neurons and activation
    print('number of neurons', len(classes))
    # last = Dropout()(last)
    x = Dense(len(classes), activation="softmax")(last)
    print(model.layers[-1].name)
    model = Model(model.input, x)

    # set the loss function, optimizer, and the metrics reported during training and testing
    model.compile(optimizer=Adam(lr_schedule(0)),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    # for c in batches.class_indices:
    #     classes[batches.class_indices[c]] = c
    # finetuned_model.classes = classes
    # Early stopping guards against overfitting; patience: once early stopping
    # triggers (e.g. the loss stops improving versus the previous epoch),
    # training halts after `patience` further epochs.
    #early_stopping = EarlyStopping(patience=10)
    checkpointer = ModelCheckpoint('resnet50_cbam.h5',
                                   verbose=1,
                                   save_best_only=True)  # add a model checkpoint
    lr_scheduler = LearningRateScheduler(lr_schedule)
    lr_reducer = ReduceLROnPlateau(factor=np.sqrt(0.1),
                                   cooldown=0,
                                   patience=5,
                                   min_lr=0.5e-6)
Example #5
batch_size = 64
epochs = 200
data_augment = False
training_path = '/ext/xueshengke/caffe-1.0/examples/waveletCASR/data/train/wavelet_small_x4_1.h5'
model_name = 'WRANSR-%d_x%d' % (depth, scale)

# load DIV2K data (.h5 files from Matlab)
train_data, train_label, test_data, test_label = hdf5.load_data(training_path)
# h5 = h5py.File('/ext/xueshengke/caffe-1.0/examples/waveletCASR/data/train/wavelet_small_x4_1.h5', 'r')

# create WRANSR network model
input_shape = train_data[0].shape[1:]
model = wransr.wran_net(input_shape, depth, ratio, width, alpha)

# sgd = SGD(lr=1e-4, momentum=0.9, decay=1e-4, nesterov=False)
adam = Adam(lr=lr_schedule(0),
            beta_1=0.9,
            beta_2=0.999,
            epsilon=1e-8,
            decay=0.0)
model.compile(loss='mean_absolute_error',
              optimizer=adam,
              metrics=[PSNR_Loss, SSIM_Loss])
model.summary()
print(model_name)

# prepare the model saving directory
ck_dir = os.path.join(os.getcwd(), 'checkpoints')
ck_name = 'wransr%d_epoch{epoch:03d}_psnr{PSNR_Loss:02.4f}.h5' % depth
if not os.path.exists(ck_dir):
    os.makedirs(ck_dir)
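Example #5 compiles against custom PSNR_Loss and SSIM_Loss metrics that are not shown. For orientation, a PSNR metric in the Keras backend could look like the sketch below; it assumes images normalized to [0, 1], and the project's actual definition may differ:

# Hypothetical PSNR metric standing in for the PSNR_Loss referenced above.
# Assumes y_true/y_pred are scaled to [0, 1], so the peak signal value is 1.0.
from keras import backend as K

def PSNR_Loss(y_true, y_pred):
    mse = K.mean(K.square(y_pred - y_true))
    return 10.0 * K.log(1.0 / mse) / K.log(10.0)  # 10 * log10(1 / MSE)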
Example #6
def main_(pretrain_cifar_ss_dir=None):
    split_step = -28

    if pretrain_cifar_ss_dir is None:
        pretrain_model_dir = \
            os.path.join(MODEL_DIR, 'imagenet_cifar_best_val_acc.h5')
        
        model, pretrain_model = biway_resnet(split_step, pretrain_model_dir)
        model.compile(loss=['categorical_crossentropy', 'categorical_crossentropy'],
                      optimizer=Adam(lr=lr_schedule(0)),
                      metrics=['accuracy'])
        plot_model(model, to_file='/mnt/home/20160022/pre-train-ssl-evaluation/biway_model.png')
        
        tb_log_dir = os.path.join(LOG_DIR + '_biway_49', 
                                datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
        model_dir = os.path.join(MODEL_DIR, 'biway_49_best_val_acc.h5')
        # note: tb_log_dir already carries a timestamp, so this nests a second timestamped directory
        tensorboard = TensorBoard(log_dir=os.path.join(tb_log_dir,
                                                       datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin")), 
                                  histogram_freq=0, batch_size=BATCH_SIZE, 
                                  write_graph=True, write_grads=True)
        model_checkpoint = ModelCheckpoint(model_dir, monitor='val_dense_1_acc', 
                                           verbose=1, mode='max', save_best_only=True)
        lr_scheduler = LearningRateScheduler(lr_schedule)
        callbacks = [model_checkpoint, lr_scheduler, tensorboard]

        x_train_total, y_cifar10_train_total, y_ss_train, \
            x_valid_total, y_cifar10_valid_total, y_ss_valid = load_data_biway(pretrain_model)

        model.fit(x_train_total, [y_cifar10_train_total, y_ss_train],
                  batch_size=BATCH_SIZE, epochs=EPOCHS,
                  validation_data=(x_valid_total, [y_cifar10_valid_total, y_ss_valid]),
                  shuffle=True,
                  callbacks=callbacks)
        
        pretrain_cifar_ss_dir = model_dir

    model, pretrain_model = biway_resnet(split_step)
    model.load_weights(pretrain_cifar_ss_dir)
    #final_model = split_biway(model, 123 + split_step)

    model_input = model.inputs
    model_output = model.outputs[0]
    final_model = Model(inputs=model_input,
                        outputs=model_output)
    
    x_labeled, y_labeled, x_test, y_test, \
        x_ss_train, y_ss_train, x_ss_valid, y_ss_valid  = load_cifar10()
    
    print('Final model architecture')

    '''
    for i in range(123 + split_step):
        final_model.layers[i].trainable = False
    '''

    final_model.summary()
    final_model.compile(loss='categorical_crossentropy', 
                    optimizer=Adam(lr=lr_schedule(0)),
                    metrics=['accuracy', 'top_k_categorical_accuracy'])

    tb_log_dir = os.path.join(LOG_DIR + '_imagenet_cifar10_ss_supervised_28_original', 
                                datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
    model_dir = os.path.join(MODEL_DIR, 
                             'imagenet_cifar10_ss_supervised_28_best_val_acc_original.h5')

    train_model(x_labeled, y_labeled, x_test, y_test, 
                final_model, tb_log_dir, model_dir, True)
Example #7
def main(learning_case, pretrain=None, ss_pretrain=None, split_step=None):
    x_labeled, y_labeled, x_test, y_test, \
        x_ss_train, y_ss_train, x_ss_valid, y_ss_valid  = load_cifar10()

    if learning_case == 'supervised':
        model = resnet.ResnetBuilder.build_resnet_34((32, 32, 3), NB_CIFAR10_CLASS)
        model.summary()
        model.compile(loss='categorical_crossentropy', 
                      optimizer=Adam(lr=lr_schedule(0)),
                      metrics=['accuracy', 'top_k_categorical_accuracy'])

        tb_log_dir = os.path.join(LOG_DIR + '_supervised', 
                                  datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
        model_dir = os.path.join(MODEL_DIR, 'supervised_best_val_acc.h5')

        train_model(x_labeled, y_labeled, x_test, y_test, 
                    model, tb_log_dir, model_dir, True)

    elif learning_case == 's4l':
        model = resnet.ResnetBuilder.build_resnet_34((32, 32, 3), NB_CIFAR10_CLASS)
        model_input = model.inputs
        common_model_output = model.layers[-1].output  # take the output tensor, not the layer object

        cifar_class = Dense(units=NB_CIFAR10_CLASS, kernel_initializer="he_normal",
                      activation="softmax")(common_model_output)
        ss_class = Dense(units=NB_SS_CLASS, kernel_initializer="he_normal",
                      activation="softmax")(common_model_output)
        
        s4l_model_ss_branch = Model(inputs=model_input,
                                     outputs=ss_class)
        s4l_model_cifar_branch = Model(inputs=model_input,
                                       outputs=cifar_class)
        s4l_model_two_branch = Model(inputs=model_input,
                                     outputs=[cifar_class, ss_class])
        
        s4l_model_two_branch.summary()

        s4l_model_ss_branch.compile(loss='categorical_crossentropy', 
                      optimizer=Adam(),
                      metrics=['accuracy', 'top_k_categorical_accuracy'])
        
        s4l_model_cifar_branch.compile(loss='categorical_crossentropy', 
                      optimizer=Adam(),
                      metrics=['accuracy', 'top_k_categorical_accuracy'])
        
        s4l_model_two_branch.compile(loss=['categorical_crossentropy', 'categorical_crossentropy'],
                      loss_weights=[1, 1],
                      optimizer=Adam(),
                      metrics=['accuracy', 'top_k_categorical_accuracy'])

        for epoch in range(200):
            idx = 0
            for batch_idx in range(NB_SS//100):
                if batch_idx % (NB_SS // NB_LABELED) == 0:
                    x, y = make_s4l_two_branch(x_labeled, y_labeled, idx)
                    idx += 1
                    s4l_model_two_branch.train_on_batch(x, y)
                x, y = make_ss_branch(x_ss_train, y_ss_train, batch_idx)
                s4l_model_ss_branch.train_on_batch(x, y)
            loss, acc = s4l_model_cifar_branch.evaluate(x_test, y_test)
            print('loss: {}, acc: {}'.format(loss, acc))
            

    elif learning_case == 'self_supervised':
        model = resnet.ResnetBuilder.build_resnet_34((32, 32, 3), NB_SS_CLASS)
        model.summary()
        model.compile(loss='categorical_crossentropy', 
                      optimizer=Adam(lr=lr_schedule(0)),
                      metrics=['accuracy', 'top_k_categorical_accuracy'])

        tb_log_dir = os.path.join(LOG_DIR + '_self_supervised', 
                                  datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
        model_dir = os.path.join(MODEL_DIR, 'self_supervised_best_val_acc.h5')

        train_model(x_ss_train, y_ss_train, x_ss_valid, y_ss_valid,
                    model, tb_log_dir, model_dir)

    elif learning_case == 'imagenet_pretrain':
        if pretrain is None:
            x_imagenet_train, y_imagenet_train, \
                x_imagenet_valid, y_imagenet_valid = load_cifar100()

            model = resnet.ResnetBuilder.build_resnet_34((32, 32, 3), NB_CIFAR100_CLASS)
            model.summary()
            model.compile(loss='categorical_crossentropy', 
                        optimizer=Adam(lr=lr_schedule(0)),
                        metrics=['accuracy', 'top_k_categorical_accuracy'])

            tb_log_dir = os.path.join(LOG_DIR + '_imagenet', 
                                    datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
            model_dir = os.path.join(MODEL_DIR, 'imagenet_best_val_acc.h5')

            train_model(x_imagenet_train, y_imagenet_train, x_imagenet_valid, y_imagenet_valid, 
                        model, tb_log_dir, model_dir, True)
            pretrain = model_dir

        model = resnet.ResnetBuilder.build_resnet_34((32, 32, 3), NB_CIFAR100_CLASS)
        model.load_weights(pretrain)

        model_input = model.input
        outputs = [layer.output for layer in model.layers]
        last_layer = outputs[RESNET_BLOCK_4]
        last_layer = Flatten()(last_layer)
        last_layer = Dense(NB_CIFAR10_CLASS)(last_layer)
        last_layer = Activation('softmax')(last_layer)

        imagenet_cifar_model = Model(inputs=model_input, outputs=last_layer)
        imagenet_cifar_model.summary()

        imagenet_cifar_model.compile(loss='categorical_crossentropy', 
                                    optimizer=Adam(lr=lr_schedule(0)),
                                    metrics=['accuracy', 'top_k_categorical_accuracy'])

        tb_log_dir = os.path.join(LOG_DIR + '_imagenet_cifar', 
                                datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
        model_dir = os.path.join(MODEL_DIR, 'imagenet_cifar_best_val_acc.h5')

        train_model(x_labeled, y_labeled, x_test, y_test, 
                    imagenet_cifar_model, tb_log_dir, model_dir, True)

    elif learning_case == 'ss_pretrain':
        if pretrain is None:
            x_imagenet_train, y_imagenet_train, \
                x_imagenet_valid, y_imagenet_valid = load_cifar100()

            model = resnet.ResnetBuilder.build_resnet_34((32, 32, 3), NB_CIFAR100_CLASS)
            model.summary()
            model.compile(loss='categorical_crossentropy', 
                          optimizer=Adam(lr=lr_schedule(0)),
                          metrics=['accuracy', 'top_k_categorical_accuracy'])

            tb_log_dir = os.path.join(LOG_DIR + '_imagenet', 
                                      datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
            model_dir = os.path.join(MODEL_DIR, 'imagenet_best_val_acc.h5')

            train_model(x_imagenet_train, y_imagenet_train, x_imagenet_valid, y_imagenet_valid, 
                        model, tb_log_dir, model_dir, True)
            pretrain = model_dir

        if ss_pretrain is None:
            model = resnet.ResnetBuilder.build_resnet_34((32, 32, 3), NB_CIFAR100_CLASS)
            model.load_weights(pretrain)

            model_input = model.input
            outputs = [layer.output for layer in model.layers]
            last_layer = outputs[RESNET_BLOCK_4]
            last_layer = Flatten()(last_layer)
            last_layer = Dense(NB_SS_CLASS)(last_layer)
            last_layer = Activation('softmax')(last_layer)

            imagenet_ss_model = Model(inputs=model_input, outputs=last_layer)
            imagenet_ss_model.summary()
            imagenet_ss_model.compile(loss='categorical_crossentropy', 
                                      optimizer=Adam(lr=lr_schedule(0)),
                                      metrics=['accuracy', 'top_k_categorical_accuracy'])

            tb_log_dir = os.path.join(LOG_DIR + '_imagenet_ss', 
                                      datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
            model_dir = os.path.join(MODEL_DIR, '_imagenet_ss_best_val_acc.h5')

            train_model(x_ss_train, y_ss_train, x_ss_valid, y_ss_valid, 
                        imagenet_ss_model, tb_log_dir, model_dir)
            ss_pretrain = model_dir

        prefix = ''
        if split_step == RESNET_BLOCK_2:
            prefix = 'block2'
        elif split_step == RESNET_BLOCK_3:
            prefix = 'block3'
        elif split_step == RESNET_BLOCK_4:
            prefix = 'block4'
        else:
            print('Not supported')

        imagenet_ss_cifar_model = split_resnet(split_step, ss_pretrain)
        imagenet_ss_cifar_model.compile(loss='categorical_crossentropy', 
                                        optimizer=Adam(lr=lr_schedule(0)),
                                        metrics=['accuracy', 'top_k_categorical_accuracy'])

        tb_log_dir = os.path.join(LOG_DIR + 'imagenet_ss_cifar_' + prefix, 
                                datetime.datetime.now().strftime("%Y-%m-%d-%Hh-%Mmin"))
        model_dir = os.path.join(MODEL_DIR, 
                                'imagenet_ss_cifar_' + prefix + '_best_val_acc.h5')

        train_model(x_labeled, y_labeled, x_test, y_test, 
                    imagenet_ss_cifar_model, tb_log_dir, model_dir, True)

    else:
        print('Not supported')
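Examples #6 and #7 both delegate the fit loop to a train_model helper that is not shown. Judging from the callbacks the snippets set up inline, it plausibly looks like the sketch below; the final boolean is assumed to switch the checkpoint monitor to validation accuracy, and BATCH_SIZE/EPOCHS are the globals already used in Example #6:

# Hypothetical reconstruction of the train_model helper used above.
def train_model(x_train, y_train, x_valid, y_valid,
                model, tb_log_dir, model_dir, monitor_acc=False):
    tensorboard = TensorBoard(log_dir=tb_log_dir, write_graph=True)
    checkpoint = ModelCheckpoint(model_dir,
                                 monitor='val_acc' if monitor_acc else 'val_loss',
                                 verbose=1, save_best_only=True)
    lr_scheduler = LearningRateScheduler(lr_schedule)
    model.fit(x_train, y_train,
              batch_size=BATCH_SIZE, epochs=EPOCHS,
              validation_data=(x_valid, y_valid),
              shuffle=True,
              callbacks=[checkpoint, lr_scheduler, tensorboard])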
Example #8
classes = list(iter(batches.class_indices))  # mapping from predicted-probability indices to labels when predicting with the trained model
model.layers.pop()  # pop the model's last layer

# Freezing a layer excludes it from training, i.e. its weights are never updated.
for layer in model.layers:
    layer.trainable = False

last = model.layers[-1].output  # output of the (now) last layer
# fully connected layer: number of neurons and activation
print('number of neurons', len(classes))
x = Dense(len(classes), activation="softmax")(last)
finetuned_model = Model(model.input, x)

# set the loss function, optimizer, and the metrics reported during training and testing
finetuned_model.compile(optimizer=Adam(lr=lr_schedule(0)),
                        loss='categorical_crossentropy',
                        metrics=['accuracy'])
for c in batches.class_indices:
    classes[batches.class_indices[c]] = c
finetuned_model.classes = classes
# Early stopping guards against overfitting; patience: once early stopping
# triggers (e.g. the loss stops improving versus the previous epoch),
# training halts after `patience` further epochs.
#early_stopping = EarlyStopping(patience=10)
checkpointer = ModelCheckpoint('resnet50_cbam.h5',
                               verbose=1,
                               save_best_only=True)  # add a model checkpoint
lr_scheduler = LearningRateScheduler(lr_schedule)
lr_reducer = ReduceLROnPlateau(factor=np.sqrt(0.1),
                               cooldown=0,
                               patience=5,
                               min_lr=0.5e-6)
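The snippet prepares its callbacks but stops before training. With batches coming from an ImageDataGenerator flow (as class_indices implies), the fine-tuning step would typically be driven like the sketch below; valid_batches and the epoch count are assumptions, not part of the original snippet:

# Hypothetical training call for the fine-tuned model above; `valid_batches`
# and the epoch count are assumptions.
finetuned_model.fit_generator(batches,
                              steps_per_epoch=len(batches),
                              epochs=50,
                              validation_data=valid_batches,
                              validation_steps=len(valid_batches),
                              callbacks=[checkpointer, lr_scheduler, lr_reducer])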