# ===== Example #1 (예제 #1) =====
    # Classifier head: two hidden fully-connected layers with ReLU activations.
    x = Activation(activation='relu', name='ac5')(x)
    print("dense1 shape:", x.shape)
    x = Dense(16, name='fc2', kernel_initializer='he_normal')(x)
    x = Activation(activation='relu', name='ac6')(x)
    print("dense2 shape:", x.shape)
    # Final projection to `classes` logits, then softmax to class probabilities.
    x = Dense(classes, name='predictions', kernel_initializer='he_normal')(x)
    x = Activation(activation='softmax', name='ac7')(x)
    print("dense3 shape:", x.shape)

    # Assemble the full model from the input tensor built earlier in this function.
    model = Model(inputs=inputs, outputs=x, name='vgg16')

    return model


# Build the network (two-class ResNet variant defined elsewhere in this file).
model = resnet(use_bias_flag=True, classes=2)
# model.compile(optimizer=SGD(lr=1e-6, momentum=0.9), loss='binary_crossentropy')

# Build a single-sample input batch of shape (1, 280, 280, 16, 1).
# NOTE(review): the (1, 2, 0) transpose implies the H5 'data' volume is stored
# as (16, 280, 280) and is moved slice-axis-last — confirm against the dataset.
data_input_c = np.zeros([1, 280, 280, 16, 1], dtype=np.float32)
H5_file = h5py.File(r'/data/@data_NENs_level_ok/4test/102_1.h5', 'r')
batch_x = H5_file['data'][:]
H5_file.close()
batch_x = np.transpose(batch_x, (1, 2, 0))
data_input_c[0, :, :, :, 0] = batch_x[:, :, :]

# Save or load weights.
# pre = model.predict_on_batch(data_input_c)
# model.save('G:\qweqweqweqwe\model.h5')
model.load_weights(
    filepath=
# ===== Example #2 (예제 #2) =====
    #d_model.compile(optimizer=adam(lr=1e-5), loss=EuiLoss, metrics=[y_t, y_pre, Acc])

    # Experiment 5: 10000, "di 2 zhe" (presumably fold 2 of cross-validation)
    #d_model = resnet(use_bias_flag=True)
    #d_model.compile(optimizer=SGD(lr=1e-6, momentum=0.9), loss=EuiLoss, metrics=[y_t, y_pre, Acc])

    # Experiment 6: 10000, "di 1 zhe" (fold 1)
    #d_model = dual_path_net(initial_conv_filters = 64, filter_increment = [16, 32, 24, 128], depth = [3, 4, 6, 3], cardinality=16, width=3, pooling='max-avg',bias_flag=True)
    #d_model.compile(optimizer=SGD(lr=1e-6, momentum=0.9), loss=EuiLoss, metrics=[y_t, y_pre, Acc])  # it seems cardinality and width are the key knobs controlling network size

    # Experiment 7: 10000, "di 1 zhe" (fold 1)
    #d_model = resnext()
    #d_model.compile(optimizer=SGD(lr=1e-6, momentum=0.9), loss=EuiLoss, metrics=[y_t, y_pre, Acc])

    # Experiment 8 (active): 10000, "di 3 zhe" (fold 3)
    d_model = resnet(use_bias_flag=True)
    d_model.compile(optimizer=SGD(lr=1e-6, momentum=0.9), loss=EuiLoss, metrics=[y_t, y_pre, Acc])

    #===================================================================================



    pause()  # wait for operator confirmation before continuing
    print(d_model.summary())  # print the network architecture
    #print(model.summary())  # view net
    pause()  # wait for operator confirmation before continuing
    # Training-state initialization.
    Iter = 0                 # global iteration counter
    epoch = 0                # epoch counter
    max_acc_verify = 0.6     # best verification accuracy seen so far (starts at 0.6)
    max_acc_verify_iter = 0  # iteration at which the best verification accuracy occurred
# ===== Example #3 (예제 #3) =====
# Report dataset split sizes, pausing for operator confirmation between each.
pause()
print('test set size :%d' % testtset_num)
pause()
print('or_train set size :%d' % or_train_num)
pause()

# Per the original author, the index lists below are unused — computed "just for fun".
Num_list_train = list(range(trainset_num))
Num_list_verify = list(range(verifset_num))
Num_list_test = list(range(testtset_num))
Num_list_or_train = list(range(or_train_num))

if __name__ == "__main__":

    # Experiment 20: 10000, "di 2 zhe" (presumably fold 2 of cross-validation).
    # Two-class ResNet with weight decay disabled, Adam at a very small LR.
    d_model = resnet(use_bias_flag=True, classes=2, weight_decay=0.)
    d_model.compile(optimizer=adam(lr=2e-6),
                    loss=EuiLoss,
                    metrics=[y_t, y_pre, Acc])

    # Path of the learning-rate-curve log file (handle: txt_s11).
    lr_txt = Result_save_Path + file_sep[
        0] + '@' + foldname + '_lr.txt'  # learning-rate curve log (txt_s11)

    Iter = 0
    for i in range(max_iter):
        Iter = Iter + 1
        # Append the current optimizer learning rate to the LR log each iteration.
        # NOTE(review): txt_s11 is reopened every iteration and not closed in the
        # visible code — confirm it is closed later, or this leaks file handles.
        txt_s11 = open(lr_txt, 'a')
        txt_s11.write(str(K.get_value(d_model.optimizer.lr)) + '\n')

        # At the switch point, recompute the learning rate via lr_mod (call
        # continues beyond this chunk).
        if Iter == optimizer_switch_point:
            lr_new = lr_mod(Iter,