import os
from keras.optimizers import Adam
# Project-local helpers assumed to be importable alongside this script:
# proposed_model, generator_train_batch, generator_val_batch, plot_history,
# save_history, and the Step learning-rate callback.


def main():

    outputdir = 'result/special_v3/'
    if os.path.isdir(outputdir):
        print('save in :' + outputdir)
    else:
        os.makedirs(outputdir)

    train_img_path = '/data/MIT-BIH_AD_v3/'
    test_img_path = '/data/MIT-BIH_AD_v3/'
    train_file = './MIT-BIH_AD_sp_train.txt'
    test_file = './MIT-BIH_AD_sp_val.txt'
    num_classes = 7

    f1 = open(train_file, 'r')
    f2 = open(test_file, 'r')
    lines = f1.readlines()
    f1.close()
    train_samples = len(lines)
    lines = f2.readlines()
    f2.close()
    val_samples = len(lines)

    batch_size = 32
    epochs = 120
    input_h = 128
    input_w = 128

    model = proposed_model(nb_classes=num_classes)

    lr = 0.0001
    adam = Adam(lr=lr)
    model.compile(loss='categorical_crossentropy',
                  optimizer=adam,
                  metrics=['accuracy'])
    model.summary()
    history = model.fit_generator(
        generator_train_batch(train_file, batch_size, num_classes,
                              train_img_path, input_h, input_w),
        steps_per_epoch=train_samples // batch_size,
        epochs=epochs,
        callbacks=[Step()],
        validation_data=generator_val_batch(test_file, batch_size, num_classes,
                                            test_img_path, input_h, input_w),
        validation_steps=val_samples // batch_size,
        verbose=1)
    plot_history(history, outputdir)
    save_history(history, outputdir)
    model.save_weights(outputdir + 'proposed_model.h5')
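
The generator_train_batch and generator_val_batch helpers above are project-local and never shown in these snippets. As a rough illustration, here is a minimal sketch of the kind of generator fit_generator expects, assuming (hypothetically) that each line of the list file is "<image_name> <label>"; the repo's real generators may preprocess the ECG images differently:

import numpy as np
from keras.utils import to_categorical
from keras.preprocessing.image import load_img, img_to_array

def generator_train_batch(list_file, batch_size, num_classes, img_path, h, w):
    with open(list_file) as f:
        lines = f.readlines()
    while True:  # Keras generators must yield batches forever
        np.random.shuffle(lines)
        for start in range(0, len(lines) - batch_size + 1, batch_size):
            imgs, labels = [], []
            for line in lines[start:start + batch_size]:
                name, label = line.split()
                img = load_img(img_path + name, target_size=(h, w))
                imgs.append(img_to_array(img) / 255.0)
                labels.append(int(label))
            yield np.array(imgs), to_categorical(labels, num_classes)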
Example #2
def main():
    proposed = False
    if proposed:
        outputdir = 'result/NoAugment_{}/'.format(proposed)
        if os.path.isdir(outputdir):
            print('save in :'+outputdir)
        else:
            os.makedirs(outputdir)

        train_img_path = '/data/MIT-BIH_AD/'
        train_file = '/home/ccl/Documents/ECG-Arrhythmia-classification-in-2D-CNN/MIT-BIH_AD_train_paper.txt'
        num_classes = 8
        k = 10


        f1 = open(train_file, 'r')
        lines = f1.readlines()
        f1.close()

        train_samples = len(lines)
        val_samples = len(lines)//k

        # Shuffle the sample lines, then slice them into k folds of
        # val_samples lines each for cross-validation.
        num = len(lines)
        new_lines = []
        index = [n for n in range(num)]
        random.shuffle(index)
        for m in range(num):
            new_lines.append(lines[index[m]])

        lines = new_lines
        temp = []
        new_lines = []
        for i in range(num):
            # Start a new fold every val_samples lines. If num is not
            # divisible by k, the leftover lines form an extra, shorter fold.
            if i % val_samples == 0:
                temp = []
                new_lines.append(temp)
            temp.append(lines[i])

        batch_size = 32
        epochs = 40
        input_h = 96
        input_w = 96
        augmentation = False
        model = proposed_model()


        lr = 0.0001
        adam = Adam(lr=lr)
        model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])
        model.summary()
        history = model.fit_generator(
            generator_train_batch_proposed(new_lines, k, batch_size,
                                           num_classes, train_img_path,
                                           input_h, input_w,
                                           augmentation=augmentation),
            steps_per_epoch=train_samples // batch_size,
            epochs=epochs,
            callbacks=[Step()],
            validation_data=generator_val_batch_proposed(new_lines, k,
                                                         batch_size, num_classes,
                                                         train_img_path,
                                                         input_h, input_w,
                                                         augmentation=augmentation),
            validation_steps=val_samples // batch_size,
            verbose=1)
        plot_history(history, outputdir)
        save_history(history, outputdir)
        model.save_weights(outputdir+'proposed_model_{}.h5'.format(proposed))
    else:
        outputdir = 'result/NoAugment_{}/'.format(proposed)
        if os.path.isdir(outputdir):
            print('save in :' + outputdir)
        else:
            os.makedirs(outputdir)

        train_img_path = '/data/MIT-BIH_AD/'
        test_img_path = '/data/MIT-BIH_AD/'
        train_file = '/home/ccl/Documents/ECG-Arrhythmia-classification-in-2D-CNN/MIT-BIH_AD_train.txt'
        test_file = '/home/ccl/Documents/ECG-Arrhythmia-classification-in-2D-CNN/MIT-BIH_AD_val.txt'
        num_classes = 8

        f1 = open(train_file, 'r')
        f2 = open(test_file, 'r')
        lines = f1.readlines()
        f1.close()
        train_samples = len(lines)
        lines = f2.readlines()
        f2.close()
        val_samples = len(lines)

        batch_size = 32
        epochs = 40
        input_h = 96
        input_w = 96

        model = proposed_model()

        lr = 0.0001
        adam = Adam(lr=lr)
        model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])
        model.summary()
        history = model.fit_generator(
            generator_train_batch(train_file, batch_size, num_classes, train_img_path, input_h, input_w),
            steps_per_epoch=train_samples // batch_size,
            epochs=epochs,
            callbacks=[Step()],
            validation_data=generator_val_batch(test_file, batch_size, num_classes, test_img_path, input_h, input_w),
            validation_steps=val_samples // batch_size,
            verbose=1)
        plot_history(history, outputdir)
        save_history(history, outputdir)
        model.save_weights(outputdir+'proposed_model_{}.h5'.format(proposed))
Example #3
                                       shuffle=True,
                                       use_color=use_color,
                                       use_height=use_height,
                                       use_v1=use_v1,
                                       augment=False)
net = create_votenet(num_points=num_points,
                     pcd_feature_dims=num_features,
                     vote_factor=vote_factor,
                     num_class=Dataset_Config.num_class,
                     num_head_bin=Dataset_Config.num_heading_bin,
                     num_size_cluster=Dataset_Config.num_size_cluster,
                     num_proposal=num_proposals,
                     mean_size_arr=Dataset_Config.mean_size_arr,
                     random_proposal=random_proposal,
                     config=Dataset_Config)
step = Step(lr_decay_step, lr_decay_result, 0)
bn_decay_scheduler = BNDecayScheduler(bn_init=bn_momentum_init,
                                      decay_rate=bn_decay_rate,
                                      interval=bn_decay_interval,
                                      clip=bn_clip)
ckpt = ModelCheckpoint(
    os.path.join(log_dir,
                 'ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5'),
    save_best_only=False,
    monitor='val_loss',
    save_weights_only=True,
    period=1)
net.summary()


def loss_components(idx, name):
    def choice(y_true, y_pred):
Example #4
def main():

    outputdir = 'result/192_128_class_weight_v3_120eps/'
    if os.path.isdir(outputdir):
        print('save in :' + outputdir)
    else:
        os.makedirs(outputdir)

    train_img_path = '/data/MIT-BIH_AD_v3/'
    test_img_path = '/data/MIT-BIH_AD_v3/'
    train_file = './MIT-BIH_AD_train.txt'
    test_file = './MIT-BIH_AD_val.txt'
    num_classes = 8

    f1 = open(train_file, 'r')
    f2 = open(test_file, 'r')
    lines = f1.readlines()
    f1.close()
    train_samples = len(lines)
    lines = f2.readlines()
    f2.close()
    val_samples = len(lines)

    batch_size = 32
    epochs = 120
    input_h = 128
    input_w = 128

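    # Inverse-frequency class weights: each class gets
    # (1 - count_c / 107620) * num_classes, where the per-class counts
    # below sum to 107620 beats. Rare classes (e.g. class 6, with only
    # 106 beats) are weighted close to the maximum of 8.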
    class_weight = {
        0: (1 - (75016 / 107620)) * 8,
        1: (1 - (8072 / 107620)) * 8,
        2: (1 - (7256 / 107620)) * 8,
        3: (1 - (2544 / 107620)) * 8,
        4: (1 - (7130 / 107620)) * 8,
        5: (1 - (7024 / 107620)) * 8,
        6: (1 - (106 / 107620)) * 8,
        7: (1 - (472 / 107620)) * 8
    }

    model = proposed_model(nb_classes=num_classes)

    lr = 0.0001
    adam = Adam(lr=lr)
    model.compile(loss='categorical_crossentropy',
                  optimizer=adam,
                  metrics=['accuracy'])
    model.summary()
    history = model.fit_generator(
        generator_train_batch(train_file, batch_size, num_classes,
                              train_img_path, input_h, input_w),
        steps_per_epoch=train_samples // batch_size,
        epochs=epochs,
        callbacks=[Step()],
        validation_data=generator_val_batch(test_file, batch_size, num_classes,
                                            test_img_path, input_h, input_w),
        validation_steps=val_samples // batch_size,
        verbose=1,
        class_weight=class_weight)
    plot_history(history, outputdir)
    save_history(history, outputdir)
    model.save_weights(outputdir + 'proposed_model.h5')
Example #5
def onetenth_4_8_12(lr):
    steps = [4, 8, 12]
    lrs = [lr, lr / 10, lr / 100, lr / 1000]
    return Step(steps, lrs)
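
These factory functions build a Step schedule from a list of epoch boundaries plus a list of len(steps) + 1 learning rates. The Step class itself never appears in these snippets (and other examples call it as Step() or with a third argument, so the real class is more flexible). A minimal sketch of a callback matching the two-argument form, assuming it subclasses keras.callbacks.Callback:

import keras.backend as K
from keras.callbacks import Callback

class Step(Callback):
    # Hypothetical piecewise-constant LR schedule; the repo's real
    # implementation may differ.
    def __init__(self, steps, lrs):
        super(Step, self).__init__()
        self.steps = steps  # epoch boundaries, e.g. [4, 8, 12]
        self.lrs = lrs      # one more rate than there are boundaries

    def on_epoch_begin(self, epoch, logs=None):
        # Use lrs[i] while epoch < steps[i]; past the last boundary,
        # fall through to the final rate.
        for boundary, lr in zip(self.steps, self.lrs):
            if epoch < boundary:
                K.set_value(self.model.optimizer.lr, lr)
                return
        K.set_value(self.model.optimizer.lr, self.lrs[-1])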
Example #6
def wideresnet_step(lr):
    steps = [60, 120, 160]
    lrs = [lr, lr/5, lr/25, lr/125]
    return Step(steps, lrs)
Example #7
def onetenth_50_75(lr):
    # Note: despite the name, this schedule steps at epochs 25 and 40.
    steps = [25, 40]
    lrs = [lr, lr/10, lr/100]
    return Step(steps, lrs)
def dsn_step_200_230(dataset, lr):
    steps = [200, 230]
    lrs = [lr, lr / 2.5, lr / 25]
    return Step(steps, lrs)
def onetenth_200_230(dataset, lr):
    steps = [200, 230]
    lrs = [lr, lr / 10, lr / 100]
    return Step(steps, lrs)
def dsn_step_40_60(dataset, lr):
    steps = [40, 60]
    lrs = [lr, lr / 2.5, lr / 25]
    return Step(steps, lrs)
def nin_nobn_mnist(dataset, lr):
    steps = [40, 50]
    lrs = [lr, lr / 2, lr / 10]
    return Step(steps, lrs)
Example #12
# severe overfitting (~98% train_acc) at the same time. In conclusion, half data augmentation might be the best choice.
# Perhaps as the network has already used strong dropout (two layers with 0.5 drop rate), strong data
# augmentation harms the performance.
train_set = ModelNet40Dataset(root=data_root,
                              batch_size=batch_size,
                              npoints=num_points,
                              split='train',
                              shuffle=True,
                              augment=True)
val_set = ModelNet40Dataset(root=data_root,
                            batch_size=batch_size,
                            npoints=num_points,
                            split='test',
                            shuffle=False,
                            augment=False)
lr_scheduler = Step([20, 40, 60], [lr, lr / 10, lr / 100, lr / 1000])
lr_divider = Divide_lr(15, 1.5)
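# Divide_lr is not shown in these snippets; judging from its arguments it
# presumably divides the learning rate by 1.5 every 15 epochs (assumption).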
bn_decay_scheduler = BNDecayScheduler(bn_init=bn_momentum_init,
                                      decay_rate=bn_decay_rate,
                                      interval=bn_decay_interval,
                                      clip=bn_clip)
ckpt = ModelCheckpoint(
    os.path.join(log_dir,
                 'ep{epoch:03d}-acc{acc:.3f}-val_acc{val_acc:.3f}.h5'),
    save_best_only=True,
    monitor='val_acc',
    save_weights_only=True,
    period=1)
if classifier_name == 'pointnet2':
    model = pointnet2_cls_ssg(num_class, num_points, num_dim)
elif classifier_name == 'votenet_backbone':
    model = make_classifier(num_points, 0)
def onetenth_10_15_20(lr):
    steps = [10, 15, 20]
    lrs = [lr, lr / 10, lr / 100, lr / 1000]
    return Step(steps, lrs)
def onetenth_35_70(lr):
    steps = [35, 70]
    lrs = [lr, lr / 10, lr / 100]
    return Step(steps, lrs)
Example #15
def onetenth_20_30_40(lr):
    steps = [20, 30, 40]
    lrs = [lr, lr/10, lr/100, lr/1000]
    return Step(steps, lrs)
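
For reference, the schedules these factories return plug straight into the callbacks list used in the training examples above; e.g. (with train_gen, steps, and the compiled model standing in for whatever your script defines):

model.fit_generator(train_gen,
                    steps_per_epoch=steps,
                    epochs=50,
                    callbacks=[onetenth_20_30_40(0.1)])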