Example #1
    # some default params dataset/architecture related
    print("Params:")
    for k, v in model_params.items():
        print("\t%s: %s" % (k, v))
    print("Train params:")
    for k, v in train_params.items():
        print("\t%s: %s" % (k, v))

    # ==========================================================================
    # DATA PREPARATION
    # ==========================================================================
    train_params['test'] = args.test
    train_params['train'] = args.train
    if not args.train:
        train_params['validation_set'] = False
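    # Build the data provider for the chosen dataset path and the train/test settings above.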
    data_provider = get_data_provider_by_path(args.dataset, train_params)

    # ==========================================================================
    # TRAINING & TESTING & EVALUATING
    # ==========================================================================
    print("Initialize the model..")
    model_params['sequence_length'] = train_params['sequence_length']
    model_params['crop_size'] = train_params['crop_size']
    model_params['Inc_params'] = Inc_params
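    # Instantiate the I3D network with the merged model/train parameters.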
    model = I3D_Net(data_provider=data_provider, **model_params)
    if args.train:
        print("Data provider train videos: ", data_provider.train.num_examples)
Example #2
def main(argv=None):  # pylint: disable=unused-argument

    config = argparser(is_train=True)
    all_train_dir, all_result_file_name = construct_train_dir(config)

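    # Pick the dataset directory that matches the requested training HDF5 file.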
    if config.hdf5FileNametrain == 'train_MRIdata_3_AD_MCI_Normal.hdf5':
        dataset_path = os.path.join(r"./datasets/mri/3_AD_MCI_Normal/")
    elif config.hdf5FileNametrain == 'train_MRIdata_2_AD_MCI.hdf5':
        dataset_path = os.path.join(r"./datasets/mri/2_AD_MCI/")
    elif config.hdf5FileNametrain == 'train_MRIdata_2_AD_Normal.hdf5':
        dataset_path = os.path.join(r"./datasets/mri/2_AD_Normal/")
    else:
        dataset_path = os.path.join(r"./datasets/mri/2_MCI_Normal/")

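    # The class count is encoded as the third underscore-separated token of the
    # training filename (e.g. '3' in train_MRIdata_3_AD_MCI_Normal.hdf5).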
    input_file_name = config.hdf5FileNametrain
    name_list = input_file_name.split("_")
    class_num = name_list[2]

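    # Build labelled/unlabelled training, validation, and test splits (80/20) from the HDF5 files.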
    dataset_train_unlabelled, dataset_test, all_hdf5_data_train, all_hdf5_data_test, dataset_train_labelled, dataset_val, all_hdf5_data_val = dataset.create_default_splits8020(
        dataset_path, config.hdf5FileNametrain, config.testhdf5FileName,
        config.valhdf5FileName, config.idFileNametrain, config.testidFileName,
        config.validFileName, config.num_less_label_data, class_num)

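    # Wrap the HDF5 splits in a single data provider (the trailing 0 appears to be the fold index).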
    data_provider = get_data_provider_by_path(config, dataset_train_unlabelled,
                                              dataset_train_labelled,
                                              dataset_test,
                                              all_hdf5_data_train,
                                              all_hdf5_data_test, dataset_val,
                                              all_hdf5_data_val, 0)

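    # Build the semi-supervised 3D Triple-GAN model on top of the data provider.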
    model = TripleGAN3D(config, data_provider, all_train_dir, 0, is_train=True)

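    # Make sure the confusion-matrix results directory exists before any reports are written.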
    if tf.gfile.Exists(all_train_dir[0] + "/GANconfusionMatrixResults"):
        log.infov(all_train_dir[0] + "/GANconfusionMatrixResults")
    else:
        os.makedirs(all_train_dir[0] + "/GANconfusionMatrixResults")

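    # Training phase: run all epochs and time the full run.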
    if config.train:
        total_start_time = time.time()
        print("Data provider train labelled images: ",
              data_provider.train_labelled.num_examples)
        print("Data provider train unlabelled images: ",
              data_provider.train_unlabelled.num_examples)
        best_epoch = model.train_all_epochs(config)
        total_training_time = time.time() - total_start_time

        print(
            "\n  Total training time for all epochs: %s (%s seconds)" %
            (str(timedelta(seconds=total_training_time)), total_training_time))

        # Persist the training-time report alongside the confusion-matrix results.
        with open(
                all_train_dir[0] +
                "/GANconfusionMatrixResults/train_timeReport.txt", 'w') as fxx:
            fxx.write(
                "\n  Total training time for all epochs: %s (%s seconds)" %
                (str(timedelta(seconds=total_training_time)),
                 total_training_time))
            fxx.write('\n')

    if config.test:

        # Restore the best checkpoint found during training (assumes config.train
        # also ran in this session, so best_epoch is defined).
        model.load_model(best_epoch)
        print("Data provider test images: ", data_provider.test.num_examples)
        print("Testing...")

        total_start_time = time.time()

        model.test_and_record(all_result_file_name[0],
                              0,
                              config,
                              all_train_dir[0],
                              data_provider.test,
                              batch_size=config.batch_size_label)

        total_test_time = time.time() - total_start_time

        print("\n   Total test time for all epoches : %s  and %s seconds" %
              (str(timedelta(seconds=total_test_time)), total_test_time))

        with open(
                all_train_dir[0] +
                "/GANconfusionMatrixResults/test_timeReport.txt", 'w') as fxx:
            fxx.write("\n  Total test time for all epochs: %s (%s seconds)" %
                      (str(timedelta(seconds=total_test_time)), total_test_time))
            fxx.write('\n')
            fxx.write(
                "\n   Test time per record: %s (%s seconds)" %
                (str(
                    timedelta(seconds=(total_test_time /
                                       float(data_provider.test.num_examples)))),
                 (total_test_time / float(data_provider.test.num_examples))))
            fxx.write('\n')

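    # Recover the human-readable class labels (e.g. AD, MCI, Normal) from the training filename.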
    input_file_name = config.hdf5FileNametrain
    class_labels = []
    name_list = input_file_name.split("_")
    if int(name_list[2]) == 3:
        class_labels.append(name_list[3])
        class_labels.append(name_list[4])
        last_class = name_list[5].split(".")
        class_labels.append(last_class[0])
    else:
        class_labels.append(name_list[3])
        last_class = name_list[4].split(".")
        class_labels.append(last_class[0])

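    # Evaluate each fold's result file and write a confusion-matrix report.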
    accuracy_10folds_all = []
    fold_write = 0
    for each_result_file_name in all_result_file_name:
        if fold_write < 1:
            accuracy, cr, cm, auc = calculateConfusionMatrix(
                each_result_file_name, class_labels, all_train_dir[0],
                int(name_list[2]))
        else:
            accuracy, cr, cm, auc = calculateConfusionMatrix(
                each_result_file_name, class_labels, './train_dir',
                int(name_list[2]))

        # fold_write is never incremented in this snippet, so every fold writes
        # to the same ConfusionMatrix.txt under all_train_dir[0].
        with open(
                all_train_dir[0] +
                "/GANconfusionMatrixResults/ConfusionMatrix.txt", 'w') as f:
            log.info("Fold: {}".format(fold_write))
            f.write(each_result_file_name)
            f.write(
                '{}\n\nClassification Report\n\n{}\n\nConfusion Matrix\n\n{}\n'.
                format(config.hdf5FileNametrain, cr, cm))
            f.write("accuracy: {}\n".format(accuracy))
            log.info("accuracy: {}".format(accuracy))

            if int(name_list[2]) == 3:
                # Three-class case: one AUC value per class.
                for each_auc in auc:
                    f.write("auc: {}\n".format(each_auc))
                    log.info("auc: {}".format(each_auc))
            else:
                # Binary case: a single AUC value.
                f.write("auc: {}\n".format(auc))
                log.info("auc: {}".format(auc))
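
The calculateConfusionMatrix helper used above is project-specific and not shown here. As a rough sketch only, per-fold metrics of this shape (accuracy, classification report, confusion matrix, and a per-class AUC list in the 3-class case) are commonly computed with scikit-learn along the following lines; the function name, its signature, and the in-memory label arrays are assumptions, since the real helper reads a results file.

import numpy as np
from sklearn.metrics import (accuracy_score, classification_report,
                             confusion_matrix, roc_auc_score)
from sklearn.preprocessing import label_binarize

def compute_fold_metrics(y_true, y_score, num_classes):
    # Hypothetical stand-in for calculateConfusionMatrix.
    # y_true: integer labels in [0, num_classes); y_score: (N, num_classes) probabilities.
    y_pred = np.argmax(y_score, axis=1)
    accuracy = accuracy_score(y_true, y_pred)
    cr = classification_report(y_true, y_pred)
    cm = confusion_matrix(y_true, y_pred)
    if num_classes == 3:
        # One one-vs-rest AUC per class, matching the per-class loop above.
        y_bin = label_binarize(y_true, classes=list(range(num_classes)))
        auc = [roc_auc_score(y_bin[:, c], y_score[:, c])
               for c in range(num_classes)]
    else:
        # Binary case: a single AUC from the positive-class score.
        auc = roc_auc_score(y_true, y_score[:, 1])
    return accuracy, cr, cm, auc

The 3-class branch returns one AUC per class, which is why the snippet iterates over auc when int(name_list[2]) == 3 and writes a single value otherwise.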