# Example #1 (snippet — enclosing function definition and surrounding lines not included)
    # Load the training data and metadata from Gen-1
    g1_d = load_pickle(os.path.join(__DATA_DIR,
                                    'g1-train-test/test_fd.pickle'))
    g1_md = load_pickle(
        os.path.join(__DATA_DIR, 'g1-train-test/test_md.pickle'))

    # Convert data to time domain, take magnitude, apply window
    g1_d = correct_g1_ini_ant_ang(g1_d)
    g1_d = np.abs(to_td(g1_d))
    g2_d = np.abs(to_td(g2_d))

    # Perform data augmentation
    g2_d, g2_md = aug_hor_translate(g2_d, g2_md)

    g2_d = resize_features_for_keras(g2_d)
    g1_d = resize_features_for_keras(g1_d)
    g2_labels = to_categorical(get_class_labels(g2_md))
    g1_labels = to_categorical(get_class_labels(g1_md))

    n_runs = 20

    # Init arrays for storing performance metrics
    auc_scores = np.zeros([
        n_runs,
    ])
    accs = np.zeros([
        n_runs,
    ])
    sens = np.zeros([
        n_runs,
# Example #2 (snippet — enclosing function definition and surrounding lines not included)
    g1_tr_d = load_pickle(os.path.join(__DATA_DIR,
                                       'g1-train-test/train_fd.pickle'))
    g1_tr_md = load_pickle(os.path.join(__DATA_DIR,
                                        'g1-train-test/train_md.pickle'))

    # Set the G1 data to be the validation data
    val_data = g1_tr_d
    val_md = g1_tr_md

    # Correct the initial antenna position used in G1 scans
    val_data = correct_g1_ini_ant_ang(val_data)

    # Preprocess data, take magnitude and apply time-window, augment
    # training dataset
    val_data = np.abs(to_td(val_data))
    val_data = resize_features_for_keras(val_data)
    train_data = np.abs(to_td(train_data))
    train_data, train_md = full_aug(train_data, train_md)
    train_data = resize_features_for_keras(train_data)

    # Get the validation and train set class labels and make categorical
    val_labels = get_class_labels(val_md)
    val_labels = to_categorical(val_labels)
    train_labels = get_class_labels(train_md)
    train_labels = to_categorical(train_labels)

    # Create arrays for storing the AUC on the train and validation
    # sets for this regularization parameter after training with
    # correct labels
    train_set_aucs = np.zeros([__N_RUNS, __N_EPOCHS])
    val_set_aucs = np.zeros([__N_RUNS, __N_EPOCHS])
        test_data = g2_d[tar_idxs, :, :]
        train_data = g2_d[~tar_idxs, :, :]
        test_md = g2_md[tar_idxs]
        train_md = g2_md[~tar_idxs]

        # Perform data augmentation on the training set here
        train_data, train_md = full_aug(train_data, train_md)

        # Get class labels for train/test sets here
        test_labels = get_class_labels(test_md)
        train_labels = get_class_labels(train_md)
        test_labels = to_categorical(test_labels)
        train_labels = to_categorical(train_labels)

        # Resize data for use with keras
        test_data = resize_features_for_keras(test_data)
        train_data = resize_features_for_keras(train_data)

        # Iterate over number of desired runs
        for run_idx in range(__N_RUNS):

            logger.info('\tWorking on run [%2d / %2d]...' %
                        (run_idx + 1, __N_RUNS))

            # Create the cnn model
            model = get_sino_cnn(input_shape=np.shape(train_data)[1:],
                                 lr=0.001)

            # Train the model
            model.fit(x=train_data,
                      y=train_labels,