# Exemplo n.º 1
# 0
                  # NOTE(review): these keyword arguments continue a
                  # model.fit_generator(...) call whose opening lines are
                  # above this chunk — presumably one epoch per outer loop
                  # iteration (initial_epoch=iter00); confirm against caller.
                  class_weight=None,
                  max_queue_size=10,
                  initial_epoch=iter00,
                  verbose=1,
                  workers=workers_num)

        # Advance the outer training-iteration counter.
        iter00 = iter00 + 1
        ''' Test after N*(display_status_ratio) iteration.'''
        # Copy freshly trained weights into the full-size model so prediction
        # can run on full-resolution light-field view stacks.
        weight_tmp1 = model.get_weights()
        model_512.set_weights(weight_tmp1)
        train_output = model_512.predict(
            [traindata_90d, traindata_0d, traindata_45d, traindata_m45d],
            batch_size=1)
        ''' Save prediction image(disparity map) in 'current_output/' folder '''
        train_error, train_bp = display_current_output(train_output,
                                                       traindata_label, iter00,
                                                       directory_t)

        # Report metrics as MSE*100 and bad-pixel ratio in percent.
        training_mean_squared_error_x100 = 100 * np.average(
            np.square(train_error))
        training_bad_pixel_ratio = 100 * np.average(train_bp)

        # Checkpoint filename encodes iteration, train MSE and bad-pixel ratio.
        save_path_file_new = (directory_ckp +
                              '/iter%04d_trainmse%.3f_bp%.2f.hdf5' %
                              (iter00, training_mean_squared_error_x100,
                               training_bad_pixel_ratio))
        """ 
        Save bad pixel & mean squared error
        """
        print(save_path_file_new)
        # Append metrics to the running log; NOTE(review): the handle opened
        # here is not closed within this chunk — presumably closed below.
        f1 = open(txt_name, 'a')
# Exemplo n.º 2
# 0
    # Train for one more epoch: with initial_epoch=iter00 and epochs=iter00+1,
    # Keras fit_generator runs a single epoch; reduce_lr lowers the learning
    # rate on plateau. Enclosing loop/function is above this chunk.
    model.fit_generator(my_generator, steps_per_epoch = steps_per_epoch, 
                        epochs = iter00+1, class_weight=None, max_queue_size=10, 
                        initial_epoch=iter00, verbose=1,workers=workers_num,callbacks=[reduce_lr])

    # Advance the outer training-iteration counter.
    iter00=iter00+1
    
    
    ''' Test after N*(display_status_ratio) iteration.'''
    # Sync trained weights into the full-resolution (512) evaluation model.
    weight_tmp1=model.get_weights() 
    model_512.set_weights(weight_tmp1)
    # Predict disparity on both the train and validation 512x512 view stacks.
    train_output=model_512.predict([train512_data["90d"],train512_data["0d"],train512_data["45d"],train512_data["m45d"]],batch_size=1)
    val_output=model_512.predict([val512_data["90d"],val512_data["0d"],val512_data["45d"],val512_data["m45d"]],batch_size=1)


    ''' Save prediction image(disparity map) in 'current_output/' folder '''    
    train_error, train_bp = display_current_output(train_output, train512_data["label"], iter00, output_dir, split="train")
    val_error, val_bp = display_current_output(val_output, val512_data["label"], iter00, output_dir, split='val')


#    training_mean_squared_error_x100=100*np.average(np.square(train_error))
#    training_bad_pixel_ratio=100*np.average(train_bp)
    # Track raw MSE for the loss curves; scaled (x100) values are only used
    # for reporting.
    train_mse = np.average(np.square(train_error))
    val_mse = np.average(np.square(val_error))
    val_mean_squared_error_x100 = 100*val_mse
    val_bad_pixel_ratio=100*np.average(val_bp)
    
    # Accumulate per-iteration losses (lists defined outside this chunk).
    val_loss.append(val_mse)
    train_loss.append(train_mse)
    if plot_losses:
        plt.plot(train_loss)
        plt.plot(val_loss)
# Exemplo n.º 3
# 0
                                  # NOTE(review): continuation of an
                                  # np.rot90(...) call (presumably building
                                  # backrot_val_output) begun above this chunk.
                                  k=tcind2k[test_corner_ind],
                                  axes=(1, 2))
    # Rotate predictions and labels back to the canonical orientation
    # (tcind2k maps a test-corner index to a rot90 k count over spatial axes
    # 1 and 2) so outputs are comparable to the unrotated ground truth.
    backrot_val_label = np.rot90(val512_data[test_corner]["label"],
                                 k=tcind2k[test_corner_ind],
                                 axes=(1, 2))

    backrot_train_output = np.rot90(train_output,
                                    k=tcind2k[test_corner_ind],
                                    axes=(1, 2))
    backrot_train_label = np.rot90(train512_data[test_corner]["label"],
                                   k=tcind2k[test_corner_ind],
                                   axes=(1, 2))

    # Save per-split prediction images and get per-pixel error / bad-pixel maps.
    train_error, train_bp = display_current_output(backrot_train_output,
                                                   backrot_train_label,
                                                   iter00,
                                                   output_dir,
                                                   split='train',
                                                   corner=test_corner)
    val_error, val_bp = display_current_output(backrot_val_output,
                                               backrot_val_label,
                                               iter00,
                                               output_dir,
                                               split='val',
                                               corner=test_corner)

    #    training_mean_squared_error_x100=100*np.average(np.square(train_error))
    #    training_bad_pixel_ratio=100*np.average(train_bp)
    # Raw MSE per split; validation metrics are also scaled for reporting.
    train_mse = np.average(np.square(train_error))
    val_mse = np.average(np.square(val_error))
    val_mean_squared_error_x100 = 100 * val_mse
    val_bad_pixel_ratio = 100 * np.average(val_bp)
# Exemplo n.º 4
# 0
def _evaluate_split(model_512, view_stacks, labels, iter_idx, directory_t, tag):
    """Predict full-size disparity maps for one split and print its metrics.

    Extracted because the train and val evaluation paths in main() were
    duplicated verbatim.

    Args:
        model_512: full-resolution EPINET model with weights already loaded.
        view_stacks: list [90d, 0d, 45d, m45d] of view stacks fed to predict.
        labels: ground-truth disparity maps for this split.
        iter_idx: index passed through to display_current_output (names the
            saved prediction images; train uses 0, val uses 1, as before).
        directory_t: directory where prediction images are saved.
        tag: split name ('train' or 'val'), used only in the printed report.

    Returns:
        Tuple (mse_x100, bad_pixel_ratio) of floats.
    """
    output = model_512.predict(view_stacks, batch_size=1)
    error, bad_pixels = display_current_output(output, labels, iter_idx,
                                               directory_t)
    mse_x100 = 100 * np.average(np.square(error))
    bad_pixel_ratio = 100 * np.average(bad_pixels)
    print('%s mse%.3f,bp%.2f' % (tag, mse_x100, bad_pixel_ratio))
    return mse_x100, bad_pixel_ratio


def main():
    """Load LF data, restore a pretrained EPINET checkpoint, report metrics."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--dir',
                        type=str,
                        default='epinet_output/mytrain/',
                        help='save directory')
    args = parser.parse_args()
    '''
    settings
    '''
    directory_t = args.dir
    # exist_ok avoids the check-then-create race of exists()+makedirs().
    os.makedirs(directory_t, exist_ok=True)
    # 9 angular views per direction (fixed local spelling of 'AngualrViews').
    angular_views = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8])
    ''' 
    Define Model parameters    
        first layer:  3 convolutional blocks, 
        second layer: 7 convolutional blocks, 
        last layer:   1 convolutional block
    '''
    model_conv_depth = 7  # 7 convolutional blocks for second layer
    model_filt_num = 70
    model_learning_rate = 0.1**7
    ''' 
    Load Train data from LF .png files
    '''
    print('Load training data...')
    train_dirs = [
        'additional/antinous', 'additional/boardgames', 'additional/dishes',
        'additional/greek', 'additional/kitchen', 'additional/medieval2',
        'additional/museum', 'additional/pens', 'additional/pillows',
        'additional/platonic', 'additional/rosemary', 'additional/table',
        'additional/tomb', 'additional/tower', 'additional/town',
        'additional/vinyl'
    ]
    traindata_all, traindata_label = load_LFdata(train_dirs)
    traindata_90d, traindata_0d, traindata_45d, traindata_m45d, _ = generate_traindata512(
        traindata_all, traindata_label, angular_views)
    ''' 
    Load Test data from LF .png files
    '''
    print('Load test data...')
    val_dirs = [
        'stratified/backgammon', 'stratified/dots', 'stratified/pyramids',
        'stratified/stripes', 'training/boxes', 'training/cotton',
        'training/dino', 'training/sideboard'
    ]
    valdata_all, valdata_label = load_LFdata(val_dirs)

    valdata_90d, valdata_0d, valdata_45d, valdata_m45d, valdata_label = generate_traindata512(
        valdata_all, valdata_label, angular_views)
    # (valdata_90d, 0d, 45d, m45d) to validation or test
    print('Load test data... Complete')
    ''' 
    Model for predicting full-size LF images  
    '''
    image_w = 512
    image_h = 512
    model_512 = define_epinet(image_w, image_h, angular_views,
                              model_conv_depth, model_filt_num,
                              model_learning_rate)
    '''
    load weights
    '''
    # model_512.load_weights('epinet_checkpoints/pretrained_9x9.hdf5')
    model_512.load_weights(
        'epinet_checkpoints/EPINET_train_ckp/iter0198_trainmse0.827_bp2.56.hdf5'
    )
    '''    
    show train results
    '''
    _evaluate_split(
        model_512,
        [traindata_90d, traindata_0d, traindata_45d, traindata_m45d],
        traindata_label, 0, directory_t, 'train')

    _evaluate_split(model_512,
                    [valdata_90d, valdata_0d, valdata_45d, valdata_m45d],
                    valdata_label, 1, directory_t, 'val')