Example #1
def test_pool():
    dataset_df = load_dataset_df(
        '/store/kaggle/tgs',
        'train.csv',
    )
    dataset = TgsSaltDataset(dataset_df, has_y=True)

    dataloader = DataLoader(dataset, batch_size=8, shuffle=True)
    sample = pipe(dataloader, first)['mask']  # masks from the first batch
    # kernel_size=101 collapses each 101x101 mask to a single max value
    pooled = F.max_pool2d(sample, kernel_size=101)

    writer = SummaryWriter(f'{config["TENSORBORAD_LOG_DIR"]}/test')
    writer.add_image(f"mask", vutils.make_grid([*sample]), 0)

    writer.add_image(f"pooled", vutils.make_grid([*interpolated]), 0)
Example #2
def test_rle_decode():
    dataset_df = load_dataset_df('/store/kaggle/tgs')
    idx = dataset_df.index.get_loc('1fba03699e')
    sample = dataset_df.iloc[idx]
    output = rle_decode(sample['rle_mask'], (101, 101))  # RLE string -> binary mask
    output = torch.FloatTensor(output).view(1, 101, 101)
    assert output.sum() == 487  # known foreground pixel count for this sample
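
rle_decode is project code; a minimal sketch of the usual Kaggle-style run-length decoding (pairs of 1-indexed start/length values over column-major pixel order) looks like this, though the exact conventions of the project's implementation are an assumption:

import numpy as np

def rle_decode_sketch(rle_mask, shape):
    # rle_mask: "start length start length ...", 1-indexed positions
    mask = np.zeros(shape[0] * shape[1], dtype=np.uint8)
    values = [int(x) for x in str(rle_mask).split()]
    for start, length in zip(values[0::2], values[1::2]):
        mask[start - 1:start - 1 + length] = 1
    return mask.reshape(shape, order='F')  # column-major, per Kaggle convention
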
Example #3
def test_add_noise():
    dataset_df = load_dataset_df(
        '/store/kaggle/tgs',
        'train.csv',
    )
    dataset = TgsSaltDataset(dataset_df)

    dataloader = DataLoader(dataset, batch_size=2, shuffle=True)
    sample = pipe(dataloader, first)['image']
    noised = add_noise(sample)

    writer = SummaryWriter(f'{config["TENSORBORAD_LOG_DIR"]}/test')
    writer.add_image(
        "add_noise",
        vutils.make_grid([*sample, *noised]),  # originals alongside noised copies
    )
Example #4
def test_dataset():
    dataset_df = load_dataset_df('/store/kaggle/tgs')
    dataset = TgsSaltDataset(dataset_df)
    scores = []
    for i in range(12):
        sample = dataset[i]
        # IoU of a mask with itself should be exactly 1.0
        score = iou(sample['mask'].numpy(), sample['mask'].numpy())
        scores.append(score)
    assert np.round(np.mean(scores), decimals=3) == 1.0
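iou is project code; for binary masks, intersection over union reduces to the following (a minimal sketch, assuming numpy arrays of 0s and 1s):

import numpy as np

def iou_sketch(a, b):
    # Intersection over union for binary masks; identical masks give 1.0.
    a, b = a > 0.5, b > 0.5
    union = np.logical_or(a, b).sum()
    if union == 0:
        return 1.0  # both masks empty: treat as a perfect match
    return np.logical_and(a, b).sum() / union
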
Example #5
def test_erase():
    dataset_df = load_dataset_df(
        '/store/kaggle/tgs',
        'train.csv',
    )
    dataset = TgsSaltDataset(dataset_df, has_y=True)

    dataloader = DataLoader(dataset, batch_size=8, shuffle=True)
    sample = pipe(dataloader, first)['image']
    # erase_p=1 forces erasing on every image, erase_num patches each
    noised = add_noise(sample, erase_num=5, erase_p=1)

    writer = SummaryWriter(f'{config["TENSORBORAD_LOG_DIR"]}/test')
    writer.add_image(
        "random_erase",
        vutils.make_grid([*sample, *noised]),
    )
Example #6
def test_flip():
    writer = SummaryWriter(f'{config["TENSORBORAD_LOG_DIR"]}/test')
    dataset_df = load_dataset_df('/store/kaggle/tgs')
    dataset = TgsSaltDataset(dataset_df)
    writer.add_image(
        "flip",
        vutils.make_grid(
            # fetch the same sample 8 times; random flips differ per fetch
            pipe(range(8), map(lambda _: dataset[12]),
                 map(lambda x: [x['image'], x['mask']]), concat, list)),
    )
Example #7
def test_predict(csv_fn, is_train):
    dataset_df = load_dataset_df('/store/kaggle/tgs', csv_fn).sample(10)
    dataset = TgsSaltDataset(dataset_df, is_train=is_train)
    model = UNet()
    model_paths = ['/store/tmp/model.pt']
    torch.save(model, model_paths[0])
    predicted_df = predict(model_paths=model_paths,
                           log_dir=f'{config["TENSORBORAD_LOG_DIR"]}/test',
                           dataset=dataset,
                           log_interval=1)
    print(predicted_df)
    assert len(predicted_df) == len(dataset_df)
Example #8
def test_train():
    dataset_df = load_dataset_df('/store/kaggle/tgs')
    train_df, val_df = train_test_split(dataset_df)
    output_dir = '/store/tmp'
    train(
        model_path=f"{output_dir}/model.pt",
        train_dataset=TgsSaltDataset(train_df),
        val_dataset=TgsSaltDataset(val_df),
        epochs=1000,
        batch_size=32,
        feature_size=32,
        patience=5,
        base_size=5,
        log_dir=f'{config["TENSORBORAD_LOG_DIR"]}/test',
    )
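
The patience argument suggests train stops early once validation loss stalls. A minimal sketch of that standard pattern (an assumption about train's internals, shown over a precomputed list of losses):

def early_stop_epoch(val_losses, patience):
    # Return the epoch index at which patience-based early stopping triggers.
    best, stale = float('inf'), 0
    for epoch, loss in enumerate(val_losses):
        if loss < best:
            best, stale = loss, 0  # improvement: reset the counter
        else:
            stale += 1
            if stale >= patience:
                return epoch
    return len(val_losses) - 1  # never triggered: ran all epochs
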
Example #9
def test_cleanup():
    dataset_df = load_dataset_df('/store/kaggle/tgs')
    dataset_df = cleanup(dataset_df)
    assert len(dataset_df) == 3808
Example #10
def test_dataset():
    dataset_df = load_dataset_df('/store/kaggle/tgs')
    dataset = TgsSaltDataset(dataset_df)
    assert len(dataset) == 4000
Example #11
def test_dense_crf():
    dataset_df = load_dataset_df('/store/kaggle/tgs').dropna().head(5)
    dataset = TgsSaltDataset(dataset_df)
    sample = dataset[0]
    print(sample['image'].shape)
    refined = dense_crf(
        sample['image'].view(-1, 101, 101),
        sample['mask'].view(-1, 101, 101),
    )
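
dense_crf is project code; DenseCRF post-processing is commonly built on the pydensecrf package, and a rough sketch for a grayscale 101x101 image could look like this (the pairwise kernel parameters are placeholders, not the project's values):

import numpy as np
import pydensecrf.densecrf as dcrf
from pydensecrf.utils import unary_from_softmax

def dense_crf_sketch(image, prob, n_iters=5):
    # image: (H, W) grayscale floats in [0, 1]; prob: (H, W) foreground probability
    h, w = prob.shape
    probs = np.stack([1 - prob, prob]).astype(np.float32)  # (2, H, W) class probs
    d = dcrf.DenseCRF2D(w, h, 2)
    d.setUnaryEnergy(unary_from_softmax(probs))
    d.addPairwiseGaussian(sxy=3, compat=3)  # smoothness kernel
    rgb = np.ascontiguousarray(np.stack([image] * 3, axis=-1) * 255).astype(np.uint8)
    d.addPairwiseBilateral(sxy=60, srgb=10, rgbim=rgb, compat=5)  # appearance kernel
    q = d.inference(n_iters)
    return np.array(q).reshape(2, h, w)[1]  # refined foreground probability
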