Example #1
import os
import torch
from pytorch_lightning.callbacks import ModelCheckpoint

# The opening of this call is truncated in the source; reconstructed here on
# the assumption (from the {epoch}/{val_loss} filename template and mode='min')
# that it is pytorch_lightning's ModelCheckpoint monitoring validation loss.
checkpoint_callback = ModelCheckpoint(
    monitor='val_loss',
    filename='detection-{epoch:02d}-{val_loss:.2f}',
    save_top_k=10,
    save_last=True,
    verbose=True,
    mode='min')

from dataset.dataset import get_train_transforms, get_valid_transforms, zaloDataset, collate_fn
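# dataset.dataset is not shown on this page; for detection datasets that
# return (image, target) pairs with a variable number of boxes per image,
# the collate function is commonly just a batch zip. A sketch under that
# assumption (the project's actual collate_fn may differ):
def _reference_collate(batch):
    return tuple(zip(*batch))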

train_dataset = zaloDataset(
    root_path=os.path.join(os.getcwd(),
                           'data/za_traffic_2020/traffic_train/images'),
    file_name=os.path.join(
        os.getcwd(),
        'data/za_traffic_2020/traffic_train/train_traffic_sign_dataset.json'),
    transforms=get_train_transforms())
# Note: the validation set reuses the training images and annotation file;
# only the transforms differ.
valid_dataset = zaloDataset(
    root_path=os.path.join(os.getcwd(),
                           'data/za_traffic_2020/traffic_train/images'),
    file_name=os.path.join(
        os.getcwd(),
        'data/za_traffic_2020/traffic_train/train_traffic_sign_dataset.json'),
    transforms=get_valid_transforms())
train_loader = torch.utils.data.DataLoader(train_dataset,
                                           batch_size=10,
                                           shuffle=True,
                                           num_workers=8,
                                           collate_fn=collate_fn)
# The tail of this call is truncated in the source; completed here to mirror
# the train loader, with shuffling disabled for validation.
valid_loader = torch.utils.data.DataLoader(valid_dataset,
                                           batch_size=10,
                                           shuffle=False,
                                           num_workers=8,
                                           collate_fn=collate_fn)
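# A minimal sketch of how these pieces would be wired together, assuming the
# snippet targets pytorch_lightning; `model` and the epoch count below are
# placeholders, not from the original:
#
#     import pytorch_lightning as pl
#     trainer = pl.Trainer(max_epochs=40, callbacks=[checkpoint_callback])
#     trainer.fit(model, train_loader, valid_loader)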
Example #2
    # The opening of this call is truncated in the source; it appears to
    # initialize a per-epoch metrics logger with these CSV-style columns:
                          header=['epoch', 'loss', 'acc', 'lr'])

    # Label-smoothed cross-entropy (smoothing=0.05) rather than plain CE
    # criterion = nn.CrossEntropyLoss()
    criterion = LabelSmoothing(smoothing=0.05).cuda(device_id)
    # AdamW (decoupled weight decay) chosen over the SGD/Adam alternatives
    # optimizer = optim.SGD(model.parameters(), lr=lr, momentum=0.9)
    # optimizer = optim.Adam(model.parameters(), lr=lr)
    optimizer = optim.AdamW(model.parameters(), lr=lr)
    # Decay the learning rate by 10% every 2 epochs
    scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=2, gamma=0.9)
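    # A minimal sketch of the epoch loop these objects imply; train_one_epoch
    # and evaluate are placeholder names, not from the original:
    #
    #     for epoch in range(epochs):
    #         train_one_epoch(model, train_loader, criterion, optimizer)
    #         evaluate(model, eval_loader, criterion)
    #         scheduler.step()  # StepLR: lr *= 0.9 once per 2 epochs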

    is_train = False  # the training branch below is disabled in this example
    if is_train:
        # Training targets are one-hot, unlike the eval set further below
        xdl = DeeperForensicsDatasetNew(
            real_npys=train_real_paths_npy,
            fake_npys=train_fake_paths_npy,
            is_one_hot=True,
            transforms=get_train_transforms(size=300))
        # BalanceClassSampler downsamples the majority class so real and fake
        # samples are drawn in balance; DataLoader forbids combining a sampler
        # with shuffle=True, hence shuffle=False here.
        train_loader = DataLoader(xdl,
                                  batch_size=batch_size,
                                  shuffle=False,
                                  num_workers=4,
                                  sampler=BalanceClassSampler(
                                      labels=xdl.get_labels(),
                                      mode="downsampling"))
        # train_loader = DataLoader(xdl, batch_size=batch_size, shuffle=True, num_workers=4)
        train_dataset_len = len(xdl)

        # Validation clips use the eval transforms and integer (non-one-hot) labels
        xdl_eval = DeeperForensicsDatasetNew(
            real_npys=val_real_paths_npy,
            fake_npys=val_fake_paths_npy,
            is_one_hot=False,
            transforms=get_valid_transforms(size=300))
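        # The snippet ends here; the eval loader would typically follow the
        # same pattern without a sampler (a sketch, not the original code):
        #
        #     eval_loader = DataLoader(xdl_eval, batch_size=batch_size,
        #                              shuffle=False, num_workers=4)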