Beispiel #1
0
def init_dataloaders_1s_mt(args,
                           batch_size,
                           num_workers,
                           is_bgr=True,
                           is_shuffle=True):
    """Build one source dataloader plus one dataloader per target dataset.

    Args:
        args: parsed options; reads ``args.imdb_name`` (source dataset name)
            and ``args.imdb_name_target`` (iterable of target dataset names).
        batch_size: samples per batch for every dataloader.
        num_workers: worker processes per dataloader.
        is_bgr: channel-order flag forwarded to ``roibatchLoader``.
        is_shuffle: accepted for interface compatibility; not used here.

    Returns:
        ``(dataloader_s, m_dataloader_t, imdb, m_imdb_t)`` where the ``m_*``
        values are lists aligned with ``args.imdb_name_target``.
    """
    # Source domain: roidb plus aspect-ratio bookkeeping, one batch sampler.
    imdb, roidb, ratio_list, ratio_index = combined_roidb(args.imdb_name)
    src_sampler = sampler(len(roidb), batch_size)
    dataset_s = roibatchLoader(roidb, ratio_list, ratio_index, batch_size,
                               imdb.num_classes, training=True, is_bgr=is_bgr)
    dataloader_s = torch.utils.data.DataLoader(dataset_s,
                                               batch_size=batch_size,
                                               sampler=src_sampler,
                                               num_workers=num_workers)

    # One independent dataloader per target dataset.
    m_dataloader_t, m_imdb_t = [], []
    for target_name in args.imdb_name_target:
        imdb_t, roidb_t, ratio_list_t, ratio_index_t = combined_roidb(target_name)
        m_imdb_t.append(imdb_t)
        tgt_sampler = sampler(len(roidb_t), batch_size)
        # NOTE(review): the *source* imdb's class count is used for target
        # datasets — presumably source/target label spaces match; confirm.
        dataset_t = roibatchLoader(roidb_t, ratio_list_t, ratio_index_t,
                                   batch_size, imdb.num_classes,
                                   training=True, is_bgr=is_bgr)
        m_dataloader_t.append(
            torch.utils.data.DataLoader(dataset_t,
                                        batch_size=batch_size,
                                        sampler=tgt_sampler,
                                        num_workers=num_workers))
    return dataloader_s, m_dataloader_t, imdb, m_imdb_t
Beispiel #2
0
def init_dataloaders_1s_1t(args,
                           batch_size,
                           num_workers,
                           is_bgr=True,
                           is_train=True):
    """Build a source dataloader and, optionally, a single target dataloader.

    The target pair is created only when ``args.dataset_t`` is a non-empty
    string; otherwise ``dataloader_t`` and ``imdb_t`` come back as ``None``.

    Args:
        args: parsed options; reads ``args.imdb_name``, ``args.dataset_t``
            and ``args.imdb_name_target``.
        batch_size: samples per batch for both dataloaders.
        num_workers: worker processes per dataloader.
        is_bgr: channel-order flag forwarded to ``roibatchLoader``.
        is_train: forwarded as the ``training`` flag of ``roibatchLoader``.

    Returns:
        ``(dataloader_s, dataloader_t, imdb, imdb_t)``.
    """
    # Source domain.
    imdb, roidb, ratio_list, ratio_index = combined_roidb(args.imdb_name)
    src_sampler = sampler(len(roidb), batch_size)
    dataset_s = roibatchLoader(roidb, ratio_list, ratio_index, batch_size,
                               imdb.num_classes, training=is_train,
                               is_bgr=is_bgr)
    dataloader_s = torch.utils.data.DataLoader(dataset_s,
                                               batch_size=batch_size,
                                               sampler=src_sampler,
                                               num_workers=num_workers)

    # Target domain — optional.
    dataloader_t, imdb_t = None, None
    if args.dataset_t != "":
        imdb_t, roidb_t, ratio_list_t, ratio_index_t = combined_roidb(
            args.imdb_name_target)
        tgt_sampler = sampler(len(roidb_t), batch_size)
        # NOTE(review): target loader reuses the *source* imdb.num_classes —
        # presumably the two label spaces match; confirm upstream.
        dataset_t = roibatchLoader(roidb_t, ratio_list_t, ratio_index_t,
                                   batch_size, imdb.num_classes,
                                   training=is_train, is_bgr=is_bgr)
        dataloader_t = torch.utils.data.DataLoader(dataset_t,
                                                   batch_size=batch_size,
                                                   sampler=tgt_sampler,
                                                   num_workers=num_workers)
    return dataloader_s, dataloader_t, imdb, imdb_t
Beispiel #3
0
def init_dataloaders_1s_mixed_mt(args, batch_size, num_workers):
    """Build a source dataloader and ONE mixed dataloader over all targets.

    All target datasets are concatenated into a single
    ``torch.utils.data.ConcatDataset`` served by one sampler, so batches may
    mix samples from different target domains.

    Args:
        args: parsed options; reads ``args.imdb_name`` and
            ``args.imdb_name_target`` (iterable of target dataset names).
        batch_size: samples per batch for both dataloaders.
        num_workers: worker processes per dataloader.

    Returns:
        ``(dataloader_s, dataloader_t, imdb)`` — note only the source imdb
        is returned; target imdbs are discarded after loading.
    """
    # Source domain.
    imdb, roidb, ratio_list, ratio_index = combined_roidb(args.imdb_name)
    src_sampler = sampler(len(roidb), batch_size)
    dataset_s = roibatchLoader(roidb, ratio_list, ratio_index, batch_size,
                               imdb.num_classes, training=True)
    dataloader_s = torch.utils.data.DataLoader(dataset_s,
                                               batch_size=batch_size,
                                               sampler=src_sampler,
                                               num_workers=num_workers)

    # Collect every target dataset; track the combined sample count so the
    # shared sampler spans the whole concatenation.
    target_datasets = []
    total_target_size = 0
    for target_name in args.imdb_name_target:
        imdb_t, roidb_t, ratio_list_t, ratio_index_t = combined_roidb(target_name)
        total_target_size += len(roidb_t)
        # NOTE(review): uses the *source* imdb.num_classes for targets —
        # presumably label spaces match across domains; confirm.
        target_datasets.append(
            roibatchLoader(roidb_t, ratio_list_t, ratio_index_t, batch_size,
                           imdb.num_classes, training=True))

    mixed_sampler = sampler(total_target_size, batch_size)
    mixed_dataset = torch.utils.data.ConcatDataset(target_datasets)
    dataloader_t = torch.utils.data.DataLoader(mixed_dataset,
                                               batch_size=batch_size,
                                               sampler=mixed_sampler,
                                               num_workers=num_workers)

    return dataloader_s, dataloader_t, imdb
Beispiel #4
0
    # NOTE(review): fragment of a larger function — its `def` header lies
    # above this excerpt. It wires up paired source/target dataloaders.

    # Source dataset: roidb entries plus aspect-ratio ordering metadata.
    imdb, roidb, ratio_list, ratio_index = combined_roidb(args.imdb_name)
    train_size = len(roidb)
    # Target dataset, loaded identically from args.imdb_name_target.
    imdb_t, roidb_t, ratio_list_t, ratio_index_t = combined_roidb(
        args.imdb_name_target)
    train_size_t = len(roidb_t)

    print('{:d} source roidb entries'.format(len(roidb)))
    print('{:d} target roidb entries'.format(len(roidb_t)))

    # Ensure the output/checkpoint directory exists before training starts.
    output_dir = args.save_dir + "/" + args.net + "/" + args.dataset
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # One batch sampler per domain, each sized to its own roidb.
    sampler_batch = sampler(train_size, args.batch_size)
    sampler_batch_t = sampler(train_size_t, args.batch_size)

    # NOTE(review): both loaders below use the *source* imdb.num_classes —
    # presumably source/target label spaces match; confirm upstream.
    dataset_s = roibatchLoader(roidb, ratio_list, ratio_index, args.batch_size, \
                               imdb.num_classes, training=True)

    dataloader_s = torch.utils.data.DataLoader(dataset_s,
                                               batch_size=args.batch_size,
                                               sampler=sampler_batch,
                                               num_workers=args.num_workers)
    dataset_t = roibatchLoader(roidb_t, ratio_list_t, ratio_index_t, args.batch_size, \
                               imdb.num_classes, training=True)
    dataloader_t = torch.utils.data.DataLoader(dataset_t,
                                               batch_size=args.batch_size,
                                               sampler=sampler_batch_t,
                                               num_workers=args.num_workers)