Code example #1
# Imports assumed by this excerpt; args, transform_train and utils_pytorch
# are defined elsewhere in the original script.
import os
import numpy as np
import torchvision
## Load unlabeled data from SVHN
svhn_data = torchvision.datasets.SVHN(root=args.OOD_dir,
                                      download=True,
                                      transform=transform_train)
svhn_num = svhn_data.data.shape[0]
svhn_data_copy = svhn_data.data
svhn_labels_copy = svhn_data.labels

# Launch the different runs
for n_run in range(args.nb_runs):
    # Select the order for the class learning
    order_name = "./checkpoint/{}_order_run_{}.pkl".format(args.dataset, n_run)
    print("Order name:{}".format(order_name))
    if os.path.exists(order_name):
        print("Loading orders")
        order = utils_pytorch.unpickle(order_name)
    else:
        print("Generating orders")
        order = np.arange(args.num_classes)
        np.random.shuffle(order)
        utils_pytorch.savepickle(order, order_name)
    order_list = list(order)
    print(order_list)

    start_iter = 0
    for iteration in range(start_iter, int(args.num_classes / args.nb_cl)):
        # Prepare the training data for the current batch of classes
        # (e.g. 100 total classes processed in groups of args.nb_cl = 20)
        actual_cl = order[range(iteration * args.nb_cl,
                                (iteration + 1) * args.nb_cl)]
        indices_train_subset = np.array([
            i in order[range(iteration * args.nb_cl,
                             (iteration + 1) * args.nb_cl)]
            # The source excerpt is truncated here; the mask is presumably built
            # over the full array of training labels (variable name assumed).
            for i in Y_train_total])
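The core of the incremental setup above is the boolean mask that picks out the training samples whose labels fall into the current group of classes. Below is a minimal, self-contained sketch of that pattern; the class count, group size and label array are illustrative stand-ins, not values from the original script.

import numpy as np

num_classes, nb_cl = 10, 2            # illustrative: 10 classes learned in groups of 2
order = np.arange(num_classes)
np.random.shuffle(order)              # random class order, as in the excerpt above

y_train = np.random.randint(0, num_classes, size=20)  # stand-in for the training labels

iteration = 0                         # first incremental step
actual_cl = order[iteration * nb_cl:(iteration + 1) * nb_cl]

# True wherever a sample's label belongs to the current class group
mask = np.isin(y_train, actual_cl)
print(actual_cl, y_train[mask])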
Code example #2
## Load unlabeled data from SVHN
svhn_data = torchvision.datasets.SVHN(root=args.OOD_dir,
                                      download=False,
                                      transform=transform_train)
svhn_num = svhn_data.data.shape[0]
svhn_data_copy = svhn_data.data
svhn_labels_copy = svhn_data.labels

# Launch the different runs
for n_run in range(args.nb_runs):
    # Select the order for the class learning
    order_name = "./checkpoint/{}_order_run_{}.pkl".format(args.dataset, n_run)
    print("Order name:{}".format(order_name))
    if os.path.exists(order_name):
        print("Loading orders")
        order = utils_pytorch.unpickle(order_name)
    else:
        print("Generating orders")
        order = np.arange(args.num_classes)
        np.random.shuffle(order)
        utils_pytorch.savepickle(order, order_name)
    order_list = list(order)
    print(order_list)

    start_iter = 0
    for iteration in range(start_iter, int(args.num_classes / args.nb_cl)):
        # Prepare the training data for the current batch of classes
        actual_cl = order[range(iteration * args.nb_cl,
                                (iteration + 1) * args.nb_cl)]
        indices_train_subset = np.array([
            i in order[range(iteration * args.nb_cl,
                             (iteration + 1) * args.nb_cl)]
            # The source excerpt is truncated here; the mask is presumably built
            # over the full array of training labels (variable name assumed).
            for i in Y_train_total])
Code example #3
# Assumed context for this excerpt: in the full script the parser is created
# earlier and also receives --nb_cl_fg, --nb_cl and --nb_protos (used below).
import argparse
import utils_pytorch  # project-specific helper module
parser = argparse.ArgumentParser()
parser.add_argument('--ckp_prefix',
    default='seed_1993_rs_ratio_0.0_class_incremental_MR_LFAD_cosine_imagenet',
    type=str)
parser.add_argument('--run_id', default=0, type=int,
    help='ID of run')
parser.add_argument('--order',
    default='./checkpoint/seed_1993_subset_100_imagenet_order_run_0.pkl',
    type=str)
parser.add_argument('--DCE', action='store_true',
    help='train with DCE')
parser.add_argument('--top_k', default=10, type=int,
    help='top_k used for DCE')
args = parser.parse_args()
print(args)

order = utils_pytorch.unpickle(args.order)
order_list = list(order)
# order_list = [87, 0, 52, 58, 44, 91, 68, 97, 51, 15, 94, 92, 10, 72, \
# 49, 78, 61, 14, 8, 86, 84, 96, 18, 24, 32, 45, 88, 11, 4, \
# 67, 69, 66, 77, 47, 79, 93, 29, 50, 57, 83, 17, 81, 41, 12, \
# 37, 59, 25, 20, 80, 73, 1, 28, 6, 46, 62, 82, 53, 9, 31, 75,\
# 38, 63, 33, 74, 27, 22, 36, 3, 16, 21, 60, 19, 70, 90, 89, 43,\
# 5, 42, 65, 76, 40, 30, 23, 85, 2, 95, 56, 48, 71, 64, 98, 13, \
# 99, 7, 34, 55, 54, 26, 35, 39]
# order = np.array(order_list)

args.ckp_prefix = '{}_nb_cl_fg_{}_nb_cl_{}_nb_protos_{}'.format(args.ckp_prefix, args.nb_cl_fg, args.nb_cl, args.nb_protos)
if args.DCE:
    args.ckp_prefix += '_top_' + str(args.top_k)
ckp_path = './checkpoint/{}'.format(args.ckp_prefix)
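For reference, this is how the checkpoint prefix and path end up being assembled from the parsed arguments. The numeric values below are assumed for illustration only; the corresponding --nb_cl_fg, --nb_cl and --nb_protos options are defined elsewhere in the full script.

ckp_prefix = 'seed_1993_rs_ratio_0.0_class_incremental_MR_LFAD_cosine_imagenet'
nb_cl_fg, nb_cl, nb_protos, top_k = 50, 10, 20, 10   # assumed example values
ckp_prefix = '{}_nb_cl_fg_{}_nb_cl_{}_nb_protos_{}'.format(ckp_prefix, nb_cl_fg, nb_cl, nb_protos)
ckp_prefix += '_top_' + str(top_k)                   # appended only when --DCE is passed
print('./checkpoint/{}'.format(ckp_prefix))
# ./checkpoint/seed_1993_rs_ratio_0.0_class_incremental_MR_LFAD_cosine_imagenet_nb_cl_fg_50_nb_cl_10_nb_protos_20_top_10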