def _getitem_cluster(self, item):
        """Build one C-way K-shot episode using cluster pseudo-labels.

        The query is the sample at ``item``; its ``num_shot`` "positive"
        support samples are drawn from the same cluster, and the remaining
        ``num_shot * (num_way - 1)`` supports are drawn from all other
        samples.

        Returns:
            (task_data, task_label, task_index): stacked support+query
            images, a 0/1 tensor marking which support positions share the
            query's cluster, and the dataset indices of all task images.
        """
        # Current sample: (index, label, image filename).
        now_label_image_tuple = self.data_list[item]
        now_index, _, now_image_filename = now_label_image_tuple

        # Sample num_shot indices from the query's cluster.
        # NOTE(review): random.sample raises ValueError if the cluster has
        # fewer than num_shot members — presumably clusters are large
        # enough; confirm upstream.
        now_cluster = self.cluster_list[item]
        now_label_k_shot_index = random.sample(
            list(
                np.squeeze(np.argwhere(self.cluster_list == now_cluster),
                           axis=1)), self.num_shot)

        # Other samples: everything except the query itself.
        # NOTE(review): assumes one[0] equals the position in data_list, so
        # remove(now_index) drops exactly the query — verify against loader.
        index_list = list(range(len(self.data_list)))
        index_list.remove(now_index)
        other_label_k_shot_index_list = random.sample(
            index_list, self.num_shot * (self.num_way - 1))

        c_way_k_shot_index_list = now_label_k_shot_index + other_label_k_shot_index_list
        random.shuffle(c_way_k_shot_index_list)

        # Defensive resample: retry with a random item if the episode is
        # malformed (should not happen given the sampling above).
        if len(c_way_k_shot_index_list) != self.num_shot * self.num_way:
            return self.__getitem__(
                random.sample(list(range(0, len(self.data_list))), 1)[0])

        #######################################################################################
        query_list = [now_label_image_tuple]
        support_list = [
            self.data_list[index] for index in c_way_k_shot_index_list
        ]
        task_list = support_list + query_list

        # Load each image (one[2] is the filename) and add a batch dim.
        support_data = [
            torch.unsqueeze(MyDataset.read_image(one[2], self.transform_train),
                            dim=0) for one in support_list
        ]
        query_data = [
            torch.unsqueeze(MyDataset.read_image(one[2], self.transform_train),
                            dim=0) for one in query_list
        ]
        task_data = torch.cat(support_data + query_data)
        #######################################################################################

        # 1 where the support index was drawn from the query's cluster.
        task_label = torch.Tensor([
            int(index in now_label_k_shot_index)
            for index in c_way_k_shot_index_list
        ])
        task_index = torch.Tensor([one[0] for one in task_list]).long()
        return task_data, task_label, task_index
Example #2
0
def final_eval(gpu_id, name, mn_checkpoint, dataset_name, is_conv_4,
               test_episode=1000, result_dir="result", split=MyDataset.dataset_split_test, ways_and_shots=None):
    """Evaluate a trained matching network over several way/shot settings.

    When ``ways_and_shots`` is None, sweeps the dataset's default list of
    ways (at 1 shot) and then its shots (at 5 ways); otherwise evaluates
    exactly the given (way, shot) pairs. Each result is printed and
    appended to the config's log file.
    """
    config = Config(gpu_id, dataset_name=dataset_name, is_conv_4=is_conv_4,
                    name=name, mn_checkpoint=mn_checkpoint, result_dir=result_dir, split=split)
    runner = Runner(config=config)

    runner.load_model()
    runner.matching_net.eval()
    image_features = runner.get_features()
    test_tool_fsl = runner.get_test_tool(image_features=image_features)

    if ways_and_shots is not None:
        # Explicit (way, shot) pairs supplied by the caller.
        for pos, (way, shot) in enumerate(ways_and_shots):
            Tools.print("{}/{} way={} shot={}".format(pos, len(ways_and_shots), way, shot))
            acc, conf = test_tool_fsl.eval(num_way=way, num_shot=shot, episode_size=15,
                                           test_episode=test_episode, split=split)
            Tools.print("way={},shot={},acc={},con={}".format(way, shot, acc, conf), txt_path=config.log_file)
            pass
    else:
        ways, shots = MyDataset.get_ways_shots(dataset_name=dataset_name, split=split)
        # Sweep ways at a fixed single shot.
        for pos, way in enumerate(ways):
            Tools.print("{}/{} way={}".format(pos, len(ways), way))
            acc, conf = test_tool_fsl.eval(num_way=way, num_shot=1, episode_size=15,
                                           test_episode=test_episode, split=split)
            Tools.print("way={},shot=1,acc={},con={}".format(way, acc, conf), txt_path=config.log_file)
        # Sweep shots at a fixed 5-way setting.
        for pos, shot in enumerate(shots):
            Tools.print("{}/{} shot={}".format(pos, len(shots), shot))
            acc, conf = test_tool_fsl.eval(num_way=5, num_shot=shot, episode_size=15,
                                           test_episode=test_episode, split=split)
            Tools.print("way=5,shot={},acc={},con={}".format(shot, acc, conf), txt_path=config.log_file)

    pass
    def __init__(self):
        """Wire up the training pipeline: data, model, loss/optimizer, evaluator."""
        # Training split wrapped in the episodic few-shot dataset.
        self.data_train = MyDataset.get_data_split(Config.data_root,
                                                   split="train")
        self.task_train = RandomAndCssDataset(
            self.data_train, Config.num_way, Config.num_shot,
            Config.transform_train, Config.transform_test)
        self.task_train_loader = DataLoader(
            self.task_train, Config.batch_size, shuffle=True,
            num_workers=Config.num_workers)

        # Backbone network on GPU with fresh weight init.
        self.net = RunnerTool.to_cuda(Config.net)
        RunnerTool.to_cuda(self.net.apply(RunnerTool.weights_init))
        self.norm = Normalize(2)

        # MSE loss and SGD optimizer.
        self.loss = RunnerTool.to_cuda(nn.MSELoss())
        self.net_optim = torch.optim.SGD(
            self.net.parameters(), lr=Config.learning_rate,
            momentum=0.9, weight_decay=5e-4)

        # Few-shot evaluation helper driven by self.matching_test.
        self.test_tool_fsl = FSLTestTool(
            self.matching_test,
            data_root=Config.data_root,
            num_way=Config.num_way_test,
            num_shot=Config.num_shot,
            episode_size=Config.episode_size,
            test_episode=Config.test_episode,
            transform=self.task_train.transform_test)
        pass
Example #4
0
def final_eval(gpu_id,
               name,
               mn_checkpoint,
               dataset_name,
               is_conv_4,
               test_episode=1000):
    """Evaluate a trained matching network across the dataset's default
    ways (at 1 shot) and shots (at 5 ways), logging each result."""
    config = Config(gpu_id, dataset_name=dataset_name, is_conv_4=is_conv_4,
                    name=name, mn_checkpoint=mn_checkpoint)
    runner = Runner(config=config)
    runner.load_model()
    runner.matching_net.eval()

    ways, shots = MyDataset.get_ways_shots(dataset_name=dataset_name)
    # Sweep ways at a fixed single shot.
    for pos, way in enumerate(ways):
        Tools.print("{}/{} way={}".format(pos, len(ways), way))
        acc, conf = runner.test_tool_fsl.eval(
            num_way=way, num_shot=1, episode_size=15,
            test_episode=test_episode)
        Tools.print("way={},shot=1,acc={},con={}".format(way, acc, conf),
                    txt_path=config.log_file)
    # Sweep shots at a fixed 5-way setting.
    for pos, shot in enumerate(shots):
        Tools.print("{}/{} shot={}".format(pos, len(shots), shot))
        acc, conf = runner.test_tool_fsl.eval(
            num_way=5, num_shot=shot, episode_size=15,
            test_episode=test_episode)
        Tools.print("way=5,shot={},acc={},con={}".format(shot, acc, conf),
                    txt_path=config.log_file)
    pass
    def _getitem_random_and_css(self, item):
        """Build one C-way K-shot episode for the "random"/"css" baselines.

        The query is the sample at ``item``. Supports are
        ``num_shot * num_way`` random samples; under the "css" baseline the
        query itself is forced into the support set, so exactly one support
        position is a true positive.

        Returns:
            (task_data, task_label, task_index): stacked support+query
            images, a 0/1 tensor marking the designated positive support,
            and the dataset indices of all task images.
        """
        # Current sample: (index, label, image filename).
        now_label_image_tuple = self.data_list[item]
        now_index, _, now_image_filename = now_label_image_tuple

        # Other samples: everything except the query itself.
        # NOTE(review): assumes one[0] equals the position in data_list, so
        # remove(now_index) drops exactly the query — verify against loader.
        index_list = list(range(len(self.data_list)))
        index_list.remove(now_index)
        c_way_k_shot_index_list = random.sample(index_list,
                                                self.num_shot * self.num_way)

        # "css": force the query into the supports so it has one true
        # match; for "random" the "positive" is an arbitrary sample.
        if Config.baseline_type == "css":
            c_way_k_shot_index_list[0] = now_index
        label_index = c_way_k_shot_index_list[0]
        random.shuffle(c_way_k_shot_index_list)

        # Defensive resample: retry with a random item if the episode is
        # malformed (should not happen given the sampling above).
        if len(c_way_k_shot_index_list) != self.num_shot * self.num_way:
            return self.__getitem__(
                random.sample(list(range(0, len(self.data_list))), 1)[0])

        #######################################################################################
        query_list = [now_label_image_tuple]
        support_list = [
            self.data_list[index] for index in c_way_k_shot_index_list
        ]
        task_list = support_list + query_list

        # Load each image (one[2] is the filename) and add a batch dim.
        support_data = [
            torch.unsqueeze(MyDataset.read_image(one[2], self.transform_train),
                            dim=0) for one in support_list
        ]
        query_data = [
            torch.unsqueeze(MyDataset.read_image(one[2], self.transform_train),
                            dim=0) for one in query_list
        ]
        task_data = torch.cat(support_data + query_data)
        #######################################################################################

        # 1 at the single support position holding the designated positive.
        task_label = torch.Tensor(
            [int(index == label_index) for index in c_way_k_shot_index_list])
        task_index = torch.Tensor([one[0] for one in task_list]).long()
        return task_data, task_label, task_index
Example #6
0
    def __init__(self,
                 gpu_id=1,
                 name=None,
                 is_conv_4=True,
                 mn_checkpoint=None,
                 dataset_name=MyDataset.dataset_name_miniimagenet,
                 is_check=False):
        """Evaluation config: GPU selection, backbone, dataset paths, log file."""
        self.gpu_id = gpu_id
        os.environ["CUDA_VISIBLE_DEVICES"] = str(self.gpu_id)

        # Run identity and fixed evaluation hyper-parameters.
        self.name = name
        self.is_conv_4 = is_conv_4
        self.dataset_name = dataset_name
        self.num_way = 5
        self.num_shot = 1
        self.num_workers = 8
        self.episode_size = 15
        self.test_episode = 600
        self.mn_checkpoint = mn_checkpoint

        ###############################################################################################
        # Backbone choice; Conv-4 pairs with a larger batch than ResNet-12.
        if self.is_conv_4:
            self.matching_net = C4Net(hid_dim=64, z_dim=64)
            self.batch_size = 64
        else:
            self.matching_net = ResNet12Small(avg_pool=True, drop_rate=0.1)
            self.batch_size = 32
        ###############################################################################################

        # "Check" mode skips log-file and path setup entirely.
        self.is_check = is_check
        if self.is_check:
            self.log_file = None
            return

        self.log_file = Tools.new_dir(
            os.path.join(
                "../models_abl/{}/mn/result".format(self.dataset_name),
                "{}_{}.txt".format(self.name, Tools.get_format_time())))

        ###############################################################################################
        # Dataset location and the test-time transform.
        self.is_png = True
        self.data_root = MyDataset.get_data_root(
            dataset_name=self.dataset_name, is_png=self.is_png)
        _, _, self.transform_test = MyTransforms.get_transform(
            dataset_name=self.dataset_name, has_ic=True,
            is_fsl_simple=True, is_css=False)
        ###############################################################################################
        pass
Example #7
0
class Config(object):
    """Static config for feature extraction: GPU, dataset and output paths."""
    gpu_id = 3
    os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id)

    is_png = True

    # dataset_name = "miniimagenet"
    # dataset_name = "tieredimagenet"
    dataset_name = MyDataset.dataset_name_omniglot

    data_root = MyDataset.get_data_root(dataset_name=dataset_name,
                                        is_png=is_png)
    Tools.print(data_root)

    # Features are written next to the data root under "<root>_feature".
    features_save_path = Tools.new_dir("{}_feature".format(data_root))
    Tools.print(features_save_path)
    pass
Example #8
0
    def get_features(self):
        """Run every image of the configured split through the matching net
        and return a dict mapping image name -> feature (numpy array)."""
        Tools.print("get_features")

        features = {}
        with torch.no_grad():
            self.matching_net.eval()

            # Deterministic test-time transform for the configured split.
            _, _, transform_test = MyTransforms.get_transform(
                dataset_name=self.config.dataset_name, has_ic=True,
                is_fsl_simple=True, is_css=False)
            data_test = MyDataset.get_data_split(
                self.config.data_root, split=self.config.split)
            loader = DataLoader(
                EvalFeatureDataset(data_test, transform_test),
                self.config.batch_size, shuffle=False,
                num_workers=self.config.num_workers)

            # Extract features batch by batch, keyed by image name.
            for image, image_name in tqdm(loader):
                batch_out = self.matching_net(image.cuda()).data.cpu().numpy()
                features.update(zip(image_name, batch_out))
        return features
class Config(object):
    """Static config for feature extraction: GPU, loader sizes, dataset paths."""
    gpu_id = 3
    os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id)

    num_workers = 8
    batch_size = 8

    is_png = True

    # dataset_name = "miniimagenet"
    # dataset_name = "tieredimagenet"
    dataset_name = CommonMyDataset.dataset_name_omniglot

    data_root = CommonMyDataset.get_data_root(dataset_name=dataset_name,
                                              is_png=is_png)
    _, _, transform = MyTransforms.get_transform(dataset_name=dataset_name,
                                                 omniglot_size=32)
    Tools.print(data_root)

    # Features are written next to the data root under "<root>_feature".
    features_save_path = Tools.new_dir("{}_feature".format(data_root))
    Tools.print(features_save_path)
    pass
Example #10
0
    def __init__(self,
                 gpu_id=1,
                 dataset_name=MyDataset.dataset_name_miniimagenet,
                 is_conv_4=True,
                 is_res34=True,
                 is_modify_head=True):
        """Training config: GPU, hyper-parameters, IC/FSL backbones,
        dataset transforms and checkpoint/log paths."""
        self.gpu_id = gpu_id
        os.environ["CUDA_VISIBLE_DEVICES"] = str(self.gpu_id)

        self.dataset_name = dataset_name
        self.is_conv_4 = is_conv_4
        self.is_res34 = is_res34
        self.modify_head = is_modify_head

        # Fixed training/eval hyper-parameters.
        self.num_workers = 8
        self.num_way = 5
        self.num_shot = 1
        self.val_freq = 10
        self.episode_size = 15
        self.test_episode = 600
        self.ic_out_dim = 512
        self.ic_ratio = 1
        self.learning_rate = 0.01
        self.loss_fsl_ratio = 1.0
        self.loss_ic_ratio = 1.0

        ###############################################################################################
        # Schedule: total epochs plus milestones used by the LR adjuster.
        self.train_epoch = 1500
        self.first_epoch, self.t_epoch = 300, 200
        self.adjust_learning_rate = RunnerTool.adjust_learning_rate1
        ###############################################################################################

        ###############################################################################################
        # Dataset location and the three transforms (IC-train, FSL-train, test).
        self.is_png = True
        self.data_root = MyDataset.get_data_root(
            dataset_name=self.dataset_name, is_png=self.is_png)
        self.transform_train_ic, self.transform_train_fsl, self.transform_test = MyTransforms.get_transform(
            dataset_name=self.dataset_name,
            has_ic=True,
            is_fsl_simple=True,
            is_css=False)

        # IC backbone: ResNet-34 or ResNet-18, optionally with a modified head.
        if self.is_res34:
            self.resnet = resnet34
            self.ic_net_name = "res34{}".format(
                "_head" if self.modify_head else "")
        else:
            self.resnet = resnet18
            self.ic_net_name = "res18{}".format(
                "_head" if self.modify_head else "")
            pass

        # FSL backbone: Conv-4 pairs with a larger batch than ResNet-12.
        if self.is_conv_4:
            self.matching_net, self.batch_size, self.e_net_name = C4Net(
                hid_dim=64, z_dim=64), 64, "C4"
        else:
            self.matching_net, self.batch_size, self.e_net_name = ResNet12Small(
                avg_pool=True, drop_rate=0.1), 32, "R12S"
        ###############################################################################################

        # Run name encodes all the hyper-parameters above.
        self.model_name = "{}_{}_{}_{}_{}_{}_{}_{}_{}_{}_{}_{}_{}{}".format(
            self.gpu_id, self.ic_net_name, self.e_net_name, self.train_epoch,
            self.batch_size, self.num_way, self.num_shot, self.first_epoch,
            self.t_epoch, self.ic_out_dim, self.ic_ratio, self.loss_fsl_ratio,
            self.loss_ic_ratio, "_png" if self.is_png else "")

        # Checkpoint paths for the matching net (mn) and IC model (ic);
        # the log file sits next to the IC checkpoint.
        self.time = Tools.get_format_time()
        _root_path = "../models_abl/{}/mn".format(self.dataset_name)
        self.mn_dir = "{}/{}_{}_mn.pkl".format(_root_path, self.time,
                                               self.model_name)
        self.ic_dir = "{}/{}_{}_ic.pkl".format(_root_path, self.time,
                                               self.model_name)
        self.log_file = self.ic_dir.replace(".pkl", ".txt")

        Tools.print(self.data_root, txt_path=self.log_file)
        Tools.print(self.model_name, txt_path=self.log_file)
        Tools.print(self.mn_dir, txt_path=self.log_file)
        Tools.print(self.ic_dir, txt_path=self.log_file)
        pass
Example #11
0
    def __init__(self, config):
        """Assemble the joint IC + few-shot training pipeline from ``config``:
        data loader, pseudo-label producer, both models, optimizers,
        losses, and the two evaluation helpers."""
        self.config = config

        # Training split wrapped in the episodic dataset with the three
        # configured transforms.
        self.data_train = MyDataset.get_data_split(
            self.config.data_root, split=MyDataset.dataset_split_train)
        self.task_train = TrainDataset(
            self.data_train,
            self.config.num_way,
            self.config.num_shot,
            transform_train_ic=self.config.transform_train_ic,
            transform_train_fsl=self.config.transform_train_fsl,
            transform_test=self.config.transform_test)
        self.task_train_loader = DataLoader(
            self.task_train,
            self.config.batch_size,
            True,
            num_workers=self.config.num_workers)

        # IC: pseudo-class producer shared with the dataset so samples carry
        # their current cluster assignment.
        self.produce_class = ProduceClass(len(self.data_train),
                                          self.config.ic_out_dim,
                                          self.config.ic_ratio)
        self.produce_class.init()
        self.task_train.set_samples_class(self.produce_class.classes)

        # Models on GPU with fresh weight init.
        self.norm = Normalize(2)
        self.matching_net = RunnerTool.to_cuda(self.config.matching_net)
        self.ic_model = RunnerTool.to_cuda(
            ICResNet(low_dim=self.config.ic_out_dim,
                     resnet=self.config.resnet,
                     modify_head=self.config.modify_head))
        RunnerTool.to_cuda(self.matching_net.apply(RunnerTool.weights_init))
        RunnerTool.to_cuda(self.ic_model.apply(RunnerTool.weights_init))

        # One SGD optimizer per model, same schedule hyper-parameters.
        self.matching_net_optim = torch.optim.SGD(
            self.matching_net.parameters(),
            lr=self.config.learning_rate,
            momentum=0.9,
            weight_decay=5e-4)
        self.ic_model_optim = torch.optim.SGD(self.ic_model.parameters(),
                                              lr=self.config.learning_rate,
                                              momentum=0.9,
                                              weight_decay=5e-4)

        # Losses: cross-entropy for IC, MSE for the few-shot matching head.
        self.ic_loss = RunnerTool.to_cuda(nn.CrossEntropyLoss())
        self.fsl_loss = RunnerTool.to_cuda(nn.MSELoss())

        # Evaluation helpers for the few-shot task and the IC model.
        self.test_tool_fsl = FSLTestTool(
            self.matching_test,
            data_root=self.config.data_root,
            num_way=self.config.num_way,
            num_shot=self.config.num_shot,
            episode_size=self.config.episode_size,
            test_episode=self.config.test_episode,
            transform=self.task_train.transform_test,
            txt_path=self.config.log_file)
        self.test_tool_ic = ICTestTool(
            feature_encoder=None,
            ic_model=self.ic_model,
            data_root=self.config.data_root,
            batch_size=self.config.batch_size,
            num_workers=self.config.num_workers,
            ic_out_dim=self.config.ic_out_dim,
            transform=self.task_train.transform_test,
            txt_path=self.config.log_file)
        pass
class Config(object):
    """Static training config for the baseline runs (random / css / cluster).

    Class-level code runs at import time: it pins the GPU, builds the
    network, transforms, and model/data paths, and prints them.
    """
    gpu_id = 3
    os.environ["CUDA_VISIBLE_DEVICES"] = str(gpu_id)

    num_workers = 16

    # NOTE(review): num_way and num_way_test are re-assigned in the active
    # (omniglot) section below; these first values are effectively dead.
    num_way = 5
    num_way_test = 5
    num_shot = 1
    learning_rate = 0.01

    episode_size = 15
    test_episode = 600

    # batch_size = 64
    # train_epoch = 1700
    # first_epoch, t_epoch = 500, 200
    # adjust_learning_rate = RunnerTool.adjust_learning_rate1

    is_png = True

    ###############################################################################################
    # Episode-construction baseline; "cluster" additionally needs the
    # precomputed cluster assignments (cluster_path below).
    baseline_type_list = ["random", "css", "cluster"]
    # baseline_type = "css"
    # baseline_type = "random"
    baseline_type = "cluster"
    ###############################################################################################

    ###############################################################################################
    # val_freq = 10
    # dataset_name = "miniimagenet"
    # train_epoch = 300
    # first_epoch, t_epoch = 100, 100
    # adjust_learning_rate = RunnerTool.adjust_learning_rate2
    # net, net_name, batch_size = C4Net(hid_dim=64, z_dim=64, has_norm=False), "conv4", 64

    # val_freq = 10
    # dataset_name = "miniimagenet"
    # train_epoch = 150
    # first_epoch, t_epoch = 80, 40
    # adjust_learning_rate = RunnerTool.adjust_learning_rate2
    # net, net_name, batch_size = ResNet12Small(avg_pool=True, drop_rate=0.1), "res12", 32
    ###############################################################################################

    ###############################################################################################
    # val_freq = 5
    # dataset_name = "tieredimagenet"
    # train_epoch = 150
    # first_epoch, t_epoch = 80, 40
    # adjust_learning_rate = RunnerTool.adjust_learning_rate2
    # net, net_name, batch_size = C4Net(hid_dim=64, z_dim=64, has_norm=False), "conv4", 64

    # val_freq = 2
    # dataset_name = "tieredimagenet"
    # # train_epoch = 50
    # # first_epoch, t_epoch = 30, 10
    # train_epoch = 30
    # first_epoch, t_epoch = 16, 8
    # adjust_learning_rate = RunnerTool.adjust_learning_rate2
    # net, net_name, batch_size = ResNet12Small(avg_pool=True, drop_rate=0.1), "res12", 32
    ###############################################################################################

    ###############################################################################################
    # Active configuration: omniglot with a Conv-4 backbone.
    val_freq = 10
    num_way = 10
    num_way_test = 5
    dataset_name = "omniglot"
    train_epoch = 300
    first_epoch, t_epoch = 200, 50
    adjust_learning_rate = RunnerTool.adjust_learning_rate2
    net, net_name, batch_size = C4Net(hid_dim=64, z_dim=64,
                                      has_norm=False), "conv4", 64
    ###############################################################################################

    transform_train, transform_test = MyTransforms.get_transform(
        dataset_name=dataset_name,
        has_ic=False,
        is_fsl_simple=False,
        is_css=baseline_type == "css")

    # Run name encodes the hyper-parameters; checkpoint path derives from it.
    model_name = "{}_{}_{}_{}_{}_{}_{}_{}_{}{}".format(
        gpu_id, baseline_type, net_name, train_epoch, batch_size, num_way,
        num_shot, first_epoch, t_epoch, "_png" if is_png else "")
    net_dir = Tools.new_dir("../models_baseline/{}/{}/{}.pkl".format(
        dataset_name, baseline_type, model_name))

    data_root = MyDataset.get_data_root(dataset_name=dataset_name,
                                        is_png=is_png)
    # Cluster baseline reads precomputed pseudo-labels from the feature dir.
    if baseline_type == "cluster":
        cluster_path = os.path.join("{}_feature".format(data_root),
                                    "train_cluster.pkl")

    Tools.print(model_name)
    Tools.print(net_dir)
    Tools.print(data_root)
    pass