def test_slicing_list_path_array(list_tasks):
    """Slicing a path-array scenario with an explicit task list yields one task per entry."""
    x_train, y_train = gen_string()
    dummy = InMemoryDataset(x_train, y_train, data_type=TaskType.IMAGE_PATH)
    scenario = ClassIncremental(dummy, increment=1)
    subscenario = create_subscenario(scenario, list_tasks)
    # Bug fix: the failure message was `print(f"...")`, which evaluates to None,
    # so pytest reported "None" on failure. Pass the f-string itself as the message.
    assert subscenario.nb_tasks == len(list_tasks), \
        f"{len(subscenario)} - vs - {len(list_tasks)}"
def test_slicing_list(list_tasks):
    """Slicing an in-memory scenario with an explicit task list yields one task per entry."""
    train = gen_data()
    dummy = InMemoryDataset(*train)
    scenario = ClassIncremental(dummy, increment=1)
    subscenario = create_subscenario(scenario, list_tasks)
    # Bug fix: the failure message was `print(f"...")`, which evaluates to None,
    # so pytest reported "None" on failure. Pass the f-string itself as the message.
    assert subscenario.nb_tasks == len(list_tasks), \
        f"{len(subscenario)} - vs - {len(list_tasks)}"
def sample(self, seed: int = None, nb_tasks: int = None) -> _BaseScenario:
    """Draw a random sub-scenario from the base scenario.

    :param seed: RNG seed; when omitted, a random seed in [0, 10000) is drawn.
    :param nb_tasks: number of tasks to keep; ``None`` keeps every task.
    :return: a scenario built from the (possibly truncated) random task order.
    """
    if seed is None:
        # No seed supplied: draw one so the order is still reproducible downstream.
        seed = np.random.randint(10000)
    ordering = self.get_task_order(seed, nb_tasks)
    return create_subscenario(self.base_scenario, ordering[:nb_tasks])
def test_Online_Fellowship_subscenarios(dataset7c, dataset10c, dataset20c):
    """create_subscenario works on an OnlineFellowship, both truncated and shuffled."""
    scenario = OnlineFellowship([dataset7c, dataset10c, dataset20c])

    # Drop the last task: the sub-scenario shrinks by one and stays fully iterable.
    sub_scenario = create_subscenario(scenario, np.arange(scenario.nb_tasks - 1))
    for taskset in sub_scenario:
        for _ in DataLoader(taskset):
            pass
    assert sub_scenario.nb_tasks == scenario.nb_tasks - 1

    # Shuffle the complete task order: the task count is preserved.
    order = np.arange(scenario.nb_tasks)
    np.random.shuffle(order)
    sub_scenario = create_subscenario(scenario, order)
    for taskset in sub_scenario:
        for _ in DataLoader(taskset):
            pass
    assert sub_scenario.nb_tasks == scenario.nb_tasks
def test_h5dataset_reloading_slow(tmpdir):
    """An H5Dataset written to disk can be reloaded and sliced into sub-scenarios."""
    filename_h5 = os.path.join(tmpdir, "test_h5.hdf5")
    nb_tasks = 5

    cl_dataset = CIFAR100(data_path=DATA_PATH,
                          download=False,
                          train=True,
                          labels_type="category",
                          task_labels="lifelong")
    x, y, t = cl_dataset.get_data()

    # Persist the dataset to the h5 file, then drop the in-memory handle.
    h5dataset = H5Dataset(x, y, t, data_path=filename_h5)
    del h5dataset

    # Rebuild the dataset purely from the file on disk.
    reloaded = H5Dataset(x=None, y=None, t=None, data_path=filename_h5)
    scenario = ContinualScenario(reloaded)
    for taskset in scenario:
        for _ in DataLoader(taskset):
            pass
    assert scenario.nb_tasks == nb_tasks

    # Truncated sub-scenario: one task fewer.
    task_order = np.arange(nb_tasks)
    sub_scenario = create_subscenario(scenario, task_order[:-1])
    assert sub_scenario.nb_tasks == nb_tasks - 1

    # Shuffled sub-scenario: same number of tasks.
    np.random.shuffle(task_order)
    sub_scenario = create_subscenario(scenario, task_order)
    assert sub_scenario.nb_tasks == nb_tasks
def test_sequence_transforms(list_tasks):
    """Per-task transform lists are accepted, and slicing keeps one task per entry."""
    x_train, y_train, t_train = gen_data()
    dummy = InMemoryDataset(x_train, y_train, t_train, data_type=TaskType.IMAGE_PATH)
    nb_task = len(np.unique(y_train))
    # One independent transform list per task (comprehension replaces append loop).
    list_trsfs = [[transforms.RandomAffine(degrees=[0, 90])] for _ in range(nb_task)]
    scenario = ClassIncremental(dummy, increment=1, transformations=list_trsfs)
    subscenario = create_subscenario(scenario, list_tasks)
    # Bug fix: the failure message was `print(f"...")`, which evaluates to None,
    # so pytest reported "None" on failure. Pass the f-string itself as the message.
    assert subscenario.nb_tasks == len(list_tasks), \
        f"{len(subscenario)} - vs - {len(list_tasks)}"
def test_create_subscenario_h5dataset(data, tmpdir):
    """A sub-scenario of an h5-backed scenario drops the excluded task and stays iterable."""
    from continuum.scenarios import create_subscenario

    x_, y_, t_ = data
    h5dataset = H5Dataset(x_, y_, t_, data_path=os.path.join(tmpdir, "test_h5.hdf5"))
    nb_task = len(np.unique(t_))

    scenario = ContinualScenario(h5dataset)
    # Keep all tasks except the last one.
    sub_scenario = create_subscenario(scenario, np.arange(nb_task - 1))
    for taskset in sub_scenario:
        for _ in DataLoader(taskset):
            pass
    assert sub_scenario.nb_tasks == nb_task - 1
def test_create_subscenario_suffle_h5dataset(data, tmpdir):
    """Shuffling the task order of an h5-backed scenario keeps the task count unchanged."""
    x_, y_, t_ = data
    h5dataset = H5Dataset(x_, y_, t_, data_path=os.path.join(tmpdir, "test_h5.hdf5"))
    nb_task = len(np.unique(t_))
    scenario = ContinualScenario(h5dataset)

    order = np.arange(nb_task)
    np.random.shuffle(order)
    sub_scenario = create_subscenario(scenario, order)
    for taskset in sub_scenario:
        for _ in DataLoader(taskset):
            pass
    assert sub_scenario.nb_tasks == nb_task