def __init__(self, prm):
        """Record the data-source configuration and pre-compute the
        meta-train / meta-test split for the selected dataset.

        Args:
            prm: configuration object providing ``data_source``,
                ``data_transform`` and ``data_path`` attributes, plus
                ``chars_split_type`` / ``n_meta_train_chars`` for Omniglot.

        Raises:
            Exception: if ``prm.data_source`` is not a recognized dataset.
        """
        # NOTE(review): this definition is shadowed by a later duplicate
        # ``__init__`` in this file; only the later one takes effect.
        # Consider deleting one of the two.
        self.data_source = prm.data_source
        self.data_transform = prm.data_transform
        self.data_path = prm.data_path

        if self.data_source == 'Omniglot':
            # Randomly split the characters into meta-train and meta-test;
            # tasks are later generated from these characters.
            self.chars_splits = omniglot.split_chars(prm.data_path, prm.chars_split_type, prm.n_meta_train_chars)

        elif self.data_source == 'SmallImageNet':
            self.class_split = imagenet_data.split_classes(prm)

        else:
            # Fail loudly on an unrecognized dataset instead of silently
            # leaving the split attributes unset (matches the behavior of
            # the duplicate definition later in this file).
            raise Exception("No such dataset")
    def __init__(self, prm):

        self.data_source = prm.data_source
        self.data_transform = prm.data_transform
        self.data_path = prm.data_path

        if self.data_source == 'Omniglot':
            # Randomly split the characters to meta-train and meta-test
            # Later, tasks will be generated using this characters
            self.chars_splits = omniglot.split_chars(prm.data_path,
                                                     prm.chars_split_type,
                                                     prm.n_meta_train_chars)

        #elif self.data_source == 'SmallImageNet':
        #    self.class_split = imagenet_data.split_classes(prm)

        elif self.data_source == 'Caltech256':
            self.class_split = Caltech256.split_classes()

        elif self.data_source == 'CIFAR100':
            self.class_split = cifar100.split_classes()

        else:
            raise Exception("No such dataset")