Example #1
    def __init__(
        self,
        meta_dataset: ClfMetaDataset,
        sampler_config: Dict[str, Any],
        num_query_shots: int = 1,
        num_support_shots: int = 1,
        max_labeled_points: Optional[int] = None,
        init_labeled_points: Optional[int] = None,
        name: Optional[str] = None,
        **_unused_kwargs,
    ):
        super(LimitedSupervisedTaskDistribution, self).__init__(
            meta_dataset=meta_dataset,
            num_query_shots=num_query_shots,
            sampler=samplers.get(**sampler_config),
            name=(name or self.__class__.__name__),
        )
        self.num_support_shots = num_support_shots
        self.max_labeled_points = max_labeled_points
        self.init_labeled_points = init_labeled_points

        # The seeds must be set globally.
        self._rng = np.random

        # Internals.
        self.num_requested_labels = None
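
For orientation, a minimal construction sketch follows. Only the keyword names come from the signature above; `my_meta_dataset` and all concrete values are illustrative assumptions (the `sampler_config` dict is unpacked into `samplers.get`):

# Hypothetical usage sketch; all concrete values are assumptions.
task_dist = LimitedSupervisedTaskDistribution(
    meta_dataset=my_meta_dataset,  # an already-built ClfMetaDataset (assumed)
    sampler_config={"name": "uniform", "stratified": True},
    num_query_shots=15,
    num_support_shots=5,
    max_labeled_points=1000,  # cap on requested labels (inferred from the name)
    init_labeled_points=100,  # labels available up front (inferred from the name)
    name="limited_supervised_train",
)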
Example #2
    def __init__(
        self,
        meta_dataset: ClfMetaDataset,
        num_query_shots: int = 1,
        num_support_shots: int = 1,
        num_task_batches_to_cache: int = 100,
        name: Optional[str] = None,
        **_unused_kwargs,
    ):
        super(ClassicSupervisedTaskDistribution, self).__init__(
            meta_dataset=meta_dataset,
            num_query_shots=num_query_shots,
            num_support_shots=num_support_shots,
            sampler=samplers.get(name="uniform", stratified=True),
            name=(name or self.__class__.__name__),
        )
        self.num_task_batches_to_cache = num_task_batches_to_cache
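
A corresponding sketch for the classic distribution, under the same assumptions:

# Hypothetical usage sketch; values are illustrative.
task_dist = ClassicSupervisedTaskDistribution(
    meta_dataset=my_meta_dataset,
    num_query_shots=15,
    num_support_shots=5,
    num_task_batches_to_cache=100,  # default from the signature above
)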
Example #3
def build_and_initialize(cfg, mode=common.ModeKeys.TRAIN):
    """Builds and initializes all parts of the graph.

    Parameters
    ----------
    cfg : OmegaConf
        The experiment configuration.

    mode : str, optional (default: common.ModeKeys.TRAIN)
        Defines the mode of the computation graph (TRAIN or EVAL).
        Note: this is likely to be removed from the API down the line.

    Returns
    -------
    meta_learner
        The meta-learner built by `adaptation.get`. The task distributions
        are built and initialized as a side effect.
    """
    sess = tf.get_default_session()

    # Build the data source.
    data_source = datasets.get_data_source(name=cfg.data.name,
                                           **cfg.data.source).build()

    # Build meta-datasets.
    meta_datasets = {}
    for set_name in set(task.set_name for task in cfg[mode].tasks):
        meta_datasets[set_name] = datasets.get_meta_dataset(
            name=cfg.data.name,
            data_sources=data_source[set_name],
            **cfg[mode].meta_dataset,
        ).build()

    # Build task distributions.
    task_dists = []
    for task in cfg[mode].tasks:
        task_dist = tasks.get_distribution(
            meta_dataset=meta_datasets[task.set_name],
            name_suffix=f"{task.set_name}_{task.regime}",
            sampler_config=task.sampler,
            **task.config,
        ).build()
        task_dists.append(task_dist)

    # Build model.
    network_builder = networks.get(**cfg.network)
    model_builder = models.get(
        input_shapes=data_source.data_shapes,
        input_types=data_source.data_types,
        num_classes=cfg[mode].meta_dataset.num_classes,
        network_builder=network_builder,
        **cfg[mode].model,
    )

    # Build optimizer.
    optimizer = optimizers.get(**cfg.train.optimizer)

    # Build meta-learner.
    meta_learner = adaptation.get(
        model_builder=model_builder,
        optimizer=optimizer,
        task_dists=task_dists,
        mode=mode,
        **cfg[mode].adapt,
    )

    # Variable initialization.
    if mode == common.ModeKeys.TRAIN:
        # Initialize all the variables in the graph.
        sess.run(tf.global_variables_initializer())
    else:  # mode == common.ModeKeys.EVAL:
        # Initialize only non-trainable variables.
        # Note: Trainable variables must be loaded from a checkpoint.
        #       Being explicit about which variables are initialized prevents
        #       weird side effects from variables that are silently created
        #       and initialized at evaluation time.
        sess.run(
            tf.variables_initializer(meta_learner.non_trainable_parameters))

    # Initialize task distributions.
    for task, task_dist in zip(cfg[mode].tasks, task_dists):
        sampler = None
        if task.sampler is not None:
            sampler = samplers.get(**task.sampler)
            sampler.build(task_dist=task_dist, meta_learner=meta_learner)
        task_dist.initialize(sampler=sampler)

    return meta_learner
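
Because this variant fetches the session via `tf.get_default_session()`, it must run inside a default-session scope. A minimal driver sketch, assuming the configuration is loaded with OmegaConf and that `common` and this function are importable; the config path is hypothetical:

import tensorflow as tf
from omegaconf import OmegaConf

cfg = OmegaConf.load("experiment.yaml")  # hypothetical config path
with tf.Session() as sess:  # the `with` block installs `sess` as the default session
    meta_learner = build_and_initialize(cfg, mode=common.ModeKeys.TRAIN)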
Example #4
def build_and_initialize(cfg, sess, categories, mode=common.ModeKeys.TRAIN):
    """Builds and initializes all parts of the graph.

    Parameters
    ----------
    cfg : OmegaConf
        The experiment configuration.

    sess : tf.Session
        The TF session used for executing the computation graph.

    categories : dict of lists of Categories
        Each list of Categories is used to construct meta-datasets.

    mode : str, optional (default: common.ModeKeys.TRAIN)
        Defines the mode of the computation graph (TRAIN or EVAL).
        Note: this is likely to be removed from the API down the line.

    Returns
    -------
    exp : Experiment
        An object that represents the experiment.
        Contains `meta_learners`, `samplers`, and `task_dists`.
    """
    # Build and initialize data pools.
    data_pools = {
        task.set_name: datasets.get_datapool(
            dataset_name=cfg.data.name,
            categories=categories[task.set_name],
            name=f"DP_{task.log_dir.replace('/', '_')}",
        )
        .build(**cfg.data.build_config)
        .initialize(sess)
        for task in cfg[mode].tasks
    }

    # Build meta-datasets.
    meta_datasets = {
        task.set_name: datasets.get_metadataset(
            dataset_name=cfg.data.name,
            data_pool=data_pools[task.set_name],
            batch_size=cfg[mode].meta.batch_size,
            name=f"MD_{task.log_dir.replace('/', '_')}",
            **cfg[mode].dataset,
        ).build()
        for task in cfg[mode].tasks
    }

    # Build model.
    model = models.get(
        dataset_name=cfg.data.name,
        num_classes=cfg[mode].dataset.num_classes,
        **cfg.model,
    )

    # Build optimizer.
    optimizer = optimizers.get(**cfg.train.optimizer)

    # Build task distributions.
    task_dists = [
        tasks.get_distribution(
            meta_dataset=meta_datasets[task.set_name],
            name_suffix=task.log_dir.replace("/", "_"),
            **task.config,
        )
        for task in cfg[mode].tasks
    ]

    # Build meta-learners.
    meta_learners = [
        adaptation.get(
            model=model,
            optimizer=optimizer,
            mode=mode,
            tasks=task_dists[i].task_batch,
            **cfg.adapt,
        )
        for i, task in enumerate(cfg[mode].tasks)
    ]

    # Build samplers.
    samplers_list = [
        samplers.get(
            learner=meta_learners[i], tasks=task_dists[i].task_batch, **task.sampler
        )
        for i, task in enumerate(cfg[mode].tasks)
    ]

    # Run global init.
    sess.run(tf.global_variables_initializer())

    # Initialize task distributions.
    for task_dist, sampler in zip(task_dists, samplers_list):
        task_dist.initialize(sampler=sampler, sess=sess)

    return Experiment(
        meta_learners=meta_learners, samplers=samplers_list, task_dists=task_dists
    )
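
This variant threads the session and the category splits explicitly rather than relying on the default session. A minimal driver sketch under the same assumptions as above; `load_categories` is a hypothetical helper, since the construction of `categories` is not shown:

import tensorflow as tf
from omegaconf import OmegaConf

cfg = OmegaConf.load("experiment.yaml")  # hypothetical config path
categories = load_categories(cfg)  # hypothetical helper: set_name -> list of Categories
with tf.Session() as sess:
    exp = build_and_initialize(cfg, sess, categories, mode=common.ModeKeys.TRAIN)
    # exp.meta_learners, exp.samplers, and exp.task_dists are ready to use.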
Example #5
    def initialize(self, **_unused_kwargs):
        """Initializes the task distribution using a uniform sampler."""
        sampler = samplers.get(name="uniform", stratified=self.stratified)
        super(ClassicSupervisedTaskDistribution, self).initialize(
            sampler=sampler.build(task_dist=self)
        )
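
The override takes no arguments of its own: it constructs a uniform sampler (stratified according to `self.stratified`), builds it against this distribution, and forwards it to the base-class `initialize`. A minimal call sketch, with an illustrative constructor argument:

# Hypothetical usage sketch.
task_dist = ClassicSupervisedTaskDistribution(meta_dataset=my_meta_dataset)
task_dist.initialize()  # builds samplers.get(name="uniform", ...) internally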