Example No. 1
0
 def clone(self, first_order=None, allow_unused=None, allow_nograd=None):
     """
     **Description**
     Returns a `MAML`-wrapped copy of the module whose parameters and buffers
     are `torch.clone`d from the original module.
     This implies that back-propagating losses on the cloned module will
     populate the buffers of the original module.
     For more information, refer to learn2learn.clone_module().
     **Arguments**
     * **first_order** (bool, *optional*, default=None) - Whether the clone uses first-
         or second-order updates. Defaults to self.first_order.
     * **allow_unused** (bool, *optional*, default=None) - Whether to allow differentiation
         of unused parameters. Defaults to self.allow_unused.
     * **allow_nograd** (bool, *optional*, default=None) - Whether to allow adaptation with
         parameters that have `requires_grad = False`. Defaults to self.allow_nograd.
     """
     # Any argument left as None falls back to this wrapper's own setting.
     return MAML(
         clone_module(self.module),
         lr=self.lr,
         first_order=self.first_order if first_order is None else first_order,
         allow_unused=self.allow_unused if allow_unused is None else allow_unused,
         allow_nograd=self.allow_nograd if allow_nograd is None else allow_nograd,
     )
Example No. 2
0
 def clone(self):
     """
     **Description**
     Akin to `MAML.clone()` but for MetaSGD: the clone also carries a copied
     set of learnable fast-adaptation learning rates.
     """
     # Clone the module and the per-parameter learning rates separately,
     # then rewrap them in a fresh MetaSGD with the same ordering setting.
     cloned_module = clone_module(self.module)
     cloned_lrs = clone_parameters(self.lrs)
     return MetaSGD(cloned_module, lrs=cloned_lrs, first_order=self.first_order)
Example No. 3
0
    def clone(self, first_order=None):
        """
        **Description**

        Returns a `MAML`-wrapped copy of the module whose parameters and buffers
        are `torch.clone`d from the original module.

        This implies that back-propagating losses on the cloned module will
        populate the buffers of the original module.
        For more information, refer to learn2learn.clone_module().

        **Arguments**

        * **first_order** (bool, *optional*, default=None) - Whether the clone uses first-
            or second-order updates. Defaults to self.first_order.

        """
        # None means "inherit this wrapper's ordering choice".
        order = self.first_order if first_order is None else first_order
        return MAML(clone_module(self.module), lr=self.lr, first_order=order)
Example No. 4
0
 def clone(self):
     """Return a `MetaSGD` wrapper whose module and per-parameter learning
     rates are cloned from this one, keeping the same `first_order` setting.
     """
     return MetaSGD(
         clone_module(self.module),
         lrs=clone_parameters(self.lrs),
         first_order=self.first_order,
     )