Example #1
 def __init__(self, data, name=None, root=None, *args, **kwargs):
   """ Dataset builder constructor.
   Args:
     data Dataset string name, (infinite) generator instance (that will be used to generate samples), or any other instance (that will then be fed as the only sample)
     name Optional user-defined dataset name, to attach to some error messages for debugging purposes
     root Dataset cache root directory to use, None for default (only relevant if 'data' is a dataset name)
     ...  Forwarded (keyword-)arguments to the dataset constructor, ignored if 'data' is not a string
   Raises:
     'TypeError' if some of the given (keyword-)arguments cannot be used to call the dataset constructor
   """
   # Handle different dataset types
   if isinstance(data, str): # Load sampler from available datasets
     if name is None:
       name = data
     datasets = type(self)._get_datasets()
     build = datasets.get(name, None)
     if build is None:
       raise tools.UnavailableException(datasets, name, what="dataset name")
     root = root or type(self).get_default_root()
     self._iter = build(root=root, *args, **kwargs)
   elif isinstance(data, types.GeneratorType): # Forward sampling to custom generator
     if name is None:
       name = "<generator>"
     self._iter = data
   else: # Single-batch dataset of any value
     if name is None:
       name = "<single-batch>"
     def single_batch():
       while True:
         yield data
     self._iter = single_batch()
   # Finalization
   self.name = name
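A minimal usage sketch of the three input paths; the wrapper class name 'Dataset' and the dataset name "mnist" are assumptions, not taken from the snippet:
  # Hypothetical usage (class and dataset names assumed)
  ds_named = Dataset("mnist", root="/tmp/datasets")  # resolved via type(self)._get_datasets()
  ds_gen   = Dataset(sample_generator())             # custom (infinite) generator, assumed defined
  ds_const = Dataset(42)                             # any other value: yielded as the only sample, forever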
Example #2
 def __init__(self, name_build, *args, **kwargs):
     """ Loss constructor.
     Args:
       name_build Loss name or constructor function
       ...        Additional (keyword-)arguments forwarded to the constructor
     """
     # Reserved custom initialization
     if name_build is type(self).__reserved_init:
         self._loss = args[0]
         self._fact = args[1]
         self._name = args[2]
         return
     # Recover name/constructor
     if callable(name_build):
         name = tools.fullqual(name_build)
         build = name_build
     else:
         losses = type(self)._get_losses()
         name = str(name_build)
         build = losses.get(name, None)
         if build is None:
             raise tools.UnavailableException(losses, name, what="loss name")
     # Build loss
     loss = build(*args, **kwargs)
     # Finalization
     self._loss = loss
     self._fact = None
     self._name = name
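A usage sketch under the assumption that the enclosing class is named 'Loss' and that "nll" is a registered loss name; both are assumptions:
  # Hypothetical usage (class and loss names assumed)
  loss_a = Loss("nll")             # looked up in type(self)._get_losses()
  loss_b = Loss(torch.nn.MSELoss)  # any callable is taken as the constructor itself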
Example #3
 def make_init(name, args):
   inits = type(self)._get_inits()
   func = inits.get(name, None)
   if func is None:
     raise tools.UnavailableException(inits, name, what="initializer name")
   args = dict() if args is None else args
   def init(params):
     return func(params, **args)
   return init
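A sketch of how this helper is typically applied from inside the enclosing constructor; the initializer name "kaiming_uniform_" and the 'model' variable are assumptions:
  # Hypothetical usage (initializer name assumed to be registered)
  init = make_init("kaiming_uniform_", {"nonlinearity": "relu"})
  for param in model.parameters():
    if param.dim() > 1:
      init(param)  # equivalent to func(param, nonlinearity="relu")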
Example #4
 def default(self, name, new=None, erase=False):
   """ Get and/or set the named default.
   Args:
     name  Name of the default
      new   Optional new instance, set only if it is not 'None' or 'erase' is 'True'
     erase Force the replacement by 'None'
   Returns:
     (Old) value of the default
   """
   # Check existence
   if name not in self._defaults:
     raise tools.UnavailableException(self._defaults, name, what="model default")
   # Get current
   old = self._defaults[name]
   # Set if needed
   if erase or new is not None:
     self._defaults[name] = new
   # Return current/old
   return old
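A sketch of the three call patterns; 'model' standing for an instance of the enclosing class is an assumption, while the "loss" slot does appear in the '_defaults' dictionaries of Examples #5 and #9:
  # Hypothetical usage
  current  = model.default("loss")                 # pure read: 'new' is None, 'erase' is False
  previous = model.default("loss", new_loss)       # swap in a new default, get the old one back
  previous = model.default("loss", erase=True)     # force the slot back to None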
Example #5
 def __init__(self, name_build, config=Configuration(), *args, **kwargs):
   """ Model builder constructor.
   Args:
     name_build Model name or constructor function
     config     Configuration to use for the parameter tensors
     ...        Additional (keyword-)arguments forwarded to the constructor
   Notes:
     If possible, data parallelism is enabled automatically
   """
   # Recover name/constructor
   if callable(name_build):
     name  = tools.fullqual(name_build)
     build = name_build
   else:
     models = type(self)._get_models()
     name  = str(name_build)
     build = models.get(name, None)
     if build is None:
       raise tools.UnavailableException(models, name, what="model name")
   # Build model
   with torch.no_grad():
     model = build(*args, **kwargs)
     if not isinstance(model, torch.nn.Module):
       raise tools.UserException("Expected built model %r to be an instance of 'torch.nn.Module', found %r instead" % (name, getattr(type(model), "__name__", "<unknown>")))
     model = model.to(**config)
     device = config["device"]
     if device.type == "cuda" and device.index is None: # Model is on GPU and not explicitly restricted to one particular card => enable data parallelism
       model = torch.nn.DataParallel(model)
   params = tools.flatten(model.parameters()) # NOTE: Ordering across runs/nodes seems to be ensured (i.e. only dependent on the model constructor)
   # Finalization
   self._model    = model
   self._name     = name
   self._config   = config
   self._params   = params
   self._gradient = None
   self._defaults = {
     "trainset":  None,
     "testset":   None,
     "loss":      None,
     "criterion": None,
     "optimizer": None }
Example #6
 def __init__(self, name_build, model, *args, **kwargs):
   """ Optimizer constructor.
   Args:
     name_build Optimizer name or constructor function
     model      Model to optimize
     ...        Additional (keyword-)arguments forwarded to the constructor
   """
   # Recover name/constructor
   if callable(name_build):
     name  = tools.fullqual(name_build)
     build = name_build
   else:
     optims = type(self)._get_optimizers()
     name   = str(name_build)
     build  = optims.get(name, None)
     if build is None:
       raise tools.UnavailableException(optims, name, what="optimizer name")
   # Build optimizer
   optim = build(model._model.parameters(), *args, **kwargs)
   # Finalization
   self._optim = optim
   self._name  = name
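A usage sketch; the wrapper class name 'Optimizer' and the registered name "sgd" are assumptions. Extra (keyword-)arguments are forwarded after the model's parameter list:
  # Hypothetical usage (class and optimizer names assumed)
  optim_a = Optimizer("sgd", model, lr=0.01, momentum=0.9)  # by registered name
  optim_b = Optimizer(torch.optim.Adam, model)              # by constructor function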
Example #7
 def __init__(self, name_build, *args, **kwargs):
     """ Criterion constructor.
     Args:
       name_build Criterion name or constructor function
       ...        Additional (keyword-)arguments forwarded to the constructor
     """
     # Recover name/constructor
     if callable(name_build):
         name = tools.fullqual(name_build)
         build = name_build
     else:
         crits = type(self)._get_criterions()
         name = str(name_build)
         build = crits.get(name, None)
         if build is None:
             raise tools.UnavailableException(crits, name, what="criterion name")
     # Build criterion
     crit = build(*args, **kwargs)
     # Finalization
     self._crit = crit
     self._name = name
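A usage sketch assuming the class is named 'Criterion' and that "top-k" is a registered criterion name; both are assumptions:
  # Hypothetical usage (class and criterion names assumed)
  crit_a = Criterion("top-k", k=5)  # by registered name, kwargs forwarded
  crit_b = Criterion(my_accuracy)   # any callable is taken as the constructor (assumed defined)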
Example #8
    def __init__(self,
                 data,
                 name=None,
                 ds_args=(),
                 ds_kwargs={},
                 ld_args=(),
                 ld_kwargs={}):
        """ Dataset builder constructor.
    Args:
      data       Dataset string name, 'torch.utils.data.Dataset' instance, 'torch.utils.data.DataLoader' instance, or any other instance (that will then be fed as the only batch)
      name       Optional user-defined dataset name, to attach to some error messages for debugging purpose
      ds_args    Arguments forwarded to the dataset constructor, ignored if 'name_ds_ld' is not a string
      ds_kwargs  Keyword-arguments forwarded to the dataset constructor, ignored if 'name_ds_ld' is not a string
      ld_args    Arguments forwarded to the loader constructor, ignored if 'name_ds_ld' is not a string or a Dataset instance
      ld_kwargs  Keyword-arguments forwarded to the loader constructor, ignored if 'name_ds_ld' is not a string or a Dataset instance
    Raises:
      'TypeError' if the some of the given (keyword-)arguments cannot be used to call the dataset or loader constructor or the batch loader
    """
        # Pre-handling: instantiate the dataset from its registered name
        if isinstance(data, str):
            if name is None:
                name = data
            datasets = type(self)._get_datasets()
            build = datasets.get(name, None)
            if build is None:
                raise tools.UnavailableException(datasets, name, what="dataset name")
            data = build(*ds_args, **ds_kwargs)
            assert isinstance(data, torch.utils.data.Dataset), \
                "Internal heuristic failed: %r was not a dataset name" % name
        # Pre-handling: wrap a 'torch.utils.data.Dataset' instance in a loader
        if isinstance(data, torch.utils.data.Dataset):
            self.dataset = data
            data = torch.utils.data.DataLoader(data, *ld_args, **ld_kwargs)
        else:
            self.dataset = None
        # Handle different dataset types
        if isinstance(data, torch.utils.data.DataLoader):  # Data loader for sampling
            if name is None:
                name = "<custom loader>"
            self._loader = data
            self._iter = None
        elif isinstance(data, types.GeneratorType):  # Forward sampling to custom generator
            if name is None:
                name = "<generator>"
            self._iter = data
        else:  # Single-batch dataset of any value
            if name is None:
                name = "<single-batch>"

            def single_batch():
                while True:
                    yield data

            self._iter = single_batch()
        # Finalization
        self.name = name
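A sketch of the three input paths this variant accepts; the class name 'Dataset' and the dataset name "mnist" are assumptions, the torch types are standard:
  # Hypothetical usage (class and dataset names assumed)
  ds_a = Dataset("mnist", ld_kwargs={"batch_size": 32})          # name -> Dataset -> DataLoader
  ds_b = Dataset(my_torch_dataset, ld_kwargs={"batch_size": 32}) # Dataset instance -> DataLoader
  ds_c = Dataset((inputs, targets))                              # anything else: repeated single batch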
Example #9
 def __init__(self, name_build, config=Configuration(), init_multi=None, init_multi_args=None, init_mono=None, init_mono_args=None, *args, **kwargs):
   """ Model builder constructor.
   Args:
     name_build      Model name or constructor function
     config          Configuration to use for the parameter tensors
     init_multi      Weight initialization algorithm name, or initialization function, for tensors of dimension >= 2
      init_multi_args Additional keyword-arguments for 'init_multi', if 'init_multi' is specified as a name
     init_mono       Weight initialization algorithm name, or initialization function, for tensors of dimension == 1
      init_mono_args  Additional keyword-arguments for 'init_mono', if 'init_mono' is specified as a name
     ...             Additional (keyword-)arguments forwarded to the constructor
   Notes:
     If possible, data parallelism is enabled automatically
   """
   def make_init(name, args):
     inits = type(self)._get_inits()
     func = inits.get(name, None)
     if func is None:
       raise tools.UnavailableException(inits, name, what="initializer name")
     args = dict() if args is None else args
     def init(params):
       return func(params, **args)
     return init
   # Recover name/constructor
   if callable(name_build):
     name  = tools.fullqual(name_build)
     build = name_build
   else:
     models = type(self)._get_models()
     name  = str(name_build)
     build = models.get(name, None)
     if build is None:
       raise tools.UnavailableException(models, name, what="model name")
   # Recover initialization algorithms
   if isinstance(init_multi, str):
     init_multi = make_init(init_multi, init_multi_args)
   if isinstance(init_mono, str):
     init_mono = make_init(init_mono, init_mono_args)
   # Build model
   with torch.no_grad():
     model = build(*args, **kwargs)
     if not isinstance(model, torch.nn.Module):
       raise tools.UserException(f"Expected built model {name!r} to be an instance of 'torch.nn.Module', found {getattr(type(model), '__name__', '<unknown>')!r} instead")
     # Initialize parameters
     for param in model.parameters():
       if len(param.shape) > 1: # Multi-dimensional
         if init_multi is not None:
           init_multi(param)
       else: # Mono-dimensional
         if init_mono is not None:
           init_mono(param)
     # Move parameters to target device
     model = model.to(**config)
     device = config["device"]
     if device.type == "cuda" and device.index is None: # Model is on GPU and not explicitly restricted to one particular card => enable data parallelism
       model = torch.nn.DataParallel(model)
   params = tools.flatten(model.parameters()) # NOTE: Ordering across runs/nodes seems to be ensured (i.e. only dependent on the model constructor)
   # Finalization
   self._model    = model
   self._name     = name
   self._config   = config
   self._params   = params
   self._gradient = None
   self._defaults = {
     "trainset":  None,
     "testset":   None,
     "loss":      None,
     "criterion": None,
     "optimizer": None }