Example no. 1
 def __init__(self, name_build, *args, **kwargs):
     """ Loss constructor.
 Args:
   name_build Loss name or constructor function
   ...        Additional (keyword-)arguments forwarded to the constructor
 """
     # Reserved custom initialization
     if name_build is type(self).__reserved_init:
         self._loss = args[0]
         self._fact = args[1]
         self._name = args[2]
         return
     # Recover name/constructor
     if callable(name_build):
         name = tools.fullqual(name_build)
         build = name_build
     else:
         losses = type(self)._get_losses()
         name = str(name_build)
         build = losses.get(name, None)
         if build is None:
             raise tools.UnavailableException(losses,
                                              name,
                                              what="loss name")
     # Build loss
     loss = build(*args, **kwargs)
     # Finalization
     self._loss = loss
     self._fact = None
     self._name = name
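
The constructor above accepts either a registered loss name or a constructor callable. A minimal, self-contained sketch of the same name-or-constructor resolution, using a hypothetical hard-coded registry in place of the dynamic _get_losses() lookup:

  import torch

  # Hypothetical registry; the real class discovers the available losses via _get_losses()
  _losses = {"nll": torch.nn.NLLLoss, "mse": torch.nn.MSELoss}

  def resolve_loss(name_build, *args, **kwargs):
      # A callable is taken as the constructor itself
      if callable(name_build):
          build = name_build
      # Otherwise the name is looked up in the registry
      else:
          build = _losses.get(str(name_build))
          if build is None:
              raise KeyError("unknown loss name %r" % (name_build,))
      return build(*args, **kwargs)

  loss_a = resolve_loss("mse")            # by registered name
  loss_b = resolve_loss(torch.nn.L1Loss)  # by constructor directly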
Example no. 2
 def include_single(self, data, key, col, err=None, lalp=1., ccnt=None):
     """ Add one line with column of the given data frame, can only be done before finalization.
 Args:
   data Session or dataframe holding the column(s) to add
   key  Displayed name (in the key)
   col  Single column name to include
   err  Optional associated error column name
   lalp Line alpha level
   ccnt Color and linestyle number to use
 Returns:
   self
 """
     # Assert not already finalized
     if self._fin:
         raise RuntimeError(
             "Plot is already finalized and cannot include another line")
     # Recover the dataframe if a session was given
     if isinstance(data, Session):
         data = data.data
     elif not isinstance(data, pandas.DataFrame):
         raise RuntimeError(
             "Expected a Session or DataFrame for 'data', got a %r" %
             tools.fullqual(type(data)))
     # Get the x-axis values
     if self._idx is None:
         x = data.index.to_numpy()
     else:
         if self._idx not in data:
             raise RuntimeError(
                 "No column named %r to use as index in the given session/dataframe"
                 % (self._idx, ))
         x = data[self._idx].to_numpy()
     # Pick a new line style and color
     linestyle, color = self._get_line_style(
         self._cnt if ccnt is None else ccnt)
     # Plot the data (line and error line)
     davg = data[col].to_numpy()
     derr = None if err is None else data[err].to_numpy()
     axis = self._get_ax(col)
     if derr is not None:
         axis.fill_between(x,
                           davg - derr,
                           davg + derr,
                           facecolor=color,
                           alpha=0.2)
     axis.plot(x,
               davg,
               label=key,
               linestyle=linestyle,
               color=color,
               alpha=lalp)
     # Increase the counter only on success
     self._cnt += 1
     # Return self for chaining
     return self
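
Under the hood the error rendering is matplotlib's fill_between (a shaded band at mean ± error) with the mean plotted on top. A standalone sketch of that idiom, with made-up data in place of the dataframe columns:

  import numpy
  from matplotlib import pyplot

  x    = numpy.arange(10)
  davg = numpy.sqrt(x)             # hypothetical mean values
  derr = numpy.full(x.shape, 0.2)  # hypothetical error values

  fig, axis = pyplot.subplots()
  # Semi-transparent band between mean - err and mean + err
  axis.fill_between(x, davg - derr, davg + derr, facecolor="tab:blue", alpha=0.2)
  # Mean line drawn on top of the band
  axis.plot(x, davg, label="accuracy", linestyle="-", color="tab:blue", alpha=1.)
  axis.legend()
  fig.savefig("line-with-error-band.png")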
Example no. 3
 def _prepare(self, instance):
   """ Prepare the given instance for checkpointing.
   Args:
     instance Instance to snapshot/restore
   Returns:
     Checkpoint-able instance, key for the associated storage
   """
   # Recover instance's class
   cls = type(instance)
   # Transfer if available
   if cls in self._transfers:
     res = self._transfers[cls](instance)
   else:
     res = instance
   # Assert the instance is checkpoint-able
   for prop in ("state_dict", "load_state_dict"):
     if not callable(getattr(res, prop, None)):
       raise tools.UserException(f"Given instance {instance!r} is not checkpoint-able (missing callable member {prop!r})")
   # Return the instance and the associated storage key
   return res, tools.fullqual(cls)
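
Being checkpoint-able is purely duck-typed: after the optional transfer, the object only has to expose callable state_dict and load_state_dict members, which torch modules and optimizers already do. A small sketch of a custom class satisfying that protocol (the class is illustrative, not part of the library):

  class Counter:
      # Minimal checkpoint-able object: exposes state_dict/load_state_dict
      def __init__(self):
          self.value = 0
      def state_dict(self):
          return {"value": self.value}
      def load_state_dict(self, state):
          self.value = state["value"]

  ctr = Counter()
  ctr.value = 42
  snapshot = ctr.state_dict()    # what a snapshot would store
  ctr.value = 0
  ctr.load_state_dict(snapshot)  # what a restore would call; ctr.value is 42 again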
Example no. 4
 def __init__(self, name_build, config=Configuration(), *args, **kwargs):
   """ Model builder constructor.
   Args:
     name_build Model name or constructor function
     config     Configuration to use for the parameter tensors
     ...        Additional (keyword-)arguments forwarded to the constructor
   Notes:
     If possible, data parallelism is enabled automatically
   """
   # Recover name/constructor
   if callable(name_build):
     name  = tools.fullqual(name_build)
     build = name_build
   else:
     models = type(self)._get_models()
     name  = str(name_build)
     build = models.get(name, None)
     if build is None:
       raise tools.UnavailableException(models, name, what="model name")
   # Build model
   with torch.no_grad():
     model = build(*args, **kwargs)
     if not isinstance(model, torch.nn.Module):
       raise tools.UserException("Expected built model %r to be an instance of 'torch.nn.Module', found %r instead" % (name, getattr(type(model), "__name__", "<unknown>")))
     model = model.to(**config)
     device = config["device"]
     if device.type == "cuda" and device.index is None: # Model is on GPU and not explicitly restricted to one particular card => enable data parallelism
       model = torch.nn.DataParallel(model)
   params = tools.flatten(model.parameters()) # NOTE: Ordering across runs/nodes seems to be ensured (i.e. only dependent on the model constructor)
   # Finalization
   self._model    = model
   self._name     = name
   self._config   = config
   self._params   = params
   self._gradient = None
   self._defaults = {
     "trainset":  None,
     "testset":   None,
     "loss":      None,
     "criterion": None,
     "optimizer": None }
Example no. 5
 def __init__(self, name_build, model, *args, **kwargs):
   """ Optimizer constructor.
   Args:
     name_build Optimizer name or constructor function
     model      Model to optimize
     ...        Additional (keyword-)arguments forwarded to the constructor
   """
   # Recover name/constructor
   if callable(name_build):
     name  = tools.fullqual(name_build)
     build = name_build
   else:
     optims = type(self)._get_optimizers()
     name   = str(name_build)
     build  = optims.get(name, None)
     if build is None:
       raise tools.UnavailableException(optims, name, what="optimizer name")
   # Build optimizer
   optim = build(model._model.parameters(), *args, **kwargs)
   # Finalization
   self._optim = optim
   self._name  = name
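
The wrapped optimizer is always built over the model's parameters, with the remaining arguments forwarded verbatim (learning rate, momentum, ...). Outside the wrapper classes the equivalent plain-torch call is simply:

  import torch

  model = torch.nn.Linear(4, 1)
  # Assuming the name "sgd" is registered to torch.optim.SGD, the wrapper call
  # Optimizer("sgd", wrapped_model, lr=0.01) reduces to:
  optim = torch.optim.SGD(model.parameters(), lr=0.01)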
Example no. 6
 def restore(self, instance, nothrow=False):
     """ Restore the snapshot for a given instance, warn if restoring a reference.
 Args:
   instance Instance to restore
   nothrow  Do not raise exception if no snapshot available for the instance
 Returns:
   self
 """
     instance, key = type(self)._prepare(instance)
     # Restore the state dictionary
     if key in self._store:
         instance.load_state_dict(self._store[key])
         # Check if restoring a reference
         if __debug__ and not self._copied[key]:
             tools.warning(
                 "Restoring a state dictionary reference in an instance of %s; the resulting behavior may not be the one expected"
                 % tools.fullqual(type(instance)))
     elif not nothrow:
         raise tools.UserException(
             "No snapshot for %r is available in the checkpoint" % key)
     # Enable chaining
     return self
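
The reference warning exists because state_dict() on a torch module returns tensors that alias the live parameters: a snapshot taken without a deep copy silently tracks later in-place updates. A short sketch of the difference:

  import copy
  import torch

  model = torch.nn.Linear(2, 2)
  snapshot_ref  = model.state_dict()                 # aliases the live parameter tensors
  snapshot_copy = copy.deepcopy(model.state_dict())  # decoupled from the model
  with torch.no_grad():
      model.weight.zero_()
  # snapshot_ref["weight"] is now all zeros too; snapshot_copy still holds the old values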
Example no. 7
 def __init__(self, name_build, *args, **kwargs):
     """ Criterion constructor.
 Args:
   name_build Criterion name or constructor function
   ...        Additional (keyword-)arguments forwarded to the constructor
 """
     # Recover name/constructor
     if callable(name_build):
         name = tools.fullqual(name_build)
         build = name_build
     else:
         crits = type(self)._get_criterions()
         name = str(name_build)
         build = crits.get(name, None)
         if build is None:
             raise tools.UnavailableException(crits,
                                              name,
                                              what="criterion name")
     # Build criterion
     crit = build(*args, **kwargs)
     # Finalization
     self._crit = crit
     self._name = name
Example no. 8
 def __init__(self, name_build, config=Configuration(), init_multi=None, init_multi_args=None, init_mono=None, init_mono_args=None, *args, **kwargs):
   """ Model builder constructor.
   Args:
     name_build      Model name or constructor function
     config          Configuration to use for the parameter tensors
     init_multi      Weight initialization algorithm name, or initialization function, for tensors of dimension >= 2
      init_multi_args Additional keyword-arguments for 'init_multi', if 'init_multi' specified as a name
     init_mono       Weight initialization algorithm name, or initialization function, for tensors of dimension == 1
     init_mono_args  Additional keyword-arguments for 'init_mono', if 'init_mono' specified as a name
     ...             Additional (keyword-)arguments forwarded to the constructor
   Notes:
     If possible, data parallelism is enabled automatically
   """
   def make_init(name, args):
     inits = type(self)._get_inits()
     func = inits.get(name, None)
     if func is None:
       raise tools.UnavailableException(inits, name, what="initializer name")
     args = dict() if args is None else args
     def init(params):
       return func(params, **args)
     return init
   # Recover name/constructor
   if callable(name_build):
     name  = tools.fullqual(name_build)
     build = name_build
   else:
     models = type(self)._get_models()
     name  = str(name_build)
     build = models.get(name, None)
     if build is None:
       raise tools.UnavailableException(models, name, what="model name")
   # Recover initialization algorithms
   if isinstance(init_multi, str):
     init_multi = make_init(init_multi, init_multi_args)
   if isinstance(init_mono, str):
     init_mono = make_init(init_mono, init_mono_args)
   # Build model
   with torch.no_grad():
     model = build(*args, **kwargs)
     if not isinstance(model, torch.nn.Module):
       raise tools.UserException(f"Expected built model {name!r} to be an instance of 'torch.nn.Module', found {getattr(type(model), '__name__', '<unknown>')!r} instead")
     # Initialize parameters
     for param in model.parameters():
       if len(param.shape) > 1: # Multi-dimensional
         if init_multi is not None:
           init_multi(param)
       else: # Mono-dimensional
         if init_mono is not None:
           init_mono(param)
     # Move parameters to target device
     model = model.to(**config)
     device = config["device"]
     if device.type == "cuda" and device.index is None: # Model is on GPU and not explicitly restricted to one particular card => enable data parallelism
       model = torch.nn.DataParallel(model)
   params = tools.flatten(model.parameters()) # NOTE: Ordering across runs/nodes seems to be ensured (i.e. only dependent on the model constructor)
   # Finalization
   self._model    = model
   self._name     = name
   self._config   = config
   self._params   = params
   self._gradient = None
   self._defaults = {
     "trainset":  None,
     "testset":   None,
     "loss":      None,
     "criterion": None,
     "optimizer": None }
Example no. 9
 def include(self, data, *cols, errs=None, lalp=1., ccnt=None):
     """ Add the columns of the given data frame, can only be done before finalization.
 Args:
   data Session or dataframe holding the column(s) to add
   cols Column name(s) to include, mix selected columns together (same y-axis)
   errs Error suffix: for every selected column's real label, if a columns with 'real_label + errs' exists, it is used to display error bars
   lalp Line alpha level
   ccnt Color and linestyle number to use
 Returns:
   self
 """
     # Assert not already finalized
     if self._fin:
         raise RuntimeError(
             "Plot is already finalized and cannot include another line")
     # Recover the dataframe if a session was given
     if isinstance(data, Session):
         data = data.data
     elif not isinstance(data, pandas.DataFrame):
         raise RuntimeError(
             "Expected a Session or DataFrame for 'data', got a %r" %
             tools.fullqual(type(data)))
     # Get the x-axis values
     if self._idx is None:
         x = data.index.to_numpy()
     else:
         if self._idx not in data:
             raise RuntimeError(
                 "No column named %r to use as index in the given session/dataframe"
                 % (self._idx, ))
         x = data[self._idx].to_numpy()
     # Select semantic: empty list = select all
     if len(cols) == 0:
         cols = data.columns.to_list()
     # For every selection
     axis = None
     for col in cols:
         # Get associated data
         subd = select(data, col)
         # For every selected column
         for scol in subd:
             # Ignore index column
             if self._idx is not None and scol == self._idx:
                 continue
              # Ignore error columns, i.e. columns named 'base + errs' whose base column is also selected
              if errs is not None and scol.endswith(errs) and scol[:-len(errs)] in subd:
                  continue
             # Get associated axis (if not done yet)
             if axis is None:
                 axis = self._get_ax(col)
             # Pick a new line style and color
             linestyle, color = self._get_line_style(
                 self._cnt if ccnt is None else ccnt)
             # Plot the data (line or error line)
             davg = subd[scol].to_numpy()
             errn = None if errs is None else (scol + errs)
             if errn is not None and errn in data:
                 derr = data[errn].to_numpy()
                 axis.fill_between(x,
                                   davg - derr,
                                   davg + derr,
                                   facecolor=color,
                                   alpha=0.2)
             axis.plot(x,
                       davg,
                       label=scol,
                       linestyle=linestyle,
                       color=color,
                       alpha=lalp)
             # Increase the counter only on success
             self._cnt += 1
         # Reset axis for next iteration
         axis = None
     # Return self for chaining
     return self
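
Error columns are matched purely by name suffix: a column is treated as the error series of its base column when stripping errs yields another selected column, and is then skipped as a line of its own. A standalone sketch of that selection with a made-up frame and errs = '_err':

  import pandas

  data = pandas.DataFrame({"step":     [0, 1, 2],
                           "loss":     [1.0, 0.6, 0.4],
                           "loss_err": [0.1, 0.05, 0.03]})
  errs = "_err"
  for scol in data.columns:
      if scol == "step":                                     # index column
          continue
      if scol.endswith(errs) and scol[:-len(errs)] in data:  # error column of another line
          continue
      errn = scol + errs
      derr = data[errn].to_numpy() if errn in data else None
      print(scol, "-> error band" if derr is not None else "-> no error band")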