def _fix_legacy(parameter: Data) -> None:
    """Ugly hack for keeping the legacy behavior: bounds are always considered
    after the exponent, but sampling can still happen "before" the exponent
    (log-uniform).
    """
    from . import _datalayers

    legacy = [x for x in _datalayers.Operation.filter_from(parameter) if x._LEGACY]
    if len(legacy) < 2:
        return
    if len(legacy) > 2:
        raise errors.NevergradRuntimeError(
            "More than 2 legacy layers, this should not happen, open an issue"
        )
    # warnings.warn(
    #     "Setting bounds and exponent through the Array/Scalar API will change behavior "
    #     "(this is an early warning, more on this asap)",
    #     errors.NevergradBehaviorChangesWarning,
    # )  # TODO activate when ready
    value = parameter.value
    layers_inds = tuple(leg._layer_index for leg in legacy)
    if abs(layers_inds[0] - layers_inds[1]) > 1:
        raise errors.NevergradRuntimeError("Non-legacy layers between 2 legacy layers")
    # remove the legacy layers and fix the remaining parameter layers
    parameter._layers = [x for x in parameter._layers if x._layer_index not in layers_inds]
    for k, sub in enumerate(parameter._layers):
        sub._layer_index = k
        sub._layers = parameter._layers
    parameter.value = value
    # identify which legacy layer is the bound and which is the exponent
    bound_ind = int(isinstance(legacy[0], _datalayers.Exponent))
    bound: _datalayers.BoundLayer = legacy[bound_ind]  # type: ignore
    exp: _datalayers.Exponent = legacy[(bound_ind + 1) % 2]  # type: ignore
    # express the bounds in the pre-exponent space
    bound.bounds = tuple(None if b is None else exp.backward(b) for b in bound.bounds)  # type: ignore
    if isinstance(bound, _datalayers.Bound):
        bound = _datalayers.Bound(
            lower=bound.bounds[0],
            upper=bound.bounds[1],
            method=bound._method,
            uniform_sampling=bound.uniform_sampling,
        )
    # re-add the layers, bound first, so that bounds now apply after the exponent
    for layer in (bound, exp):
        layer._layer_index = 0
        layer._layers = [layer]
        parameter.add_layer(layer)
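
# Hedged sketch of the situation this hack handles, assuming nevergrad's public
# API (ng.p.Scalar with set_mutation/set_bounds; exact defaults may differ by
# version): setting both an exponent and bounds stacks two legacy layers, which
# _fix_legacy reorders so that bounds apply after the exponent while sampling
# remains log-uniform.
import nevergrad as ng

param = ng.p.Scalar(init=1.0).set_mutation(exponent=10.0).set_bounds(lower=0.01, upper=100.0)
sampled = param.sample()  # drawn in log space, and still within [0.01, 100.0]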
def copy(self: P) -> P:
    """Creates a full copy of the parameter (with a new unique uid).
    Use spawn_child instead to make sure to add the parenthood information.
    """
    child = super().copy()
    child.uid = uuid.uuid4().hex
    child._frozen = False
    child._subobjects = self._subobjects.new(child)
    child._meta = {}
    child.parents_uids = list(self.parents_uids)
    child.heritage = dict(self.heritage)
    child.loss = None
    child._losses = None
    child._constraint_checkers = list(self._constraint_checkers)
    # layers
    if self is not self._layers[0]:
        raise errors.NevergradRuntimeError("Something has gone horribly wrong with the layers")
    # subparameters
    attribute = self._subobjects.attribute
    container = getattr(child, attribute)
    if attribute != "__dict__":  # make a copy of the container if different from __dict__
        container = dict(container) if isinstance(container, dict) else list(container)
        setattr(child, attribute, container)
    for key, val in self._subobjects.items():
        container[key] = val.copy()
    del child.value  # clear the cached value
    return child
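
# Hedged sketch contrasting copy() with spawn_child(), assuming ng.p.Scalar:
# copy() duplicates the parameter with a fresh uid but copies parents_uids
# as-is, whereas spawn_child() (per the docstring above) records parenthood.
import nevergrad as ng

parent = ng.p.Scalar(init=0.0)
clone = parent.copy()
assert clone.uid != parent.uid
assert clone.parents_uids == parent.parents_uids  # lineage copied, not extended
child = parent.spawn_child()
assert parent.uid in child.parents_uids  # parenthood recorded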
def _layered_get_value(self) -> float:
    out = super()._layered_get_value()  # pulls from the previous layer
    if not isinstance(out, np.ndarray) or not out.size == 1:
        raise errors.NevergradRuntimeError(
            "Scalar casting can only be applied to size=1 Data parameters"
        )
    integer = np.issubdtype(out.dtype, np.integer)
    out = (int if integer else float)(out[0])
    return out  # type: ignore
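
# Hedged sketch, assuming ng.p.Scalar and its set_integer_casting() helper:
# the casting layer above turns the underlying size-1 array into a builtin
# int (when the dtype is integer) or float.
import nevergrad as ng

s = ng.p.Scalar(init=2.7).set_integer_casting()
assert isinstance(s.value, int)  # value goes through the integer branch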
def __call__(self, optimizer: base.Optimizer, *args: tp.Any, **kwargs: tp.Any) -> None:
    if args or kwargs:
        raise errors.NevergradRuntimeError("EarlyStopping must be registered on ask method")
    if self.stopping_criterion(optimizer):
        raise errors.NevergradEarlyStopping("Early stopping criterion is reached")
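
# Hedged usage sketch, assuming nevergrad's callbacks API: the callback is
# registered on "ask" and aborts the run once the criterion returns True
# (the NevergradEarlyStopping it raises is caught by optimizer.minimize).
import nevergrad as ng

opt = ng.optimizers.OnePlusOne(parametrization=2, budget=1000)
opt.register_callback("ask", ng.callbacks.EarlyStopping(lambda o: o.num_ask > 50))
opt.minimize(lambda x: sum(x**2))  # stops after ~50 asks instead of 1000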
def add_layer(self: L, other: "Layered") -> L:
    """Adds a layer which will modify the object behavior"""
    if self is not self._layers[0] or self._LAYER_LEVEL != Level.ROOT:
        raise errors.NevergradRuntimeError("Layers can only be added from the root.")
    if len(other._layers) > 1:
        raise errors.NevergradRuntimeError("Cannot append multiple layers at once")
    if other._LAYER_LEVEL.value >= self._layers[-1]._LAYER_LEVEL.value:
        # fast path: the new layer sits on top
        other._layer_index = len(self._layers)
        self._layers.append(other)
    else:
        # insert at the position that keeps layers sorted by level, then reindex
        levels = [x._LAYER_LEVEL.value for x in self._layers]
        ind = bisect.bisect_right(levels, other._LAYER_LEVEL.value)
        self._layers.insert(ind, other)
        for k, x in enumerate(self._layers):
            x._layer_index = k
    other._layers = self._layers
    return self
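
# Toy illustration of the insertion logic above, using only the stdlib: when
# the new layer's level is not the highest, bisect_right finds the rightmost
# slot that keeps the level list sorted (equal levels land after one another).
import bisect

levels = [0, 1, 1, 3]  # hypothetical _LAYER_LEVEL values of existing layers
ind = bisect.bisect_right(levels, 1)
assert ind == 3  # a new level-1 layer goes after the existing level-1 layers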
def sample(self: P) -> P:
    """Sample a new instance of the parameter.
    This usually means spawning a child and mutating it.
    This function should be used by optimizers when creating an initial population,
    since parameter.heritage["lineage"] is reset to the parameter's own uid instead
    of being inherited from its parent's.
    """
    # the inner workings can be overridden through _layered_sample()
    self.random_state  # make sure it is populated before the copy
    child = self._layers[-1]._layered_sample()
    if not isinstance(child, Parameter) and not isinstance(child, type(self)):
        raise errors.NevergradRuntimeError("Unexpected sample return type")
    child._set_parenthood(None)
    return child  # type: ignore
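
# Hedged sketch, assuming ng.p.Scalar: sample() returns a mutated child whose
# lineage is reset (via _set_parenthood(None)) rather than inherited, which is
# what makes it suitable for building an initial population.
import nevergrad as ng

base = ng.p.Scalar(init=0.0)
individual = base.sample()
assert individual.uid != base.uid  # a fresh, independently mutated instance
# its heritage["lineage"] now points at its own uid rather than base's lineage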
def _call_deeper(self, name: str, *args: tp.Any, **kwargs: tp.Any) -> tp.Any:
    if self._layers[self._layer_index] is not self:
        layers = [f"{lay.name}({lay._layer_index})" for lay in self._layers]
        raise errors.NevergradRuntimeError(
            "Layer indexing has changed for an unknown reason. Please open an issue:\n"
            f"Caller at index {self._layer_index}: {self.name}\n"
            f"Layers: {layers}.\n"
        )
    if not name.startswith("_layered_"):
        raise errors.NevergradValueError("For consistency, only _layered functions can be used.")
    # walk the layers below the caller, from top to bottom, and call the first override
    for layer in reversed(self._layers[: self._layer_index]):
        func = getattr(layer, name)
        if func.__func__ is not getattr(Layered, name):  # skip unnecessary stack calls
            return func(*args, **kwargs)
    types = [type(x) for x in self._layers]
    raise errors.NevergradNotImplementedError(f"No implementation for {name} on layers: {types}.")
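
# Toy illustration (hypothetical classes, not nevergrad code) of the override
# test used above: comparing a bound method's __func__ with the base-class
# attribute tells apart layers that override a _layered_ hook from those that
# merely inherit it.
class BaseLayer:
    def _layered_get_value(self):
        raise NotImplementedError


class ValueLayer(BaseLayer):
    def _layered_get_value(self):
        return 42


layers = [ValueLayer(), BaseLayer()]
for layer in reversed(layers):  # walk from the top layer down
    func = layer._layered_get_value
    if func.__func__ is not BaseLayer._layered_get_value:  # overridden?
        print(func())  # -> 42
        break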