import numpy as np   # module-level import; used by the scalar check below

def _initializer_for(init, rank_params=None):
    '''
    Resolve a layer's ``init`` argument into a CNTK initializer:
    scalars become constant initializers, the default sentinels pick up
    the current default options, and optional rank parameters are folded in.
    '''
    if init is None:
        raise ValueError("init parameter cannot be None")

    # if default then select
    # (_default_sentinel_init, _default_sentinel_init_bias and _current_default_options
    #  are module-level names defined elsewhere in blocks.py)
    if init is _default_sentinel_init:
        init = _current_default_options.init
    elif init is _default_sentinel_init_bias:
        init = _current_default_options.init_bias

    # scalar constant: that's it, nothing further to do here
    if np.isscalar(init):
        # BUGBUG: this is sometimes required when dimensions are unknown; shouldn't.
        from _cntk_py import constant_initializer
        return constant_initializer(init)
        #return init # TODO: change to this once this works, e.g. for layers.BatchNormalization()

    # implant additional rank parameters
    if rank_params:
        from cntk.initializer import initializer_with_rank
        init = initializer_with_rank(init, **rank_params)

    return init
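
# --- Illustrative usage (a sketch only, not part of the library source). ---
# Assuming an installed CNTK build and the full blocks.py module context
# (the sentinel/default names defined above the function), _initializer_for
# resolves a layer's `init` argument as follows: a Python scalar becomes a
# constant initializer, while an initializer descriptor such as
# glorot_uniform() can be stamped with additional rank information.
if __name__ == '__main__':
    from cntk.initializer import glorot_uniform

    # A scalar resolves to constant_initializer(0.1).
    const_init = _initializer_for(0.1)

    # A descriptor plus rank_params is re-wrapped via initializer_with_rank,
    # e.g. for a convolution weight with one output axis and a 2D filter.
    ranked_init = _initializer_for(glorot_uniform(),
                                   rank_params=dict(output_rank=1, filter_rank=2))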