def _prepare(self, sample_weight, **kwargs):
    """Set up trainable parameters and the optimizer before fitting.

    :param sample_weight: per-sample weights (unused in this preparation
        step; kept so the signature matches sibling models' ``_prepare``).
    :param kwargs: may carry "lr" to override the stored learning rate.
    """
    learning_rate = kwargs.get("lr", self._params["lr"])
    # One dual coefficient per training sample, plus a scalar bias,
    # both randomly initialized as float32.
    self._alpha = np.random.random(len(self._x)).astype(np.float32)
    self._b = np.random.random(1).astype(np.float32)
    self._model_parameters = [self._alpha, self._b]
    # NOTE(review): self._optimizer appears to hold the optimizer's *name*
    # at this point and is replaced by the built instance — confirm callers
    # set it to a string before _prepare runs.
    self._optimizer = OptFactory().get_optimizer_by_name(
        self._optimizer, self._model_parameters,
        learning_rate, self._params["epoch"])
def __init__(self, **kwargs):
    """Initialize empty network state, factories, and the metric lookup."""
    super(NNDist, self).__init__(**kwargs)
    # Layer bookkeeping: the layers themselves plus parallel metadata lists.
    self._layers, self._weights, self._bias = [], [], []
    self._layer_names, self._layer_shapes, self._layer_params = [], [], []
    self._current_dimension = 0
    self._apply_bias = False
    # Training hyper-parameter and optimizer placeholders.
    self._lr, self._epoch, self._regularization_param = 0, 0, 0
    self._w_optimizer, self._b_optimizer = None, None
    self._optimizer_name = ""
    self.verbose = 1
    # Logging and metric collections.
    self._logs = {}
    self._metrics, self._metric_names = [], []
    # Data-range placeholders.
    self._x_min, self._x_max = 0, 0
    self._y_min, self._y_max = 0, 0
    # Factories for building layers and optimizers on demand.
    self._layer_factory = LayerFactory()
    self._optimizer_factory = OptFactory()
    # Both short and underscore-prefixed metric names map to bound methods.
    self._available_metrics = {
        "acc": self.acc, "_acc": self.acc,
        "f1": self.f1_score, "_f1_score": self.f1_score,
    }
def __init__(self):
    """Initialize the bare network state for a fresh instance."""
    # Layer bookkeeping: the layers themselves plus parallel metadata lists.
    self._layers, self._weights, self._bias = [], [], []
    self._layer_names, self._layer_shapes, self._layer_params = [], [], []
    self._current_dimension = 0
    self._whether_apply_bias = False
    self._cost_layer = "Undefined"
    # Training hyper-parameter and optimizer placeholders.
    self._lr, self._epoch, self._regularization_param = 0, 0, 0
    self._w_optimizer, self._b_optimizer = None, None
    self._optimizer_name = ""
    self.verbose = 0
    # Logging, timing, and metric collections.
    self._logs = {}
    self._timings = {}
    self._metrics, self._metric_names = [], []
    # Training data and data-range placeholders.
    self._x, self._y = None, None
    self._x_min, self._x_max = 0, 0
    self._y_min, self._y_max = 0, 0
    # Factories for building layers and optimizers on demand.
    self._layer_factory = LayerFactory()
    self._optimizer_factory = OptFactory()
    # Both short and underscore-prefixed metric names map to the
    # class-level metric functions on NNDist.
    self._available_metrics = {
        "acc": NNDist._acc, "_acc": NNDist._acc,
        "f1": NNDist._f1_score, "_f1_score": NNDist._f1_score,
    }
def fit(self, x, y, sample_weight=None, c=None, lr=None, optimizer=None,
        batch_size=None, epoch=None, tol=None, animation_params=None):
    """Train the linear SVM by iterative batch updates.

    Any hyper-parameter passed as None falls back to the value stored in
    ``self._params``. Training stops early once the batch loss drops below
    ``tol``. Learned parameters end up in ``self._w`` and ``self._b``.
    """
    # Resolve every unspecified hyper-parameter from the stored defaults.
    sample_weight = self._params["sample_weight"] if sample_weight is None else sample_weight
    c = self._params["c"] if c is None else c
    lr = self._params["lr"] if lr is None else lr
    batch_size = self._params["batch_size"] if batch_size is None else batch_size
    epoch = self._params["epoch"] if epoch is None else epoch
    tol = self._params["tol"] if tol is None else tol
    optimizer = self._params["optimizer"] if optimizer is None else optimizer
    *animation_properties, animation_params = self._get_animation_params(
        animation_params)
    x, y = np.atleast_2d(x), np.asarray(y, dtype=np.float32)
    # Uniform weights when none are given; otherwise scale the provided
    # weights by the sample count.
    if sample_weight is None:
        sample_weight = np.ones(len(y))
    else:
        sample_weight = np.asarray(sample_weight) * len(y)
    # Zero-initialized weight vector and bias, wrapped for the optimizer.
    self._w = np.zeros(x.shape[1], dtype=np.float32)
    self._b = np.zeros(1, dtype=np.float32)
    self._model_parameters = [self._w, self._b]
    self._optimizer = OptFactory().get_optimizer_by_name(
        optimizer, self._model_parameters, lr, epoch)
    bar = ProgressBar(max_value=epoch, name="LinearSVM")
    frames = []
    repeats_per_epoch = self._get_train_repeat(x, batch_size)
    for epoch_index in range(epoch):
        self._optimizer.update()
        loss = self._batch_training(
            x, y, batch_size, repeats_per_epoch, sample_weight, c)
        if loss < tol:
            # Converged: close the progress bar and stop training.
            bar.terminate()
            break
        self._handle_animation(
            epoch_index, x, y, frames, animation_params,
            *animation_properties)
        bar.update()
    self._handle_mp4(frames, animation_properties)