def evaluate(self, dataset):
    # Retrieve the sampled entries of the matrix in coordinate form.
    rows, cols, vals = dataset.find(self._app)
    # Select the latent factors for the sampled rows and columns.
    u = self.U[:, rows]
    v = self.V[:, cols]
    # The per-entry prediction is the dot product of the paired factor columns.
    predictions = u * v
    q = nps.sum(predictions, axis=0)
    # Fetch both arrays and report RMSE.
    return mean_squared_error(q.get(), vals.get(), squared=False)
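The method above assumes surrounding state that the excerpt does not show: factor matrices U and V, an application handle, and a dataset that returns sampled entries in coordinate form. A minimal sketch of that assumed context follows; the class names, constructor, and rank parameter k are illustrative and not taken from NumS.

import nums.numpy as nps
from sklearn.metrics import mean_squared_error  # presumably what evaluate() calls


class SampledDataset:
    # Illustrative container for observed entries in (rows, cols, vals) form.
    def __init__(self, rows, cols, vals):
        self._entries = (rows, cols, vals)

    def find(self, app):
        # evaluate() passes the model's application handle; this sketch ignores it.
        return self._entries


class MatrixFactorization:
    # Illustrative factor model: U is (k, n_rows), V is (k, n_cols), so that
    # nps.sum(U[:, i] * V[:, j]) approximates entry (i, j).
    def __init__(self, app, n_rows, n_cols, k=8):
        self._app = app
        self.U = nps.random.randn(k, n_rows)
        self.V = nps.random.randn(k, n_cols)

    # evaluate(), as defined above, would be a method of this class.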
import nums.numpy as nps
from nums.models.glms import LogisticRegression
# `am` (the NumS application manager) and `sample` (a batch generator) are
# assumed to be provided by the surrounding code; a sketch of `sample` follows.


def example(max_iters, batch_size):
    app = am.instance()
    model = LogisticRegression(app=app, cluster_shape=(1, 1), fit_intercept=False)
    # Initialize model state from a small initial sample.
    X, y = sample(app, sample_size=8)
    model.init(X)
    for i in range(max_iters):
        # Take a step.
        X, y = sample(app, batch_size)
        model.partial_fit(X, y)
        print("train accuracy", (nps.sum(y == model.predict(X)) / X.shape[0]).get())
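The `sample` helper used above is not defined in the excerpt. A possible sketch is given below, generating the same two-cluster synthetic data as the full example that follows; the helper, its signature, and the import path suggested for `am` are assumptions, not NumS API.

import nums.numpy as nps
# Assumed import for the application manager handle used as `am` above:
# import nums.core.application_manager as am


def sample(app, sample_size):
    # Draw a balanced two-class batch; `app` is accepted for interface
    # compatibility with the caller above but is not needed here.
    half = sample_size // 2
    X1 = nps.random.randn(half, 1) + 5.0
    y1 = nps.zeros(shape=(half,), dtype=bool)
    X2 = nps.random.randn(sample_size - half, 1) + 10.0
    y2 = nps.ones(shape=(sample_size - half,), dtype=bool)
    X = nps.concatenate([X1, X2], axis=0)
    y = nps.concatenate([y1, y2], axis=0)
    return X, y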
import nums
import nums.numpy as nps
from nums.models.glms import LogisticRegression

nums.init()

# Make dataset.
X1 = nps.random.randn(500, 1) + 5.0
y1 = nps.zeros(shape=(500,), dtype=bool)
X2 = nps.random.randn(500, 1) + 10.0
y2 = nps.ones(shape=(500,), dtype=bool)
X = nps.concatenate([X1, X2], axis=0)
y = nps.concatenate([y1, y2], axis=0)

# Train Logistic Regression Model.
model = LogisticRegression(solver="newton", tol=1e-8, max_iter=1)
model.fit(X, y)
y_pred = model.predict(X)
print("accuracy", (nps.sum(y == y_pred) / X.shape[0]).get())
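Because .get() materializes a NumS array as a NumPy array, the fetched labels and predictions can also be passed to standard tooling; for instance, with scikit-learn installed:

from sklearn.metrics import accuracy_score

# Equivalent accuracy computed on the fetched NumPy arrays.
print("sklearn accuracy", accuracy_score(y.get(), y_pred.get()))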