def test_coeff_of_determination():
    """Check coeff_of_determination on a perfect fit and on zero-variance data."""
    # Identical data and model: the model explains 100% of the variance.
    signal = np.arange(0, 100)
    prediction = np.arange(0, 100)
    npt.assert_equal(utils.coeff_of_determination(signal, prediction), 100)
    # All-zero data has no variance, so the coefficient is undefined (NaN).
    # npt.assert_equal treats NaN as equal to NaN, unlike plain ==.
    undefined = utils.coeff_of_determination(np.zeros_like(prediction),
                                             prediction)
    npt.assert_equal(undefined, np.nan)
def test_coeff_of_determination():
    """Verify coeff_of_determination for a perfect model and for all-zero data."""
    # NOTE(review): this file defines a byte-equivalent test with the same
    # name earlier; only this later definition runs — consider deleting one.
    data = np.arange(0, 100)
    model = data.copy()
    # Model equals data, so all variance is explained: cod == 100.
    cod = utils.coeff_of_determination(data, model)
    npt.assert_equal(cod, 100)
    # Zero data -> zero variance -> cod is NaN
    # (assert_equal considers two NaNs equal).
    cod = utils.coeff_of_determination(np.zeros_like(model), model)
    npt.assert_equal(cod, np.nan)
def optimize_parameters(args):
    """Refine one unit's model parameters with gradient descent.

    ``args`` is a ``(data, ix, row)`` triple. The row's x/y/size/amplitude/
    baseline fields seed the search and are overwritten with the optimized
    values; r2 is stored as a fraction. Returns ``(ix, row)``.
    """
    data, ix, row = args
    # Seed the search from the current (grid-search) solution.
    seed = (row.x, row.y, row.size, row.amplitude, row.baseline)
    result = gradient_descent_search(
        data, error_function, self.model_func.generate_prediction,
        seed, bounds, verbose)
    best = result[0]
    for key, value in zip(('x', 'y', 'size', 'amplitude', 'baseline'), best):
        row[key] = value
    prediction = self.model_func.generate_prediction(*best)
    # coeff_of_determination returns a percentage; store it as a fraction.
    row['r2'] = coeff_of_determination(data, prediction) / 100
    row['estimation_method'] = 'Traditional method'
    return ix, row
def optimize_parameters(args):
    """Refine one unit's x/y/size by gradient descent, then recover
    amplitude and baseline with an ordinary least-squares fit.

    ``args`` is a ``(data, ix, row)`` triple. The row's x, y, size,
    amplitude, baseline and r2 fields are overwritten; r2 is stored as a
    fraction. Returns ``(ix, row)``.
    """
    data, ix, row = args
    # Z-scored copy of the data; the search works on normalized predictions
    # so amplitude/baseline are recovered afterwards by the linear fit below.
    # NOTE(review): data_ is only used for its length here — if the search is
    # meant to match z-scored data, data_ should probably be the first
    # argument to gradient_descent_search; confirm against the error function.
    data_ = (data - data.mean()) / data.std()
    ballpark = row.x, row.y, row.size
    r = gradient_descent_search(data, error_function, make_zscored_prediction,
                                ballpark, bounds, verbose)
    row['x'] = r[0][0]
    row['y'] = r[0][1]
    row['size'] = r[0][2]
    # Solve data ≈ baseline + amplitude * prediction in the least-squares
    # sense: column 0 is the intercept, column 1 the unnormalized prediction.
    X = np.ones((len(data_), 2))
    X[:, 1] = make_zscored_prediction(*r[0], normalize=False)
    # rcond=None selects NumPy's recommended machine-precision cutoff and
    # silences the FutureWarning raised for the legacy default (rcond=-1).
    beta, _residuals, _, _ = np.linalg.lstsq(X, data, rcond=None)
    row['baseline'] = beta[0]
    row['amplitude'] = beta[1]
    # coeff_of_determination returns a percentage; store it as a fraction.
    row['r2'] = coeff_of_determination(data, X.dot(beta)) / 100
    row['estimation_method'] = 'Correlation method'
    return ix, row
def iterative_fit(self):
    """Refine the grid-search parameters per unit with gradient descent.

    Requires ``self.gridsearch_params`` (set by the grid fit). Fits every
    unit in parallel, then appends each unit's coefficient of determination
    (as a fraction) to its parameter vector.

    Returns
    -------
    ndarray, shape (n_units, n_params + 1)
        Fitted parameters plus r2 per unit; also stored on
        ``self.fit_output``.

    Raises
    ------
    Exception
        If the grid search has not been run yet.
    ValueError
        If ``self.fit_model`` is not one of the supported models.
    """
    if self.gridsearch_params is None:
        raise Exception('First use self.fit_grid!')

    # NOTE(review): this zip iterates rows of self.data, while the r2 loop
    # below reads columns (self.data[:, vox]) — confirm the orientation of
    # self.data is (n_units, n_timepoints) here.
    prf_params = Parallel(self.n_jobs, verbose=10)(
        delayed(fit_gradient_descent)(self.model_func, data, ballpark,
                                      self.bound_fits)
        for data, ballpark in zip(self.data, self.gridsearch_params))
    prf_params = np.vstack(prf_params)

    # Gauss models have 5 parameters, CSS models 6; one extra column for r2.
    if self.fit_model in ('gauss', 'gauss_sg'):
        n_params = 5
    elif self.fit_model in ('css', 'css_sg'):
        n_params = 6
    else:
        # Original code silently left `output` undefined here, producing a
        # confusing NameError later — fail loudly instead.
        raise ValueError('Unknown fit_model: %s' % self.fit_model)
    # BUG FIX: the original used bare `nan` (a NameError unless it was
    # separately imported); use np.nan like the rest of the file.
    output = np.full((self.n_units, n_params + 1), np.nan)

    for vox in range(self.n_units):
        data_tc = self.data[:, vox]
        model_tc = self.model_func.generate_prediction(
            *prf_params[vox, :n_params])
        # coeff_of_determination returns a percentage; store as a fraction.
        output[vox, :] = np.hstack([
            prf_params[vox, :],
            utils.coeff_of_determination(data_tc, model_tc) / 100.0])
    self.fit_output = output
    return output