import numpy as np
from scipy.optimize import minimize

# NOTE: `ccls` (cell data loading), `lnks` (LNKS model components), and the
# statistics helpers `stats`/`_stats` are project-local modules assumed to be
# imported at module level elsewhere, as are the L-BFGS-B settings DISP and
# MAX_ITER.


def compute_func_values(cell, theta, model, fobj, f, options, istrain):
    '''
    Compute the objective function value, correlation coefficient, and
    explained variance of a fitted model.

    Outputs
    -------
    fun (double): objective value
    cc (double): correlation coefficient
    ev (double): explained variance
    '''
    if istrain:
        data = ccls.get_data(cell, model, options)
    else:
        # For test data, load the full recording with cross-validation off.
        temp_options = dict(options)
        temp_options['crossval'] = False
        data = ccls.get_data(cell, model, temp_options)

    # Attach the 8-parameter linear filter basis and the mean-subtracted
    # stimulus to the options passed down to the objective.
    options['basis'] = lnks.LinearFilterBasis_8param()
    options['stim'] = data[0] - np.mean(data[0])

    if options['is_grad']:
        # The objective also returns the gradient; it is discarded here.
        fun, grad = fobj(theta, data[0], data[1], options)
    else:
        fun = fobj(theta, data[0], data[1], options)

    if model.lower() == 'lnks_mp':
        # LNKS_MP predicts both membrane potential and firing rate, and the
        # recorded output is the second element of the data tuple.
        y = data[1][1]
        v, y_est = f(theta, data[0], options)
    else:
        y = data[1]
        y_est = f(theta, data[0], options)

    if istrain:
        cc = stats.corrcoef(y, y_est)
        ev = stats.variance_explained(y, y_est)
    else:
        # Test performance is measured on the last 20,000 samples only.
        cc = stats.corrcoef(y[-20000:], y_est[-20000:])
        ev = stats.variance_explained(y[-20000:], y_est[-20000:])

    return fun, cc, ev
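

# Usage sketch (hypothetical): evaluate the fit quality of an estimated
# parameter vector on held-out data. The cell id 'g9', the objective
# `lnks_fobj`, and the model function `lnks_eval` are placeholders, not part
# of this module, and the project-local modules noted above must be
# importable for this to run.
def _example_compute_func_values(lnks_fobj, lnks_eval, theta_hat):
    options = {'model': 'LNKS', 'is_grad': False, 'crossval': True,
               'pathway': 1}
    fun, cc, ev = compute_func_values('g9', theta_hat, 'LNKS',
                                      lnks_fobj, lnks_eval,
                                      options, istrain=False)
    print('fun = %.4f, cc = %.3f, ev = %.3f' % (fun, cc, ev))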


def optimize(fobj, f, theta, data, bnds=None, options=None):
    '''
    Optimization using the scipy.optimize.minimize module.

    Input
    -----
    fobj (function): objective function
    f (function): model function
    theta (ndarray): initial parameters
    data (tuple of ndarray): input and output data
    bnds (tuple of tuples or None): bounds on the model parameters
    options (dictionary)
        is_grad (Bool): True if using the gradient for optimization, else False
        model (string): model name (e.g. 'LNKS_MP')
        pathway (int): LNK model pathway (1, 2, otherwise None)

    Output
    ------
    result (dictionary)
        theta: estimated parameters
        success: True if the optimization converged
        fun: objective value
        corrcoef: correlation coefficient
        evar: explained variance
        theta_init: initial theta
        jac: gradient at the solution (None if is_grad is False)
    '''
    theta_init = theta

    # Assemble the keyword arguments once instead of duplicating the
    # minimize call for every combination of bounds and gradient use.
    kwargs = {'args': data,
              'method': 'L-BFGS-B',
              'options': {'disp': DISP, 'maxiter': MAX_ITER}}
    if bnds:
        kwargs['bounds'] = bnds
    if options['is_grad']:
        # fobj returns (objective, gradient), so minimize can use the
        # analytic gradient directly.
        kwargs['jac'] = True

    res = minimize(fobj, theta_init, **kwargs)

    # Evaluate the model (e.g. fast_lnks_objective as f) at the estimated
    # parameters to measure the quality of the fit.
    model = options['model']
    if model.lower() == 'lnks_mp':
        # LNKS_MP predicts both membrane potential and firing rate.
        y = data[1][1]
        v, y_est = f(res.x, data[0], options)
    else:
        y = data[1]
        y_est = f(res.x, data[0], options)

    cc = _stats.corrcoef(y_est, y)
    ev = _stats.variance_explained(y_est, y)

    jac = res.jac if options['is_grad'] else None

    result = {
        "theta": res.x,
        "success": res.success,
        "fun": res.fun,
        "corrcoef": cc,
        "evar": ev,
        "theta_init": theta_init,
        "jac": jac,
    }

    return result
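

# Usage sketch (hypothetical): fit a toy 3-parameter objective with L-BFGS-B
# to illustrate the calling conventions `optimize` expects. `quad_fobj` and
# `quad_f` are stand-ins, not part of this module; the `_stats` helpers must
# still be importable for the fit metrics.
def _example_optimize():
    def quad_fobj(theta, x, y):
        r = theta - y.mean()
        return 0.5 * np.dot(r, r), r       # (objective, analytic gradient)

    def quad_f(theta, x, opts):
        return theta.mean() * x            # toy model prediction

    x = np.random.randn(1000)
    y = np.random.randn(1000)
    opts = {'is_grad': True, 'model': 'toy'}
    bnds = tuple((-5.0, 5.0) for _ in range(3))
    result = optimize(quad_fobj, quad_f, np.zeros(3), (x, y),
                      bnds=bnds, options=opts)
    print(result['success'], result['fun'], result['corrcoef'])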