Example #1
 def vectorize_params(self, p):
     """ ensures that all parameters are converted to arrays before simulation. see
     doc strings for prepare_fit() method of Model class (in build.py) for details
     regarding pcmap and logic for fitting models with parameters that depend on
     experimental conditions
     ::Arguments::
         p (dict):
             dictionary with all model parameters as
             scalars/vectors/or both
     ::Returns::
         p (dict):
             dictionary with all parameters as vectors
     """
     nl_ones = np.ones(self.nlevels)
     if 'si' in list(p):
         self.si = p['si']
     # update diffusion step-size dx given noise (si) and time-step (dt)
     self.dx = np.sqrt(self.si * self.dt)
     if 'xb' not in list(p):
         p['xb'] = 1.0
     if self.nlevels == 1:
         # flat model: scalarize, then broadcast each parameter to a 1-vector
         p = theta.scalarize_params(p)
         return {pk: p[pk] * nl_ones for pk in list(p)}
     # broadcast condition-invariant parameters across levels
     for pkey in self.pvc:
         p[pkey] = p[pkey] * nl_ones
     for pkey, pkc in self.pcmap.items():
         if pkc[0] not in list(p):
             # parameter does not vary by condition; broadcast it
             p[pkey] = p[pkey] * nl_ones
         else:
             # collect the condition-dependent values into a single vector
             p[pkey] = np.array([p[pc] for pc in pkc])
     return p
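
For intuition, here is a minimal, self-contained sketch of the same broadcasting pattern; pcmap, nlevels, and the toy parameter names below are illustrative stand-ins, not the radd API:

import numpy as np

def vectorize_demo(p, pcmap, nlevels):
    """ broadcast scalars to length-nlevels vectors; collect
    condition-dependent values (one per level) into a single vector """
    cond_keys = [pc for pkc in pcmap.values() for pc in pkc]
    out = {pk: p[pk] * np.ones(nlevels) for pk in p if pk not in cond_keys}
    for pkey, pkc in pcmap.items():
        out[pkey] = np.array([p[pc] for pc in pkc])
    return out

p = {'a': 0.45, 'tr': 0.29, 'v_cue': 1.05, 'v_target': 1.40}
print(vectorize_demo(p, pcmap={'v': ['v_cue', 'v_target']}, nlevels=2))
# {'a': array([0.45, 0.45]), 'tr': array([0.29, 0.29]), 'v': array([1.05, 1.4])}
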
Example #2
 def assess_fit(self, flat=True):
     """ wrapper for analyze.assess_fit calculates and stores
     rchi, AIC, BIC and other fit statistics
     ::Arguments::
         flat (bool):
             if flat, yhat have ndim=1, else ndim>1
     ::Returns::
         yhat (array), finfo (pd.Series), popt (dict)
         see gradient_descent() docstrings
     """
     fp = deepcopy(self.fitparams)
     y = self.simulator.y.flatten()
     wts = self.simulator.wts.flatten()
     # build a plain dict from the lmfit-optimized Parameters object
     popt = dict(self.lmMin.params.valuesdict())
     # un-vectorize all parameters except conditionals
     popt = theta.scalarize_params(popt, pc_map=self.pc_map, is_flat=flat)
     finfo = pd.Series(popt)
     # recover the model-predicted yhat vector from the weighted residuals
     fp['yhat'] = (self.lmMin.residual / wts) + y
     # fill finfo dict with goodness-of-fit info
     finfo['cnvrg'] = self.lmMin.success
     finfo['nfev'] = self.lmMin.nfev
     finfo['nvary'] = len(self.lmMin.var_names)
     finfo['chi'] = np.sum(wts*(fp['yhat'] - y)**2)
     finfo['ndata'] = len(fp['yhat'])
     finfo['df'] = finfo.ndata - finfo.nvary
     finfo['rchi'] = finfo.chi / finfo.df
     finfo['logp'] = finfo.ndata * np.log(finfo.rchi)
     finfo['AIC'] = finfo.logp + 2 * finfo.nvary
     # BIC penalty is nvary * ln(ndata)
     finfo['BIC'] = finfo.logp + finfo.nvary * np.log(finfo.ndata)
     return finfo, popt, fp['yhat']
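
The goodness-of-fit bookkeeping above reduces to a handful of standard formulas. A standalone numpy sketch of the same statistics (illustrative, not the radd analyze API):

import numpy as np

def fit_stats(y, yhat, wts, nvary):
    """ weighted chi-square plus information criteria; nvary is the
    number of free parameters varied during the fit """
    chi = np.sum(wts * (yhat - y)**2)       # weighted SSE
    ndata = y.size
    rchi = chi / (ndata - nvary)            # reduced chi-square (chi / df)
    logp = ndata * np.log(rchi)
    return {'chi': chi, 'rchi': rchi,
            'AIC': logp + 2 * nvary,               # AIC penalty: 2k
            'BIC': logp + nvary * np.log(ndata)}   # BIC penalty: k*ln(n)
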
Example #3
    def run_basinhopping(self, p):
        """ uses fmin_tnc in combination with basinhopping to perform bounded global
         minimization of multivariate model
        ::Arguments::
            p (dict):
                parameter dictionary
            callback (function):
                callable function for displaying optimization progress
        """
        bp = self.basinparams
        nl = self.fitparams['nlevels']
        nsuccess = bp['nsuccess']
        if nl==1:
            reset = 0
            basin_keys = np.sort(list(p))
            basin_params = theta.scalarize_params(deepcopy(p))
        else:
            reset = 1
            basin_keys = np.sort(list(self.pcmap))
            basin_params = deepcopy(p)

        if self.progress:
            self.callback = self.gbar.reset(get_call=True, gbasin=reset)

        self.simulator.__prep_global__(basin_params=basin_params, basin_keys=basin_keys)

        # make list of init values for all pkeys included in fit
        x0 = np.hstack([basin_params[pk] * np.ones(nl) for pk in basin_keys])

        # define parameter boundaries for all params in pcmap.keys()
        # to be used by basinhopping minimizer & tnc local optimizer
        xmin, xmax = theta.format_basinhopping_bounds(basin_keys, nlevels=nl, kind=self.kind)
        tncbounds = theta.format_local_bounds(xmin, xmax)
        tncopt = {'xtol': bp['tol'], 'ftol': bp['tol']}
        mkwargs = {"method": bp['method'], 'bounds': tncbounds, 'tol': bp['tol'], 'options': tncopt}

        # define custom take_step and accept_test functions
        accept_step = GlobalBounds(xmin, xmax)
        custom_step = HopStep(basin_keys, nlevels=nl, stepsize=bp['stepsize'])

        # run basinhopping on simulator.basinhopping_minimizer func
        out = basinhopping(self.simulator.global_cost_fx, x0=x0,
                           minimizer_kwargs=mkwargs, take_step=custom_step,
                           accept_test=accept_step, T=bp['T'],
                           stepsize=bp['stepsize'], niter_success=nsuccess,
                           niter=bp['niter'], interval=bp['interval'],
                           callback=self.callback)

        xopt = out.x
        funcmin = out.fun

        if nl > 1:
            xopt = np.array(xopt).reshape(len(basin_keys), nl).squeeze()
        for i, k in enumerate(basin_keys):
            p[k] = xopt[i]

        return p, funcmin
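
The same global-optimization pattern, stripped to its essentials with plain scipy; the Bounds class and toy cost function below are hypothetical stand-ins for GlobalBounds and the simulator cost, not radd code:

import numpy as np
from scipy.optimize import basinhopping

class Bounds(object):
    """ accept_test for basinhopping: reject steps outside [xmin, xmax] """
    def __init__(self, xmin, xmax):
        self.xmin, self.xmax = np.asarray(xmin), np.asarray(xmax)
    def __call__(self, **kwargs):
        x = kwargs['x_new']
        return bool(np.all(x >= self.xmin) and np.all(x <= self.xmax))

def cost(x):
    # toy cost surface with its minimum at x = [0.5, 0.5]
    return np.sum((x - 0.5)**2)

xmin, xmax = [0., 0.], [1., 1.]
mkwargs = {'method': 'TNC', 'bounds': list(zip(xmin, xmax))}
out = basinhopping(cost, x0=np.array([0.1, 0.9]), minimizer_kwargs=mkwargs,
                   accept_test=Bounds(xmin, xmax), T=1.0, stepsize=0.05,
                   niter=50, niter_success=20)
print(out.x, out.fun)
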
Example #4
def animate_dpm(model):
    """ to render animation within a notebook :
        vis.render_animation(vis.animated_dpm_example(MODEL))
    """
    from matplotlib import animation
    params = deepcopy(model.inits)
    bound = theta.scalarize_params(params)['a']
    x, gtraces, straces, xi, yi, nframes = gen_re_traces(model, params)
    f, axes = build_decision_axis(onset=x[0][0], bound=bound)
    glines = [axes[i].plot([], [], linewidth=1.5)[0] for i, n in enumerate(gtraces)]
    slines = [axes[i].plot([xi[i]], [yi[i]], linewidth=1.5)[0] for i, n in enumerate(straces)]
    f_args = (x, gtraces, glines, straces, slines, params, xi, yi)
    anim = animation.FuncAnimation(f, re_animate_multiax, fargs=f_args,
                                   frames=nframes, interval=1, blit=True)
    return anim
Example #5
def animate_dpm(model):
    """ to render animation within a notebook :
        vis.render_animation(vis.animated_dpm_example(MODEL))
    """
    from matplotlib import animation
    model.set_fitparams(dt=.001)
    params = deepcopy(model.inits)
    bound = theta.scalarize_params(params)['a']
    # generate reactive model simulations
    x, goTraces, brakeTraces, xi, yi, nframes = gen_re_traces(model)
    f, axes = build_decision_axis(onset=x[0][0], bound=bound)
    # axes line object for "go" process
    goLine = [axes[i].plot([], [], linewidth=1.5)[0] for i, n in enumerate(goTraces)]
    # axes line object for "brake" process
    brakeLine = [axes[i].plot([xi[i]], [yi[i]], linewidth=1.5)[0] for i, n in enumerate(brakeTraces)]
    f_args = (x, goTraces, goLine, brakeTraces, brakeLine, params, xi, yi)
    anim = animation.FuncAnimation(f, re_animate_multiax, fargs=f_args,
                                   frames=nframes, interval=1, blit=True)
    return anim
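
If vis.render_animation is unavailable, the FuncAnimation returned by either version can be embedded in a notebook with matplotlib's built-in HTML export. A generic, self-contained example (a sine trace, not the radd decision traces):

import numpy as np
from matplotlib import animation, pyplot as plt
from IPython.display import HTML

fig, ax = plt.subplots()
line, = ax.plot([], [], linewidth=1.5)
ax.set_xlim(0, 2 * np.pi)
ax.set_ylim(-1.1, 1.1)
x = np.linspace(0, 2 * np.pi, 200)

def update(frame):
    line.set_data(x[:frame], np.sin(x[:frame]))
    return (line,)

anim = animation.FuncAnimation(fig, update, frames=len(x), interval=10, blit=True)
HTML(anim.to_jshtml())   # embeds the animation as interactive JS/HTML
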
Example #6
    def assess_fit(self, flat=True, learn=False):
        """ wrapper for analyze.assess_fit calculates and stores
        rchi, AIC, BIC and other fit statistics
        ::Arguments::
            flat (bool):
                if flat, yhat have ndim=1, else ndim>1
        ::Returns::
            yhat (array), finfo (pd.Series), popt (dict)
            see gradient_descent() docstrings
        """
        fp = deepcopy(self.fitparams)
        if learn:
            y = np.hstack([self.simRL.rtBlocks, self.simRL.saccBlocks])
            wts = np.hstack([self.simRL.rt_weights, self.simRL.sacc_weights])
            sim = self.simRL
        else:
            y = self.sim.y.flatten()
            wts = self.sim.wts.flatten()
            sim = self.sim

        # build a plain dict from the lmfit-optimized Parameters object
        popt = dict(self.lmMin.params.valuesdict())

        if self.kind == 'dpm' and 'xb' in list(popt):
            _ = popt.pop('xb')

        fmin = self.lmMin.chisqr
        nvary = len(self.lmMin.var_names)
        residuals = self.lmMin.residual
        yhat = (residuals / wts) + y
        if self.lmMin.method == 'brute':
            success = not self.lmMin.aborted
        else:
            success = self.lmMin.success
        nfev = self.lmMin.nfev
        try:
            niter = self.lmMin.nit
        except Exception:
            niter = nfev

        # TODO: extract, calculate, and store std.errors of popt
        # presults is scipy.minimize object (see hop_around() for global_results)
        # presults = self.global_results.lowest_optimization_result
        # then take sqrt of the diag. of the hessian to get errors
        # poptErr = np.sqrt(np.diag(presults.hess_inv.todense()))

        if not self.learn:
            # un-vectorize all parameters except conditionals
            popt = theta.scalarize_params(popt, self.pcmap)
            if fp.nlevels > 1:
                popt = theta.pvary_levels_to_array(popt, self.pcmap)

        finfo = pd.Series(dtype=object)
        # get model-predicted yhat vector
        fp['yhat'] = yhat
        finfo['idx'] = fp.idx
        finfo['pvary'] = '_'.join(list(fp.depends_on))
        finfo['cnvrg'] = success
        finfo['nfev'] = nfev
        finfo['niter'] = niter
        finfo['nvary'] = nvary
        finfo['chi'] = fmin
        finfo['ndata'] = y.size
        finfo['df'] = finfo.ndata - finfo.nvary
        finfo['rchi'] = finfo.chi / finfo.df
        finfo['logp'] = finfo.ndata * np.log(finfo.chi / finfo.ndata)
        finfo['AIC'] = finfo.logp + 2 * finfo.nvary
        finfo['BIC'] = finfo.logp + finfo.nvary * np.log(finfo.ndata)
        return finfo, popt, fp['yhat']
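
The commented-out TODO above describes the standard route to parameter standard errors. A minimal scipy illustration of that recipe (toy cost function; in radd the result object would come from hop_around() / gradient_descent()):

import numpy as np
from scipy.optimize import minimize

def cost(x):
    return np.sum((x - np.array([0.3, 0.7]))**2)

res = minimize(cost, x0=np.zeros(2), method='L-BFGS-B')
# L-BFGS-B stores the inverse Hessian as a linear operator; densify it
# and take the sqrt of its diagonal as (approximate) std. errors
popt_err = np.sqrt(np.diag(res.hess_inv.todense()))
print(res.x, popt_err)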