# shorthand for:
#     ctx = {}
#     for xfa in xfspec:
#         ctx = xforms.evaluate_step(xfa, ctx)

if browse_results:
    import nems.gui.editors as gui
    ex = gui.browse_xform_fit(ctx, xfspec)

if save_results:
    # ----------------------------------------------------------------------------
    # SAVE YOUR RESULTS

    # save results to file
    destination = os.path.join(results_dir, str(batch),
                               xforms.get_meta(ctx)['cellid'],
                               ms.get_modelspec_longname(ctx['modelspec']))
    log.info('Saving modelspec(s) to {0} ...'.format(destination))
    xforms.save_analysis(destination,
                         recording=ctx['rec'],
                         modelspec=ctx['modelspec'],
                         xfspec=xfspec,
                         figures=ctx['figures'],
                         log=log_xf)

    # save summary of results to a database
    log.info('Saving metadata to db ...')
    modelspec = ctx['modelspec']
    modelspec.meta['modelpath'] = destination
    # path to the first saved figure (join rather than concatenate, since
    # destination does not end with a path separator)
    modelspec.meta['figurefile'] = os.path.join(destination, 'figure.0000.png')
    nd.update_results_table(modelspec)
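# A saved analysis can be restored later for inspection or re-evaluation.
# A minimal sketch, assuming the xforms.load_analysis counterpart to
# save_analysis and that `destination` still points at the directory written
# above; the exact signature and return order may differ between NEMS
# versions, so check your installation before relying on this:
#
#     xfspec_loaded, ctx_loaded = xforms.load_analysis(destination,
#                                                      eval_model=True)
#     log.info('Reloaded modelspec from {0}'.format(destination))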
def test_get_meta(context):
    # smoke test: get_meta should return the metadata for the fitted modelspec
    m = xforms.get_meta(context)
    assert m is not None
def test_fit_performance(context):
    # Note: will no longer pass if the modelspec setup in conftest changes.
    m1 = np.round(xforms.get_meta(context)['r_test'] * 100)
    m2 = np.array([47.0])
    assert m1 == m2
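# The `context` argument in the tests above is a pytest fixture defined in
# conftest.py. A hypothetical sketch of such a fixture (the real conftest
# builds and fits a specific modelspec, which is why the r_test value in
# test_fit_performance is hard-coded); kept as a comment here because
# defining a second `context` fixture in this file would shadow the real one:
#
#     import pytest
#
#     @pytest.fixture(scope='module')
#     def context():
#         xfspec = [...]                    # load / fit / evaluate steps
#         ctx, log_xf = xforms.evaluate(xfspec)   # run the pipeline once
#         return ctx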