import numpy as np
import pytest

from sherpa import ui
from sherpa.utils.err import DataErr


def test_set_filter_unmasked(clean_ui):
    """What happens when we call set_filter on an unfiltered dataset?"""

    x = np.asarray([10, 20, 30])
    y = np.asarray([2, 3, 4])
    ui.load_arrays(1, x, y)

    data = ui.get_data()

    # an unfiltered dataset has a scalar True mask
    assert data.mask

    ui.set_filter(np.asarray([True, False, True]))
    assert data.mask == pytest.approx([True, False, True])
def test_set_filter_unmasked_wrong(clean_ui):
    """What happens when we call set_filter on an unfiltered dataset with the wrong size?"""

    x = np.asarray([10, 20, 30])
    y = np.asarray([2, 3, 4])
    ui.load_arrays(1, x, y)

    data = ui.get_data()

    with pytest.raises(DataErr) as err:
        ui.set_filter(np.asarray([True, False]))

    assert str(err.value) == "size mismatch between 3 and 2"
def test_set_filter_masked(clean_ui):
    """What happens when we call set_filter on a filtered dataset?"""

    x = np.asarray([10, 20, 30, 40, 50])
    y = np.asarray([2, 3, 4, 5, 6])
    ui.load_arrays(1, x, y)

    # exclude the middle three points
    ui.ignore(lo=15, hi=45)

    data = ui.get_data()
    assert data.mask == pytest.approx([True, False, False, False, True])

    # the new filter is combined (OR) with the existing mask
    ui.set_filter(np.asarray([True, False, True, False, False]))
    assert data.mask == pytest.approx([True, False, True, False, True])
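# The tests above assume a `clean_ui` pytest fixture that resets the Sherpa
# session between tests. A minimal sketch of such a fixture is given below;
# the project's real fixture may do more (for example, restoring logging or
# I/O state), so treat this as illustrative only.
@pytest.fixture
def clean_ui():
    """Run each test with a fresh Sherpa session (illustrative sketch)."""
    ui.clean()
    yield
    ui.clean()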
def change_example(idval):
    """Change the example y values (created by setup_example)."""

    d = ui.get_data(idval)
    # copy the values so the module-level _data_y2 list (defined elsewhere
    # in the original test module) isn't changed by accident
    d.y = list(_data_y2)
def run_fits(obsids, ax, user_pars=None, fixed_pars=None, guess_pars=None,
             label='model', per_obs_dir='per_obs_nfits', outdir=None, redo=False):
    """Fit the tilt model to each obsid along axis `ax`.

    DATADIR, USER_PARS, tilt_model and plot_fits are module-level names
    defined elsewhere in the original script.
    """
    if len(obsids) == 0:
        print("No obsids, nothing to fit")
        return None

    if user_pars is None:
        user_pars = USER_PARS

    if not os.path.exists(per_obs_dir):
        os.makedirs(per_obs_dir)

    obsfits = []
    for obsid in obsids:
        outdir = os.path.join(per_obs_dir, 'obs{:05d}'.format(obsid))
        if not os.path.exists(outdir):
            os.makedirs(outdir)

        model_file = os.path.join(outdir, '{}.pkl'.format(label))
        if os.path.exists(model_file) and not redo:
            # logger.warn('Using previous fit found in %s' % model_file)
            print(model_file)
            with open(model_file, 'rb') as mod_pick:
                modelfit = pickle.load(mod_pick)
            obsfits.append(modelfit)
            continue

        modelfit = {'label': obsid}

        ui.clean()
        data_id = 0
        obsdir = "%s/obs%05d" % (DATADIR, obsid)

        with open(os.path.join(obsdir, 'tilt.pkl'), 'rb') as tf:
            tilt = pickle.load(tf)
        with open(os.path.join(obsdir, 'pos.pkl'), 'rb') as pf:
            pos = pickle.load(pf)

        pos_data = pos[ax]
        point_error = 5
        pos_data_mean = np.mean(pos_data)
        ui.set_method('simplex')

        # Fit a line to get more reasonable errors
        init_staterror = np.zeros(len(pos_data)) + point_error
        ui.load_arrays(data_id,
                       pos['time'] - pos['time'][0],
                       pos_data - np.mean(pos_data),
                       init_staterror)
        # create a polynomial model component named 'ypoly'
        ui.polynom1d.ypoly
        ui.set_model(data_id, 'ypoly')
        ui.thaw(ypoly.c0, ypoly.c1)
        ui.fit(data_id)
        fit = ui.get_fit_results()
        calc_staterror = init_staterror * np.sqrt(fit.rstat)
        ui.set_staterror(data_id, calc_staterror)
        # Confirm those errors
        ui.fit(data_id)
        fit = ui.get_fit_results()
        if abs(fit.rstat - 1) > .2:
            raise ValueError('Reduced statistic not close to 1 for error calc')

        # Load up data to do the real model fit
        fit_times = pos['time']
        tm_func = tilt_model(tilt, fit_times, user_pars=user_pars)

        ui.get_data(data_id).name = str(obsid)
        ui.load_user_model(tm_func, 'tiltm%d' % data_id)
        ui.add_user_pars('tiltm%d' % data_id, user_pars)
        ui.set_method('simplex')
        ui.set_model(data_id, 'tiltm%d' % data_id)
        ui.set_par('tiltm%d.diam' % data_id, 0)

        if fixed_pars is not None and ax in fixed_pars:
            for par in fixed_pars[ax]:
                ui.set_par('tiltm{}.{}'.format(0, par), fixed_pars[ax][par])
                ui.freeze('tiltm{}.{}'.format(0, par))

        if guess_pars is not None and ax in guess_pars:
            for par in guess_pars[ax]:
                ui.set_par('tiltm{}.{}'.format(0, par), guess_pars[ax][par])

        ui.show_all()
        # Fit the tilt model
        ui.fit(data_id)
        fitres = ui.get_fit_results()
        ui.confidence(data_id)
        myconf = ui.get_confidence_results()

        # save_fits(ax=ax, fit=fitres, conf=myconf, outdir=outdir)
        # plot_fits(ids, outdir=os.path.join(outdir, 'fit_plots'))

        axmod = dict(fit=fitres, conf=myconf)
        for idx, modpar in enumerate(myconf.parnames):
            # Strip the 'tiltm0.' model prefix to get the bare parameter name
            # (str.lstrip removes characters, not a prefix, so avoid it here).
            par = modpar.split('.', 1)[-1]
            axmod[par] = ui.get_par('tiltm0.%s' % par).val
            axmod["{}_parmax".format(par)] = myconf.parmaxes[idx]
            axmod["{}_parmin".format(par)] = myconf.parmins[idx]
        modelfit[ax] = axmod

        with open(model_file, 'wb') as mod_pick:
            pickle.dump(modelfit, mod_pick)

        obsfits.append(modelfit)
        plot_fits([dict(obsid=obsid, data_id=data_id, ax=ax)],
                  posdir=obsdir, outdir=outdir)

    return obsfits
def fit_evol(dateglob='20?????', rootdir='darkhist_peaknorm', outroot='',
             xmin=25.0, xmax=4000, conf=True, gauss=False):
    """Fit the smooth broken power-law dark-current model for each dark histogram file.

    dark_models, sbp, fit_sbp and fit_gauss_sbp are module-level names
    defined elsewhere in the original script.
    """
    results = {}
    fileglob = os.path.join(rootdir, '{}.dat'.format(dateglob))
    for i, filename in enumerate(glob(fileglob)):
        filedate = re.search(r'(\d{7})', filename).group(1)
        print("\n\n*************** {} *****************".format(filename))

        plt.figure(1)
        ui.load_data(1, filename, 2)
        data = ui.get_data()
        ui.ignore(None, xmin)
        ui.ignore(xmax, None)

        dark_models.xall = data.x
        # dark_models.imin = np.where(xall > xmin)[0][0]
        # dark_models.imax = np.where(xall > xmax)[0][0]

        # Initial parameter values and limits for the smooth broken power law
        sbp.gamma1 = 0.05
        sbp.gamma2 = 3.15
        sbp.gamma2.min = 2.
        sbp.gamma2.max = 4.
        sbp.x_b = 130.
        sbp.x_b.min = 100.
        sbp.x_b.max = 160.
        sbp.x_r = 50.
        ok = (data.x > 40) & (data.x < 60)
        sbp.ampl1 = np.mean(data.y[ok])

        if gauss:
            fit_gauss_sbp()
        else:
            fit_sbp()

        pars = (sbp.gamma1.val, sbp.gamma2.val, sbp.x_b.val, sbp.x_r.val,
                sbp.ampl1.val)
        fit_y = dark_models.smooth_broken_pow(pars, data.x)

        if conf:
            ui.set_conf_opt('numcores', 1)
            ui.conf()
            res = ui.get_conf_results()
            result = dict((x, getattr(res, x))
                          for x in ('parnames', 'parmins', 'parvals', 'parmaxes'))
            result['x'] = data.x
            result['y'] = data.y
            result['y_fit'] = fit_y
            results[filedate] = result

        if outroot is not None:
            ui.notice(0, xmax)
            ui.set_xlog()
            ui.set_ylog()
            ui.plot_fit()
            plt.xlim(1, 1e4)
            plt.ylim(0.5, 1e5)
            plt.grid(True)
            plt.xlabel('Dark current (e-/sec)')
            outfile = os.path.join(rootdir, '{}{}.png'.format(outroot, filedate))
            print('Writing', outfile)
            plt.savefig(outfile)

            if conf:
                outfile = os.path.join(rootdir, '{}{}.pkl'.format(outroot, filedate))
                print('Writing', outfile)
                with open(outfile, 'wb') as fh:
                    pickle.dump(result, fh, protocol=-1)

    if outroot is not None:
        outfile = os.path.join(rootdir, '{}fits.pkl'.format(outroot))
        print('Writing', outfile)
        with open(outfile, 'wb') as fh:
            pickle.dump(results, fh, protocol=-1)

    return results