def main():
    options,args = parser.parse_args(sys.argv[1:])
    if len(args) < 1:
        parser.print_help()
        sys.exit(45)

    run=args[0]

    c = shapesim.read_config(run)
    run = c['run']
    sim_config = c['sim_config']
    mcmc_config = c['mcmc_config']

    s2n_vals = c['s2n_vals']
    ns2n = len(s2n_vals)

    d = shapesim.get_wq_dir(run, bytrial=True, fs='local')
    if not os.path.exists(d):
        os.makedirs(d)
    d = shapesim.get_output_dir(run, sub='bytrial')
    if not os.path.exists(d):
        os.makedirs(d)

    groups=''
    if options.groups is not None:
        groups = 'group: [%s]' % options.groups

    smax=numpy.iinfo('i8').max
    for is2n in xrange(ns2n):
        s2n = s2n_vals[is2n]
        npair, nsplit = get_npair_nsplit(c, is2n)

        for isplit in xrange(nsplit):
            job_name='%s-%03i-%03i' % (run,is2n,isplit)

            # note the wq logs are local
            wqurl = shapesim.get_wq_url(run,0,is2n,itrial=isplit,fs='local')
            output = shapesim.get_output_url(run, 0, is2n, itrial=isplit)

            seed = numpy.random.randint(smax)

            wlog("writing wq script:",wqurl)
            with open(wqurl,'w') as fobj:
                d={'job_name':job_name,
                   'version':options.version,
                   'groups':groups,
                   'pri':options.priority,
                   'sim_config':sim_config,
                   'mcmc_config':mcmc_config,
                   's2n':s2n,
                   'npair':npair,
                   'seed':seed,
                   'output':output}
                wqscript=_wqtemplate % d
                fobj.write(wqscript)
def main():
    options, args = parser.parse_args(sys.argv[1:])
    if len(args) < 1:
        parser.print_help()
        sys.exit(45)

    simname = args[0]

    ss = shapesim.ShapeSim(simname)
    orient = ss.get("orient", "rand")
    if orient == "ring":
        numper_str = ""
    else:
        if len(args) < 2:
            parser.print_help()
            sys.exit(45)
        numper_str = args[1]

    if ss.fs != "hdfs":
        raise ValueError("This only works for HDFS right now; "
                         "would need to worry about making dirs")

    c = shapesim.read_config(simname)

    groups = options.groups
    if groups is None:
        groups = ""
    else:
        groups = "group: [%s]" % groups

    wqd = shapesim.get_cache_wq_dir(simname)
    if not os.path.exists(wqd):
        os.makedirs(wqd)

    extra = ""
    if options.bynode:
        extra = "mode: bynode\nN: 1"
    elif options.ncores is not None:
        ncores = int(options.ncores)
        extra = "mode: bycore1\nN: %d" % ncores

    for is2 in xrange(c["nums2"]):
        for ie in xrange(c["nume"]):
            job_name = "%s-%i-%i" % (simname, is2, ie)

            wqurl = shapesim.get_cache_wq_url(simname, is2, ie)
            wlog("writing wq script:", wqurl)
            with open(wqurl, "w") as fobj:
                wqscript = _wqtemplate % {
                    "job_name": job_name,
                    "simname": simname,
                    "is2": is2,
                    "ie": ie,
                    "numper": numper_str,
                    "groups": groups,
                    "extra": extra,
                    "pri": options.priority,
                }
                fobj.write(wqscript)
def main():
    options,args = parser.parse_args(sys.argv[1:])
    if len(args) < 1:
        parser.print_help()
        sys.exit(45)

    run=args[0]
    c = shapesim.read_config(run)
    cs = shapesim.read_config(c['sim'])

    group=options.group
    if group is not None:
        group = 'group: ['+group+']'
    else:
        group=''

    mode=''
    if options.bynode:
        mode='mode: bynode'

    extra=''
    if options.extra is not None:
        extra='\n'.join( options.extra.split(';') )

    wqd = shapesim.get_wq_dir(run, combine=True)
    if not os.path.exists(wqd):
        os.makedirs(wqd)

    if run[0:8] == 'gmix-fit':
        rstr=run.replace('gmix-fit','gmix')
    else:
        rstr=run

    n1 = cs['nums2']
    runtype = c['runtype']
    if runtype == 'byellip':
        n2 = cs['nume']
    else:
        n2 = shapesim.get_nums2n(c)

    for i1 in xrange(n1):
        for i2 in xrange(n2):
            job_name='%s-combine-%03i-%03i' % (rstr,i1,i2)

            wqurl = shapesim.get_wq_url(run,i1,i2,combine=True)
            wlog("writing wq script:",wqurl)
            with open(wqurl,'w') as fobj:
                wqscript=_wqtemplate % {'job_name':job_name,
                                        'run':run,
                                        'i1':i1,
                                        'i2':i2,
                                        'group':group,
                                        'mode':mode,
                                        'extra':extra,
                                        'pri':options.priority}
                fobj.write(wqscript)
def create_sim_wq_byellip(run, groups=None):
    import pbs
    c = read_config(run)
    objmodel = c['objmodel']
    psfmodel = c['psfmodel']

    if groups is not None:
        groups = 'groups: [%s]' % groups
    else:
        groups=''

    s2vals=sample_s2(c['mins2'],c['maxs2'],c['ns2'])
    evals=numpy.linspace(c['mine'],c['maxe'],c['nume'])

    for s2 in s2vals:
        for ellip in evals:
            extra = '%0.3f-%0.3f' % (s2,ellip)
            job_name='%s-%s' % (run,extra)
            wqurl = get_wq_url(run,objmodel,psfmodel,extra)
            opts = '--s2 %(s2)0.3f -e %(ellip)0.3f' % {'s2':s2,'ellip':ellip}

            eu.ostools.makedirs_fromfile(wqurl,verbose=True)
            wlog("writing wq script:",wqurl)
            with open(wqurl,'w') as fobj:
                wqscript=_wqtemplate % {'run':run,
                                        'job_name':job_name,
                                        'opts':opts,
                                        'groups':groups}
                fobj.write(wqscript)
def outfile(self, ellip, type='res'):
    f = get_simfile(self['run'], self['objmodel'], self['psfmodel'],
                    self['s2'], ellip, self['psf_ellip'], type=type)
    dir=os.path.dirname(f)
    if not os.path.exists(dir):
        wlog("Making output dir:",dir)
        try:
            os.makedirs(dir)
        except OSError:
            # probably a race condition; another process made the dir
            pass
    return f
def process(self):
    c=self['filternum']

    self.set_admom()

    ntrials = len(self['psf']['trials'])
    chi2arr=zeros(ntrials) + 1.e9

    gmlist=[]

    # make sure the sky level is positive for the EM fitter
    im=self.psf.copy()
    im_min = im.min()
    if im_min <= 0:
        im -= im_min
        sky=0.001*im.max()
        im += sky
    else:
        sky = im_min

    for i,trial in enumerate(self['psf']['trials']):
        #prior,width = self.get_prior_turb(trial)
        guess = self.get_em_guess(trial)
        if self['verbose']:
            wlog('guess')
            gmix_print(guess,title='guess:')

        gm = gmix_image.GMixEM(im,guess,
                               sky=sky,
                               maxiter=4000,
                               tol=1.e-6,
                               coellip=False,
                               cocenter=False) # true required for deconv

        chi2arr[i] = gm.get_fdiff()
        if self['verbose']:
            gmix_print(gm.pars,title='pars:')
            wlog("chi2/pdeg:",chi2arr[i])
        gmlist.append(gm)

    # keep the trial with the smallest chi^2
    w=chi2arr.argmin()
    self.gm = gmlist[w]

    # check and report the selected best fit, not the last trial
    flags = self.gm.get_flags()
    if flags != 0:
        printflags('em',flags)
        raise ValueError("error")

    if self['verbose']:
        gmix_print(self.gm.pars,title='popt:')
        wlog("\n")
        wlog("numiter gmix:",self.gm.numiter)
def process(self):
    c=self['filternum']

    self.set_admom()

    ntrials = len(self['psf']['trials'])
    chi2arr=zeros(ntrials) + 1.e9

    gmlist=[]

    for i,trial in enumerate(self['psf']['trials']):
        ngauss=trial['ngauss']
        if ngauss==3:
            prior,width = self.get_prior_turb(trial)
        elif ngauss==2:
            prior,width = self.get_prior_2generic(trial)
        else:
            raise ValueError("only have ngauss in [2,3] now")

        #prior,width = self.get_prior_2psfield(trial)
        #prior,width = self.get_prior_test(trial)

        if self['verbose']:
            print_pars(prior,front="guess: ")

        gm = gmix_image.GMixFitCoellip(self.psf, self['skysig'],
                                       prior,width,
                                       verbose=False)
        #gm = gmix_image.gmix_fit.GMixFitCoellipNoPrior(self.psf, self['skysig'],
        #                                               prior, verbose=False)

        if gm.flags != 0:
            gmix_image.printflags("flags:",gm.flags)
            raise ValueError("error")

        chi2arr[i] = gm.get_chi2per(gm.popt)
        if self['verbose']:
            print_pars(gm.popt,front="pars: ")
            print_pars(gm.perr,front="perr: ")
            wlog("chi2/pdeg:",chi2arr[i])
        gmlist.append(gm)

    # keep the trial with the smallest chi^2
    w=chi2arr.argmin()
    self.gm = gmlist[w]

    wlog('w:',w)
    if self['verbose']:
        print_pars(chi2arr,front='chi2/deg: ')
        wlog("\n")
        # report the selected best fit, not the last trial
        print_pars(self.gm.popt,front='popt: ')
        print_pars(self.gm.perr,front='perr: ')
        #wlog("s2n:",s2n)
        wlog("numiter gmix:",self.gm.numiter)

    ngauss=(len(self.gm.popt)-4)/2
def do_regauss(self, ci, verbose_local=True):
    """
    ci is a convolved image
    """
    rgkeys = self.rgkeys

    rgkeys['guess'] = (ci['cov_admom'][0] + ci['cov_admom'][2])/2
    rgkeys['guess_psf'] = (ci['cov_psf_admom'][0] + ci['cov_psf_admom'][2])/2

    if 's2n' in self:
        rgkeys['sigsky'] = self['sigsky']

    if verbose_local:
        wlog("running regauss")
    rgkeys['verbose'] = verbose_local
    rg = admom.ReGauss(ci.image, ci['cen'][0], ci['cen'][1], ci.psf,
                       **rgkeys)
    rg.do_all()

    self.add_unweighted_truth(rg, ci.image0)

    if self['verbose']:
        wlog("uwcorrstats")
        wlog(rg['uwcorrstats'])

    if rg['rgstats'] is None or rg['rgcorrstats'] is None:
        raise RuntimeError("Failed to run regauss")
    if rg['rgstats']['whyflag'] != 0:
        raise RuntimeError("regauss failed: '%s'" % rg['rgstats']['whystr'])

    # copy out the data
    output = self.copy_output(ci, rg)
    return output
def write_images(self, ellip, ci):
    f=self.outfile(ellip, 'image')
    wlog("writing image file:",f)
    eu.io.write(f, ci.image, clobber=True)

    f=self.outfile(ellip, 'image0')
    wlog("writing image0 file:",f)
    eu.io.write(f, ci.image0, clobber=True)

    f=self.outfile(ellip, 'psf')
    wlog("writing psf file:",f)
    eu.io.write(f, ci.psf, clobber=True)
def cache_window(self):
    import sdsspy
    w=sdsspy.window.Window()

    if self.verbose:
        wlog("Caching window flist")
    flist = w.read('flist')

    if self.verbose:
        wlog("Extracting matching fields")
    rfwhm = flist['psf_fwhm'][:, self['filternum']]
    w,=numpy.where(  (rfwhm > self['min_seeing'])
                   & (rfwhm < self['max_seeing'])
                   & (flist['score'] > 0.1)
                   & (flist['rerun'] == '301') )
    if w.size == 0:
        raise ValueError("No runs found with seeing in [%0.3f,%0.3f]"
                         % (self['min_seeing'], self['max_seeing']))
    if self.verbose:
        wlog(" Found:",w.size)

    self.flist = flist[w]
def run_ellip(self, ellip): wlog("ellip:",ellip) outfile = self.outfile(ellip) wlog("outfile:",outfile) # get a n ew RandomConvolvedImage with this ellip wlog("getting convolved image") #if 's2n' in self: if True: trials_file = self.outfile(ellip,type='trials') #output, trials = self.do_regauss_trials(ci) output, trials = self.do_regauss_trials(ellip) eu.io.write(trials_file, trials, clobber=True) else: ci = self.new_convolved_image(ellip) output = self.do_regauss(ci) eu.io.write(outfile, output, clobber=True) if self['debug']: self.write_images(ellip, ci)
def get_prior_2psfield(self, trial):
    """
    Take two of the guesses from the psfield sigma1,sigma2
    """
    ngauss=trial['ngauss']
    eguess=trial['eguess']
    uniform_p=trial['uniform_p']
    randomize=trial['randomize']

    if ngauss != 2:
        raise ValueError("ngauss==2 only for now")

    npars=2*ngauss+4
    prior=zeros(npars)
    width=zeros(npars) + 1.e20

    prior[0] = self.amres['row']
    prior[1] = self.amres['col']

    if eguess is not None:
        prior[2],prior[3] = eguess
    else:
        prior[2] = self.amres['e1']
        prior[3] = self.amres['e2']

    T = self.amres['Irr'] + self.amres['Icc']

    c = self['filternum']
    T1 = 2*self.psfield['psf_sigma1'][0,c]**2
    T2 = 2*self.psfield['psf_sigma2'][0,c]**2

    Tmax = T2
    Tfrac1 = T1/T2
    prior[4] = Tmax
    prior[5] = Tfrac1

    if uniform_p:
        wlog(" uniform p")
        prior[6] = self['counts']/ngauss
        prior[7] = self['counts']/ngauss
    else:
        # psf_b is p2/p1
        pvals = array([self.psfield['psf_b'][0,c], 1.])
        pvals /= self['counts']*pvals.sum()
        prior[6] = pvals[0]
        prior[7] = pvals[1]

    if randomize:
        wlog(" randomizing")
        e1start=prior[2]
        e2start=prior[3]
        prior[2],prior[3] = randomize_e1e2(e1start,e2start)

        prior[4] += prior[4]*0.05*(randu()-0.5)
        prior[5] += prior[5]*0.05*(randu()-0.5)
        prior[6] += prior[6]*0.05*(randu()-0.5)
        prior[7] += prior[7]*0.05*(randu()-0.5)

        pvals = prior[ [6,7] ].copy()
        pvals *= self['counts']/pvals.sum()
        prior[6] = pvals[0]
        prior[7] = pvals[1]

    return prior, width
def process_nlsolver(self): wlog("Using jarvis solver") c=self['filternum'] self.set_admom() ntrials = len(self['psf']['trials']) chi2arr=zeros(ntrials) + 1.e9 gmlist=[] for i,trial in enumerate(self['psf']['trials']): ngauss=trial['ngauss'] if ngauss==3: prior,width = self.get_prior_turb(trial) elif ngauss==2: prior,width = self.get_prior_2generic(trial) else: raise ValueError("only have ngauss in [2,3] now") #prior,width = self.get_prior_2psfield(trial) #prior,width = self.get_prior_test(trial) #prior,width = self.get_prior_2generic(trial) if self['verbose']: print_pars(prior,front="guess: ") # kludge using skysig=1 for now maxiter=2000 psf=None gm=gmix_image.gmix_nlsolve.GMixCoellipSolver(self.psf, prior, 1., maxiter, psf, False) success=gm.get_success() if not success: raise ValueError("error") chi2per=gm.get_chi2per() # kludge chi2per /= self['skysig']**2 chi2arr[i] = chi2per if self['verbose']: popt = gm.get_pars() cov=gm.get_pcov() perr=sqrt(diag(cov)) print_pars(popt,front="pars: ") # kludge perr *= self['skysig'] print_pars(perr,front="perr: ") wlog("chi2/pdeg:",chi2arr[i]) gmlist.append(gm) w=chi2arr.argmin() self.gm = gmlist[w] wlog('w:',w) if self['verbose']: print_pars(chi2arr,front='chi2/deg: ') wlog("\n") print_pars(gm.get_pars(),front='popt: ') cov=gm.get_pcov() perr=sqrt(diag(cov)) # kludge perr *= self['skysig'] print_pars(perr,front='perr: ')
def run_many_ellip(self):
    for ellip in self.ellipvals():
        self.run_ellip(ellip)
    wlog("Done many_ellip")
def psfield_compare_model(rsp=None, generator='filter', next=False):
    """
    compare psf reconstructions with the best fit models in the 6th header.
    """
    import images
    import biggles
    from scipy.ndimage.filters import gaussian_filter
    import fimage

    filter='r'
    fnum=2
    if rsp is None:
        rsp=RandomSDSSPSF(1.3,1.5,filter,verbose=True)
        image,meta = rsp.next(meta=True)
    else:
        if rsp.psf is None:
            image,meta = rsp.current(meta=True)
        else:
            if next:
                image,meta = rsp.next(meta=True)
            else:
                image,meta = rsp.current(meta=True)

    cen = [(image.shape[0]-1)/2, (image.shape[1]-1)/2]

    extra='_2G'
    a = 1.0
    b = meta['psf_b'+extra][0,fnum]
    s1 = meta['psf_sigma1'+extra][0,fnum]
    s2 = meta['psf_sigma2'+extra][0,fnum]

    if generator == 'fimage':
        fake1 = fimage.makeimage('gauss',image.shape,cen,s1**2,0,s1**2,counts=1)
        fake2 = fimage.makeimage('gauss',image.shape,cen,s2**2,0,s2**2,counts=1)

        a /= (s1**2 + b*s2**2)
        fake = a*( s1**2*fake1 + b*s2**2*fake2 )
    elif generator == 'imsim':
        import imsim
        fake1 = imsim.mom2disk('gauss',s1**2,0,s1**2,image.shape,cen=cen,counts=1)
        fake2 = imsim.mom2disk('gauss',s2**2,0,s2**2,image.shape,cen=cen,counts=1)

        a /= (s1**2 + b*s2**2)
        fake = a*( s1**2*fake1 + b*s2**2*fake2 )
    elif generator == 'filter':
        a /= (s1**2 + b*s2**2)

        fake1 = numpy.zeros_like(image)
        # convolve delta function with gaussians
        fake1[cen[0],cen[1]] = 1
        fake = a * (  s1**2 * gaussian_filter(fake1, (s1,s1))
                    + b*s2**2*gaussian_filter(fake1, (s2,s2)) )
    else:
        raise ValueError("unknown generator type: '%s'" % generator)

    wlog("image counts:",image.sum(),"model counts:",fake.sum())

    resid = fake-image
    wlog("summed residuals:",resid.sum())

    maxval = max( image.max(), fake.max() )
    minval = 0.0
    levels=7

    tab=biggles.Table(2,3)
    #tab=biggles.Table(3,2)
    implt=images.view(image, levels=levels, show=False, min=minval, max=maxval)
    fakeplt=images.view(fake, levels=levels, show=False, min=minval, max=maxval)
    residplt=images.view(resid, show=False, min=minval, max=maxval)

    #sigma = numpy.sqrt((res['Irr']+res['Icc'])/2.0)
    #lab = biggles.PlotLabel(0.1,0.9,r'$\sigma$: %0.3f' % sigma, fontsize=4, halign='left')
    #fakeplt.add(lab)

    implt.title='original'
    fakeplt.title='gaussian '+generator
    residplt.title='residuals'

    # cross-sections
    imrows = image[:,cen[1]]
    imcols = image[cen[0],:]
    fakerows = fake[:,cen[1]]
    fakecols = fake[cen[0],:]
    resrows = resid[:,cen[1]]
    rescols = resid[cen[0],:]

    himrows = biggles.Histogram(imrows, color='blue')
    himcols = biggles.Histogram(imcols, color='blue')
    hfakerows = biggles.Histogram(fakerows, color='orange')
    hfakecols = biggles.Histogram(fakecols, color='orange')
    hresrows = biggles.Histogram(resrows, color='red')
    hrescols = biggles.Histogram(rescols, color='red')

    himrows.label = 'image'
    hfakerows.label = 'model'
    hresrows.label = 'resid'
    key = biggles.PlotKey(0.1,0.9,[himrows,hfakerows,hresrows])

    rplt=biggles.FramedPlot()
    rplt.add( himrows, hfakerows, hresrows, key )

    cplt=biggles.FramedPlot()
    cplt.add( himcols, hfakecols, hrescols )

    rplt.aspect_ratio=1
    cplt.aspect_ratio=1

    tab[0,0] = implt
    tab[0,1] = fakeplt
    tab[0,2] = residplt
    tab[1,0] = rplt
    tab[1,1] = cplt

    #tab[0,0] = implt
    #tab[0,1] = fakeplt
    #tab[1,0] = residplt
    #tab[1,1] = rplt
    #tab[2,0] = cplt

    tab.show()

    return rsp
def get_prior_2generic(self, trial):
    """
    generic guesses
    """
    wlog("using generic guesses")
    ngauss=trial['ngauss']
    eguess=trial['eguess']
    uniform_p=trial['uniform_p']
    randomize=trial['randomize']

    if ngauss != 2:
        raise ValueError("ngauss==2 only for now")

    npars=2*ngauss+4
    prior=zeros(npars)
    width=zeros(npars) + 1.e20

    prior[0] = self.amres['row']
    prior[1] = self.amres['col']

    if eguess is not None:
        prior[2],prior[3] = eguess
    else:
        prior[2] = self.amres['e1']
        prior[3] = self.amres['e2']

    T = self.amres['Irr'] + self.amres['Icc']

    Tmax = T*3
    T1 = T*0.5
    Tfrac1 = T1/Tmax
    prior[4] = Tmax
    prior[5] = Tfrac1

    if uniform_p:
        wlog(" uniform p")
        prior[6] = self['counts']/ngauss
        prior[7] = self['counts']/ngauss
    else:
        prior[6] = self['counts']*0.2
        prior[7] = self['counts']*0.8

    if randomize:
        wlog(" randomizing")
        e1start=prior[2]
        e2start=prior[3]
        prior[2],prior[3] = randomize_e1e2(e1start,e2start)

        prior[4] += prior[4]*0.05*(randu()-0.5)
        prior[5] += prior[5]*0.05*(randu()-0.5)
        prior[6] += prior[6]*0.05*(randu()-0.5)
        prior[7] += prior[7]*0.05*(randu()-0.5)

        pvals = prior[ [6,7] ].copy()
        pvals *= self['counts']/pvals.sum()
        prior[6] = pvals[0]
        prior[7] = pvals[1]

    return prior, width
def get_prior_turb(self, trial):
    ngauss=trial['ngauss']
    eguess=trial['eguess']
    uniform_p=trial['uniform_p']
    randomize=trial['randomize']

    if ngauss != 3:
        raise ValueError("ngauss==3 only for now")

    npars=2*ngauss+4
    prior=zeros(npars)
    width=zeros(npars)

    prior[0] = self.amres['row']
    prior[1] = self.amres['col']
    width[0] = 1000
    width[1] = 1000

    if eguess is not None:
        prior[2],prior[3] = eguess
    else:
        prior[2] = self.amres['e1']
        prior[3] = self.amres['e2']

    T = self.amres['Irr'] + self.amres['Icc']

    # turbulent psf guess
    Tmax = T*8.3
    Tfrac1 = 1.7/8.3
    Tfrac2 = 0.8/8.3
    prior[4] = Tmax
    prior[5] = Tfrac1
    prior[6] = Tfrac2

    if uniform_p:
        wlog(" uniform p")
        prior[7] = self['counts']/ngauss
        prior[8] = self['counts']/ngauss
        prior[9] = self['counts']/ngauss
    else:
        prior[7] = self['counts']*0.08
        prior[8] = self['counts']*0.38
        prior[9] = self['counts']*0.53

    # uninformative priors
    width[2] = 1000
    width[3] = 1000
    width[4] = 1000
    width[5:] = 1000

    if randomize:
        wlog(" randomizing")
        e1start=prior[2]
        e2start=prior[3]
        prior[2],prior[3] = randomize_e1e2(e1start,e2start)

        prior[4] += prior[4]*0.05*(randu()-0.5)
        prior[5] += prior[5]*0.05*(randu()-0.5)
        prior[6] += prior[6]*0.05*(randu()-0.5)
        prior[7] += prior[7]*0.05*(randu()-0.5)
        prior[8] += prior[8]*0.05*(randu()-0.5)
        prior[9] += prior[9]*0.05*(randu()-0.5)

    return prior, width
def compare_s2n(amrun, serun, matches=None):
    import biggles

    amc = collate.open_columns(amrun)
    sec = collate.open_columns(serun)

    if matches is None:
        wlog('reading am rid')
        arid = amc['rid'][:]
        wlog(' read: ',arid.size)
        wlog(' unique:',numpy.unique(arid).size)
        wlog('reading se rid')
        srid = sec['rid'][:]
        wlog(' read: ',srid.size)
        wlog(' unique:',numpy.unique(srid).size)

        wlog('matching rids')
        ma, ms = eu.numpy_util.match(arid, srid)
        if ma.size != arid.size:
            raise RuntimeError("matched only %d/%d" % (ma.size, arid.size))
    else:
        ma = matches['ma']
        ms = matches['ms']

    wlog("Reading am s2n, whyflag, Irr, Icc")
    am_s2n = amc['s2n'][:]
    am_irr = amc['Irr'][:]
    am_icc = amc['Icc'][:]
    #whyflag = amc['whyflag'][:]

    wlog("Reading se s2n,shear_flags")
    se_s2n = sec['shear_s2n'][:]
    seflags = sec['shear_flags'][:]

    #w=where1( (whyflag[ma] == 0) & (seflags[ms] == 0) )
    """
    w=where1(  (am_s2n[ma] > 0)
             & (am_irr[ma] > 0)
             & (am_icc[ma] > 0)
             & (seflags[ms] == 0) )
    """
    w=where1( (am_s2n[ma] > 0) & (seflags[ms] == 0) )
    msk = ms[w]
    mak = ma[w]

    wlog("min 'good' am s2n: ",am_s2n[mak].min())

    nperbin=100000
    wlog("binning nperbin:",nperbin)
    bs = eu.stat.Binner(se_s2n[msk], am_s2n[mak])
    bs.dohist(nperbin=nperbin, min=20.0, max=1000.0)
    bs.calc_stats()

    wlog("plotting")
    plt=eu.plotting.bscatter(bs['xmean'],bs['ymean'],yerr=bs['yerr'],
                             show=False,
                             xlabel=r'$s2n_{SH}$',ylabel=r'$s2n_{AM}$')

    coeff = numpy.polyfit(bs['xmean'], bs['ymean'], 1)
    poly=numpy.poly1d(coeff)

    flabt='m: %0.2f b: %0.3f' % (coeff[0],coeff[1])
    flab=biggles.PlotLabel(0.1,0.9,flabt,halign='left')
    plt.add(flab)

    ps = biggles.Curve(bs['xmean'], poly(bs['xmean']), color='blue')
    plt.add(ps)

    plt.show()
    plt.write_eps('/direct/astro+u/esheldon/tmp/compare-s2n.eps')

    wlog("binning sigma")
    sig=sqrt((am_irr[mak] + am_icc[mak])/2.)
    bs2 = eu.stat.Binner(sig, am_s2n[mak]/se_s2n[msk])
    #bs2.dohist(nperbin=nperbin)
    bs2.dohist(binsize=0.1, min=1, max=20)
    bs2.calc_stats()

    wlog("plotting")
    wp=where1(bs2['xmean'] > 0)
    plt2=eu.plotting.bscatter(bs2['xmean'][wp],
                              bs2['ymean'][wp],
                              yerr=bs2['yerr'][wp],
                              show=False,
                              xlabel=r'$\sigma_{AM} [pixels]$',
                              ylabel=r'$s2n_{AM}/s2n_{SH}$')

    flatv = ones(bs2['xmean'][wp].size)*coeff[0]
    flat = biggles.Curve(bs2['xmean'][wp], flatv, color='blue')
    plt2.add(flat)

    plt2.show()
    plt2.write_eps('/direct/astro+u/esheldon/tmp/compare-s2n-vs-sigma.eps')

    return {'ma':ma, 'ms':ms}
def main():
    options,args = parser.parse_args(sys.argv[1:])
    if len(args) < 1:
        parser.print_help()
        sys.exit(45)

    run=args[0]
    c = shapesim.read_config(run)
    cs = shapesim.read_config(c['sim'])

    if options.bytrial:
        if 'stack-' in run:
            ntrial=c['nsplit']
        else:
            orient=cs.get('orient','rand')
            if orient == 'ring':
                ntrial = cs['nsplit']
            else:
                ntrial = cs['ntrial']

    wqd = shapesim.get_wq_dir(run, bytrial=options.bytrial)
    if not os.path.exists(wqd):
        os.makedirs(wqd)

    extra=''
    if options.bynode:
        extra='mode: bynode\nN: 1'
    elif options.ncores is not None:
        ncores=int(options.ncores)
        extra='mode: bycore1\nN: %d' % ncores

    if run[0:8] == 'gmix-fit':
        rstr=run.replace('gmix-fit','gmix')
    else:
        rstr=run

    n1 = shapesim.get_numT(cs)
    runtype = c['runtype']
    if runtype == 'byellip':
        n2 = cs['nume']
    else:
        n2 = shapesim.get_nums2n(c)

    for i1 in xrange(n1):
        for i2 in xrange(n2):
            groups=''
            if options.i2new is not None and i2 <= int(options.i2new):
                groups='group: [new,new2]'
            elif options.i2new1 is not None and i2 <= int(options.i2new1):
                groups='group: [new]'
            elif options.i2new2 is not None and i2 <= int(options.i2new2):
                groups='group: [new2]'
            elif options.groups is not None:
                groups = 'group: [%s]' % options.groups

            if options.bytrial:
                for itrial in xrange(ntrial):
                    job_name='%s-%03i-%03i-%02i' % (rstr,i1,i2,itrial)

                    wqurl = shapesim.get_wq_url(run,i1,i2,itrial=itrial)
                    wlog("writing wq script:",wqurl)
                    with open(wqurl,'w') as fobj:
                        d={'job_name':job_name,
                           'run':run,
                           'i1':i1,
                           'i2':i2,
                           'itrial':itrial,
                           'groups':groups,
                           'extra':extra,
                           'pri':options.priority}
                        wqscript=_wqtemplate_bytrial % d
                        fobj.write(wqscript)
            else:
                job_name='%s-%03i-%03i' % (rstr,i1,i2)

                wqurl = shapesim.get_wq_url(run,i1,i2)
                wlog("writing wq script:",wqurl)
                with open(wqurl,'w') as fobj:
                    wqscript=_wqtemplate % {'job_name':job_name,
                                            'run':run,
                                            'i1':i1,
                                            'i2':i2,
                                            'groups':groups,
                                            'extra':extra,
                                            'pri':options.priority}
                    fobj.write(wqscript)
def run_ellip(self, ellip):
    """
    Do nrand realizations for each ellipticity value.

    If convergence fails, retry up to ntrial times.
    """
    wlog("ellip:",ellip)
    outfile = self.outfile(ellip)
    wlog("outfile:",outfile)

    dir=os.path.dirname(outfile)
    if not os.path.exists(dir):
        wlog("Making output dir:",dir)
        try:
            os.makedirs(dir)
        except OSError:
            # probably a race condition; another process made the dir
            pass

    robj=None

    # get a new RandomConvolvedImage with this ellip
    rci = self.new_random_convolved_image(ellip)

    # do all the randoms, allowing for a certain number of failures to
    # retry
    randi=0
    trial = 0
    nrand = self['nrand']
    ntrial = self['ntrial']
    while randi < nrand and trial < ntrial:
        # moments of the object pre-convolution
        Tguess0 = rci.objpars['Irr_meas'] + rci.objpars['Icc_meas']
        # moments after convolution
        Tguess=rci['Irr'] + rci['Icc']
        # moments of the psf
        Tguess_psf = rci['Irr_psf'] + rci['Icc_psf']

        # get moments before convolution
        amtrue = admom.admom(rci.image0, rci['cen'][0], rci['cen'][1],
                             Tguess=Tguess0)
        if amtrue['whyflag'] == 0:
            # do regauss here
            rg = admom.ReGauss(rci.image, rci['cen'][0], rci['cen'][1],
                               rci.psf,
                               Tguess=Tguess, Tguess_psf=Tguess_psf)
            rg.do_all()

            if rg['rgstats'] is not None and rg['rgcorrstats'] is not None:
                if rg['rgstats']['whyflag'] == 0:
                    # copy out the data
                    output = self.copy_output(amtrue, rg, rci['theta'])
                    if robj is None:
                        robj = eu.sfile.Open(outfile, 'w')
                    robj.write(output)

                    # only now do we increment randi
                    randi += 1

        trial += 1
        if randi < nrand and trial < ntrial:
            rci = self.new_random_convolved_image(ellip)

    if robj is not None:
        robj.close()

    wlog("ntrial:",trial," nfail:",trial-randi)
    wlog("randi/nrand: %s/%s" % (randi,nrand))
    if randi != nrand:
        wlog("Exceeded max trials, failed to get all",nrand,"realizations")