def shape_chi(env, models, model0, frac='1sigma'):
    """Compare the shape parameters (as returned by _shape) of each model's
    kappa map with those of the reference model0, using pixels from roughly
    one cell size out to just beyond the outermost image. Returns dist_range
    summaries of the two parameter differences over the ensemble."""
    ns, ds = [[], []], [[], []]
    for m in models:
        nT, dT = 0, 0
        nP, dP = 0, 0
        for m1, m2 in izip(m['obj,data'], model0['obj,data']):
            obj,  data  = m1
            obj0, data0 = m2
            rs = [abs(img.pos) for src in obj.sources for img in src.images]
            rmin, rmax = np.amin(rs), np.amax(rs)
            w = (abs(obj.basis.ploc) >= obj.basis.top_level_cell_size * 0.9) \
              * (abs(obj.basis.ploc) <= (rmax + obj.basis.top_level_cell_size * 0.5))
            t0, p0 = _shape(data0['kappa'][w], obj0.basis.ploc[w])
            t,  p  = _shape(data['kappa'][w],  obj.basis.ploc[w])
            #dTheta = lambda t0,t1 = (t0-t1) if np.abs(t0-t1) < np.pi else 2*np.pi-(t0-t1)
            nT += (t - t0)**2
            nT  = (t - t0)      # NB: overwrites the squared sum above, so ns[0]
            dT += t0**2         # ends up holding the raw difference of the last object.
            nP += (p - p0)**2
            nP  = (p - p0)      # NB: same overwrite as for nT.
            dP += p0**2
        ns[0].append(nT)
        ds[0].append(dT)
        ns[1].append(nP)
        ds[1].append(dP)
    return dist_range(np.array(ns[0]), frac), dist_range(np.array(ns[1]), frac)
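# The commented-out dTheta lambda in shape_chi above is syntactically invalid
# ('=' where a ':' belongs). If one wanted to restore that idea, a wrapped
# angular difference could look like the sketch below; the name and its use
# are assumptions, nothing in this module calls it.
def _dtheta_sketch(t0, t1):
    """Signed difference t1 - t0 folded into the interval (-pi, pi]."""
    d = (t1 - t0) % (2 * np.pi)
    return d - 2 * np.pi if d > np.pi else d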
def kappa_profile_chi2(env, models, model0, frac='1sigma'):
    """Fractional chi^2-like residual of each model's radial kappa(R) profile
    against the reference model0, evaluated between ~1.6 cell sizes and the
    outermost image radius. Returns a dist_range summary over the ensemble."""
    n_max, d_max = 0, 0             # currently unused
    n_min, d_min = np.inf, np.inf
    ns, ds = [], []
    for m in models:
        n, d = 0, 0
        for m1, m2 in izip(m['obj,data'], model0['obj,data']):
            obj,  data  = m1
            obj0, data0 = m2
            rs = [abs(img.pos) for src in obj.sources for img in src.images]
            #rs = [abs(img.pos) for src in obj.sources for img in src.images if img.parity_name != 'max']
            rmin, rmax = np.amin(rs), np.amax(rs)
            if 0:
                b = 0
            else:
                rmin = obj.basis.top_level_cell_size * 1.6
                b = np.argmin(abs(data['R'] - rmin))
            e = np.argmin(abs(data['R'] - rmax))
            v0 = data0['kappa(R)'][b:e+1]
            v1 = data['kappa(R)'][b:e+1]
            n += np.sum((v1 - v0)**2)
            d += np.sum(v0**2)
            #d += len(v0)
        ns.append(n)
        ds.append(d)
    nd = array(ns) / array(ds)
    return dist_range(nd, frac)
def kappa_chi2(env, models, model0, frac='1sigma'):
    """Fractional chi^2-like residual of each model's pixelated kappa map
    against the reference model0, restricted to pixels between roughly one
    cell size and just beyond the outermost non-maximum-parity image.
    Returns a dist_range summary over the ensemble."""
    n_max, d_max = 0, 0             # only updated in the commented-out code below
    n_min, d_min = np.inf, np.inf
    ns, ds = [], []
    for m in models:
        n, d = 0, 0
        for m1, m2 in izip(m['obj,data'], model0['obj,data']):
            obj,  data  = m1
            obj0, data0 = m2
            rs = [abs(img.pos) for src in obj.sources for img in src.images if img.parity_name != 'max']
            rmin, rmax = np.amin(rs), np.amax(rs)
            #w = (abs(obj.basis.rs) >= rmin) * (abs(obj.basis.rs) <= rmax)
            #w = abs(obj.basis.rs) <= rmax
            w = (abs(obj.basis.ploc) >= obj.basis.top_level_cell_size * 0.9) \
              * (abs(obj.basis.ploc) <= (rmax + obj.basis.top_level_cell_size * 0.5))
            #b = np.argmin(abs(data['R'] - rmin))
            #e = np.argmin(abs(data['R'] - rmax))
            #v0 = data0['kappa'][b:e+1]
            #v1 = data['kappa'][b:e+1]
            v0 = data0['kappa'][w]
            v1 = data['kappa'][w]
            n += np.sum((v1 - v0)**2)
            d += np.sum(v0**2)
        ns.append(n)
        ds.append(d)
        #n_max,d_max = np.amax([n,n_max]), np.amax([d,d_max])
        #n_min,d_min = np.amin([n,n_min]), np.amin([d,d_min])
    nd = array(ns) / array(ds)
    return dist_range(nd, frac)
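# Minimal standalone sketch (synthetic data; not part of the module) of the
# per-model statistic that kappa_chi2 and kappa_profile_chi2 accumulate: the
# fractional squared residual sum((v1 - v0)**2) / sum(v0**2) against the
# reference model0, collected over the ensemble and then summarized with
# dist_range. np.percentile stands in here for whatever dist_range reports.
def _fractional_residual_sketch(n_models=200, seed=0):
    rng = np.random.RandomState(seed)
    v0 = np.linspace(1.0, 0.1, 20)                      # stand-in for model0's kappa profile
    ensemble = [v0 * (1 + 0.05 * rng.randn(v0.size)) for _ in xrange(n_models)]
    nd = np.array([np.sum((v1 - v0)**2) / np.sum(v0**2) for v1 in ensemble])
    return np.percentile(nd, [50, 84, 16])              # median and ~1sigma bounds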
def _data_error_plot(models, X, Y, **kwargs):
    """Plot the ensemble median of data[y_prop][y_units] against
    data[x_prop][x_units] with asymmetric error bars from dist_range,
    separating accepted/rejected/untagged models and optionally marking
    the image radii. Returns a list of [xs, avg, errm, errp] per curve."""
    with_legend = False
    use = [0, 0, 0]

    if isinstance(X, basestring): X = [X, None]
    if isinstance(Y, basestring): Y = [Y, None]
    x_prop, x_units = X
    y_prop, y_units = Y

    ret_list = []

    every        = kwargs.pop('every', 1)
    upto         = kwargs.pop('upto', len(models))
    mark_images  = kwargs.pop('mark_images', True)
    hilite_model = kwargs.pop('hilite_model', None)
    hilite_color = kwargs.pop('hilite_color', 'm')
    yscale       = kwargs.pop('yscale', 'log')
    xscale       = kwargs.pop('xscale', 'linear')
    xlabel       = kwargs.pop('xlabel', None)
    ylabel       = kwargs.pop('ylabel', None)
    sigma        = kwargs.pop('sigma', '1sigma')

    kwargs.setdefault('color', 'k')
    kwargs.setdefault('marker', '.')
    kwargs.setdefault('ls', '-')

    normal_kw   = {'zorder': 0,    'drawstyle': 'steps', 'alpha': 1.0}
    hilite_kw   = {'zorder': 1000, 'drawstyle': 'steps', 'alpha': 1.0, 'lw': 4, 'ls': '--'}
    accepted_kw = {'zorder': 500,  'drawstyle': 'steps', 'alpha': 0.5}

    normal = []
    hilite = []
    accepted = []

    #imgs = set()
    imgs = defaultdict(set)
    xmin, xmax = np.inf, -np.inf
    ymin, ymax = np.inf, -np.inf

    # Collect the y curves per object and per acceptance tag.
    objplot = defaultdict(dict)
    for mi in xrange(0, upto, every):
        m = models[mi]

        si = m.get('accepted', 2)
        tag = ''
        if si == False: tag = 'rejected'
        if si == True:  tag = 'accepted'

        for [obj, data] in m['obj,data']:
            try:
                xs = data[x_prop][x_units]
                ys = data[y_prop][y_units]

                xlabel = _axis_label(xs, x_units) if not xlabel else xlabel
                ylabel = _axis_label(ys, y_units) if not ylabel else ylabel

                objplot[obj].setdefault(tag, {'ys': [], 'xs': None})
                objplot[obj][tag]['ys'].append(ys)
                objplot[obj][tag]['xs'] = xs

                #objplot[obj].setdefault('%s:xs'%tag, xs)
                #objplot[obj].setdefault('%s:ymax'%tag, ys)
                #objplot[obj].setdefault('%s:ymin'%tag, ys)
                #objplot[obj].setdefault('%s:ysum'%tag, np.zeros_like(ys))
                #objplot[obj].setdefault('%s:count'%tag, 0)
                #objplot[obj]['%s:ymax'%tag] = np.amax((objplot[obj]['%s:ymax'%tag], ys), axis=0)
                #objplot[obj]['%s:ymin'%tag] = np.amin((objplot[obj]['%s:ymin'%tag], ys), axis=0)
                #objplot[obj]['%s:ysum'%tag] += ys
                #objplot[obj]['%s:count'%tag] += 1

                if mark_images:
                    for i, src in enumerate(obj.sources):
                        for img in src.images:
                            imgs[i].add(convert('arcsec to %s' % x_units, np.abs(img.pos), obj.dL, data['nu']))

            except KeyError as bad_key:
                Log("Missing information for object %s with key %s. Skipping plot." % (obj.name, bad_key))
                continue

            use[si] = 1
            s = _styles[si]

            #xmin, xmax = min(xmin, amin(data[X])), max(xmax, amax(data[X]))
            #ymin, ymax = min(ymin, amin(data[Y])), max(ymax, amax(data[Y]))

    # Draw the median curve with asymmetric errors for each tag.
    for i, tag in enumerate(['rejected', 'accepted', '']):
        for k, v in objplot.iteritems():
            if tag not in v: break
            #if not v.has_key('%s:count'%tag): break

            avg, errp, errm = dist_range(v[tag]['ys'], sigma=sigma)
            errp = errp - avg
            errm = avg - errm
            #s = np.sort(v[tag]['ys'], axis=0)
            #avg = s[len(s)//2] if len(s)%2==1 else (s[len(s)//2] + s[len(s)//2+1])/2
            #avg = np.median(v[tag]['ys'], axis=0)
            #errp = s[len(s) * .841] - avg
            #errm = avg - s[len(s) * .159]
            #errp = np.amax(v[tag]['ys'], axis=0) - avg
            #errm = avg - np.amin(v[tag]['ys'], axis=0)
            #errp = errm = np.std(v[tag]['ys'], axis=0, dtype=np.float64)
            xs = v[tag]['xs']
            #avg  = v['%s:ysum'%tag] / v['%s:count'%tag]
            #errp = v['%s:ymax'%tag] - avg
            #errm = avg - v['%s:ymin'%tag]

            ret_list.append([xs, avg, errm, errp])

            yerr = (errm, errp) if not np.all(errm == errp) else None
            if tag == 'rejected':
                pl.errorbar(xs, avg, yerr=yerr, c=_styles[0]['c'], zorder=_styles[0]['z'])
            else:
                pl.errorbar(xs, avg, yerr=yerr, **kwargs)

    pl.xscale(xscale)
    pl.yscale(yscale)

    si = style_iterator()
    for k, v in imgs.iteritems():
        lw, ls, c = si.next()
        for img_pos in v:
            pl.axvline(img_pos, c=c, ls=ls, lw=lw, zorder=-2, alpha=0.5)

    #if use[0] or use[1]:
    #    lines  = [s['line']  for s,u in zip(_styles, use) if u]
    #    labels = [s['label'] for s,u in zip(_styles, use) if u]
    #    pl.legend(lines, labels)
    if use[0]:
        lines  = [_styles[0]['line']]
        labels = [_styles[0]['label']]
        pl.legend(lines, labels)

    #axis('scaled')
    if xlabel: pl.xlabel(xlabel)
    if ylabel: pl.ylabel(ylabel)

    pl.xlim(xmin=pl.xlim()[0] - 0.01 * (pl.xlim()[1] - pl.xlim()[0]))
    #pl.ylim(0, ymax)

    return ret_list
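# Standalone sketch (synthetic input, assumed names) of the error-band logic in
# _data_error_plot: dist_range yields (median, upper, lower) per x bin, which
# the plot turns into an asymmetric yerr = (errm, errp) pair for pl.errorbar.
# Percentiles are used here as a stand-in for dist_range's internals.
def _error_band_sketch(ys):
    """ys: 2-D array-like, one row per model, one column per x bin."""
    ys = np.asarray(ys, dtype=float)
    avg  = np.median(ys, axis=0)
    errp = np.percentile(ys, 84, axis=0) - avg
    errm = avg - np.percentile(ys, 16, axis=0)
    return avg, errm, errp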
def _hist(env, data_key, **kwargs):
    """Histogram the per-model scalar stored under data_key (e.g. H0), split
    by acceptance tag, and optionally mark the dist_range median and interval
    with vertical lines."""
    models     = kwargs.pop('models', env.models)
    obj_index  = kwargs.pop('obj_index', 0)
    key        = kwargs.pop('key', 'accepted')
    label      = kwargs.pop('label', None)
    color      = kwargs.pop('color', None)
    xlabel     = kwargs.pop('xlabel', data_key)
    ylabel     = kwargs.pop('ylabel', r'Count')
    sigma      = kwargs.pop('sigma', '1sigma')
    mark_sigma = kwargs.pop('mark_sigma', True)

    # Select a list to append to based on the 'accepted' property.
    l = [[], [], []]
    for m in models:
        # For H0 we only have to look at one model because the others are the same.
        obj, data = m['obj,data'][obj_index]
        if data.has_key(data_key):
            l[m.get(key, 2)].append(data[data_key])

    not_accepted, accepted, notag = l

    for d, s in zip(l, _styles):
        kw = kwargs.copy()
        if d:
            kw.setdefault('bins', int(np.ptp(d) // 1) + 1)
            kw.setdefault('histtype', 'step')
            pl.hist(d, edgecolor=s['c'] if color is None else color,
                    zorder=s['z'], label=s['label'] if label is None else label,
                    **kw)

    if not_accepted or label:
        pl.legend()

    if mark_sigma:
        if accepted or notag:
            if accepted:
                h = np.array(accepted)
            else:
                h = np.array(notag)
            m, u, l = dist_range(h, sigma=sigma)
            pl.axvline(m, c='r', ls='-', zorder=2)
            pl.axvline(u, c='g', ls='-', zorder=2)
            pl.axvline(l, c='g', ls='-', zorder=2)
            Log('%s: %f %f %f' % (data_key, m, u, l))
            Log('%s: %f +/- %f %f' % (data_key, m, (u - m), (m - l)))
        else:
            Log("%s: No H0 values accepted" % data_key)

    #pl.axvline(72, c='k', ls=':', zorder=2)
    pl.xlabel(xlabel)
    pl.ylabel(ylabel)
    pl.xlim(xmax=pl.xlim()[1] + 0.01 * (pl.xlim()[1] - pl.xlim()[0]))
    pl.ylim(ymax=pl.ylim()[1] + 0.01 * (pl.ylim()[1] - pl.ylim()[0]))
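# Hedged usage sketch for _hist. The data key 'H0' is only an assumption (any
# per-model scalar stored under data_key works); the call below is not part of
# the original module:
#
#   _hist(env, 'H0', xlabel=r'$H_0$ [km/s/Mpc]', sigma='1sigma')
#
# The default bin count, int(np.ptp(d) // 1) + 1, gives roughly one bin per
# unit of the data range.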