def bpHist(self):
    """Return an omega histogram plot of the per-basepol values in self.bpData.

    Each entry of self.bpData is indexed as t[1] for its value. The bin
    count follows Sturges' rule, n = ceil(log2(N) + 1). Returns the plot
    object.
    """
    import omega
    values = [t[1] for t in self.bpData]
    # Sturges' rule for the bin count. Guard the empty case: log2(0) is
    # -inf and int(-inf) raises OverflowError.
    if values:
        n = int(np.ceil(np.log2(len(values)) + 1))
    else:
        n = 1
    p = omega.quickHist(values, n, keyText='Basepols')
    p.setLabels('%s closure' % self.item, 'Number of basepols')
    return p
def bpHist (self):
    """Plot a histogram of the basepol closure quantities in self.bpData.

    Bins are chosen by Sturges' rule. Returns the omega plot object.
    """
    import omega
    values = [rec[1] for rec in self.bpData]
    count = len (values)
    # Sturges' rule; fall back to a single bin for empty data, since
    # int(ceil(log2(0) + 1)) would raise OverflowError on -inf.
    nbins = 1 if count == 0 else int (np.ceil (np.log2 (count) + 1))
    p = omega.quickHist (values, nbins, keyText='Basepols')
    p.setLabels ('%s closure' % self.item, 'Number of basepols')
    return p
def valHist(self):
    """Return an omega histogram plot of the accumulated RMS values.

    Finishes the self.allrms accumulator and histograms the resulting
    array with a Sturges'-rule bin count. Returns the plot object.
    """
    import omega
    rms = self.allrms.finish()
    # Sturges' rule; guard the empty case, where log2(0) would make
    # int() raise OverflowError on -inf.
    if rms.size:
        n = int(np.ceil(np.log2(rms.size) + 1))
    else:
        n = 1
    p = omega.quickHist(rms, n, keyText=self.datum)
    p.setLabels('%s closure' % self.item, 'Number of %s' % self.datum)
    return p
def valHist (self):
    """Histogram the finished RMS closure values from self.allrms.

    The number of bins follows Sturges' rule. Returns the omega plot
    object.
    """
    import omega
    rms = self.allrms.finish ()
    count = rms.size
    # Sturges' rule, with a single-bin fallback for empty input so that
    # log2(0) cannot blow up the int() conversion.
    nbins = 1 if count == 0 else int (np.ceil (np.log2 (count) + 1))
    p = omega.quickHist (rms, nbins, keyText=self.datum)
    p.setLabels ('%s closure' % self.item, 'Number of %s' % self.datum)
    return p
def tui_checkcal (args): import omega as om, scipy.stats as SS toread = [] uvdatoptions = {} for arg in args: if '=' in arg: key, value = arg.split ('=', 1) uvdatoptions[key] = value else: toread.append (VisData (arg)) if len (toread) < 1: util.die ('usage: <vis1> [... visn] [uvdat options]') samples = VectorGrower () gen = uvdat.setupAndRead (toread, 'x3', False, **uvdatoptions) for inp, pream, data, flags in gen: w = np.where (flags)[0] if w.size == 0: continue data = data[w] var = 0.5 * (data.real.var (ddof=1) + data.imag.var (ddof=1)) uvar = np.sqrt (1. / (w.size - 1)) * var # uncert in variance msmt thy = inp.getVariance () samples.add ((var - thy) / uvar) samples = samples.finish () n = samples.size m = samples.mean () s = samples.std () med = np.median (samples) smadm = 1.4826 * np.median (np.abs (samples - med)) # see comment below print ' Number of samples:', n print 'Mean normalized error (should be 0):', m print ' Median:', med print ' Normalized std. dev. (should be 1):', s print ' SMADM:', smadm print 'Probability that samples are normal:', SS.normaltest (samples)[1] bins = 50 rng = -5, 5 p = om.quickHist (samples, keyText='Samples', bins=bins, range=rng) x = np.linspace (-5, 5, 200) area = 10. / bins * n y = area / np.sqrt (2 * np.pi) * np.exp (-0.5 * x**2) p.addXY (x, y, 'Ideal') p.rebound (False, False) p.show () return 0
def tui_checkcal(args): import omega as om, scipy.stats as SS toread = [] uvdatoptions = {} for arg in args: if '=' in arg: key, value = arg.split('=', 1) uvdatoptions[key] = value else: toread.append(VisData(arg)) if len(toread) < 1: util.die('usage: <vis1> [... visn] [uvdat options]') samples = VectorGrower() gen = uvdat.setupAndRead(toread, 'x3', False, **uvdatoptions) for inp, pream, data, flags in gen: w = np.where(flags)[0] if w.size == 0: continue data = data[w] var = 0.5 * (data.real.var(ddof=1) + data.imag.var(ddof=1)) uvar = np.sqrt(1. / (w.size - 1)) * var # uncert in variance msmt thy = inp.getVariance() samples.add((var - thy) / uvar) samples = samples.finish() n = samples.size m = samples.mean() s = samples.std() med = np.median(samples) smadm = 1.4826 * np.median(np.abs(samples - med)) # see comment below print ' Number of samples:', n print 'Mean normalized error (should be 0):', m print ' Median:', med print ' Normalized std. dev. (should be 1):', s print ' SMADM:', smadm print 'Probability that samples are normal:', SS.normaltest(samples)[1] bins = 50 rng = -5, 5 p = om.quickHist(samples, keyText='Samples', bins=bins, range=rng) x = np.linspace(-5, 5, 200) area = 10. / bins * n y = area / np.sqrt(2 * np.pi) * np.exp(-0.5 * x**2) p.addXY(x, y, 'Ideal') p.rebound(False, False) p.show() return 0
def tui_checkap(args): import omega as om if len(args) != 2: util.die('usage: <datfile> <antpol>') nc = NoiseCal() nc.load(args[0]) ap = util.parseAP(args[1]) apidx = nc.saps.index(ap) if apidx < 0: util.die('no antpol %s in data file!', util.fmtAP(ap)) sqbps = nc.sqbps vals = nc.bpdata[:, 1] modelvals = nc.bpdata[:, 3] * nc.svals resids = vals - modelvals runcerts = np.sqrt(1. / nc.bpdata[:, 2] + (nc.suncerts * nc.bpdata[:, 3])**2) resids /= runcerts w = np.where((sqbps[:, 0] == apidx) | (sqbps[:, 1] == apidx)) resids = resids[w] n = resids.size mn = resids.mean() s = resids.std() md = np.median(resids) smadm = 1.4826 * np.median(np.abs(resids - md)) # see comment below print ' Number of samples:', n print ' Norm. mean residal:', mn print ' Median:', md print 'Norm. residual std. dev.:', s print ' SMADM:', smadm bins = 50 rng = -5, 5 p = om.quickHist(resids, keyText='%s Residuals' % util.fmtAP(ap), bins=bins, range=rng) x = np.linspace(-5, 5, 200) area = 10. / bins * n y = area / np.sqrt(2 * np.pi) * np.exp(-0.5 * x**2) p.addXY(x, y, 'Ideal') p.rebound(False, False) p.show() return 0
def tui_checkap (args): import omega as om if len (args) != 2: util.die ('usage: <datfile> <antpol>') nc = NoiseCal () nc.load (args[0]) ap = util.parseAP (args[1]) apidx = nc.saps.index (ap) if apidx < 0: util.die ('no antpol %s in data file!', util.fmtAP (ap)) sqbps = nc.sqbps vals = nc.bpdata[:,1] modelvals = nc.bpdata[:,3] * nc.svals resids = vals - modelvals runcerts = np.sqrt (1./nc.bpdata[:,2] + (nc.suncerts * nc.bpdata[:,3])**2) resids /= runcerts w = np.where ((sqbps[:,0] == apidx) | (sqbps[:,1] == apidx)) resids = resids[w] n = resids.size mn = resids.mean () s = resids.std () md = np.median (resids) smadm = 1.4826 * np.median (np.abs (resids - md)) # see comment below print ' Number of samples:', n print ' Norm. mean residal:', mn print ' Median:', md print 'Norm. residual std. dev.:', s print ' SMADM:', smadm bins = 50 rng = -5, 5 p = om.quickHist (resids, keyText='%s Residuals' % util.fmtAP (ap), bins=bins, range=rng) x = np.linspace (-5, 5, 200) area = 10. / bins * n y = area / np.sqrt (2 * np.pi) * np.exp (-0.5 * x**2) p.addXY (x, y, 'Ideal') p.rebound (False, False) p.show () return 0
def tui_checkfit (args): import omega as om if len (args) != 1: util.die ('usage: <datfile>') nc = NoiseCal () nc.load (args[0]) vals = nc.bpdata[:,1] modelvals = nc.bpdata[:,3] * nc.svals resids = vals - modelvals runcerts = np.sqrt (1./nc.bpdata[:,2] + (nc.suncerts * nc.bpdata[:,3])**2) normresids = resids / runcerts n = normresids.size mn = normresids.mean () s = normresids.std () md = np.median (normresids) smadm = 1.4826 * np.median (np.abs (normresids - md)) # see comment below print ' Number of samples:', n print ' Normalized mean residal:', mn print ' Median:', md print 'Normalized std. dev. (should be 1):', s print ' SMADM:', smadm # Check for problematic antpols and basepols saps = nc.saps sqbps = nc.sqbps nap = len (saps) dumbnbp = nap**2 apcounts = np.zeros (nap, dtype=np.int) apsumsqresids = np.zeros (nap) bpcounts = np.zeros (dumbnbp, dtype=np.int) bpsumsqresids = np.zeros (dumbnbp) for i in xrange (n): idx1, idx2 = sqbps[i] apcounts[idx1] += 1 apsumsqresids[idx1] += normresids[i]**2 apcounts[idx2] += 1 apsumsqresids[idx2] += normresids[i]**2 bpidx = idx1 * nap + idx2 bpcounts[bpidx] += 1 bpsumsqresids[bpidx] += normresids[i]**2 aprmsresids = np.sqrt (apsumsqresids / apcounts) sapresids = np.argsort (aprmsresids) print print 'Extreme residual RMS by antpol:' for i in xrange (5): idx = sapresids[i] print ' %10s %8.2f' % (util.fmtAP (saps[idx]), aprmsresids[idx]) print ' ....' for i in xrange (5): idx = sapresids[i - 5] print ' %10s %8.2f' % (util.fmtAP (saps[idx]), aprmsresids[idx]) wbpgood = np.where (bpcounts)[0] wbpbad = np.where (bpcounts == 0)[0] bpcounts[wbpbad] = 1 bprmsresids = bpsumsqresids / bpcounts sbpresids = np.argsort (bprmsresids[wbpgood]) print print 'Extreme residual RMS by basepol:' for i in xrange (3): idx = wbpgood[sbpresids[i]] ap2 = saps[idx % nap] ap1 = saps[idx // nap] print ' %10s %8.2f' % (util.fmtBP ((ap1, ap2)), bprmsresids[idx]) print ' ....' 
for i in xrange (7): idx = wbpgood[sbpresids[i - 7]] ap2 = saps[idx % nap] ap1 = saps[idx // nap] print ' %10s %8.2f' % (util.fmtBP ((ap1, ap2)), bprmsresids[idx]) # Plot the distribution of residuals bins = 50 rng = -5, 5 p = om.quickHist (normresids, keyText='Residuals', bins=bins, range=rng) x = np.linspace (-5, 5, 200) area = 10. / bins * n y = area / np.sqrt (2 * np.pi) * np.exp (-0.5 * x**2) p.addXY (x, y, 'Ideal') p.rebound (False, False) p.show () return 0
def tui_checkfit(args): import omega as om if len(args) != 1: util.die('usage: <datfile>') nc = NoiseCal() nc.load(args[0]) vals = nc.bpdata[:, 1] modelvals = nc.bpdata[:, 3] * nc.svals resids = vals - modelvals runcerts = np.sqrt(1. / nc.bpdata[:, 2] + (nc.suncerts * nc.bpdata[:, 3])**2) normresids = resids / runcerts n = normresids.size mn = normresids.mean() s = normresids.std() md = np.median(normresids) smadm = 1.4826 * np.median(np.abs(normresids - md)) # see comment below print ' Number of samples:', n print ' Normalized mean residal:', mn print ' Median:', md print 'Normalized std. dev. (should be 1):', s print ' SMADM:', smadm # Check for problematic antpols and basepols saps = nc.saps sqbps = nc.sqbps nap = len(saps) dumbnbp = nap**2 apcounts = np.zeros(nap, dtype=np.int) apsumsqresids = np.zeros(nap) bpcounts = np.zeros(dumbnbp, dtype=np.int) bpsumsqresids = np.zeros(dumbnbp) for i in xrange(n): idx1, idx2 = sqbps[i] apcounts[idx1] += 1 apsumsqresids[idx1] += normresids[i]**2 apcounts[idx2] += 1 apsumsqresids[idx2] += normresids[i]**2 bpidx = idx1 * nap + idx2 bpcounts[bpidx] += 1 bpsumsqresids[bpidx] += normresids[i]**2 aprmsresids = np.sqrt(apsumsqresids / apcounts) sapresids = np.argsort(aprmsresids) print print 'Extreme residual RMS by antpol:' for i in xrange(5): idx = sapresids[i] print ' %10s %8.2f' % (util.fmtAP(saps[idx]), aprmsresids[idx]) print ' ....' for i in xrange(5): idx = sapresids[i - 5] print ' %10s %8.2f' % (util.fmtAP(saps[idx]), aprmsresids[idx]) wbpgood = np.where(bpcounts)[0] wbpbad = np.where(bpcounts == 0)[0] bpcounts[wbpbad] = 1 bprmsresids = bpsumsqresids / bpcounts sbpresids = np.argsort(bprmsresids[wbpgood]) print print 'Extreme residual RMS by basepol:' for i in xrange(3): idx = wbpgood[sbpresids[i]] ap2 = saps[idx % nap] ap1 = saps[idx // nap] print ' %10s %8.2f' % (util.fmtBP((ap1, ap2)), bprmsresids[idx]) print ' ....' 
for i in xrange(7): idx = wbpgood[sbpresids[i - 7]] ap2 = saps[idx % nap] ap1 = saps[idx // nap] print ' %10s %8.2f' % (util.fmtBP((ap1, ap2)), bprmsresids[idx]) # Plot the distribution of residuals bins = 50 rng = -5, 5 p = om.quickHist(normresids, keyText='Residuals', bins=bins, range=rng) x = np.linspace(-5, 5, 200) area = 10. / bins * n y = area / np.sqrt(2 * np.pi) * np.exp(-0.5 * x**2) p.addXY(x, y, 'Ideal') p.rebound(False, False) p.show() return 0