def fold_event_prof(ph_mjd, par_file, nbins=16):
    params = read_par(par_file, file_format='tempo2')
    # Fold data with psr_utils by first calculating phases for each MJD based
    # on the par file's F0, F1, ... -- only valid for an isolated pulsar
    ph_phase = pu.calc_phs(ph_mjd, params['pepoch'], params['f'][0],
                           params['f'][1], params['f'][2], params['f'][3])

    # Cast all negative phases to be between 0 and 1
    while (len(np.where(ph_phase <= 0.)[0]) > 0):  # Need the [0] because output is a tuple...
        below_zero_ind = np.where(ph_phase <= 0.)
        ph_phase[below_zero_ind] += 1.0
    # Now do the same to ensure anything greater than 1.0 is between (0., 1.0)
    while (len(np.where(ph_phase > 1.0)[0]) > 0):  # Need the [0] because output is a tuple...
        above_one_ind = np.where(ph_phase > 1.0)
        ph_phase[above_one_ind] -= 1.0

    # Now histogram phases to create profile for a given number of bins
    # (and thus bin size)
    prof, bin_edges = np.histogram(ph_phase, bins=nbins, range=(0., 1.))
    bin_size = bin_edges[1] - bin_edges[0]
    bin_val = bin_edges[0:len(bin_edges)-1] + bin_size/2.
    prof_err = np.sqrt(prof)

    # Calculate central MJD and MJD span for profile from min/max event MJDs
    if(len(ph_mjd) > 0):
        mjd_mean = 0.5*(np.min(ph_mjd) + np.max(ph_mjd))
        mjd_span = np.max(ph_mjd) - np.min(ph_mjd)
        # Spin phase at representative MJD of profile
        ref_phase = pu.calc_phs(mjd_mean, params['pepoch'], params['f'][0],
                                params['f'][1], params['f'][2], params['f'][3])
        # Cast to be between 0 and 1
        while(ref_phase <= 0.):
            ref_phase += 1.0
        while(ref_phase > 1.0):
            ref_phase -= 1.0
        ref_freq = pu.calc_freq(mjd_mean, params['pepoch'], params['f'][0],
                                params['f'][1], params['f'][2], params['f'][3])
    else:
        mjd_mean = 0.
        mjd_span = 0.
        ref_phase = 0.
        ref_freq = 0.

    profile = {'i': prof, 'i_err': prof_err, 'phase': bin_val,
               'mjd': mjd_mean, 'mjd_span': mjd_span,
               'psrname': params['psr'],
               'ref_phase': ref_phase, 'ref_freq': ref_freq}

    return profile
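# A minimal usage sketch for fold_event_prof (not part of the original code).
# It assumes the same imports as above (numpy as np, psr_utils as pu, read_par),
# and 'event_mjds.txt' / 'pulsar.par' are hypothetical file names.
def demo_fold_event_prof(event_file='event_mjds.txt', par_file='pulsar.par'):
    event_mjds = np.loadtxt(event_file)   # one event MJD per line
    prof = fold_event_prof(event_mjds, par_file, nbins=32)
    # prof['phase'] holds the bin-centre phases, prof['i'] the folded counts,
    # and prof['i_err'] the Poisson (sqrt(N)) uncertainties.
    print prof['psrname'], prof['mjd'], prof['ref_freq']
    return prof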
def main():
    progname = 'show_resid_info.py'
    args = get_opt(progname)

    # First, read in residuals data file, and assign each column to a separate
    # numpy array
    resid_data = read_resid(args.resfile, tempo2=args.tempo2,
                            info_file=args.infofile, info_flag=args.infoflag)

    # Use the par file (if given) to count free parameters and hence the number
    # of DOF ( = n_toa - n_free_param - 1, the extra 1 accounting for the fit
    # for phase); with no par file, fall back to n_toa alone in the chi^2's.
    if(args.parfile == None):
        n_param = 0
    else:
        if(args.tempo2):
            ffmt = 'tempo2'
        else:
            ffmt = 'tempo1'
        param_name, param_val, param_fit = read_par(args.parfile, file_format=ffmt,
                                                    return_tuple=True)
        n_param = param_fit.count(True)
        # param_data = read_par(args.parfile, file_format=ffmt)

    # Now get information from residuals
    rinfo = get_resid_info(resid_data, nparam=n_param)

    # Now print out results, to stdout for now:
    print ''
    print 'Residual file:        ', args.resfile
    print 'Par file:             ', args.parfile
    print 'Number of TOAs:       ', rinfo['ntoa']
    print 'Number of parameters: ', rinfo['nparam']
    print 'Number of DOF:        ', rinfo['ndof']
    print '\n'
    print 'Info     Total    Avg weight  Number   Chi^2       Adjusted chi^2  rms         rms        MJD range      Years  Centre'
    print '         weight   per TOA     of TOAs  per TOA     (per DOF)       unweighted  weighted                         freq'
    print ''
    for i_info in range(len(resid_data['info_val'])):
        print '{0:8} {1:7.5f} {2:7.5f} {3:7d} {4:10.4f} {5:10.4f} {6:10.4f} {7:10.4f} {8:5d} - {9:5d} {10:5.2f} {11:6.1f}'.format(
            resid_data['info_val'][i_info],
            rinfo['normwgt'][i_info],
            rinfo['avgwgt'][i_info],
            rinfo['npts'][i_info],
            rinfo['rchi2'][i_info],
            rinfo['rchi2x'][i_info],
            rinfo['resrms'][i_info],
            rinfo['resrmsw'][i_info],
            int(rinfo['mjdstart'][i_info]),
            int(rinfo['mjdend'][i_info]),
            (rinfo['mjdend'][i_info] - rinfo['mjdstart'][i_info])/365.25,
            rinfo['cfreq'][i_info])
    print ''
    print '{0:8} {1:7.5f} {2:7.5f} {3:7d} {4:10.4f} {5:10.4f} {6:10.4f} {7:10.4f} {8:5d} - {9:5d} {10:5.2f} {11:6.1f}'.format(
        'Total',
        rinfo['sum_normwgt'],
        rinfo['sum_avgwgt'],
        rinfo['sum_npts'],
        rinfo['sum_rchi2'],
        rinfo['sum_rchi2x'],
        rinfo['sum_resrms'],
        rinfo['sum_resrmsw'],
        int(rinfo['sum_mjdstart']),
        int(rinfo['sum_mjdend']),
        (rinfo['sum_mjdend'] - rinfo['sum_mjdstart'])/365.25,
        rinfo['sum_cfreq'])
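# A minimal entry point, assuming show_resid_info.py is run as a standalone
# script (the original file presumably ends with an equivalent guard):
if __name__ == '__main__':
    main()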
def main():
    progname = 'm2mtot_grid.py'
    args = get_opt(progname)

    if(args.psrname == None):
        outfile_base = ''
    else:
        outfile_base = args.psrname

    # Set par and tim files
    # par_base = '/Users/ferdman/Work/pulsar/1756-2251/timing/tempo/1756.dd.par.BASE'
    # tim_file = '/Users/ferdman/Work/pulsar/1756-2251/timing/tempo/1756.tempo.tim'

    # Prepare par file for grid fitting
    # First, read in par file, dropping SINI and M2 so that they can be
    # gridded over below:
    par_base_contents = []
    f_par = open(args.parfile, 'r')
    for par_line in f_par.readlines():
        par_fields = par_line.split()
        if (len(par_fields) > 0 and par_fields[0] != 'SINI' and par_fields[0] != 'M2'):
            par_base_contents.append(par_fields)
    # par_base_contents = [par_line.split() for par_line in f_par.readlines()]
    f_par.close()

    parfile_base = 'par_base.par'
    f_par_base = open(parfile_base, 'w')
    for par_line in par_base_contents:
        f_par_base.write(' '.join(par_line)+'\n')
    f_par_base.close()

    # Read in pb and asini from the par file, and calculate the mass function
    # (a standalone sketch of this expression follows this script):
    if(args.tempo2):
        tempo_ver = 'tempo2'
    else:
        tempo_ver = 'tempo1'
    params = read_par(args.parfile, file_format=tempo_ver)
    # asini is in (light) seconds.  Convert pb to seconds as well.
    # tsun is in us; using it in seconds gives f_mass in solar units.
    tsun = 4.925490947
    tsun_secs = tsun*10**(-6)
    pb_secs = float(params['pb'])*86400.0
    f_mass = 4*np.pi*np.pi*(float(params['a1'])**3.)/(tsun_secs*(pb_secs**2.))
    # f_mass = 4*np.pi*np.pi*(float(params['a1'])**3.)/(tsun*(float(params['pb'])**2.))
    print 'pb    = ', float(params['pb']), ' = ', pb_secs, ' sec'
    print 'a1    = ', float(params['a1'])
    print 'fmass = ', f_mass

    if(args.loadfile == None):
        p_out = grid_fit_shapiro_tempo(parfile_base, args.timfile,
                                       m2_range=args.m2lim, cosi_range=args.cosilim,
                                       n_m2=args.nm2, n_cosi=args.ncosi,
                                       fmass=f_mass, tempo2=args.tempo2)
        if(args.savefile):
            save_file = 'm2cosi_grid_{0}_{1}'.format(args.nm2, args.ncosi)
            save_array = np.array([p_out['m2'], p_out['cosi'], p_out['sini'],
                                   p_out['m1'], p_out['m1_prob']])
            np.save(save_file+'_params', save_array)
            np.save(save_file+'_prob', p_out['norm_like'])
    else:
        load_array = np.load(args.loadfile+'_params.npy')
        # For the purposes of this routine, only need the following
        # things in p_out
        p_out = {'m2': load_array[0], 'cosi': load_array[1], 'sini': load_array[2],
                 'm1': load_array[3], 'm1_prob': load_array[4]}
        p_out['norm_like'] = np.load(args.loadfile+'_prob.npy')

    # Now make contour plots
    plot_contour_pdf(p_out['cosi'], p_out['m2'], p_out['norm_like'],
                     xlabel='|cos $i$|', ylabel='Companion mass ($M_\\odot$)')
    # Add in m1 curves
    if(args.m1curve != None):
        for i_m1 in np.arange(len(args.m1curve)):
            sini_plot = ((f_mass*(args.m1curve[i_m1]+p_out['m2'])**2.)**(1./3.))/p_out['m2']
            cosi_plot = np.sqrt(1.0 - sini_plot**2.)
            plt.plot(cosi_plot, p_out['m2'], linestyle='dashed', color='black')
    plt.savefig('1756_m2cosi_contours.'+args.plotformat)

    # Now plot 1D pdfs for m2 and cosi
    m2_pdf = np.sum(p_out['norm_like'], axis=1)
    m2_med, m2_prob_min, m2_prob_max = \
        get_pdf_prob(p_out['m2'], m2_pdf, prob_intervals)
    plot_pdf(p_out['m2'], m2_pdf,
             xlabel='Companion mass ($M_\\odot$)', ylabel='Probability density',
             prob_lines=np.append(m2_prob_min, m2_prob_max),
             prob_linestyle=['dashed', 'dashdot', 'dotted',
                             'dashed', 'dashdot', 'dotted'])
    plt.savefig('1756_m2_m2sini_pdf.'+args.plotformat)
    print 'M2 = ', m2_med
    print '   68%: ', m2_prob_min[0], m2_prob_max[0]
    print '   95%: ', m2_prob_min[1], m2_prob_max[1]
    print '   99%: ', m2_prob_min[2], m2_prob_max[2]
    print ' '

    sini_pdf = np.sum(p_out['norm_like'], axis=0)
    sini_med, sini_prob_min, sini_prob_max = \
        get_pdf_prob(p_out['sini'], sini_pdf, prob_intervals)
    plot_pdf(p_out['sini'], sini_pdf,
             xlabel='Sine of inclination angle', ylabel='Probability density',
             prob_lines=np.append(sini_prob_min, sini_prob_max),
             prob_linestyle=['dashed', 'dashdot', 'dotted',
                             'dashed', 'dashdot', 'dotted'])
    plt.savefig('1756_sini_m2sini_pdf.'+args.plotformat)
    print 'SINI = ', sini_med
    print '   68%: ', sini_prob_min[0], sini_prob_max[0]
    print '   95%: ', sini_prob_min[1], sini_prob_max[1]
    print '   99%: ', sini_prob_min[2], sini_prob_max[2]
    print ' '

    cosi_pdf = np.sum(p_out['norm_like'], axis=0)
    cosi_med, cosi_prob_min, cosi_prob_max = \
        get_pdf_prob(p_out['cosi'], cosi_pdf, prob_intervals)
    plot_pdf(p_out['cosi'], cosi_pdf,
             xlabel='Cosine of inclination angle', ylabel='Probability density',
             prob_lines=np.append(cosi_prob_min, cosi_prob_max),
             prob_linestyle=['dashed', 'dashdot', 'dotted',
                             'dashed', 'dashdot', 'dotted'])
    plt.savefig('1756_cosi_m2sini_pdf.'+args.plotformat)
    print 'COSI = ', cosi_med
    print '   68%: ', cosi_prob_min[0], cosi_prob_max[0]
    print '   95%: ', cosi_prob_min[1], cosi_prob_max[1]
    print '   99%: ', cosi_prob_min[2], cosi_prob_max[2]
    print ' '

    # Now deal with m1's: create a histogram weighted by likelihood
    m1_pdf, bin_edges = np.histogram(p_out['m1'], args.m1bins,
                                     density=True, weights=p_out['m1_prob'])
    # We can define the bin centres as follows since our call to np.histogram
    # gives back evenly spaced bins
    bin_size = bin_edges[1] - bin_edges[0]
    m1_val = bin_edges[0:len(bin_edges)-1] + 0.5*bin_size
    # Get PDF intervals and values:
    # pdf_rho = rho_hist/np.sum(rho_hist)
    m1_med, m1_prob_min, m1_prob_max = \
        get_pdf_prob(m1_val, m1_pdf, prob_intervals, norm=True)
    plot_pdf(m1_val, m1_pdf,
             xlabel='Pulsar mass ($M_\\odot$)', ylabel='Probability density',
             prob_lines=np.append(m1_prob_min, m1_prob_max),
             prob_linestyle=['dashed', 'dashdot', 'dotted',
                             'dashed', 'dashdot', 'dotted'])
    plt.savefig('1756_m1_m2sini_pdf.'+args.plotformat)
    print ' '
    print 'M1 = ', m1_med
    print '   68%: ', m1_prob_min[0], ' ', m1_prob_max[0]
    print '   95%: ', m1_prob_min[1], ' ', m1_prob_max[1]
    print '   99%: ', m1_prob_min[2], ' ', m1_prob_max[2]
    print ' '
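# A standalone sketch of the Keplerian mass-function expression used in
# m2mtot_grid.py above; the helper name and the example values are
# illustrative only, not taken from any particular par file.
def mass_function(a1_lt_sec, pb_days):
    # a1_lt_sec: projected semi-major axis a*sin(i)/c, in light-seconds
    # pb_days:   orbital period, in days
    # T_sun = G*Msun/c^3 ~ 4.925490947e-6 s, so the result is in solar masses
    tsun_secs = 4.925490947e-6
    pb_secs = pb_days*86400.0
    return 4.0*np.pi**2*(a1_lt_sec**3.)/(tsun_secs*(pb_secs**2.))

# Example with made-up orbital values: mass_function(2.0, 0.5) ~ 0.034 Msun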