Example #1
        fold.phs0 = 0.0
        (fold.f0, fold.f1, fold.f2) = psr_utils.p_to_f(fold.p0, fold.p1, fold.p2)

    #
    # Calculate the TOAs
    #

    for ii in range(numtoas):

        # The .pfd file was generated using -nosearch and a specified
        # folding period, p-dot, and p-dotdot (or f, f-dot, and f-dotdot).
        if (pcs is None):
            # Time at the middle of the interval in question
            midtime = fold.epoch + (ii+0.5)*timestep_day
            p = 1.0/psr_utils.calc_freq(midtime, fold.epoch, fold.f0, fold.f1, fold.f2)
            t0 = psr_utils.calc_t0(midtime, fold.epoch, fold.f0, fold.f1, fold.f2)
            t0i = int(t0 + 1e-9)
            t0f = t0 - t0i
        # The .pfd file was folded using polycos
        else:
            # Time at the middle of the interval in question
            mjdf = fold.epochf + (ii+0.5)*timestep_day
            (phs, f0) = pcs.get_phs_and_freq(fold.epochi, mjdf)
            phs -= fold.phs0
            p = 1.0/fold.f0
            t0f = mjdf - phs*p/SECPERDAY
            t0i = fold.epochi

        for jj in range(numsubbands):
            prof = profs[ii][jj]
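
In both branches above, the TOA reference time comes from the pulsar's spin-frequency Taylor expansion around the fold epoch: psr_utils.calc_freq evaluates the instantaneous frequency at the mid-interval MJD, and psr_utils.calc_t0 the nearby zero-phase epoch. The sketch below illustrates that expansion, assuming offsets are measured in seconds from the reference MJD; it is an illustration only, not the psr_utils implementation.

# Minimal sketch (not the psr_utils implementation) of the spin-phase
# Taylor expansion that calc_freq and calc_t0 evaluate.
SECPERDAY = 86400.0

def spin_freq(mjd, ref_mjd, *fdots):
    # f(t) = f0 + f1*dt + f2*dt**2/2 + ..., with dt in seconds
    dt = (mjd - ref_mjd) * SECPERDAY
    freq, fact = 0.0, 1.0
    for k, fk in enumerate(fdots):
        fact *= max(k, 1)               # running k!  (0! == 1)
        freq += fk * dt**k / fact
    return freq

def spin_phase(mjd, ref_mjd, *fdots):
    # phi(t) = f0*dt + f1*dt**2/2 + ..., the integral of f(t)
    dt = (mjd - ref_mjd) * SECPERDAY
    phs, fact = 0.0, 1.0
    for k, fk in enumerate(fdots):
        fact *= (k + 1)                 # running (k+1)!
        phs += fk * dt**(k + 1) / fact
    return phs

# A zero-phase epoch analogous to t0 above can then be formed as
#   mjd - (spin_phase(mjd, ref_mjd, *fdots) % 1.0) / spin_freq(mjd, ref_mjd, *fdots) / SECPERDAY
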
Example #2
def get_ml_toa(fits_fn, prof_mod, parfile, scope='swift', print_offs=None, 
               frequency=None, epoch=None,  sim=False, bg_counts=0, Emin=None, 
               Emax=None, gauss_err=False, tempo2=False, debug=False, 
               correct_pf=False, split_num=None, split_orbits=False):

    print_timings = False # if want to print summary of runtime

    fits = pyfits.open(fits_fn)
    t = smu.fits2times(fits_fn, scope=scope, Emin=Emin, Emax=Emax)

    #if scope != 'chandra':
    #    exposure = fits[0].header['EXPOSURE']

    try:
        obsid = fits[0].header['OBS_ID']
    except KeyError:
        obsid = os.path.basename(fits_fn)

    if bg_counts < 0:
        bg_scale = -1.0*bg_counts
        bg_fits_fn = fits_fn.replace('reg','bgreg')
        bg_fits = pyfits.open(bg_fits_fn)
        bg_counts = int(bg_fits[1].header['NAXIS2'] * bg_scale)
        print 'BG Counts:',bg_counts
        bg_fits.close()
    if frequency and epoch:
        par = lambda: None
        par.epoch = epoch
        par.f0 = frequency
        par.fdots = [0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]
    else:
        par = PSRpar(parfile)

    # split times into multiple arrays if needed
    if split_orbits:
        dt = t[1:] - t[:-1]
        splits = np.where(dt > 0.0116)[0] # 1 ks in days
        if len(splits):
            ts = [ t[:splits[0]] ]
            for i in range(len(splits)-1):
                ts.append(t[splits[i]+1:splits[i+1]])
            ts.append(t[splits[-1]+1:])
        else:
            ts = np.atleast_2d(t)

    elif split_num:
        remainder = len(t) % split_num
        if remainder:
            sys.stderr.write("Warning: Number of events in %s not divisable by %d. " \
                             "Dropping last %d events.\n" % (obsid, split_num, remainder))
            ts = np.split(t[:-remainder],split_num)
        else:
            ts = np.split(t,split_num)

    else:
        ts = np.atleast_2d(t)

    if len(ts) > 1 and debug:
        plt.figure()
        for t in ts:
            nbins = int((t[-1] - t[0]) * 8640.0)
            hist = np.histogram(t,bins=nbins)
            plt.plot(hist[1][:-1],hist[0],c='b')
            plt.axvline(t[0],ls='--',c='k',lw=2)
            plt.axvline(t[-1],ls='-',c='k',lw=2)
        plt.show()
           

    for i,t in enumerate(ts):
        sys.stderr.write('Measuring TOA #%d for %s\n' % (i+1,obsid))

        phases = smu.times2phases(t, parfile)

        if correct_pf:
            old_model, new_model, corr_folded = correct_model(phases,prof_mod)
        maxoff, error = calc_toa_offset(phases,prof_mod.prof_mod,sim_err=sim,bg_counts=bg_counts, gauss_err=gauss_err, debug=debug)
        midtime = (t[-1]+t[0])/2.0
        p_mid = 1.0/psr_utils.calc_freq(midtime, par.epoch, par.f0, par.fdots[0], par.fdots[1], par.fdots[2], par.fdots[3],
                                        par.fdots[4], par.fdots[5], par.fdots[6], par.fdots[7], par.fdots[8]) 

        t0 = psr_utils.calc_t0(midtime, par.epoch, par.f0, par.fdots[0], par.fdots[1], par.fdots[2], par.fdots[3],
                               par.fdots[4], par.fdots[5], par.fdots[6], par.fdots[7], par.fdots[8]) 
        t0i = int(t0)
        t0f = t0 - t0i

        toaf = t0f + maxoff*p_mid / SECPERDAY
        newdays = int(np.floor(toaf))
        
 
        if tempo2:
            psr_utils.write_tempo2_toa(t0i+newdays, toaf-newdays, error*p_mid*1.0e6, 0000, 0.0, name=obsid) 
        else:
            psr_utils.write_princeton_toa(t0i+newdays, toaf-newdays, error*p_mid*1.0e6, 0000, 0.0, name=obsid) 

        if print_offs:
            offs_file = open(print_offs,'a')
            #print "\t",error*p_mid*1.0e6,"\t",exposure # this was for checking uncertainties scaling with exposure time
            offs_file.write(fits_fn + "\t" + str(maxoff) + "\t" + str(error) + "\n")
            #print obsid,"\tOffset:",maxoff,"+/-",error 
            offs_file.close()

        fits.close()

        
        #double check PF correction with measuring binned model pulsed fraction
        if correct_pf and debug:
            plt.figure()
            nbins = len(corr_folded[0])
            uncertainties = np.sqrt(corr_folded[0])
            area = np.sum(corr_folded[0],dtype='float')/nbins
            plt.step(corr_folded[1][:-1],np.roll(corr_folded[0]/area,int(1.0-maxoff*nbins)),where='mid')
            plt.errorbar(corr_folded[1][:-1],np.roll(corr_folded[0]/area,int(1.0-maxoff*nbins)),uncertainties/area,fmt='ko')
            model_x = np.linspace(0,1,100)
            plt.plot(model_x,old_model(model_x),label='uncorrected')
            plt.plot(model_x,new_model(model_x),label='corrected')
            plt.legend()
            plt.show()

    if print_timings:
        global calcprobtime
        global logsumtime 
        global integratetime

        sys.stderr.write('\tCalc Prob: %f s\n' % calcprobtime)
        sys.stderr.write('\tLog Sum: %f s\n' % logsumtime)
        sys.stderr.write('\tIntegrate Norm: %f s\n' % integratetime)
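
For reference, a call to get_ml_toa might look like the sketch below. The file names and the prof_mod object are placeholders, not part of the original source; prof_mod is only assumed to carry the analytic profile model that calc_toa_offset reads via prof_mod.prof_mod. TOAs are emitted inside the loop by psr_utils.write_tempo2_toa or psr_utils.write_princeton_toa, one per split of the event times.

# Hypothetical usage of get_ml_toa; file names, energy cuts and prof_mod are placeholders.
get_ml_toa('obs_bary_reg.fits',      # barycentred event file (placeholder name)
           prof_mod,                 # profile-model object (placeholder)
           'pulsar.par',             # ephemeris used to fold the events (placeholder name)
           scope='swift',
           Emin=0.5, Emax=10.0,      # optional energy cuts passed to smu.fits2times (placeholder values)
           split_orbits=True,        # one TOA per stretch of events separated by >~1 ks gaps
           tempo2=True)              # write tempo2-format rather than Princeton-format TOAs
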
Example #3
    # Calculate the TOAs
    #

    if t2format:
        print "FORMAT 1"

    for ii in range(numtoas):

        # The .pfd file was generated using -nosearch and a specified
        # folding period, p-dot, and p-dotdot (or f, f-dot, and f-dotdot).
        if (pcs is None):
            # Time at the middle of the interval in question
            midtime = fold.epoch + (ii + 0.5) * timestep_day
            p = 1.0 / psr_utils.calc_freq(midtime, fold.epoch, fold.f0,
                                          fold.f1, fold.f2)
            t0 = psr_utils.calc_t0(midtime, fold.epoch, fold.f0, fold.f1,
                                   fold.f2)
            t0i = int(t0 + 1e-9)
            t0f = t0 - t0i
        # The .pfd file was folded using polycos
        else:
            # Time at the middle of the interval in question
            mjdf = fold.epochf + (ii + 0.5) * timestep_day
            (phs, f0) = pcs.get_phs_and_freq(fold.epochi, mjdf)
            phs -= fold.phs0
            p = 1.0 / f0
            if (phs < 0.0): phs += 1.0  # Consistent with pat
            t0f = mjdf - phs * p / SECPERDAY
            t0i = fold.epochi

        for jj in range(numsubbands):
            prof = profs[ii][jj]
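
The polyco branch here converts the pulse phase at mid-interval into the fractional MJD of the preceding zero-phase instant, wrapping negative phases so the result stays consistent with tempo's pat. A small numeric illustration of that conversion, with made-up values, is:

# Numeric illustration of the polyco branch above; the values are made up.
SECPERDAY = 86400.0
mjdf, phs, f0 = 0.4371921, 0.8132, 29.946923    # fractional MJD, polyco phase, freq (Hz)
if phs < 0.0:
    phs += 1.0                                  # same wrap as above, consistent with pat
p = 1.0 / f0                                    # instantaneous period in seconds
t0f = mjdf - phs * p / SECPERDAY                # fractional MJD at zero pulse phase
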
Example #4
def xte_toa(profile, template, params, n_trials=512):

    print 'Number of TOA trials per profile: ', n_trials

    # If given only one profile, just make it into a one-element list:
    if type(profile) is not list:
        profile = [profile]

    # First get amplitudes and phases from FFTing the input template profile
    t_prof_fft, t_prof_amp, t_prof_phase = cprof(template['i'])

    n_prof = len(profile)
    toa_int = []
    toa_frac = []
    toa_err = []
    time_lapse = []
    i_prof = 0
    for prof in profile:
        # Run a number of trials, sampling profiles from a Poisson distribution each time, and get out an
        # mean TOA and TOA err that is equal to the std dev of the resulting distribution of TOAs
        shift = []
        stt_time = time.time()
        for i_trial in range(n_trials):

            # Create trial profile by sampling from a Poisson distribution based on counts in each bin
            prof_trial = np.random.poisson(prof['i'])
            # Get shift between template and profile, in bins:
            # Now run fftfit to match the profiles and out the shifts and scales
            shift_trial,eshift,snr,esnr,b,errb,ngood = fftfit(prof_trial,t_prof_amp,t_prof_phase)

            n_bins = len(prof['i'])
            # ensure that shift is not negative, and less than nbins-1
            while(shift_trial <= 0.0):
                shift_trial += float(n_bins)
            while(shift_trial > float(n_bins-1)):
                shift_trial -= float(n_bins)

            shift.append(shift_trial)


        end_time = time.time()
        time_lapse.append(end_time-stt_time)
        # Now calculate a mean shift and error, we can calculate final TOA.
        shift = np.array(shift)
        shift_mean = np.mean(shift)
        shift_err = np.std(shift)

        # Convert to a shift in phase, and then time in days based on current pulse period:
        shift_phase = shift_mean/float(n_bins)
        shift_err_phase = shift_err/float(n_bins)
        # Get MJD closest to reference MJD that is at zero spin phase



        mjd0 = pu.calc_t0(prof['mjd'], params['pepoch'], params['f'][0], params['f'][1], params['f'][2], params['f'][3])
        # Separate out into integer and fractional times
        mjd0_int = int(mjd0)
        mjd0_frac = mjd0 - float(mjd0_int)

#        toa_f = mjd0_frac - shift_phase/prof['ref_freq']/86400.0
        toa_f = mjd0_frac + shift_phase/prof['ref_freq']/86400.0

        # Identify extra days in getting TOA calculation
        extra_days = np.floor(toa_f)
        toa_int.append(mjd0_int + int(extra_days))
        toa_frac.append(toa_f - extra_days)
        # Get final TOA error
        toa_err.append(1.0e6*shift_err_phase/prof['ref_freq']) # convert from secs to microsecs

#        i_prof += 1
#        print 'prof ', i_prof


    time_lapse = np.array(time_lapse)
    mean_time_lapse = np.mean(time_lapse) # in seconds
    print 'Mean time per profile: ', mean_time_lapse*1000., 'ms'
    print 'Total time taken: ', np.sum(time_lapse), 'sec'

    # Finally, package results into dictionary format:
    toa_int = np.array(toa_int)
    toa_frac = np.array(toa_frac)
    toa_err = np.array(toa_err)
    toa = {'toa_int':toa_int, 'toa_frac':toa_frac, 'toa_err':toa_err}

    return toa
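
A call to xte_toa might look like the sketch below; every value is a placeholder chosen only to match the dictionary keys the function reads ('i', 'mjd' and 'ref_freq' for each profile, 'i' for the template, 'pepoch' and 'f' for the parameters).

import numpy as np

# Hypothetical usage of xte_toa; the template file and all numbers are placeholders.
template = {'i': np.loadtxt('template_profile.txt')}     # template pulse profile (placeholder file)
profiles = [{'i': np.random.poisson(50.0, size=64),      # folded counts per phase bin (fake data)
             'mjd': 55000.123456789,                     # reference MJD of the profile
             'ref_freq': 29.946923}]                     # spin frequency at that MJD (Hz)
params = {'pepoch': 55000.0,                             # ephemeris reference epoch (MJD)
          'f': [29.946923, -3.77e-10, 0.0, 0.0]}         # f0, f1, f2, f3

toa = xte_toa(profiles, template, params, n_trials=256)
toa_mjd = toa['toa_int'][0] + toa['toa_frac'][0]         # TOA in MJD
toa_err_us = toa['toa_err'][0]                           # 1-sigma uncertainty in microseconds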