Example #1
import psrchive
import numpy as np

def align_archive(filename, template, outfile, tfac=1):
    """ Align an archive across subintegrations
    
    Dedisperses and fscrunches the archive, then fits for the shift between 
    the profile at each subint and the input template. 
    Applies this shift to each subint of the non-fscrunched, non-dedispersed archive.
    Saves this shifted archive.

    Parameters
    ----------
    filename: name of the input archive to be aligned
    template: file to align each subint against
    outfile: name of output file to save
    tfac: factor to tscrunch by
    """
    arch = psrchive.Archive_load(filename)
    arch.convert_state('Stokes')
    if arch.get_dedispersed():
        print("Already dedispersed")
    else:
        if arch.get_dispersion_measure() == 0:
            print("Bad dispersion measure")
        else:
            print("De-Dispersing")
            arch.dedisperse()
    if arch.get_nchan() == 1:
        print("already fscrunched")
    else:
        arch.fscrunch()
    tfac = int(tfac)
    arch.tscrunch(tfac)

    arch_template = psrchive.Archive_load(template)
    if arch_template.get_npol() > 1:
        print("Warning: template has > 1 pols")
    if arch_template.get_nchan() > 1:
        print("Warning: template has > 1 channels")
    if arch_template.get_nsubint() > 1:
        print("Warning: template has > 1 subints")
    # careful: this assumes that the template has data shape (1,1,1,nbins)
    tmpl_prof = arch_template.get_Profile(0, 0, 0)

    shifts = np.zeros(arch.get_nsubint())
    var_shifts = np.zeros(shifts.shape)
    psf = psrchive.ProfileShiftFit()
    psf.set_standard(tmpl_prof)
    for subint in range(arch.get_nsubint()):
        data_prof = arch.get_Profile(subint, 0, 0)
        psf.set_Profile(data_prof)
        (shift, var_shift) = psf.get_shift()
        shifts[subint] = shift
        var_shifts[subint] = var_shift
    shifts[np.isnan(var_shifts)] = 0.
    arch_to_change = psrchive.Archive_load(filename)
    arch_to_change.tscrunch(tfac)
    for isub, subint in enumerate(arch_to_change):
        subint.rotate_phase(shifts[isub])
    arch_to_change.unload(outfile)
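
A minimal usage sketch for align_archive, with hypothetical file names: fit the per-subint shifts against a template and write the aligned archive, tscrunching by a factor of 4 first.

align_archive('obs.ar', 'template.std', 'obs_aligned.ar', tfac=4)
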
Example #2
    def freqNnbin(self):
        # freq and nbin need to be integers
        # list all *.rf.dzTFp files in the directory
        filenames = glob.glob(self.directory + '/*.rf.dzTFp')
        stdFreq = time_series.stdCmToFreq[self.cm]
        self.name = psrchive.Archive_load(filenames[0]).get_source()
        for filename in filenames:
            archive = psrchive.Archive_load(filename)
            fileFreq = archive.get_centre_frequency()
            fileNbin = archive.get_nbin()
            # keep the file only if it is calibrated, has the expected
            # number of bins, and lies within 300 MHz of the standard frequency
            if abs(fileFreq - stdFreq) < 300 and fileNbin == self.nbin and archive.get_poln_calibrated():
                self.filteredFileNames.append(filename)
Example #3
import numpy as np
import psrchive
# getProfile and getFreqMap are defined elsewhere in this module
# (see Examples #25 and #28)

def getOnOffSpectra(files, profile=None, npfb=32, thresh=0.2):
    #del files[7]
    #    for gpu in files.keys():
    #        files[gpu] = [files[gpu][0]]
    #    print files
    if profile is None:
        profile = getProfile(files)
    onp = profile > np.median(profile) + (np.max(profile) -
                                          np.median(profile)) * thresh
    offp = profile < np.median(profile) + (np.max(profile) -
                                           np.median(profile)) * 0.1
    freqs, fset, gpumap, idxmap = getFreqMap(files, npfb)
    nf = fset.shape[0]
    ons = []
    offs = []
    epochs = []
    tints = []
    for gpu in files.keys():
        osubint = 0
        oidx = np.flatnonzero(gpumap == gpu)
        iidx = idxmap[oidx]
        for fn in files[gpu]:
            print "loading: ", fn
            ar = psrchive.Archive_load(fn)
            source = ar.get_source()
            telescope = ar.get_telescope()

            #            ar.bscrunch_to_nbin(64)
            d = ar.get_data()
            #            imsk = np.zeros((d.shape[2],),dtype='bool')
            #            imsk[iidx] = True
            #            omsk = np.zeros((nf,),dtype='bool')
            #            omsk[oidx] = True
            for k in range(d.shape[0]):
                if len(epochs) <= osubint:
                    sub = ar.get_Integration(k)
                    epochs.append(sub.get_epoch().in_days())
                    tints.append(sub.get_duration())
                try:
                    on = ons[osubint]
                    off = offs[osubint]
                except IndexError:
                    # first time we reach this subint: allocate accumulators
                    on = np.zeros((1, 2, nf))
                    off = np.zeros((1, 2, nf))
                    ons.append(on)
                    offs.append(off)
                x = d[k, :2, :, :]
                x = x[:, iidx, :]
                x = x[:, :, onp].mean(2)
                on[:, :, oidx] = x
                x = d[k, :2, :, :]
                x = x[:, iidx, :]
                x = x[:, :, offp].mean(2)
                off[:, :, oidx] = x
                osubint += 1
    on = np.concatenate(ons, axis=0)
    off = np.concatenate(offs, axis=0)
    epochs = np.array(epochs)
    tints = np.array(tints)
    return fset, on, off, epochs, tints, profile, onp, offp, gpumap, idxmap, freqs, source, telescope
Example #4
    def group_psrflux(self):
        self.timeSeriesFileName = self.name + '.' + str(self.cm) + '.time_series'
        # return a list of the data points in tuples
        unsortedData = []

        for fileName in self.filteredFileNames:
            stdFileName = self.stdProfDir + '/' + self.name + '_' + str(self.cm) + 'cm_paas.std'
            try:
                print(subprocess.check_output('psrflux ' + fileName + ' -s ' + stdFileName, shell=True))
            except subprocess.CalledProcessError as e:
                with open('timeSeriesErrorLog', 'a') as errorLog:
                    errorLog.write(str(e) + '\n')
            # integration time from the archive; Simon notes that psrflux
            # does not report the correct integration time
#            vapOut=subprocess.check_output('vap -nc "length" '+fileName,shell=True).split()
            archive = psrchive.Archive_load(fileName)
            time = archive.integration_length()
            fileNameWithoutDir = archive.get_filename()
            
            try:
                with open(fileName+'.ds','r') as f:
                    lines=f.readlines()
                    mjd=float(lines[4][8:])
                    dataLineList=lines[7].split()
                    freq=float(dataLineList[3])
                    flux=float(dataLineList[4])
                    flux_err=float(dataLineList[5])
                    toa_err=subprocess.check_output('pat -a"'+stdFileName+'" '+fileName,shell=True).split()[4]
                    if time!=0.0:
                        scaledToa_err=float(toa_err)/math.sqrt(time)
                        unsortedData.append((mjd,time,freq,flux,flux_err,scaledToa_err,fileNameWithoutDir))
            except IOError as r:
                with open('SfErrorLog','a') as errorLog:
                    errorLog.write(str(r)+'\n')
                break
Example #5
def fixCyclicDedisp(fname, nchan=32, overwrite=False, ext='fix'):
    # copied from paul's fix_cyclic_dedisp script
    import psrchive
    import os
    import psr_utils
    arch = psrchive.Archive_load(fname)
    cf = arch.get_centre_frequency()
    bw = arch.get_bandwidth()
    f_lo = cf - bw / 2.0
    nch = arch.get_nchan()
    pfb_nch = nchan
    pfb_bw = bw / pfb_nch
    chan_per_pfb = nch // pfb_nch  # integer channels per PFB channel
    dm = arch.get_dispersion_measure()
    for isub in range(arch.get_nsubint()):
        sub = arch[isub]
        per = sub.get_folding_period()
        for ichan in range(nch):
            pfb_cf = f_lo + ((ichan // chan_per_pfb) + 0.5) * pfb_bw
            dt = psr_utils.delay_from_DM(dm, pfb_cf) - psr_utils.delay_from_DM(
                dm, cf)
            for ipol in range(sub.get_npol()):
                prof = sub.get_Profile(ipol, ichan)
                prof.rotate_phase(dt / per)
    #arch.set_dedispersed(True) # doesn't work, lame...
    if (overwrite):
        outf = fname
    else:
        outf = fname + '.' + ext
    arch.unload(outf)
    os.system("psredit -m -c dmc=1 %s" % outf)
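
A hedged usage sketch for fixCyclicDedisp (the file name is hypothetical): undo the residual intra-PFB-channel dispersion and write the result to 'obs.ar.fix' rather than overwriting the input.

fixCyclicDedisp('obs.ar', nchan=32, overwrite=False)
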
Example #6
    def fcal_archive(self, ar_filename):
        """Flux calibrate the archive 'ar_filename'. The method 'get_scaling' needs to have been called beforehand"""

        ar = p.Archive_load(ar_filename)
        # Check Npol: the archive should be total intensity or match
        # the number of receivers in the fluxcal
        if ar.get_npol() != 1 and ar.get_npol() != self.nrcvr:
            print("Error: Npol=%d in fluxcal does not match Npol=%d in archive:" % (
                self.nrcvr, ar.get_npol()))
            sys.exit(-1)

        # Check Nchan
        if self.nchan != ar.get_nchan():
            print("Error: Nchan=%d in fluxcal does not match Nchan=%d in archive:" % (
                self.nchan, ar.get_nchan()))
            sys.exit(-1)

        # Check if same centre frequency
        if self.obsfreq != ar.get_centre_frequency():
            print("Error: obsfreq=%f in fluxcal does not match obsfreq=%f in archive:" % (
                self.obsfreq, ar.get_centre_frequency()))
            sys.exit(-1)

        # Rescale the archive to Jansky
        for isub in range(ar.get_nsubint()):
            i = ar.get_Integration(isub)
            for ichan in range(ar.get_nchan()):
                prof = i.get_Profile(0, ichan)
                # get_amps() returns a view of the profile data,
                # so scaling in place modifies the archive itself
                pdata = prof.get_amps()
                pdata *= self.scale[ichan]

        ar.set_scale("Jansky")
        ar.unload(ar_filename + ".calib")
Example #7
import psrchive as psr  # the snippet uses the 'psr' alias

def load_data(fn, dcycle):
    """ Load data from a .ar file 
    Open the file, remove the baseline, pscrunch, dedisperse, 
    and remove the main pulse.
    
    Args:
        fn (str): Filename to open
        dcycle: duty-cycle percent of a pulse, e.g. for J1909-3744 is ~10 and for J0437-4715 is ~ 80
    
    Returns the archive and the amplitudes in the form of a numpy array 
    with a shape (time=1, pol=1, freq, phase_bin)
    """
    ar = psr.Archive_load(fn)
    print("Cleaning is started for {}".format(ar.get_filename()))
    patient = ar.clone()
    patient.remove_baseline()
    patient.pscrunch()
    patient.dedisperse()
    #data = patient.get_data()
    #data_offpulse = data_without_main_pulse(data, patient, dcycle)
    if dcycle > 0:
        print('main pulse is being removed')
        data_offpulse = data_without_main_pulse(patient, dcycle)
    else:
        data_offpulse = patient.get_data()
    return ar, data_offpulse
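
A usage sketch, assuming a J1909-3744 archive with a duty cycle of roughly 10 per cent (file name hypothetical):

ar, data_offpulse = load_data('J1909-3744.ar', dcycle=10)
print(data_offpulse.shape)  # (time, pol, freq, phase_bin)
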
Example #8
import os
import numpy as np
import psrchive

def fits2numpy(fitsdir, write_npys=False):
    npydir = str(fitsdir) + '/npys'
    if write_npys:
        if not os.path.exists(npydir):
            os.mkdir(npydir)
        os.chdir(npydir)
        for fits in os.listdir(fitsdir):
            print('Fits files: ', fits)
            #npar = 'pulse_120390656' + '_secondtry.npy'
            if fits.endswith('.fits'):
                npar = str(fits[:-5]) + '.npy'
                with open(npar, 'wb') as npar_file:
                    #arch = psrchive.Archive_load('/datax/scratch/jfaber/SPANDAK_extension/pipeline_playground/61.4627973333_67.0552026667_fits/pulse_120390656.fits')
                    #arch = psrchive.Archive_load(directory + '/' + fits)
                    arch = psrchive.Archive_load(fitsdir + '/' + fits)
                    #os.system('psrplot -p F -jD' + directory + '/' + fits)
                    arch.dedisperse()
                    arch.remove_baseline()
                    arch.convert_state('Stokes')
                    data = arch.get_data()

                    #Write Numpy Arrays to npys directory in fits directory
                    np.save(npar_file, data[:, 0, :, :].mean(0))
                    print('Numpy Array Written...')
    return npydir
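
A usage sketch with a hypothetical directory name: convert every .fits file under 'fits_dir' into a frequency-versus-phase .npy array stored in 'fits_dir/npys'.

npydir = fits2numpy('fits_dir', write_npys=True)
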
Example #9
import psrchive as psr  # the snippet uses the 'psr' alias

def load_data(fn, gate_file):
    """ Load data from a .ar file 
    Open the file, remove the baseline, pscrunch, 
    then remove the main pulse using the phase gates.
    
    Args:
        fn (str): Filename to open
        gate_file (file): phase bin intervals to be removed (look at gate_reader function)
        0,15,500,512 means phase bins in the ranges 
        from 0 to 15 and from 500 to 512 will be removed
    
    Returns the archive and the amplitudes in the form of a numpy array 
    with a shape (time=1, pol=1, freq, phase_bin)
    """
    ar = psr.Archive_load(fn)
    print("Cleaning is started for {}".format(ar.get_filename()))
    patient = ar.clone()
    patient.remove_baseline()
    patient.pscrunch()
    #    patient.tscrunch()
    #    patient.fscrunch_to_nchan(patient.get_nchan()/scr_factor)
    #    patient.dedisperse()
    data = patient.get_data()
    data_offpulse = main_pulse_wash(data, gate_file)
    return ar, data_offpulse
Example #10
import psrchive

def uncalibrated(fn):
    # True if the archive has not yet been flux-calibrated
    # (i.e. its scale is not in Jansky)
    a = psrchive.Archive_load(fn)
    return a.get_scale() != 'Jansky'
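
A usage sketch, assuming a set of .ar files in the working directory: keep only the archives that still need flux calibration.

import glob
to_calibrate = [fn for fn in glob.glob('*.ar') if uncalibrated(fn)]
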
Example #11
import numpy as np
import psrchive as psr

def psr2np(fname, NCHAN, dm):
    # Takes a psrchive file as input and returns a numpy array
    fpsr = psr.Archive_load(fname)
    fpsr.dededisperse()
    fpsr.set_dispersion_measure(dm)
    fpsr.dedisperse()

    fpsr.fscrunch_to_nchan(NCHAN)
    fpsr.remove_baseline()

    #-- apply weights for RFI lines --#
    ds = fpsr.get_data().squeeze()
    w = fpsr.get_weights().flatten()
    w = w / np.max(w)
    idx = np.where(w == 0)[0]
    ds = np.multiply(ds, w[np.newaxis, :, np.newaxis])
    ds[:, idx, :] = np.nan

    #-- Get total intensity data (I) from the full Stokes --#
    data = ds[0, :, :]

    #-- Get frequency axis values (computed but not returned) --#
    freq = np.linspace(fpsr.get_centre_frequency() - abs(fpsr.get_bandwidth() / 2),
                       fpsr.get_centre_frequency() + abs(fpsr.get_bandwidth() / 2),
                       fpsr.get_nchan())

    #-- Get time axis in msec (computed but not returned) --#
    tbin = float(fpsr.integration_length() / fpsr.get_nbin())
    taxis = np.arange(0, fpsr.integration_length(), tbin) * 1000

    return data
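
A usage sketch with hypothetical file name and DM: re-dedisperse the archive at DM = 56.77 pc/cc and scrunch it to 64 channels before further processing.

data = psr2np('pulse.ar', NCHAN=64, dm=56.77)
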
Example #12
import numpy as np
import psrchive as psr

def dedispblock(ar, lodm, hidm):
    fpsr = psr.Archive_load(ar)
    toplot = []
    dmstep = 1
    for dm in range(lodm, hidm, dmstep):
        fpsr.remove_baseline()
        fpsr.set_dispersion_measure(dm)
        fpsr.dedisperse()
        ds = fpsr.get_data().squeeze()
        w = fpsr.get_weights().flatten()
        w = w / np.max(w)  # normalize the weights
        idx = np.where(w == 0)[0]
        ds = np.multiply(ds, w[np.newaxis, :, np.newaxis])  # apply them
        ds[:, idx, :] = np.nan
        data1 = ds[0, :, :]
        time = np.nanmean(data1[:, :], axis=0)
        toplot.append(time)

    tbin = float(fpsr.integration_length() / fpsr.get_nbin())
    taxis = np.arange(0, fpsr.integration_length(), tbin)
    taxis = taxis * 1000  # convert to msec
    # transposing twice restores the original layout, so a plain
    # array conversion is sufficient
    toplot = np.array(toplot)
    return toplot, taxis
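
A usage sketch (file name and DM range hypothetical): build a DM-versus-time block for trial DMs from 0 to 100 pc/cc.

toplot, taxis = dedispblock('pulse.ar', lodm=0, hidm=100)
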
Example #13
import pickle
import psrchive

def psrfits_to_pickle(fname, output_fname=None):
    ar = psrchive.Archive_load(fname)
    ar.dedisperse()
    ar.pscrunch()
    ar.remove_baseline()
    nsubint = int(ar.get_nsubint())
    tobs = nsubint * float(ar.get_Integration(0).get_duration())
    metadata = dict(source=ar.get_source(),
                    period=get_period(ar),
                    acceleration=0.0,
                    cfreq=ar.get_centre_frequency(),
                    dm=ar.get_dispersion_measure(),
                    bw=abs(ar.get_bandwidth()),
                    nsubint=nsubint,
                    nsubband=ar.get_nchan(),
                    nphase=ar.get_nbin(),
                    tobs=tobs,
                    dc=0.1,
                    data=ar.get_data().squeeze())
    metadata["chbw"] = metadata["bw"] / metadata[
        "nsubband"]  # the width in MHz of each frequency channel.
    metadata["tsub"] = metadata["tobs"] / metadata[
        "nsubint"]  # the width in seconds of each temporal bin.
    metadata["tphase"] = metadata["period"] / metadata[
        "nphase"]  # the width of each bin across the profile in seconds.
    if output_fname is None:
        stem = fname.split(".")[0]
        output_fname = "{0}.pickle".format(stem)
        print("Output file:", output_fname)
    with open(output_fname, "wb") as fout:
        pickle.dump(metadata, fout)
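
A usage sketch, assuming the Python 3 pickle module used above (file names hypothetical): write the archive to a pickle and read the metadata back.

import pickle
psrfits_to_pickle('obs.ar')
with open('obs.pickle', 'rb') as fin:
    metadata = pickle.load(fin)
print(metadata['source'], metadata['dm'])
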
Example #14
 def _fromfile(cls, fname_path):
     """Generate a PSRFile instance from a data file.  
     fname should be full path to file name."""
     (path,fname) = os.path.split(fname_path)
     try:
         with psrchive_no_amps():
             arch = psrchive.Archive_load(fname_path)
             source = arch.get_source()
             rcvr = arch.get_receiver_name()
             backend = arch.get_backend_name()
             type = arch.get_type()
             mjd = arch[0].get_epoch().in_days()
             bad = 0
             reason = ""
     except Exception:
         source = "unk"
         rcvr = "unk"
         backend = "unk"
         type = "unk"
         mjd = 0.0
         bad = 1
         reason = "import error"
     return super(PSRFile,cls).__new__(cls,
             fname=fname, path=path, 
             source=source, rcvr=rcvr, backend=backend,
             type=type, mjd=mjd, bad=bad, reason=reason)
Example #15
import numpy as np
import psrchive

def read_single_file(filename, slice_bl, freq_info, freq_bin):
    print('open file %s' % filename)
    arch = psrchive.Archive_load(filename)
    arch.remove_baseline()  # remove baseline
    arch.convert_state('Stokes')
    data = arch.get_data()  # (subint, pol, chan, bin)
    #nchan = data.shape[2]
    #print(data.shape)
    #freq = arch.get_frequencies()
    wts = arch.get_weights()
    zap_channel = np.all(wts == 0, axis=0)  # channels zapped in the ar file
    zap_subint = np.all(wts == 0,
                        axis=1)  # sub-integrations zapped in the ar file
    data[:, :, zap_channel] = np.nan  # set the zapped channels to NaN
    # print(data.shape)
    noise = data[:, :, :, slice_bl]  # the off-pulse data
    # variance of the off-pulse Stokes vector over subint and bin, shape (pol, chan)
    var_file = noise[~zap_subint].var(axis=(0, 3))
    rms_file = np.sqrt(var_file)
    inorm = rms_file[0]
    data /= inorm[None, None, :, None]
    #profile_file = np.nanmean(data, axis=2)
    profile_file = []
    for k in range(len(freq_bin)):
        kk = freq_bin[k]
        #print('%d-%dMHz'%(freq_info[k][0],freq_info[k][1]))
        profile_file.append(np.nanmean(data[:, :, kk[0]:kk[1], :], axis=2))
    # profile_file is a list with one (subint, pol, bin) array per band;
    # var_file.shape = (pol, chan)
    return (profile_file, var_file, np.count_nonzero(~zap_subint))
Example #16
import os
import psrchive as psr

def load_archive_data(path, verbose=False):
    """Function to load .ar files and convert to PSRCHIVE archive objects.

    Input:
        path    : full path to location of the .ar files.
        verbose : option to run in verbose mode (default=False)

    Output:
        archives : list of PSRCHIVE archive objects
    """
    files = []
    for fname in os.listdir(path):
        if fname.endswith('.ar'):
            files.append(fname)
    files.sort()
    archives = [psr.Archive_load(path + fname) for fname in files]
    if verbose:
        print('=' * 102)
        print('                                     Files to be processed:')
        print('=' * 102)
    for i in range(1, len(archives)):
        # append each remaining archive onto the first one (archives[0])
        archives[0].append(archives[i])
        if verbose:
            print(archives[i])
    return archives
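
A usage sketch with a hypothetical directory: load and combine all .ar files under '/data/obs/' (note the trailing slash, since the function concatenates path and file name directly).

archives = load_archive_data('/data/obs/', verbose=True)
combined = archives[0]  # the archive with all others appended
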
Example #17
# External helpers assumed from the surrounding module: tel_codes,
# debug, wrap, and align_profile_polarization.
from astropy.io import fits  # or pyfits in older installations
import numpy as np
import psrchive

def generate_toa_info(template, filename, noise="off-pulse", off_pulse_fraction=0.25):
    F_fits = fits.open(filename)
    F = psrchive.Archive_load(filename)
    F.convert_state("Stokes")
    
    data = F.get_data()
    weights = F.get_weights()
    telescope = F.get_telescope()
    tel_code = tel_codes[telescope]

    nchan = F.get_nchan()
    bw = F.get_bandwidth()
    cf = F.get_centre_frequency()
    freqs = F_fits['SUBINT'].data['DAT_FREQ']
    if nchan==1 and len(freqs.shape)==1:
        # Aargh. FITS simplifies arrays.
        freqs = freqs[:,None]
    if freqs.shape != (len(F), nchan):
        raise ValueError("frequency array has shape %s instead of %s"
                         % (freqs.shape, (len(F),nchan)))
    for i in range(len(F)):
        debug("subint %d of %d",i,len(F))
        I = F.get_Integration(i)
        e = I.get_epoch()
        e_mjdi = e.intday()
        e_mjdf = np.longdouble(e.fracday())
        P = I.get_folding_period()
        for j in range(nchan):
            if weights[i,j]==0:
                continue
            debug("chan %d of %d",j,nchan)
            sub_data = data[i,:,j,:]
            r = align_profile_polarization(template, sub_data, noise=noise, off_pulse_fraction=off_pulse_fraction)
            # FIXME: check sign
            # FIXME: do we use doppler here or in tempo?
            dt = wrap(r.phase)*P/86400.
            mjdi, mjdf = e_mjdi, e_mjdf-dt
            mjdi, mjdf = mjdi + np.floor(mjdf), mjdf - np.floor(mjdf)
            assert 0<=mjdf<1
            mjd_string = "%d.%s" % (mjdi, ("%.20f" % mjdf)[2:])
            mjd = mjdi+np.longdouble(mjdf)
            assert np.abs(np.longdouble(mjd_string)-mjd)<1e-3/86400.
            uncert = r.uncert*P*1e6 # in us
            flags = dict(subint=i, chan=j, snr=r.snr,
                         reduced_chi2=r.reduced_chi2,
                         phase=r.phase, uncert=r.uncert,
                         uncert_scaled=r.uncert_scaled,
                         P=P, weighted_frequency=I.weighted_frequency(j),
                         bw=bw/nchan, tsubint=I.get_duration(),
                         nbin=sub_data.shape[1], 
                         )
            d = dict(mjd_string=mjd_string,
                     mjd=mjd,
                     file=filename,
                     freq=freqs[i,j],
                     tel=tel_code,
                     uncert=uncert,
                     flags=flags)
            yield d
Example #18
import numpy as np
import psrchive

def archive_to_channels(filename):
    arch = psrchive.Archive_load(filename)
    arch.remove_baseline()
    arch.tscrunch()
    data = arch.get_data()
    data = data.squeeze()

    return np.vstack((data[0], data[1], data[2], data[3]))
Example #19
    def load(self, filename, offp=None, maxchan=None, tscrunch=None):
        """ 
        Load periodic spectrum from psrchive compatible file (.ar or .fits)
        
        *offp*: tuple (start,end) with start and end bin numbers to use as off pulse region for normalizing the bandpass
        
        *maxchan*: Top channel index to use. Quick and dirty way to pull out one subband from a file which contains multiple
                    subbands
        *tscrunch* : average down by a factor of tscrunch (i.e. if tscrunch = 2, average every pair of subints)
        """
        idx = 0 # only used to get parameters of integration, not data itself
        
        self.filename = filename
        self.ar = psrchive.Archive_load(filename)
        
        self.data = self.ar.get_data()  #we load all data here, so this should probably change in the long run
        if maxchan:
            bwfact = maxchan/(1.0*self.data.shape[2]) # bwfact used to indicate the actual bandwidth of the data if we're not using all channels.
            self.data = self.data[:,:,:maxchan,:]
        else:
            bwfact = 1.0
        if offp:
            self.data = self.data/(np.abs(self.data[:,:,:,offp[0]:offp[1]]).mean(3)[:,:,:,None])
        if tscrunch:
            # running sum over tscrunch consecutive subints; the commented
            # block below is the decimating alternative
            for k in range(1, tscrunch):
                self.data[:-k, :, :, :] += self.data[k:, :, :, :]
#            d = self.data
#            nsub = d.shape[0]/tscrunch
#            ntot = nsub*tscrunch
#            self.data = d[:ntot,:,:,:].reshape((nsub,tscrunch,d.shape[1],d.shape[2],d.shape[3])).mean(1)
        subint = self.ar.get_Integration(idx)
        self.nspec,self.npol,self.nchan,self.nbin = self.data.shape
        
        epoch = subint.get_epoch()
        try:
            self.imjd = np.floor(epoch)
            self.fmjd = np.fmod(epoch,1)
        except: #new version of psrchive has different kind of epoch
            self.imjd = epoch.intday()
            self.fmjd = epoch.fracday()
        self.ref_phase = 0.0
        self.ref_freq = 1.0/subint.get_folding_period()
        self.bw = np.abs(subint.get_bandwidth()) * bwfact
        self.rf = subint.get_centre_frequency()
        
        self.source = self.ar.get_source() # source name

        self.nlag = self.nchan
        self.nphase = self.nbin
        self.nharm = self.nphase // 2 + 1
        
        self.dynamic_spectrum = np.zeros((self.nspec,self.nchan))
        self.optimized_filters = np.zeros((self.nspec,self.nchan),dtype='complex')
        self.intrinsic_profiles = np.zeros((self.nspec,self.nbin))
        self.nopt = 0
        self.nloop = 0        
Example #20
import psrchive as pr

def get_value(archive):
    arch = pr.Archive_load(archive)
    arch.tscrunch()
    arch.dedisperse()
    arch.fscrunch()
    arch.pscrunch()
    arch.remove_baseline()
#    data = arch[0].get_Profile(0.0).get_amps()
    data = arch.get_data()
    return data[0,0,0,:]
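
A usage sketch (file name hypothetical): extract the fully scrunched profile and plot it against phase bin.

import matplotlib.pyplot as plt
profile = get_value('obs.ar')
plt.plot(profile)
plt.xlabel('Phase bin')
plt.show()
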
Example #21
import math
import subprocess
import psrchive

def incorrect_noise_scale(fn):
    a = psrchive.Archive_load(fn)
    tInt = a.integration_length()
    #rmsOff=a.rms_baseline()
    rmsOff = subprocess.check_output('psrstat -c "off:rms" ' + fn + ' -Q',
                                     shell=True).split()[1]
    # this quantity should be the same for all 1024-chan pulsars
    product = float(rmsOff) * math.sqrt(tInt)
    prodList.append(product)  # prodList: a module-level list assumed to exist
    print(fn + ' ' + str(tInt) + ' ' + str(rmsOff) + ' ' + str(product))
Example #22
import numpy as np
import psrchive

def archive_to_channels(filename):
    """
    Retrieve the 4 channel data as 4 arrays from archive
    """
    arch = psrchive.Archive_load(filename)
    arch.remove_baseline()
    arch.tscrunch()
    data = arch.get_data()
    data = data.squeeze()

    return np.vstack((data[0], data[1], data[2], data[3]))
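
A usage sketch with a hypothetical file name: stack the four polarization channels of a time-scrunched archive into a single array.

channels = archive_to_channels('obs.ar')
print(channels.shape)  # (4 * nchan, nbin)
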
Example #23
        def archive_creation(phase_start=0):
            if os.path.exists(temp_folder):
                shutil.rmtree(temp_folder)
            os.makedirs(temp_folder)

            #Fold the fits file to create single-pulse archives
            if phase_start:
                start = period / 2.
            else:
                start = 0

            with open(os.devnull, 'w') as FNULL:
                _ = subprocess.call([
                    'dspsr', '-S',
                    str(start), '-K', '-b',
                    str(profile_bins), '-s', '-E', par_file, fits_file
                ],
                                    cwd=temp_folder,
                                    stdout=FNULL)

            #Lists of archive names and starting times (s)
            archive_list = np.array(
                glob(os.path.join(temp_folder, 'pulse_*.ar')))
            # load each archive once to read its start time (s)
            archive_time_list = []
            for ar in archive_list:
                t0 = psrchive.Archive_load(ar).start_time()
                archive_time_list.append(t0.get_secs() + t0.get_fracsec())
            archive_time_list = np.array(archive_time_list)
            idx_sorted = np.argsort(archive_list)
            archive_list = archive_list[idx_sorted]
            archive_time_list = archive_time_list[idx_sorted]

            #Find archive where dispersed pulse would start
            start_dispersed_puls = SMJD - archive_time_list
            idx_puls = np.where((start_dispersed_puls > 0)
                                & (start_dispersed_puls < period))[0][0]

            #Check that puls is centered
            phase = start_dispersed_puls[idx_puls] / period - start / period

            idx_puls += n_puls
            if phase_start > 0.75:
                idx_puls += 1

            return phase, archive_list[idx_puls]
Example #24
import numpy as np
import psrchive

def load_template(filename, realign=False):
    T = psrchive.Archive_load(filename)
    T.fscrunch()
    T.tscrunch()
    T.convert_state('Stokes')
    T.remove_baseline()
    t_pol = T.get_data()[0, :, 0, :]
    if realign:
        # rotate so the fundamental Fourier component of the
        # total-intensity profile (t_pol[0], an assumption) has zero phase
        a = np.angle(np.fft.fft(t_pol[0])[1]) / (2 * np.pi)
        t_pol = rotate_phase_iquv(t_pol, -a)
    t_pol /= np.amax(t_pol)
    return t_pol
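
A usage sketch (template name hypothetical): load a peak-normalized full-Stokes template.

t_pol = load_template('template.std')
print(t_pol.shape)  # (npol, nbin)
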
Example #25
import numpy as np
import psrchive

def getProfile(files):
    pfs = []
    for gpu in files.keys():
        for fn in files[gpu]:
            print "loading profile from:", fn
            ar = psrchive.Archive_load(fn)
            #            ar.bscrunch_to_nbin(64)
            ar.tscrunch_to_nsub(1)
            ar.fscrunch_to_nchan(1)
            d = ar.get_data().squeeze().mean(0)
            pfs.append(d)
    return np.array(pfs).mean(0)
Example #26
import os
import numpy as np
import psrchive

def fits2numpy():
    # relies on module-level globals: directory, end, burstname, DM
    for fits in os.listdir(directory):
        if fits.endswith(str(end)):
            # note: the output name is the same for every matching file,
            # so later files overwrite earlier arrays
            npar = str(burstname) + '.dm' + str(DM) + '.npy'
            with open(npar, 'wb') as npar_file:
                arch = psrchive.Archive_load(directory + '/' + fits)
                arch.pscrunch()
                arch.set_dispersion_measure(float(DM))
                arch.dedisperse()
                arch.remove_baseline()
                arch.convert_state('Stokes')
                data = arch.get_data()
                np.save(npar_file, data[:, 0, :, :].mean(0))
                print('Array Written...')
Example #27
import os
import numpy as np
from glob import glob
import psrchive
# ra_to_rad and dec_to_rad are assumed from the surrounding module

def par_diff(params_new, folder):
    ar = glob(os.path.join(folder, "*.ar"))[-1]
    load_archive = psrchive.Archive_load(ar)
    par = load_archive.get_ephemeris()
    psr = par.get_value('PSR')
    if psr == "":
        psr = par.get_value('PSRJ')
    print("PSR ", psr)
    print("P0 diff: {:.2e} s".format(params_new['P0'] - float(par.get_value('P0'))))
    print("DM diff: {:.2e} pc/cc".format(params_new['DM'] - float(par.get_value('DM'))))

    # angular offset in arcmin (small-angle form, ignoring the cos(dec) factor)
    dist = 60. * np.rad2deg(np.sqrt(
        (ra_to_rad(par.get_value('RAJ')) - ra_to_rad(params_new['RAJ']))**2 +
        (dec_to_rad(par.get_value('DECJ')) - dec_to_rad(params_new['DECJ']))**2))
    print("Distance: {:.3f}'".format(dist))
    print("")
    return
Example #28
import numpy as np
import psrchive
# getIndexMap is assumed from the surrounding module

def getFreqMap(files, npfb=32):
    freqs = {}
    for gpu in files:
        ar = psrchive.Archive_load(files[gpu][0])
        i0 = ar.get_Integration(0)
        nch = ar.get_nchan()
        freqs[gpu] = np.array(
            [i0.get_Profile(0, k).get_centre_frequency() for k in range(nch)])
    ncyc = nch // npfb
    df = np.abs(np.diff(freqs[next(iter(freqs))][:2]))[0]

    fset, edges, gpusets, gpumap, idxmap = getIndexMap(freqs, npfb, ncyc, df)

    return freqs, fset, gpumap, idxmap
Example #29
    def get_fluxcal_params(self):
        t = self.f['FLUX_CAL']
        nchan = t.header['NCHAN']
        nrcvr = t.header['NRCVR']
        self.mjd = float(t.header['EPOCH'])
        self.data_cal = t.data.field('S_CAL').reshape((nrcvr, nchan))
        self.err_cal = t.data.field('S_CALERR').reshape((nrcvr, nchan))
        self.data_sys = t.data.field('S_SYS').reshape((nrcvr, nchan))
        self.err_sys = t.data.field('S_SYSERR').reshape((nrcvr, nchan))
        self.wts = t.data.field('DAT_WTS').reshape((nchan, ))
        self.freq = t.data.field('DAT_FREQ').reshape((nchan, ))
        self.nchan = nchan
        self.nrcvr = nrcvr

        ar = p.Archive_load(self.filename)
        self.obsfreq = ar.get_centre_frequency()
Example #30
import os
import numpy as np
import psrchive

def fits2numpy():
    # relies on the module-level global: directory
    for fits in os.listdir(directory):
        #print(fits)
        #npar = 'pulse_120390656' + '_secondtry.npy'
        if fits.endswith('.fits'):
            npar = str(fits) + '.npy'
            with open(npar, 'wb') as npar_file:
                #arch = psrchive.Archive_load('/datax/scratch/jfaber/SPANDAK_extension/pipeline_playground/61.4627973333_67.0552026667_fits/pulse_120390656.fits')
                arch = psrchive.Archive_load(directory + '/' + fits)
                #os.system('psrplot -p F -jD' + directory + '/' + fits)
                arch.dedisperse()
                arch.remove_baseline()
                arch.convert_state('Stokes')
                data = arch.get_data()
                np.save(npar_file, data[:, 0, :, :].mean(0))
                print('Array Written...')