Example 1
0
def convert_tim_to_dat(tim):
    """Convert a SIGPROC time series .tim file to a
        PRESTO .dat time series

        Input:
            tim: The SIGPROC .tim time series file to convert.

        Output:
            datfn: The PRESTO .dat time series file
    """
    if not tim.endswith(".tim"):
        raise ValueError("Was expecting a file name ending with '.tim'. "
                         "Got: %s" % tim)
    dirname, filename = os.path.split(tim)
    # Same path and base name, but with a .dat extension.
    outfn = os.path.join(dirname, filename[:-4] + ".dat")
    hdr, hdrlen = sigproc.read_header(tim)

    total = sigproc.samples_per_file(tim, hdr, hdrlen)
    copied = 0
    last_pct = -1
    with open(tim, 'rb') as src, open(outfn, 'wb') as dst:
        # Skip past the SIGPROC header; everything after it is raw samples.
        src.seek(hdrlen)
        while True:
            block = np.fromfile(src, dtype='float32', count=BLOCKSIZE)
            if not block.size:
                break
            block.tofile(dst)
            copied += block.size
            # Report progress only when the integer percentage advances.
            pct = int(100.0 * copied / total)
            if pct > last_pct:
                sys.stdout.write(" %d %%\r" % pct)
                sys.stdout.flush()
                last_pct = pct
    return outfn
Example 2
0
def read_jobs_queue(jobs):
    """Reload the persistent job queue from <basedir>/queue.dat.

    Each line of queue.dat holds "<job> <box>".  Every job is appended to
    the global jobs_list/jobs_wlist, gets a fresh Job() record in `jobs`
    with its box set, and is flagged long when its filterbank file holds
    more than 4194304 samples.

    Input:
        jobs: dict mapping job name -> Job(); updated in place.

    Output:
        Number of jobs read back, or 0 if the queue could not be read.
    """
    nb_jobs_sub = 0
    try:
        # 'with' guarantees the queue file is closed even on error
        # (the original leaked the handle and shadowed builtin 'file').
        with open("%s/queue.dat" % basedir, "r") as queuef:
            for line in queuef:
                # split() already discards the trailing newline, so the
                # original's extra box.replace("\n", "") was a no-op.
                (job, box) = line.split()
                print("%s %s" % (job, box))

                jobs_list.append(job)
                jobs_wlist.append(job)
                jobs[job] = Job()
                jobs[job].box = box

                # Determine size
                fil_filenm = "/data5/%s" % job.replace(".fbk", "_p00.fbk")
                filhdr, hdrlen = sigproc.read_header(fil_filenm)
                orig_N = sigproc.samples_per_file(fil_filenm, filhdr, hdrlen)

                if orig_N > 4194304:
                    logfile.write('Job %s detected L' % job)
                    logfile.flush()
                    jobs[job].long = 1

                nb_jobs_sub += 1
        return nb_jobs_sub
    except Exception:
        # Best-effort: a missing/corrupt queue file or unreadable
        # filterbank is reported as an empty queue.  Narrowed from the
        # original bare 'except:' so KeyboardInterrupt/SystemExit
        # propagate.
        return 0
Example 3
0
def convert_tim_to_dat(tim):
    """Convert a SIGPROC time series .tim file to a
        PRESTO .dat time series

        Input:
            tim: The SIGPROC .tim time series file to convert.

        Output:
            datfn: The PRESTO .dat time series file
    """
    if not tim.endswith(".tim"):
        raise ValueError("Was expecting a file name ending with '.tim'. "
                         "Got: %s" % tim)
    path, fn = os.path.split(tim)
    outfn = os.path.join(path, fn[:-4] + ".dat")
    hdr, hdrlen = sigproc.read_header(tim)
    nsamp = sigproc.samples_per_file(tim, hdr, hdrlen)

    done = 0
    shown = -1
    with open(tim, 'rb') as inff, open(outfn, 'wb') as outff:
        # The raw float32 samples start right after the header.
        inff.seek(hdrlen)
        chunk = np.fromfile(inff, dtype='float32', count=BLOCKSIZE)
        while chunk.size:
            chunk.tofile(outff)
            done += chunk.size
            chunk = np.fromfile(inff, dtype='float32', count=BLOCKSIZE)
            # Emit a progress line whenever the percentage ticks up.
            pct = int(100.0 * done / nsamp)
            if pct > shown:
                sys.stdout.write(" %d %%\r" % pct)
                sys.stdout.flush()
                shown = pct
    return outfn
Example 4
0
    def __init__(self, fil_filenm, box):
        """Collect per-observation metadata and initialize bookkeeping.

        Input:
            fil_filenm: SIGPROC filterbank (.sig) file for this job.
            box: processing box name, used to build the output directory.
        """
        self.fil_filenm = fil_filenm
        # BUG FIX: str.rstrip(".sig") strips a trailing *character set*
        # ('.', 's', 'i', 'g'), not the literal suffix, so names ending in
        # those letters were over-trimmed.  Remove the extension explicitly.
        if fil_filenm.endswith(".sig"):
            self.basefilenm = fil_filenm[:-4]
        else:
            self.basefilenm = fil_filenm
        #self.beam = int(self.basefilenm[-1])
        self.beam = int(self.basefilenm)
        filhdr, self.hdrlen = sigproc.read_header(fil_filenm)
        self.orig_filenm = filhdr['rawdatafile']
        self.MJD = filhdr['tstart']
        self.nchans = filhdr['nchans']
        self.ra_rad = sigproc.ra2radians(filhdr['src_raj'])
        self.ra_string = psr_utils.coord_to_string(
            *psr_utils.rad_to_hms(self.ra_rad))
        self.dec_rad = sigproc.dec2radians(filhdr['src_dej'])
        self.dec_string = psr_utils.coord_to_string(
            *psr_utils.rad_to_dms(self.dec_rad))
        self.az = filhdr['az_start']
        # Elevation is the complement of the recorded zenith angle.
        self.el = 90.0 - filhdr['za_start']
        self.BW = abs(filhdr['foff']) * filhdr['nchans']
        self.dt = filhdr['tsamp']
        self.orig_N = sigproc.samples_per_file(fil_filenm, filhdr, self.hdrlen)
        self.orig_T = self.orig_N * self.dt
        # Processing length chosen from the raw sample count by choose_N.
        self.N = choose_N(self.orig_N)
        #self.N = 2097152
        self.T = self.N * self.dt
        # Update the RA and DEC from the database file if required
        #newposn = read_db_posn(self.orig_filenm, self.beam)
        #if newposn is not None:
        #    self.ra_string, self.dec_string = newposn
            # ... and use them to update the filterbank file
        #    fix_fil_posn(fil_filenm, self.hdrlen,
        #                 self.ra_string, self.dec_string)
        # Determine the average barycentric velocity of the observation
        #self.baryv = get_baryv(self.ra_string, self.dec_string,
        #                       self.MJD, self.T, obs="NC")
        # Where to dump all the results
        # Directory structure is under the base_output_directory
        # according to base/MJD/filenmbase/beam
        self.outputdir = os.path.join(base_output_directory, box,
                                      self.basefilenm)
        # Figure out which host we are processing on
        self.hostname = socket.gethostname()
        # The fraction of the data recommended to be masked by rfifind
        self.masked_fraction = 0.0
        # Initialize our timers
        self.rfifind_time = 0.0
        self.downsample_time = 0.0
        self.subbanding_time = 0.0
        self.dedispersing_time = 0.0
        self.FFT_time = 0.0
        self.lo_accelsearch_time = 0.0
        self.hi_accelsearch_time = 0.0
        self.singlepulse_time = 0.0
        self.sifting_time = 0.0
        self.folding_time = 0.0
        self.total_time = 0.0
        # Initialize some candidate counters
        self.num_sifted_cands = 0
        self.num_folded_cands = 0
        self.num_single_cands = 0
        # Single-pulse counter (the original line was tab-indented, a
        # TabError under Python 3).
        self.nb_sp = 0
Example 5
0
def fb_samp(flist):
    """
    Count the number of samples in a SIGPROC filterbank file.

    Currently only handles one file, but would not be too hard
    to update for multifile.

    Input:
        flist: list of filterbank (.fil) file names; must hold one file.

    Output:
        n_samp: number of samples in the file.  Exits the process if
            more than one file is supplied.
    """
    # Py2-only 'print' statement replaced by a form valid in Py2 and Py3;
    # the pointless 'fillist = flist' alias is dropped.
    if len(flist) > 1:
        print("Currently can handle only one .fil file")
        sys.exit(0)

    fil_filenm = flist[0]
    filhdr, hdrlen = sigproc.read_header(fil_filenm)
    n_samp = sigproc.samples_per_file(fil_filenm, filhdr, hdrlen)

    return n_samp
Example 6
0
def submit_jobs(filename):
  """Submit every job listed (one per line) in `filename`.

  Each job is appended to the global jobs_list/jobs_wlist, gets a
  fresh Job() record, is flagged long when its filterbank file holds
  more than 4194304 samples, and has its box taken from the part of
  `filename` after the first '/'.  The queue is then written to disk.

  Input:
      filename: text file with one job name per line.

  Output:
      Number of jobs submitted, or 0 on any error.
  """
  nb_jobs = 0
  try:
    print("Try to open %s" % filename)
    # 'with' guarantees the list file is closed even on error (the
    # original leaked the handle and shadowed builtin 'file').
    with open(filename, "r") as joblist:
      for line in joblist:
        job = line.replace("\n", "")

        jobs_list.append(job)
        jobs_wlist.append(job)
        jobs[job] = Job()

        # Determine size
        fil_filenm = "/data5/%s" % job.replace(".fbk", "_p00.fbk")
        filhdr, hdrlen = sigproc.read_header(fil_filenm)
        orig_N = sigproc.samples_per_file(fil_filenm, filhdr, hdrlen)

        if orig_N > 4194304:
          logfile.write('Job %s detected L\n' % job)
          logfile.flush()
          jobs[job].long = 1

        # Box name: everything after the first '/' of the list file path.
        jobs[job].box = filename[filename.index("/") + 1:]

        nb_jobs += 1

    write_jobs_queue()
    return nb_jobs
  except Exception:
    # Best-effort: any failure (missing file, bad header, no '/' in
    # filename) counts as zero submissions.  Narrowed from the original
    # bare 'except:' so KeyboardInterrupt/SystemExit propagate.
    return 0
Example 7
0
def submit_a_job(job, box):
    """
    To resubmit a job after deletion by 'remove_a_job'
    """
    # Re-register the job on both global queues and give it a new record.
    for queue in (jobs_list, jobs_wlist):
        queue.append(job)
    record = Job()
    record.box = box
    jobs[job] = record

    # Determine size
    fil_filenm = "/data5/%s" % job.replace(".fbk", "_p00.fbk")
    filhdr, hdrlen = sigproc.read_header(fil_filenm)
    n_samples = sigproc.samples_per_file(fil_filenm, filhdr, hdrlen)

    # Flag oversized observations; log only when the flag is set.
    is_long = n_samples > 4194304
    if is_long:
        logfile.write('Job %s detected L' % job)
        logfile.flush()
    record.long = 1 if is_long else 0

    # Persist the updated queue.
    write_jobs_queue()
Example 8
0
 def __init__(self, fil_filenm):
     """Gather observation metadata and set up result bookkeeping.

     Input:
         fil_filenm: SIGPROC filterbank (.fil) file; the last character
             of its base name encodes the beam number.
     """
     self.fil_filenm = fil_filenm
     # BUG FIX: str.rstrip(".fil") strips a trailing *character set*
     # ('.', 'f', 'i', 'l'), not the literal suffix, so names ending in
     # those letters were over-trimmed.  Remove the extension explicitly.
     if fil_filenm.endswith(".fil"):
         self.basefilenm = fil_filenm[:-4]
     else:
         self.basefilenm = fil_filenm
     self.beam = int(self.basefilenm[-1])
     filhdr, self.hdrlen = sigproc.read_header(fil_filenm)
     self.orig_filenm = filhdr['rawdatafile']
     self.MJD = filhdr['tstart']
     self.nchans = filhdr['nchans']
     self.ra_rad = sigproc.ra2radians(filhdr['src_raj'])
     self.ra_string = psr_utils.coord_to_string(
         *psr_utils.rad_to_hms(self.ra_rad))
     self.dec_rad = sigproc.dec2radians(filhdr['src_dej'])
     self.dec_string = psr_utils.coord_to_string(
         *psr_utils.rad_to_dms(self.dec_rad))
     self.az = filhdr['az_start']
     # Elevation is the complement of the recorded zenith angle.
     self.el = 90.0 - filhdr['za_start']
     self.BW = abs(filhdr['foff']) * filhdr['nchans']
     self.dt = filhdr['tsamp']
     self.orig_N = sigproc.samples_per_file(fil_filenm, filhdr, self.hdrlen)
     self.orig_T = self.orig_N * self.dt
     # Processing length chosen from the raw sample count by choose_N.
     self.N = psr_utils.choose_N(self.orig_N)
     self.T = self.N * self.dt
     # Update the RA and DEC from the database file if required
     newposn = read_db_posn(self.orig_filenm, self.beam)
     if newposn is not None:
         self.ra_string, self.dec_string = newposn
         # ... and use them to update the filterbank file
         fix_fil_posn(fil_filenm, self.hdrlen, self.ra_string,
                      self.dec_string)
     # Determine the average barycentric velocity of the observation
     self.baryv = presto.get_baryv(self.ra_string,
                                   self.dec_string,
                                   self.MJD,
                                   self.T,
                                   obs="AO")
     # Where to dump all the results
     # Directory structure is under the base_output_directory
     # according to base/MJD/filenmbase/beam
     self.outputdir = os.path.join(base_output_directory,
                                   str(int(self.MJD)), self.basefilenm[:-2],
                                   str(self.beam))
     # Figure out which host we are processing on
     self.hostname = socket.gethostname()
     # The fraction of the data recommended to be masked by rfifind
     self.masked_fraction = 0.0
     # Initialize our timers
     self.rfifind_time = 0.0
     self.downsample_time = 0.0
     self.subbanding_time = 0.0
     self.dedispersing_time = 0.0
     self.FFT_time = 0.0
     self.lo_accelsearch_time = 0.0
     self.hi_accelsearch_time = 0.0
     self.singlepulse_time = 0.0
     self.sifting_time = 0.0
     self.folding_time = 0.0
     self.total_time = 0.0
     # Initialize some candidate counters
     self.num_sifted_cands = 0
     self.num_folded_cands = 0
     self.num_single_cands = 0