def get_diagnostic(self):
    """Record the smallest sigma among candidates that have a folded plot.

    Reads the single *.accelcands list in ``self.directory``, matches each
    candidate against the *.pfd.png files on disk, and stores the minimum
    sigma of the matched candidates in ``self.value``.

    Raises:
        DiagnosticError: if there is not exactly one candidate list, or if
            the *.pfd.png files do not pair one-to-one with list entries.
    """
    candlist_fns = glob.glob(os.path.join(self.directory, "*.accelcands"))
    png_names = [os.path.split(fn)[-1] for fn in
                 glob.glob(os.path.join(self.directory, "*.pfd.png"))]
    if len(candlist_fns) != 1:
        raise DiagnosticError("Wrong number of candidate lists found (%d)!" %
                              len(candlist_fns))
    cands = accelcands.parse_candlist(candlist_fns[0])

    matched_sigmas = []
    for cand in cands:
        # Rebuild the plot filename this candidate's fold would have produced.
        base, accel = cand.accelfile.split("_ACCEL_")
        expected_png = "%s_Z%s_ACCEL_Cand_%d.pfd.png" % (base, accel, cand.candnum)
        if expected_png in png_names:
            matched_sigmas.append(cand.sigma)

    # Plots and matched candidates must pair up exactly.
    if len(png_names) > len(matched_sigmas):
        raise DiagnosticError("Not all *.pfd.png images were found "
                              "in candlist! (%d > %d)" %
                              (len(png_names), len(matched_sigmas)))
    elif len(png_names) < len(matched_sigmas):
        raise DiagnosticError("Some *.pfd.png image match multiple "
                              "entries in candlist! (%d < %d)" %
                              (len(png_names), len(matched_sigmas)))
    self.value = min(matched_sigmas)
def get_diagnostic(self):
    """Store the total number of candidates in the *.accelcands file.

    Raises:
        DiagnosticError: if there is not exactly one *.accelcands file
            in ``self.directory``.
    """
    accelcands_fns = glob.glob(os.path.join(self.directory, "*.accelcands"))
    if len(accelcands_fns) != 1:
        raise DiagnosticError("Wrong number of candidate lists found (%d)!"
                              % len(accelcands_fns))
    self.value = len(accelcands.parse_candlist(accelcands_fns[0]))
def get_diagnostic(self):
    """Set ``self.value`` to the candidate count of the *.accelcands list.

    Raises:
        DiagnosticError: if zero or multiple *.accelcands files exist.
    """
    pattern = os.path.join(self.directory, "*.accelcands")
    matches = glob.glob(pattern)
    if len(matches) != 1:
        raise DiagnosticError("Wrong number of candidate lists found (%d)!" %
                              len(matches))
    parsed = accelcands.parse_candlist(matches[0])
    self.value = len(parsed)
def get_diagnostic(self):
    """Count candidates bright enough to have been folded.

    A candidate qualifies when its sigma is at least the
    ``to_prepfold_sigma`` value from the search parameters.

    Raises:
        DiagnosticError: if there is not exactly one *.accelcands file.
    """
    found = glob.glob(os.path.join(self.directory, "*.accelcands"))
    if len(found) != 1:
        raise DiagnosticError("Wrong number of candidate lists found (%d)!"
                              % len(found))
    cands = accelcands.parse_candlist(found[0])
    threshold = get_search_params(self.directory)["to_prepfold_sigma"]
    self.value = sum(1 for cand in cands if cand.sigma >= threshold)
def get_diagnostic(self):
    """Store how many candidates exceed the prepfold sigma threshold.

    Raises:
        DiagnosticError: if zero or multiple *.accelcands files exist.
    """
    cand_files = glob.glob(os.path.join(self.directory, "*.accelcands"))
    if len(cand_files) != 1:
        raise DiagnosticError("Wrong number of candidate lists found (%d)!" %
                              len(cand_files))
    cands = accelcands.parse_candlist(cand_files[0])
    search_params = get_search_params(self.directory)
    min_sigma = search_params['to_prepfold_sigma']
    qualifying = [cand for cand in cands if cand.sigma >= min_sigma]
    self.value = len(qualifying)
def get_diagnostic(self):
    """Set ``self.value`` to the minimum sigma among folded candidates.

    Considers both the acceleration search (*.accelcands) and the FFA
    search (*.ffacands): each candidate counts only if its corresponding
    *.pfd.png plot exists on disk.

    Raises:
        DiagnosticError: if either candidate list is missing/duplicated,
            or if plots do not pair one-to-one with candlist entries.
        DiagnosticNonFatalError: if no candidates were folded at all.
    """
    # find *.accelcands file
    candlists = glob.glob(os.path.join(self.directory, "*.accelcands"))
    pfdpngs = [os.path.split(fn)[-1] for fn in \
               glob.glob(os.path.join(self.directory, "*_ACCEL_*.pfd.png"))]
    if len(candlists) != 1:
        raise DiagnosticError("Wrong number of candidate lists found (%d)!" % \
                              len(candlists))
    candlist = accelcands.parse_candlist(candlists[0])
    sigmas = []
    for c in candlist:
        # Reconstruct the plot filename this candidate's fold would produce.
        base, accel = c.accelfile.split("_ACCEL_")
        pngfn = "%s_Z%s_ACCEL_Cand_%d.pfd.png" % (base, accel, c.candnum)
        if pngfn in pfdpngs:
            sigmas.append(c.sigma)

    # Repeat the matching for the FFA search products.
    ffa_candlists = glob.glob(os.path.join(self.directory, "*.ffacands"))
    ffa_pfdpngs = [os.path.split(fn)[-1] for fn in \
                   glob.glob(os.path.join(self.directory, "*_ffa_*.pfd.png"))]
    if len(ffa_candlists) != 1:
        raise DiagnosticError("Wrong number of ffa candidate lists found (%d)!" % \
                              len(ffa_candlists))
    ffa_candlist = ffacands.parse_candlist(ffa_candlists[0])
    # Pool the accel and ffa plot names so the one-to-one check below
    # covers both searches with a single pair of counts.
    pfdpngs.extend(ffa_pfdpngs)
    for c in ffa_candlist:
        # FFA plot names are derived from the ffa file stem and the
        # candidate period in milliseconds (two decimal places).
        pngfn = "%s%.2fms_Cand.pfd.png"%(c.ffafile.replace("_cands.ffa","_ffa_"),c.period*1000)
        if pngfn in ffa_pfdpngs:
            sigmas.append(c.sigma)
    if len(pfdpngs) > len(sigmas):
        raise DiagnosticError("Not all *.pfd.png images were found " \
                              "in candlist! (%d > %d)" % \
                              (len(pfdpngs), len(sigmas)))
    elif len(pfdpngs) < len(sigmas):
        raise DiagnosticError("Some *.pfd.png image match multiple " \
                              "entries in candlist! (%d < %d)" % \
                              (len(pfdpngs), len(sigmas)))
    # min() on an empty list would raise ValueError; report the situation
    # as a non-fatal diagnostic condition instead.
    if not sigmas:
        errormsg = 'No candidates folded.'
        raise DiagnosticNonFatalError(errormsg)
    self.value = min(sigmas)
def get_diagnostic(self):
    """Count candidates at or above the fold threshold from search_params.txt.

    The threshold ``to_prepfold_sigma`` is read from the search_params.txt
    file in ``self.directory`` (executed as Python via ``execfile``).

    Raises:
        DiagnosticError: if there is not exactly one *.accelcands file,
            or if search_params.txt is missing.
    """
    cand_files = glob.glob(os.path.join(self.directory, "*.accelcands"))
    if len(cand_files) != 1:
        raise DiagnosticError("Wrong number of candidate lists found (%d)!" %
                              len(cand_files))
    cands = accelcands.parse_candlist(cand_files[0])

    # Load the search parameters; the file's assignments land in `params`.
    paramfn = os.path.join(self.directory, 'search_params.txt')
    if not os.path.exists(paramfn):
        raise DiagnosticError("Search parameter file doesn't exist!")
    tmp, params = {}, {}
    execfile(paramfn, tmp, params)

    min_sigma = params['to_prepfold_sigma']
    self.value = sum(1 for cand in cands if cand.sigma >= min_sigma)
def get_candidates(versionnum, directory, header_id=None, timestamp_mjd=None, inst_cache=None):
    """Gather folded periodicity candidates (accel + ffa) for DB upload.

        Inputs:
            versionnum: A combination of the githash values from
                        PRESTO and from the pipeline.
            directory: The directory containing results from the pipeline.
            header_id: header_id number for this beam, as returned by
                        spHeaderLoader/header.upload_header (default=None)
            timestamp_mjd: mjd timstamp for this observation (default=None).
            inst_cache: ratings2 RatingInstanceIDCache instance.

        Ouput:
            cands: List of candidates.
            tempdir: Path of temporary directory that PFDs have been
                     untarred, returned so that it can be deleted after
                     successful PFD upload.
    """
    # find *.accelcands file
    candlists = glob.glob(os.path.join(directory, "*.accelcands"))
    ffa_candlists = glob.glob(os.path.join(directory, "*.ffacands"))
    if len(candlists) != 1 or len(ffa_candlists) != 1:
        # BUG FIX: the format arguments used to be grouped as
        # ("..." % (len(candlists)), len(ffa_candlists)), which raised a
        # TypeError instead of producing the intended error message.
        raise PeriodicityCandidateError("Wrong number of candidate lists found accel" \
                                        "candlists: (%d) and ffa candlists: (%d)" % \
                                        (len(candlists), len(ffa_candlists)))

    # Get list of candidates from *.accelcands file
    candlist = accelcands.parse_candlist(candlists[0])
    ffa_candlist = ffacands.parse_candlist(ffa_candlists[0])

    # find the search_params.txt file
    paramfn = os.path.join(directory, 'search_params.txt')
    if os.path.exists(paramfn):
        # search_params.txt is executed as Python; its assignments end up
        # as entries of `params` (Python 2 execfile).
        tmp, params = {}, {}
        execfile(paramfn, tmp, params)
    else:
        raise PeriodicityCandidateError("Search parameter file doesn't exist!")
    minsigma = params['to_prepfold_sigma']
    foldedcands = [c for c in candlist if c.sigma > minsigma]
    foldedcands = foldedcands[:params['max_accel_cands_to_fold']]
    foldedcands.sort(reverse=True)  # Sort by descending sigma

    # NOTE(review): FFA candidates are filtered on snr against the same
    # to_prepfold_sigma threshold; a dedicated params['ffa_snr'] threshold
    # may be wanted here -- confirm with the search configuration.
    ffa_foldedcands = [c for c in ffa_candlist if c.snr > minsigma]
    ffa_foldedcands = ffa_foldedcands[:params['max_ffa_cands_to_fold']]
    ffa_foldedcands.sort(reverse=True)  # Sort by descending snr

    # Open attribute file
    attrib_fn = os.path.join(directory, 'candidate_attributes.txt')
    attribs = np.loadtxt(attrib_fn, dtype='S')

    # Create temporary directory with a random 6-character component.
    N = 6
    prefix = "/localscratch/PALFA_pfds_"
    suffix = "_tmp/"
    String = ''.join(random.choice(string.ascii_uppercase + string.digits +
                                   string.ascii_lowercase) for _ in range(N))
    tempdir = prefix + String + suffix
    os.makedirs(tempdir)

    cands = []
    if foldedcands or ffa_foldedcands:
        pfd_tarfns = glob.glob(os.path.join(directory, "*_pfd.tgz"))
        if len(pfd_tarfns) != 1:
            raise PeriodicityCandidateError("Wrong number (%d) of *_pfd.tgz " \
                                            "files found in %s" % (len(pfd_tarfns), \
                                            directory))
        rating_tarfns = glob.glob(os.path.join(directory, "*_pfd_rat.tgz"))
        if len(rating_tarfns) != 1:
            raise PeriodicityCandidateError("Wrong number (%d) of *_pfd_rat.tgz " \
                                            "files found in %s" % (len(rating_tarfns), \
                                            directory))

        mjd = int(timestamp_mjd)
        remote_pfd_base = os.path.join(config.upload.pfd_ftp_dir, str(mjd))
        # BUG FIX: str.rstrip strips a *character set*, not a suffix, so
        # basenames whose stem ends in any of "_pfd.tgz"'s characters were
        # mangled; strip the literal suffix instead.
        pfd_basename = os.path.basename(pfd_tarfns[0])
        if pfd_basename.endswith('_pfd.tgz'):
            pfd_basename = pfd_basename[:-len('_pfd.tgz')]
        remote_pfd_dir = os.path.join(remote_pfd_base, pfd_basename)

        pfd_tarball = PFDTarball(pfd_tarfns[0], remote_pfd_base, tempdir)
        pfd_tempdir, pfd_list = pfd_tarball.extract()
        # The tarball record is uploaded alongside its candidates.
        cands.append(pfd_tarball)

        # extract ratings tarball
        tar = tarfile.open(rating_tarfns[0])
        try:
            tar.extractall(path=tempdir)
        except IOError:
            if os.path.isdir(tempdir):
                shutil.rmtree(tempdir)
            # BUG FIX: this message previously referenced an undefined name
            # "tarfn", turning the error report into a NameError.
            raise PeriodicityCandidateError("Error while extracting pfd files " \
                                            "from tarball (%s)!" % rating_tarfns[0])
        finally:
            tar.close()

    # Loop over candidates that were folded
    counter = 0
    for ii, c in enumerate(foldedcands):
        basefn = "%s_ACCEL_Cand_%d" % (c.accelfile.replace("ACCEL_", "Z"), \
                                       c.candnum)
        pfdfn = os.path.join(pfd_tempdir, basefn+".pfd")
        pngfn = os.path.join(directory, basefn+".pfd.png")
        ratfn = os.path.join(tempdir, basefn+".pfd.rat")
        pfd = prepfold.pfd(pfdfn)
        # Rows of candidate_attributes.txt whose first column names this
        # pfd become {attribute: value} entries.
        cand_attribs = dict(attribs[attribs[:,0] == basefn+".pfd"][:,1:])
        try:
            cand = PeriodicityCandidate(ii+1, pfd, c.snr, \
                                        c.cpow, c.ipow, len(c.dmhits), \
                                        c.numharm, versionnum, c.sigma, \
                                        c.period, c.dm, cand_attribs, c.search_type, \
                                        header_id=header_id)
        except Exception:
            raise PeriodicityCandidateError("PeriodicityCandidate could not be " \
                                            "created (%s)!" % pfdfn)
        pfd_size = dict(pfd_list)[pfdfn]
        cand.add_dependent(PeriodicityCandidatePFD(pfdfn, pfd_size, remote_pfd_dir=remote_pfd_dir))
        cand.add_dependent(PeriodicityCandidatePNG(pngfn))
        ratvals = ratings2.rating_value.read_file(ratfn)
        cand.add_dependent(PeriodicityCandidateRating(ratvals, inst_cache=inst_cache))
        cands.append(cand)
        counter += 1

    # FFA candidates continue the numbering after the accel candidates.
    for ii, c in enumerate(ffa_foldedcands):
        basefn = "%s%.2fms_Cand" % (c.ffafile.replace("_cands.ffa","_ffa_"), c.period*1000)
        pfdfn = os.path.join(pfd_tempdir, basefn+".pfd")
        pngfn = os.path.join(directory, basefn+".pfd.png")
        ratfn = os.path.join(tempdir, basefn+".pfd.rat")
        pfd = prepfold.pfd(pfdfn)
        cand_attribs = dict(attribs[attribs[:,0] == basefn+".pfd"][:,1:])
        try:
            cand = PeriodicityCandidate(counter+ii+1, pfd, c.snr, \
                                        c.cpow, c.ipow, len(c.dmhits), \
                                        c.numharm, versionnum, c.sigma, \
                                        c.period, c.dm, cand_attribs, c.search_type, \
                                        header_id=header_id)
        except Exception:
            raise PeriodicityCandidateError("PeriodicityCandidate could not be " \
                                            "created (%s)!" % pfdfn)
        pfd_size = dict(pfd_list)[pfdfn]
        cand.add_dependent(PeriodicityCandidatePFD(pfdfn, pfd_size, remote_pfd_dir=remote_pfd_dir))
        cand.add_dependent(PeriodicityCandidatePNG(pngfn))
        ratvals = ratings2.rating_value.read_file(ratfn)
        cand.add_dependent(PeriodicityCandidateRating(ratvals, inst_cache=inst_cache))
        cands.append(cand)

    # Caller removes tempdir after a successful upload.
    #shutil.rmtree(tempdir)
    return cands, tempdir
def check_candidates(header_id, versionnum, directory, dbname='common-copy'):
    """Check candidates in common DB.

        Inputs:
            header_id: header_id number for this beam, as returned by
                        spHeaderLoader/header.upload_header
            versionnum: A combination of the githash values from
                        PRESTO and from the pipeline.
            directory: The directory containing results from the pipeline.
            dbname: Name of database to connect to, or a database
                        connection to use (Default: 'common-copy').

        Output:
            match: Boolean value. True if all candidates and plots match what
                    is in the DB, False otherwise.
    """
    # find *.accelcands file
    candlists = glob.glob(os.path.join(directory, "*.accelcands"))
    if len(candlists) != 1:
        raise PeriodicityCandidateError("Wrong number of candidate lists found (%d)!" % \
                                        len(candlists))

    # Get list of candidates from *.accelcands file
    candlist = accelcands.parse_candlist(candlists[0])
    # Only candidates bright enough to have been folded are checked.
    minsigma = config.searching.to_prepfold_sigma
    foldedcands = [c for c in candlist if c.sigma > minsigma]
    foldedcands = foldedcands[:config.searching.max_cands_to_fold]
    foldedcands.sort(reverse=True)  # Sort by descending sigma

    # Create temporary directory
    tempdir = tempfile.mkdtemp(suffix="_tmp", prefix="PALFA_pfds_")
    tarfns = glob.glob(os.path.join(directory, "*_pfd.tgz"))
    if len(tarfns) != 1:
        raise PeriodicityCandidateError("Wrong number (%d) of *_pfd.tgz " \
                                        "files found in %s" % (len(tarfns), \
                                        directory))
    tar = tarfile.open(tarfns[0])
    try:
        tar.extractall(path=tempdir)
    except IOError:
        # Remove the partially-populated temp dir before reporting.
        if os.path.isdir(tempdir):
            shutil.rmtree(tempdir)
        raise PeriodicityCandidateError("Error while extracting pfd files " \
                                        "from tarball (%s)!" % tarfns[0])
    finally:
        tar.close()

    # Loop over candidates that were folded
    matches = []
    # Accept either an open connection or a DB name to connect to.
    if isinstance(dbname, database.Database):
        db = dbname
    else:
        db = database.Database(dbname)
    for ii, c in enumerate(foldedcands):
        basefn = "%s_ACCEL_Cand_%d" % (c.accelfile.replace("ACCEL_", "Z"), \
                                       c.candnum)
        pfdfn = os.path.join(tempdir, basefn + ".pfd")
        pngfn = os.path.join(directory, basefn + ".pfd.png")
        pfd = prepfold.pfd(pfdfn)
        try:
            # NOTE(review): "PeridocityCandidate" looks like a typo for
            # "PeriodicityCandidate" -- confirm which name the class
            # definition actually uses before renaming.
            cand = PeridocityCandidate(header_id, ii+1, pfd, c.snr, \
                                       c.cpow, c.ipow, len(c.dmhits), \
                                       c.numharm, versionnum, c.sigma)
        except Exception:
            raise PeriodicityCandidateError("PeriodicityCandidate could not be " \
                                            "created (%s)!" % pfdfn)
        matches.append(cand.compare_with_db(dbname))
        if not matches[-1]:
            # Stop at the first mismatch; all() below will return False.
            break

        # Get candidate's ID number from common DB
        db.execute("SELECT pdm_cand_id " \
                   "FROM pdm_candidates AS c " \
                   "LEFT JOIN versions AS v ON v.version_id = c.version_id " \
                   "WHERE c.header_id=%d AND c.cand_num=%d " \
                   "AND v.version_number='%s'" % \
                   (header_id, cand.cand_num, versionnum))
        # For cand.compare_with_db() to return True there must be a unique
        # entry in the common DB matching this candidate
        r = db.cursor.fetchone()
        cand_id = r[0]
        pfdplot = PeriodicityCandidatePFD(cand_id, pfdfn)
        matches.append(pfdplot.compare_with_db(dbname))
        if not matches[-1]:
            break
        pngplot = PeriodicityCandidatePNG(cand_id, pngfn)
        matches.append(pngplot.compare_with_db(dbname))
        if not matches[-1]:
            break

    # Only close connections we opened ourselves (Python 2 type check).
    if type(dbname) == types.StringType:
        db.close()
    shutil.rmtree(tempdir)
    return all(matches)
def upload_candidates(header_id, versionnum, directory, verbose=False, \
                      dry_run=False, *args, **kwargs):
    """Upload candidates to common DB.

        Inputs:
            header_id: header_id number for this beam, as returned by
                        spHeaderLoader/header.upload_header
            versionnum: A combination of the githash values from
                        PRESTO and from the pipeline.
            directory: The directory containing results from the pipeline.
            verbose: An optional boolean value that determines if information
                        is printed to stdout.
            dry_run: An optional boolean value. If True no connection to DB
                        will be made and DB command will not be executed.
                        (If verbose is True DB command will be printed
                        to stdout.)

            *** NOTE: Additional arguments are passed to the uploader function.

        Ouputs:
            cand_ids: List of candidate IDs corresponding to these candidates
                        in the common DB. (Or a list of None values if dry_run
                        is True).
    """
    # find *.accelcands file
    candlists = glob.glob(os.path.join(directory, "*.accelcands"))
    if len(candlists) != 1:
        raise PeriodicityCandidateError("Wrong number of candidate lists found (%d)!" % \
                                        len(candlists))

    # Get list of candidates from *.accelcands file
    candlist = accelcands.parse_candlist(candlists[0])
    minsigma = config.searching.to_prepfold_sigma
    foldedcands = [c for c in candlist if c.sigma > minsigma]
    foldedcands = foldedcands[:config.searching.max_cands_to_fold]
    foldedcands.sort(reverse=True)  # Sort by descending sigma

    # Create temporary directory
    tempdir = tempfile.mkdtemp(suffix="_tmp", prefix="PALFA_pfds_")
    tarfns = glob.glob(os.path.join(directory, "*_pfd.tgz"))
    if len(tarfns) != 1:
        raise PeriodicityCandidateError("Wrong number (%d) of *_pfd.tgz " \
                                        "files found in %s" % (len(tarfns), \
                                        directory))
    tar = tarfile.open(tarfns[0])
    try:
        tar.extractall(path=tempdir)
    except IOError:
        # Remove the partially-populated temp dir before reporting.
        if os.path.isdir(tempdir):
            shutil.rmtree(tempdir)
        raise PeriodicityCandidateError("Error while extracting pfd files " \
                                        "from tarball (%s)!" % tarfns[0])
    finally:
        tar.close()

    # Loop over candidates that were folded
    results = []
    for ii, c in enumerate(foldedcands):
        basefn = "%s_ACCEL_Cand_%d" % (c.accelfile.replace("ACCEL_", "Z"), \
                                       c.candnum)
        pfdfn = os.path.join(tempdir, basefn + ".pfd")
        pngfn = os.path.join(directory, basefn + ".pfd.png")
        pfd = prepfold.pfd(pfdfn)
        try:
            # NOTE(review): "PeridocityCandidate" looks like a typo for
            # "PeriodicityCandidate" -- confirm which name is defined.
            cand = PeridocityCandidate(header_id, ii+1, pfd, c.snr, \
                                       c.cpow, c.ipow, len(c.dmhits), \
                                       c.numharm, versionnum, c.sigma)
        except Exception:
            raise PeriodicityCandidateError("PeriodicityCandidate could not be " \
                                            "created (%s)!" % pfdfn)
        if dry_run:
            cand.get_upload_sproc_call()
            if verbose:
                print cand
            # NOTE(review): results only ever collects None (dry-run path);
            # the non-dry-run branch never appends cand_id, despite the
            # docstring -- confirm intended return contract.
            results.append(None)
            cand_id = -1
        else:
            cand_id = cand.upload(*args, **kwargs)

        pfdplot = PeriodicityCandidatePFD(cand_id, pfdfn)
        pngplot = PeriodicityCandidatePNG(cand_id, pngfn)
        if dry_run:
            pfdplot.get_upload_sproc_call()
            pngplot.get_upload_sproc_call()
            if verbose:
                print pfdplot
                print pngplot
        else:
            pfdplot.upload(*args, **kwargs)
            pngplot.upload(*args, **kwargs)
    shutil.rmtree(tempdir)
    return results
def upload_candidates(header_id, versionnum, directory, verbose=False, \ dry_run=False, *args, **kwargs): """Upload candidates to common DB. Inputs: header_id: header_id number for this beam, as returned by spHeaderLoader/header.upload_header versionnum: A combination of the githash values from PRESTO and from the pipeline. directory: The directory containing results from the pipeline. verbose: An optional boolean value that determines if information is printed to stdout. dry_run: An optional boolean value. If True no connection to DB will be made and DB command will not be executed. (If verbose is True DB command will be printed to stdout.) *** NOTE: Additional arguments are passed to the uploader function. Ouputs: cand_ids: List of candidate IDs corresponding to these candidates in the common DB. (Or a list of None values if dry_run is True). """ # find *.accelcands file candlists = glob.glob(os.path.join(directory, "*.accelcands")) if len(candlists) != 1: raise PeriodicityCandidateError("Wrong number of candidate lists found (%d)!" 
% \ len(candlists)) # Get list of candidates from *.accelcands file candlist = accelcands.parse_candlist(candlists[0]) minsigma = config.searching.to_prepfold_sigma foldedcands = [c for c in candlist if c.sigma > minsigma] foldedcands = foldedcands[:config.searching.max_cands_to_fold] foldedcands.sort(reverse=True) # Sort by descending sigma # Create temporary directory tempdir = tempfile.mkdtemp(suffix="_tmp", prefix="PALFA_pfds_") tarfns = glob.glob(os.path.join(directory, "*_pfd.tgz")) if len(tarfns) != 1: raise PeriodicityCandidateError("Wrong number (%d) of *_pfd.tgz " \ "files found in %s" % (len(tarfns), \ directory)) tar = tarfile.open(tarfns[0]) tar.extractall(path=tempdir) tar.close() # Loop over candidates that were folded results = [] for ii, c in enumerate(foldedcands): basefn = "%s_ACCEL_Cand_%d" % (c.accelfile.replace("ACCEL_", "Z"), \ c.candnum) pfdfn = os.path.join(tempdir, basefn+".pfd") pngfn = os.path.join(directory, basefn+".pfd.png") pfd = prepfold.pfd(pfdfn) try: cand = PeridocityCandidate(header_id, ii+1, pfd, c.snr, \ c.cpow, c.ipow, len(c.dmhits), \ c.numharm, versionnum, c.sigma) except Exception: raise PeriodicityCandidateError("PeriodicityCandidate could not be " \ "created (%s)!" % pfdfn) if dry_run: cand.get_upload_sproc_call() if verbose: print cand results.append(None) cand_id = -1 else: cand_id = cand.upload(*args, **kwargs) pfdplot = PeriodicityCandidatePFD(cand_id, pfdfn) pngplot = PeriodicityCandidatePNG(cand_id, pngfn) if dry_run: pfdplot.get_upload_sproc_call() pngplot.get_upload_sproc_call() if verbose: print pfdplot print pngplot else: pfdplot.upload(*args, **kwargs) pngplot.upload(*args, **kwargs) shutil.rmtree(tempdir) return results
def get_candidates(versionnum, directory, header_id=None, timestamp_mjd=None, inst_cache=None):
    """Gather folded candidates for upload to the common DB.

        Inputs:
            versionnum: A combination of the githash values from
                        PRESTO and from the pipeline.
            directory: The directory containing results from the pipeline.
            header_id: header_id number for this beam, as returned by
                        spHeaderLoader/header.upload_header (default=None)
            timestamp_mjd: mjd timstamp for this observation (default=None).
            inst_cache: ratings2 RatingInstanceIDCache instance.

        Ouput:
            cands: List of candidates.
            tempdir: Path of temporary directory that PFDs have been
                     untarred, returned so that it can be deleted after
                     successful PFD upload.
    """
    # find *.accelcands file
    candlists = glob.glob(os.path.join(directory, "*.accelcands"))
    if len(candlists) != 1:
        raise PeriodicityCandidateError("Wrong number of candidate lists found (%d)!" % \
                                        len(candlists))

    # Get list of candidates from *.accelcands file
    candlist = accelcands.parse_candlist(candlists[0])

    # find the search_params.txt file
    paramfn = os.path.join(directory, 'search_params.txt')
    if os.path.exists(paramfn):
        # search_params.txt is executed as Python; its assignments become
        # the entries of `params` (Python 2 execfile).
        tmp, params = {}, {}
        execfile(paramfn, tmp, params)
    else:
        raise PeriodicityCandidateError("Search parameter file doesn't exist!")
    minsigma = params['to_prepfold_sigma']
    foldedcands = [c for c in candlist \
                    if c.sigma > params['to_prepfold_sigma']]
    foldedcands = foldedcands[:params['max_cands_to_fold']]
    foldedcands.sort(reverse=True)  # Sort by descending sigma

    # Open attribute file
    attrib_fn = os.path.join(directory, 'candidate_attributes.txt')
    attribs = np.loadtxt(attrib_fn, dtype='S')

    # Create temporary directory
    tempdir = tempfile.mkdtemp(suffix="_tmp", prefix="pfds_",
                               dir="/sps/hep/glast/data/survey_pulsar/")

    if foldedcands:
        pfd_tarfns = glob.glob(os.path.join(directory, "*_pfd.tgz"))
        if len(pfd_tarfns) != 1:
            raise PeriodicityCandidateError("Wrong number (%d) of *_pfd.tgz " \
                                            "files found in %s" % (len(pfd_tarfns), \
                                            directory))
        bestprof_tarfns = glob.glob(os.path.join(directory, "*_bestprof.tgz"))
        if len(bestprof_tarfns) != 1:
            raise PeriodicityCandidateError("Wrong number (%d) of *_bestprof.tgz " \
                                            "files found in %s" % (len(bestprof_tarfns), \
                                            directory))
        rating_tarfns = glob.glob(os.path.join(directory, "*_pfd_rat.tgz"))
        if len(rating_tarfns) != 1:
            raise PeriodicityCandidateError("Wrong number (%d) of *_pfd_rat.tgz " \
                                            "files found in %s" % (len(rating_tarfns), \
                                            directory))

        # Unpack all three tarballs into the shared temp directory.
        for tarfn in [ pfd_tarfns[0], bestprof_tarfns[0], rating_tarfns[0] ]:
            tar = tarfile.open(tarfn)
            try:
                tar.extractall(path=tempdir)
            except IOError:
                # Remove the partially-populated temp dir before reporting.
                if os.path.isdir(tempdir):
                    shutil.rmtree(tempdir)
                raise PeriodicityCandidateError("Error while extracting pfd files " \
                                                "from tarball (%s)!" % tarfn)
            finally:
                tar.close()

    # Loop over candidates that were folded
    cands = []
    for ii, c in enumerate(foldedcands):
        basefn = "%s_ACCEL_Cand_%d" % (c.accelfile.replace("ACCEL_", "Z"), \
                                       c.candnum)
        pfdfn = os.path.join(tempdir, basefn+".pfd")
        pngfn = os.path.join(directory, basefn+".pfd.png")
        ratfn = os.path.join(tempdir, basefn+".pfd.rat")
        pfd = prepfold.pfd(pfdfn)
        # Rows of candidate_attributes.txt whose first column names this
        # pfd become {attribute: value} entries.
        cand_attribs = dict(attribs[attribs[:,0] == basefn+".pfd"][:,1:])
        try:
            cand = PeriodicityCandidate(ii+1, pfd, c.snr, \
                                        c.cpow, c.ipow, len(c.dmhits), \
                                        c.numharm, versionnum, c.sigma, \
                                        c.period, c.dm, cand_attribs, header_id=header_id)
        except Exception:
            raise PeriodicityCandidateError("PeriodicityCandidate could not be " \
                                            "created (%s)!" % pfdfn)
        cand.add_dependent(PeriodicityCandidatePFD(pfdfn, timestamp_mjd=timestamp_mjd))
        cand.add_dependent(PeriodicityCandidatePNG(pngfn))
        ratvals = ratings2.rating_value.read_file(ratfn)
        cand.add_dependent(PeriodicityCandidateRating(ratvals,inst_cache=inst_cache))
        cands.append(cand)
    # Caller is responsible for removing tempdir after a successful upload.
    #shutil.rmtree(tempdir)
    return cands,tempdir
def get_candidates(versionnum, directory, header_id=None):
    """Gather folded candidates for upload to the common DB.

        Inputs:
            versionnum: A combination of the githash values from
                        PRESTO and from the pipeline.
            directory: The directory containing results from the pipeline.
            header_id: header_id number for this beam, as returned by
                        spHeaderLoader/header.upload_header (default=None)

        Ouput:
            cands: List of candidates.
    """
    # find *.accelcands file
    candlists = glob.glob(os.path.join(directory, "*.accelcands"))
    if len(candlists) != 1:
        raise PeriodicityCandidateError("Wrong number of candidate lists found (%d)!" % \
                                        len(candlists))

    # Get list of candidates from *.accelcands file
    candlist = accelcands.parse_candlist(candlists[0])

    # find the search_params.txt file
    paramfn = os.path.join(directory, 'search_params.txt')
    if os.path.exists(paramfn):
        # search_params.txt is executed as Python; its assignments become
        # the entries of `params` (Python 2 execfile).
        tmp, params = {}, {}
        execfile(paramfn, tmp, params)
    else:
        raise PeriodicityCandidateError("Search parameter file doesn't exist!")
    minsigma = params['to_prepfold_sigma']
    foldedcands = [c for c in candlist \
                    if c.sigma > params['to_prepfold_sigma']]
    foldedcands = foldedcands[:params['max_cands_to_fold']]
    foldedcands.sort(reverse=True)  # Sort by descending sigma

    # Create temporary directory
    tempdir = tempfile.mkdtemp(suffix="_tmp", prefix="PALFA_pfds_")

    if foldedcands:
        tarfns = glob.glob(os.path.join(directory, "*_pfd.tgz"))
        if len(tarfns) != 1:
            raise PeriodicityCandidateError("Wrong number (%d) of *_pfd.tgz " \
                                            "files found in %s" % (len(tarfns), \
                                            directory))
        tar = tarfile.open(tarfns[0])
        try:
            tar.extractall(path=tempdir)
        except IOError:
            # Remove the partially-populated temp dir before reporting.
            if os.path.isdir(tempdir):
                shutil.rmtree(tempdir)
            raise PeriodicityCandidateError("Error while extracting pfd files " \
                                            "from tarball (%s)!" % tarfns[0])
        finally:
            tar.close()

    # Loop over candidates that were folded
    cands = []
    for ii, c in enumerate(foldedcands):
        basefn = "%s_ACCEL_Cand_%d" % (c.accelfile.replace("ACCEL_", "Z"), \
                                       c.candnum)
        pfdfn = os.path.join(tempdir, basefn + ".pfd")
        pngfn = os.path.join(directory, basefn + ".pfd.png")
        pfd = prepfold.pfd(pfdfn)
        try:
            cand = PeriodicityCandidate(ii+1, pfd, c.snr, \
                                        c.cpow, c.ipow, len(c.dmhits), \
                                        c.numharm, versionnum, c.sigma, \
                                        header_id=header_id)
        except Exception:
            raise PeriodicityCandidateError("PeriodicityCandidate could not be " \
                                            "created (%s)!" % pfdfn)
        cand.add_dependent(PeriodicityCandidatePFD(pfdfn))
        cand.add_dependent(PeriodicityCandidatePNG(pngfn))
        cands.append(cand)
    shutil.rmtree(tempdir)
    return cands
def get_candidates(versionnum, directory, header_id=None):
    """Gather folded candidates for upload to the common DB.

        Inputs:
            versionnum: A combination of the githash values from
                        PRESTO and from the pipeline.
            directory: The directory containing results from the pipeline.
            header_id: header_id number for this beam, as returned by
                        spHeaderLoader/header.upload_header (default=None)

        Ouput:
            cands: List of candidates.
    """
    # find *.accelcands file
    candlists = glob.glob(os.path.join(directory, "*.accelcands"))
    if len(candlists) != 1:
        raise PeriodicityCandidateError("Wrong number of candidate lists found (%d)!" % \
                                        len(candlists))

    # Get list of candidates from *.accelcands file
    candlist = accelcands.parse_candlist(candlists[0])

    # find the search_params.txt file
    paramfn = os.path.join(directory, 'search_params.txt')
    if os.path.exists(paramfn):
        # search_params.txt is executed as Python; its assignments become
        # the entries of `params` (Python 2 execfile).
        tmp, params = {}, {}
        execfile(paramfn, tmp, params)
    else:
        raise PeriodicityCandidateError("Search parameter file doesn't exist!")
    minsigma = params['to_prepfold_sigma']
    foldedcands = [c for c in candlist \
                    if c.sigma > params['to_prepfold_sigma']]
    foldedcands = foldedcands[:params['max_cands_to_fold']]
    foldedcands.sort(reverse=True)  # Sort by descending sigma

    # Create temporary directory
    tempdir = tempfile.mkdtemp(suffix="_tmp", prefix="PALFA_pfds_")

    if foldedcands:
        tarfns = glob.glob(os.path.join(directory, "*_pfd.tgz"))
        if len(tarfns) != 1:
            raise PeriodicityCandidateError("Wrong number (%d) of *_pfd.tgz " \
                                            "files found in %s" % (len(tarfns), \
                                            directory))
        tar = tarfile.open(tarfns[0])
        try:
            tar.extractall(path=tempdir)
        except IOError:
            # Remove the partially-populated temp dir before reporting.
            if os.path.isdir(tempdir):
                shutil.rmtree(tempdir)
            raise PeriodicityCandidateError("Error while extracting pfd files " \
                                            "from tarball (%s)!" % tarfns[0])
        finally:
            tar.close()

    # Loop over candidates that were folded
    cands = []
    for ii, c in enumerate(foldedcands):
        basefn = "%s_ACCEL_Cand_%d" % (c.accelfile.replace("ACCEL_", "Z"), \
                                       c.candnum)
        pfdfn = os.path.join(tempdir, basefn+".pfd")
        pngfn = os.path.join(directory, basefn+".pfd.png")
        pfd = prepfold.pfd(pfdfn)
        try:
            cand = PeriodicityCandidate(ii+1, pfd, c.snr, \
                                        c.cpow, c.ipow, len(c.dmhits), \
                                        c.numharm, versionnum, c.sigma, \
                                        header_id=header_id)
        except Exception:
            raise PeriodicityCandidateError("PeriodicityCandidate could not be " \
                                            "created (%s)!" % pfdfn)
        cand.add_dependent(PeriodicityCandidatePFD(pfdfn))
        cand.add_dependent(PeriodicityCandidatePNG(pngfn))
        cands.append(cand)
    shutil.rmtree(tempdir)
    return cands
def check_candidates(header_id, versionnum, directory, dbname='common-copy'):
    """Check candidates in common DB.

        Inputs:
            header_id: header_id number for this beam, as returned by
                        spHeaderLoader/header.upload_header
            versionnum: A combination of the githash values from
                        PRESTO and from the pipeline.
            directory: The directory containing results from the pipeline.
            dbname: Name of database to connect to, or a database
                        connection to use (Default: 'common-copy').

        Output:
            match: Boolean value. True if all candidates and plots match what
                    is in the DB, False otherwise.
    """
    # find *.accelcands file
    candlists = glob.glob(os.path.join(directory, "*.accelcands"))
    if len(candlists) != 1:
        raise PeriodicityCandidateError("Wrong number of candidate lists found (%d)!" % \
                                        len(candlists))

    # Get list of candidates from *.accelcands file
    candlist = accelcands.parse_candlist(candlists[0])
    # Only candidates bright enough to have been folded are checked.
    minsigma = config.searching.to_prepfold_sigma
    foldedcands = [c for c in candlist if c.sigma > minsigma]
    foldedcands = foldedcands[:config.searching.max_cands_to_fold]
    foldedcands.sort(reverse=True)  # Sort by descending sigma

    # Create temporary directory
    tempdir = tempfile.mkdtemp(suffix="_tmp", prefix="PALFA_pfds_")
    tarfns = glob.glob(os.path.join(directory, "*_pfd.tgz"))
    if len(tarfns) != 1:
        raise PeriodicityCandidateError("Wrong number (%d) of *_pfd.tgz " \
                                        "files found in %s" % (len(tarfns), \
                                        directory))
    tar = tarfile.open(tarfns[0])
    try:
        tar.extractall(path=tempdir)
    except IOError:
        # Remove the partially-populated temp dir before reporting.
        if os.path.isdir(tempdir):
            shutil.rmtree(tempdir)
        raise PeriodicityCandidateError("Error while extracting pfd files " \
                                        "from tarball (%s)!" % tarfns[0])
    finally:
        tar.close()

    # Loop over candidates that were folded
    matches = []
    # Accept either an open connection or a DB name to connect to.
    if isinstance(dbname, database.Database):
        db = dbname
    else:
        db = database.Database(dbname)
    for ii, c in enumerate(foldedcands):
        basefn = "%s_ACCEL_Cand_%d" % (c.accelfile.replace("ACCEL_", "Z"), \
                                       c.candnum)
        pfdfn = os.path.join(tempdir, basefn+".pfd")
        pngfn = os.path.join(directory, basefn+".pfd.png")
        pfd = prepfold.pfd(pfdfn)
        try:
            # NOTE(review): "PeridocityCandidate" looks like a typo for
            # "PeriodicityCandidate" -- confirm which name the class
            # definition actually uses before renaming.
            cand = PeridocityCandidate(header_id, ii+1, pfd, c.snr, \
                                       c.cpow, c.ipow, len(c.dmhits), \
                                       c.numharm, versionnum, c.sigma)
        except Exception:
            raise PeriodicityCandidateError("PeriodicityCandidate could not be " \
                                            "created (%s)!" % pfdfn)
        matches.append(cand.compare_with_db(dbname))
        if not matches[-1]:
            # Stop at the first mismatch; all() below will return False.
            break

        # Get candidate's ID number from common DB
        db.execute("SELECT pdm_cand_id " \
                   "FROM pdm_candidates AS c " \
                   "LEFT JOIN versions AS v ON v.version_id = c.version_id " \
                   "WHERE c.header_id=%d AND c.cand_num=%d " \
                   "AND v.version_number='%s'" % \
                   (header_id, cand.cand_num, versionnum))
        # For cand.compare_with_db() to return True there must be a unique
        # entry in the common DB matching this candidate
        r = db.cursor.fetchone()
        cand_id = r[0]
        pfdplot = PeriodicityCandidatePFD(cand_id, pfdfn)
        matches.append(pfdplot.compare_with_db(dbname))
        if not matches[-1]:
            break
        pngplot = PeriodicityCandidatePNG(cand_id, pngfn)
        matches.append(pngplot.compare_with_db(dbname))
        if not matches[-1]:
            break

    # Only close connections we opened ourselves (Python 2 type check).
    if type(dbname) == types.StringType:
        db.close()
    shutil.rmtree(tempdir)
    return all(matches)