def test_download_db():
    """
    Try downloading the database with the socket disabled.

    ``checkupdate=True`` forces a network fetch; with sockets disabled the
    constructor must raise a RuntimeError rather than fail silently.
    """
    with pytest.raises(RuntimeError):
        # no need to keep the (never-created) query object around
        QueryATNF(checkupdate=True)
def test_bad_database():
    """
    Try loading in a random (nonexistent) file as the database.

    A path that is not a valid catalogue database must raise RuntimeError.
    """
    baddbfile = 'sdhfjjdf'  # bad database file
    with pytest.raises(RuntimeError):
        # the query object is never created, so don't bind it
        QueryATNF(loadfromdb=baddbfile)
def qpsr(sample_ra=6.02363, sample_dec=-72.08128):
    """Run a circular-boundary ATNF query centred on the given position.

    Parameters
    ----------
    sample_ra, sample_dec : float
        Centre of the search region in decimal degrees.

    Returns
    -------
    tuple
        The ``QueryATNF`` object and the first five rows of its table.
    """
    ra, dec = deg2HMS(ra=sample_ra, dec=sample_dec)
    # [RA (hms), Dec (dms), radius in degrees]
    c = [ra, dec, 1]
    logging.info(f'Querying with {c}')
    query = QueryATNF(
        params=['BNAME', 'JNAME', 'RAJ', 'DECJ', 'DM', 'S1400', 'P0'],
        circular_boundary=c,
        include_errs=False,
    )
    return query, query.table[:5]
def test_save_load_file(query):
    """
    Test saving and reloading a query as a pickle file.

    Also checks that unwritable/nonexistent paths raise IOError, and cleans
    up the pickle written to the working directory (the original test left
    ``query.pkl`` behind).
    """
    # saving to an unwritable path must raise
    testfilebad = '/jkshfdjfd/jkgsdfjkj/kgskfd.jhfd'
    with pytest.raises(IOError):
        query.save(testfilebad)

    # loading from a nonexistent path must raise
    with pytest.raises(IOError):
        QueryATNF(loadquery=testfilebad)

    testfile = os.path.join(os.getcwd(), 'query.pkl')
    try:
        query.save(testfile)
        # re-load in as a new query
        querynew = QueryATNF(loadquery=testfile)
        assert query.num_pulsars == querynew.num_pulsars
    finally:
        # don't pollute the working directory with the test pickle
        if os.path.exists(testfile):
            os.remove(testfile)
def import_pulsars(use_epn=True):
    """Import pulsar info from ATNF."""
    from psrqpy import QueryATNF

    query = QueryATNF()
    df = query.pandas
    db = pd.DataFrame()

    def calc_bandwidth(row):
        # flags for a flux measurement at each reference frequency
        s400 = ~np.isnan(row.S400)
        s1400 = ~np.isnan(row.S1400)
        s2000 = ~np.isnan(row.S2000)

        # base bandwidth, extended for each frequency span covered
        bw = 500
        if s400 & s1400:
            bw += (1400 - 400)
        if (s400 & s2000) or (s400 & s1400 & s2000):
            bw += (2000 - 400)
        if s1400 & s2000:
            bw += (2000 - 1400)
        # no measurements at all -> unknown bandwidth
        if sum((s400, s1400, s2000)) == 0:
            bw = np.nan
        return bw

    # Observing bandwidth
    db['bw'] = df.apply(calc_bandwidth, axis=1)  # MHz

    def calc_flux(row):
        # plain Python max over the three bands (NaNs propagate as-is)
        return max([row.S400, row.S1400, row.S2000])

    # Pseudo luminosity in Jy*kpc**2
    db['pseudo_lum'] = df.apply(calc_flux, axis=1) * 1e-3 * df.DIST_DM**2
    # Pulse width
    db['w_eff'] = df['W10'] * 1e-3  # ms -> s
    # Object type
    db['type'] = 'pulsar'

    if not use_epn:
        return db

    # Find bw from EPN if available: merge per-pulsar EPN bandwidths and
    # keep the larger of the two estimates
    db['name'] = df.NAME
    epn_db = bw_from_epn()
    epn_db['bw'] += 500  # Set a default of 500 MHz
    merged_db = pd.merge(db, epn_db, on='name')
    merged_db['bw'] = merged_db[['bw_x', 'bw_y']].apply(np.max, axis=1)
    merged_db.drop(['bw_x', 'bw_y'], inplace=True, axis=1)
    return merged_db
def do_query(self):
    """Query the ATNF pulsar catalogue and store the result table,
    adding a SkyCoord column built from RAJ/DECJ."""
    params = ['JNAME', 'RAJ', 'DECJ', 'S1400', 'DM', 'P0']
    condition = "S1400 > 1 && DECJ > -35"

    print("Querying ATNF pulsar catalogue")
    started = time.time()
    result = QueryATNF(params=params, condition=condition)
    elapsed = time.time() - started
    print("Query took {:.1f} seconds".format(elapsed))
    print("Found {} pulsars for {}".format(len(result.table), condition))

    self.ATNFtable = result.table
    # store coordinates as SkyCoord
    self.ATNFtable['coordinates'] = SkyCoord(
        self.ATNFtable['RAJ'],
        self.ATNFtable['DECJ'],
        unit=(u.hourangle, u.deg),
    )
def get_pwn_cutoff(ra, dec, rad_search=2., deathline=1.e34):
    """
    Set cutoff for PWN based on association with pulsars in ATNF catalog

    Only pulsars with Edot above ``deathline`` erg/s are considered,
    because cutoffs at ~1 TeV should have already been detected

    :param ra: R.A. of PWN center, deg
    :param dec: Dec. of PWN center, deg
    :param rad_search: search radius, deg
    :param deathline: minimum Edot, erg/s
    :return: cutoff energy in TeV
    """
    # convert coordinates to astropy SkyCoord
    c = SkyCoord(ra, dec, frame='icrs', unit='deg')
    # extract ra, dec in the format required by psrqpy
    ra_hms, dec_dms = c.to_string('hmsdms').split(' ')
    # define circular search region
    search_region = [ra_hms, dec_dms, rad_search]
    # query ATNF catalog
    psrs = QueryATNF(params=['JNAME', 'RAJ', 'DECJ', 'EDOT', 'AGE'],
                     circular_boundary=search_region,
                     condition='EDOT > {}'.format(deathline))
    if len(psrs) == 0:
        # no PSR found, setting random cutoff
        ecut = get_random_cutoff()
    else:
        if len(psrs) == 1:
            # 1 pulsar found
            s = 0
        else:
            # multiple pulsars found: pulsars position in SkyCoord form
            cpsrs = SkyCoord(ra=psrs['RAJ'],
                             dec=psrs['DECJ'],
                             frame='icrs',
                             unit=(u.hourangle, u.deg))
            # calculate angular separation between pulsars and PWN
            sep = cpsrs.separation(c)
            # select closest pulsar (argmin returns the first minimum,
            # matching the previous np.where(sep == min)[0][0] selection)
            s = int(np.argmin(sep))
        # assume E_gamma = 0.01 E_e in deep KN regime, convert to TeV
        # NOTE(review): the original comment said 0.03 while the code uses
        # 0.01 -- confirm which factor is intended
        ecut = 0.01 * 1.e-12 * Emax_pwn(psrs['AGE'][s], psrs['EDOT'][s])
    return ecut
names_cat[6] = names[6] + '+2551' names.remove(names[6]) names_cat.remove(names_cat[6]) print('Select Source', flush=True) Test_source = 'J1643-1224' print('Define Simulation Parameters', flush=True) times_str = np.load('./binarytimestamps/times_%s.npy' % Test_source).astype(str) times = Time(times_str) ##Knowns if not os.path.isfile('%s_params.npy' % Test_source): try: query = QueryATNF(psrs=names_cat) psrs = query.get_pulsars() cat = True except: print('psrqpy not available', flush=True) sys.exit() PSR = psrs[Test_source] dp = PSR.DIST_DM1 * u.kpc Om_peri = PSR.OM * u.deg Om_peri_dot = PSR.OMDOT * u.deg / u.year A1 = PSR.A1 * u.s * const.c Ecc = PSR.ECC Pb = PSR.PB * u.day if str(PSR.T0) == '--': TASC = Time(PSR.TASC, format='mjd') T0 = Time(brentq(orbfits.T0_find,
def get_atnf_version():
    """Return the version string of the ATNF pulsar catalogue."""
    query = QueryATNF()
    return query.get_version
def __init__(self, download=False, correct_for_pm=True, th=True,
             wanted_names=None):
    """Load the ATNF catalogue and extract position, proper-motion, flux
    and scattering information as numpy arrays on ``self``.

    Parameters
    ----------
    download : bool
        If True, query the live ATNF catalogue (slow) and save it to
        ``all_ATNF.npy``; otherwise load a previously saved query file.
    correct_for_pm : bool
        If True, apply the proper-motion correction via ``correct_pm()``.
    th : bool
        If True, compute per-pulsar scattering angles; otherwise the
        theta arrays are filled with zeros.
    wanted_names : list of str, optional
        B- or J-names whose corrected properties are written to
        ``daniele_pulsar.csv``.  Defaults to no selection.
    """
    # BUGFIX: avoid mutable default argument ([] shared across calls)
    if wanted_names is None:
        wanted_names = []

    if download:
        # Runs if you don't have a recently downloaded version of the ATNF
        # catalog.  Turn this off if you already have a downloaded catalog
        # as it slows the code down quite a bit.
        query = QueryATNF(condition='!type(RRAT)', include_refs=True)
        query.save('all_ATNF.npy')
    else:
        # Loading the ATNF catalog from a .npy file.  This won't work if
        # you haven't saved it to a .npy file yet (see download branch).
        query = QueryATNF(loadquery='all_ATNF.npy')

    numstring = 'Using ATNF catalogue version {} which contains {} pulsars.'
    print(numstring.format(query.get_version, query.num_pulsars))

    # Desired query parameters from the ATNF catalog saved version
    self.table = table = query.table

    # not PM corrected RAJ: Right ascension (J2000) (hh:mm:ss.s)
    # not PM corrected DECJ: Declination (J2000) (+dd:mm:ss)
    self.raj_UNCORRECTED = np.asarray(table['RAJ'])
    self.decj_UNCORRECTED = np.asarray(table['DECJ'])

    # RAJD: Right ascension (J2000) (degrees)
    # DECJD: Declination (J2000) (degrees)
    self.rajd = np.asarray(table['RAJD'])
    self.decjd = np.asarray(table['DECJD'])

    # Galactic longitude/latitude with NaNs replaced by 0
    self.glon = np.nan_to_num(np.asarray(table["GL"]))
    self.glat = np.nan_to_num(np.asarray(table["GB"]))

    # RAJ & DECJ errors; missing values become the 100000 sentinel.
    # BUGFIX: np.nan_to_num(x, 100000) passed 100000 positionally as the
    # `copy` argument, silently filling NaNs with 0 instead of 100000;
    # `nan=100000` is the intended call.
    raj_err = np.asarray(table['RAJ_ERR'])
    self.raj_err = np.nan_to_num(raj_err, nan=100000)
    decj_err = np.asarray(table['DECJ_ERR'])
    self.decj_err = np.nan_to_num(decj_err, nan=100000)

    # Epoch of position, defaults to PEpoch (MJD)
    self.pos_epoch = np.asarray(table['POSEPOCH'])
    # Epoch of period or frequency (MJD)
    self.period_epoch = np.asarray(table['PEPOCH'])

    # Mean flux density at 400 MHz (mJy)
    self.fluxes = np.asarray(table['S400'])

    # B and J names of pulsars
    self.bnames = np.asarray(table['BNAME'])
    self.jnames = np.asarray(table['JNAME'])

    # Pulsar type; missing entries are labelled "Radio"
    self.type = np.asarray(table['TYPE'])
    self.type = ["Radio" if str(x) == "nan" else x for x in self.type]

    # Proper motion of RA in degrees (mas/yr -> deg/yr, NaNs -> 0)
    pm_ra = np.asarray(table['PMRA'])
    self.pm_ra = np.nan_to_num(pm_ra) / 3.6e6
    # Proper motion of DEC in degrees
    pm_dec = np.asarray(table['PMDEC'])
    self.pm_dec = np.nan_to_num(pm_dec) / 3.6e6

    # Proper-motion errors, kept in mas and converted to degrees;
    # missing values use the 100000 sentinel (same BUGFIX as above)
    pm_ra_err = np.asarray(table['PMRA_ERR'])
    self.pm_ra_err_mas = pm_ra_err
    self.pm_ra_err_deg = np.nan_to_num(pm_ra_err, nan=100000) / 3.6e6
    pm_dec_err = np.asarray(table['PMDEC_ERR'])
    self.pm_dec_err_mas = pm_dec_err
    self.pm_dec_err_deg = np.nan_to_num(pm_dec_err, nan=100000) / 3.6e6

    # time of position observation (MJD)
    time_pos = np.asarray(table['POSEPOCH'])
    self.time_pos = np.nan_to_num(time_pos)
    # time of period observation (MJD)
    time_per = np.asarray(table['PEPOCH'])
    self.time_per = np.nan_to_num(time_per)

    self.references = np.asarray(table['SURVEY'])

    self.convert()

    # Scattering time, and its value scaled to 400 MHz (seconds)
    # assuming a nu**-4.4 frequency dependence
    self.tau_s = np.asarray(table['TAU_SC'])
    self.tau_400 = self.tau_s * (((400e6)**(-4.4)) / ((1e9)**(-4.4)))

    self.distance = self.table['DIST']  # in kpc
    self.distance_DM = self.table['DIST_DM']  # in kpc

    self.theta_atnf = []
    self.tau_400_NE2001 = []
    if th:
        # scattering angles in milliarcseconds
        self.theta_NE2001 = []
        self.theta_NE2001_DM = []
        for glon, glat, distance, distance_dm, name, tau_400 in zip(
                self.glon, self.glat, self.distance, self.distance_DM,
                self.jnames, self.tau_400):
            # non-float distances (masked/missing) cannot be converted
            if not isinstance(distance, float) or not isinstance(
                    distance_dm, float):
                theta = np.nan
                theta_DM = np.nan
                atnf = np.nan
            else:
                theta = self.convert_taus_thetas(glon, glat, distance)
                theta_DM = self.convert_taus_thetas(glon, glat, distance_dm)
                atnf = self.convert_taus_thetas(glon, glat, distance,
                                                tau_400, ne2001=False)
            self.theta_atnf.append(atnf)
            self.theta_NE2001.append(theta)
            self.theta_NE2001_DM.append(theta_DM)
    else:
        self.theta_atnf = np.zeros(len(self.distance))
        self.theta_NE2001 = np.zeros(len(self.distance))
        self.theta_NE2001_DM = np.zeros(len(self.distance))

    if correct_for_pm:
        self.correct_pm()

    # Getting specific ATNF pulsar corrected properties
    wanted_data = {
        "bname": [],
        "jname": [],
        "ra (degrees)": [],
        "dec (degrees)": [],
        "ra_err (degrees)": [],
        "dec_err (degrees)": [],
        "pm_ra (degrees)": [],
        "pm_dec (degrees)": [],
        "pm_ra_err (degrees)": [],
        "pm_dec_err (degrees)": [],
        "type": [],
        "survey": []
    }
    if len(wanted_names) > 0:
        # loop variable renamed from `type` to avoid shadowing the builtin
        for (bname, jname, ra, dec, ra_err, dec_err, pmra, pmdec,
             pmra_err, pmdec_err, psr_type, survey) in zip(
                 self.bnames, self.jnames, self.rajd, self.decjd,
                 self.raj_err, self.decj_err, self.pm_ra, self.pm_dec,
                 self.pm_ra_err_deg, self.pm_dec_err_deg, self.type,
                 self.references):
            if bname in wanted_names or jname in wanted_names:
                wanted_data["bname"].append(bname)
                wanted_data["jname"].append(jname)
                wanted_data["ra (degrees)"].append(ra)
                wanted_data["dec (degrees)"].append(dec)
                wanted_data["ra_err (degrees)"].append(ra_err)
                wanted_data["dec_err (degrees)"].append(dec_err)
                wanted_data["pm_ra (degrees)"].append(pmra)
                wanted_data["pm_dec (degrees)"].append(pmdec)
                wanted_data["pm_ra_err (degrees)"].append(pmra_err)
                wanted_data["pm_dec_err (degrees)"].append(pmdec_err)
                wanted_data["type"].append(psr_type)
                wanted_data["survey"].append(survey)
        wanted_df = pandas.DataFrame(wanted_data)
        wanted_df.to_csv("daniele_pulsar.csv")
# Either load a previously pickled DataFrame of pulsar parameters, or
# query the ATNF catalogue for the pulsars listed in the input files.
# NOTE(review): source formatting was lost; the post-query lines are
# placed inside the else-branch since they reference `query` -- confirm.
if args.load:
    df = pd.read_pickle(args.load)
else:
    # one pulsar name / catalogue parameter per line; '#' starts a comment
    psrlist = np.genfromtxt(args.psrlist,
                            dtype=str,
                            comments="#",
                            autostrip=True)
    psrcat_params = np.genfromtxt(args.psrcat_params,
                                  dtype=str,
                                  comments="#",
                                  autostrip=True)
    psrlist = psrlist.tolist()
    psrcat_params = psrcat_params.tolist()
    query = QueryATNF(psrs=psrlist, params=psrcat_params)
    # Saving the entire catalogue too, keeping only the listed columns
    df_cat = query.catalogue
    df_cat = df_cat[[
        'JNAME', 'BNAME', 'RAJ', 'RAJ_ERR', 'DECJ', 'DECJ_ERR', 'F0',
        'F0_ERR', 'F1', 'F1_ERR', 'P0', 'P0_ERR', 'P1', 'P1_ERR', 'DM',
        'DM_ERR', 'RM', 'RM_ERR', 'DIST', 'ASSOC', 'PB', 'PB_ERR',
        'BINCOMP', 'AGE', 'BSURF', 'EDOT', 'NGLT'
    ]]
    # Removing TPA pulsars from the full catalogue pickle file
    indices = df_cat[df_cat["JNAME"].isin(psrlist)].index
    df_cat.drop(indices, inplace=True)
    if args.save:
        df_cat.to_pickle(args.save + "_fullcat.pckl")
def query_atnf():
    """Build a QueryATNF from the on-disk derived-catalogue database.

    NOTE(review): the function name says 'atnf' but it loads the
    *derived* catalogue file (cf. query_derived) -- confirm the pairing.
    """
    dbfile = 'test/derived_catalogue.db'
    return QueryATNF(loadfromdb=dbfile)
def query_derived():
    """Build a QueryATNF from the on-disk test-catalogue database.

    NOTE(review): the function name says 'derived' but it loads the
    *test* catalogue file (cf. query_atnf) -- confirm the pairing.
    """
    dbfile = 'test/test_catalogue.db'
    return QueryATNF(loadfromdb=dbfile)
def query():
    """Build a default QueryATNF (full catalogue, no constraints)."""
    q = QueryATNF()
    return q
parser.add_argument('-th',type=int,default=80,help='Number of Threads') parser.add_argument('-nT',type=int,default=8,help='Number of Temperatures') parser.add_argument('-nb',type=int,default=1000,help='Burn Steps') parser.add_argument('-dir',type=str,default='./',help='Data Directory') parser.add_argument('-srce',type=str,default=None,help='Source Name') args=parser.parse_args() dirname=args.dir Source=args.srce print('Load Parameters',flush=True) ##Knowns if not os.path.isfile('%s/%s_params.npy' %(dirname,Source)): try: query=QueryATNF(psrs=list([Source])) psrs = query.get_pulsars() cat=True except: print('psrqpy not available',flush=True) sys.exit() PSR = psrs[Source] dp = PSR.DIST_DM1 * u.kpc Om_peri = PSR.OM * u.deg Om_peri_dot = PSR.OMDOT * u.deg / u.year A1 = PSR.A1 * u.s * const.c Ecc = PSR.ECC Pb = PSR.PB * u.day if str(PSR.T0) == '--': TASC = Time(PSR.TASC, format='mjd') T0 = Time(brentq(orbfits.T0_find,
def __init__(self, name, data=None, sigma=None):
    """Initialise fit parameters for pulsar `name`, pulling orbital
    elements from the ATNF catalogue when no saved parameter file exists.

    NOTE(review): every lnprior lambda below divides by `err`, which is
    never defined in this scope -- evaluating any of them will raise
    NameError (probably `sig` was intended).  The lambdas also close over
    `sig` late-bound, so all of them would see the *last* assigned `sig`.
    The exponent uses +.5 where a Gaussian would need -.5, and the
    functions are named `lnprior` yet return exp(...) -- confirm intent.
    """
    self.name = name
    if not os.path.isfile('%s_params.npy' % self.name):
        try:
            query = QueryATNF(psrs=[self.name])
            psrs = query.get_pulsars()
            cat = True  # NOTE(review): assigned but never read here
        except:
            # NOTE(review): bare except also swallows unrelated errors
            print('psrqpy not available', flush=True)
            sys.exit()
        PSR = psrs[self.name]

        # DM-derived distance (kpc)
        self.dp = parameter(PSR.DIST_DM1 * u.kpc)
        if not PSR.DIST_DM1_ERR == None:
            sig = PSR.DIST_DM1_ERR
            self.dp.lnprior = lambda x: np.exp(.5 * (
                (x - self.dp.val.value) / sig)**2) / np.sqrt(2 * np.pi *
                                                             err**2)
            self.dp.fixed = False

        # longitude of periastron (deg)
        self.Om_peri = parameter(PSR.OM * u.deg)
        if not PSR.OM_ERR == None:
            sig = PSR.OM_ERR
            self.Om_peri.lnprior = lambda x: np.exp(.5 * (
                (x - self.Om_peri.val.value) / sig)**2) / np.sqrt(
                    2 * np.pi * err**2)
            self.Om_peri.fixed = False

        # periastron advance rate (deg/yr)
        self.Om_peri_dot = parameter(PSR.OMDOT * u.deg / u.year)
        if not PSR.OMDOT_ERR == None:
            sig = PSR.OMDOT_ERR
            self.Om_peri_dot.lnprior = lambda x: np.exp(.5 * (
                (x - self.Om_peri_dot.val.value) / sig)**2) / np.sqrt(
                    2 * np.pi * err**2)
            self.Om_peri_dot.fixed = False

        # projected semi-major axis, converted from light-seconds
        self.A1 = parameter(PSR.A1 * u.s * const.c)
        if not PSR.A1_ERR == None:
            sig = PSR.A1_ERR * const.c.value
            self.A1.lnprior = lambda x: np.exp(.5 * (
                (x - self.A1.val.value) / sig)**2) / np.sqrt(2 * np.pi *
                                                             err**2)
            self.A1.fixed = False

        # orbital eccentricity (dimensionless)
        self.Ecc = parameter(PSR.ECC)
        if not PSR.ECC_ERR == None:
            sig = PSR.ECC_ERR
            # NOTE(review): uses self.A1.val here -- almost certainly
            # should be self.Ecc.val; confirm before relying on this prior
            self.Ecc.lnprior = lambda x: np.exp(.5 * (
                (x - self.A1.val) / sig)**2) / np.sqrt(2 * np.pi * err**2)
            self.Ecc.fixed = False

        # orbital period (days)
        self.Pb = parameter(PSR.PB * u.day)
        if not PSR.PB_ERR == None:
            sig = PSR.PB_ERR
            self.Pb.lnprior = lambda x: np.exp(.5 * (
                (x - self.Pb.val.value) / sig)**2) / np.sqrt(2 * np.pi *
                                                             err**2)
            self.Pb.fixed = False

        if str(PSR.T0) == '--':
            # no T0 in the catalogue: derive it from TASC by root-finding
            # NOTE(review): brentq args use bare Pb, Om_peri, Ecc which
            # are not defined in this method (only self.* versions are)
            TASC = Time(PSR.TASC, format='mjd')
            self.T0 = parameter(
                Time(brentq(orbfits.T0_find,
                            TASC.mjd, (TASC + Pb).mjd,
                            args=(TASC, -Om_peri, Pb, Ecc)),
                     format='mjd'))
            if not PSR.TASC_ERR == None:
                sig = PSR.TASC_ERR
                self.T0.lnprior = lambda x: np.exp(.5 * (
                    (x - self.T0.val.mjd) / sig)**2) / np.sqrt(2 * np.pi *
                                                               err**2)
                self.T0.fixed = False
        else:
            self.T0 = parameter(Time(PSR.T0, format='mjd'))
            if not PSR.T0_ERR == None:
                # NOTE(review): uses PB_ERR as the T0 uncertainty --
                # looks like a copy-paste slip; should be PSR.T0_ERR
                sig = PSR.PB_ERR
                self.T0.lnprior = lambda x: np.exp(.5 * (
                    (x - self.T0.val.mjd) / sig)**2) / np.sqrt(2 * np.pi *
                                                               err**2)
                self.T0.fixed = False

        # proper motions converted from mas/yr to rad/s
        self.pm_ra = parameter(
            (PSR.PMRA * u.mas / u.year).to_value(u.rad / u.s) / u.s)
        if not PSR.PMRA_ERR == None:
            sig = (PSR.PMRA_ERR * u.mas / u.year).to_value(u.rad / u.s)
            self.pm_ra.lnprior = lambda x: np.exp(.5 * (
                (x - self.pm_ra.val.value) / sig)**2) / np.sqrt(
                    2 * np.pi * err**2)
            self.pm_ra.fixed = False
        self.pm_dec = parameter(
            (PSR.PMDec * u.mas / u.year).to_value(u.rad / u.s) / u.s)
        if not PSR.PMDec_ERR == None:
            sig = (PSR.PMDec_ERR * u.mas / u.year).to_value(u.rad / u.s)
            self.pm_dec.lnprior = lambda x: np.exp(.5 * (
                (x - self.pm_dec.val.value) / sig)**2) / np.sqrt(
                    2 * np.pi * err**2)
            self.pm_dec.fixed = False

        # NOTE(review): Test_source is not defined in this method (it
        # exists in the script variants of this code) -- presumably
        # self.name was intended; likewise dp, Om_peri, Om_peri_dot, A1,
        # Ecc, Pb, T0, pm_ra, pm_dec below are undefined bare names here
        self.srce = SkyCoord.from_name('PSR %s' % Test_source)
        np.save(
            '%s_params.npy' % Test_source,
            np.array([
                dp.value, Om_peri.value, Om_peri_dot.value, A1.value, Ecc,
                Pb.value, T0.mjd, pm_ra.value, pm_dec.value,
                self.srce.ra.to_value(u.deg),
                self.srce.dec.to_value(u.deg)
            ]))

    # fit parameters with simple bounded priors
    self.Om_orb = parameter(180, lambda x: bound_prior(x, 0, 360))
    self.Om_scr = parameter(0,
                            lambda x: bound_prior(x, -90, 90),
                            fixed=False)
    # NOTE(review): initial value 45 lies outside the (0, 1) bound -- confirm
    self.inc = parameter(45, lambda x: bound_prior(x, 0, 1), fixed=False)
    self.s = parameter(.5, lambda x: bound_prior(x, 0, 1), fixed=False)
    self.data = data
    self.sigma = sigma
def main():
    """Determine which pulsars fall in a given Apertif pointing, which
    compound beam (CB) each lands in, and optionally print extra pulsar
    parameters and plot the CB pattern."""
    # default parameters to query
    params = ["NAME", "BNAME", "RAJ", "DECJ", "DM", "P0", "S1400"]

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawTextHelpFormatter,
        description=
        "For a given pointing, determine which pulsars are in the field and "
        "in which compound beam they are.")
    parser.add_argument(
        "--ra",
        required=True,
        help="Right Ascension of pointing in hh:mm:ss.s format")
    parser.add_argument("--dec",
                        required=True,
                        help="Declination of pointing in dd:mm:ss.s format")
    parser.add_argument(
        "--freq",
        type=float,
        default=1370,
        help=
        "Observing frequency (MHz), used to determine size of compound beams. "
        "If max_dist is used, the frequency is only used for plotting the CBs."
        "(Default: %(default)s)")
    parser.add_argument(
        "--max_dist",
        type=float,
        help="Maximum distance from CB center (arcmin) to be considered "
        "in the CB. (Default: twice half-power width)")
    parser.add_argument("--condition",
                        type=str,
                        help="Search condition when querying psrcat, e.g. "
                        "'S1400 > 1'")
    parser.add_argument("--more_info",
                        action="store_true",
                        help="Add pulsar parameters to output")
    parser.add_argument(
        "--params",
        nargs="+",
        default=params,
        help=f"Space-separated list of pulsar parameters to show "
        f"when more_info=True. (Default: {' '.join(params)})")
    parser.add_argument("--plot",
                        action="store_true",
                        help="Plot CB pattern with pulsar locations")

    # print help if no arguments are given
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()

    # set max dist if not given
    half_power_width = get_half_power_width(args.freq * u.MHz)
    if args.max_dist is None:
        args.max_dist = 2 * half_power_width.to(u.arcmin).value

    # query ATNF for pulsars in a 4x4 degree FoV, which covers more than
    # the entire Apertif FoV
    bound = [args.ra, args.dec, 4]
    # params to query should always include NAME, RAJ and DECJ
    query_params = args.params[:]
    for key in ["NAME", "RAJ", "DECJ"]:
        if key not in query_params:
            query_params.append(key)
    print("Querying ATNF")
    # for some reason the query gives a RuntimeWarning, suppress it
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=RuntimeWarning)
        results = QueryATNF(params=query_params,
                            circular_boundary=bound,
                            condition=args.condition)
    if len(results) == 0:
        print("No pulsars found")
        sys.exit()

    # find the pointing of each CB
    pointing = SkyCoord(args.ra, args.dec, unit=(u.hourangle, u.deg))
    cb_pointings = []
    for cb in range(NCB):
        cb_ra, cb_dec = tools.cb_index_to_pointing(cb, pointing.ra,
                                                   pointing.dec)
        cb_pointings.append(SkyCoord(cb_ra, cb_dec))

    # check the closest CB for each pulsar
    output = []
    pulsar_coords = []
    pulsar_found = False
    print("Locating pulsars in Apertif Compound Beams")
    for psr in results.table:
        psr_coord = SkyCoord(psr["RAJ"], psr["DECJ"],
                             unit=(u.hourangle, u.deg))
        # get separation from each CB and find minimum
        separations = [
            psr_coord.separation(beam).to(u.arcmin).value
            for beam in cb_pointings
        ]
        # index of lowest separation is best CB
        best_cb = np.argmin(separations)
        # check the separation itself
        sep = separations[best_cb]
        # if too far away, skip this pulsar
        # sep must be in arcmin here
        if sep > args.max_dist:
            continue
        # a good pulsar was found
        pulsar_found = True
        # store info
        output.append([best_cb, sep, psr])
        pulsar_coords.append([psr["NAME"], psr_coord])

    if not pulsar_found:
        print("No pulsars found")
        sys.exit()

    # print info sorted by CB
    output = np.array(output)
    order = np.argsort(output[:, 0])
    print("Pulsars found:")
    for cb, sep, psr in output[order]:
        print(
            f"PSR {psr['NAME']} in CB{cb:02d}, separation from CB centre: {sep:.2f}'"
        )
        if args.more_info:
            for p in args.params:
                # get value and unit
                value = psr[p]
                unit = psr.columns[p].unit
                dtype = psr.columns[p].dtype
                # format according to dtype
                # BUGFIX: np.float was removed in NumPy 1.24; check for any
                # floating dtype instead of comparing against the old alias
                if np.issubdtype(dtype, np.floating):
                    formatted_value = f" {p} = {value:.2f}"
                else:
                    formatted_value = f" {p} = {value}"
                # add unit
                if unit is not None:
                    formatted_value += f" {unit}"
                # add newline
                print(formatted_value)
            print()

    if args.plot:
        make_plot(cb_pointings, pulsar_coords, half_power_width)