def xmm_gti(filename, df=False):
    ''' Read a GTI FITS file and return the total GTI time in ks. '''
    inp = tab(fits.getdata(filename)).to_pandas()
    gti = 0.
    for index, row in inp.iterrows():
        gti = gti + row.STOP - row.START
    return gti / 1000.
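# Assumed import header for the snippets in this file; the aliases are
# inferred from usage (e.g. `tab` for astropy's Table), not confirmed by the
# source:
import os
import urllib.parse
import urllib.request
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import sklearn.mixture
from scipy.stats import norm
from astropy.io import ascii, fits
from astropy.table import Table as tab, vstack
from astropy import coordinates as coord
from astropy import units as u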
def load_SDSS_phot_dr12(ra, dec, search_radius, pandas=None, ver=None,
                        columns=None):
    ''' ra in degrees, dec in degrees, search_radius in arcmin. '''
    def gen_SDSS_sql(ra, dec, search_radius, columns=columns):
        if columns is None:
            query_out = ' p.*'
        else:
            query_out = ' ' + columns + ',p.ra,p.dec'
        query_from = (' FROM fGetNearbyObjEq({ra},{dec},{search_radius}) n,'
                      ' PhotoPrimary p'
                      ' WHERE n.objID=p.objID').format(
                          ra=str(ra), dec=str(dec),
                          search_radius=str(search_radius))
        return 'SELECT' + query_out + query_from

    def query_SDSS(sSQL_query):
        # POST request to the SkyServer SQL search endpoint
        sURL = 'http://skyserver.sdss.org/dr12/en/tools/search/x_sql.aspx?'
        values = {'cmd': sSQL_query, 'format': 'csv'}
        data = urllib.parse.urlencode(values).encode('utf-8')
        request = urllib.request.Request(sURL, data)
        response = urllib.request.urlopen(request)
        return response.read().decode('utf-8')

    sql_str = gen_SDSS_sql(ra, dec, search_radius)
    sdss_ds = query_SDSS(sql_str)
    lines = sdss_ds.split('\n')
    nobj = len(lines) - 2
    if ver:
        print(str(nobj) + ' SDSS objects found')
    if nobj > 0:
        # pop the table name line and the EOF line
        lines.pop(0)
        lines.pop(-1)
        cols = lines[0].split(',')
        # pop the column-name line
        lines.pop(0)
        rows = []
        for i in lines:
            rows.append(list(map(float, i.split(','))))
        tab_out = tab(rows=rows, names=cols)
        if pandas:
            tab_out = tab_out.to_pandas()
        radec = coord.SkyCoord(ra, dec, unit=(u.degree, u.degree),
                               frame='icrs')
        sdss = coord.SkyCoord(tab_out['ra'], tab_out['dec'],
                              unit=(u.degree, u.degree), frame='icrs')
        tab_out['dist_arcsec'] = radec.separation(sdss).arcsec
        # TODO: fix objID to string
        # TODO: find out what p.type means
        return tab_out
    else:
        return []
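# Example call (hypothetical coordinates): a 1-arcmin cone search around an
# ICRS position, returned as a DataFrame when pandas=True:
#
#     phot = load_SDSS_phot_dr12(150.0, 2.2, 1.0, pandas=True, ver=True)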
def READSERVS(cat=True):
    # fname = '/cuc36/xxl/multiwavelength/vacc/xmm/121212/xmm-irac12-sextractor.fits.gz'
    fname = '/Users/ctchen/xxl/data//servs-xmm-data-fusion-sextractor.fits.gz'
    servs = tab(fits.getdata(fname, 1)).to_pandas()
    servs = servs[(servs.FLUX_APER_3_1 > 1.0) & (servs.FLUX_APER_3_2 > 1.0)]
    servs['ID_SERVS'] = 'IRAC1_' + servs.ID_1.astype(str).str.zfill(6)
    servs['mag1'] = ab_to_jy(servs.FLUX_APER_3_1.values.copy() / 1e6,
                             tomag=True)
    servs['mag2'] = ab_to_jy(servs.FLUX_APER_3_2.values.copy() / 1e6,
                             tomag=True)
    servs.set_index('ID_12', inplace=True)
    servs['numid'] = pd.Series(range(len(servs)), index=servs.index)
    servs.rename(columns={'RA_12': 'ra', 'DEC_12': 'dec'}, inplace=True)
    if cat:
        # Check Data Fusion
        catservs = makecd(servs.ra.values, servs.dec.values)
        return servs, catservs
    else:
        return servs
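# Usage sketch: ab_to_jy and makecd are helpers defined elsewhere in this
# module; makecd is assumed to build an astropy SkyCoord catalog from ra/dec
# arrays, which can then feed a cross-match (other_cat is hypothetical):
#
#     servs, catservs = READSERVS()
#     idx, d2d, _ = other_cat.match_to_catalog_sky(catservs)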
def xmm_bkgd(filename, df=False, fit=False, sig=None):
    '''
    The input file should be the output of step 1 in:
    http://www.cosmos.esa.int/web/xmm-newton/sas-thread-epic-filterbackground
    which bins the photons in 100s time intervals.
    '''
    if sig is None:
        sig = 3
    inp = fits.getdata(filename)
    inp = tab(inp).to_pandas()
    if df is True:
        return inp
    # Fit a single-component Gaussian model to the rate distribution
    gmm = sklearn.mixture.GaussianMixture(n_components=1)
    r = gmm.fit(inp.RATE.values[:, np.newaxis])
    if fit is True:
        return r
    # GaussianMixture stores the fitted covariances in `covariances_` (the
    # old sklearn GMM class used `covars_`); with one component and one
    # feature the variance is covariances_[0, 0, 0]
    return r.means_[0, 0] + sig * np.sqrt(r.covariances_[0, 0, 0])
def xmm_bkgd(filename, df=False, fit=False, sig=None):
    '''
    Alternative version using scipy.stats.norm instead of a GaussianMixture.
    The input file should be the output of step 1 in:
    http://www.cosmos.esa.int/web/xmm-newton/sas-thread-epic-filterbackground
    which bins the photons in 100s time intervals.
    '''
    if sig is None:
        sig = 3
    inp = fits.getdata(filename)
    inp = tab(inp).to_pandas()
    if df is True:
        return inp
    # Fit a Gaussian to the rate distribution
    mu, std = norm.fit(inp.RATE.values)
    if fit is True:
        return [mu, std]
    return [mu, std * sig]
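# Usage sketch (hypothetical file name): following the SAS filtering thread,
# the count-rate cut would be mu + sig*std:
#
#     mu, scaled_std = xmm_bkgd('pn_rate.fits', sig=3)
#     rate_cut = mu + scaled_std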
                    default=False, help='savepdf if set as True.')
args = parser.parse_args()
verbose = args.verbose
vprint = verboseprint(verbose)
overwrite = args.overwrite
savepdf = args.savepdf
evt = args.evt
out = args.out
xy = args.xy

evthdu = fits.open(evt, ignore_missing_end=True)
evttab = tab(evthdu[1].data)
evttab = evttab[(evttab['PI'] >= 0) & (evttab['PI'] <= 511)]
# flattened index of the detector pixel of each event
detcoor_id = (evttab['RAW_X'].quantity.value * npixx +
              evttab['RAW_Y'].quantity.value)
dmask = create_circular_mask(48, 48, radius=24)
img, xx, yy = np.histogram2d(evttab['RAW_X'], evttab['RAW_Y'],
                             bins=[np.arange(npixx + 1),
                                   np.arange(npixy + 1)])
if np.min(img) == 0:
    ctmin = 1
else:
savepdf = args.savepdf
evt = args.evt
switch = args.switch
arf = args.arf
out = args.out
psf = args.psf
vig = args.vig
rmf = args.rmf
box = args.box
bbox = args.bbox
bkgout = out[:-4] + 'bkg.fits'

evthdu = fits.open(evt)
evttab = tab(evthdu[1].data)
# kick out events with weird PI (should've been done with martfilter already,
# but just in case)
evttab = evttab[(evttab['PI'] >= 0) & (evttab['PI'] <= 511)]
expvalue = evthdu[0].header['EXPOSURE']
# flattened index of the detector pixel coordinate of each event
detcoor_id = (evttab['RAW_X'].quantity.value * npixx +
              evttab['RAW_Y'].quantity.value)
# detector mask
dmask = create_circular_mask(48, 48, radius=24)
img, xx, yy = np.histogram2d(evttab['RAW_X'], evttab['RAW_Y'],
                             bins=[np.arange(npixx + 1),
                                   np.arange(npixy + 1)])
if np.min(img) == 0:
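# Note on the flattened detector index above: detcoor_id packs (RAW_X, RAW_Y)
# into a single pixel index, so the inverse mapping is (a sketch, valid as
# long as RAW_Y < npixx):
#
#     raw_x, raw_y = np.divmod(detcoor_id, npixx)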
for i, l in enumerate(lines):
    if i == 22:
        met = float(l.split()[-1])
    if i >= 517:
        j = l.split()
        t.append(float(j[0]))
        ug.append(float(j[1]))
        gi.append(float(j[2]))
        ih.append(float(j[3]))
        ui.append(float(j[4]))
t = np.array(t)
ui = np.array(ui)
gi = np.array(gi)
ug = np.array(ug)
ih = np.array(ih)
pegase_table = tab([t, ui, gi, ug, ih],
                   names=("time", "u-i", "g-i", "u-g", "i-h"))
pegase_table.write("pegase_z=" + str(met) + ".fits", format="fits",
                   overwrite=True)
fig = plt.scatter(ui[::skips], ih[::skips],
                  c=np.log10(t * 1000000.)[::skips], marker=markers[k],
                  s=100, label=str(met), edgecolors="none", zorder=5,
                  alpha=1)
plt.plot(ui, ih, '--', color='black', alpha=.7, zorder=3)
plt.scatter(data_cat['MAG_APER_1'][:, 1] - derred[0] -
            (data_cat["MAG_APER_3"][:, 1] - derred[2]),
def readfits(fname):
    ''' Read a FITS table into a pandas DataFrame. '''
    return tab(fits.getdata(fname)).to_pandas()
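# Usage sketch (hypothetical file name):
#
#     df = readfits('events.fits')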
detnam = detnam_d[istr]
urdn = urdn_d[istr]
detn = detn_d[istr]
csvname = eapath + '/ARTXC_EA_M' + istr + '.csv'
df = pd.read_csv(csvname)
vigarr = np.zeros((1, 1, 31, 13))
for i in range(13):
    thetac = df.loc[i, 'thetac']
    sig1 = df.loc[i, 'sigma1']
    sig2 = df.loc[i, 'sigma2']
    a1 = df.loc[i, 'A1']
    a2 = df.loc[i, 'A2']
    # normalize the effective-area curve to its peak
    ea = func_EA(thetaarcmin, thetac, sig1, a1, sig2, a2)
    vigarr[0, 0, :, i] = ea / np.max(ea)
t = tab([[en_lo], [en_hi], [thetadeg], np.array([0.0]), vigarr],
        names=('ENERG_LO', 'ENERG_HI', 'THETA', 'PHI', 'VIGNET'))
col1 = fits.Column(array=t['ENERG_LO'].data, name='ENERG_LO',
                   format='13E', unit='keV')
col2 = fits.Column(array=t['ENERG_HI'].data, name='ENERG_HI',
                   format='13E', unit='keV')
col3 = fits.Column(array=t['THETA'].data, name='THETA',
                   format='31E', unit='degree')
col4 = fits.Column(array=t['PHI'].data, name='PHI',
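# func_EA is defined elsewhere; judging from its parameters (thetac, sigma1,
# sigma2, A1, A2) it is plausibly a two-component Gaussian model of the
# effective area versus off-axis angle. A hypothetical sketch, not the
# confirmed ART-XC model:
def func_EA_sketch(theta, thetac, sig1, a1, sig2, a2):
    # sum of two Gaussians centered on thetac (assumption)
    return (a1 * np.exp(-(theta - thetac) ** 2 / (2. * sig1 ** 2)) +
            a2 * np.exp(-(theta - thetac) ** 2 / (2. * sig2 ** 2)))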
for key in hdum[0].header.keys():
    keylist.append(key)
if 'HISTORY' not in keylist:
    vprint('No history in the primary HDU')
    hdum[0].header['HISTORY'] = 'Initializing with ' + fname_m
for i in np.arange(len(evtfiles) - 1) + 1:
    evta = evtfiles[i].strip()
    fname_a = (os.path.abspath(evta).split('/')[-2] + '/' +
               os.path.abspath(evta).split('/')[-1])
    for card in hdum[0].header['HISTORY']:
        if fname_a in card:
            print('no merging needed, ' + fname_a +
                  ' has been merged before.')
    hdua = fits.open(evta)
    tabm = tab(hdum[1].data)
    taba = tab(hdua[1].data)
    if tabm['TIME'][-1] <= taba['TIME'][0]:
        # all events in the list to be appended are later than the last
        # event of the primary list
        newtab = vstack([tabm, taba])
    elif taba['TIME'][-1] <= tabm['TIME'][0]:
        # the first event of the primary list is later than all events in
        # the list to be appended
        newtab = vstack([taba, tabm])
    elif force:
        # the time intervals overlap, sort the output
        vprint('The time interval of evta is already part of evtm')
        vprint('force == True, continuing now.')
        newtab = vstack([tabm, taba])
        newtab.sort('TIME')
    else:
        # something is wrong, sort the output
parser.add_argument('-overwrite', type=bool, required=False, default=True,
                    help='Overwrite if set as True.')
# note: argparse's type=bool treats any non-empty string (including 'False')
# as True, so this flag effectively stays at its default
args = parser.parse_args()
verbose = args.verbose
vprint = verboseprint(verbose)
evt = args.evt
arf = args.arf
out = args.out
overwrite = args.overwrite

arfhdu = fits.open(arf)
arf1 = tab(arfhdu[1].data)
enarr = arf1['ENERG_LO'].quantity.value
enarr = np.hstack([enarr, np.array(arf1['ENERG_HI'][-1])])
arthdu = fits.open(evt)
piarr = get_pi(arthdu[1].data['ENERGY'])
evthead = arthdu[1].header
arthdu[1] = add_column(arthdu[1], piarr, 'PI', 'I', '', overwrite)
arthdu.writeto(out, overwrite=overwrite)
print('wrote PI arrays into ' + out)
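# get_pi is defined elsewhere (it apparently picks up the channel grid from
# module state rather than an argument). A minimal sketch consistent with its
# use here, assuming PI is the 0-based index of the ARF energy bin containing
# each photon; this is an assumption, not the confirmed channel definition:
def get_pi_sketch(energy, edges):
    # np.digitize returns 1-based bin indices; shift to 0-based PI channels
    return (np.digitize(energy, edges) - 1).astype(np.int16)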
    help='RA in degrees')
parser.add_argument('-dec', type=float, required=True,
                    help='DEC in degrees')
args = parser.parse_args()
src_ra = args.ra
src_dec = args.dec

data = ascii.read('/home/ctchen/lib/martxc_local/artxc_survey_grid.txt')
tiles = data['tile'].quantity.value.astype(str)
for idx, tt in enumerate(tiles):
    tiles[idx] = tt.zfill(6)
data['tile'] = tiles
data = tab(data).to_pandas()
raarr = data['ramin'].values
raarr = np.hstack([raarr, data['ramax'].values[-1]])
decarr = data['decmin'].values
decarr = np.hstack([decarr, data['decmax'].values[-1]])


def isintile(row, ra, dec):
    ''' True if (ra, dec) falls within the tile boundaries of this row. '''
    return (row.ramin <= ra < row.ramax) and (row.decmin <= dec < row.decmax)


if sum(data.apply(isintile, axis=1, args=(src_ra, src_dec))) == 1: