def fetch_object_by_name(name, radius):
    """
    Query NED for an object and build a DS9 region for every returned entry.
    :param name: object name to query in NED
    :param radius: circle radius in arcseconds
    :return: the parsed region
    """

    # Query the NED database for the object
    table = Ned.query_object(name)

    region_string = "# Region file format: DS9 version 3.0\n"
    region_string += "global color=green\n"

    # For every entry in the table
    for entry in table:

        # Get the right ascension and the declination
        ra = entry[2]
        dec = entry[3]

        #print coordinates.degrees_to_hms(ra=ra, dec=dec)

        # Create a string with the coordinates of the star
        regline = "fk5;circle(%s,%s,%.2f\")\n" % (ra, dec, radius)

        # Add the parameters of this star to the region string
        region_string += regline

    # Return the region
    return regions.parse(region_string)
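# A minimal, self-contained sketch of the same idea as fetch_object_by_name above,
# writing the DS9 region string straight to a .reg file instead of going through the
# (unspecified) `regions` module used there. The object name and output path are
# arbitrary examples, and the "RA(deg)"/"DEC(deg)" column names follow the convention
# used by the other snippets in this collection.
from astroquery.ned import Ned

def write_ned_region_file(name, radius, path):
    # Query NED and emit one fk5 circle per returned row
    table = Ned.query_object(name)
    lines = ["# Region file format: DS9 version 3.0", "global color=green"]
    for entry in table:
        lines.append('fk5;circle(%s,%s,%.2f")' % (entry["RA(deg)"], entry["DEC(deg)"], radius))
    with open(path, "w") as f:
        f.write("\n".join(lines) + "\n")

# write_ned_region_file("M 81", 10.0, "m81_ned.reg")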
def get_ned_properties(self):
    """
    Query NED for this galaxy and store its common name, redshift and type.
    :return:
    """

    # Inform the user
    log.info("Querying the NASA/IPAC Extragalactic Database ...")

    # Search on NED
    ned_result = Ned.query_object(self.galaxy_name)
    ned_entry = ned_result[0]

    # Get a more common name for this galaxy (sometimes, the name obtained from NED is one starting with 2MASX .., use the PGC name in this case)
    if ned_entry["Object Name"].startswith("2MASX "):
        gal_name = self.ngc_name
    else:
        gal_name = ned_entry["Object Name"]

    # Get the redshift
    gal_redshift = ned_entry["Redshift"]
    if isinstance(gal_redshift, np.ma.core.MaskedConstant): gal_redshift = None

    # Get the type (G=galaxy, HII ...)
    gal_type = ned_entry["Type"]
    if isinstance(gal_type, np.ma.core.MaskedConstant): gal_type = None

    # Get the distance
    #ned_distance = ned_entry["Distance (arcmin)"]
    #if isinstance(ned_distance, np.ma.core.MaskedConstant): ned_distance = None

    # Set properties
    self.properties.common_name = gal_name
    self.properties.redshift = gal_redshift
    self.properties.galaxy_type = gal_type
def redshift_finder(objname):
    # Note: results are appended to a module-level list named NED_data
    obj = objname[0]
    main_table = Ned.query_object(obj)
    redshift = main_table['Redshift'][0]
    NED_data.append(redshift)
    RA = main_table['RA(deg)'][0]
    NED_data.append(RA)
    DEC = main_table['DEC(deg)'][0]
    NED_data.append(DEC)
def get_photometry(self):
    """
    :return: Returns photometry data as an `astropy.table.Table` object.
        Available dict.keys() are:
        ['No.', 'Object Name', 'RA(deg)', 'DEC(deg)', 'Type', 'Velocity',
         'Redshift', 'Redshift Flag', 'Magnitude and Filter', 'Distance (arcmin)',
         'References', 'Notes', 'Photometry Points', 'Positions',
         'Redshift Points', 'Diameter Points', 'Associations']
    """
    return Ned.get_table(self.name)
def __init__(self, ob_name, object_name=""):
    self.ob_name = ob_name
    self.object_name = object_name

    if self.object_name == "":
        if self.ob_name[:-2] == "ESO137":
            self.object_name = "ESO137-G034"
        else:
            self.object_name = ob_name[:-2]

    ##
    ## TODO: need some error handling here!
    n = Ned.query_object(self.object_name)
    self.z = n['Redshift'].data.data[0]

    ##
    ## set some directories and files
    self.f_combined = os.getenv("XDIR") + '/combined/' + self.ob_name + '.fits'
    self.dir_starlight = os.getenv("HOME") + "/STARLIGHT"
    self.f_starlight_bc03 = self.dir_starlight + "/spectra/" + self.ob_name + ".txt"
    self.cfg_SL_infile = self.dir_starlight + "/infiles/" + self.ob_name + ".in"
    self.dataset_definition = os.getenv("XDIR") + '/dataset_definition/' + self.ob_name + '.txt'

    d = ascii.read(self.dataset_definition)
    self.night = d['night'][0]
    ob_name_list = [d['object'][0], d['telluric'][0], d['flux'][0]]
    self.arms = ["NIR", "VIS", "UVB"]
    self.dprlist = ["SCI", "TELL", "FLUX"]
    self.dataset = Table(names=('dprtype', 'ob_name', 'arm', 'dpid'),
                         dtype=(np.dtype((str, 10)), np.dtype((str, 20)), np.dtype((str, 3)), np.dtype((str, 29))),
                         meta={'night': self.night})

    conn = sqlite3.connect(os.getenv("OBSDB"))
    c = conn.cursor()

    for arm in self.arms:
        f_arm = self.dataset_definition.split('.txt')[0] + '_' + arm + '.txt'

        if os.path.isfile(f_arm):
            d_arm = ascii.read(f_arm, data_start=0, names=["ob", "dpid"])
            for ob, dpid, dpr in zip(d_arm["ob"], d_arm["dpid"], self.dprlist):
                self.dataset.add_row([dpr, ob, arm, dpid])
        else:
            with open(f_arm, 'w') as f:
                for ob, dpr in zip(ob_name_list, self.dprlist):
                    query = "select arcfile from shoot where night=\"" + self.night + \
                        "\" and ob_name=\"" + ob + \
                        "\" and arm=\"" + arm + \
                        "\" and opti2_name=\"IFU\" limit 1;"
                    c.execute(query)
                    dpid = c.fetchone()
                    self.dataset.add_row([dpr, ob, arm, dpid])
                    file_string = ob + " " + dpid[0] + "\n"
                    f.write(file_string)
def query_name(name, verbose=False, print_header=False):
    """
    Query NED by source name.
    """
    try:
        q = Ned.query_object(name)
        objname = q["Object Name"][0]
        objtype = q["Type"][0].decode("utf-8")
        ra = q["RA(deg)"][0]
        dec = q["DEC(deg)"][0]
        velocity = q["Velocity"][0]
        z = q["Redshift"][0]
        z_flag = q["Redshift Flag"][0].decode("utf-8")
        refs = q["References"][0]
        notes = q["Notes"][0]
    except RemoteServiceError:
        objname = None
        objtype = None
        ra = None
        dec = None
        velocity = None
        z = None
        z_flag = None
        refs = None
        notes = None
        if verbose:
            print("*** %s: not found ***" % name, file=sys.stderr)
    #
    results = OrderedDict([
        ("Name", name),
        ("NED_Name", objname),
        ("Type", objtype),
        ("RA", ra),
        ("DEC", dec),
        ("Velocity", velocity),
        ("z", z),
        ("z_Flag", z_flag),
        ("References", refs),
        ("Notes", notes),
    ])
    if verbose:
        if print_header:
            print(",".join(results.keys()))
        print(",".join([str(v) for v in results.values()]))
    return results
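# A small usage sketch for query_name above: loop over a few source names and emit
# CSV rows, printing the header only for the first source. The names are arbitrary
# examples; the function's own imports (Ned, OrderedDict, sys) are assumed to be in
# scope as in its original module.
for i, src in enumerate(["NGC 1275", "3C 273", "M 87"]):
    query_name(src, verbose=True, print_header=(i == 0))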
def queryNED(name):
    """
    queryNED(): Return information on a galaxy from a NED Query

    Note: This is now simply a wrapper around astroquery.ned.

    Arguments
        name: galaxy name

    Returns: dictionary with:
        - Coordinates (J2000)
        - redshift
        - nans for angular size (not provided in this short NED query)
    """
    from astroquery.ned import Ned
    from astroquery.exceptions import RemoteServiceError

    # Default output with NaN placeholders; returned as-is when lookups fail
    out = {'RA': _np.nan*_u.deg, 'Dec': _np.nan*_u.deg, 'z': _np.nan,
           'angsize': {'major': _np.nan*_u.arcmin, 'minor': _np.nan*_u.arcmin,
                       'PA': _np.nan*_u.deg}}

    try:
        res = Ned.query_object(name)[0]
    except RemoteServiceError:
        _sys.stderr.write(name + " not found in NED.\n")
        return out

    try:
        out['RA'] = res['RA(deg)'] * _u.deg
        out['Dec'] = res['DEC(deg)'] * _u.deg
    except:
        _sys.stderr.write('Error: coordinates not available for ' + name + "\n")

    try:
        out['z'] = res['Redshift']
    except:
        _sys.stderr.write('Error: redshift not available for ' + name + "\n")

    return out
def get_diameters(self):
    """
    :return: Returns data as an `astropy.table.Table` object.
        Available dict.keys() are:
        ['No.', 'Frequency targeted', 'Refcode', 'Major Axis', 'Major Axis Flag',
         'Major Axis Unit', 'Minor Axis', 'Minor Axis Flag', 'Minor Axis Unit',
         'Axis Ratio', 'Axis Ratio Flag', 'Major Axis Uncertainty', 'Ellipticity',
         'Eccentricity', 'Position Angle', 'Equinox', 'Reference Level',
         'NED Frequency', 'NED Major Axis', 'NED Major Axis Uncertainty',
         'NED Axis Ratio', 'NED Ellipticity', 'NED Eccentricity',
         'NED cos-1_axis_ratio', 'NED Position Angle', 'NED Minor Axis',
         'Minor Axis Uncertainty', 'NED Minor Axis Uncertainty',
         'Axis Ratio Uncertainty', 'NED Axis Ratio Uncertainty',
         'Ellipticity Uncertainty', 'NED Ellipticity Uncertainty',
         'Eccentricity Uncertainty', 'NED Eccentricity Uncertainty',
         'Position Angle Uncertainty', 'NED Position Angle Uncertainty',
         'Significance', 'Frequency', 'Frequency Unit', 'Frequency Mode',
         'Detector Type', 'Fitting Technique', 'Features', 'Measured Quantity',
         'Measurement Qualifiers', 'Targeted RA', 'Targeted DEC',
         'Targeted Equinox', 'NED Qualifiers', 'NED Comment']
    """
    return Ned.get_table(self.name, table="diameters")
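# A hedged sketch of using the diameters table returned above: query astroquery
# directly and read two columns by name. The column names ('Frequency targeted',
# 'NED Major Axis') follow the listing in the docstring above; "NGC 5128" is just
# an example target.
from astroquery.ned import Ned

diam = Ned.get_table("NGC 5128", table="diameters")
for row in diam:
    print(row["Frequency targeted"], row["NED Major Axis"])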
def queryNED(names, cat):
    '''
    Takes a list of names, queries them in NED and returns the 'Object Name'
    from the NED query results.
    '''
    ned_names = []
    if cat == 'bzb':
        failstr = '---bzcat---'
    elif cat == 'fermi':
        failstr = '---fermi---'
    else:
        failstr = '---'
    for name in names:
        try:
            ned_query = Ned.query_object(name)
            ned_names.append(ned_query["Object Name"][0])
        except:
            ned_names.append(failstr)
    return ned_names
def vlsr2(self, name):
    """ experimental Simbad/NED """
    if have_SB:
        print("Trying SIMBAD...")
        try:
            t1 = Simbad.query_object(name)
            print(t1.colnames)
            print(t1)
        except:
            pass
    else:
        print("No SIMBAD")
    if have_NED:
        print("Trying NED...")
        try:
            t2 = Ned.query_object(name)
            print(t2.colnames)
            print(t2)
            print('VLSR=', t2['Velocity'].item())
        except:
            pass
    else:
        print("No NED")
with open("gnames.txt", "r") as myfile:
    mylist = myfile.readlines()
#print (mylist[:10])

name = []
for item in mylist:
    name.append(item.rstrip("\n"))
#print (name[:10])

from astroquery.ned import Ned

image_list = {}
for entry in name:
    urls = Ned.get_image_list(entry, item='spectra')
    image_list.update({entry: urls})

nomatch = []
import urllib.request  # urllib.urlretrieve is Python 2 only; use urllib.request.urlretrieve

# image_list maps each name to a (possibly empty) list of spectrum URLs
for name, urls in image_list.items():
    if urls == []:
        nomatch.append(name)
    else:
        for url in urls:
            urllib.request.urlretrieve(url)
key_download = "txt"  # fits

dir_data = "../proj_goals_" + data_suffix + "/"
list_donwload_txt = "goals_" + data_suffix + "_" + time_stamp + ".txt"

done = glob.glob(dir_data)
if not done:
    os.system("mkdir " + dir_data)

array_goals = np.loadtxt("goals_list_name.txt", delimiter=",", dtype="S20")
loop = len(array_goals)
list_donwload = []
for i in range(loop):
    result_table = Ned.query_object(array_goals[i])
    print(array_goals[i] + ", z=" + str(result_table["Redshift"][0]))
    target = Alma.query_object(array_goals[i])
    spws = target['Frequency support'].tolist()
    uids = target['Member ous id'].tolist()
    loop2 = len(spws)
    for j in range(loop2):
        print(uids[j])
        for k in range(len(spws[j].split(" U "))):
            freq_cover = spws[j].split(" U ")[k].split(",")[0]
            edge_low = float(freq_cover.split("..")[0].replace("[", ""))
            edge_high = float(freq_cover.split("..")[1].replace("GHz", ""))
            redshift_plus_1 = 1 + result_table["Redshift"][0]
            obs_freq = search_freq / redshift_plus_1
            if edge_low < obs_freq < edge_high:
                print(data_suffix + " is here!")
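# The loop above checks whether a rest-frame search frequency, redshifted to the
# observed frame, falls inside any ALMA spectral window. A hedged helper isolating
# that test (frequencies in GHz; the "[low..highGHz,..." window format is assumed to
# match the 'Frequency support' strings parsed above):
def spw_covers_line(spw_string, rest_freq_ghz, redshift):
    # Observed frequency of the line for this source
    obs_freq = rest_freq_ghz / (1.0 + redshift)
    for window in spw_string.split(" U "):
        freq_cover = window.split(",")[0]
        edge_low = float(freq_cover.split("..")[0].replace("[", ""))
        edge_high = float(freq_cover.split("..")[1].replace("GHz", ""))
        if edge_low < obs_freq < edge_high:
            return True
    return False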
def get_galaxy_info(name, position): """ This function ... :param name: :param position: :return: """ # Obtain more information about this galaxy try: ned_result = Ned.query_object(name) ned_entry = ned_result[0] # Get a more common name for this galaxy (sometimes, the name obtained from NED is one starting with 2MASX .., use the PGC name in this case) if ned_entry["Object Name"].startswith("2MASX "): gal_name = name else: gal_name = ned_entry["Object Name"] # Get the redshift gal_redshift = ned_entry["Redshift"] if isinstance(gal_redshift, np.ma.core.MaskedConstant): gal_redshift = None # Get the type (G=galaxy, HII ...) gal_type = ned_entry["Type"] if isinstance(gal_type, np.ma.core.MaskedConstant): gal_type = None except astroquery.exceptions.RemoteServiceError: # Set attributes gal_name = name gal_redshift = None gal_type = None except astroquery.exceptions.TimeoutError: # Set attributes gal_name = name gal_redshift = None gal_type = None except: # Set attributes gal_name = name gal_redshift = None gal_type = None # Create a new Vizier object and set the row limit to -1 (unlimited) viz = Vizier(keywords=["galaxies", "optical"]) viz.ROW_LIMIT = -1 # Query Vizier and obtain the resulting table result = viz.query_object(name.replace(" ", ""), catalog=["VII/237"]) # Not found ... TODO: fix this ... this object was in the first query output if len(result) == 0: return name, position, None, None, [], None, None, None, None, None, None table = result[0] # Get the correct entry (sometimes, for example for mergers, querying with the name of one galaxy gives two hits! We have to obtain the right one each time!) if len(table) == 0: raise ValueError("The galaxy could not be found under this name") elif len(table) == 1: entry = table[0] else: entry = None # Some rows don't have names, if no match is found based on the name just take the row that has other names defined rows_with_names = [] for row in table: if row["ANames"]: rows_with_names.append(row) # If only one row remains, take that one for the galaxy we are looking for if len(rows_with_names) == 1: entry = rows_with_names[0] # Else, loop over the rows where names are defined and look for a match else: for row in rows_with_names: names = row["ANames"] if name.replace(" ", "") in names or gal_name.replace(" ", "") in names: entry = row break # If no matches are found, look for the table entry for which the coordinate matches the given position (if any) if entry is None and position is not None: for row in table: if np.isclose(row["_RAJ2000"], position.ra.value) and np.isclose(row["_DEJ2000"], position.dec.value): entry = row break # Note: another temporary fix if entry is None: return name, position, None, None, [], None, None, None, None, None, None # Get the right ascension and the declination position = SkyCoordinate(ra=entry["_RAJ2000"], dec=entry["_DEJ2000"], unit="deg", frame="fk5") # Get the names given to this galaxy gal_names = entry["ANames"].split() if entry["ANames"] else [] # Get the size of the galaxy ratio = np.power(10.0, entry["logR25"]) if entry["logR25"] else None diameter = np.power(10.0, entry["logD25"]) * 0.1 * Unit("arcmin") if entry["logD25"] else None #print(" D25_diameter = ", diameter) radial_profiles_result = viz.query_object(name, catalog="J/ApJ/658/1006") if len(radial_profiles_result) > 0: radial_profiles_entry = radial_profiles_result[0][0] gal_distance = radial_profiles_entry["Dist"] * Unit("Mpc") gal_inclination = Angle(radial_profiles_entry["i"], "deg") gal_d25 = radial_profiles_entry["D25"] * Unit("arcmin") 
else: gal_distance = None gal_inclination = None gal_d25 = None # Get the size of major and minor axes gal_major = diameter gal_minor = diameter / ratio if diameter is not None and ratio is not None else None # Get the position angle of the galaxy gal_pa = Angle(entry["PA"] - 90.0, "deg") if entry["PA"] else None # Create and return a new Galaxy instance return gal_name, position, gal_redshift, gal_type, gal_names, gal_distance, gal_inclination, gal_d25, gal_major, gal_minor, gal_pa
def runQueriesWithLines(self, restFreqs, redshiftRange=(0, 1000), lineNames=[], public=False, science=False, **kwargs): """Run queries for spectral lines. Parameters ---------- restFreqs : sequence of floats The spectral line rest frequencies to search the query results for. redshiftRange : sequence of floats, optional A two-element sequence defining the lower and upper limits of the object redshifts (in that order) to be searched for. The restFreqs will be shifted using this range to only find observations that have spectral coverage in that redshift range. Default is to search 0 <= z <= 1000 (i.e. all redshifts). lineNames : sequence of strs, optional A sequence of strings containing names for each spectral line to be searched for that will be used as column names in the results table. This must be the same length as restFreqs. Default is to name lines like "Line0", "Line1", "Line2", etc. public : bool Return only publicly available datasets? science : bool Return only data marked as "science" in the archive? kwargs : dict Keywords that are accepted by the ALMA archive system. You can look these up by examining the forms at http://almascience.org/aq. Passed to `astroquery.alma.Alma.query`. "frequency" cannot be specified here since it is used to limit the query to frequencies that could contain the lines in the specified redshift range. If archiveSearch was initialized with the `targets` argument then "source_name_resolver" and "ra_dec" also cannot be used here. Matching against NED to find source redshifts is attempted first with the ALMA archive coordinates, searching in NED with a search radius of 30 arcseconds and only keeping results with type G (galaxy). If more or less than one NED result matches the positional search then a search is attempted based on a sanitized version of the ALMA archive source name. If there is no match to name then the ALMA observation is placed in the queryResultsNoNED dictionary. """ if 'frequency' in kwargs: msg = '"frequency" cannot be passed to runQueriesWithLines' raise ValueError(msg) restFreqs = np.array(restFreqs) lineNames = np.array(lineNames) if (len(lineNames) != len(restFreqs) and len(lineNames) != 0): msg = 'length mismatch between ' \ + '"restFreqs" ({:})'.format(len(restFreqs)) \ + ' and "lineNames" ({:})'.format(len(lineNames)) raise ValueError(msg) if len(lineNames) == 0: lineNames = ['Line{:}'.format(i) for i in range(len(restFreqs))] lineNames = np.array(lineNames) inds = restFreqs.argsort() restFreqs = restFreqs[inds] lineNames = lineNames[inds] redshiftRange = np.array(redshiftRange) redshiftRange.sort() # define frequency range from lines and redshifts lowFreq = self._observedFreq(restFreqs[0], redshiftRange[1]) highFreq = self._observedFreq(restFreqs[-1], redshiftRange[0]) freqLimits = '{:} .. 
{:}'.format(lowFreq, highFreq) self.runQueries(public=public, science=science, frequency=freqLimits, **kwargs) for target in self.targets: if len(self.queryResults[target]) > 0: # targets with ALMA results currTable = self.queryResults[target] # sanitize ALMA source names safeNames = currTable['target_name'] safeNames = np.char.replace(safeNames, b' ', b'') safeNames = np.char.replace(safeNames, b'_', b'') safeNames = np.char.upper(safeNames) currTable['ALMA sanitized source name'] = safeNames # query NED for object redshifts nedResult = list() noNEDinds = list() searchCoords = SkyCoord(ra=currTable['s_ra'], dec=currTable['s_dec'], unit=(u.deg, u.deg), frame='icrs') pBar = trange(len(currTable), desc='NED cross matching', unit=' source') for i in pBar: # coordinate search try: nedSearch = Ned.query_region(searchCoords[i], radius=30 * u.arcsec, equinox='J2000.0') except Exception: pass # only want galaxies typeInds = np.where(nedSearch['Type'] != b'G') nedSearch.remove_rows(typeInds) # try name search when not just one coordinate match if len(nedSearch) != 1: try: nedSearch = Ned.query_object( currTable['ALMA sanitized source name'][i]) except Exception: pass if len(nedSearch) != 1: noNEDinds.append(i) else: # next line prevents vstack warnings nedSearch.meta = None nedResult.append(nedSearch) if len(nedResult) > 0: nedResult = vstack(nedResult, join_type='exact') else: msg = 'No NED results for {:} returned. nedResult = {:}' raise ValueError(msg.format(target, nedResult)) # store away rows without a single NED match self.queryResultsNoNED[target] = currTable[noNEDinds] currTable.remove_rows(noNEDinds) # store away rows without redshift in NED noZinds = nedResult['Redshift'].mask.nonzero() nedResult.remove_rows(noZinds) self.queryResultsNoNEDz[target] = currTable[noZinds] currTable.remove_rows(noZinds) # remove rows where redshift not in range outOfRangeZInds = list() for i, row in enumerate(nedResult): if (redshiftRange[0] > row['Redshift'] or redshiftRange[1] < row['Redshift']): outOfRangeZInds.append(i) nedResult.remove_rows(outOfRangeZInds) currTable.remove_rows(outOfRangeZInds) # rectify this naming difference between NED and ALMA nedResult.rename_column('DEC', 'Dec') nedResult.keep_columns( ['Object Name', 'RA', 'Dec', 'Redshift']) ALMAnedResults = hstack([currTable, nedResult], join_type='exact') # tidy up column names ALMAnedResults.rename_column('target_name', 'ALMA source name') ALMAnedResults.rename_column('s_ra', 'ALMA RA') ALMAnedResults.rename_column('s_dec', 'ALMA Dec') ALMAnedResults.rename_column('Object Name', 'NED source name') ALMAnedResults.rename_column('RA', 'NED RA') ALMAnedResults.rename_column('Dec', 'NED Dec') ALMAnedResults.rename_column('Redshift', 'NED Redshift') # mark flags if spw is on line (initialized to False) lineObserved = np.zeros((len(ALMAnedResults), len(restFreqs)), dtype=bool) for i, row in enumerate(ALMAnedResults): obsFreqs = self._observedFreq(restFreqs, row['NED Redshift']) for j in range(len(obsFreqs)): for spwRange in row['Frequency ranges']: if not lineObserved[i, j]: if spwRange[0] <= obsFreqs[j] <= spwRange[1]: lineObserved[i, j] = True else: break for i in range(len(restFreqs)): ALMAnedResults[lineNames[i]] = lineObserved[:, i] # remove rows which have no lines covered lineCount = np.array(ALMAnedResults[lineNames[0]], dtype=int) for i in range(1, len(restFreqs)): lineCount += np.array(ALMAnedResults[lineNames[i]], dtype=int) noLinesInds = np.where(lineCount == 0) ALMAnedResults.remove_rows(noLinesInds) self.queryResults[target] = 
ALMAnedResults
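# A condensed, hedged sketch of the NED matching strategy described in the docstring
# of runQueriesWithLines above: try a 30-arcsecond positional search first, keep only
# type 'G' results, and fall back to a query on the sanitized source name when that
# does not yield exactly one match. The SkyCoord/astroquery usage mirrors the full
# method; the byte-string comparison for 'Type' follows the code above and may differ
# between astroquery versions.
from astropy.coordinates import SkyCoord
import astropy.units as u
import numpy as np
from astroquery.ned import Ned

def match_ned_galaxy(ra_deg, dec_deg, sanitized_name):
    coord = SkyCoord(ra=ra_deg * u.deg, dec=dec_deg * u.deg, frame='icrs')
    try:
        result = Ned.query_region(coord, radius=30 * u.arcsec, equinox='J2000.0')
        # Only want galaxies
        result.remove_rows(np.where(result['Type'] != b'G')[0])
    except Exception:
        result = None
    if result is None or len(result) != 1:
        # Fall back to a name-based query
        try:
            result = Ned.query_object(sanitized_name)
        except Exception:
            return None
    return result if len(result) == 1 else None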
from astroquery.ned import Ned

image_list = []
image_list += Ned.get_image_list('NGC_5128', item='spectra')
print(image_list)
def test4():
    images = Ned.get_images("m1")
    print(images)
def getNEDInfo(df): df.reset_index(inplace=True, drop=True) df['NED_name'] = "" df['NED_type'] = "" df["NED_vel"] = np.nan df["NED_redshift"] = np.nan df["NED_mag"] = np.nan ra = df["raMean"] dec = df["decMean"] # setup lists for ra and dec in hr format, names of NED-identified object, and # separation between host in PS1 and host in NED ra_hms = [] dec_dms = [] names = [] sep = [] missingCounter = 0 for index, row in df.iterrows(): tempRA = ra[index] tempDEC = dec[index] # create a sky coordinate to query NED c = SkyCoord(ra=tempRA * u.degree, dec=tempDEC * u.degree, frame='icrs') # execute query result_table = [] tempName = "" tempType = "" tempRed = np.nan tempVel = np.nan tempMag = np.nan try: result_table = Ned.query_region(c, radius=(0.00055555) * u.deg, equinox='J2000.0') #print(result_table) if len(result_table) > 0: missingCounter = 0 except: missingCounter += 1 #print(c) if len(result_table) > 0: result_table = result_table[result_table['Separation'] == np.min( result_table['Separation'])] result_table = result_table[result_table['Type'] != b'SN'] result_table = result_table[result_table['Type'] != b'MCld'] result_gal = result_table[result_table['Type'] == b'G'] if len(result_gal) > 0: result_table = result_gal if len(result_table) > 0: result_table = result_table[ result_table['Photometry Points'] == np.nanmax( result_table['Photometry Points'])] result_table = result_table[result_table['References'] == np. nanmax(result_table['References'])] #return result_table # NED Info is presented as: # No. ObjectName RA DEC Type Velocity Redshift Redshift Flag Magnitude and Filter Separation References Notes Photometry Points Positions Redshift Points Diameter Points Associations #Split NED info up - specifically, we want to pull the type, velocity, redshift, mag tempNED = str(np.array(result_table)[0]).split(",") if len(tempNED) > 2: #print("Found one!") tempName = tempNED[1].strip().strip("b").strip("'") if len(tempNED) > 20: seps = [ float(tempNED[9].strip()), float(tempNED[25].strip()) ] if np.argmin(seps): tempNED = tempNED[16:] tempType = tempNED[4].strip().strip("b").strip("''") tempVel = tempNED[5].strip() tempRed = tempNED[6].strip() tempMag = tempNED[8].strip().strip("b").strip("''").strip( ">").strip("<") if tempName: df.loc[index, 'NED_name'] = tempName if tempType: df.loc[index, 'NED_type'] = tempType if tempVel: df.loc[index, 'NED_vel'] = float(tempVel) if tempRed: df.loc[index, 'NED_redshift'] = float(tempRed) if tempMag: tempMag = re.findall(r"[-+]?\d*\.\d+|\d+", tempMag)[0] df.loc[index, 'NED_mag'] = float(tempMag) if missingCounter > 5000: print("Locked out of NED, will have to try again later...") return df return df
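# getNEDInfo above recovers fields by converting a table row to a string and splitting
# on commas, which is fragile. A hedged sketch of the more direct route, reading the
# same quantities by column name from a query_region result (the column names follow
# the "NED Info is presented as" comment above; masked values would still need the
# same guards used in the other snippets):
def ned_row_fields(result_table):
    row = result_table[0]
    return {
        "name": row["Object Name"],
        "type": row["Type"],
        "velocity": row["Velocity"],
        "redshift": row["Redshift"],
        "mag": row["Magnitude and Filter"],
    }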
def test2():
    result_table = Ned.query_region("3c 273", radius=0.05 * u.deg)
    print(result_table)
def GetAndUploadAllData(self, objs, ras, decs, doNED=True): TransientUploadDict = {} assert len(ras) == len(decs) if type(ras[0]) == float: scall = SkyCoord(ras, decs, frame="fk5", unit=u.deg) else: scall = SkyCoord(ras, decs, frame="fk5", unit=(u.hourangle, u.deg)) ebvall, nedtables = [], [] ebvtstart = time.time() if doNED: for sc in scall: try: ebvall += [float('%.3f' % sfd(sc) * 0.86)] except: dust_table_l = IrsaDust.get_query_table(sc) ebvall += [dust_table_l['ext SandF mean'][0]] try: ned_region_table = Ned.query_region(sc, radius=self.nedradius * u.arcmin, equinox='J2000.0') except: ned_region_table = None nedtables += [ned_region_table] print('E(B-V)/NED time: %.1f seconds' % (time.time() - ebvtstart)) tstart = time.time() TNSData = [] json_data = [] for j in range(len(objs)): TNSGetSingle = [("objname", objs[j]), ("photometry", "1"), ("spectra", "1")] response = get(self.tnsapi, TNSGetSingle, self.tnsapikey) json_data += [format_to_json(response.text)] print(time.time() - tstart) print('getting TNS content takes %.1f seconds' % (time.time() - tstart)) for j, jd in zip(range(len(objs)), json_data): tallstart = time.time() obj = objs[j] iobj = np.where(obj == np.array(objs))[0] if len(iobj) > 1: iobj = int(iobj[0]) else: iobj = int(iobj) if doNED: sc, ebv, nedtable = scall[iobj], ebvall[iobj], nedtables[iobj] else: sc = scall[iobj] ebv = None nedtable = None print("Object: %s\nRA: %s\nDEC: %s" % (obj, ras[iobj], decs[iobj])) ######################################################## # For Item in Email, Get NED ######################################################## if type(jd['data']['reply']['name']) == str: jd = jd['data']['reply'] else: jd = None transientdict = self.getTNSData(jd, obj, sc, ebv) #try: photdict = self.getZTFPhotometry(sc) #except: photdict = None try: if jd: photdict,nondetectdate,nondetectmaglim,nondetectfilt,nondetectins = \ self.getTNSPhotometry(jd,PhotUploadAll=photdict) specdict = self.getTNSSpectra(jd, sc) transientdict['transientphotometry'] = photdict transientdict['transientspectra'] = specdict if nondetectdate: transientdict['non_detect_date'] = nondetectdate if nondetectmaglim: transientdict['non_detect_limit'] = nondetectmaglim if nondetectfilt: transientdict['non_detect_band'] = nondetectfilt if nondetectfilt: transientdict['non_detect_instrument'] = nondetectins except: pass try: if doNED: hostdict, hostcoords = self.getNEDData(jd, sc, nedtable) transientdict['host'] = hostdict transientdict['candidate_hosts'] = hostcoords except: pass #try: # phot_ps1dr2 = self.get_PS_DR2_data(sc) # if phot_ps1dr2 is not None: # transientdict['transientphotometry']['PS1DR2'] = phot_ps1dr2 #except: # pass TransientUploadDict[obj] = transientdict if not j % 10: TransientUploadDict['noupdatestatus'] = self.noupdatestatus TransientUploadDict['TNS'] = True self.UploadTransients(TransientUploadDict) TransientUploadDict = {} if j % 10: TransientUploadDict['noupdatestatus'] = self.noupdatestatus TransientUploadDict['TNS'] = True self.UploadTransients(TransientUploadDict) return (len(TransientUploadDict))
def test1():
    result_table = Ned.query_object("NGC 224")
    print(result_table)
def test6():
    spectra = Ned.get_spectra("3c 273")
    print(spectra)
def test5():
    image_list = Ned.get_image_list("m1")
    print(image_list)
h['CALC_COO'] = h['CALC_COO'].replace('h', ':')
h['CALC_COO'] = h['CALC_COO'].replace('d', ':')
h['CALC_COO'] = h['CALC_COO'].replace('m', ':')
h['CALC_COO'] = h['CALC_COO'].replace('s', '')

s = 'Object Name: {OBJECT:s}\n' \
    'Target Coords: {RA:s} {DEC:s} \n' \
    'Coords Found: {CALC_COO:s}'.format(**hdu.header)

ax2 = fig.add_subplot(gs[1])
ax2.text(0.05, 0.5, s, va='center', ha='left')
ax2.set_xticks([])
ax2.set_yticks([])

r = 3 * u.arcmin
region = Ned.query_region(coordinates=c, radius=r)
region = region.to_pandas()
print(region)

# for obj in region:
#     obj_coord = SkyCoord(ra=obj['RA(deg)'] * u.deg, dec=obj['DEC(deg)'] * u.deg)
#     x, y = obj_coord.to_pixel(wcs)
#     ax1.scatter(x, y, edgecolor='None', facecolor='red', marker='o')
#
# plt.tight_layout(pad=1.3, w_pad=2)
# plt.show()
def get_redshift(object_name):
    Q = Ned.query_object(object_name)
    z = Q['Redshift'][0]
    return z
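# A short usage sketch for get_redshift above, converting the returned redshift to a
# rough recession velocity with c*z (valid for small z). The object name is just an
# example; a source without a NED redshift would return a masked value, as the other
# snippets here guard against.
from astropy.constants import c

z = get_redshift("NGC 1068")
print("z = %.5f, cz = %.0f km/s" % (z, c.to('km/s').value * z))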
def find(self):
    """
    Find NED image URLs for the configured galaxy and sort them by filter.
    :return:
    """

    # Get the list
    urls = Ned.get_image_list(self.config.galaxy)

    images = []

    # Print the list
    for url in urls:

        # Get the name
        name = fs.strip_extension(fs.strip_extension(fs.name(url)))  # strip both the .gz as the .fits extension

        # Get the bibcode
        try:
            bibcode = url.split("img/")[1].split("/")[0]
        except IndexError:
            bibcode = None

        if ":" in name:

            splitted = name.split(":")

            if splitted[0].startswith("NGC_"):
                band = splitted[0].split("NGC_")[1][5:]
                try:
                    filter = parse_filter(band)
                    splitted = [self.config.galaxy, None, band, splitted[1]]
                except:
                    pass

            if len(splitted) == 3:
                splitted = [self.config.galaxy, None, splitted[1], splitted[2]]
            elif len(splitted) == 2:
                info_and_band = splitted[0].split("NGC_")[1][5:]
                splitted = [self.config.galaxy, None, info_and_band, splitted[1]]

            galaxy_name = splitted[0]
            unknown = splitted[1]
            band = splitted[2]
            source = splitted[3]

            try:
                year = int(source[-4:])
                if year < 1985: continue
            except ValueError:
                year = None

            images.append((band, year, bibcode, url))

        elif "_" in name:

            splitted = name.split("_")
            band = splitted[-1]
            images.append((band, None, bibcode, url))

        elif "." in name:

            splitted = name.split(".")
            galaxy_name = splitted[0]
            images.append((None, None, bibcode, url))

    # Print
    for band, year, bibcode, url in images:

        if band is None:
            fltr = None
        elif "Ha" in band or "H-alpha" in band or "H_alph" in band:
            fltr = NarrowBandFilter("Ha")
        else:
            try:
                fltr = parse_filter(band)
            except ValueError:
                fltr = None

        #print(fltr, year, bibcode, url)

        if fltr is None:
            self.unknown.append((bibcode, year, url))
        else:
            fltrstring = str(fltr)

            # Add to the images dictionary
            self.images[fltrstring].append((bibcode, year, url))
def make_catalog(region_name, cloud_name, distance, good_cores_array, additional_cores_array, cross_matched_core_indices, cross_matched_proto_indices, alpha_BE, getsources_core_catalog, R_deconv, FWHM_mean, Masses, Masses_err, Temps, Temps_err, not_accepted_counter, CSAR_catalog = '/mnt/scratch-lustre/jkeown/DS9_regions/L1157/CSAR/CEPl1157_CSAR.dat', high_res_coldens_image = '/mnt/scratch-lustre/jkeown/Getsources/Prepare/Images/cep1157/080615/cep1157_255_mu.image.resamp.fits', SED_figure_directory = '/mnt/scratch-lustre/jkeown/DS9_regions/HGBS_pipeline/L1157/L1157_core_SED/', Dunham_YSOs_file = 'Dunham_YSOs.dat'): # These are the values in each column of "good_cores_array" #NO, XCO_P, YCO_P, WCS_ACOOR, WCS_DCOOR, SIG_GLOB, FG, GOOD, SIG_MONO01, FM01, FXP_BEST01, FXP_ERRO01, FXT_BEST01, FXT_ERRO01, AFWH01, BFWH01, THEP01, SIG_MONO02, FM02, FXP_BEST02, FXP_ERRO02, FXT_BEST02, FXT_ERRO02, AFWH02, BFWH02, THEP02, SIG_MONO03, FM03, FXP_BEST03, FXP_ERRO03, FXT_BEST03, FXT_ERRO03, AFWH03, BFWH03, THEP03, SIG_MONO04, FM04, FXP_BEST04, FXP_ERRO04, FXT_BEST04, FXT_ERRO04, AFWH04, BFWH04, THEP04, SIG_MONO05, FM05, FXP_BEST05, FXP_ERRO05, FXT_BEST05, FXT_ERRO05, AFWH05, BFWH05, THEP05, SIG_MONO06, FM06, FXP_BEST06, FXP_ERRO06, FXT_BEST06, FXT_ERRO06, AFWH06, BFWH06, THEP06, SIG_MONO07, FM07, FXP_BEST07, FXP_ERRO07, FXT_BEST07, FXT_ERRO07, AFWH07, BFWH07, THEP07 # These are the values in each column of the "additional" "cores_array" and "protostar_array" # NO XCO_P YCO_P PEAK_SRC01 PEAK_BGF01 CONV_SRC01 CONV_BGF01 PEAK_SRC02 PEAK_BGF02 CONV_SRC02 CONV_BGF02 PEAK_SRC03 PEAK_BGF03 CONV_SRC03 CONV_BGF03 PEAK_SRC04 PEAK_BGF04 CONV_SRC04 CONV_BGF04 PEAK_SRC05 PEAK_BGF05 CONV_SRC05 CONV_BGF05 PEAK_SRC06 PEAK_BGF06 CONV_SRC06 CONV_BGF06 PEAK_SRC07 PEAK_BGF07 CONV_SRC07 CONV_BGF07 # Make a column of 1's identifying protostellar cores protostellar_catalog_column = numpy.empty(len(good_cores_array[:,0]), dtype='object') if len(cross_matched_core_indices)>0: protostellar_catalog_column[numpy.array(cross_matched_core_indices)]='1' protostellar_catalog_column = numpy.array(protostellar_catalog_column, dtype='S12') # Make a column indicating core type: starless, prestellar, or protostellar core_type_column = numpy.where(alpha_BE<=5.0, "prestellar", "starless") core_type_column = numpy.array(core_type_column, dtype='S12') core_type_column2 = numpy.where(protostellar_catalog_column=='1', "protostellar", core_type_column) # Make S_peak/S_background column at each wavelength S_peak_bg_070 = additional_cores_array[:,3]/additional_cores_array[:,4] S_peak_bg_160 = additional_cores_array[:,7]/additional_cores_array[:,8] S_peak_bg_165 = additional_cores_array[:,11]/additional_cores_array[:,12] S_peak_bg_250 = additional_cores_array[:,15]/additional_cores_array[:,16] S_peak_bg_255 = additional_cores_array[:,19]/additional_cores_array[:,20] S_peak_bg_350 = additional_cores_array[:,23]/additional_cores_array[:,24] S_peak_bg_500 = additional_cores_array[:,27]/additional_cores_array[:,28] # Make S_conv column at each wavelength (convert MJy/str to Jy/beam, then to H2/cm**2) # Prepareobs scales down the column density image by a factor of 1e20 # The final units in the catalog will be off by a factor of 1e20 S_conv_070 = numpy.array(additional_cores_array[:,5]*(10**6)*((numpy.pi/180.0/3600.0)**2)*1.13309*(36.3**2)) S_conv_160 = numpy.array(additional_cores_array[:,9]*(10**6)*((numpy.pi/180.0/3600.0)**2)*1.13309*(36.3**2)) S_conv_165 = numpy.array(additional_cores_array[:,13]*(10**6)*((numpy.pi/180.0/3600.0)**2)*1.13309*(36.3**2)) 
S_conv_250 = numpy.array(additional_cores_array[:,17]*(10**6)*((numpy.pi/180.0/3600.0)**2)*1.13309*(36.3**2)) S_conv_255 = numpy.array(additional_cores_array[:,21]*(10**6)*((numpy.pi/180.0/3600.0)**2)*1.13309*(36.3**2)) S_conv_350 = numpy.array(additional_cores_array[:,25]*(10**6)*((numpy.pi/180.0/3600.0)**2)*1.13309*(36.3**2)) S_conv_500 = numpy.array(additional_cores_array[:,29]*(10**6)*((numpy.pi/180.0/3600.0)**2)*1.13309*(36.3**2)) N_H2_bg = numpy.array(additional_cores_array[:,20]) # Define a function that produces a Flux/beam given wavelength, Temp, and ColDense # We will input wavelength then find T and M using least squares minimization below def col_dense(wavelength, T, N_H2): #wavelength input in microns, Temp in Kelvin, N_H2 in cm**-2 #returns S_v in units of Jy/beam wavelength_mm = numpy.array(wavelength)*10.**-3. exponent = 1.439*(wavelength_mm**-1)*((T/10.)**-1) aaa = ((2.02*10**20)*(numpy.exp(exponent)-1.0))**-1.0 bbb = (0.1*((numpy.array(wavelength)/300.)**-2.0))/0.01 ccc = (36.3/10.)**2. ddd = wavelength_mm**-3. return N_H2*aaa*bbb*ccc*ddd*(10**-3) guess = [10.0, 1.0*10.**21.] N_H2_peak = [] counter = 0 for S_160, S_250, S_350, S_500 in zip(S_conv_160, S_conv_250, S_conv_350, S_conv_500): #print 'Fitting S_peak for Core ' + str(counter) + ' of ' + str(int(len(good_cores_array[:,0]))) wavelengths = [160.,250.,350.,500.] fluxes = [S_160, S_250, S_350, S_500] flux_err = [S_160*0.2, S_250*0.1, S_350*0.1, S_500*0.1] try: popt,pcov = curve_fit(col_dense, wavelengths, fluxes, p0=guess, sigma=flux_err) except RuntimeError: popt = [-9999., -9999.] N_H2_peak.append(popt[1]) counter+=1 # Calculate the FWHM_mean at 500 microns AFWH07 = good_cores_array[:,68] BFWH07 = good_cores_array[:,69] A = numpy.float64(((((AFWH07)/60.)/60.)*numpy.pi)/180.) #radians A1 = numpy.float64(numpy.tan(A/2.)*2.*distance*(3.086e18)) #cm B = numpy.float64(((((BFWH07)/60.)/60.)*numpy.pi)/180.) 
#radians B1 = numpy.float64(numpy.tan(B/2.)*2.*distance*(3.086e18)) #cm FWHM_mean_500 = mstats.gmean([A1,B1]) Vol_dense_peak = (((4.0*numpy.log(2.0))/numpy.pi)**0.5)*(numpy.array(N_H2_peak)/FWHM_mean_500) # Import CSAR-matched core indices print "Cross-matching getsources and CSAR Catalogs:" CSAR_matched_cores_indices = CSAR_core_cross_match.cross_match_CSAR(getsources_core_catalog, CSAR_catalog, high_res_coldens_image) # Get a cloumn of 1's identifying CSAR cross-matched cores CSAR_catalog_column = numpy.zeros(len(good_cores_array[:,0]), dtype='int') CSAR_catalog_column[numpy.array(CSAR_matched_cores_indices)]+=1 # Make a column indicating the number of significant Herschel bands N_SED = [] for line in good_cores_array: counter = 0 if line[8]>5 and line[12]>0: counter+=1 # Statement below uses the 160micron map, not the temp-corrected map if line[17]>5 and line[21]>0: counter+=1 if line[35]>5 and line[39]>0: counter+=1 if line[53]>5 and line[57]>0: counter+=1 if line[62]>5 and line[66]>0: counter+=1 N_SED.append(counter) # Convert the decimal degrees coordinates of getsources into hh:mm:ss and dd:mm:ss RA_array = [] Dec_array = [] HGBS_name_array = [] for line in good_cores_array: RA = astropy.coordinates.Angle(line[3], u.degree) DEC = astropy.coordinates.Angle(line[4], u.degree) RA_hours = str('{:.0f}'.format(round(RA.hms[0],2)).zfill(2)) RA_minutes = str('{:.0f}'.format(round(RA.hms[1],2)).zfill(2)) RA_seconds = str('{:.2f}'.format(round(RA.hms[2],2)).zfill(5)) if DEC.hms[0] > 0: DEC_degs = str('{:.0f}'.format(round(DEC.dms[0],2)).zfill(2)) DEC_minutes = str('{:.0f}'.format(round(DEC.dms[1],2)).zfill(2)) DEC_seconds = str('{:.2f}'.format(round(DEC.dms[2],2)).zfill(5)) name_sign = '+' HGBS_name = RA_hours+RA_minutes+RA_seconds[0:4]+name_sign+DEC_degs+DEC_minutes+DEC_seconds[0:2] else: DEC_degs = str('{:.0f}'.format(round(DEC.dms[0],2)).zfill(3)) DEC_minutes = str('{:.0f}'.format(round(DEC.dms[0]*-1,2)).zfill(2)) DEC_seconds = str('{:.2f}'.format(round(DEC.dms[2]*-1,2)).zfill(5)) HGBS_name = RA_hours+RA_minutes+RA_seconds[0:4]+DEC_degs+DEC_minutes+DEC_seconds[0:2] RA_array.append(RA_hours + ':' + RA_minutes + ':' + RA_seconds) Dec_array.append(DEC_degs + ':' + DEC_minutes + ':' + DEC_seconds) HGBS_name_array.append("HGBS_J"+HGBS_name) core_number = numpy.arange(len(good_cores_array[:,0]))+1 catalog_array_70_160 = good_cores_array[:,8:26] catalog_array_70_160 = numpy.delete(catalog_array_70_160, (1,10), 1) catalog_array_250 = good_cores_array[:,35:44] catalog_array_250 = numpy.delete(catalog_array_250, 1, 1) catalog_array_coldense = good_cores_array[:,44:53] catalog_array_coldense = numpy.delete(catalog_array_coldense, 1, 1) catalog_array_350_500 = good_cores_array[:,53:71] catalog_array_350_500 = numpy.delete(catalog_array_350_500, (1,10), 1) Cloud,Name,Av,alpha,T_bol,L_bol,alphaPrime,TbolPrime,LbolPrime,likelyAGB,Dunham_RA,Dunham_DEC,Class = numpy.loadtxt(Dunham_YSOs_file, delimiter=',', unpack=True, dtype=[('Cloud','S30'),('Name','S40'), ('Av',float),('alpha',float), ('T_bol',float),('L_bol',float), ('alphaPrime',float),('TbolPrime',float), ('LbolPrime',float),('likelyAGB','S1'), ('Dunham_RA',float),('Dunham_DEC',float),('Class','S10') ]) Dunham_indices = numpy.where(Cloud==cloud_name) Spitzer_YSOs_RA = Dunham_RA[Dunham_indices] Spitzer_YSOs_DEC = Dunham_DEC[Dunham_indices] Spitzer_YSOs_Name = Name[Dunham_indices] potential_matches = [] YSO_matches = [] count = 0 for line in good_cores_array: match_counter=0 YSO_index = 0 for RA,DEC in zip(Spitzer_YSOs_RA, Spitzer_YSOs_DEC): distance = 
((line[3]-RA)**2 + (line[4]-DEC)**2)**0.5 if distance < 6.0/3600. and match_counter==0: # matched_counter prevents counting indices twice # if two YSO candidates fall within getsources ellipse potential_matches.append(count) match_counter+=1 YSO_matches.append(YSO_index) YSO_index+=1 count += 1 Spitzer_column = numpy.zeros(len(good_cores_array[:,0]), dtype='S40') Spitzer_column[numpy.arange(0,len(good_cores_array[:,0]))] = 'None' if len(potential_matches)>0: Spitzer_column[numpy.array(potential_matches)] = Spitzer_YSOs_Name[numpy.array(YSO_matches)] # Cross-match cores with SIMBAD catalog print "Cross-matching SIMBAD catalog:" RA, Dec = numpy.loadtxt(SED_figure_directory + region_name +'_SIMBAD_RA_DEC.dat', unpack=True) Simbad.ROW_LIMIT = 1 results = [] for i,j in zip(RA,Dec): result_table = Simbad.query_region(astropy.coordinates.SkyCoord(ra=i, dec=j, unit=(u.deg, u.deg)), radius=6. * u.arcsec) if result_table != None: results.append(result_table['MAIN_ID'][0].replace(" ", "_")) else: results.append('None') # Cross-match cores with NED catalog print "Cross-matching NED catalog:" Ned.ROW_LIMIT = 1 results2 = [] for i,j in zip(RA,Dec): result_table_value='Yes' try: result_table = Ned.query_region(astropy.coordinates.SkyCoord(ra=i, dec=j, unit=(u.deg, u.deg)), radius=6. * u.arcsec) except astroquery.exceptions.RemoteServiceError: result_table_value=None if result_table_value != None: results2.append(result_table['Object Name'][0].replace(" ", "_")) else: results2.append('None') zipped_array = zip(core_number, HGBS_name_array, RA_array, Dec_array, catalog_array_70_160[:,0], catalog_array_70_160[:,1], catalog_array_70_160[:,2], S_peak_bg_070, S_conv_070, catalog_array_70_160[:,3], catalog_array_70_160[:,4], catalog_array_70_160[:,5], catalog_array_70_160[:,6], catalog_array_70_160[:,7], catalog_array_70_160[:,8], catalog_array_70_160[:,9], catalog_array_70_160[:,10], S_peak_bg_160, S_conv_160, catalog_array_70_160[:,11], catalog_array_70_160[:,12], catalog_array_70_160[:,13], catalog_array_70_160[:,14], catalog_array_70_160[:,15], catalog_array_250[:,0], catalog_array_250[:,1], catalog_array_250[:,2], S_peak_bg_250, S_conv_250, catalog_array_250[:,3], catalog_array_250[:,4], catalog_array_250[:,5], catalog_array_250[:,6], catalog_array_250[:,7], catalog_array_350_500[:,0], catalog_array_350_500[:,1], catalog_array_350_500[:,2], S_peak_bg_350, S_conv_350, catalog_array_350_500[:,3], catalog_array_350_500[:,4], catalog_array_350_500[:,5], catalog_array_350_500[:,6], catalog_array_350_500[:,7], catalog_array_350_500[:,8], catalog_array_350_500[:,9], catalog_array_350_500[:,10], S_peak_bg_500, catalog_array_350_500[:,11], catalog_array_350_500[:,12], catalog_array_350_500[:,13], catalog_array_350_500[:,14], catalog_array_350_500[:,15], catalog_array_coldense[:,0], additional_cores_array[:,19], S_peak_bg_255, S_conv_255, N_H2_bg, catalog_array_coldense[:,5], catalog_array_coldense[:,6], catalog_array_coldense[:,7], N_SED, CSAR_catalog_column, core_type_column2, results, results2, Spitzer_column) catalog1 = numpy.array(zipped_array, dtype=[('core_number',int),('HGBS_name_array','S30'),('RA_array','S16'),('Dec_array','S16'),('catalog_array_70_160_1',float),('catalog_array_70_160_2',float), ('catalog_array_70_160_3',float),('S_peak_bg_070',float),('S_conv_070',float), ('catalog_array_70_160_4',float),('catalog_array_70_160_5',float),('catalog_array_70_160_6',float), ('catalog_array_70_160_7',float),('catalog_array_70_160_8',float), ('catalog_array_70_160_9',float),('catalog_array_70_160_10',float), 
('catalog_array_70_160_11',float),('S_peak_bg_160',float),('S_conv_160',float),('catalog_array_70_160_12',float),('catalog_array_70_160_13',float),('catalog_array_70_160_14',float), ('catalog_array_70_160_15',float),('catalog_array_70_160_16',float), ('catalog_array_250_1',float),('catalog_array_250_2',float), ('catalog_array_250_3',float),('S_peak_bg_250',float),('S_conv_250',float),('catalog_array_250_4',float),('catalog_array_250_5',float),('catalog_array_250_6',float), ('catalog_array_250_7',float),('catalog_array_250_8',float),('catalog_array_350_500_1',float),('catalog_array_350_500_2',float), ('catalog_array_350_500_3',float),('S_peak_bg_350',float),('S_conv_350',float),('catalog_array_350_500_4',float),('catalog_array_350_500_5',float),('catalog_array_350_500_6',float), ('catalog_array_350_500_7',float),('catalog_array_350_500_8',float), ('catalog_array_350_500_9',float),('catalog_array_350_500_10',float), ('catalog_array_350_500_11',float),('S_peak_bg_500',float),('catalog_array_350_500_12',float),('catalog_array_350_500_13',float),('catalog_array_350_500_14',float), ('catalog_array_350_500_15',float),('catalog_array_350_500_16',float), ('catalog_array_coldense_1',float),('catalog_array_coldense_2',float),('S_peak_bg_255',float),('S_conv_255',float),('additional_cores_array_28',float),('catalog_array_coldense_6',float), ('catalog_array_coldense_7',float),('catalog_array_coldense_8',float),('N_SED',int),('CSAR_catalog_column',int),('core_type_column','S16'), ('SIMBAD_column','S60'), ('NED_column','S60'), ('Spitzer_column','S40')]) header1 = 'core_number, core_name, RA_hms, DEC_dms, sig_070, peak_flux_070, peak_flux_err_070, peak_flux_over_bg_070, peak_070_conv_500, total_flux_070, total_flux_err_070, AFWHM_070, BFWHM_070, PA_070, sig_160, peak_flux_160, peak_flux_err_160, peak_flux_over_bg_160, peak_160_conv_500, total_flux_160, total_flux_err_160, AFWHM_160, BFWHM_160, PA_160, sig_250, peak_flux_250, peak_flux_err_250, peak_flux_over_bg_250, peak_250_conv_500, total_flux_250, total_flux_err_250, AFWHM_250, BFWHM_250, PA_250, sig_350, peak_flux_350, peak_flux_err_350, peak_flux_over_bg_350, peak_350_conv_500, total_flux_350, total_flux_err_350, AFWHM_350, BFWHM_350, PA_350, sig_500, peak_flux_500, peak_flux_err_500, peak_flux_over_bg_500, total_flux_500, total_flux_err_500, AFWHM_500, BFWHM_500, PA_500, sig_coldens, peak_flux_coldens, peak_flux_over_bg_coldens, peak_coldens_conv_500, peak_bg_coldens, AFWHM_coldens, BFWHM_coldens, PA_coldens, N_SED, CSAR, core_type, SIMBAD_match, NED_match, Spitzer_match' numpy.savetxt(SED_figure_directory + region_name + '_core_catalog1.dat', catalog1, fmt="%i %s %s %s %3.1f %1.2e %1.1e %3.3f %1.2e %1.2e %1.1e %3.1f %3.1f %3.1f %3.1f %1.2e %1.1e %3.3f %1.2e %1.2e %1.1e %3.1f %3.1f %3.1f %3.1f %1.2e %1.1e %3.3f %1.2e %1.2e %1.1e %3.1f %3.1f %3.1f %3.1f %1.2e %1.1e %3.3f %1.2e %1.2e %1.1e %3.1f %3.1f %3.1f %3.1f %1.2e %1.1e %3.3f %1.2e %1.1e %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %i %i %s %s %s %s", header=header1) mu = 2.8 # mean molecular weight mass_H = 1.67372e-24 # (grams) mass of neutral Hydrogen atom solar_mass = 1.989e33 # (grams) mass_H_solar_masses = mass_H / solar_mass parsec = 3.086e18 # cm R_deconv_cm = numpy.array(R_deconv)*parsec FWHM_mean_cm = numpy.array(FWHM_mean)*parsec N_H2_avg_1 = (numpy.array(Masses)/(numpy.pi*(R_deconv_cm**2.))) * (1/(mu*mass_H_solar_masses)) N_H2_avg_2 = (numpy.array(Masses)/(numpy.pi*(FWHM_mean_cm**2.))) * (1/(mu*mass_H_solar_masses)) avg_Volume_dens_1 = 
(numpy.array(Masses)/(numpy.pi*(4./3.)*(R_deconv_cm**3.))) * (1/(mu*mass_H_solar_masses)) avg_Volume_dens_2 = (numpy.array(Masses)/(numpy.pi*(4./3.)*(FWHM_mean_cm**3.))) * (1/(mu*mass_H_solar_masses)) catalog2 = numpy.array(zip(core_number, HGBS_name_array, RA_array, Dec_array, R_deconv, FWHM_mean, Masses, Masses_err, Temps, Temps_err, N_H2_peak, N_H2_avg_1, N_H2_avg_2, Vol_dense_peak, avg_Volume_dens_1, avg_Volume_dens_2, alpha_BE, core_type_column2, not_accepted_counter), dtype=[('core_number',int),('HGBS_name_array','S30'),('RA_array','S16'),('Dec_array','S16'),('R_deconv',float),('FWHM_mean',float),('Masses',float), ('Masses_err',float),('Temps',float),('Temps_err',float),('N_H2_peak',float),('N_H2_avg_1',float),('N_H2_avg_2',float),('Vol_dense_peak',float),('avg_Volume_dens1',float),('avg_Volume_dens_2',float),('alpha_BE',float),('core_type_column2','S16'),('not_accepted_counter','S16')]) header2 = 'core_number, core_name, RA_hms, DEC_dms, R_deconv, FWHM_mean, Mass, Mass_err, Temp_dust, Temp_dust_err, N_H2_peak, N_H2_avg_1, N_H2_avg_2, Vol_dense_peak, avg_Volume_dens_1, avg_Volume_dens_2, alpha_BE, core_type_column2, not_accepted_counter' numpy.savetxt(SED_figure_directory + region_name +'_core_catalog2.dat', catalog2, fmt="%i %s %s %s %1.1e %1.1e %1.3f %1.2f %2.1f %2.1f %1.2e %1.2e %1.2e %1.2e %1.2e %1.2e %2.1f %s %s", header=header2)
with open("gnames.txt", "w") as output:
    for name in gnames:
        output.write(name)
        output.write("\n")

image_list = []
full_list = []
#print ('here')

from astroquery.ned import Ned
#print ('okay')

output = open("download.txt", "w")
#print (len(gnames))
for name in gnames:
    image_list = Ned.get_image_list(name, item='spectra')
    full_list += image_list
    print("full_list done")
    for item in image_list:
        output.write(item)

print('at the end')
#print (len(image_list))
output.close()
def test3():
    result_table = Ned.query_region(coord.FK4(ra=56.38, dec=38.43, unit=(u.deg, u.deg)),
                                    radius=0.1 * u.deg, equinox='B1950.0')
    print(result_table)
def Background_Finder_3( gname, evtfname, objLfname, R ): #Need to apply energy filter (0.3kev to 10kev) to the counts, This may allow the code to treat back illuminated chips and front illuminated chips the same, if not then the code must be modifed to consider both cases """ gname:-str, Galaxy Name, The name of the galaxy in the form NGC #, For Example 'NGC 3077' evtfname:-str, Event File Name, The name of the event file of the observation, For Example 'acisf02076_repro_evt2.fits' objLfname:-str, Object List File Name, The name of the object list file which is a list of circluar regions around the X-ray objects. For Example 'ngc3077_ObsID-2076_Source_List_R_Mod_2.txt' n:-int, Number of objects, The number of objects in the observation R:-float(?) or int, Radius, The radius of the circle used to find the background in pixels Returns: BG_Ratio:-float, Background Ratio, The background ratio in number of counts per pixel or "None" if a region without an object in it cannot be found """ Obj_L = [ ] #Obj_L:-List, Object_List, The list of all object string shapes in the observation Obj_B = True #Obj_B:-bool, Object Boolean, A Boolean statement in regards to if there is no X-ray objects in the area being used to find the background List_Done_Bool = False #List_Done_Bool:-bool, List_Done_Boolean, A Boolean statement in regards to if 3 background measurments were found in the observation #BG_Circle_Overlap_Bool=False BG_R = R # Note: Physical Radius might not be equal to the Pixel Radius Num_BG_Pix = math.pi * ( (BG_R)**2 ) #Num_BG_Pix:-float or int, the number of pixels in the background test region print Num_BG_Pix CCD_L = [ ] # Note: I don't even know if I need this, It's only defined here and never used again I think Obj_Shape = "" # Note: I don't even know if I need this, It's only defined here and never used again I think #system('pwd') #system('ls') #system('cd ~/Desktop/Big_Object_Regions/') #os.chdir('~/Desktop/Big_Object_Regions/') dir = os.path.dirname(__file__) #filename= os.path.join(dir, '~','Desktop','SQL_Standard_File',) #filepath=os.path.abspath("~/Desktop/SQL_Standard_File") #print "Filepath =",filepath #path= os.path.join(dir,'~','Desktop','SQL_Standard_File',) #path=os.path.realpath('~/Desktop/SQL_Standard_File/SQL_Sandard_File.csv') path = os.path.realpath('../Big_Object_Regions/') print "Path=", path #system('pwd') os.chdir(path) #system('ls') #os.chdir("~") #os.system("cd ~") #Objfile=open("Desktop/Big_Object_Regions/"+str(objLfname),"r") #Objfile:-file, Objectfile, a file containing the regions of the X-ray objects in the observation as strings regions Objfile = open(str(objLfname), "r") #print type(Objfile) path2 = os.path.realpath( '../Background_Finder/' ) #Changes PWD back to this code's PWD in Desktop/Background_Finder, this may be Changed later to go to the location of the Evt2 file that will be used in the DMCOORDS, the location will be given by File_Query_Code os.chdir(path2) Objstring = Objfile.read( ) #Objstring:-str, Objstring, the all X-ray object regions all in one big string with each object "\n" seperated #print Objstring #print type(Objstring) G_Data = Ned.query_object( gname ) #G_Data:-astropy.table.table.Table, Galaxy_Data, The queryed data of the galaxy from NED in the form of a astropy table #print G_Data #print type(G_Data) raGC = float( G_Data['RA(deg)'] ) #raGC:-float, Right Ascension of Galatic Center, The right ascension of the galatic center of the current galaxy in degrees. 
decGC = float( G_Data['DEC(deg)'] ) #decGC:-float, Declination of Galatic Center, The declination of the galatic center of the current galaxy in degrees. """ Dia_A= Ned.get_table(gname,table='diameters') #Dia_A:-astropy.table.table.Table, Diameter_Array, The astropy table that contains the diameter info for the galaxy, which is referred to as an array #print type(Dia_A) #print Dia_A Dia_A2=Dia_A[6] #Dia_A2:-astropy.table.row.Row, Diameter Array 2, The diameter subarray using RC3 D_0 (blue) standard for the diameter, contians the galaxy diameter infomation as an astropy row #print type(Dia_A2) #print Dia_A2 Maj=Dia_A2[18] #Maj:-numpy.float64, Major axis, The major axis of the galaxy in arcseconds #print type(Maj) #print Maj #Maj=Dia_A2[18] Min=Dia_A2[25] #Min:-numpy.float64, Minor axis, The minor axis of the galaxy in arcseconds #print type(Min) #print Min S_Maj=Maj/2 #S_Maj:-numpy.float64, Semi_Major axis, The semi major axis of the galaxy in acrseconds """ G_Data = Ned.query_object(gname) Dia_Table = Ned.get_table(gname, table='diameters') #print G_Data #print Dia_Table #print Dia_Table.colnames #print Dia_Table.meta #print Dia_Table.columns Dia_Table_Feq = Dia_Table['Frequency targeted'] #print Dia_Table['NED Frequency'] #print Dia_Table_Feq Dia_Table_Feq_L = list(Dia_Table_Feq) #print Dia_Table_Feq_L Dia_Table_Num = Dia_Table['No.'] #print Dia_Table_Num Dia_Table_Num_L = list(Dia_Table_Num) #print Dia_Table_Num_L for i in range( 0, len(Dia_Table_Feq_L) - 1 ): #There is a bug here with index matching, The matched index isn't that same index for the major axis Cur_Feq = Dia_Table_Feq_L[i] #print Cur_Feq if (Cur_Feq == "RC3 D_25, R_25 (blue)"): Match_inx = i Match_Feq = Dia_Table_Feq_L[Match_inx] Match_Num = Dia_Table_Num_L[Match_inx] #Match_Num #print "Match_Feq ", Match_Feq #print "Match_inx ", Match_inx #print "Match_Num ", Match_Num #Dia_Table_Maj=Dia_Table['Major Axis'] Dia_Table_Maj = Dia_Table['NED Major Axis'] #print Dia_Table_Maj Dia_Table_Maj_L = list(Dia_Table_Maj) #print Dia_Table_Maj_L Dia_Table_Maj_Units = Dia_Table['Major Axis Unit'] #print Dia_Table_Maj_Units Dia_Table_Maj_Units_L = list(Dia_Table_Maj_Units) #print Dia_Table_Maj_Units_L #print "i ", i D25_Maj = Dia_Table_Maj_L[Match_inx] #print "D25_Maj ", D25_Maj D25_Units = Dia_Table_Maj_Units[Match_inx] #print "D25_Units ", D25_Units #print type(Dia_Table) #print Dia_Table.info() #Dia_Table_2=Dia_Table[6] #print Dia_Table_2 #Maj=Dia_Table_2[18] #print "Maj, ! ! !", Maj D25_S_Maj = D25_Maj / 2.0 #D25_S_Maj_Deg=D25_S_Maj/3600.0 dmcoords( infile=str(evtfname), ra=str(raGC), dec=str(decGC), option='cel', verbose=0, celfmt='deg' ) # Runs the dmcoords CIAO tool, which converts coordinates like CHIP_ID to SKY, the tool is now being used to convert the RA and Dec of the GC to SKY coodinates in pixels (?) 
X_Phys = dmcoords.x #X_Phys:-float, X_Physical, The sky plane X pixel coordinate in units of pixels of the galatic center Y_Phys = dmcoords.y #Y_Phys:-float, Y_Physical, The sky plane Y pixel coordinate in units of pixels of the galatic center Chip_ID = dmcoords.chip_id #Chip_ID:-int, Chip_ID, The Chip ID number the GC is on print Chip_ID print "GC X is ", X_Phys print "GC Y is ", Y_Phys #R_Phys=S_Maj*2.03252032520325 #R_Phys:-numpy.float64, Radius_Physical, The radius of the galaxy in pixels, the converstion factor is 2.03252032520325pix/arcsec R_Phys = D25_S_Maj * 2.03252032520325 #R_Phys:-numpy.float64, Radius_Physical, The radius of the galaxy in pixels, the converstion factor is 2.03252032520325pix/arcsec #D25_S_Maj #print type(R_Phys) print "Radius of Galaxy is ", R_Phys Gal_V_Shape = 'circle(' + str(X_Phys) + ',' + str(Y_Phys) + ',' + str( R_Phys) + ')' # This might not be used at all in this code Objstring_L = Objstring.split("\n") del Objstring_L[len(Objstring_L) - 1] #print "n ", n #print "Objstring_L ", Objstring_L #print "len(Objstring_L) ", len(Objstring_L) for Cur_Obj in Objstring_L: Obj_L.append(Cur_Obj) """ for i in range(0,n): Cur_Obj= Objstring.split("\n")[i] #Cur_Obj:-str, Current Object, The current X-ray object region string that is being added to the Object List Obj_L.append(Cur_Obj) #Obj_L:-List, Object List, list of the string regions of all the X-ray objects that are in the observation """ Header_String = dmlist(infile=str(evtfname), opt="header") #print Header_String Header_String_Reduced = Header_String.split("DETNAM")[1] #print Header_String_Reduced Header_String_Reduced_2 = Header_String_Reduced.split("String")[0] #print Header_String_Reduced_2 Header_String_Reduced_3 = Header_String_Reduced_2.replace(' ', '') print Header_String_Reduced_3 #dmkeypar(infile=str(evtfname), keyword="DETNAM") #pget(paramfile, paramname) #Chip_ID_String=pget(toolname="dmkeypar", parameter="value") #Chip_ID_String=pget("dmkeypar","value") #Chip_ID_String:-str, Chip_Idenifcation_String, Runs the pget tool to get the string containing what CCDs are used in the FOV1.fits file from the parameter file asscoiated with the dmkeypar tool and sets it equal to the Chip_ID_String (This) variable Chip_ID_String = Header_String_Reduced_3 #Chip_ID_String:-str, Chip_Idenifcation_String, Runs the pget tool to get the string containing what CCDs are used in the FOV1.fits file from the parameter file asscoiated with the dmkeypar tool and sets it equal to the Chip_ID_String (This) variable #Chip_ID_String=pget(toolname="dmkeypar", p_value="value") print "Chip_ID_String ", Chip_ID_String Chip_ID_String_L = Chip_ID_String.split( '-' ) #Chip_ID_String_L:-List, Chip_Idenifcation_String_List, The resulting list from spliting the Chip_ID_String on "_", This list contains 2 elements, the first element is the string "ACIS" and the second element is the string segment in the form (Example) "356789" where each number in the list is its own CCD ID #print "Chip_ID_String_L ", Chip_ID_String_L Chip_ID_String_Reduced = Chip_ID_String_L[ 1] #Chip_ID_String_Reduced:-str, Chip_Idenifcation_String_Reduced, the string segment in the form (Example) "356789" where each number in the list is its own CCD ID print "Chip_ID_String_Reduced ", Chip_ID_String_Reduced Chip_ID_L = [ ] #Chip_ID_L:-List, Chip_Idenifcation_List, The list of all the int CCD IDs in FOV1.fits file for Cur_Chip_ID_Str in Chip_ID_String_Reduced: #Cur_Chip_ID_Str:-str, Current_Chip_Idenifcation_Str, The string vaule of the current string CCD ID in the 
Chip_ID_String_Reduced string, for example "3" Cur_Chip_ID = int( Cur_Chip_ID_Str ) #Cur_Chip_ID:-int, Current_Chip_Idenifcation, The current chip ID number as an int, for example 3 Chip_ID_L.append( Cur_Chip_ID ) #Appends The current chip ID number as an int to Chip_Idenifcation_List print "Chip_ID_L ", Chip_ID_L #Step_L=[500,250,100,50,25,10,5,1] Step_L = [500, 250, 100] Background_L = [] BG_Circle_Info_L = [] #BG_Circle_Overlap_Bool=False if (len(Background_L) <= 3): for Step in Step_L: #print "Step ", Step for Chip_ID_Test in Chip_ID_L: #print "Chip_ID_Test ", Chip_ID_Test for c in range( 0 + BG_R, 1025 - BG_R, Step ): # c is "x" #Check Bounds #The Bounds for CHIP coordinates are (1,1024)(both included), ie range(1,1025), So if this is correct (I am not 100% sure about these CHIP bounds), "for c in range(0+BG_R,1025-BG_R):" should instead be "for c in range(1+BG_R,1025-BG_R):" for v in range( 0 + BG_R, 1025 - BG_R, Step ): # v is "y" #Check Bounds, should instead be "for v in range(1+BG_R,1025-BG_R):"(?) BG_Circle_Overlap_Bool = False Obj_B = True #Obj_B:-bool, Object Boolean, A Boolean statement in regards to if there is no X-ray objects in the area being used to find the background #print " " # Puts a space between objects BG_X = c #BG_X:-int, BackGround circle_X, The x coordinate of the backgound circle in Chip coordinates, Note: This should probably be a float along with all numerical imputs to this function #print type(BG_X) BG_Y = v #BG_Y:-int, BackGround circle_Y, The y coordinate of the backgound circle in Chip coordinates, Note: This should probably be a float along with all numerical imputs to this function #print "Chip x is ",c #print "Chip y is ",v #dmcoords(infile=str(evtfname),chipx=BG_X, chipy=BG_Y, chip_id=Chip_ID, option='chip', verbose=0) # Runs the dmcoords CIAO tool, which converts coordinates like CHIP_ID to SKY, The tool is now being used to convert the Background Circle center from CHIP to SKY coordinates (?) dmcoords( infile=str(evtfname), chipx=BG_X, chipy=BG_Y, chip_id=Chip_ID_Test, option='chip', verbose=0 ) # Runs the dmcoords CIAO tool, which converts coordinates like CHIP_ID to SKY, The tool is now being used to convert the Background Circle center from CHIP to SKY coordinates (?) 
                    BG_X_Pix = dmcoords.x  # BG_X_Pix: float, x of the background-circle center in SKY coordinates (pixels)
                    BG_Y_Pix = dmcoords.y  # BG_Y_Pix: float, y of the background-circle center in SKY coordinates (pixels)

                    # Dis_GC: float, distance from the background circle to the galactic center in pixels
                    Dis_GC = math.sqrt(((BG_X_Pix - X_Phys)**2) + ((BG_Y_Pix - Y_Phys)**2))

                    # Test the candidate against every background circle accepted so far (on any chip, not just the current one)
                    if (len(BG_Circle_Info_L) > 0):
                        for BG_Circle_Info_Old in BG_Circle_Info_L:
                            BG_X_Pix_Old = BG_Circle_Info_Old[0]
                            BG_Y_Pix_Old = BG_Circle_Info_Old[1]
                            BG_R_Pix_Old = BG_Circle_Info_Old[2]
                            Dis_BG_to_BG = math.sqrt(((BG_X_Pix - BG_X_Pix_Old)**2) + ((BG_Y_Pix - BG_Y_Pix_Old)**2))
                            BG_Total_Reach = Dis_BG_to_BG - BG_R - BG_R_Pix_Old
                            if (BG_Total_Reach <= 0):
                                BG_Circle_Overlap_Bool = True

                    # Make sure the background circle does not intersect the visible extent of the galaxy;
                    # X-ray objects inside the galaxy's extent are therefore disregarded
                    if ((Dis_GC - R_Phys - BG_R) > 0):
                        for Obj_S in Obj_L:
                            # Split the X, Y and R out of a region string such as "circle(5330.96623132,5333.51369932,233.272357724)"
                            Cur_X = Obj_S.split(",")[0]    # e.g. "circle(5330.96623132"
                            Cur_X_R = Cur_X.split('(')[1]  # e.g. "5330.96623132"
                            Cur_Y = Obj_S.split(",")[1]    # e.g. "5333.51369932"
                            Cur_R = Obj_S.split(",")[2]    # e.g. "233.272357724)"
                            Cur_R_R = Cur_R.split(')')[0]  # e.g. "233.272357724"
                            Cur_X_N = float(Cur_X_R)  # X coordinate of the X-ray object region in pixels
                            Cur_Y_N = float(Cur_Y)    # Y coordinate of the X-ray object region in pixels
                            Cur_R_N = float(Cur_R_R)  # radius of the X-ray object region in pixels

                            # Dis_Obj: float, distance from the background circle to the current object
                            Dis_Obj = math.sqrt(((BG_X_Pix - Cur_X_N)**2) + ((BG_Y_Pix - Cur_Y_N)**2))
                            # Check whether the background circle contains or touches the object
                            if ((Dis_Obj - Cur_R_N - BG_R) <= 0):
                                Obj_B = False

                        # Accept the circle only if it intersects no object region and no previously accepted background circle
                        if ((Obj_B == True) and (BG_Circle_Overlap_Bool == False)):
                            # Use the dmlist CIAO tool to count the events in the background circle, with an
                            # energy filter of 0.3-10 keV applied; this may allow back-illuminated and
                            # front-illuminated chips to be treated the same (if not, both cases must be handled).
                            # Command-line equivalent: dmlist "acis_evt2.fits[sky=rotbox(4148,4044,8,22,44.5)]" counts
                            Dm_Out = dmlist(infile=str(evtfname) + "[sky=circle(" + str(BG_X_Pix) + "," + str(BG_Y_Pix) + "," + str(BG_R) + "),energy=300:10000]", opt='counts', outfile="", verbose=2)
                            Num_Counts_S = Dm_Out.split('\n')[9]  # Num_Counts_S: str, number of counts in the background circle
                            Num_Counts = float(Num_Counts_S)      # Num_Counts: float, number of counts
                            BG_Ratio = Num_Counts / Num_BG_Pix    # BG_Ratio: float, background of the observation
                            Background_L.append(BG_Ratio)
                            Cur_BG_Circle_Info = [BG_X_Pix, BG_Y_Pix, BG_R, Chip_ID_Test]
                            BG_Circle_Info_L.append(Cur_BG_Circle_Info)
                            if (len(Background_L) == 3):
                                print "List Done ! ! ! ! ! ! ! ! !"
                                List_Done_Bool = True

print "Background_L ", Background_L
print "BG_Circle_Info_L Final", BG_Circle_Info_L
if (List_Done_Bool == True):
    BG_Ratio_Avg = np.average(Background_L)
    return BG_Ratio_Avg
if (List_Done_Bool == False):
    # No position was found for a background circle that avoids both the visible extent
    # of the galaxy and every detected X-ray object
    return "None_Found"
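# The acceptance logic above applies the same geometric test three times: against the
# galaxy's visible extent, against every detected X-ray object region, and against the
# previously accepted background circles. The sketch below is a hypothetical helper, not
# part of the script; the function name and the numbers in the example are illustrative only.
import math

def circles_are_separated(x1, y1, r1, x2, y2, r2):
    """Return True if the two circles neither touch nor overlap."""
    # Centre-to-centre distance minus both radii; a positive result means a gap remains.
    return math.sqrt((x1 - x2)**2 + (y1 - y2)**2) - r1 - r2 > 0

# Example with made-up numbers: a candidate background circle of radius 30 px at
# (4200.0, 4100.0) tested against a galaxy of radius 233.3 px centred at (5331.0, 5333.5).
print(circles_are_separated(4200.0, 4100.0, 30.0, 5331.0, 5333.5, 233.3))  # True: no overlap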
__author__ = 'Jakub Wojtanek, [email protected]'

from astroquery.ned import Ned
import unittest

result_table = Ned.query_object("NGC 6720")
result_table2 = Ned.get_table("NGC 6720", table='diameters')

for k in result_table:
    print k

print result_table.keys()
print result_table2.keys()
print result_table['RA(deg)']
print result_table['DEC(deg)']
print result_table['Magnitude and Filter']
print result_table['Distance (arcmin)']
print result_table['Diameter Points']
print result_table2['Major Axis']
print result_table2['Minor Axis']
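# The snippet above imports unittest without using it. A minimal sketch of how the same
# query could be turned into actual test cases is shown below; the class, test names and
# assertions are assumptions, the column names follow the snippet above (they may differ
# in other astroquery releases), and running it requires network access to NED.
import unittest
from astroquery.ned import Ned

class TestNGC6720Query(unittest.TestCase):

    def setUp(self):
        # One NED query per test case; NGC 6720 is the Ring Nebula
        self.result_table = Ned.query_object("NGC 6720")

    def test_has_one_row(self):
        # NED should resolve NGC 6720 to a single entry
        self.assertEqual(len(self.result_table), 1)

    def test_coordinate_columns_present(self):
        # The coordinate columns used by the snippet above should be present
        for column in ('RA(deg)', 'DEC(deg)'):
            self.assertIn(column, self.result_table.keys())

if __name__ == '__main__':
    unittest.main()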