Example 1
    def __init__(self, ID: int, sectors: np.ndarray, search_radius: int = 10):
        """
        Queries TIC for sources near the target and obtains a cutout
        of the pixels enclosing the target.
        Args:
            ID (int): TIC ID of the target.
            sectors (numpy array): Sectors in which the target
                                   has been observed.
            search_radius (int): Number of pixels from the target
                                 star to search.
        """
        self.ID = ID
        self.sectors = sectors
        self.search_radius = search_radius
        self.N_pix = 2 * search_radius + 2
        # query TIC for nearby stars
        pixel_size = 20.25 * u.arcsec
        df = Catalogs.query_object("TIC" + str(ID),
                                   radius=search_radius * pixel_size,
                                   catalog="TIC")
        new_df = df["ID", "Tmag", "ra", "dec", "mass", "rad", "Teff", "plx"]
        stars = new_df.to_pandas()
        self.stars = stars

        TESS_images = []
        col0s, row0s = [], []
        pix_coords = []
        # for each sector, get FFI cutout and transform RA/Dec into
        # TESS pixel coordinates
        for j, sector in enumerate(sectors):
            Tmag = stars["Tmag"].values
            ra = stars["ra"].values
            dec = stars["dec"].values
            cutout_coord = SkyCoord(ra[0], dec[0], unit="deg")
            cutout_hdu = Tesscut.get_cutouts(cutout_coord,
                                             size=self.N_pix,
                                             sector=sector)[0]
            cutout_table = cutout_hdu[1].data
            hdu = cutout_hdu[2].header
            wcs = WCS(hdu)
            TESS_images.append(np.mean(cutout_table["FLUX"], axis=0))
            col0 = cutout_hdu[1].header["1CRV4P"]
            row0 = cutout_hdu[1].header["2CRV4P"]
            col0s.append(col0)
            row0s.append(row0)

            pix_coord = np.zeros([len(ra), 2])
            for i in range(len(ra)):
                # np.asscalar was removed in recent NumPy; use float() instead
                RApix = float(wcs.all_world2pix(ra[i], dec[i], 0)[0])
                Decpix = float(wcs.all_world2pix(ra[i], dec[i], 0)[1])
                pix_coord[i, 0] = col0 + RApix
                pix_coord[i, 1] = row0 + Decpix
            pix_coords.append(pix_coord)

        self.TESS_images = TESS_images
        self.col0s = col0s
        self.row0s = row0s
        self.pix_coords = pix_coords
        return
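A minimal usage sketch; the enclosing class is not shown in this snippet, so `TargetCutouts` below is a placeholder name and the attribute access simply mirrors what `__init__` sets above (assumes `numpy` as `np`):

# Hypothetical class name; illustrative only.
target = TargetCutouts(ID=261136679, sectors=np.array([1]), search_radius=10)
print(target.stars[['ID', 'Tmag']].head())   # nearby TIC sources (pandas DataFrame)
print(target.TESS_images[0].shape)           # mean FFI cutout for the first sector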
Example 2
def get_star_info(IDnumber):
    tic = Catalogs.query_object("TIC {0}".format(IDnumber),
                                radius=0.0001,
                                catalog="TIC")
    star = tic[np.argmin(tic["dstArcSec"])]
    tic_ID = int(star["ID"])
    tic_ra = float(star["ra"])
    tic_dec = float(star["dec"])
    return tic_ID, tic_ra, tic_dec
Example 3
def get_ID_from_ID(id_type, ID, new_id_type):
    if id_type == 'TIC':
        query_string = 'tic ' + str(ID)
        obs_table = Catalogs.query_object(query_string,
                                          radius=0.002 * u.deg,
                                          catalog='TIC')
        obs_table = obs_table[obs_table['ID'] == str(ID)]
        ra = obs_table['ra'][0]
        dec = obs_table['dec'][0]
    else:
        # Only TIC lookups are handled; fail loudly rather than returning undefined ra/dec.
        raise ValueError('Unsupported id_type: {}'.format(id_type))
    return (ra, dec)
Example 4
def find_tic(target_ID, from_file = True):
    if from_file == True:
        try:
            table_data = Table.read("Original_BANYAN_XI-III_xmatch_TIC.csv" , format='ascii.csv')
            #table_data = Table.read("Original VCA Members.csv" , format='ascii.csv') 
            #table_data = Table.read("Original Argus members info.csv" , format='ascii.csv')
            
            # Obtains ra and dec for object from target_ID
            i = list(table_data['main_id']).index(target_ID)
            ra = table_data['ra'][i]
            dec = table_data['dec'][i]
            tic = table_data['MatchID'][i]
        except:
            try:
                TIC_table = Catalogs.query_object(target_ID, catalog = "TIC")
                ra = TIC_table['ra'][0]
                dec = TIC_table['dec'][0]
                tic = TIC_table['ID'][0] 
            except:
                table_data = Table.read('BANYAN_XI-III_combined_members.csv')
                i = list(table_data['main_id']).index(target_ID)
                ra = table_data['ra'][i]
                dec = table_data['dec'][i]
                object_coord = SkyCoord(ra, dec, unit="deg")
                TIC_table = Catalogs.query_region(object_coord, radius = '1 deg', catalog = 'TIC')
                tic = TIC_table['ID'][0]
    else:
        # Find ra, dec and tic # via the TIC (typically based on Gaia DR2)
        try:
            TIC_table = Catalogs.query_object(target_ID, catalog = "TIC")
            ra = TIC_table['ra'][0]
            dec = TIC_table['dec'][0]
            tic = TIC_table['ID'][0] 
        except:
            table_data = Table.read('BANYAN_XI-III_combined_members.csv')
            i = list(table_data['main_id']).index(target_ID)
            ra = table_data['ra'][i]
            dec = table_data['dec'][i]
            object_coord = SkyCoord(ra, dec, unit="deg")
            TIC_table = Catalogs.query_region(object_coord, radius = '1 deg', catalog = 'TIC')
            tic = TIC_table['ID'][0]
    
    return ra, dec, tic
Example 5
def app_catalogs():
    global blc
    global trc
    global im
    if blc is None or trc is None or im is None: load_image()

    searchString = '{} {}'.format(np.mean([blc[0], trc[0]]),
                                  np.mean([blc[1], trc[1]]))
    catalogData = Catalogs.query_object(searchString,
                                        radius=0.2,
                                        catalog="GAIAdr2")

    # get plot
    p = make_base_bokeh()

    source = ColumnDataSource(catalogData.to_pandas())
    p.scatter('ra',
              'dec',
              source=source,
              legend="GAIA DR2",
              alpha=0.7,
              size=10)

    # Add hover tooltip for GAIA data
    tooltip = [("RA", "@ra"), ("Dec", "@dec"), ("Desig.", "@designation"),
               ("parallax", "@parallax"),
               ("phot_g_mean_mag", "@phot_g_mean_mag")]
    p.add_tools(HoverTool(tooltips=tooltip))

    p.legend.click_policy = "hide"

    # Table data
    columns = []
    for col in catalogData.to_pandas().columns:
        if col not in ('ra', 'dec', 'designation', 'parallax'):
            continue
        columns.append(TableColumn(field=col, title=col))
    data_table = DataTable(source=source,
                           columns=columns,
                           width=1200,
                           height=280)

    # Fails to load anything
    # script, div_dict = components({'plot': p, 'table': widgetbox(data_table)})
    # return render_template('catalogs.html', script=script, div=div_dict)

    # Fails to load table
    # script1, div1 = components(p)
    # script2, div2 = components(widgetbox(data_table))
    # return render_template('catalogs.html', script1=script1, div1=div1, script2=script2, div2=div2)

    # No table
    script, div = components(p)
    return render_template('base.html', script=script, plot=div)
Example 6
def coords_from_tic(tic):
    """Finds the RA, Dec, and magnitude for a given TIC source_id.

    Returns
    -------
    coords : tuple
        (RA, Dec) position [degrees].
    tmag : float
        TESS apparent magnitude.
    """

    ticData = Catalogs.query_object('tic' + str(tic), radius=.0001, catalog="TIC")
    return ([ticData['ra'].data[0], ticData['dec'].data[0]],
            [ticData['Tmag'].data[0]],
            int(ticData['version'].data[0]),
            ticData['contratio'].data[0])
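A short usage sketch (the TIC ID below is illustrative only; unpacking mirrors the return order above):

coords, tmag, version, contratio = coords_from_tic(261136679)
ra, dec = coords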
Example 7
def get_coord(tic):
	"""
	Get TIC corrdinates

	Returns
	-------
	TIC number
	"""
	try:
		catalog_data = Catalogs.query_object(objectname="TIC"+tic, catalog="TIC")
		ra = catalog_data[0]["ra"]
		dec = catalog_data[0]["dec"]
		return ra, dec
	except:
		print("ERROR: could not resolve coordinates for this TIC")
Example 8
def tic_query(integer):
    tic_ID = "TIC " + str(integer)
    bhol = Catalogs.query_object(tic_ID, catalog="TIC")
    gaia_ID = bhol[0]["GAIA"]
    radius = bhol[0]["rad"]
    temperature = bhol[0]["Teff"]
    Tmag = bhol[0]["Tmag"]
    mass = bhol[0]["mass"]
    ra = bhol[0]["ra"]
    dec = bhol[0]["dec"]
    distance = bhol[0]["d"]
    TESS_info = [
        int(gaia_ID), radius, temperature, Tmag, mass, ra, dec, distance
    ]
    return TESS_info
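A short usage sketch (the TIC ID is illustrative; the unpacking mirrors the order of TESS_info above):

gaia_id, r_star, teff, tmag, m_star, ra, dec, dist = tic_query(261136679)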
Example 9
def loadSingleSector(ticId, sector, size_pixels):
    starname = "TIC %08i" % (ticId)

    catalogData = Catalogs.query_object(starname,
                                        radius=1 / 60.,
                                        catalog="TIC")
    ra = catalogData[0]['ra']
    dec = catalogData[0]['dec']
    coord = SkyCoord(ra, dec, unit='deg')

    data, hdr, wcshdr = getTessCutout(coord, size_pixels, sector)
    time = data['TIME']
    cube = getTargetPixelArrayFromFits(data, hdr)

    return time, cube, hdr, wcshdr
Example 10
def get_tic_info(star_name="TIC 1234647",
                 radius_deg=.10,
                 maglimit=14.5,
                 cols=[
                     'ID', 'Tmag', 'Jmag', 'Teff', 'logg', 'ra', 'dec',
                     'TWOMASS', 'dstArcSec'
                 ]):

    catalogData = Catalogs.query_object(star_name,
                                        radius=radius_deg,
                                        catalog="TIC")
    want = catalogData['Tmag'] <= maglimit
    # Always keep the first two rows regardless of magnitude; the query returns results sorted by angular distance.
    want[0:2] = [True, True]

    return (catalogData[want][cols])
Example 11
    def query_tic(self, ticname):
        """
        Query the TESS Input Catalog for data
        """
        name = ticname.replace('-', ' ').replace('_', ' ')
        df = Catalogs.query_object(name, radius=0.0003,
                                   catalog="TIC").to_pandas()[0:1]
        data = {}
        data['ra'] = df.ra.values[0]
        data['dec'] = df.dec.values[0]
        data['pmra'] = df.pmRA.values[0]
        data['pmdec'] = df.pmDEC.values[0]
        data['px'] = df.plx.values[0]
        data['epoch'] = 2451545.0
        data['rv'] = 0.
        return data
Example 12
def tic_stellar_info(target_ID,
                     from_file=False,
                     filename='BANYAN_XI-III_members_with_TIC.csv'):
    if from_file == True:
        table_data = Table.read(filename, format='ascii.csv')

        # Obtains ra and dec for object from target_ID
        i = list(table_data['main_id']).index(target_ID)
        #camera = table_data['S{}'.format(sector)][i]
        tic = table_data['MatchID'][i]
        r_star = table_data['Stellar Radius'][i]
        T_eff = table_data['T_eff'][i]

    else:
        TIC_table = Catalogs.query_object(target_ID, catalog="TIC")
        tic = TIC_table['ID'][0]
        #        print(TIC_table[0])
        r_star = TIC_table['rad'][0]
        T_eff = TIC_table['Teff'][0]

    return tic, r_star, T_eff
Example 13
def get_gaia_data_from_tic(tic):
    '''
    Get Gaia parameters

    Returns
    -----------------------
    GaiaID, Gaia_mag
    '''
    # Get the Gaia sources
    result = Catalogs.query_object('TIC' + tic, radius=.005, catalog="TIC")
    IDs = result['ID'].data.data
    k = np.where(IDs == tic)[0][0]
    GAIAs = result['GAIA'].data.data
    Gaiamags = result['GAIAmag'].data.data

    GAIA_k = GAIAs[k]
    Gaiamag_k = Gaiamags[k]

    if GAIA_k == '':
        GAIA_k = np.nan
    return GAIA_k, Gaiamag_k
Example 14
    def query_TIC(self, ID=None, radius = 10.0*u.arcsec):
        
        key = 'TIC'
        
        if ID is None:
            ID = self.IDs['TIC']
        
        tbl = Table(names = ('ID',), dtype = (str,))
        
        for i, id in tqdm(enumerate(ID)):
            if not isinstance(id, str):
                add_empty_row(tbl)
            else:
                job = Catalogs.query_object(objectname=id, catalog='TIC', objType='STAR', radius=radius)
                
                ridx = job['ID'] == str(id.replace('TIC ', ''))
                if len(job[ridx]) > 0:
                    tbl = avstack([tbl, job[ridx][0]])
                else:
                    add_empty_row(tbl)
        
        self.TIC = tbl

        if not hasattr(self, 'simbad'):
            self.query_simbad(ID)  
            
        for i in range(len(self.IDs)): 
            if len(self.IDs['2MASS'][i])==0 and (self.TIC['TWOMASS'][i] != 0):
                self.IDs['2MASS'][i] = '2MASS J'+self.TIC['TWOMASS'][i]
            
            if len(self.IDs['HIP'][i])==0 and (self.TIC['HIP'][i] != 0):
                self.IDs['HIP'][i] = 'HIP '+self.TIC['HIP'][i]
            
            if len(self.IDs['TYC'][i])==0 and (self.TIC['TYC'][i] != 0):
                self.IDs['TYC'][i] = 'TYC '+self.TIC['TYC'][i]
            
            if len(self.IDs['KIC'][i])==0 and (self.TIC['KIC'][i] != 0):
                self.IDs['KIC'][i] = 'KIC '+self.TIC['KIC'][i]
        
        return self.TIC
Example 15
def name_to_tic(name):
    """
    Function to convert common name to TIC ID. Queries the MAST for TIC entry
    nearest to known position for common name.

    Parameters
    ----------
    name : str
       Common name to be converted to TIC.

    Returns
    -------
    tic : int
       TIC ID of closest match to input name's position from TIC on MAST.
    """
    if not isinstance(name, str):
        raise ValueError('Name must be a string.')

    cat = Catalogs.query_object(name, radius=0.02, catalog="TIC")
    tic = int(cat[0]['ID'])
    
    return tic
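Illustrative usage (any name resolvable by MAST works; the target below is just an example):

tic = name_to_tic("pi Mensae")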
Example 16
def _queryTIC(ID, radius=20):
    """ Query TIC for bp-rp value
    
    Queries the TIC at MAST to search for a target ID to return bp-rp value. The
    TIC is already cross-matched with the Gaia catalog, so it contains a bp-rp 
    value for many targets (not all though).
    
    For some reason it does a cone search, which may return more than one 
    target. In which case the target matching the ID is found in the returned
    list. 
    
    Returns None if the target does not have a GDR3 ID.
    
    Parameters
    ----------
    ID : str
        The TIC identifier to search for.
    radius : float, optional
        Radius in arcseconds to use for the sky cone search. Default is 20".
    
    Returns
    -------
    bp_rp : float
        Gaia bp-rp value from the TIC.   
    """

    print('Querying TIC for Gaia bp-rp values.')
    job = Catalogs.query_object(objectname=ID,
                                catalog='TIC',
                                objType='STAR',
                                radius=radius * units.arcsec)

    if len(job) > 0:
        idx = job['ID'] == str(ID.replace('TIC', '').replace(' ', ''))
        return float(
            job['gaiabp'][idx] -
            job['gaiarp'][idx])  #This should crash if len(result) > 1.
    else:
        return None
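A short usage sketch (the TIC identifier is illustrative; None is returned when the cone search finds no match):

bp_rp = _queryTIC('TIC 141914082', radius=20)
if bp_rp is not None:
    print('bp-rp =', bp_rp)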
Example 17
    def query_information(self):
        """Queries the TIC for basic stellar parameters.
        """
        result = Catalogs.query_object('tic' + str(int(self.tic)),
                                       radius=0.0001,
                                       catalog="TIC")
        # 2MASS magnitudes
        self.jmag = result['Jmag'][0]
        self.hmag = result['Hmag'][0]
        self.kmag = result['Kmag'][0]
        self.jmag_err = result['e_Jmag'][0]
        self.hmag_err = result['e_Hmag'][0]
        self.kmag_err = result['e_Kmag'][0]

        # V magnitude
        self.vmag = result['Vmag'][0]
        self.vmag_err = result['e_Vmag'][0]

        # Gaia magnitudes
        self.gaia_bp = result['gaiabp'][0]
        self.gaia_rp = result['gaiarp'][0]
        self.gaia_g = result['GAIAmag'][0]
        self.gaia_bp_err = result['e_gaiabp'][0]
        self.gaia_rp_err = result['e_gaiarp'][0]
        self.gaia_g_err = result['e_GAIAmag'][0]

        # GAIA proper motions
        self.pmra = result['pmRA'][0]
        self.pmdec = result['pmDEC'][0]

        # GAIA parallax
        self.plx = result['plx'][0]

        # Stellar effective temperature and luminosity
        self.teff = result['Teff'][0]
        self.e_teff = result['e_Teff'][0]
        self.lum = result['lum'][0]
        self.e_lum = result['e_lum'][0]
Example 18
    def from_cone(cls,
                  center,
                  radius=3*u.arcmin,
                  magnitudelimit=20,
                  **kw):
        '''
        Create a Constellation from a cone search of the sky,
        characterized by a positional center and a radius from it.

        Parameters
        ----------
        center : SkyCoord object, or str
            The center around which the query will be made.
            If a str, SkyCoord will be resolved with SkyCoord.from_name
        radius : float, with units of angle
            The angular radius for the query.
        magnitudelimit : float
            The maximum magnitude to include in the download.
            (This is explicitly thinking UV/optical/IR, would
            need to change to flux to be able to include other
            wavelengths.)
        '''

        # convert the center into astropy coordinates
        center = parse_center(center)


        # run the query
        print('querying astroquery, centered on {} with radius {}, for G<{}'.format(center, radius, magnitudelimit))
        table = Catalogs.query_object(center, radius=radius, catalog=cls.catalog)

        # store the search parameters in this object
        c = cls(cls.standardize_table(table))
        c.standardized.meta['center'] = center
        c.standardized.meta['radius'] = radius
        c.standardized.meta['magnitudelimit'] = magnitudelimit

        return c
Example 19
def query(message, catalog, star_id):
    message.react('+1')
    try:
        catalogData = Catalogs.query_object(star_id,
                                            catalog=catalog,
                                            radius=0.01)
        if catalog == 'Gaia':
            df = catalogData['source_id', 'ra', 'dec', 'parallax',
                             'parallax_error', 'phot_g_mean_mag',
                             'distance'].to_pandas()
            response = "I have found *{}* stars within a 0.01 degree radius of {}. \n".format(
                len(catalogData), star_id)
            response += "```" + df.to_string() + "```"
        elif catalog == 'TIC':
            df = catalogData['ID', 'ra', 'dec', 'HIP', 'TYC', 'UCAC',
                             'TWOMASS', 'SDSS', 'ALLWISE', 'GAIA', 'APASS',
                             'KIC'].to_pandas()
            response = "I have found *{}* stars within a 0.01 degree radius of {}. \n".format(
                len(catalogData), star_id)
            response += "```" + df.to_string() + "```"
    except:
        response = "Could not resolve query"

    message.reply(response, in_thread=True)
Example 20
'''

# %%
'''
## Exploring the variable star

Now we will look more closely at the variable star we can see in the animation. 

### Querying the TESS Input Catalog

To start with we will overlay the nearby TIC sources onto the image so we can identify the star in question. To do this we will use the `astroquery.mast` Catalogs class to search the TIC.
'''

# %%
sources = Catalogs.query_object(catalog="TIC",
                                objectname=f"TIC {tic_id}",
                                radius=10 * u.arcmin)
sources = sources[sources["Tmag"] < 12]
print(f"Number of sources: {len(sources)}")
print(sources)

# %%
'''
### Overlaying the sources on a single cutout image

We will get the WCS information associated with our cutout so that we can make a WCS-aware plot, and identify a single cutout image to show. Then we display the image and sources together, and label the sources with their row number in the catalog table.
'''

# %%
cutout_wcs = WCS(cutout_hdu[2].header)
cutout_img = cutout_table["FLUX"][start]
Example 21
def raw_FFI_lc_download(target_ID,
                        sector,
                        plot_tpf=False,
                        plot_lc=False,
                        save_path='',
                        from_file=False):
    """
    Downloads and returns 30min cadence lightcurves based on SAP analysis of 
    the raw FFIs
    """
    if from_file == True:
        with open('Sector_1_target_filenames.pkl', 'rb') as f:
            target_filenames = pickle.load(f)
        f.close()
    else:
        target_filenames = {}

        # Find ra, dec and tic # via the TIC (typically based on Gaia DR2)
        TIC_table = Catalogs.query_object(target_ID, catalog="TIC")
        ra = TIC_table['ra'][0]
        dec = TIC_table['dec'][0]
        tic = TIC_table['ID'][0]

        object_coord = SkyCoord(ra, dec, unit="deg")
        manifest = Tesscut.download_cutouts(object_coord, [11, 11],
                                            path='./TESS_Sector_5_cutouts')
        #        sector_info = Tesscut.get_sectors(object_coord)
        if len(manifest['Local Path']) == 1:
            target_filenames[target_ID] = manifest['Local Path'][0][2:]
        elif len(manifest['Local Path']) > 1:
            target_filenames[target_ID] = []
            for filename in manifest['Local Path']:
                target_filenames[target_ID].append(filename[2:])
        else:
            print(
                'Cutout for target {} can not be downloaded'.format(target_ID))

    if type(target_filenames[target_ID]) == str:
        filename = target_filenames[target_ID]
    else:
        filename = target_filenames[target_ID][0]

    # Load tpf
    tpf_30min = lightkurve.search.open(filename)

    # Attach target name to tpf
    tpf_30min.targetid = target_ID

    # Create a median image of the source over time
    median_image = np.nanmedian(tpf_30min.flux, axis=0)

    # Select pixels which are brighter than the 85th percentile of the median image
    aperture_mask = median_image > np.nanpercentile(median_image, 85)

    # Plot and save tpf
    if plot_tpf == True:
        tpf_30min.plot(aperture_mask=aperture_mask)
    #tpf_plot.savefig(save_path + '{} - Sector {} - tpf plot.png'.format(target_ID, tpf.sector))
    #plt.close(tpf_plot)

    # Convert to lightcurve object
    lc_30min = tpf_30min.to_lightcurve(aperture_mask=aperture_mask)
    #    lc_30min = lc_30min[(lc_30min.time < 1346) | (lc_30min.time > 1350)]
    if plot_lc == True:
        lc_30min.scatter()
        plt.title('{} - 30min FFI base lc'.format(target_ID))
        plt.xlabel("Time - 2457000 (BTJD days)")
        plt.ylabel("Relative flux")
        plt.show()

    return lc_30min
Example 22
def brew_LATTE(tic, indir, syspath, transit_list, simple, BLS, model, save, DV, sectors, sectors_all, alltime, allflux, allflux_err, all_md, alltimebinned, allfluxbinned, allx1, allx2, ally1, ally2, alltime12, allfbkg, start_sec, end_sec, in_sec, upper_axis_lim_final, lower_axis_lim_final, tessmag, teff, srad, ra, dec, input_numax, input_analysis_window, url_list, args):
	'''
	This function combines all the results from LATTE and calls all the different functions -
	it makes the plots, saves them, runs the BLS model and the pyaneti model before making a PHT DV report (if this option is selected).

	Parameters
	----------
	tic  :   str
		target TIC ID
	indir  :  str
		path to directory where all the plots and data will be saved.
	transit_list   : list
		list of the transit-like events
	simple   :   boolean
		whether or not to run the simple version
	BLS   :   boolean
		whether or not to run the BLS routine
	model   :   boolean
		whether or not to model the transit using pyaneti
	save   :	boolean
		whether or not to save the figures and data
	DV   :   boolean
		whether or not to write and save a DV report
	sectors_all  :   list
		all the sectors in which the target has been/ will be observed
	alltime  :  list
		times (not binned)
	allflux  :  list
		normalized flux (not binned)
	allflux_err  :  list
		normalized flux errors (not binned)
	all_md  :  list
		times of the momentum dumps
	alltimebinned  :  list
		binned time
	allfluxbinned  :  list
		normalized binned flux
	allx1  :  list
		CCD column position of target’s flux-weighted centroid. In x direction
	allx2  :  list
		The CCD column local motion differential velocity aberration (DVA), pointing drift, and thermal effects. In x direction
	ally1  :  list
		CCD column position of target’s flux-weighted centroid. In y direction
	ally2  :  list
		The CCD column local motion differential velocity aberration (DVA), pointing drift, and thermal effects. In y direction
	alltime12  :  list
		time used for the x and y centroid position plotting
	allfbkg  :  list
		background flux
	start_sec  :  list
		times of the start of the sector
	end_sec  :  list
		times of the end of the sector
	in_sec  :  list
		the sectors for which data was downloaded
	tessmag  :  list
		TESS magnitude of the target star
	teff  :  float
		effective temperature of the target star (K)
	srad  :  float
		radius of the target star (solar radii)
	ra  :  float
		the right ascension of the target star
	dec  :  float
		the declination of the target star

	'''

	# -------------------
	# SAVE THE DATA FILES
	# -------------------
	
	if (save == True) or (DV == True):
		save = True
		# if this folder doesn't exist then create it. This is the folder where the images, data and reports for each TIC ID will be stored.
		newpath = '{}/{}'.format(indir,tic)

		if not exists(newpath):
			os.makedirs(newpath)

		# save the data used as a text file - these often come in use later for a quick re-analysis.
		with open('{}/{}/{}_data.txt'.format(indir, tic, tic), "w") as f:

			# get rid of nan values first using a mask
			good_mask = np.isfinite(np.array(alltime)) * np.isfinite(np.array(allflux)) * np.isfinite(np.array(allflux_err))
			alltime_ar = np.array(alltime)[good_mask]
			allflux_ar = np.array(allflux)[good_mask]
			allflux_err_ar = np.array(allflux_err)[good_mask]

			# save
			writer = csv.writer(f, delimiter='\t')
			writer.writerow(['time', 'flux', 'flux_err'])
			writer.writerows(zip(alltime_ar,allflux_ar,allflux_err_ar))

		'''
		if the modelling option was also chosen, save another data file with slightly different formatting to be called by Pyaneti.
		Pyaneti requires a very specific data format.

		Furthermore, in order for Pyaneti to run more efficiently (it has a Bayesian backend which scales with the number of data points)
		we create a cutout of the times around the times of the marked transit events.
		'''

		if len(transit_list) != 0:
			if model == True:
				with open('{}/{}/{}_data_pyaneti.dat'.format(indir, tic, tic), "w") as f:
					writer = csv.writer(f, delimiter='\t')
					writer.writerow(['#time', 'flux', 'flux_err'])

				# If the dip separations are too small, then don't create cut outs and save the whole dataset
				if (len(transit_list) > 1) and ((transit_list[1] - transit_list[0]) < 2): # if there are LOTS of transit events on short period (if so it's probably a TOI but let's keep it here as a condition)
					with open('{}/{}/{}_data_pyaneti.dat'.format(indir, tic, tic), "a") as f:
						writer = csv.writer(f, delimiter='\t')
						writer.writerows(zip(alltime_ar,allflux_ar,allflux_err_ar)) # save all the data

				# else create a cut out of the data around the time of the transit events
				else:
					for transit in transit_list:
						# save the data
						# get rid of nan values first - this is used for the pyaneti code
						pyaneti_mask = (alltime_ar > (transit - 1)) * (alltime_ar < (transit + 1))

						with open('{}/{}/{}_data_pyaneti.dat'.format(indir, tic, tic), "a") as f:
							writer = csv.writer(f, delimiter='\t')
							writer.writerows(zip(alltime_ar[pyaneti_mask],allflux_ar[pyaneti_mask],allflux_err_ar[pyaneti_mask]))


	# -----------------------------------
	#			START PLOTTING		  	 - calls functions from LATTEutils.py
	# -----------------------------------

	if len(transit_list) != 0: # this is always the case unless the asteroseismic-only option was chosen
		# create a plot of the full lightcurves with the momentum dumps (MDs) marked and a zoom-in of the marked transits
		# this plot is saved but not shown (as it is already shown in the interactive part of the code)
		utils.plot_full_md(tic, indir, alltime,allflux,all_md,alltimebinned,allfluxbinned, transit_list, upper_axis_lim_final, lower_axis_lim_final, args)

		# Get a list of the sectors that have transit marked in them
		# this is so that we no longer have to loop through all of the sectors, and can focus on the ones which are important.
		transit_sec = utils.transit_sec(in_sec, start_sec, end_sec, transit_list)

		# -----------
		# plot how the centroids moved during the transit event
		utils.plot_centroid(tic, indir,alltime12, allx1, ally1, allx2, ally2, transit_list, args)
		# plot the background flux at the time of the transit event.
		utils.plot_background(tic, indir,alltime, allfbkg, transit_list, args)

		print ("Centroid and background plots... done.")
		# -----------

		# if the 'simple' option is chosen in the GUI, then the code will end here - this is designed to provide a quick analysis requiring no TPFs.
		if simple == True:
			print ("Simple option was selected, therefore end analysis here.")
			sys.exit('')

		# -----------

		# call function to extract the Target Pixel File information
		# this is needed in order to extract the LCs in different aperture sizes.
		# the data is extracted using the open source Lightkurve package, which has a built-in function to extract LCs using different aperture sizes
		#TESS_unbinned_t_l, TESS_binned_t_l, small_binned_t_l, TESS_unbinned_l, TESS_binned_l, small_binned_l, tpf_list = utils.download_tpf_lightkurve(indir, transit_list, sectors, tic)

		print ("\n Start downloading of the target pixel files - this can take a little while (up to a minute) as the files are large \n")

		X1_list, X4_list, oot_list, intr_list, bkg_list, apmask_list, arrshape_list, t_list, T0_list, tpf_filt_list,TESS_unbinned_t_l, TESS_binned_t_l, small_binned_t_l, TESS_unbinned_l, TESS_binned_l, small_binned_l, tpf_list = utils.download_tpf(indir, transit_sec, transit_list, tic, url_list)

		# if the TPF wasn't corrupt then make the TPF files (only very occasionally corrupt, but we don't want the code to crash if it is)
		if (TESS_unbinned_t_l[0] != -111):

			tpf_corrupt = False
			# plot the LCs using two different aperture sizes.
			utils.plot_aperturesize(tic,indir,TESS_unbinned_t_l, TESS_binned_t_l, small_binned_t_l, TESS_unbinned_l, TESS_binned_l, small_binned_l, transit_list, args)

			print ("Aperture size plots... done.")
			# ------------

			'''
			Plot the average pixel brightness of the cut-out around the target star and the corresponding SDSS field of view.
			Both are oriented so that North is pointing upwards.
			The former also shows the nearby stars with TESS magnitude brighter than 17. Queried from GAIA using astroquery.
			The function returns the mass of the star (also output from astroquery)- this is a useful input for the Pyaneti modelling
			'''
			if args.mpi == False:
				test_astroquery, _, _, mstar, vmag, logg, plx, c_id = utils.plot_TESS_stars(tic,indir, transit_sec, tpf_list, args)

				if test_astroquery == -111:
					tessmag, teff, srad, mstar, vmag, logg, plx, c_id = utils.plot_TESS_stars_not_proj(tic,indir, transit_list, transit_sec, tpf_list, args)
					args.mpi = True

			else:
				test_astroquery, _, _, mstar, vmag, logg, plx, c_id = utils.plot_TESS_stars_not_proj(tic,indir, transit_list, transit_sec, tpf_list, args)

			# keep track of whether astroquery is working (sometimes the site is down and we don't want this to stop us from running the code)
			astroquery_corrupt = False

			if test_astroquery == -999:
				astroquery_corrupt = True
				print ("Star Aperture plots... failed.")

			else:
				print ("Star Aperture plots... done.")

			# ------------

			# Download the Target Pixel File using the raw MAST data - this comes in a different format than the TPFs extracted using Lightkurve
			# This data is then corrected using Principal Component Analysis in order to get rid of systematics.
			#X1_list, X4_list, oot_list, intr_list, bkg_list, apmask_list, arrshape_list, t_list, T0_list, tpf_filt_list = utils.download_tpf_mast(indir, transit_sec, transit_list, tic)

			# ------------

			'''
			plot the in and out of transit flux comparison.
			By default the images are NOT oriented north - this is because the reprojection takes longer to run and for a simple
			analysis to check whether the brightest pixel moves during the transit this is not required.
			The orientation towards north can be defined in the command line with '--north'.
			'''
			if args.north == True:
				utils.plot_in_out_TPF_proj(tic, indir, X4_list, oot_list, t_list, intr_list, T0_list, tpf_filt_list, tpf_list, args)
				print ("In and out of aperture flux comparison with reprojection... done. ")

			else:
				utils.plot_in_out_TPF(tic, indir, X4_list, oot_list, t_list, intr_list, T0_list, tpf_filt_list, args)
				print ("In and out of aperture flux comparison... done.")
			# ------------

			# For each pixel in the TPF, extract and plot a lightcurve around the time of the marked transit event.
			utils.plot_pixel_level_LC(tic, indir, X1_list, X4_list, oot_list, intr_list, bkg_list, tpf_list, apmask_list, arrshape_list, t_list, T0_list, args)
			print ("Pixel level LCs plot... done.")
			# ------------

		else:
			tpf_corrupt = True
			mstar = 1 # need to define mstar otherwise pyaneti will complain - just make it one as an approximation.
			tessmag = np.nan
			teff = np.nan
			srad = np.nan
			vmag = np.nan
			logg = np.nan
			plx = np.nan
			c_id = np.nan

			astroquery_corrupt = True

		# ------------
		# end of plots that require target pixel files
		# ------------

		# If more than one transit has been marked by the user, the LC is phase folded based on the separation of the first two marked peaks.
		# These plots are saved but do not feature in the DV report.
		if len (transit_list) > 1: # needs to know a period so can only do this if more than one transit has been marked.

			period = transit_list[1] - transit_list[0]
			t0 = transit_list[0] # time of the first marking

			# calculate the phase
			phased = np.array([-0.5+( ( t - t0-0.5*period) % period) / period for t in alltimebinned])

			fig, ax = plt.subplots(figsize=(5.55,5))
			ax.plot(phased, allfluxbinned, marker='.',color = 'k', alpha = 1, lw = 0, markersize = 4, label = 'None', markerfacecolor='k')

			#ax.plot(phased, allflux,marker='o',color = 'navy', alpha = 0.7, lw = 0, markersize = 2, label = 'binning = 7', markerfacecolor='white')
			plt.title("Phase folded LC")
			ax.set_xlabel("Phase (days)")
			ax.set_ylabel("Normalized Flux")
			plt.plot()


			if save == True:
				plt.savefig('{}/{}/{}_phase_folded.png'.format(indir, tic, tic), format='png')

			if args.noshow == False:
				plt.show()

			print ("Phase folded plot... done.")

		else:
			print ("\n Only one transit marked - therefore can't be phase folded. \n")

		# ------------

		'''
		Plot LCs of the six closest TESS target stars. This allows us to check whether the transit-like events
		also appear in other nearby LCs which would be a sign that this is caused by a background event.
		'''

		# get the tic IDs of the six nearest stars
		if args.FFI == False:
			ticids, distance, target_ra, target_dec = utils.nn_ticids(indir, transit_sec, tic)

			# download the data for these stars
			alltime_nn, allflux_nn, all_md_nn, alltimebinned_nn, allfluxbinned_nn,outtics,tessmag_list, distance = utils.download_data_neighbours(indir, transit_sec[0], ticids, distance)

			# plot the LCs
			utils.plot_nn(tic, indir,alltime_nn, allflux_nn, alltimebinned_nn, allfluxbinned_nn, transit_list, outtics, tessmag_list, distance, args)

		else:
			target_ra = ra
			target_dec = dec
			distance = None

		print ("Nearest neighbour plot... done.")
		# ------------

		# if the BLS option is chosen, a BLS search is run. The LCs are first detrended and smoothed using a moving average.
		# The corrected and uncorrected LCs are saved as a single plot for comparison and to verify that the correction worked well - saved but do not feature in the DV report.
		if BLS == True:
			print ("Running BLS algorithm...", end =" ")
			bls_stats1, bls_stats2 = utils.data_bls(tic, indir, alltime, allflux, allfluxbinned, alltimebinned, args)
			print ("done.")


	else:

		from astroquery.mast import Catalogs
		#plot the main LC with only one panel and no transit events marked
		utils.plot_full_md_notransits(tic, indir, alltime, allflux, all_md, alltimebinned, allfluxbinned, upper_axis_lim_final, lower_axis_lim_final, args)
		target_ra = ra
		target_dec = dec
		tpf_corrupt = False

		# get the star information that would otherwise come from the plot_TESS_stars function
		starName = "TIC " + str(tic)
		radSearch = 5/60 #radius in degrees

		# this function depends on astroquery working, and sometimes it doesn't.
		# for when it doesn't work (or simply can't connect to it), just skip plotting the other TESS stars.
		try:
			astroquery_corrupt = False
			catalogData = Catalogs.query_object(starName, radius = radSearch, catalog = "TIC")
		except:
			astroquery_corrupt = True
			print ("Currently cannot connect to Astroquery.")
			# return values that we know aren't real so that we can tell the code that the plotting didn't work
			return -999, -999, -999, 1, -999,-999,-999,-999

		# ra and dec of the target star
		ra = catalogData[0]['ra']
		dec = catalogData[0]['dec']

		# while we have the astroquery loaded, let's collect some other information about the star
		# these parameters can help us find out what type of star we have with just a glance

		vmag = catalogData['Vmag'][0] # V magnitude (this might be more useful than the TESS mag for things such as observing)
		logg = catalogData['logg'][0] # logg of the star
		mstar = catalogData['mass'][0] # mass of the star
		plx = catalogData['plx'][0]   # parallax

		# sometimes these values aren't accessible through astroquery - so we should just quickly check.
		if not np.isfinite(vmag): vmag = '--' # this is what will appear in the table of the report to indicate that it's unknown
		if not np.isfinite(logg): logg = '--'
		if not np.isfinite(mstar): mstar = '--'
		if not np.isfinite(plx): plx   = '--'

		# sometimes it's useful to know if the star has another name
		# check whether it was observed by one of these four large surveys

		catalogs = ['HIP', 'TYC', 'TWOMASS', 'GAIA']

		for cat in catalogs:
			c_id = str(catalogData[0][cat])
			if c_id != '--':
				cat_id = "{} {}".format(cat,c_id)
				break
			else:
				continue

		tessmag = catalogData['Tmag'][0]
		teff = catalogData['Teff'][0]
		srad = catalogData['rad'][0]
		c_id = c_id

	# ------------
	# period analysis
	# always make a periodogram? Question for later.
	print ("Periodogram plot...", end =" ")
	mass_ast, radius_ast, logg_ast, numax, deltanu = utils.plot_periodogram(tic, indir, alltime, allflux, teff, input_numax, input_analysis_window, args)
	print ("done.")

	# ------------
	# stellar evolutionary tracks
	print ("Evolutionary tracks plot...", end =" ")
	utils.eep_target(tic, indir, syspath, teff, srad, args)
	print ("done.")

	# ------------

	# SKIP FROM HERE....

	'''
	NOTE: CURRENTLY ONLY WORKS ON NORA'S COMPUTER - WILL BE AVAILABLE IN NEXT RELEASE SO PLEASE SKIP THIS PART OF THE CODE
	If the modelling option is selected (in the GUI), model the transit event using Pyaneti (Barragan et al 2018)
	which uses a Bayesian approach with MCMC sampling to best fit and model the transit.
	The code runs slightly differently depending on whether one or multiple transits have been marked.
	This is because with multiple transits the code has information about the possible orbital period.
	Need to ensure that the code has compiled correctly on the user's computer.

	Reason why it doesn't work elsewhere: the priors need to be set up very carefully, and this has not been tested enough to know
	it can be automated to work reliably. Also, this code requires a Fortran backend, which has not yet been included in LATTE.
	---> we're working on implementing this as it will be very useful.
	'''

	# First check if Pyaneti is installed...
	if os.path.exists("{}/pyaneti_LATTE.py".format(syspath)):

		if model == True:
			print ("Running Pyaneti modelling - this could take a while so be patient...")

			transit_list_model =  ("{}".format(str(np.asarray(transit_list)))[1:-1]) # change the list into a string and get rid of the brackets
			# the code is usually run through the command line so call it using the os.system function.

			os.system("python3 {}/pyaneti_LATTE.py {} {} {} {} {} {} {}".format(syspath, tic, indir, syspath, mstar, teff, srad, transit_list_model))

	else:
		#print ("Pyaneti has not been installed so you can't model anything yet. Contact Nora or Oscar for the LATTE version of the Pyaneti code.")
		model = False

	# ... UNTIL HERE
	# ------------


	# Finally, create a DV report which summarises all of the plots and tables.
	if DV == True:

		from LATTE import LATTE_DV as ldv

		if BLS == True:
			ldv.LATTE_DV(tic, indir, syspath, transit_list, sectors_all, target_ra, target_dec, tessmag, teff, srad, mstar, vmag, logg, mass_ast, radius_ast, logg_ast, numax, deltanu, plx, c_id, bls_stats1, bls_stats2, tpf_corrupt, astroquery_corrupt, FFI = args.FFI,  bls = True, model = model, mpi = args.mpi)
		else:
			ldv.LATTE_DV(tic, indir, syspath, transit_list, sectors_all, target_ra, target_dec, tessmag, teff, srad, mstar, vmag, logg, mass_ast, radius_ast, logg_ast, numax, deltanu, plx, c_id, [0], [0], tpf_corrupt, astroquery_corrupt, FFI = args.FFI,  bls = False, model = model, mpi = args.mpi)
Example 23
from astroquery.mast import Catalogs
from astropy.coordinates import SkyCoord
from astropy.stats import SigmaClip
from lightkurve import search_tesscut
from photutils import MMMBackground
import numpy as np
import matplotlib.pyplot as plt
import argparse

parser = argparse.ArgumentParser(description='Extract Lightcurves from FFIs')
parser.add_argument('TIC', type=int, help='TIC ID or RA DEC')
parser.add_argument('Sector', type=int, help='Sector')
parser.add_argument('--size', type=int, default=21)


args = parser.parse_args()


target = Catalogs.query_object('TIC %d' % args.TIC, radius=0.05, catalog='TIC')
ra     = float(target[0]['ra'])
dec    = float(target[0]['dec'])
coord  = SkyCoord(ra, dec, unit='deg')



ahdu = search_tesscut(coord, sector=args.Sector).download(cutout_size=args.size, download_dir='.')
#w    = WCS(allhdus.hdu[2].header)
hdu  = ahdu.hdu

flux = hdu[1].data['FLUX']
bkgs = np.zeros(len(flux))

#Background
for i,f in enumerate(flux):
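    # The original snippet is truncated here; a plausible, illustrative loop body
    # using the SigmaClip and MMMBackground imports above might look like this:
    bkg_estimator = MMMBackground(sigma_clip=SigmaClip(sigma=3.0))
    bkgs[i] = bkg_estimator(f)   # scalar sky estimate for this frame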
Example 24
def _get_period_guess_given_plname(plname):

    from astroquery.mast import Catalogs

    res = Catalogs.query_object(plname, catalog="TIC", radius=0.5*1/3600)

    if len(res) != 1:
        raise ValueError('for {}, got result:\n{}'.format(plname, repr(res)))

    ticid = int(res["ID"])
    litdir = '../data/literature_physicalparams/{}/'.format(ticid)
    if not os.path.exists(litdir):
        os.mkdir(litdir)
    litpath = os.path.join(litdir,'params.csv')

    try:
        lpdf = pd.read_csv(litpath)
        period_guess = float(lpdf['period_day'])

    except FileNotFoundError:

        from astrobase.services.mast import tic_objectsearch

        ticres = tic_objectsearch(ticid)

        with open(ticres['cachefname'], 'r') as json_file:
            data = json.load(json_file)

        ra = data['data'][0]['ra']
        dec = data['data'][0]['dec']

        targetcoordstr = '{} {}'.format(ra, dec)

        # attempt to get physical parameters of planet -- period, a/Rstar, and
        # inclination -- for the initial guesses.
        from astroquery.nasa_exoplanet_archive import NasaExoplanetArchive
        eatab = NasaExoplanetArchive.get_confirmed_planets_table()

        pl_coords = eatab['sky_coord']
        tcoord = SkyCoord(targetcoordstr, frame='icrs', unit=(u.deg, u.deg))

        print('got match w/ separation {}'.format(
            np.min(tcoord.separation(pl_coords).to(u.arcsec))))
        pl_row = eatab[np.argmin(tcoord.separation(pl_coords).to(u.arcsec))]

        # all dimensionful
        period = pl_row['pl_orbper'].value
        incl = pl_row['pl_orbincl'].value
        semimaj_au = pl_row['pl_orbsmax']
        rstar = pl_row['st_rad']
        a_by_rstar = (semimaj_au / rstar).cgs.value

        litdf = pd.DataFrame(
            {'period_day':period,
             'a_by_rstar':a_by_rstar,
             'inclination_deg':incl
            }, index=[0]
        )
        # get the fixed physical parameters from the data. period_day,
        # a_by_rstar, and inclination_deg are comma-separated in this file.
        litdf.to_csv(litpath, index=False, header=True, sep=',')
        lpdf = pd.read_csv(litpath, sep=',')
        period_guess = float(lpdf['period_day'])

    return period_guess
Example 25
def tic_cone_search(star_name="Kepler-10", radius_deg=.3):
    catalogData = Catalogs.query_object(star_name,
                                        radius=radius_deg,
                                        catalog="TIC")

    return catalogData
Example 26
#                 "t_end",\
#                 "t_max",\
#                 "flux_max",\
#                 "raw_integral",\
#                 "fit_amp",\
#                 "fit_fwhm",\
#                 "fit_t_start",\
#                 "fit_t_end",\
#                 "fit_t_max",\
#                 "fit_integral"]
#    ofile1.write(",".join(fieldnames1)+'\n')
    for this_id in ids:
        try:
            target_name = this_id
            radius = 0.2
            catalogTIC = Catalogs.query_object(target_name, radius, catalog = "TIC")
            numObj = "Number of TIC objects within %f deg of %s: %u" % (radius, target_name, len(catalogTIC))
            where_dwarfs = np.where(catalogTIC['lumclass'] == 'DWARF')[0]
            where_giants = np.where(catalogTIC['lumclass'] == 'GIANT')[0]
            dwarfs = "Number of objects classified as 'DWARF' within %f deg of %s: %u" % (radius, target_name, len(where_dwarfs))
            giants = "Number of objects classified as 'GIANT' within %f deg of %s: %u" % (radius, target_name, len(where_giants))
            where_closest = np.argmin(catalogTIC['dstArcSec'])
            closest = "Closest TIC ID to %s: TIC %s, seperation of %f arcsec. and a TESS mag. of %f" % (target_name, catalogTIC['ID'][where_closest], catalogTIC['dstArcSec'][where_closest], catalogTIC['Tmag'][where_closest])

            #sectors_search = Observations.query_criteria(target_name=this_id, provenance_project='TASOC')
            sectors_search = Observations.query_criteria(target_name=this_id, obs_collection="HLSP", filters="TESS",
                                                         t_exptime=[1799, 1801])
            #print('Getting sectors')
            #import pdb; pdb.set_trace()
            sector_length = len(sectors_search)
            if  sector_length !=0:
Example 27
    def __init__(self, tic=None, ra=None, dec=None):
        """
        Takes in TIC and/or RA/Dec, download directory, and product list.
        Updates: 
            - make tic,ra,dec flexible for float/str input
            - make sure download dir is proper format
            - make sure products is "all" or a list
            - specify ResolveError and No Data Products error exceptions
        """

        self.tic = tic
        self.ra = ra
        self.dec = dec

        if tic == None:
            radii = np.linspace(start=0.0001, stop=0.001, num=19)
            for rad in radii:
                if self.tic == None:
                    query_string = str(self.ra) + " " + str(
                        self.dec
                    )  # make sure to have a space between the strings!
                    obs_table = Catalogs.query_object(query_string,
                                                      radius=rad * u.deg,
                                                      catalog="TIC")
                    obs_df = obs_table.to_pandas()
                    if len(obs_table['ID']) == 1:
                        self.tic = obs_table['ID'][0]
                        self.bp_rp = (obs_table['gaiabp'] -
                                      obs_table['gaiarp'])[0]
                        break

                    if len(obs_df[obs_df['GAIA'].to_numpy(
                            dtype='str') != '']) == 1:
                        temp_obs_df = obs_df[obs_df['GAIA'].to_numpy(
                            dtype='str') != '']
                        self.tic = temp_obs_df['ID'].iloc[0]
                        self.bp_rp = (temp_obs_df['gaiabp'] -
                                      temp_obs_df['gaiarp']).iloc[0]
                        break

                    if len(
                            np.unique(obs_df[obs_df['HIP'].to_numpy(
                                dtype='str') != '']['HIP'])) == 1:
                        self.tic = obs_table['ID'][0]
                        self.bp_rp = (obs_table['gaiabp'] -
                                      obs_table['gaiarp'])[0]
                        break
#
#                    if len(obs_table[obs_table['typeSrc'] == "tmgaia2"]) == 1:
#                        self.tic = obs_table['ID'][0]
#                        self.bp_rp = (obs_table['gaiabp'] - obs_table['gaiarp'])[0]
#                        break

            if self.tic == None:
                self.tic = "tic issue"
                #self.bp_rp = 9999

        if ra == None:
            query_string = "tic " + self.tic  # make sure to have a space between the strings!
            obs_table = Catalogs.query_object(query_string,
                                              radius=0.001 * u.deg,
                                              catalog="TIC")
            #obs_df = obs_table.to_pandas()
            self.ra = obs_table['ra'][0]
            self.dec = obs_table['dec'][0]
Example 28
    csvf = open(options.csvfile, 'w')
    csvfile = csv.writer(csvf, delimiter=',')
    csvfile.writerow(csvcols)

    for id in options.ticid:
        target_name = 'TIC ' + str(id)
        ticid = target_name[4:]
        # target_name = '330.794887332661, 18.8843189579296'
        search_radius_deg = options.artrad / 3600.0

        # Query the TESS Input Catalog centered on the target_name.
        # target_name will be resolved by Simbad, so we need 'TIC ' in front of
        # the id. catalog = 'TIC' is for the MAST radial query.
        ticstars = Catalogs.query_object(target_name,
                                         radius=search_radius_deg,
                                         catalog='TIC')

        # What columns are available from the TIC?
        # print(len(ticstars), 'stars found')
        # print(ticstars.columns)
        # print(ticstars['gaiaqflag'])

        # propagate proper motions
        propagate_pm(ticstars)

        # get a copy of the target star itself, then delete it from the list
        where_self = np.where(ticstars['ID'] == ticid)[0]
        target = deepcopy(ticstars[where_self[0]])
        del ticstars[where_self[0]]
        nrstars = len(ticstars)
Example 29
    csvf = open(options.csvfile, 'w')
    csvfile = csv.writer(csvf, delimiter=',')
    csvfile.writerow(csvcols)
    print(head)
    lf.write(head + '\n')

    for actid in options.ticid:
        target_name = 'TIC ' + str(actid)
        ticid = target_name[4:]
        # target_name = '330.794887332661, 18.8843189579296'

        # Query the TESS Input Catalog centered on the target_name.
        # target_name will be resolved by Simbad, so we need 'TIC ' in front of
        # the id. catalog = 'TIC' is for the MAST radial query.
        ticstars = Catalogs.query_object(target_name,
                                         radius=tess_srad,
                                         catalog='TIC')

        # What columns are available from the TIC?
        # print('available columns =', ticstars.columns)
        # print(len(ticstars), 'stars found')

        # propagate proper motions
        propagate_pm(ticstars)

        # get a copy of the target star itself, then delete it from the list
        where_self = np.where(ticstars['ID'] == ticid)[0]
        target = deepcopy(ticstars[where_self[0]])
        del ticstars[where_self[0]]
        for exid in options.ticexclude:
            where_ex = np.where(ticstars['ID'] == str(exid))[0]
Example 30
from astroquery.mast import Observations
from astroquery.mast import Catalogs
import numpy as np

f = open('ballering_new.txt', "r")
line = f.readlines()[1:]
f.close()
name = np.array([])
for i in range(len(line)):
	name = np.append(name, str(line[i].split()[0]))

for i in range(len(name)):
	temp_name = "HIP" + name[i]
	catalogData = Catalogs.query_object(temp_name, radius=0.004, catalog="Galex") # searches within ~0.24 arcmin (0.004 deg)
	length = len(catalogData)
	if length > 1:
		fuv_array = np.array([])
		err_array = np.array([])
		for j in range(length):
			if catalogData[j][5] != 1:
				fuv_array = np.append(fuv_array, catalogData[j][10])
				err_array = np.append(err_array, catalogData[j][11])
		fuv_mag = np.mean(fuv_array)
		fuv_mag_err = np.mean(err_array)
	if length < 1:
		fuv_mag = 'n/a'
		fuv_mag_err = 'n/a'
	if length == 1:
		if catalogData[0][5] == 1:
			fuv_mag = 'n/a'
			fuv_mag_err = 'n/a'