Example 1
def astroquery_sci(data_start, data_end, proposal_id, paths, download):
    """Retreives raw science data for a given proposal ID using astroquery

    To get the right files, astroquery requires the following inputs:

        - intentType='science' – for the science data
        - instrument_name="WFC3/UVIS" – or whichever instrument is needed  
        - proposal_id -- user-defined input string of program/proposal ID
    """

    # Collecting the raw science files for a program ID using astroquery
    if data_start:
        # Round the data end date down so that the file from the new date isn't included
        data_end_rnd = data_end // 0.001 * 0.001
        sciobs = Observations.query_criteria(intentType='science',
                                             t_min=[data_start, data_end_rnd],
                                             instrument_name="WFC3/UVIS",
                                             proposal_id=proposal_id)
    else:
        sciobs = Observations.query_criteria(intentType='science',
                                             instrument_name="WFC3/UVIS",
                                             proposal_id=proposal_id)

    sciprod = Observations.get_product_list(sciobs)
    rawsci = Observations.filter_products(sciprod,
                                          productSubGroupDescription="RAW")

    # If download option is set, move to the download directory, check if empty, then download
    if download:
        os.chdir(paths['PID_DIR'])
        if os.path.exists(paths['DLD_DIR']):
            print('Download directory exists!! Not downloading files')
        else:
            Observations.download_products(rawsci, mrp_only=False)
            print('Download to {} complete'.format(paths['DLD_DIR']))
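
A minimal usage sketch for the function above; the MJD window, proposal ID, and paths dictionary are illustrative placeholders, and with download=False only the query and filtering steps run (nothing is written to disk).

# Hypothetical inputs: MJD bounds, program ID, and directory layout are placeholders.
example_paths = {'PID_DIR': '/path/to/proposal_dir',
                 'DLD_DIR': '/path/to/proposal_dir/mastDownload'}
astroquery_sci(data_start=58170.0, data_end=58200.0,
               proposal_id='15576', paths=example_paths, download=False)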
Example 2
def filter_products(obsids, table):
    """ filters the dataproducts for the given astroquery by obsids found in obs_id() and limits to only DRZ files
    before downloading all filtered products to /Volumes/galaxies"""
    ## move indexing obsids to another function
    obs = table['obsid']
    dataProductsByID = Observations.get_product_list(obs)
    dataProductsByID = Observations.filter_products(dataProductsByID, obs_id=obsids, productSubGroupDescription="DRZ")
    Observations.download_products(dataProductsByID, "/Volumes/galaxies/mastDownload/HST") ## SPECIFY filepath!!!
Example 3
    def get_datafiles(self, ra, dec, verbose=True):
        """
        Given an ra and dec create the correct output directory,
        and download associated drizzle files
        """
        coords = "{}_{}".format(ra, dec)

        if verbose:
            print(coords)

        # Querying MAST and downloading all associated drz files
        table_of_observations = Observations.query_criteria(
            coordinates=coords,
            radius=0.02,
            obs_collection="HST",
            dataproduct_type="image")

        data_product_list = Observations.get_product_list(
            table_of_observations)
        manifest = Observations.download_products(
            data_product_list,
            extension="drz.fits",
            obs_collection="HST",
            download_dir=self.download_directory)

        return list(
            manifest["Local Path"])  # We just want the local file locations
Example 4
def get_kepler_lcs(filenames):
    """
    Purpose:
        Downloads lightcurves
    Args:
        filenames - list of full filenames
    returns:
        paths_to_files - list containing the full paths to downloaded lightcurves.
    """
    obj_ids = [i[:13] for i in filenames]
    keplerObs = Observations.query_criteria(target_name=obj_ids,
                                            obs_collection='Kepler')
    keplerProds = Observations.get_product_list(keplerObs)
    yourProd = Observations.filter_products(keplerProds, extension=filenames)
    manifest = Observations.download_products(yourProd)
    """
    The process of downloading the files sorts them in numerical order messing up the 
    order, so I'm making the manifest a dataframe and sampling it
    one at a time from the filenames, not sure if there's a way to just sort the whole list
    based on the original order.
    """
    manifest = manifest.to_pandas(index='Local Path')
    paths_to_files = []
    for i, f in enumerate(
            filenames):  # forcing the order to match given filenames list
        f_sampler = make_sampler([f])  # to find the right file from manifest
        filename = f_sampler(manifest).index[0]  # full local filepath
        paths_to_files.append(filename)

    return paths_to_files
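
A short usage sketch, assuming the `make_sampler` helper used inside the function is available and MAST is reachable. The filename below is a hypothetical Kepler long-cadence file; its first 13 characters ('kplr' plus the 9-digit KIC) form the target name used in the query.

# Hypothetical Kepler long-cadence filename
files = ['kplr011446443-2009131105131_llc.fits']
local_paths = get_kepler_lcs(files)
print(local_paths[0])  # full local path of the downloaded light curve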
Example 5
def get_hlsp_lightcurves(tic_id,
                         hlsp_products=('CDIPS', 'TASOC'),
                         download_dir=None,
                         verbose=True):
    """This downloads TESS HLSP light curves for a given TIC ID.

    Parameters
    ----------
    tic_id : str
        The TIC ID of the object as a string.

    hlsp_products : sequence of str
        List of desired HLSP products to search. For instance, ["CDIPS"].

    download_dir : str
        Path of directory to which light-curve will be downloaded.

    Returns
    -------
    lcfiles : list or None
        List of light-curve file paths. None if none are found and downloaded.

    """

    if not astroquery_dependency:
        LOGERROR(
            "The astroquery package is required for this function to work.")
        return None

    lcfiles = []

    for hlsp in hlsp_products:

        obs_table = Observations.query_criteria(target_name=tic_id,
                                                provenance_name=hlsp)

        if verbose:
            LOGINFO('Found {} {} light-curves.'.format(len(obs_table), hlsp))

        # Get list of available products for this Observation.
        cdips_products = Observations.get_product_list(obs_table)

        # Download the products for this Observation.
        manifest = Observations.download_products(cdips_products,
                                                  download_dir=download_dir)
        if verbose:
            LOGINFO("Done")

        if len(manifest) >= 1:
            lcfiles.append(list(manifest['Local Path']))

    #
    # flatten lcfiles list
    #
    if len(lcfiles) >= 1:
        return_lcfiles = [item for sublist in lcfiles for item in sublist]
    else:
        return_lcfiles = None

    return return_lcfiles
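
A usage sketch, assuming astroquery is installed and the astrobase-style logging helpers (LOGERROR, LOGINFO) referenced inside the function are available; the TIC ID is purely illustrative.

# Hypothetical TIC ID; restrict the search to the CDIPS HLSP only.
lcfiles = get_hlsp_lightcurves('268301217', hlsp_products=('CDIPS',),
                               download_dir='/tmp/hlsp', verbose=True)
if lcfiles is not None:
    print('{} light-curve file(s) downloaded'.format(len(lcfiles)))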
Example 6
        def download_target(targetname, radius='10 arcsec'):
            download_dir = os.path.join(os.path.expanduser('~'), '.shadow-cache')
            if not os.path.isdir(download_dir):
                try:
                    os.mkdir(download_dir)
                # downloads locally if OS error occurs
                except OSError:
                    log.warning('Warning: unable to create {}. '
                                'Downloading MAST files to the current '
                                'working directory instead.'.format(download_dir))
                    download_dir = '.'

            logging.getLogger('astropy').setLevel(log.getEffectiveLevel())
            obsTable = Observations.query_criteria(target_name=targetname,
                                                    obs_collection='HST',
                                                    project='HST',
                                                    radius=radius)
            obsTable = obsTable[(obsTable['instrument_name'] == 'WFC3/IR') &
                                (obsTable['dataRights'] == "PUBLIC")]

            fnames = ['{}/mastDownload/'.format(download_dir) +
                        url.replace('mast:', '').replace('product', url.split('/')[-1].split('_')[0])[:-9] +
                         '_flt.fits' for url in obsTable['dataURL']]
            log.info('Found {} files.'.format(len(obsTable)))
            paths = []
            for idx, t in enumerate(tqdm(obsTable, desc='Downloading files', total=len(obsTable))):
                if os.path.isfile(fnames[idx]):
                    paths.append(fnames[idx])
                else:
                    t1 = Observations.get_product_list(t)
                    t1 = t1[t1['productSubGroupDescription'] == 'FLT']
                    paths.append(Observations.download_products(t1, mrp_only=False, download_dir=download_dir)['Local Path'][0])
            return paths
Example 7
def get_all_TESS_data(object_name, radius = ".02 deg", get_PDC = True, get_all = False):
    """ 
    Given a planet name, this function returns dictionaries of times, fluxes and 
    flux errors in a juliet-friendly format. It queries MAST via astroquery using a 
    default radius of 0.02 deg around the target name. If get_PDC is True, PDC fluxes 
    are returned; if False, SAP fluxes are returned. If get_all is True, the function 
    also returns a dictionary containing, in addition to the times, fluxes and errors, 
    other metadata.
    """
    if not has_astroquery:
        print("Error on using juliet function `get_all_TESS_data`: astroquery.mast not found.")
    obs_table = Observations.query_object(object_name,radius=radius)
    out_dict = {}
    times = {}
    fluxes = {}
    fluxes_errors = {}
    for i in range(len(obs_table['dataURL'])):
        if 's_lc.fits' in obs_table['dataURL'][i]:
            fname = obs_table['dataURL'][i].split('/')[-1]
            metadata = fname.split('-')
            if len(metadata) == 5:
                # Extract metadata:
                sector = int(metadata[1].split('s')[-1])
                ticid = int(metadata[2])
                # Download files:
                data_products = Observations.get_product_list(obs_table[i])
                manifest = Observations.download_products(data_products)
                # Read lightcurve file:
                d,h = fits.getdata('mastDownload/TESS/'+fname[:-8]+'/'+fname,header=True)
                t,fs,fserr,f,ferr = d['TIME']+h['BJDREFI'],d['SAP_FLUX'],d['SAP_FLUX_ERR'],\
                                    d['PDCSAP_FLUX'],d['PDCSAP_FLUX_ERR']
                idx_goodpdc = np.where((f != 0.)&(~np.isnan(f)))[0]
                idx_goodsap = np.where((fs != 0.)&(~np.isnan(fs)))[0]
                # Save to output dictionary:
                if 'TIC' not in out_dict.keys():
                    out_dict['TIC'] = ticid
                out_dict[sector] = {}
                out_dict[sector]['TIME_PDCSAP_FLUX'] = t[idx_goodpdc]
                out_dict[sector]['PDCSAP_FLUX'] = f[idx_goodpdc]
                out_dict[sector]['PDCSAP_FLUX_ERR'] = ferr[idx_goodpdc]
                out_dict[sector]['TIME_SAP_FLUX'] = t[idx_goodsap]
                out_dict[sector]['SAP_FLUX'] = fs[idx_goodsap]
                out_dict[sector]['SAP_FLUX_ERR'] = fserr[idx_goodsap]
                if get_PDC:
                    times['TESS'+str(sector)] = t[idx_goodpdc]
                    med = np.median(f[idx_goodpdc])
                    fluxes['TESS'+str(sector)] = f[idx_goodpdc]/med
                    fluxes_errors['TESS'+str(sector)] = ferr[idx_goodpdc]/med
                else:
                    times['TESS'+str(sector)] = t[idx_goodsap]
                    med = np.median(fs[idx_goodsap])
                    fluxes['TESS'+str(sector)] = fs[idx_goodsap]/med
                    fluxes_errors['TESS'+str(sector)] = fserr[idx_goodsap]/med
                # Remove downloaded folder:
                os.system('rm -r mastDownload')
    if get_all:
        return out_dict, times, fluxes, fluxes_errors
    else:
        return times, fluxes, fluxes_errors
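
A usage sketch with an illustrative target name; each returned dictionary is keyed by 'TESS<sector>'.

# Hypothetical target; PDC fluxes, median-normalised per sector.
times, fluxes, fluxes_errors = get_all_TESS_data('WASP-19', get_PDC=True)
for key in times:
    print(key, len(times[key]), 'cadences')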
Example 8
    def download(self, quality_bitmask='default', download_dir=None):
        """Returns a single `KeplerTargetPixelFile` or `KeplerLightCurveFile` object.

        If multiple files are present in `SearchResult.table`, only the first
        will be downloaded.

        Parameters
        ----------
        quality_bitmask : str or int
            Bitmask (integer) which identifies the quality flag bitmask that should
            be used to mask out bad cadences. If a string is passed, it has the
            following meaning:

                * "none": no cadences will be ignored (`quality_bitmask=0`).
                * "default": cadences with severe quality issues will be ignored
                  (`quality_bitmask=1130799`).
                * "hard": more conservative choice of flags to ignore
                  (`quality_bitmask=1664431`). This is known to remove good data.
                * "hardest": removes all data that has been flagged
                  (`quality_bitmask=2096639`). This mask is not recommended.

            See the :class:`KeplerQualityFlags` class for details on the bitmasks.
        download_dir : str
            Location where the data files will be stored.
            Defaults to "~/.lightkurve-cache" if `None` is passed.

        Returns
        -------
        data : `TargetPixelFile` or `LightCurveFile` object
            The first entry in the products table.
        """
        if len(self.table) == 0:
            warnings.warn("Cannot download from an empty search result.",
                          LightkurveWarning)
            return None
        if len(self.table) != 1:
            warnings.warn('Warning: {} files available to download. '
                          'Only the first file has been downloaded. '
                          'Please use `download_all()` or specify a campaign, quarter, or '
                          'cadence to limit your search.'.format(len(self.table)),
                          LightkurveWarning)

        # Make sure astroquery uses the same level of verbosity
        logging.getLogger('astropy').setLevel(log.getEffectiveLevel())

        # download first product in table
        if download_dir is None:
            download_dir = self._default_download_dir()

        path = Observations.download_products(self.table[:1], mrp_only=False,
                                              download_dir=download_dir)['Local Path']

        # return single tpf or lcf
        tpf_files = ['lpd-targ.fits', 'spd-targ.fits']
        lcf_files = ['llc.fits', 'slc.fits']
        if any(file in self.table['productFilename'][0] for file in tpf_files):
            return KeplerTargetPixelFile(path[0], quality_bitmask=quality_bitmask)
        elif any(file in self.table['productFilename'][0] for file in lcf_files):
            return KeplerLightCurveFile(path[0], quality_bitmask=quality_bitmask)
Example 9
def download_mast(obs_table,**kwargs):
    
    """Download data from the MAST archive.
    
    This function gives you an easy way to download data from the MAST
    archive, if you already have a table of observations (from Astroquery)
    to hand. By default, this will download everything into your current
    working directory.
    
    Args:
        obs_table (astropy.table.Table): The result of an astroquery.mast
            query.
    
    """
                  
    Observations.download_products(obs_table, **kwargs)
Example 10
def dload_url(tic_id, sector):  # Downloads URL through query for general 2-minute cadence data given TIC ID and Sector
    obsTable = Observations.query_criteria(
        obs_collection="TESS",
        dataproduct_type=["TIMESERIES"],
        target_name=tic_id,
        sequence_number=sector)
    products = Observations.get_product_list(obsTable)
    manifest = Observations.download_products(products, extension=['fits'])
    return manifest
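
A brief usage sketch; the TIC ID and sector are placeholders.

# Hypothetical TIC ID and sector
manifest = dload_url('261136679', 2)
print(manifest['Local Path'])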
Example 11
def download_lightcurve(obsid):
    """Return a pandas dataframe of lightcurve data for one source."""
    dl = Observations.download_products(obsid, productSubGroupDescription='LC')
    fn = dl['Local Path'][0]
    hdul = fits.open(fn)
    lc = Table(hdul[1].data)
    lc['NORM_PDCSAP_FLUX'] = lc['PDCSAP_FLUX'] / np.nanpercentile(
        lc['PDCSAP_FLUX'], 50)
    data = lc.to_pandas()
    return data
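
A usage sketch assuming the obsid below (illustrative) points at a Kepler/TESS light-curve product at MAST.

# Hypothetical MAST observation ID
df = download_lightcurve('17000012345')
print(df[['TIME', 'NORM_PDCSAP_FLUX']].head())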
Example 12
    def _download_one(self, table, quality_bitmask, download_dir, cutout_size):
        """Private method used by `download()` and `download_all()` to download
        exactly one file from the MAST archive.

        Always returns a `TargetPixelFile` or `LightCurveFile` object.
        """
        # Make sure astroquery uses the same level of verbosity
        logging.getLogger('astropy').setLevel(log.getEffectiveLevel())

        if download_dir is None:
            download_dir = self._default_download_dir()

        # if the SearchResult row is a TESScut entry, then download cutout
        if 'FFI Cutout' in table[0]['description']:
            try:
                log.debug("Started downloading TESSCut for '{}' sector {}."
                          "".format(table[0]['target_name'],
                                    table[0]['sequence_number']))
                path = self._fetch_tesscut_path(table[0]['target_name'],
                                                table[0]['sequence_number'],
                                                download_dir, cutout_size)
                log.debug("Finished downloading.")
            except Exception as exc:
                msg = str(exc)
                if "504" in msg:
                    # TESSCut will occasionally return a "504 Gateway Timeout
                    # error" when it is overloaded.
                    raise HTTPError(
                        'The TESS FFI cutout service at MAST appears '
                        'to be temporarily unavailable. It returned '
                        'the following error: {}'.format(exc))
                else:
                    raise SearchError(
                        'Unable to download FFI cutout. Desired target '
                        'coordinates may be too near the edge of the FFI. '
                        'Error: {}'.format(exc))

            return _open_downloaded_file(path,
                                         quality_bitmask=quality_bitmask,
                                         targetid=table[0]['targetid'])

        else:
            if cutout_size is not None:
                warnings.warn(
                    '`cutout_size` can only be specified for TESS '
                    'Full Frame Image cutouts.', LightkurveWarning)
            from astroquery.mast import Observations
            log.debug("Started downloading {}.".format(
                table[:1]['dataURL'][0]))
            path = Observations.download_products(
                table[:1], mrp_only=False,
                download_dir=download_dir)['Local Path'][0]
            log.debug("Finished downloading.")
            # open() will determine filetype and return
            return _open_downloaded_file(path, quality_bitmask=quality_bitmask)
Example 13
    def download_all(self, quality_bitmask='default', download_dir=None):
        """Returns a `TargetPixelFileCollection or `LightCurveFileCollection`.

        Parameters
        ----------
        quality_bitmask : str or int
            Bitmask (integer) which identifies the quality flag bitmask that should
            be used to mask out bad cadences. If a string is passed, it has the
            following meaning:

                * "none": no cadences will be ignored (`quality_bitmask=0`).
                * "default": cadences with severe quality issues will be ignored
                  (`quality_bitmask=1130799`).
                * "hard": more conservative choice of flags to ignore
                  (`quality_bitmask=1664431`). This is known to remove good data.
                * "hardest": removes all data that has been flagged
                  (`quality_bitmask=2096639`). This mask is not recommended.

            See the :class:`KeplerQualityFlags` class for details on the bitmasks.
        download_dir : str
            Location where the data files will be stored.
            Defaults to "~/.lightkurve-cache" if `None` is passed.

        Returns
        -------
        collection : `lightkurve.Collection` object
            Returns a `LightCurveFileCollection` or `TargetPixelFileCollection`,
            containing all entries in the products table.
        """
        if len(self.table) == 0:
            warnings.warn("Cannot download from an empty search result.",
                          LightkurveWarning)
            return None

        # Make sure astroquery uses the same level of verbosity
        logging.getLogger('astropy').setLevel(log.getEffectiveLevel())

        # download all products listed in self.products
        if download_dir is None:
            download_dir = self._default_download_dir()

        path = Observations.download_products(self.table, mrp_only=False,
                                              download_dir=download_dir)['Local Path']

        # return collection of tpf or lcf
        tpf_files = ['lpd-targ.fits', 'spd-targ.fits']
        lcf_files = ['llc.fits', 'slc.fits']
        if any(file in self.table['productFilename'][0] for file in tpf_files):
            tpfs = [KeplerTargetPixelFile(p, quality_bitmask=quality_bitmask)
                    for p in path]
            return TargetPixelFileCollection(tpfs)
        elif any(file in self.table['productFilename'][0] for file in lcf_files):
            lcs = [KeplerLightCurveFile(p, quality_bitmask=quality_bitmask)
                   for p in path]
            return LightCurveFileCollection(lcs)
Example 14
def astroquery_darks(anneal_start, anneal_end, paths, download):
    """Retreives darks within an anneal cycle using astroquery

    To get the right files, astroquery requires the following inputs:

        - intentType='calibration' – for the darks
        - instrument_name="WFC3/UVIS" – or whichever instrument is needed  
        - t_min – Anneal dates start and end defined above
        - target_name=DARK* – all files identified as a type of dark

    NO exposure time should be given (too stringent a cut). The astroquery 
    command returns more files than are strictly required. However, the darks 
    CTE correction code then selects only the 
    files it needs from these, so this is not a problem.

    Astroquery takes the start and end dates of the anneal cycle. The end 
    anneal date in the astroquery command below is slightly reduced so that 
    the file at the start of the next anneal is not included. 
    e.g. [58170.7838 (start of Feb 21 2018 anneal), 58201.3608 (start of next Mar 24 2018 anneal] 
    --> [58170.7838, 58201.360]"""

    # Round the next anneal start date down so that the file from the new date isn't included
    anneal_end_rnd = anneal_end // 0.001 * 0.001

    # Collecting the raw dark files for one anneal cycle using astroquery
    darkobs = Observations.query_criteria(intentType='calibration',
                                          instrument_name="WFC3/UVIS",
                                          t_min=[anneal_start, anneal_end_rnd],
                                          target_name="DARK*")
    darkprod = Observations.get_product_list(darkobs)
    rawdark = Observations.filter_products(darkprod,
                                           productSubGroupDescription="RAW")

    # If download option is set, move to the anneal directory, check if empty, then download
    if download:
        os.chdir(paths['ANN_DIR'])
        if os.path.exists(paths['DLD_DIR']):
            print('Download directory exists!! Not downloading files')
        else:
            Observations.download_products(rawdark, mrp_only=False)
            print('Download to {} complete'.format(paths['DLD_DIR']))
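
A usage sketch using the anneal MJD bounds quoted in the docstring; the paths dictionary is a placeholder, and with download=False only the query and filtering steps run.

# Anneal MJD bounds from the docstring example; directory layout is hypothetical.
example_paths = {'ANN_DIR': '/path/to/anneal_dir',
                 'DLD_DIR': '/path/to/anneal_dir/mastDownload'}
astroquery_darks(58170.7838, 58201.3608, example_paths, download=False)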
Example 15
def download_tess_data(target_name):
    r"""Run a query to MAST to obtain the TESS data of an object.

    Data is saved in the directories: `./mastDownload/TESS/<target_name>_*/*.fits`

    Args:
        target_name (str): name of the target: e.g., '231663901'

    Returns:
        bool: it returns `True` if data are retrieved and `False` otherwise.

    """
    tess_obs = Observations.query_criteria(target_name=target_name, obs_collection='TESS')
    if len(tess_obs) == 0:
        msgs.warning('No TESS data for target {}'.format(target_name))
        return False
    for idx in np.arange(0, len(tess_obs)):
        tess_prods = Observations.get_product_list(tess_obs[idx])
        tess_fits_prods = Observations.filter_products(tess_prods, extension='fits', mrp_only=False)
        Observations.download_products(tess_fits_prods, mrp_only=False, cache=False)
    return True
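
Usage follows directly from the docstring example:

# TIC ID from the docstring; FITS products land under ./mastDownload/TESS/
if download_tess_data('231663901'):
    print('TESS data retrieved')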
Example 16
def download_kepler_data(target_name):
    r"""Run a query to MAST to obtain the kepler data of an object

    The data is saved in the directories: `./mastDownload/Kepler/<target_name>_*/*.fits`

    Args:
        target_name (str): name of the target: e.g., 'kplr011446443'

    Returns:
        bool: it returns `True` if data are retrieved and `False` otherwise.

    """
    kepler_obs = Observations.query_criteria(target_name=target_name, obs_collection='Kepler')
    if len(kepler_obs) == 0:
        msgs.warning('No Kepler data for target {}'.format(target_name))
        return False
    for idx in np.arange(0, len(kepler_obs)):
        kepler_prods = Observations.get_product_list(kepler_obs[idx])
        kepler_fits_prods = Observations.filter_products(kepler_prods, extension='fits', mrp_only=False)
        Observations.download_products(kepler_fits_prods, mrp_only=False, cache=False)
    return True
Example 17
    def download_products(data_products: pd.DataFrame, data_directory: Path) -> pd.DataFrame:
        """
        A wrapper for MAST's `download_products`, allowing the use of Pandas DataFrames instead of AstroPy Tables.
        Retries on error when communicating with the MAST server.

        :param data_products: The data frame of data products to download. Will be converted from DataFrame to Table
                              for sending the request to MAST.
        :param data_directory: The path to download the data to.
        :return: The manifest of the download. Will be converted from Table to DataFrame for use.
        """
        manifest = Observations.download_products(Table.from_pandas(data_products), download_dir=str(data_directory))
        return manifest.to_pandas()
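
A sketch of how the wrapper might be called, assuming it is reachable as shown (e.g. exposed as a static method) and that `Observations` is already imported; the obsid is a placeholder.

# Hypothetical obsid; convert the astroquery product table to a DataFrame first.
from pathlib import Path
products_df = Observations.get_product_list('3000007760').to_pandas()
manifest_df = download_products(products_df, Path('/tmp/mast_data'))
print(manifest_df['Local Path'])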
Example 18
def get_galex_image(ra, dec, imsize):
    """

    get galex image of a galaxy
    
    Input:
    * ra in deg
    * dec in deg
    * imsize in arcsec
    
    Returns:
    * image
    """

    # following procedure outlined here:
    # https://astroquery.readthedocs.io/en/latest/mast/mast.html

    # get data products in region near ra,dec
    obs_table = Observations.query_region("%12.8f %12.8f" % (ra, dec),
                                          radius=.1 * u.arcmin)
    # create a flag to select galex data
    galexFlag = obs_table['obs_collection'] == 'GALEX'

    # separate out galex data
    data_products = Observations.get_product_list(obs_table[galexFlag])

    # download the observations
    manifest = Observations.download_products(data_products,
                                              productType="SCIENCE")

    for m in manifest:
        # choose the first NUV image
        if m['Local Path'].find('nd-int') > -1:
            nuv_path = m['Local Path']
            break

    # should be able to construct path from the obs_id in data_products
    # this will let us check if the image is already downloaded
    #
    # I can also save the cutout in a GALEX folder, and
    # look for the image before calling this function

    nuv, nuv_header = fits.getdata(nuv_path, header=True)

    # this is a big image, so we need to get a cutout

    nuv_wcs = WCS(nuv_header)
    position = SkyCoord(ra, dec, unit="deg", frame='icrs')
    cutout = Cutout2D(nuv,
                      position, (imsize * u.arcsec, imsize * u.arcsec),
                      wcs=nuv_wcs)

    return cutout
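
A usage sketch with illustrative coordinates (degrees) and cutout size (arcseconds); the function returns an astropy Cutout2D whose data array can be inspected or displayed directly.

# Hypothetical coordinates and a 100 arcsec cutout
cutout = get_galex_image(185.47886, 4.47377, 100)
print(cutout.data.shape)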
Example 19
def plot_lc(file, filepath='./fitsFiles/', local=True, c='blue', ax=False):
    """
    `file` should be the full id, including time information.
    Args:
        file (str) - filename starting with kplr and ending in .fits
        filepath (str) - path to the fits file
    Returns:
        None
    """
    if local:
        f = filepath + file
    else:
        filenames = [file]
        obj_ids = [i[:13] for i in filenames]
        keplerObs = Observations.query_criteria(target_name=obj_ids,
                                                obs_collection='Kepler')
        keplerProds = Observations.get_product_list(keplerObs)
        yourProd = Observations.filter_products(keplerProds, extension=file)
        manifest = Observations.download_products(yourProd)
        manifest = manifest.to_pandas(index='Local Path')
        f = manifest.index[0]

    if not ax:
        fig = plt.figure(figsize=(16, 4))
        ax = fig.add_subplot(111)

    t, nf, err = read_kepler_curve(f)

    x = t
    y = nf

    axrange = 0.55 * (max(y) - min(y))
    mid = (max(y) + min(y)) / 2
    yaxmin = mid - axrange
    yaxmax = mid + axrange
    if yaxmin < .95:
        if yaxmax > 1.05:
            ax.set_ylim(yaxmin, yaxmax)
        else:
            ax.set_ylim(yaxmin, 1.05)
    elif yaxmax > 1.05:
        ax.set_ylim(.95, yaxmax)
    else:
        ax.set_ylim(.95, 1.05)

    ax.set_xlim(min(x), max(x))
    color = c
    ax.plot(x, y, 'o', markeredgecolor='none', c=color, alpha=0.2)
    ax.plot(x, y, '-', markeredgecolor='none', c=color, alpha=0.7)
    #ax2.set_title(files[index][:13],fontsize = 20)
    ax.set_ylabel(r'$\frac{\Delta F}{F}$', fontsize=25)
    ax.tick_params(labelsize=20)
Example 20
    def drawData(index):
        # Plots the lightcurve of the point chosen
        ax2.cla()
        ### for fits files saved locally
        #f = pathtofits+df.index[index]
        #t,nf,err=read_kepler_curve(f)

        ### for fits files that need downloaded
        f = df.index[index]  # full file name ****-****_llc.fits
        obj_id = f[:13]  # observation id kplr********
        keplerObs = Observations.query_criteria(target_name=obj_id,
                                                obs_collection='Kepler')
        keplerProds = Observations.get_product_list(keplerObs)
        yourProd = Observations.filter_products(keplerProds, extension=f)
        manifest = Observations.download_products(yourProd)
        filename = manifest[0][0]
        t, nf, err = qt.read_kepler_curve(filename)
        shutil.rmtree('./mastDownload')  # removing the downloaded data

        x = t
        y = nf
        axrange = 0.55 * (max(y) - min(y))
        mid = (max(y) + min(y)) / 2
        yaxmin = mid - axrange
        yaxmax = mid + axrange
        if yaxmin < .95:
            if yaxmax > 1.05:
                ax2.set_ylim(yaxmin, yaxmax)
            else:
                ax2.set_ylim(yaxmin, 1.05)
        elif yaxmax > 1.05:
            ax2.set_ylim(.95, yaxmax)
        else:
            ax2.set_ylim(.95, 1.05)

        ax2.plot(x,
                 y,
                 'o',
                 markeredgecolor='none',
                 c=colorVal[index],
                 alpha=0.2)
        ax2.plot(x,
                 y,
                 '-',
                 markeredgecolor='none',
                 c=colorVal[index],
                 alpha=0.7)
        #ax2.set_title(files[index][:13],fontsize = 20)
        ax2.set_xlabel('Time (Days)', fontsize=22)
        ax2.set_ylabel(r'$\frac{\Delta F}{F}$', fontsize=30)
        fig.suptitle(files[index][:13], fontsize=30)
        canvas.draw()
Example 21
def download_products(query_products, output_dir=''):
    """ Downloads all products in `query_products` to `output_dir`.

    Parameters
    ----------
    query_products : `astropy.table.Table`
        Table of data products to download.


    Notes
    -----
    Files are initially downloaded to a temporary directory within `output_dir`
    called 'temp', so if a subdirectory `temp` already exists within
    `output_dir` an error is raised."""

    # format path for output dir
    if output_dir == '':
        output_dir = os.getcwd()
    output_dir = os.path.join(output_dir, '')

    # make temp dir in `output_dir`. error if it exists.
    print(output_dir + 'temp')
    assert (os.path.isdir(output_dir + 'temp') is False)
    os.makedirs(output_dir + 'temp')

    print('Downloading {} files.'.format(len(query_products)))
    Observations.download_products(query_products,
                                   download_dir=output_dir + 'temp',
                                   mrp_only=False)

    # move files from mast download directories in `temp` to `output_dir`
    files = glob.glob(output_dir + 'temp/mastDownload/HST/*/*.fits')
    if len(files) > 0:
        print('Cleaning up temp directory.')
        for f in files:
            shutil.copy(f, output_dir + os.path.basename(f))

        # remove temp directory
        shutil.rmtree(output_dir + 'temp')  # remove mast download dir
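
A sketch of driving the helper above with a filtered HST product table; the rootname reuses the ib6v06060 example that appears later in this collection, and the output path is a placeholder. Note that `output_dir` must not already contain a 'temp' subdirectory.

from astroquery.mast import Observations

# Hypothetical query: RAW products for one HST rootname
obs = Observations.query_criteria(obs_id='ib6v06060')
prods = Observations.get_product_list(obs)
raw_prods = Observations.filter_products(prods, productSubGroupDescription='RAW',
                                         extension='fits')
download_products(raw_prods, output_dir='/path/to/output')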
Example 22
def plot_top_n(Q_coo, n=10, sortby='sAverage', top_n_df=False):
    """
    Purpose:
        Plots the top n outlier lightcurves for a given quarter. This assumes data is saved in the standard
        way for coo files, i.e. that feature data is saved as the attribute 'data' in a dataframe, that outlier scores are
        saved as 'scores', and that the indices of each are the full file names.
    Args:
        Q_coo (cluster outlier object) - The cluster outlier object containing the feature data and outlier scores attributes
        n (int) - number of plots to create of most outlying points
        sortby (optional, str) - how to sort scores, default is the sampled average of k=4 to 13
        top_n_df (optional, boolean) - whether or not to return a dataframe containing the features of the top outliers
    Returns:
        top_n_feats (optional, dataframe) - pandas dataframe containing the features of the top n outliers
    """

    tmp = Q_coo.scores.sort_values(by=sortby, ascending=False)
    filenames = list(tmp.index[:n])
    obj_ids = [i[:13] for i in tmp.index[:n]]
    keplerObs = Observations.query_criteria(target_name=obj_ids,
                                            obs_collection='Kepler')
    keplerProds = Observations.get_product_list(keplerObs)
    yourProd = Observations.filter_products(keplerProds, extension=filenames)
    manifest = Observations.download_products(yourProd)
    """
    The process of downloading the files sorts them in numerical order messing up the 
    order of most outlying to leas, so I'm making the manifest a dataframe and sampling it
    one at a time from the filenames, not sure if there's a way to just sort the whole list
    based on the original order.
    """
    manifest = manifest.to_pandas(index='Local Path')

    for i, f in enumerate(
            filenames):  # forcing the order to match most to least outlying
        f_sampler = make_sampler([f])  # to find the right file from manifest
        filename = f_sampler(manifest).index[0]  # full local filepath

        fig = plt.figure(figsize=(15, 1))
        ax = fig.add_subplot(111)
        t, nf, err = read_kepler_curve(filename)
        ax.errorbar(t, nf, err)
        plt.title('KIC {}'.format(int(obj_ids[i][4:])))

    shutil.rmtree('./mastDownload')  # removing the downloaded data

    if top_n_df:
        top_n_sampler = make_sampler(tmp[:n].index)
        top_n_feats = top_n_sampler(Q_coo.data)
        return top_n_feats
    else:
        return
Example 23
    def download(self, key):
        """Download the data

        Only download the observations contained in the interval specified by
        the `key`. The `key` argument must correspond to one of the keys in the
        :py:attr:`~download.Downloader.products` attribute. If it is not,
        then a KeyError will be raised and the download will be skipped.


        Parameters
        ----------
        key : str
            Date in ISO format (YYYY-MM-DD) of a given interval's start time

        Returns
        -------
        None
            Downloaded data will be stored in directory specified by the
            :py:attr:`~download.Downloader.download_dir` attribute
        """
        msg = ('Downloading data...\n '
               'Download Directory: {}\n {}'.format(self.download_dir,
                                                    self._msg_div))
        LOG.info(msg)
        download_params = {
            'download_dir': self.download_dir,
            'mrp_only': False,
            'dataproduct_type': self.product_type,
            'productSubGroupDescription': self.SubGroupDescription
        }
        try:
            download_list = self.products[key]['obsID'].tolist()
        except KeyError as e:
            LOG.error('{}\n{}'.format(e, self._msg_div))
        else:
            Observations.download_products(download_list, **download_params)
Example 24
def get_cdips_product(star_num):
    """ Given a list of numbers or a single number,
    return the same number of FitsTable objects.

    Given values can be TIC ids or indexes, the latter will be used to index

    """
    global index_provided
    if type(star_num) != list:
        star_num = [star_num]

    num_ints = [int(n) for n in star_num]
    tic_searches = []
    mask = get_mask(table_data)
    print("Filtered down to", np.count_nonzero(mask), "items to index from.")
    temp_table = table_data[mask]
    num_stars = len(table_data)
    for n in num_ints:
        if n > num_stars:
            # Already have tics
            tic_searches.append(str(n))
        else:
            if not index_provided: index_provided = True
            tic_searches.append(str(int(temp_table['MatchID'][n])))

    obs = Observations.query_criteria(target_name=tic_searches, provenance_name="CDIPS")
    print(f"TIC {tic_searches} returned {len(obs)} object(s).")
    # Get rid of duplicates across sectors
    u, u_indexes = np.unique(obs['target_name'], return_index=True)
    if len(u) != len(obs):
        obs = obs[u_indexes]
        print(f"After removing duplicates, {len(obs)} object(s) remain.")

    if len(obs) > 0:
        products = Observations.get_product_list(obs)
        print("Downloading products...")
        down = Observations.download_products(products)
        print("Done.")
        ft_list = []
        for f in down:
            ft = FitsTable(f['Local Path'])
            ft_list.append(ft)

        ft_list = sorted(ft_list, key=lambda x: tic_searches.index(x.ticid))

        return ft_list
    else:
        return
Example 25
def download_wcs_file(filtered, local_dir, n=10, cloud=False):
    """
    filtered is the result of id_wcs_file
    local_dir is the location to put the file
    cloud determines whether the file should first be pulled from the cloud.

    For AWS Lambda, local_dir should be /tmp
    """

    if cloud:
        Observations.enable_cloud_dataset(provider='AWS')

    obsslice = slice(n, n + 1)
    manifest = Observations.download_products(filtered[obsslice],
                                              download_dir=local_dir,
                                              mrp_only=False)

    return manifest['Local Path'][0]
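
A usage sketch: `filtered` would normally come from id_wcs_file; here a filtered product table is built inline for illustration, and the TIC ID is a placeholder.

from astroquery.mast import Observations

# Hypothetical query standing in for id_wcs_file's output
obs = Observations.query_criteria(obs_collection='TESS', target_name='261136679')
filtered = Observations.filter_products(Observations.get_product_list(obs),
                                        extension='fits')
local_path = download_wcs_file(filtered, '/tmp', n=0)
print(local_path)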
Example 26
def download_products(products):
    """Download data products from MAST.

    Downloads will be cached using astroquery.mast's caching system.

    Parameters
    ----------
    products : astropy.Table
        A table detailing the products to be downloaded, i.e. the output
        of `search_kepler_products` or `search_kepler_tpf_products`.

    Returns
    -------
    paths : astropy.Table.Column
        Detailing the paths of the downloaded or cached products.
    """
    # Note: by default, MAST will only let us download "Minimum Recommended
    # Products" (MRPs), which do not include e.g. Target Pixel Files.
    # We need to set `mrp_only=False` to ensure MAST downloads whatever we want.
    dl = Observations.download_products(products, mrp_only=False)
    return dl['Local Path']
Example 27
    def getOneFileFromMastObs(self, ticid, sector, fileType, *args, **kwargs):
        """
        Generic function to retrieve a particular file type from a
        CAOM observation at MAST.
        Private function -- only used by this class.
        """

        if fileType not in ['LC', 'TP', 'DVT']:
            raise ValueError("Requested file Type (%s) not in allowed list (LC,TP,DVT)."\
                             % fileType)

        obsid = self.getPostageStampObservationId(sector, ticid)

        manifest = Observations.download_products(obsid,
                                                  productSubGroupDescription=[fileType],
                                                  mrp_only=False, extension="fits",
                                                  download_dir=self.cachePath)

        localUrl = manifest['Local Path'][0]

        return self.parse(localUrl, *args, **kwargs)
Example 28
def download_products(products):
    """Download data products from MAST.

    Downloads will be cached using astroquery.mast's caching system.

    Parameters
    ----------
    products : astropy.Table
        A table detailing the products to be downloaded, i.e. the output
        of `search_products` or `search_kepler_tpf_products`.

    Returns
    -------
    paths : astropy.Table.Column
        Detailing the paths of the downloaded or cached products.
    """
    # Note: by default, MAST will only let us download "Minimum Recommended
    # Products" (MRPs), which do not include e.g. Target Pixel Files.
    # We need to set `mrp_only=False` to ensure MAST downloads whatever we want.

    # check if download directory exists (~/.lightkurve-cache)
    cache_dir = os.path.join(os.path.expanduser('~'), '.lightkurve-cache')
    if os.path.isdir(cache_dir):
        download_dir = cache_dir
    else:
        # if it doesn't exist, make a new cache directory
        try:
            os.mkdir(cache_dir)
            download_dir = cache_dir
        # downloads locally if OS error occurs
        except OSError:
            log.warning(
                'Warning: unable to create .lightkurve-cache directory. '
                'Downloading MAST files to local directory.')
            download_dir = '.'

    dl = Observations.download_products(products,
                                        mrp_only=False,
                                        download_dir=download_dir)
    return dl['Local Path']
Example 29
    def _download_one(self, table, quality_bitmask, download_dir, cutout_size):
        """Private method used by `download()` and `download_all()` to download
        exactly one file from the MAST archive.

        Always returns a `TargetPixelFile` or `LightCurveFile` object.
        """
        # Make sure astroquery uses the same level of verbosity
        logging.getLogger('astropy').setLevel(log.getEffectiveLevel())

        if download_dir is None:
            download_dir = self._default_download_dir()

        # if the SearchResult row is a TESScut entry, then download cutout
        if 'FFI Cutout' in table[0]['description']:
            try:
                path = self._fetch_tesscut_path(table[0]['target_name'],
                                                table[0]['sequence_number'],
                                                download_dir, cutout_size)
            except Exception:
                raise SearchError(
                    'Unable to download FFI cutout. Desired target '
                    'coordinates may be too near the edge of the FFI.')

            return _open_downloaded_file(path,
                                         quality_bitmask=quality_bitmask,
                                         targetid=table[0]['targetid'])

        else:
            if cutout_size is not None:
                warnings.warn(
                    '`cutout_size` can only be specified for TESS '
                    'Full Frame Image cutouts.', LightkurveWarning)
            from astroquery.mast import Observations
            path = Observations.download_products(
                table[:1], mrp_only=False,
                download_dir=download_dir)['Local Path'][0]

            # open() will determine filetype and return
            return _open_downloaded_file(path, quality_bitmask=quality_bitmask)
Example 30
def retrieve_observation(obsid, suffix=['FLC'], archive=False, clobber=False):
    """Simple interface for retrieving an observation from the MAST archive

    If the input obsid is for an association, it will request all members with
    the specified suffixes.

    Parameters
    -----------
    obsid : string
        ID for observation to be retrieved from the MAST archive.  Only the
        IPPSSOOT (rootname) of exposure or ASN needs to be provided; e.g., ib6v06060.

    suffix : list
        List containing suffixes of files which should be requested from MAST.

    archive : Boolean
        Retain copies of the downloaded files in the astroquery created sub-directories? Default is 'False'.

    clobber : Boolean
        Download and Overwrite existing files? Default is 'False'.

    Returns
    -------
    local_files : list
        List of filenames
    """
    local_files = []

    # Query MAST for the data with an observation type of either "science" or "calibration"
    obsTable = Observations.query_criteria(obs_id=obsid, obstype='all')
    # Catch the case where no files are found for download
    if len(obsTable) == 0:
        log.info("WARNING: Query for {} returned NO RESULTS!".format(obsid))
        return local_files

    dpobs = Observations.get_product_list(obsTable)
    dataProductsByID = Observations.filter_products(dpobs,
                                              productSubGroupDescription=suffix,
                                              extension='fits',
                                              mrp_only=False)

    # After the filtering has been done, ensure there is still data in the table for download.
    # If the table is empty, look for FLT images in lieu of FLC images. Only want one
    # or the other (not both!), so just do the filtering again.
    if len(dataProductsByID) == 0:
        log.info("WARNING: No FLC files found for {} - will look for FLT files instead.".format(obsid))
        suffix = ['FLT']
        dataProductsByID = Observations.filter_products(dpobs,
                                              productSubGroupDescription=suffix,
                                              extension='fits',
                                              mrp_only=False)

        # If still no data, then return.  An exception will eventually be thrown in
        # the higher level code.
        if len(dataProductsByID) == 0:
            log.info("WARNING: No FLC or FLT files found for {}.".format(obsid))
            return local_files
    allImages = []
    for tableLine in dataProductsByID:
        allImages.append(tableLine['productFilename'])
    log.info(allImages)
    if not clobber:
        rowsToRemove = []
        for rowCtr in range(0,len(dataProductsByID)):
            if os.path.exists(dataProductsByID[rowCtr]['productFilename']):
                log.info("{} already exists. File download skipped.".format(dataProductsByID[rowCtr]['productFilename']))
                rowsToRemove.append(rowCtr)
        if rowsToRemove:
            rowsToRemove.reverse()
            for rowNum in rowsToRemove:
                dataProductsByID.remove_row(rowNum)

    manifest = Observations.download_products(dataProductsByID, mrp_only=False)

    if not clobber:
        rowsToRemove.reverse()
        for rownum in rowsToRemove:
            if not manifest:
                local_files = allImages
                return local_files
            else:
                manifest.insert_row(rownum,vals=[allImages[rownum],"LOCAL","None","None"])

    download_dir = None
    for file,fileStatus in zip(manifest['Local Path'],manifest['Status']):
        if fileStatus != "LOCAL":
            # Identify what sub-directory was created by astroquery for the download
            if download_dir is None:
                file_path = file.split(os.sep)
                file_path.remove('.')
                download_dir = file_path[0]
            # Move or copy downloaded file to current directory
            local_file = os.path.abspath(os.path.basename(file))
            if archive:
                shutil.copy(file, local_file)
            else:
                shutil.move(file, local_file)
            # Record what files were downloaded and their current location
            local_files.append(os.path.basename(local_file))
        else:
            local_files.append(file)
    if not archive:
        # Remove astroquery created sub-directories
        shutil.rmtree(download_dir)
    return local_files
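
Usage following the docstring example; FLC files are requested first, with an automatic fall-back to FLT if none are found.

# Rootname from the docstring example
local_files = retrieve_observation('ib6v06060', suffix=['FLC'])
print(local_files)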
Example 31
    try:
        print(i)

        target_name = "TIC " + str(i)
        #print(target_name)
        data_table = Observations.query_criteria(provenance_name="SPOC",
                                                 objectname=target_name,
                                                 obs_collection="TESS",
                                                 dataproduct_type="timeseries",
                                                 radius=0)
        #print(data_table)
        #print(Observations.get_metadata("products"))
        product_list = Observations.get_product_list(data_table)
        #print(product_list)
        download_data = Observations.download_products(
            product_list,
            extension="lc.fits",
            download_dir="/data/wallaby/rmorris/SPOC/{}".format(tic_ids[j]))
        #print(download_data[1][0])

        print("Data downloaded correctly")

    except Exception:
        print("Connection to MAST Not Established")

    try:
        time = []
        norm_flux = []
        ###might not need background because already is PDCSAP flux?
        ticker = 0
        while ticker < len(download_data):
            file = fits.open(str(download_data[ticker][0]))
Example 32
from astroquery.mast import Mast, Observations
from astropy.io import fits
from astropy.visualization import make_lupton_rgb, ImageNormalize
import matplotlib.pyplot as plt
import reproject

result = Observations.query_object('M83')
selected_bands = result[(result['obs_collection'] == 'HST') &
                        (result['instrument_name'] == 'WFC3/UVIS') &
                        ((result['filters'] == 'F657N') |
                         (result['filters'] == 'F487N') |
                         (result['filters'] == 'F336W')) &
                        (result['target_name'] == 'MESSIER-083')]
prodlist = Observations.get_product_list(selected_bands)
filtered_prodlist = Observations.filter_products(prodlist)

downloaded = Observations.download_products(filtered_prodlist)

blue = fits.open(downloaded['Local Path'][2])
red = fits.open(downloaded['Local Path'][5])
green = fits.open(downloaded['Local Path'][8])

target_header = red['SCI'].header
green_repr, _ = reproject.reproject_interp(green['SCI'], target_header)
blue_repr, _ = reproject.reproject_interp(blue['SCI'], target_header)


rgb_img = make_lupton_rgb(ImageNormalize(vmin=0, vmax=1)(red['SCI'].data),
                          ImageNormalize(vmin=0, vmax=0.3)(green_repr),
                          ImageNormalize(vmin=0, vmax=1)(blue_repr),
                          stretch=0.1,
                          minimum=0,