Code Example #1
File: giants.py Project: skgrunblatt/giants
    def from_lightkurve(self, ind=0, ticid=None, method=None, cutout_size=9):
        """Download cutouts around target for each sector using Lightkurve
        and create light curves.
        Requires either `ind` or `ticid`.

        Parameters
        ----------
        ind : int
            Index of target array to download files for
        ticid : int
            TIC ID of desired target
        method : str
            Detrending method to apply to the raw light curve (e.g. 'pld')
        cutout_size : int or tuple
            Dimensions of TESScut cutout in pixels

        Returns
        -------
        LightCurveCollection :
            ~lightkurve.LightCurveCollection containing raw and corrected light curves.
        """
        if ticid is None:
            i = ind
            ticid = self.target_list.ID.values[i]
        # search TESScut for the desired target, read its sectors
        sectors = self._find_sectors(ticid)
        if isinstance(ticid, str):
            ticid = int(re.search(r'\d+', str(ticid)).group())
        print(
            f'Creating light curve for target {ticid} for sectors {sectors}.')
        # download the TargetPixelFileCollection for TESScut observations
        sr = lk.search_tesscut(f'TIC {ticid}')  # assumed search step; `sr` was undefined in this excerpt
        tpfc = sr.download_all(cutout_size=cutout_size)
        rlc = self._photometry(tpfc[0]).normalize()
        # track breakpoints between sectors
        self.breakpoints = [rlc.time[-1]]
        # iterate through TPFs and perform photometry on each of them
        for t in tpfc[1:]:
            single_rlc = self._photometry(t).normalize()
            rlc = rlc.append(single_rlc)
            self.breakpoints.append(single_rlc.time[-1])
        rlc.label = f'Raw {ticid}'
        # do the same but with de-trending (if you want)
        if method is not None:
            clc = self._photometry(tpfc[0], method=method).normalize()
            for t in tpfc[1:]:
                single_clc = self._photometry(t, method=method).normalize()
                clc = clc.append(single_clc)
            clc.label = f'PLD {ticid}'
            rlc = rlc.remove_nans()
            clc = clc.remove_nans()
            return lk.LightCurveCollection([rlc, clc])
        else:
            rlc = rlc.remove_nans()
            return lk.LightCurveCollection([rlc])
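A minimal usage sketch of the same pattern using only public lightkurve calls; the TIC number is a placeholder and `aperture_mask='threshold'` stands in for the class's internal `_photometry` step:

import lightkurve as lk

sr = lk.search_tesscut('TIC 176860064')  # hypothetical target
tpfc = sr.download_all(cutout_size=9)
lcs = [tpf.to_lightcurve(aperture_mask='threshold').normalize() for tpf in tpfc]
rlc = lcs[0]
breakpoints = [rlc.time[-1]]  # track sector boundaries, as above
for single_rlc in lcs[1:]:
    rlc = rlc.append(single_rlc)
    breakpoints.append(single_rlc.time[-1])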
Code Example #2
import lightkurve as lk

def combine_lcs_and_calculate_and_plot_trend(star, *input_lcs):
    lcs = []
    flat_lcs = []
    trend_lcs = []

    for lc in input_lcs:
        flat, trend = lc.flatten(window_length=301, return_trend=True)
        lcs.append(lc)
        flat_lcs.append(flat)
        trend_lcs.append(trend)
    lc = lk.LightCurveCollection(lcs).stitch()  # optionally: stitch(corrector_func=my_custom_corrector_func)
    flat = lk.LightCurveCollection(flat_lcs).stitch()
    trend = lk.LightCurveCollection(trend_lcs).stitch()
    ax = lc.errorbar(label=star)  # errorbar() returns a matplotlib axes ...
    trend.plot(ax=ax, color='red', lw=2, label='Trend')
    # which we can pass to the next plot() to use the same axes

    return (lc, flat, trend)
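A hedged usage sketch; the target name is illustrative and assumes a lightkurve version with `search_lightcurve`:

import lightkurve as lk
import matplotlib.pyplot as plt

input_lcs = lk.search_lightcurve('Pi Mensae', mission='TESS').download_all()
lc, flat, trend = combine_lcs_and_calculate_and_plot_trend('Pi Mensae', *input_lcs)
plt.show()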
Code Example #3
File: tpf.py Project: SSDataLab/psfmachine
    def lcs_in_tpf(self, tpf_number):
        """
        Returns the light curves from a given TPF as a lightkurve.LightCurveCollection.

        Parameters
        ----------
        tpf_number: int
            Index of the TPF
        """
        ldx = self.tpf_meta["sources"][tpf_number]
        return lk.LightCurveCollection([self.lcs[i] for i in ldx])
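A short usage sketch, assuming `machine` is a `TPFMachine` whose `fit_lightcurves` has already populated `lcs` and `tpf_meta` (see the `fit_lightcurves` excerpt in Code Example #13):

first_tpf_lcs = machine.lcs_in_tpf(0)  # sources that fall inside the first TPF
first_tpf_lcs[0].plot()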
Code Example #4
import lightkurve as lk

def batch_flatten_collection(lcs, kepid_df, method='GP', return_trend=False):
    """Batch flatten a LightCurveCollection, grouping by quarter

    Parameters:
    -----------
    lcs: LightCurveCollection
        A collection of lightcurves for a single planet host star system

    kepid_df: pandas DataFrame
        A pandas DataFrame containing the periods and t0 of each TCE in the
        system.  Uses the same column names as the DR24 catalog

    method: string
        Either "GP" or "spline"

    return_trend: boolean
        Whether or not to return the inferred trend.  Default: False

    Returns
    -------
    lcs_flattened : LightCurveCollection
        The flattened light curves, one per quarter
    lcs_trends : LightCurveCollection
        The inferred trends (only returned if `return_trend` is True)
    """
    # Flatten each quarter independently using the requested method
    lcs_flattened = lk.LightCurveCollection([])
    lcs_trends = lk.LightCurveCollection([])
    for lc_per_quarter in lcs:
        lc_per_quarter = lc_per_quarter.remove_nans()
        tce_mask = make_cadence_mask(lc_per_quarter, kepid_df)
        if method == 'GP':
            input_lc = lc_per_quarter[~tce_mask].remove_outliers()
            trend = estimate_gp_mean_model(input_lc, t=lc_per_quarter.time)
        elif method == 'spline':
            trend, knot_spacing = spline_model_comparison_BIC(
                lc_per_quarter, ~tce_mask)
        else:
            raise ValueError("method must be 'GP' or 'spline'")

        flattened = lc_per_quarter / trend
        lcs_flattened.append(flattened)
        lc_trend = lc_per_quarter.copy()
        lc_trend.flux = trend
        lcs_trends.append(lc_trend)
    if return_trend:
        return lcs_flattened, lcs_trends
    else:
        return lcs_flattened
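A hedged usage sketch; the target and CSV file are placeholders, and `kepid_df` is assumed to carry the DR24-style columns consumed by the project's `make_cadence_mask` helper:

import lightkurve as lk
import pandas as pd

lcs = lk.search_lightcurve('Kepler-10', mission='Kepler').download_all()
kepid_df = pd.read_csv('dr24_tces_for_target.csv')  # hypothetical file
flat_lcs, trend_lcs = batch_flatten_collection(lcs, kepid_df, method='spline',
                                               return_trend=True)
flat = flat_lcs.stitch()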
Code Example #5
import numpy as np
import lightkurve as lk
from copy import deepcopy

def cleanup_lkfc(lkf_collection, kic):
    """
    Join the segments of each quarter in a lk.LightCurveFileCollection into a single
    lk.LightCurve per quarter. Performs only the minimal detrending step remove_nans().
    
    Parameters
    ----------
    lkf_collection : lk.LightCurveFileCollection
        
    kic : int
        Kepler Input Catalogue (KIC) number for target
    
    Returns
    -------
    lkc : lk.LightCurveCollection
    
    """
    lkf_col = deepcopy(lkf_collection)

    quarters = []
    for i, lkf in enumerate(lkf_col):
        quarters.append(lkf.quarter)

    data_out = []
    for q in np.unique(quarters):
        lkf_list = []
        cadno = []

        for i, lkf in enumerate(lkf_col):
            if (lkf.quarter == q) and (lkf.targetid == kic):
                lkf_list.append(lkf)
                cadno.append(lkf.cadenceno.min())

        # sort the segments of this quarter by starting cadence number
        order = np.argsort(cadno)
        lkfc_list = [lkf_list[j] for j in order]

        # the operation "stitch" converts a LightCurveFileCollection to a single LightCurve
        lkfc_list = lk.LightCurveFileCollection(lkfc_list)
        lklc = lkfc_list.PDCSAP_FLUX.stitch().remove_nans()

        data_out.append(lklc)

    return lk.LightCurveCollection(data_out)
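A usage sketch under the older lightkurve 1.x API that this helper targets; the KIC number is a placeholder:

import lightkurve as lk

kic = 11904151  # hypothetical KIC number
lkf_collection = lk.search_lightcurvefile(f'KIC {kic}', mission='Kepler').download_all()
lkc = cleanup_lkfc(lkf_collection, kic)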
Code Example #6
best_fit_period = np.zeros(len(mass_id[beg_ind:end_ind]))
best_fit_uncert = np.zeros(len(mass_id[beg_ind:end_ind]))
#data_source = np.zeros(len(mass_id[beg_ind:end_ind]))
for j,i in enumerate(mass_id[beg_ind:end_ind]):
	print(i)
	coords = SkyCoord(ra=float(target_ra[j]), dec=float(target_dec[j]), unit=(u.deg, u.deg))
	outID,outEclipLong,outEclipLat,outSec,outCam,outCcd,outColPix, outRowPix, scinfo = tess_stars2px_function_entry(j, float(target_ra[j]), float(target_dec[j]))
	sector_source = []  # records where each sector's data came from (one entry per sector)
	#lightcurves = lk.LightCurveCollection()
	for num, sector in enumerate(outSec):
		if sector <= 31:
			# first try to find 2-min data for this sector
			try:
				lc = lk.search_lightcurvefile(coords, mission='TESS', sector=sector).download().PDCSAP_FLUX.remove_nans()
				if sector == outSec[0]:
					lightcurves = lk.LightCurveCollection([lc])  # constructor expects a list of light curves
				else:
					lightcurves.append(lc)
				sector_source.append('2-min')
				print('Sector {} downloaded with LightKurve'.format(sector))
			except KeyboardInterrupt:
				raise
			except Exception as e:
				print(e)
				print('Sector {} not available with LightKurve, trying eleanor'.format(sector))
				try:
					###then try to see if the fits file already exists
					time = []
					flux = []
					background = []
					data = fits.open('/data/wallaby/rmorris/GALAH/all_target_lc/{}_s{}_eleanor_data.fits'.format(i,sector))
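The excerpt above is truncated mid-fallback. A minimal sketch of its try-2-minute-data-first pattern, assuming the lightkurve 1.x API used in the excerpt:

import lightkurve as lk

def get_two_minute_lc(coords, sector):
    try:
        return (lk.search_lightcurvefile(coords, mission='TESS', sector=sector)
                .download().PDCSAP_FLUX.remove_nans())
    except Exception:
        return None  # caller falls back to eleanor / cached FITS files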
Code Example #7
File: giants.py Project: skgrunblatt/giants
    def from_eleanor(self, ticid, save_postcard=False):
        """Download light curves from Eleanor for desired target. Eleanor light
        curves include:
        - raw : raw flux light curve
        - corr : corrected flux light curve
        - pca : principal component analysis light curve
        - psf : point spread function photometry light curve

        Parameters
        ----------
        ticid : int
            TIC ID of desired target

        Returns
        -------
        LightCurveCollection :
            ~lightkurve.LightCurveCollection containing raw and corrected light curves.
        """
        '''
        # BUGFIX FOR ELEANOR (DEPRECATED)
        # -------------------------------
        from astroquery.mast import Observations
        server = 'https://mast.stsci.edu'
        Observations._MAST_REQUEST_URL = server + "/api/v0/invoke"
        Observations._MAST_DOWNLOAD_URL = server + "/api/v0.1/Download/file"
        Observations._COLUMNS_CONFIG_URL = server + "/portal/Mashup/Mashup.asmx/columnsconfig"
        '''

        # search TESScut to figure out which sectors you need (there's probably a better way to do this)
        sectors = self._find_sectors(ticid)
        if isinstance(ticid, str):
            ticid = int(re.search(r'\d+', str(ticid)).group())
        self.ticid = ticid
        print(
            f'Creating light curve for target {ticid} for sectors {sectors}.')
        # download target data for the desired source for only the first available sector

        star = eleanor.Source(tic=ticid, sector=int(sectors[0]), tc=True)
        try:
            data = eleanor.TargetData(star,
                                      height=11,
                                      width=11,
                                      bkg_size=27,
                                      do_psf=True,
                                      do_pca=True,
                                      try_load=True,
                                      save_postcard=save_postcard)
        except Exception:
            data = eleanor.TargetData(star,
                                      height=7,
                                      width=7,
                                      bkg_size=21,
                                      do_psf=True,
                                      do_pca=True,
                                      try_load=True,
                                      save_postcard=save_postcard)
        q = data.quality == 0
        # create raw flux light curve
        raw_lc = lk.LightCurve(time=data.time[q],
                               flux=data.raw_flux[q],
                               flux_err=data.flux_err[q],
                               label='raw',
                               time_format='btjd').remove_nans().normalize()
        corr_lc = lk.LightCurve(time=data.time[q],
                                flux=data.corr_flux[q],
                                flux_err=data.flux_err[q],
                                label='corr',
                                time_format='btjd').remove_nans().normalize()
        pca_lc = lk.LightCurve(time=data.time[q],
                               flux=data.pca_flux[q],
                               flux_err=data.flux_err[q],
                               label='pca',
                               time_format='btjd').remove_nans().normalize()
        psf_lc = lk.LightCurve(time=data.time[q],
                               flux=data.psf_flux[q],
                               flux_err=data.flux_err[q],
                               label='psf',
                               time_format='btjd').remove_nans().normalize()
        #track breakpoints between sectors
        self.breakpoints = [raw_lc.time[-1]]
        # iterate through extra sectors and append the light curves
        if len(sectors) > 1:
            for s in sectors[1:]:
                try:  # some sectors fail randomly
                    star = eleanor.Source(tic=ticid, sector=int(s), tc=True)
                    data = eleanor.TargetData(star,
                                              height=15,
                                              width=15,
                                              bkg_size=31,
                                              do_psf=True,
                                              do_pca=True,
                                              try_load=True)
                    q = data.quality == 0

                    raw_lc = raw_lc.append(
                        lk.LightCurve(
                            time=data.time[q],
                            flux=data.raw_flux[q],
                            flux_err=data.flux_err[q],
                            time_format='btjd').remove_nans().normalize())
                    corr_lc = corr_lc.append(
                        lk.LightCurve(
                            time=data.time[q],
                            flux=data.corr_flux[q],
                            flux_err=data.flux_err[q],
                            time_format='btjd').remove_nans().normalize())
                    pca_lc = pca_lc.append(
                        lk.LightCurve(
                            time=data.time[q],
                            flux=data.pca_flux[q],
                            flux_err=data.flux_err[q],
                            time_format='btjd').remove_nans().normalize())
                    psf_lc = psf_lc.append(
                        lk.LightCurve(
                            time=data.time[q],
                            flux=data.psf_flux[q],
                            flux_err=data.flux_err[q],
                            time_format='btjd').remove_nans().normalize())

                    self.breakpoints.append(raw_lc.time[-1])
                except Exception:
                    continue
        # store in a LightCurveCollection object and return
        return lk.LightCurveCollection([raw_lc, corr_lc, pca_lc, psf_lc])
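A hedged usage sketch; `target` stands for an instance of the enclosing class and the TIC number is a placeholder:

lcc = target.from_eleanor(ticid=176860064)
raw_lc, corr_lc, pca_lc, psf_lc = lcc  # order matches the docstring above
corr_lc.plot()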
Code Example #8
import numpy as np
import lightkurve as lk
from astropy.io import fits
from urllib.error import HTTPError

def fetch_hlsps(lc):
    '''Fetches HLSPs, given a lightkurve.KeplerLightCurve

    Parameters
    ----------
    lc : lightkurve.KeplerLightCurve
        Light curve to find high level science products for

    Returns
    -------
    hlsps : lightkurve.LightCurveCollection
        Collection of HLSP light curves
     '''

    if not isinstance(lc, lk.KeplerLightCurve):
        raise ValueError('Please pass a lightkurve.KeplerLightCurve.')

    v_url = ('https://archive.stsci.edu/hlsps/k2sff/'
             'c{0:02d}/{1}/{2:05d}/hlsp_k2sff_k2_lightcurve_{3}-c{0:02d}_kepler_v1_llc.fits'
             ''.format(lc.campaign, (lc.targetid//100000)*100000, lc.targetid - (lc.targetid//100000)*100000, lc.targetid))
    e_url = ('https://archive.stsci.edu/hlsps/everest/v2/'
             'c{0:02d}/{1}/{2:05d}/hlsp_everest_k2_llc_{3}-c{0:02d}_kepler_v2.0_lc.fits'
             ''.format(lc.campaign, (lc.targetid//100000)*100000, lc.targetid - (lc.targetid//100000)*100000, lc.targetid))
    s_url = ('https://archive.stsci.edu/hlsps/k2sc/v2/'
             'c{0:02d}/{1}/hlsp_k2sc_k2_llc_{2}-c{0:02d}_kepler_v2_lc.fits'
             ''.format(lc.campaign, (lc.targetid//100000)*100000, lc.targetid))

    hlsps = []

    try:
        hdu = fits.open(v_url)[1].data
        lc1 = lk.KeplerLightCurve(hdu['T'], hdu['FCOR'], cadenceno=hdu['CADENCENO'], meta={'arclength': hdu['ARCLENGTH']},
                                  channel=lc.channel, campaign=lc.campaign, quarter=lc.quarter,
                                  mission=lc.mission, targetid=lc.targetid, ra=lc.ra, dec=lc.dec,
                                  label='K2SFF (Vanderburg & Johnson)')
        lc1 = lc1[np.isfinite(lc1.time)]
        hlsps.append(lc1)
    except HTTPError:
        pass

    try:
        hdu = fits.open(e_url)[1].data
        lc1 = lk.KeplerLightCurve(hdu['TIME'], hdu['FCOR'], cadenceno=hdu['CADN'], quality=hdu['QUALITY'],
                                  channel=lc.channel, campaign=lc.campaign, quarter=lc.quarter,
                                  mission=lc.mission, targetid=lc.targetid, ra=lc.ra, dec=lc.dec,
                                  label='EVEREST (Luger)')
        lc1 = lc1[((hdu['QUALITY'] & 24) == 0)]

        lc1 = lc1[np.isfinite(lc1.time)]
        hlsps.append(lc1)

    except HTTPError:
        pass

    try:
        hdu = fits.open(s_url)[1].data
        lc1 = lk.KeplerLightCurve(hdu['time'], hdu['flux'], hdu['error'], cadenceno=hdu['cadence'], quality=hdu['quality'],
                                  channel=lc.channel, campaign=lc.campaign, quarter=lc.quarter,
                                  mission=lc.mission, targetid=lc.targetid, ra=lc.ra, dec=lc.dec,
                                  label='K2SC (Aigrain)')
        lc1 = lc1[np.isfinite(lc1.time)]
        hlsps.append(lc1)
    except HTTPError:
        pass

    return lk.LightCurveCollection(hlsps)
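A usage sketch with a placeholder EPIC target, under the lightkurve 1.x-era API whose `KeplerLightCurve` attributes (`campaign`, `targetid`) `fetch_hlsps` relies on:

import lightkurve as lk

lc = lk.search_lightcurvefile('EPIC 201367065', mission='K2').download().PDCSAP_FLUX
hlsps = fetch_hlsps(lc)
ax = lc.normalize().plot()
for hlsp in hlsps:
    hlsp.normalize().plot(ax=ax)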
Code Example #9
# Temporary workaround for slow MAST queries with lightkurve
observations = Observations.query_criteria(
    target_name=f"{tic}",
    radius=0.0001,
    project=["TESS"],
    obs_collection=["TESS"],
    provenance_name="SPOC",
    dataproduct_type="timeseries",
)
if not len(observations):
    raise RuntimeError("no 2-minute cadence data")
products = Observations.get_product_list(observations)
products = products[products["productSubGroupDescription"] == "LC"]
files = Observations.download_products(
    products, download_dir=tess_world.get_lightkurve_directory())
lcfs = lk.LightCurveCollection(
    [lk.open(file).PDCSAP_FLUX for file in files["Local Path"]])
lc = lcfs.stitch().remove_nans()

# Extract the data in the correct format
x = np.ascontiguousarray(lc.time, dtype=np.float64)
y = np.ascontiguousarray(1e3 * (lc.flux - 1), dtype=np.float64)
yerr = np.ascontiguousarray(1e3 * lc.flux_err, dtype=np.float64)

# Plot the light curve
plt.plot(x, y, "k", linewidth=0.5)
plt.xlabel("time [days]")
plt.ylabel("relative flux [ppt]")
plt.title(f"TOI {toi_num}; TIC {tic}", fontsize=14)

# Label the transits on the plot
for n in range(num_toi):
Code Example #10
File: superstamp.py Project: SSDataLab/psfmachine
    def fit_lightcurves(
        self,
        plot=False,
        iter_negative=False,
        fit_mean_shape_model=False,
        fit_va=False,
        sap=False,
    ):
        """
        Fit the sources in the data to get their light curves.
        By default it only uses the per-cadence PSF model to do the photometry.
        Alternatively, it can fit the mean-PSF and the mean-PSF-with-time model to
        the data; this is the original method implemented in `PSFmachine` and
        described in the paper. Aperture photometry is also available by creating
        aperture masks that follow the mean-PSF shape.

        This function creates the `lcs` attribute, which contains a collection of
        light curves in the form of a `lightkurve.LightCurveCollection`. Each entry
        in the collection is a `lightkurve.KeplerLightCurve` object with the
        different types of photometry (PSF per cadence, SAP, mean-PSF, and mean-PSF
        velocity-aberration corrected). Each `lightkurve.KeplerLightCurve` object
        also includes its associated metadata.

        The photometry can also be accessed independently from the following
        attributes that `fit_lightcurves` creates:
            * `ws` and `werrs` have the uncorrected PSF flux and flux errors.
            * `ws_va` and `werrs_va` have the PSF flux and flux errors corrected by
            velocity aberration.
            * `sap_flux` and `sap_flux_err` have the flux and flux errors computed
            using the aperture mask.
            * `ws_frame` and `werrs_frame` have the flux from the PSF at each cadence.

        Parameters
        ----------
        plot : bool
            Whether or not to show some diagnostic plots. These can be helpful
            for a user to see if the PRF and time dependent models are being calculated
            correctly.
        iter_negative : bool
            When fitting light curves, it isn't possible to force the flux to be
            positive. As such, when we find there are light curves that deviate into
            negative flux values, we can clip these targets out of the analysis and
            rerun the model.
            If iter_negative is True, PSFmachine will run up to 3 times, clipping out
            any negative targets each round. This is used when
            `fit_mean_shape_model` is `True`.
        fit_mean_shape_model : bool
            Will do PSF photometry using the mean-PSF.
        fit_va : bool
            Whether or not to fit Velocity Aberration (which implicitly will try to
            fit other kinds of time variability). `fit_mean_shape_model` must be set
            to `True`, otherwise this will be ignored. This will try to fit the
            "long term" trends in the dataset. If True, this will take slightly
            longer to fit. If you are interested in short term phenomena, like
            transits, you may find you do not need this to be set to True. If you
            have the time, it is recommended to run it.
        sap : boolean
            Whether or not to compute Simple Aperture Photometry. See
            `Machine.compute_aperture_photometry()` for details.
        """
        # create mean shape model to be used by SAP and mean-PSF
        self.build_shape_model(plot=plot, frame_index="mean")
        # do SAP first
        if sap:
            self.compute_aperture_photometry(
                aperture_size="optimal", target_complete=1, target_crowd=1
            )

        # do mean-PSF photometry and time model if asked
        if fit_mean_shape_model:
            self.build_time_model(plot=plot, downsample=True)
            # fit the OG time model
            self.fit_model(fit_va=fit_va)
            if iter_negative:
                # sources with more than 2% negative cadences
                negative_sources = np.where(
                    (self.ws_va < 0).sum(axis=0) > (0.02 * self.nt)
                )[0]
                idx = 1
                while len(negative_sources) > 0:
                    self.mean_model[negative_sources] *= 0
                    self.fit_model(fit_va=fit_va)
                    negative_sources = np.where((self.ws_va < 0).all(axis=0))[0]
                    idx += 1
                    if idx >= 3:
                        break

        # fit shape model at each cadence
        self.build_frame_shape_model()
        self.fit_frame_model()

        self.lcs = []
        for idx, s in self.sources.iterrows():
            meta = {
                "ORIGIN": "PSFMACHINE",
                "APERTURE": "PSF + SAP" if sap else "PSF",
                "LABEL": s.designation,
                "MISSION": self.meta["MISSION"],
                "RA": s.ra,
                "DEC": s.dec,
                "PMRA": s.pmra / 1000,
                "PMDEC": s.pmdec / 1000,
                "PARALLAX": s.parallax,
                "GMAG": s.phot_g_mean_mag,
                "RPMAG": s.phot_rp_mean_mag,
                "BPMAG": s.phot_bp_mean_mag,
            }

            attrs = [
                "channel",
                "module",
                "ccd",
                "camera",
                "quarter",
                "campaign",
                "row",
                "column",
                "mission",
            ]
            for attr in attrs:
                if attr in self.meta.keys():
                    meta[attr.upper()] = self.meta[attr]

            lc = lk.KeplerLightCurve(
                time=(self.time) * u.d,
                flux=self.ws_frame[:, idx] * (u.electron / u.second),
                flux_err=self.werrs_frame[:, idx] * (u.electron / u.second),
                meta=meta,
                time_format="jd",
            )
            if fit_mean_shape_model:
                lc["flux_NVA"] = (self.ws[:, idx]) * u.electron / u.second
                lc["flux_err_NVA"] = (self.werrs[:, idx]) * u.electron / u.second
                if fit_va:
                    lc["flux_VA"] = (self.ws_va[:, idx]) * u.electron / u.second
                    lc["flux_err_VA"] = (self.werrs_va[:, idx]) * u.electron / u.second
            if sap:
                lc["sap_flux"] = (self.sap_flux[:, idx]) * u.electron / u.second
                lc["sap_flux_err"] = (self.sap_flux_err[:, idx]) * u.electron / u.second

            self.lcs.append(lc)
        self.lcs = lk.LightCurveCollection(self.lcs)
        return
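A hedged usage sketch for the superstamp variant; the constructor name and input file are assumptions based on the psfmachine layout, not taken from the excerpt:

import psfmachine as pm

machine = pm.SSMachine.from_file('kplr_superstamp_q5.fits')  # hypothetical constructor and file
machine.fit_lightcurves(plot=False, fit_mean_shape_model=True, fit_va=True, sap=True)
machine.lcs[0].plot()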
Code Example #11
    lc = tpf.to_lightcurve(aperture_mask=tpf.pipeline_mask)
    flat, trend = lc.flatten(window_length=301, return_trend=True)
    lcs.append(lc)
    flat_lcs.append(flat)
    trend_lcs.append(trend)


    #lc.plot()
    #plt.show();
def my_custom_corrector_func(lc):
    corrected_lc = lc.normalize().flatten(window_length=401)
    return corrected_lc


lc = lk.LightCurveCollection(lcs).stitch()  # optionally: stitch(corrector_func=my_custom_corrector_func)
#lc = lcfs.PDCSAP_FLUX.stitch()
#lc.plot()
#plt.show()
#lc.scatter()
#plt.show()

flat = lk.LightCurveCollection(flat_lcs).stitch()
trend = lk.LightCurveCollection(trend_lcs).stitch()
#flat, trend = lc.flatten(window_length=301, return_trend=True)
ax = lc.errorbar(label=args.star)  # errorbar() returns a matplotlib axes ...
trend.plot(ax=ax, color='red', lw=2, label='Trend')
# which we can pass to the next plot() to use the same axes
plt.show()

flat.errorbar(label=args.star)
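A one-line sketch of the commented-out option above: `stitch()` accepts a callable that is applied to each light curve before combining, so the custom corrector can replace the default flattening:

lc = lk.LightCurveCollection(lcs).stitch(corrector_func=my_custom_corrector_func)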
Code Example #12
# imports inferred from the body of this excerpt
import warnings

import numpy as np
import lightkurve as lk
from scipy import sparse
from scipy.sparse import vstack
from astropy.timeseries.periodograms import lombscargle
from tqdm import tqdm


def SIP(tpfs,
        sigma=5,
        min_period=10,
        max_period=100,
        nperiods=300,
        npca_components=2,
        aperture_threshold=3,
        sff=False,
        sff_kwargs={}):
    """
    Systematics-insensitive periodogram for finding periods in long-period NASA TESS data.

    SIP can be used to find the best-fitting sinusoid period in long-period TESS data,
    while mitigating the instrument and scattered-light background systematics.

    A description of the concepts behind a SIP is given here in the context of K2 data:
    https://ui.adsabs.harvard.edu/abs/2016ApJ...818..109A/abstract

    Parameters
    ----------
    tpfs : lightkurve.TargetPixelFileCollection
        A collection of target pixel files from the TESS mission. This can be
        generated using lightkurve's search functions, for example:
            tpfs = lk.search_targetpixelfile('TIC 288735205', mission='tess').download_all()
    sigma : int or float
        SIP will run a single first iteration at a period of 27 days to remove
        significant outliers. Outliers above this sigma value will be clipped.
    min_period : float
        The minimum period for the periodogram
    max_period : float
        The maximum period for the periodogram
    nperiods : int
        The number of periods to fit
    npca_components : int
        Number of pca components to detrend with. Default is 2.
    aperture_threshold : float
        If there is no aperture mask from the pipeline, will create one. Set
        aperture_threshold to set the thresholding for the aperture creation.
        (See lightkurve's create_threshold_mask function.)
    sff : boolean
        Whether to run SFF detrending simultaneously. This is most useful for K2 data.
    sff_kwargs : dict
        Dictionary of SFF key words to pass. See lightkurve's SFFCorrector.

    Returns
    -------
    r : dict
        Dictionary containing the following entries:
            periods: the periods evaluated
            power: the power at each period (defined as the amplitude of the sinusoid)
            raw_lc: the original light curve from the input target pixel files
            corr_lc: the light curve with the best-fitting systematics removed
            period_at_max_power: the best-fit period of the sinusoid
            power_bkg: the power at each period for the pixels -outside- the aperture
            raw_lc_bkg: the background light curve (pixels outside the aperture)
    """

    # Get the un-background subtracted data
    if hasattr(tpfs[0], 'flux_bkg'):
        tpfs_uncorr = [(tpf + np.nan_to_num(tpf.flux_bkg.value))[np.isfinite(
            np.nansum(tpf.flux_bkg.value, axis=(1, 2)))] for tpf in tpfs]
    else:
        tpfs_uncorr = tpfs

    apers = [
        tpf.pipeline_mask if tpf.pipeline_mask.any() else
        tpf.create_threshold_mask(aperture_threshold) for tpf in tpfs_uncorr
    ]
    bkg_apers = [(~aper) & (np.nansum(tpf.flux, axis=0) != 0)
                 for aper, tpf in zip(apers, tpfs_uncorr)]
    lc = lk.LightCurveCollection([
        tpf.to_lightcurve(aperture_mask=aper)
        for tpf, aper in zip(tpfs_uncorr, apers)
    ]).stitch(lambda x: x).normalize()
    lc.flux_err.value[~np.isfinite(lc.flux_err.value)] = np.nanmedian(
        lc.flux_err.value)

    # Run the same routines on the background pixels
    lc_bkg = lk.LightCurveCollection([
        tpf.to_lightcurve(aperture_mask=bkg_aper)
        for tpf, bkg_aper in zip(tpfs_uncorr, bkg_apers)
    ]).stitch(lambda x: x).normalize()
    lc_bkg.flux_err.value[~np.isfinite(lc_bkg.flux_err.value)] = np.nanmedian(
        lc_bkg.flux_err.value)

    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        bkgs = [
            lk.correctors.DesignMatrix(
                tpf.flux.value[:, bkg_aper],
                name='bkg').pca(npca_components).append_constant().to_sparse()
            for tpf, bkg_aper in zip(tpfs_uncorr, bkg_apers)
        ]
        for bkg in bkgs:
            bkg.prior_mu[-1] = 1
            bkg.prior_sigma[-1] = 0.1

            bkg.prior_mu[:-1] = 0
            bkg.prior_sigma[:-1] = 0.1

    # Split at the data downlink
    bkgs = [
        bkg.split(list((np.where(np.diff(tpf.time.jd) > 0.3)[0] + 1)))
        for bkg, tpf in zip(bkgs, tpfs_uncorr)
    ]
    systematics_dm = vstack(bkgs)

    sigma_f_inv = sparse.csr_matrix(1 / lc.flux_err.value[:, None]**2)

    def fit_model(lc, mask=None, return_model=False):
        if mask is None:
            mask = np.ones(len(lc.flux.value), bool)
        sigma_w_inv = dm.X[mask].T.dot(dm.X[mask].multiply(
            sigma_f_inv[mask])).toarray()
        sigma_w_inv += np.diag(1. / dm.prior_sigma**2)

        B = dm.X[mask].T.dot(
            (lc.flux.value[mask] / lc.flux_err.value[mask]**2))
        B += dm.prior_mu / dm.prior_sigma**2
        w = np.linalg.solve(sigma_w_inv, B)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            werr = ((np.linalg.inv(sigma_w_inv))**0.5).diagonal()
        if return_model:
            return dm.X.dot(w)
        return w, werr

    # Make a dummy design matrix
    period = 27
    ls_dm = lk.correctors.DesignMatrix(
        lombscargle.implementations.mle.design_matrix(lc.time.jd,
                                                      frequency=1 / period,
                                                      bias=False,
                                                      nterms=1),
        name='LS').to_sparse()
    dm = lk.correctors.SparseDesignMatrixCollection(
        [systematics_dm, ls_dm]).to_designmatrix(name='design_matrix')

    if sff:
        sff_dm = []
        for tpf in tpfs_uncorr:
            s = lk.correctors.SFFCorrector(tpf.to_lightcurve())
            _ = s.correct(**sff_kwargs)
            sff_dm.append(s.dmc['sff'].to_sparse())
        sff_dm = vstack(sff_dm)
        dm = lk.correctors.SparseDesignMatrixCollection(
            [dm, sff_dm]).to_designmatrix(name='design_matrix')

    # Do a first pass at 27 days, just to find ridiculous outliers
    mod = fit_model(lc, return_model=True)
    mask = ~(lc - mod * lc.flux.unit).remove_outliers(return_mask=True,
                                                      sigma=sigma)[1]

    # Loop over some periods we care about
    periods = 1 / np.linspace(1 / min_period, 1 / max_period, nperiods)
    ws = np.zeros((len(periods), dm.X.shape[1]))
    ws_err = np.zeros((len(periods), dm.X.shape[1]))
    ws_bkg = np.zeros((len(periods), dm.X.shape[1]))
    ws_err_bkg = np.zeros((len(periods), dm.X.shape[1]))

    for idx, period in enumerate(
            tqdm(periods, desc='Running pixels in aperture')):
        dm.X[:, -ls_dm.shape[1]:] = lombscargle.implementations.mle.design_matrix(
            lc.time.jd, frequency=1 / period, bias=False, nterms=1)
        ws[idx], ws_err[idx] = fit_model(lc, mask=mask)
        ws_bkg[idx], ws_err_bkg[idx] = fit_model(lc_bkg, mask=mask)
    power = (ws[:, -2]**2 + ws[:, -1]**2)**0.5
    am = np.argmax(power)
    dm.X[:, -ls_dm.shape[1]:] = lombscargle.implementations.mle.design_matrix(
        lc.time.jd, frequency=1 / periods[am], bias=False, nterms=1)
    mod = dm.X[:, :-2].dot(ws[am][:-2])

    power_bkg = (ws_bkg[:, -2]**2 + ws_bkg[:, -1]**2)**0.5

    r = {
        'periods': periods,
        'power': power,
        'raw_lc': lc,
        'power_bkg': power_bkg,
        'raw_lc_bkg': lc_bkg,
        'corr_lc': lc - mod * lc.flux.unit + 1,
        'period_at_max_power': periods[am]
    }

    return r
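A usage sketch following the docstring's own example target:

import lightkurve as lk
import matplotlib.pyplot as plt

tpfs = lk.search_targetpixelfile('TIC 288735205', mission='TESS').download_all()
r = SIP(tpfs, min_period=10, max_period=100, nperiods=300)
plt.plot(r['periods'], r['power'])
plt.axvline(r['period_at_max_power'], color='r', ls='--')
plt.xlabel('period [days]')
plt.ylabel('power')
plt.show()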
Code Example #13
File: tpf.py Project: SSDataLab/psfmachine
    def fit_lightcurves(
        self,
        plot=False,
        fit_va=True,
        iter_negative=True,
        load_shape_model=False,
        shape_model_file=None,
        sap=True,
    ):
        """
        Fit the sources inside the TPFs passed to `TPFMachine`.
        This function creates the `lcs` attribute, which contains a collection of
        light curves in the form of a `lightkurve.LightCurveCollection`. Each entry
        in the collection is a `lightkurve.KeplerLightCurve` object with the
        different types of photometry (SAP, PSF, and PSF velocity-aberration
        corrected). Each `lightkurve.KeplerLightCurve` object also includes its
        associated metadata.
        The photometry can also be accessed independently from the following
        attributes that `fit_lightcurves` creates:
            * `ws` and `werrs` have the uncorrected PSF flux and flux errors.
            * `ws_va` and `werrs_va` have the PSF flux and flux errors corrected by
            velocity aberration.
            * `sap_flux` and `sap_flux_err` have the flux and flux errors computed
            using the aperture mask.

        Parameters
        ----------
        plot : bool
            Whether or not to show some diagnostic plots. These can be helpful
            for a user to see if the PRF and time dependent models are being calculated
            correctly.
        fit_va : bool
            Whether or not to fit Velocity Aberration (which implicitly will try to fit
            other kinds of time variability). This will try to fit the "long term"
            trends in the dataset. If True, this will take slightly longer to fit.
            If you are interested in short term phenomena, like transits, you may
            find you do not need this to be set to True. If you have the time, it
            is recommended to run it.
        iter_negative : bool
            When fitting light curves, it isn't possible to force the flux to be
            positive.
            As such, when we find there are light curves that deviate into negative flux
            values, we can clip these targets out of the analysis and rerun the model.
            If iter_negative is True, PSFmachine will run up to 3 times, clipping out
            any negative targets each round.
        load_shape_model : bool
            Whether or not to load the PRF shape model from disk. Default models were
            computed from FFIs of the same channel and quarter.
        shape_model_file : string
            Path to the PRF model file to be passed to `load_shape_model(input)`. If
            None, precomputed models will be downloaded from the Zenodo repo.
        sap : boolean
            Whether or not to compute Simple Aperture Photometry. See
            `Machine.compute_aperture_photometry()` for details.
        """
        # use PRF model from FFI or create one with TPF data
        if load_shape_model:
            self.load_shape_model(input=shape_model_file, plot=plot)
        else:
            self.build_shape_model(plot=plot)
        # sap photometry is independent of the time model and fitting the PSF model.
        # it only uses the shape mean_model
        if sap:
            self.compute_aperture_photometry(aperture_size="optimal",
                                             target_complete=1,
                                             target_crowd=1)
        self.build_time_model(plot=plot)
        self.fit_model(fit_va=fit_va)
        if iter_negative:
            # sources with more than 2% negative cadences
            negative_sources = np.where(
                (self.ws_va < 0).sum(axis=0) > (0.02 * self.nt)
            )[0]
            idx = 1
            while len(negative_sources) > 0:
                self.mean_model[negative_sources] *= 0
                self.fit_model(fit_va=fit_va)
                negative_sources = np.where((self.ws_va < 0).all(axis=0))[0]
                idx += 1
                if idx >= 3:
                    break

        self.lcs = []
        for idx, s in self.sources.iterrows():

            meta = self._make_meta_dict(idx, s, sap)

            if fit_va:
                flux, flux_err = (
                    (self.ws_va[:, idx]) * u.electron / u.second,
                    self.werrs_va[:, idx] * u.electron / u.second,
                )
            else:
                flux, flux_err = (
                    (self.ws[:, idx]) * u.electron / u.second,
                    self.werrs[:, idx] * u.electron / u.second,
                )
            lc = lk.KeplerLightCurve(
                time=(self.time) * u.d,
                flux=flux,
                flux_err=flux_err,
                meta=meta,
                time_format="jd",
            )

            if fit_va:
                lc["psf_flux_NVA"] = (self.ws[:, idx]) * u.electron / u.second
                lc["psf_flux_err_NVA"] = (
                    self.werrs[:, idx]) * u.electron / u.second
            if sap:
                lc["sap_flux"] = (self.sap_flux[:,
                                                idx]) * u.electron / u.second
                lc["sap_flux_err"] = (
                    self.sap_flux_err[:, idx]) * u.electron / u.second
            self.lcs.append(lc)
        self.lcs = lk.LightCurveCollection(self.lcs)
        return
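An end-to-end sketch mirroring the psfmachine README-style workflow; the target and search parameters are placeholders:

import lightkurve as lk
import psfmachine as pm

tpfs = lk.search_targetpixelfile('Kepler-16', mission='Kepler', quarter=12,
                                 radius=100, limit=10).download_all()
machine = pm.TPFMachine.from_TPFs(tpfs)
machine.fit_lightcurves(fit_va=True, sap=True)
machine.lcs[0].plot()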