Example #1
# Imports needed by the two functions below. mprint, ctext, mag and the message
# levels (log, info, warn, error) are assumed to be helpers from the surrounding
# SMURFS package and are not shown here.
import os
import logging

import numpy as np
import astropy.units as u
from astropy.io import fits
from astropy.table import Table
from pandas import read_csv
from lightkurve import LightCurve

logger = logging.getLogger(__name__)

def load_file(file: str,
              clip: float = 4,
              it: int = 1,
              apply_file_correction: bool = False) -> LightCurve:
    """
    Loads and normalizes target content.

    :param file: Name of target including path
    :param clip: sigma value passed to ``remove_outliers``
    :param it: maximum number of clipping iterations (``maxiters``)
    :param apply_file_correction: if ``True``, shifts the flux to positive values, clips outliers and converts the flux to magnitudes
    :return: LightCurve object
    """
    if not os.path.exists(file):
        raise IOError(ctext(f"File {file} doesn't exist!", error))

    mprint(f"Reading data from {file} ...", log)
    try:
        data = np.loadtxt(file)
    except ValueError:
        data = read_csv(file)
        data = np.array((data.time, data.flux))
    if data.shape[0] > data.shape[1]:
        data = data.T

    if data.shape[0] == 2:
        lc = LightCurve(time=data[0], flux=data[1])
    else:
        lc = LightCurve(time=data[0], flux=data[1], flux_err=data[2])

    lc = lc.remove_nans()
    if apply_file_correction:
        # Shift the flux so it is strictly positive before converting to magnitudes
        lc.flux = lc.flux - float(np.amin(lc.flux)) + 10
        lc = lc.remove_outliers(clip, maxiters=it)
        lc = mag(lc)
        lc = lc.remove_nans()
    else:
        if np.amax(np.abs(lc.flux)) > 10:
            mprint(
                f"It seems as if your flux isn't in magnitudes. Be aware, that SMURFS expects the flux in magnitudes. "
                f"Continuing ...", warn)
        if np.abs(np.median(lc.flux)) > 1:
            mprint(
                f"The median of your flux is {'%.2f' % np.median(lc.flux)}. To do a proper analysis, the median should "
                f"be close to 0. Be aware, that this might cause issues. Continuing...",
                warn)
    mprint(
        f"Total observation length: {'%.2f' % (lc.time[-1] - lc.time[0])} days.",
        log)
    mprint("Extracted data from target!", info)
    return lc
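
# A minimal usage sketch for load_file, not part of the original example. "target.txt"
# is a hypothetical file name; the file is expected to hold two or three whitespace- or
# comma-separated columns (time, flux and optionally flux error).
def demo_load_file(path: str = "target.txt") -> LightCurve:
    lc = load_file(path, clip=4, it=1, apply_file_correction=False)
    mprint(f"Loaded {len(lc.time)} cadences from {path}", info)
    return lc
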
def loadLC(folderName, downloadDir, errorIfNot2Min=True, dumpHeader=False, delimiter="|", fluxType="PDCSAP", normalised=True):
    """
    Loads multiple and single Tess light curves and creates a Lightkurve object to store them in. 
    Multiple LCs are detected when the folderName string contains a delimiter.
    
    :param folderName: name of the data folder
    :param downloadDir: name of the root data folder
    :param errorIfNot2Min: behaviour if cadence is not 2 min if `True` (default), raises an error, else warning
    :param dumpHeader: if `True` prints the header of the data
    :param delimiter: delimiter chosen to separate the data path, defualt: |
    :param fluxType: SAP or PDCSAP
    :param normalised: if `True` returns the median-normalised flux
    """

    lc = None
    if "|" in folderName:
        folderNames = folderName.split(delimiter)
    else:
        folderNames = [folderName]
        
    for folderName in folderNames:
        imgFname = "{}_lc.fits".format(folderName)
        imgFname = os.path.join(downloadDir, folderName, imgFname)
        head = fits.getheader(imgFname)
        sector = head["sector"]
        if dumpHeader:
            print(repr(head))
        lightCurveData = Table.read(imgFname)
        # Median cadence in minutes, derived from consecutive time stamps
        cadence = np.nanmedian((lightCurveData["TIME"][1:] - lightCurveData["TIME"][:-1]) * 24 * 60)
        if np.abs(cadence - 2.) < 0.5:
            logger.info("Cadence is 2 min for {}".format(imgFname))
        else:
            if errorIfNot2Min:
                raise RuntimeError("Cadence is {:1.1f} min for {}".format(cadence, imgFname))
            else:
                logger.warning("Cadence is {:1.1f} min for {}".format(cadence, imgFname))
        
        lightCurveData["TIME"].unit = u.day
        time = lightCurveData["TIME"]
        flux = lightCurveData["{}_FLUX".format(fluxType)] 
        fluxErr = lightCurveData["{}_FLUX_ERR".format(fluxType)]

        meta = {
            "TIME": lightCurveData["TIME"],
            "MOM_CENTR1": lightCurveData["MOM_CENTR1"],
            "MOM_CENTR2": lightCurveData["MOM_CENTR2"],
            "MOM_CENTR1_ERR": lightCurveData["MOM_CENTR1_ERR"],
            "MOM_CENTR2_ERR": lightCurveData["MOM_CENTR2_ERR"],
            "POS_CORR1": lightCurveData["POS_CORR1"],
            "POS_CORR2": lightCurveData["POS_CORR2"],
            }

        lcTemp = LightCurve(time=time, flux=flux, flux_err=fluxErr, meta=meta)
        lcTemp = lcTemp.remove_nans()
        if normalised:
            lcTemp = lcTemp.normalize()
            
        if lc is None:
            lc = lcTemp
            sectors = sector
        else:
            lc = lc.append(lcTemp)
            sectors = "{},{}".format(sectors, sector)
    
    ids = np.argsort(lc.time)
    lc = lc[ids]
    
    return lc, sectors
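
# A minimal usage sketch for loadLC, not part of the original example. The folder names
# and download directory are hypothetical; two sectors are stitched together by joining
# their folder names with the delimiter.
if __name__ == "__main__":
    lc, sectors = loadLC("tess_s0001|tess_s0002", downloadDir="data",
                         errorIfNot2Min=False, fluxType="PDCSAP", normalised=True)
    logger.info("Combined light curve covers sectors %s", sectors)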