def test_build(self):
    """Builds light curves for several mission targets and checks that the
    resulting curves contain the expected number of cadence points."""
    expected_counts = [
        (MissionObjectInfo("TIC 352315023", [13]), 18140, 20479),
        (MissionObjectInfo("KIC 12557548", [13]), 126304, 130290),
        (MissionObjectInfo("EPIC 211945201", 'all'), 106384, 116820),
        (MissionFfiIdObjectInfo("KIC 12557548", [1]), 1543, 1639),
        (MissionFfiIdObjectInfo("EPIC 211945201", [5]), 3271, 3663),
        (MissionFfiIdObjectInfo("TIC 352315023", [13]), 1223, 1320),
    ]
    for object_info, expected_lc_len, expected_lc_data_len in expected_counts:
        lc_build = LcBuilder().build(object_info, "./")
        self.assertEqual(expected_lc_len, len(lc_build.lc))
        self.assertEqual(expected_lc_data_len, len(lc_build.lc_data))
# Example #2 (score: 0)
 def test_run_with_rms_mask(self):
     """Runs Sherlock with the high-RMS mask enabled and verifies that the
     expected run files are produced, cleaning the run directory afterwards."""
     run_dir = "TIC181804752_FFI_[9]"
     try:
         object_info = MissionFfiIdObjectInfo("TIC 181804752", [9],
                                              high_rms_enabled=True)
         target = SherlockTarget(object_info, max_runs=1, oversampling=0.05)
         Sherlock([target]).run()
         self.__assert_run_files(run_dir)
     finally:
         self.__clean(run_dir)
# Example #3 (score: 0)
 def test_run(self):
     """Runs Sherlock with smoothing and the high-RMS mask disabled and checks
     that the expected run files (without the RMS mask) are produced."""
     run_dir = "TIC181804752_FFI_[9]"
     try:
         object_info = MissionFfiIdObjectInfo("TIC 181804752", [9],
                                              smooth_enabled=False,
                                              high_rms_enabled=False)
         target = SherlockTarget(object_info,
                                 detrends_number=1,
                                 max_runs=1,
                                 oversampling=0.05)
         Sherlock([target]).run()
         self.__assert_run_files(run_dir, assert_rms_mask=False)
     finally:
         self.__clean(run_dir)
# Example #4 (score: 0)
 def test_run_with_explore(self):
     """Runs Sherlock with the second constructor argument set to True
     (presumably explore mode) and checks that only the initial periodogram
     is generated, without any numbered run directories."""
     run_dir = None
     try:
         object_info = MissionFfiIdObjectInfo("TIC 181804752", [9],
                                              high_rms_enabled=True)
         target = SherlockTarget(object_info,
                                 detrends_number=1,
                                 oversampling=0.05)
         Sherlock([target], True).run()
         run_dir = "TIC181804752_FFI_[9]"
         self.assertTrue(os.path.exists(run_dir))
         periodogram_path = run_dir + "/Periodogram_Initial_TIC181804752_FFI_[9].png"
         self.assertTrue(os.path.exists(periodogram_path))
         # No search runs should have been executed.
         self.assertFalse(os.path.exists(run_dir + "/1"))
     finally:
         self.__clean(run_dir)
# Example #5 (score: 0)
 def test_run_with_star_info(self):
     """Runs Sherlock with fully customized StarInfo values and verifies that
     they are echoed in the run report log.

     The radius_min/radius_max and mass_min/mass_max arguments act as error
     bars, not absolute bounds: the log assertions below expect the derived
     values (e.g. mass_min = 0.3 - 0.05 = 0.25, mass_max = 0.3 + 0.075 = 0.375).
     """
     run_dir = None
     try:
         Sherlock([
             SherlockTarget(MissionFfiIdObjectInfo(
                 "TIC 181804752", [9],
                 high_rms_enabled=True,
                 star_info=StarInfo(ld_coefficients=(0.15, 0.25),
                                    teff=4000,
                                    lum=1.50,
                                    logg=0.15,
                                    radius=0.4,
                                    radius_min=0.10,
                                    radius_max=0.15,
                                    mass=0.3,
                                    mass_min=0.05,
                                    mass_max=0.075,
                                    ra=13.132258,
                                    dec=64.334238)),
                            detrends_number=1,
                            max_runs=1,
                            oversampling=0.05)
         ], True).run()
         run_dir = "TIC181804752_FFI_[9]"
         self.assertTrue(os.path.exists(run_dir))
         self.assertTrue(
             os.path.exists(
                 run_dir + "/Periodogram_Initial_TIC181804752_FFI_[9].png"))
         # Second Sherlock argument True: no numbered run directory is expected.
         self.assertFalse(os.path.exists(run_dir + "/1"))
         with open(run_dir + '/TIC181804752_FFI_[9]_report.log') as f:
             content = f.read()
             self.assertTrue('mass = 0.3' in content)
             # Derived from mass -/+ the supplied errors (0.05 and 0.075).
             self.assertTrue('mass_min = 0.25' in content)
             self.assertTrue('mass_max = 0.375' in content)
             self.assertTrue('radius = 0.4' in content)
             # Derived from radius -/+ the supplied errors (0.10 and 0.15).
             self.assertTrue('radius_min = 0.3' in content)
             self.assertTrue('radius_max = 0.55' in content)
             self.assertTrue(
                 'limb-darkening estimates using quadratic LD (a,b)= (0.15, 0.25)'
                 in content)
             self.assertTrue('teff = 4000' in content)
             self.assertTrue('logg = 0.15' in content)
             self.assertTrue('lum = 1.50' in content)
     finally:
         self.__clean(run_dir)
# Example #6 (score: 0)
 def test_run_with_transit_customs(self):
     """Runs Sherlock with customized transit-search settings and checks that
     each setting is echoed in the run report log.

     NOTE(review): fit_method is set to "bls" but the log is asserted to read
     'Fit method: box' — presumably BLS (Box Least Squares) is logged as
     "box"; confirm against the Sherlock logging code.
     """
     run_dir = None
     try:
         sherlock = Sherlock([SherlockTarget(MissionFfiIdObjectInfo("TIC 181804752", [9], high_rms_enabled=True),
                                             detrends_number=1, max_runs=1, oversampling=0.1, t0_fit_margin=0.09,
                                             duration_grid_step=1.075, fit_method="bls",
                                             best_signal_algorithm="quorum", quorum_strength=0.31)], False)\
             .run()
         run_dir = "TIC181804752_FFI_[9]"
         with open(run_dir + '/TIC181804752_FFI_[9]_report.log') as f:
             content = f.read()
             self.assertTrue('Fit method: box' in content)
             self.assertTrue('Duration step: 1.075' in content)
             self.assertTrue('T0 Fit Margin: 0.09' in content)
             self.assertTrue('Oversampling: 0.1' in content)
             self.assertTrue('Signal scoring algorithm: quorum' in content)
             self.assertTrue(
                 'Quorum algorithm vote strength: 0.31' in content)
         self.__assert_run_files(run_dir)
     finally:
         self.__clean(run_dir)
# Example #7 (score: 0)
 def test_run_epic_ffi(self):
     """Runs Sherlock over EPIC 249631677 FFI data and checks that both the
     initial periodogram and the first run directory are created."""
     run_dir = None
     try:
         object_info = MissionFfiIdObjectInfo("EPIC 249631677",
                                              'all',
                                              high_rms_enabled=True,
                                              auto_detrend_enabled=False)
         target = SherlockTarget(object_info,
                                 detrends_number=1,
                                 max_runs=1,
                                 oversampling=0.05)
         Sherlock([target], False).run()
         run_dir = "EPIC249631677_FFI_all"
         self.assertTrue(os.path.exists(run_dir))
         periodogram_path = run_dir + "/Periodogram_Initial_EPIC249631677_FFI_all.png"
         self.assertTrue(os.path.exists(periodogram_path))
         # One search run should have been executed.
         self.assertTrue(os.path.exists(run_dir + "/1"))
     finally:
         self.__clean(run_dir)
 def test_long_cadence(self):
     """Builds an FFI (long cadence) light curve for TIC 352315023 and checks
     the cadence value, that data points exist, and the TESS star parameters."""
     object_info = MissionFfiIdObjectInfo("TIC 352315023", 'all')
     build_result = LcBuilder().build(object_info, "./")
     self.assertEqual(build_result.cadence, 600)
     self.assertGreater(len(build_result.lc), 0)
     self.__test_tess_star_params(build_result.star_info)
# Example #9 (score: 0)
    # 'auto_detrend_ratio' value, which ensures that we are detrending the light curve at 'auto_detrend_ratio' times
    # the stronger period.

    # 7 Set the maximum number of runs to be executed.
    # 8 Select the period protect value, which restricts the minimum detrending window length.
    # 9 Select the min period for a transit to be fit.
    # 10 Select the max period for a transit to be fit.
    # 11 Select the binning to calculate RMS
    # 12 Select the number of CPU cores to be used for the transit search.
    # 13 Select the found transits masking method. We use subtract here as example, but it is discouraged.
    # 14 Select the best signal algorithm, which provides a different implementation to decide which of the detrend
    # signals is the stronger one to be selected.
    # 15 Set the strength of the quorum algorithm votes, which makes every vote that is found to increase the SNR by
    # a factor of 1.2 for our selection.
    # NOTE: the first key was previously misspelled as "smoooth_enabled", which would make
    # SherlockTarget(**arguments) fail with an unexpected keyword argument; the parameter
    # accepted by SherlockTarget is "smooth_enabled".
    arguments = {"smooth_enabled": True, "high_rms_enabled": True, "high_rms_threshold": 2.5,
                 "high_rms_bin_hours": 3,
                 "detrends_number": 12, "detrend_method": "gp", "cpu_cores": 2, "auto_detrend_enabled": True,
                 "auto_detrend_ratio": 0.33, "auto_detrend_method": "cosine",
                 "max_runs": 10, "period_protect": 12, "period_min": 1, "period_max": 10, "bin_minutes": 20,
                 "run_cores": 3, "snr_min": 6, "sde_min": 6, "mask_mode": "subtract",
                 "best_signal_algorithm": 'quorum', "quorum_strength": 1.2}
    # Run SHERLOCK over a mix of target definitions, all sharing the same tuning arguments.
    sherlock = Sherlock([SherlockTarget(MissionFfiIdObjectInfo("TIC 181804752", 'all'), **arguments),
                         SherlockTarget(MissionObjectInfo("TIC 259168516", [15]), **arguments),
                         SherlockTarget(MissionObjectInfo('KIC 10905746', 'all'), **arguments),
                         SherlockTarget(MissionObjectInfo('EPIC 249631677', 'all'), **arguments),
                         SherlockTarget(MissionInputObjectInfo("TIC 181804752", 'example_lc.csv',
                                                               initial_mask=[[1625, 1626], [1645, 1646]]), **arguments),
                         SherlockTarget(InputObjectInfo("example_lc.csv", initial_detrend_period=0.8), **arguments)]) \
        .run()
    print("Analysis took " + elapsed() + "s")
# Example #10 (score: 0)
from sherlockpipe.sherlock import Sherlock
from lcbuilder.objectinfo.InputObjectInfo import InputObjectInfo
from lcbuilder.objectinfo.MissionFfiCoordsObjectInfo import MissionFfiCoordsObjectInfo
from lcbuilder.objectinfo.MissionFfiIdObjectInfo import MissionFfiIdObjectInfo
from lcbuilder.objectinfo.MissionInputObjectInfo import MissionInputObjectInfo
from lcbuilder.objectinfo.MissionObjectInfo import MissionObjectInfo

from sherlockpipe.sherlock_target import SherlockTarget


@contextmanager
def elapsed_timer():
    """Yield a zero-argument callable reporting elapsed seconds as a string.

    While the managed block runs, the callable returns a live reading; once
    the block exits, it is frozen to the total duration measured at exit.
    """
    start = default_timer()
    now = default_timer
    yield lambda: str(now() - start)
    end = default_timer()
    # Rebind so the yielded callable keeps returning the final total.
    now = lambda: end


# Time the whole multi-target run; `elapsed` returns the seconds spent so far.
with elapsed_timer() as elapsed:
    # Adding several kinds of objects to the run: one short cadence TIC, one FFI TIC, one coordinates FFI, one input
    # file related to a TIC and one plain input file.
    # Ensure that your input light curve CSV files have three columns: #TBJD,flux,flux_err
    sherlock = Sherlock([SherlockTarget(MissionObjectInfo("TIC 181804752", 'all')),
                                        SherlockTarget(MissionFfiIdObjectInfo("TIC 259168516", [14, 15])),
                                        SherlockTarget(MissionFfiCoordsObjectInfo(14, 19, 'all')),
                                        SherlockTarget(MissionInputObjectInfo("TIC 470381900", "example_lightcurve.csv")),
                                        SherlockTarget(InputObjectInfo("example_lc.csv"))])\
        .run()
    # NOTE(review): `sherlock` is bound to the return value of .run(), not the Sherlock instance.
    print("Analysis took " + elapsed() + "s")
# Example #11 (score: 0)
 def build_object_info(self,
                       target_name,
                       author,
                       sectors,
                       file,
                       cadence,
                       initial_mask,
                       initial_transit_mask,
                       star_info,
                       aperture,
                       eleanor_corr_flux='pca_flux',
                       outliers_sigma=None,
                       high_rms_enabled=True,
                       high_rms_threshold=2.5,
                       high_rms_bin_hours=4,
                       smooth_enabled=False,
                       auto_detrend_enabled=False,
                       auto_detrend_method="cosine",
                       auto_detrend_ratio=0.25,
                       auto_detrend_period=None,
                       prepare_algorithm=None,
                       reduce_simple_oscillations=False,
                       oscillation_snr_threshold=4,
                       oscillation_amplitude_threshold=0.1,
                       oscillation_ws_scale=60,
                       oscillation_min_period=0.002,
                       oscillation_max_period=0.2,
                       binning=1):
     """Builds the appropriate ObjectInfo implementation for a target definition.

     Dispatch rules (a cadence threshold of 300 s separates short cadence
     from FFI products):
       * mission id, no file, cadence <= 300   -> MissionObjectInfo
       * mission id, no file, cadence > 300    -> MissionFfiIdObjectInfo
       * mission id with an input file         -> MissionInputObjectInfo
       * coordinates, no mission, cadence > 300 -> MissionFfiCoordsObjectInfo
       * input file only, no mission           -> InputObjectInfo
     Any other combination raises ValueError.
     """
     # parse_object_id yields (mission, mission_prefix, id); mission is None
     # when target_name is not a recognized mission identifier.
     mission, mission_prefix, id = MissionLightcurveBuilder(
     ).parse_object_id(target_name)
     # Only try to interpret the target as sky coordinates when it is not a mission id.
     coords = None if mission is not None else self.parse_coords(
         target_name)
     # NOTE(review): when cadence is None and mission is also None, this lookup
     # uses None as the dict key — confirm DEFAULT_CADENCES_FOR_MISSION handles it.
     cadence = cadence if cadence is not None else self.DEFAULT_CADENCES_FOR_MISSION[
         mission]
     if mission is not None and file is None and cadence <= 300:
         # Short cadence mission product.
         return MissionObjectInfo(
             target_name, sectors, author, cadence, initial_mask,
             initial_transit_mask, star_info, aperture, outliers_sigma,
             high_rms_enabled, high_rms_threshold, high_rms_bin_hours,
             smooth_enabled, auto_detrend_enabled, auto_detrend_method,
             auto_detrend_ratio, auto_detrend_period, prepare_algorithm,
             reduce_simple_oscillations, oscillation_snr_threshold,
             oscillation_amplitude_threshold, oscillation_ws_scale,
             oscillation_min_period, oscillation_max_period, binning)
     elif mission is not None and file is None and cadence > 300:
         # FFI (long cadence) mission product identified by mission id.
         return MissionFfiIdObjectInfo(
             target_name, sectors, author, cadence, initial_mask,
             initial_transit_mask, star_info, aperture, eleanor_corr_flux,
             outliers_sigma, high_rms_enabled, high_rms_threshold,
             high_rms_bin_hours, smooth_enabled, auto_detrend_enabled,
             auto_detrend_method, auto_detrend_ratio, auto_detrend_period,
             prepare_algorithm, reduce_simple_oscillations,
             oscillation_snr_threshold, oscillation_amplitude_threshold,
             oscillation_ws_scale, oscillation_min_period,
             oscillation_max_period, binning)
     elif mission is not None and file is not None:
         # Mission target backed by a user-provided light curve file.
         return MissionInputObjectInfo(
             target_name, file, initial_mask, initial_transit_mask,
             star_info, outliers_sigma, high_rms_enabled,
             high_rms_threshold, high_rms_bin_hours, smooth_enabled,
             auto_detrend_enabled, auto_detrend_method, auto_detrend_ratio,
             auto_detrend_period, prepare_algorithm,
             reduce_simple_oscillations, oscillation_snr_threshold,
             oscillation_amplitude_threshold, oscillation_ws_scale,
             oscillation_min_period, oscillation_max_period, binning)
     elif mission is None and coords is not None and cadence > 300:
         # FFI product identified by sky coordinates instead of a mission id.
         return MissionFfiCoordsObjectInfo(
             coords[0], coords[1], sectors, author, cadence, initial_mask,
             initial_transit_mask, star_info, aperture, eleanor_corr_flux,
             outliers_sigma, high_rms_enabled, high_rms_threshold,
             high_rms_bin_hours, smooth_enabled, auto_detrend_enabled,
             auto_detrend_method, auto_detrend_ratio, auto_detrend_period,
             prepare_algorithm, reduce_simple_oscillations,
             oscillation_snr_threshold, oscillation_amplitude_threshold,
             oscillation_ws_scale, oscillation_min_period,
             oscillation_max_period, binning)
     elif mission is None and file is not None:
         # Plain input file with no mission association.
         return InputObjectInfo(
             file, initial_mask, initial_transit_mask, star_info,
             outliers_sigma, high_rms_enabled, high_rms_threshold,
             high_rms_bin_hours, smooth_enabled, auto_detrend_enabled,
             auto_detrend_method, auto_detrend_ratio, auto_detrend_period,
             prepare_algorithm, reduce_simple_oscillations,
             oscillation_snr_threshold, oscillation_amplitude_threshold,
             oscillation_ws_scale, oscillation_min_period,
             oscillation_max_period, binning)
     else:
         raise ValueError(
             "Invalid target definition with target_name={}, mission={}, id={}, coords={}, sectors={}, file={}, "
             "cadence={}".format(target_name, mission, id, coords, sectors,
                                 file, cadence))
# Example #12 (score: 0)
from examples.custom_algorithms.ButterworthCurvePreparer import ButterworthCurvePreparer
from examples.custom_algorithms.NeptunianDesertSearchZone import NeptunianDesertSearchZone
from examples.custom_algorithms.RandomSignalSelector import RandomSignalSelector
from sherlockpipe.sherlock import Sherlock
from lcbuilder.objectinfo.MissionFfiIdObjectInfo import MissionFfiIdObjectInfo

from sherlockpipe.sherlock_target import SherlockTarget


@contextmanager
def elapsed_timer():
    """Context manager that measures wall-clock time.

    Yields a zero-argument function formatting the elapsed seconds as a
    string. After the with-block finishes, the reading is pinned to the
    final total instead of continuing to grow.
    """
    started_at = default_timer()
    finished_at = None

    def reading():
        reference = default_timer() if finished_at is None else finished_at
        return str(reference - started_at)

    yield reading
    finished_at = default_timer()


# Time the run; `elapsed` returns the seconds spent so far inside the block.
with elapsed_timer() as elapsed:
    # We will use one TIC from the TESS mission and provide external customized algorithms for light curve
    # preparation, signal selection and search zone settings. These algorithms are extensions from SHERLOCK abstract
    # classes as you can inspect under the examples/custom_algorithms directory.
    sherlock = Sherlock(update_ois=False, sherlock_targets=[
        SherlockTarget(object_info=MissionFfiIdObjectInfo("TIC 181804752", 'all'), smooth_enabled=False, high_rms_enabled=False,
                       cpu_cores=2, auto_detrend_enabled=False, prepare_algorithm=ButterworthCurvePreparer(),
                       max_runs=10, custom_search_zone=NeptunianDesertSearchZone(),
                       custom_selection_algorithm=RandomSignalSelector())]) \
        .run()
    # NOTE(review): `sherlock` is bound to the return value of .run(), not the Sherlock instance.
    print("Analysis took " + elapsed() + "s")
# Example #13 (score: 0)
 def prepare_tic(self, prepare_tic_input):
     """Downloads and stores all training artifacts for a single TIC target.

     For the given TIC this method builds short- and long-cadence light
     curves, copies the target pixel files, writes periodograms and star
     parameters to CSV, and tags each cadence point with the configured label
     wherever a known candidate (CP/KP dispositions or additional OIs)
     transits. Returns the TIC id if processing failed, otherwise None.

     NOTE(review): this method uses pandas `DataFrame.append`, which was
     deprecated in pandas 1.4 and removed in pandas 2.0 — if the pinned
     pandas version is upgraded, these calls must move to `pd.concat`.
     """
     tic_id = str(prepare_tic_input.tic)
     target_dir = prepare_tic_input.dir + tic_id + "/"
     # Ensure the per-target directory layout exists.
     if not os.path.isdir(target_dir):
         os.mkdir(target_dir)
     tpfs_short_dir = target_dir + "tpfs_short/"
     tpfs_long_dir = target_dir + "tpfs_long/"
     if not os.path.isdir(tpfs_short_dir):
         os.mkdir(tpfs_short_dir)
     if not os.path.isdir(tpfs_long_dir):
         os.mkdir(tpfs_long_dir)
     lc_short = None
     lc_data_short = None
     failed_target = None
     try:
         mission_lightcurve_builder = MissionLightcurveBuilder()
         mission_ffi_lightcurve_builder = MissionFfiLightcurveBuilder()
     except Exception as e:
         # NOTE(review): if construction fails, the builders are undefined and the
         # following blocks will raise NameError, which their handlers swallow.
         failed_target = tic_id
         logging.exception(e)
     # Short cadence data is optional: failures here only log a warning.
     try:
         logging.info("Trying to get short cadence info for " +
                      str(prepare_tic_input.tic))
         lcbuild_short = \
             mission_lightcurve_builder.build(MissionObjectInfo(tic_id, 'all'), None, self.cache_dir)
         lc_short = lcbuild_short.lc
         lc_data_short = self.store_lc_data(
             lcbuild_short.lc_data, target_dir + "time_series_short.csv")
         tpf_short = lk.search_targetpixelfile(
             tic_id, cadence="short",
             author="spoc").download_all(download_dir=self.cache_dir +
                                         ".lightkurve-cache")
         for tpf in tpf_short.data:
             shutil.copy(tpf.path,
                         tpfs_short_dir + os.path.basename(tpf.path))
         short_periodogram = lc_short.to_periodogram(oversample_factor=5)
         periodogram_df = pd.DataFrame(columns=['period', 'power'])
         periodogram_df["period"] = short_periodogram.period.value
         periodogram_df["power"] = short_periodogram.power.value
         periodogram_df.to_csv(target_dir + "periodogram_short.csv")
     except Exception as e:
         logging.warning("No Short Cadence data for target " +
                         str(prepare_tic_input.tic))
         logging.exception(e)
     logging.info("Trying to get long cadence info for " +
                  str(prepare_tic_input.tic))
     # Long cadence data is mandatory: a failure here marks the target as failed.
     try:
         lcbuild_long = \
             mission_ffi_lightcurve_builder.build(MissionFfiIdObjectInfo(tic_id, 'all'), None,
                                                  self.cache_dir)
         star_df = pd.DataFrame(columns=[
             'obj_id', 'ra', 'dec', 'R_star', 'R_star_lerr', 'R_star_uerr',
             'M_star', 'M_star_lerr', 'M_star_uerr', 'Teff_star',
             'Teff_star_lerr', 'Teff_star_uerr', 'ld_a', 'ld_b'
         ])
         # Split the quadratic limb-darkening coefficients when available.
         ld_a = lcbuild_long.star_info.ld_coefficients[
             0] if lcbuild_long.star_info.ld_coefficients is not None else None
         ld_b = lcbuild_long.star_info.ld_coefficients[
             1] if lcbuild_long.star_info.ld_coefficients is not None else None
         # Lower/upper errors are derived as value - min and max - value.
         star_df = star_df.append(
             {
                 'obj_id':
                 tic_id,
                 'ra':
                 lcbuild_long.star_info.ra,
                 'dec':
                 lcbuild_long.star_info.dec,
                 'R_star':
                 lcbuild_long.star_info.radius,
                 'R_star_lerr':
                 lcbuild_long.star_info.radius -
                 lcbuild_long.star_info.radius_min,
                 'R_star_uerr':
                 lcbuild_long.star_info.radius_max -
                 lcbuild_long.star_info.radius,
                 'M_star':
                 lcbuild_long.star_info.mass,
                 'M_star_lerr':
                 lcbuild_long.star_info.mass -
                 lcbuild_long.star_info.mass_min,
                 'M_star_uerr':
                 lcbuild_long.star_info.mass_max -
                 lcbuild_long.star_info.mass,
                 'Teff_star':
                 lcbuild_long.star_info.teff,
                 'Teff_star_lerr':
                 200,
                 'Teff_star_uerr':
                 200,
                 'logg':
                 lcbuild_long.star_info.logg,
                 'logg_err':
                 lcbuild_long.star_info.logg_err,
                 'ld_a':
                 ld_a,
                 'ld_b':
                 ld_b,
                 'feh':
                 lcbuild_long.star_info.feh,
                 'feh_err':
                 lcbuild_long.star_info.feh_err,
                 'v':
                 lcbuild_long.star_info.v,
                 'v_err':
                 lcbuild_long.star_info.v_err,
                 'j':
                 lcbuild_long.star_info.j,
                 'j_err':
                 lcbuild_long.star_info.j_err,
                 'k':
                 lcbuild_long.star_info.k,
                 'k_err':
                 lcbuild_long.star_info.k_err,
                 'h':
                 lcbuild_long.star_info.h,
                 'h_err':
                 lcbuild_long.star_info.h_err,
                 'kp':
                 lcbuild_long.star_info.kp
             },
             ignore_index=True)
         star_df.to_csv(target_dir + "params_star.csv", index=False)
         sectors = lcbuild_long.sectors
         lc_long = lcbuild_long.lc
         lc_data_long = self.store_lc_data(
             lcbuild_long.lc_data, target_dir + "time_series_long.csv")
         lcf_long = lc_long.remove_nans()
         tpf_long = lk.search_targetpixelfile(tic_id, cadence="long", author="tess-spoc")\
             .download_all(download_dir=self.cache_dir + ".lightkurve-cache")
         for tpf in tpf_long.data:
             shutil.copy(tpf.path,
                         tpfs_long_dir + os.path.basename(tpf.path))
         long_periodogram = lc_long.to_periodogram(oversample_factor=5)
         periodogram_df = pd.DataFrame(columns=['period', 'power'])
         periodogram_df["period"] = long_periodogram.period.value
         periodogram_df["power"] = long_periodogram.power.value
         periodogram_df.to_csv(target_dir + "periodogram_long.csv")
         logging.info("Downloading neighbour stars for " +
                      prepare_tic_input.tic)
         #TODO get neighbours light curves stars = self.download_neighbours(prepare_tic_input.tic, sectors)
         logging.info("Classifying candidate points for " +
                      prepare_tic_input.tic)
         # Keep only confirmed (CP) and known (KP) planet dispositions.
         target_ois = prepare_tic_input.target_ois[
             (prepare_tic_input.target_ois["Disposition"] == "CP") |
             (prepare_tic_input.target_ois["Disposition"] == "KP")]
         target_ois_df = pd.DataFrame(columns=[
             'id', 'name', 'period', 'period_err', 't0', 'to_err', 'depth',
             'depth_err', 'duration', 'duration_err'
         ])
         # Every cadence point starts tagged as baseline ("BL").
         if lc_data_short is not None:
             tags_series_short = np.full(len(lc_data_short.time), "BL")
         tags_series_long = np.full(len(lc_data_long.time), "BL")
         if prepare_tic_input.label is not None:
             for index, row in target_ois.iterrows():
                 if row["OI"] not in prepare_tic_input.excluded_ois:
                     logging.info(
                         "Classifying candidate points with OI %s, period %s, t0 %s and duration %s for "
                         + prepare_tic_input.tic, row["OI"],
                         row["Period (days)"], row["Epoch (BJD)"],
                         row["Duration (hours)"])
                     target_ois_df = target_ois_df.append(
                         {
                             "id": row["Object Id"],
                             "name": row["OI"],
                             "period": row["Period (days)"],
                             "period_err": row["Period (days) err"],
                             # Epochs are shifted from BJD to BTJD (offset 2457000).
                             "t0": row["Epoch (BJD)"] - 2457000.0,
                             "to_err": row["Epoch (BJD) err"],
                             "depth": row["Depth (ppm)"],
                             "depth_err": row["Depth (ppm) err"],
                             "duration": row["Duration (hours)"],
                             "duration_err": row["Duration (hours) err"]
                         },
                         ignore_index=True)
                     # Tag in-transit cadences with the configured label.
                     if lc_short is not None:
                         mask_short = tls.transit_mask(
                             lc_data_short["time"].to_numpy(),
                             row["Period (days)"],
                             row["Duration (hours)"] / 24,
                             row["Epoch (BJD)"] - 2457000.0)
                         tags_series_short[
                             mask_short] = prepare_tic_input.label
                     mask_long = tls.transit_mask(
                         lc_data_long["time"].to_numpy(),
                         row["Period (days)"], row["Duration (hours)"] / 24,
                         row["Epoch (BJD)"] - 2457000.0)
                     tags_series_long[mask_long] = prepare_tic_input.label
             # Repeat the same tagging for the additional OIs dataframe.
             for index, row in prepare_tic_input.target_additional_ois_df.iterrows(
             ):
                 if row["OI"] not in prepare_tic_input.excluded_ois:
                     target_ois_df = target_ois_df.append(
                         {
                             "id": row["Object Id"],
                             "name": row["OI"],
                             "period": row["Period (days)"],
                             "period_err": row["Period (days) err"],
                             "t0": row["Epoch (BJD)"] - 2457000.0,
                             "to_err": row["Epoch (BJD) err"],
                             "depth": row["Depth (ppm)"],
                             "depth_err": row["Depth (ppm) err"],
                             "duration": row["Duration (hours)"],
                             "duration_err": row["Duration (hours) err"]
                         },
                         ignore_index=True)
                     if lc_short is not None:
                         mask_short = tls.transit_mask(
                             lc_data_short["time"].to_numpy(),
                             row["Period (days)"],
                             row["Duration (hours)"] / 24,
                             row["Epoch (BJD)"] - 2457000.0)
                         tags_series_short[
                             mask_short] = prepare_tic_input.label
                     mask_long = tls.transit_mask(
                         lc_data_long["time"].to_numpy(),
                         row["Period (days)"], row["Duration (hours)"] / 24,
                         row["Epoch (BJD)"] - 2457000.0)
                     tags_series_long[mask_long] = prepare_tic_input.label
         target_ois_df.to_csv(target_dir + "/ois.csv")
         # Persist the tagged time series (overwrites the earlier untagged CSVs).
         if lc_data_short is not None:
             lc_data_short["tag"] = tags_series_short
             lc_data_short.to_csv(target_dir + "time_series_short.csv")
         lc_data_long["tag"] = tags_series_long
         lc_data_long.to_csv(target_dir + "time_series_long.csv")
         # TODO store folded light curves -with local and global views-(masking previous candidates?)
     except Exception as e:
         failed_target = tic_id
         logging.exception(e)
     return failed_target