def _process_tce(feature_config, tce):
    """Reads and process the input features of a Threshold Crossing Event.

    Args:
      tce: row from TCE CSV file, read in as a Pandas dataframe.
      feature_config: ConfigDict containing the feature configurations.

    Returns:
      A dictionary of processed light curve features.

    Raises:
      ValueError: If feature_config contains features other than 'global_view'
      and 'local_view'.
    """
    if not {"global_view", "local_view"}.issuperset(feature_config.keys()):
        raise ValueError(
            "Only 'global_view' and 'local_view' features are supported.")

    # Read and process the light curve.
    time, flux = preprocess.read_and_process_light_curve(tce.tic_id,
                                                         FLAGS.tess_data_dir,
                                                         sector=tce.Sectors)
    time, flux = preprocess.phase_fold_and_sort_light_curve(
        time, flux, tce.Period, tce.Epoc)

    # Generate the local and global views.
    features = {}

    if "global_view" in feature_config:
        global_view = preprocess.global_view(time, flux, tce.Period)
        # Add a batch dimension.
        features["global_view"] = np.expand_dims(global_view, 0)

    if "local_view" in feature_config:
        local_view = preprocess.local_view(time, flux, tce.Period,
                                           tce.Duration)
        # Add a batch dimension.
        features["local_view"] = np.expand_dims(local_view, 0)

    # Possibly save plots.
    if FLAGS.output_image_dir:
        ncols = len(features)
        fig, axes = plt.subplots(1,
                                 ncols,
                                 figsize=(10 * ncols, 5),
                                 squeeze=False)

        for i, name in enumerate(sorted(features)):
            ax = axes[0][i]
            ax.plot(features[name][0], ".")
            ax.set_title(name)
            ax.set_xlabel("Bucketized Time (days)")
            ax.set_ylabel("Normalized Flux")

        fig.tight_layout()
        fig.savefig(os.path.join(FLAGS.output_image_dir,
                                 str(tce.tic_id) + '.png'),
                    bbox_inches="tight")

    return features
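# A minimal usage sketch for the example above (an assumption, not part of the
# original code): the CSV path and ConfigDict construction are illustrative.
# The row is expected to carry the tic_id, Sectors, Period, Epoc and Duration
# fields used by the function.
import pandas as pd
from ml_collections import config_dict

feature_config = config_dict.ConfigDict({"global_view": {}, "local_view": {}})
tce_table = pd.read_csv("tces.csv")
features = _process_tce(feature_config, tce_table.iloc[0])
print({name: view.shape for name, view in features.items()})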
def _process_tce(tce):
    """Processes the light curve for a Kepler TCE and returns an Example proto.

  Args:
    tce: Row of the input TCE table.

  Returns:
    A tensorflow.train.Example proto containing TCE features.

  Raises:
    IOError: If the light curve files for this Kepler ID cannot be found.
  """
    # Read and process the light curve.
    CAMPAIGN = "C" + str(tce.campaign)

    # Make output proto.
    ex = tf.train.Example()

    try:
        time, flux = preprocess.read_and_process_light_curve(
            tce.kepid, FLAGS.kepler_data_dir, CAMPAIGN)
    except ValueError:
        print("NO SIGMA????? ep" + str(tce.kepid))
        return None

    time, flux = preprocess.phase_fold_and_sort_light_curve(
        time, flux, tce.tce_period, tce.tce_time0bk)

    # Generate the local and global views.
    try:
        global_view = preprocess.global_view(time, flux, tce.tce_period)
        local_view = preprocess.local_view(time, flux, tce.tce_period,
                                           tce.tce_duration)
    except ValueError:
        print("Could not bin light curve for ep" + str(tce.kepid))
        return None
    except Exception as e:
        print(e.args)
        print("Unexpected error while generating views for ep" + str(tce.kepid))
        return None

    # Set time series features.
    _set_float_feature(ex, "global_view", global_view)
    _set_float_feature(ex, "local_view", local_view)

    # Set other columns.
    for col_name, value in tce.items():
        if np.issubdtype(type(value), np.integer):
            _set_int64_feature(ex, col_name, [value])
        else:
            try:
                _set_float_feature(ex, col_name, [float(value)])
            except ValueError:
                _set_bytes_feature(ex, col_name, [value])

    return ex
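# The _set_float_feature / _set_int64_feature / _set_bytes_feature helpers are
# not shown in these examples. A minimal sketch of what they could look like,
# inferred from the calls above (an assumption, not the original helpers):
def _set_float_feature(ex, name, value):
    # Appends a list of floats to the named feature of an Example proto.
    ex.features.feature[name].float_list.value.extend([float(v) for v in value])


def _set_int64_feature(ex, name, value):
    # Appends a list of integers to the named feature of an Example proto.
    ex.features.feature[name].int64_list.value.extend([int(v) for v in value])


def _set_bytes_feature(ex, name, value):
    # Appends a list of UTF-8 encoded strings to the named feature.
    ex.features.feature[name].bytes_list.value.extend(
        [str(v).encode("utf-8") for v in value])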
Example #3
def _process_tce(feature_config):
  """Reads and process the input features of a Threshold Crossing Event.

  Args:
    feature_config: ConfigDict containing the feature configurations.

  Returns:
    A dictionary of processed light curve features.

  Raises:
    ValueError: If feature_config contains features other than 'global_view'
    and 'local_view'.
  """
  if not {"global_view", "local_view"}.issuperset(feature_config.keys()):
    raise ValueError(
        "Only 'global_view' and 'local_view' features are supported.")

  # Read and process the light curve.
  time, flux = preprocess.read_and_process_light_curve(FLAGS.kepler_id,
                                                       FLAGS.kepler_data_dir)
  time, flux = preprocess.phase_fold_and_sort_light_curve(
      time, flux, FLAGS.period, FLAGS.t0)

  # Generate the local and global views.
  features = {}

  if "global_view" in feature_config:
    global_view = preprocess.global_view(time, flux, FLAGS.period)
    # Add a batch dimension.
    features["global_view"] = np.expand_dims(global_view, 0)

  if "local_view" in feature_config:
    local_view = preprocess.local_view(time, flux, FLAGS.period, FLAGS.duration)
    # Add a batch dimension.
    features["local_view"] = np.expand_dims(local_view, 0)

  # Possibly save plots.
  if FLAGS.output_image_file:
    ncols = len(features)
    fig, axes = plt.subplots(1, ncols, figsize=(10 * ncols, 5), squeeze=False)

    for i, name in enumerate(sorted(features)):
      ax = axes[0][i]
      ax.plot(features[name][0], ".")
      ax.set_title(name)
      ax.set_xlabel("Bucketized Time (days)")
      ax.set_ylabel("Normalized Flux")

    fig.tight_layout()
    fig.savefig(FLAGS.output_image_file, bbox_inches="tight")

  return features
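# The FLAGS referenced above are not defined in the snippet. A sketch of the
# corresponding absl flag definitions, with names taken from the calls above
# and help strings that are assumptions:
from absl import flags

flags.DEFINE_integer("kepler_id", None, "Kepler ID of the target star.")
flags.DEFINE_string("kepler_data_dir", None,
                    "Base directory containing Kepler light curve FITS files.")
flags.DEFINE_float("period", None, "Orbital period of the TCE, in days.")
flags.DEFINE_float("t0", None, "Epoch of first transit, in days.")
flags.DEFINE_float("duration", None, "Transit duration, in days.")
flags.DEFINE_string("output_image_file", None,
                    "Optional path at which to save a plot of the views.")

FLAGS = flags.FLAGS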
Example #4
def _process_tce(tce):
    """Processes the light curve for a Kepler TCE and returns an Example proto.

  Args:
    tce: Row of the input TCE table.

  Returns:
    A tensorflow.train.Example proto containing TCE features.

  Raises:
    IOError: If the light curve files for this Kepler ID cannot be found.
  """
    # Read and process the light curve.
    time, flux = preprocess.read_and_process_light_curve(tce.tic_id,
                                                         FLAGS.tess_data_dir,
                                                         sector=tce.Sectors,
                                                         is_multi=tce.is_multi)
    time, flux = preprocess.phase_fold_and_sort_light_curve(
        time, flux, tce.Period, tce.Epoc)

    # Generate the local and global views.
    try:
        global_view = preprocess.global_view(time, flux, tce.Period)
        local_view = preprocess.local_view(time, flux, tce.Period,
                                           tce.Duration)
        # secondary_view = preprocess.secondary_view(time, flux, tce.Period, tce.Duration)
    except RuntimeWarning:
        tf.compat.v1.logging.info('Too many invalid values in TIC %s',
                                  tce.tic_id)
        raise SparseLightCurveError

    # Make output proto.
    ex = tf.train.Example()

    # Set time series features.
    _set_float_feature(ex, "global_view", global_view)
    _set_float_feature(ex, "local_view", local_view)
    # _set_float_feature(ex, "secondary_view", secondary_view)

    # Set other columns.
    for col_name, value in tce.items():
        if np.issubdtype(type(value), np.integer):
            _set_int64_feature(ex, col_name, [value])
        else:
            try:
                _set_float_feature(ex, col_name, [float(value)])
            except ValueError:
                _set_bytes_feature(ex, col_name, [value])

    return ex
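# Two pieces assumed by the example above but not shown: SparseLightCurveError
# and the warning configuration that makes RuntimeWarning catchable. By default
# a RuntimeWarning (e.g. numpy's "invalid value" warnings during binning) is
# only a warning; it reaches the except clause only if warnings are escalated
# to errors. A minimal sketch of both assumptions:
import warnings


class SparseLightCurveError(Exception):
    # Raised when a light curve has too few valid points to build the views.
    pass


# Escalate RuntimeWarning so the except RuntimeWarning branch above can fire.
warnings.filterwarnings("error", category=RuntimeWarning)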
Example #5
def _process_tce(feature_config, kepler_id, period, t0, duration, kepler_data):
    """Builds the requested light curve views for a single Kepler TCE.

    Args:
      feature_config: Dict or ConfigDict naming the requested views.
      kepler_id: Kepler ID of the target star.
      period: Orbital period of the TCE, in days.
      t0: Epoch of first transit, in days.
      duration: Transit duration, in days.
      kepler_data: Base directory of the Kepler light curve files.

    Returns:
      A dictionary of processed light curve features with a batch dimension.
    """
    time, flux = preprocess.read_and_process_light_curve(
        kepler_id, kepler_data)
    time, flux = preprocess.phase_fold_and_sort_light_curve(
        time, flux, period, t0)

    features = {}

    if "global_view" in feature_config:
        global_view = preprocess.global_view(time, flux, period)
        features["global_view"] = np.expand_dims(global_view, 0)

    if "local_view" in feature_config:
        local_view = preprocess.local_view(time, flux, period, duration)
        features["local_view"] = np.expand_dims(local_view, 0)
    return features
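# Usage sketch for the stripped-down variant above; all values here are
# illustrative assumptions, not taken from the original example.
features = _process_tce(
    feature_config={"global_view": {}, "local_view": {}},
    kepler_id=11442793,      # target star (illustrative)
    period=14.44912,         # days (illustrative)
    t0=2.2,                  # days (illustrative)
    duration=0.11,           # days (illustrative)
    kepler_data="/path/to/kepler_fits")
print(features["global_view"].shape, features["local_view"].shape)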
def _process_tce(tce):
  """Processes the light curve for a Kepler TCE and returns an Example proto.

  Args:
    tce: Row of the input TCE table.

  Returns:
    A tensorflow.train.Example proto containing TCE features.

  Raises:
    IOError: If the light curve files for this Kepler ID cannot be found.
  """
  # Read and process the light curve.
  time, flux = preprocess.read_and_process_light_curve(tce.kepid,
                                                       FLAGS.kepler_data_dir)
  time, flux = preprocess.phase_fold_and_sort_light_curve(
      time, flux, tce.tce_period, tce.tce_time0bk)

  # Generate the local and global views.
  global_view = preprocess.global_view(time, flux, tce.tce_period)
  local_view = preprocess.local_view(time, flux, tce.tce_period,
                                     tce.tce_duration)

  # Make output proto.
  ex = tf.train.Example()

  # Set time series features.
  _set_float_feature(ex, "global_view", global_view)
  _set_float_feature(ex, "local_view", local_view)

  # Set other columns.
  for col_name, value in tce.items():
    if np.issubdtype(type(value), np.integer):
      _set_int64_feature(ex, col_name, [value])
    else:
      try:
        _set_float_feature(ex, col_name, [float(value)])
      except ValueError:
        _set_bytes_feature(ex, col_name, [str(value)])

  return ex
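# Typical downstream step for the Example-proto variants (a sketch under the
# assumption that tce_table is a pandas DataFrame of TCEs; the output path is
# illustrative):
import tensorflow as tf

with tf.io.TFRecordWriter("tces-train.tfrecord") as writer:
    for _, tce in tce_table.iterrows():
        ex = _process_tce(tce)
        if ex is not None:  # some variants above return None on failure
            writer.write(ex.SerializeToString())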
def _process_tce(feature_config):
    """Reads and process the input features of a Threshold Crossing Event.

  Args:
    feature_config: ConfigDict containing the feature configurations.

  Returns:
    A dictionary of processed light curve features.

  Raises:
    ValueError: If feature_config contains features other than 'global_view'
    and 'local_view'.
  """
    if not {"global_view", "local_view"}.issuperset(feature_config.keys()):
        raise ValueError(
            "Only 'global_view' and 'local_view' features are supported.")

    kep_id = FLAGS.kepler_id
    filenames = kepler_io.kepler_filenames('data/kepler', kep_id)
    all_time, all_flux = kepler_io.read_kepler_light_curve(filenames)
    for f in all_flux:
        f /= np.median(f)

    plt.figure(figsize=(12, 6), dpi=80)
    plt.scatter(np.concatenate(all_time), np.concatenate(all_flux), s=0.5)
    plt.title('Entire Kepler Mission Data for KEP_ID=' + str(kep_id))
    plt.xlabel('Time (days)')
    plt.ylabel('Brightness')
    plt.show()

    # Read and process the light curve.
    time, flux, spline = preprocess.read_and_process_light_curve(
        FLAGS.kepler_id, FLAGS.kepler_data_dir)

    #plt.figure(figsize=(12, 6), dpi=80)
    #plt.scatter(np.concatenate(all_time), np.concatenate(all_flux), s=0.5, cmap='plasma')
    plt.plot(spline, 'r')
    plt.title('Best fit spline')
    #plt.xlabel('Time(days)')
    #plt.ylabel('Brightness')
    #plt.show()

    plt.figure(figsize=(12, 6), dpi=80)
    plt.scatter(time, flux, s=0.5)
    plt.title('Divided by best fit spline')
    plt.xlabel('Time (days)')
    plt.ylabel('Brightness')
    plt.show()

    time, flux = preprocess.phase_fold_and_sort_light_curve(
        time, flux, FLAGS.period, FLAGS.t0)

    # Generate the local and global views.
    features = {}

    if "global_view" in feature_config:
        global_view = preprocess.global_view(time, flux, FLAGS.period)
        # Add a batch dimension.
        features["global_view"] = np.expand_dims(global_view, 0)

    if "local_view" in feature_config:
        local_view = preprocess.local_view(time, flux, FLAGS.period,
                                           FLAGS.duration)
        # Add a batch dimension.
        features["local_view"] = np.expand_dims(local_view, 0)

    if "local_view" in feature_config:
        plt.figure(figsize=(12, 6), dpi=80)
        plt.plot(local_view, 'o')
        plt.title('Folded local view of target TCE')
        plt.xlabel('Bucketized Time (days)')
        plt.ylabel('Normalized Flux')
        plt.show()

    # Possibly save plots.
    if FLAGS.output_image_file:
        ncols = len(features)
        fig, axes = plt.subplots(1,
                                 ncols,
                                 figsize=(10 * ncols, 5),
                                 squeeze=False)

        for i, name in enumerate(sorted(features)):
            ax = axes[0][i]
            ax.plot(features[name][0], ".")
            ax.set_title(name)
            ax.set_xlabel("Bucketized Time (days)")
            ax.set_ylabel("Normalized Flux")

        fig.tight_layout()
        fig.savefig(FLAGS.output_image_file, bbox_inches="tight")

    return features