Example #1
    def existingperiods(self):
        if not self.timereset:
            self.reset_time()

        time_seconds = self.df['t_mean'] * 60
        relativetup = WDutils.relativescales(self.df)
        flux = relativetup.flux
        ls = LombScargle(time_seconds, flux)
        freq, amp = ls.autopower(nyquist_factor=1)

        detrad = self.df['detrad']
        ls_detrad = LombScargle(time_seconds, detrad)
        freq_detrad, amp_detrad = ls_detrad.autopower(nyquist_factor=1)

        pgram_tup = WDranker_2.find_cPGRAM(ls,
                                           amp_detrad,
                                           exposure=self.exposure)
        strongest_period_tup = pgram_tup.strongest_period_tup
        if strongest_period_tup[0] != -1:
            self.period = strongest_period_tup[0]
        else:
            self.period = float('NaN')
        c_periodogram = pgram_tup.c
        if c_periodogram > 0:
            return True
        else:
            return False
Example #2
def box_ls_pspec(cube, L, r_mpc, Nkbins=100, error=False, kz=None,cosmo=False):
    """ Estimate the 1D power spectrum for square regions with non-uniform distances using Lomb-Scargle periodogram in the radial direction."""
    Nx,Ny,Nz = cube.shape
    try:
        Lx, Ly, Lz = L
    except TypeError:
        Lx = L; Ly = L; Lz = L   # Assume L is a single side length, same for all
    dx, dy, dz = Lx/float(Nx), Ly/float(Ny), Lz/float(Nz)
    kx = np.fft.fftfreq(Nx,d=dx)*2*np.pi   #Mpc^-1
    ky = np.fft.fftfreq(Ny,d=dy)*2*np.pi   #Mpc^-1
    assert len(r_mpc) == Nz
    if kz is None:
        kz, powtest = LombScargle(r_mpc,cube[0,0,:]).autopower(nyquist_factor=1,normalization="psd")

    _cube = np.zeros((Nx,Ny,kz.size))
    for i in range(cube.shape[0]):
        for j in range(cube.shape[1]):
            if kz is None:
                kz, power = LombScargle(r_mpc,cube[i,j,:]).autopower(nyquist_factor=1,normalization="psd")
            else:
                power = LombScargle(r_mpc, cube[i,j,:]).power(kz,normalization="psd")
            _cube[i,j] = np.sqrt(power)
    kz *= 2*np.pi
    _d = np.fft.fft2(_cube,axes=(0,1))
    kx = kx[kx>0]
    ky = ky[ky>0]
    Nx = np.sum(kx>0)
    Ny = np.sum(ky>0)
    _d = _d[1:Nx+1,1:Ny+1,:]     #Remove the nonpositive k-terms from the 2D FFT
    pk3d = np.abs(_d)**2/(Nx*Ny)
    results = bin_1d(pk3d,(kx,ky,kz),Nkbins=Nkbins, error=error)

    return results
Example #3
def lomb_scargle(t,
                 y,
                 dy=None,
                 minfreq=1. / 365,
                 maxfreq=1 / 2,
                 npeaks=0,
                 peaktol=0.05):
    # periodogram
    if isinstance(dy, np.ndarray):
        ls = LombScargle(t, y, dy)
    else:
        ls = LombScargle(t, y)

    frequency, power = ls.autopower(minimum_frequency=minfreq,
                                    maximum_frequency=maxfreq,
                                    samples_per_peak=10)
    probabilities = [0.1, 0.05, 0.01]
    try:
        pp = ls.false_alarm_level(probabilities)
    except Exception:
        pp = [-1, -1, -1]
    # power probabilities
    # This tells us that to attain a 10% false alarm probability requires the highest periodogram peak to be approximately XX; 5% requires XX, and 1% requires XX.
    if npeaks > 0:

        # find peaks in periodogram
        peaks, amps = find_peaks(power, height=peaktol)
        Nterms = npeaks  #min(npeaks,len(peaks))
        fdata = np.zeros((Nterms, 3))  # frequency, amplitude, shift

        # fit amplitudes to each peak frequency
        if Nterms > 0 and npeaks > 0:

            # sort high to low
            peaks = peaks[np.argsort(amps['peak_heights'])[::-1]]

            # estimate priors
            for i in range(min(npeaks, len(peaks))):
                fdata[i, 0] = frequency[int(peaks[i])]
                fdata[i, 1] = np.sort(amps['peak_heights'])[::-1][i] * maxavg(
                    y)  #amplitude estimate
                fdata[i, 2] = 0  # phase shift estimate

            # ignore fitting frequencies
            priors = fdata.flatten()
            bounds = np.array([[0, 1], [0, max(y) * 1.5], [0, 2 * np.pi]] *
                              Nterms).T

            def fit_wave(pars):
                wave = make_sin(t, pars)
                return (y - wave) / y

            res = least_squares(fit_wave, x0=priors, bounds=bounds)

            fdata = res.x.reshape(Nterms, -1)

        return frequency, power, fdata
    else:
        return frequency, power
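A minimal usage sketch for the helper above (hypothetical data; assumes lomb_scargle and its module-level imports, numpy and astropy's LombScargle, are in scope, and keeps the default npeaks=0 so the undefined make_sin/maxavg helpers are never reached):

import numpy as np

# Synthetic light curve: a 10-day sinusoid sampled irregularly over ~2 years.
rng = np.random.default_rng(0)
t = np.sort(rng.uniform(0, 730, 400))            # days
y = 1.0 + 0.1 * np.sin(2 * np.pi * t / 10.0) + 0.01 * rng.standard_normal(t.size)

frequency, power = lomb_scargle(t, y)            # npeaks=0 -> periodogram only
best_period = 1.0 / frequency[np.argmax(power)]  # expected to land near 10 days
print(best_period)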
Example #4
    def assessrecovery(self):
        exists = self.FUVexists()

        # Exposure metric already computed in init (self.c_exposure)

        # Periodogram Metric
        time_seconds = self.df['t_mean'] * 60
        #ls = LombScargle(time_seconds, self.flux_injected)
        ls = LombScargle(self.df['t_mean'], self.flux_injected)
        freq, amp = ls.autopower(nyquist_factor=1)

        detrad = self.df['detrad']
        #ls_detrad = LombScargle(time_seconds, detrad)
        ls_detrad = LombScargle(self.df['t_mean'], detrad)
        freq_detrad, amp_detrad = ls_detrad.autopower(nyquist_factor=1)
        pgram_tup = WDranker_2.find_cPGRAM(ls,
                                           amp_detrad,
                                           exposure=self.exposure)
        # Return 0,1 result of recovery
        c_periodogram = pgram_tup.c
        ditherperiod_exists = pgram_tup.ditherperiod_exists

        # Welch Stetson Metric
        if exists:
            c_ws = WDranker_2.find_cWS(self.t_mean, self.t_mean_fuv,
                                       self.flux_injected,
                                       self.flux_injected_fuv, self.flux_err,
                                       self.flux_err_fuv, ditherperiod_exists,
                                       self.FUVexists())
        else:
            c_ws = WDranker_2.find_cWS(self.t_mean, None, self.flux_injected,
                                       None, self.flux_err, None,
                                       ditherperiod_exists, self.FUVexists())

        # RMS Metric --- have to 'unscale' the magnitudes
        converted_flux = [f * self.original_median for f in self.flux_injected]
        injectedmags = [WDutils.flux_to_mag('NUV', f) for f in converted_flux]
        sigma_mag = median_absolute_deviation(injectedmags)
        c_magfit = WDranker_2.find_cRMS(self.mag, sigma_mag, 'NUV')

        # Weights:
        w_pgram = 1
        w_expt = .2
        w_WS = .3
        w_magfit = .25

        C = ((w_pgram * c_periodogram) + (w_expt * self.c_exposure) +
             (w_magfit * c_magfit) + (w_WS * c_ws))

        if C > self.cutoff:
            return 1
        else:
            return 0
Example #5
def circadian_movement_energies(g):
    t = (g["timestamp"].values / 1000.0)  # seconds
    ylat = g["latitude"].values
    ylong = g["longitude"].values
    pHrs = np.arange(23.5, 24.51, 0.01)  # hours
    pSecs = pHrs * 60 * 60  # seconds
    f = 1 / pSecs

    pgram_lat = LombScargle(t, ylat).power(frequency=f, normalization='psd')
    pgram_long = LombScargle(t, ylong).power(frequency=f, normalization='psd')

    E_lat = np.sum(pgram_lat)
    E_long = np.sum(pgram_long)
    return (E_lat, E_long)
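A small, hypothetical driver for circadian_movement_energies (assumes the function above plus numpy and pandas are importable); it builds a fake GPS trace whose latitude carries a 24-hour cycle:

import numpy as np
import pandas as pd

rng = np.random.default_rng(1)
hours = np.arange(0, 24 * 7, 0.5)                       # one sample every 30 min for a week
g = pd.DataFrame({
    "timestamp": hours * 3600 * 1000,                    # milliseconds, as the function expects
    "latitude": 40.0 + 0.01 * np.sin(2 * np.pi * hours / 24.0),
    "longitude": -105.0 + 0.001 * rng.standard_normal(hours.size),
})

E_lat, E_long = circadian_movement_energies(g)
print(E_lat > E_long)                                    # latitude holds the circadian signal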
Example #6
def getPeriod2(values):
    frequency, power = LombScargle(range(1,
                                         len(values) + 1), values).autopower()
    #plt.plot(frequency, power)
    #plt.show()
    # print power
    #power = abs(power)
    #max_pwr, max_f = sorted(zip(power,frequency),key=lambda x: x[0])[-1]
    #max_pwr2, max_f2 = sorted(zip(power,frequency),key=lambda x: x[0])[-2]

    p = 99.5
    while True:
        threshold = np.percentile(power, p)
        #print threshold
        #print(np.where(abs(power)>threshold)[0])
        #idx = np.where(abs(power)>threshold)[0]
        #threshold
        idx = np.where(abs(power) > threshold)[0]
        #print idx
        if (len(idx) > 3 and (idx[1] - idx[0]) == 1 and (idx[2] - idx[1]) == 1
                and (idx[3] - idx[2]) == 1):
            break
        p -= 0.1
    max_f = abs(frequency[idx][1])
    max_f2 = abs(frequency[idx][2])

    return [1 / max_f, 1 / max_f2]
Example #7
def periodcheck(thistime, thisflux, mflags):
    #dates,flux,flux_pcor,flux_ptcor,mflags = readpsfk2cor(k2name)
    ig = (mflags == 0)
    #
    # k2sc documentation:
    # https://github.com/OxES/k2sc/blob/master/relase_readme.txt
    # indicates that mflags ==0 would be good data.
    #
    # This section, not used, shows how to do sigma clipping. Unnecessary since mflags already
    # applies a ~4-5 sigma clip.
    #sigma clipping stuff ; see http://docs.astropy.org/en/stable/stats/robust.html#sigma-clipping
    #from astropy.stats import sigma_clip
    #filtered_data = sigma_clip(flux_ptcor, sigma=3, iters=10)
    # that would be a mask
    #
    # DISCOVERY: WILL CRASH IF ALL DATA FLAGGED AS BAD!!!
    # SOLUTION: DON'T GIVE IT THOSE FILES!
    #
    # Periodogram stuff
    # search good data only in period range 1 hour to 10 days
    ls = LombScargle(thistime[ig], thisflux[ig])
    frequency, power = ls.autopower(maximum_frequency=24.0,
                                    minimum_frequency=0.1)
    #
    best_frequency = frequency[np.argmax(power)]
    best_fap = ls.false_alarm_probability(power.max())
    #
    # Calculate the model if desired
    #y_fit = ls.model(dates, best_frequency)
    #plt.plot(t_fit,y_fit,'k-')
    #
    return frequency, power, best_frequency, best_fap
Example #8
def psd_scargle(time, flux, Nsample=10.):
    """
	   Calculate the power spectral density using the Lomb-Scargle (L-S) periodogram
	   
	   Parameters:
	        time (numpy array, float): time stamps of the light curve
	        flux (numpy array, float): the flux variations of the light curve
	        Nsample (optional, float): oversampling rate for the periodogram. Default value = 10.
	   
	   Returns:
	        fr (numpy array, float): evaluated frequency values in the domain of the periodogram
	        sc (numpy array, float): the PSD values of the L-S periodogram
	
	.. codeauthor:: Timothy Van Reeth <*****@*****.**>
	"""
    ndata = len(time)  # The number of data points
    fnyq = 0.5 / np.median(time[1:] - time[:-1])  # the Nyquist frequency
    fres = 1. / (time[-1] - time[0])  # the frequency resolution
    fr = np.arange(0., fnyq, fres / float(Nsample))  # the frequencies
    sc1 = LombScargle(time, flux).power(
        fr, normalization='psd')  # The non-normalized Lomb-Scargle "power"

    # Computing the appropriate rescaling factors (to convert to astrophysical units)
    fct = m.sqrt(4. / ndata)
    T = time.ptp()
    sc = fct**2. * sc1 * T

    # Ensuring the output does not contain nans
    if (np.isnan(sc).any()):
        fr = fr[~np.isnan(sc)]
        sc = sc[~np.isnan(sc)]

    return fr, sc
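A quick sketch exercising psd_scargle on a test signal (hypothetical; assumes the function above and its module-level imports, numpy, math as m, and astropy's LombScargle, are available):

import numpy as np

time = np.arange(0.0, 100.0, 0.02)                 # days, evenly sampled
flux = 5e-4 * np.sin(2 * np.pi * 2.5 * time)       # a single 2.5 d^-1 oscillation

fr, sc = psd_scargle(time, flux)
print(fr[np.argmax(sc)])                           # peak frequency, expected near 2.5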
Example #9
def LS(t, y):
    f, p = LombScargle(t, y).autopower(normalization='psd',
                                       nyquist_factor=1.0,
                                       samples_per_peak=1)
    f = f * 1e6  # to uHz
    p = p * np.mean(y**2) / np.sum(p) / (f[1] - f[0])  # Bill's normalization
    return f, p
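By construction, the rescaling above makes the integrated spectrum equal the mean square of the input, i.e. sum(p) * df == mean(y**2). A short check of that identity, assuming the LS function above and its imports are in scope and using made-up data:

import numpy as np

rng = np.random.default_rng(2)
t = np.sort(rng.uniform(0, 30 * 86400, 2000))      # ~30 days of scattered samples, in seconds
y = np.sin(2 * np.pi * 100e-6 * t) + 0.1 * rng.standard_normal(t.size)

f, p = LS(t, y)                                    # f in uHz, p in (signal units)^2 per uHz
print(np.isclose(np.sum(p) * (f[1] - f[0]), np.mean(y**2)))   # True by construction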
Example #10
def fft(time, flux, kepler=False):
    import numpy as np
    from astropy.stats import LombScargle

    fr, f = LombScargle(time, flux).autopower(minimum_frequency=1e-9,
                                              maximum_frequency=10000e-6,
                                              samples_per_peak=50,
                                              normalization='standard',
                                              method='fast')
    # f = np.ma.masked_where(fr <= 3160e-6, f)
    # f = np.ma.masked_where(fr >= 3170e-6, f)
    f = np.ma.masked_where(fr <= 1.1574074074074074e-6, f)
    if kepler == True:
        hm = np.array([n * 47.2042e-6 for n in np.arange(50)])
        for h in hm:
            f = np.ma.masked_where((h - 0.25e-6 <= fr) & (fr <= h + 0.25e-6),
                                   f)
        mfq = fr[f.argmax()]
        # print(f.argmax())
        p = 1 / mfq / 86400.
        return fr, p, mfq, f, hm
    else:
        mfq = fr[f.argmax()]
        p = 1 / mfq / 86400.
        return fr, p, mfq, f
Example #11
def multiterm_periodogram(t, y, dy, omega, n_terms=3):
    """Perform a multiterm periodogram at each omega

    This calculates the chi2 for the best-fit least-squares solution
    for each frequency omega.

    Parameters
    ----------
    t : array_like
        sequence of times
    y : array_like
        sequence of observations
    dy : array_like
        sequence of observational errors
    omega : float or array_like
        frequencies at which to evaluate p(omega)

    Returns
    -------
    power : ndarray
        P = 1. - chi2 / chi2_0
        where chi2_0 is the chi-square for a simple mean fit to the data
    """
    # TODO: deprecate this
    ls = LombScargle(t, y, dy, nterms=n_terms)
    frequency = np.asarray(omega) / (2 * np.pi)
    return ls.power(frequency)
Example #12
def main():
    df = pd.read_fwf(data_url,
                     colspecs=((0, 6), (7, 27)),
                     header=1,
                     names=('orbnum', 'utc'),
                     parse_dates=[1],
                     date_parser=_dp)

    sec_of_day = 86400.0
    df['period'] = df.utc.diff(periods=1).dt.total_seconds()/sec_of_day

    ax = plt.axes()
    plt.plot(df.utc, df.period, marker='+')
    plt.xlabel('date/time')
    plt.ylabel('period')
    plt.ylim(10.0, 14.0)

    ax.xaxis.set_major_locator(mdates.MonthLocator(interval=4))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))

    plt.show()


    date0 = pd.Timestamp('2016-04-17')
    df_m = df[ df.utc > date0 ].copy()
    df_m['delta'] = df_m['utc'] - date0
    df_m['et'] = df_m['delta'].dt.total_seconds()/sec_of_day


    from astropy.stats import LombScargle
    ls = LombScargle(df_m.et, df_m.period, fit_mean=True)
    freq, power = ls.autopower()
    fmax = freq[np.argmax(power)]
    print('frequency(Lomb-Scargle): ', 1/fmax)
    print('frequency(rev/2):        ', 224.701/2.0)
Example #13
    def __init__(self, t, y, dy=None,
                 show_progress=False,
                 progress_bar=None,
                 model=None,
                 freq=None, **kwargs):

        self.model = model
        self.t = t
        self.y = y
        self.dy = dy
        self.freq = freq
        self.show_progress = show_progress
        self.progress_bar = progress_bar
        if self.show_progress and self.progress_bar is None:
            self.progress_bar = tqdm
        elif self.progress_bar is None:
            self.progress_bar = lambda x: x

        if self.dy is None:
            self.dy = np.ones_like(t)

        if self.freq is None:
            freqs, powers = LombScargle(t, y, dy).autopower(minimum_frequency=0.5,
                                                            maximum_frequency=10,
                                                            samples_per_peak=10)
            self.freq = freqs[np.argmax(powers)]
Example #14
def Gen_flsp(time, flux, NyFreq, s):
    if len(time) != 1:
        frequency = np.linspace(0, NyFreq, s)
        power = LombScargle(time, flux).power(frequency, method='fast')
        return {'Freq': frequency, 'Amp': power}
    else:
        return {'Freq': np.linspace(0, 1, s), 'Amp': np.ones(s)}
Example #15
def fold(t, y, periodogram=False):
    """Folds data on pattern frequency
	
	if periodogram = True then returns T, frequency, power
	otherwise by default
	returns T only"""

    frequency, power = LombScargle(
        t, y).autopower()  #find frequencies that contribute to describe data
    fpat = frequency[np.argmax(
        power
    )]  #find the most important frequency, this will likely be the period of the orbit

    #create array of folded times T
    T = t * 0  #initialize array for folded times T
    c = 0  #initialize c the multiplication factor to choose the bin in which each datapoint goes

    for i in range(len(t)):  #iterate over all numbers in t
        while (
                t[i] >= c / fpat
        ):  #if the time t[i] is larger than the current multiple of the period  (=1/pattern frequency)
            c = c + 1  #	then add 1 to the multiplication factor c until t[i] is smaller than c*period
        T[i] = t[i] - c / fpat  #remove c-1 periods from the time

    if periodogram == True:
        return T, frequency, power
    else:
        return T
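For sorted t, the while-loop above is equivalent to (t % (1/fpat)) shifted down by one period, so the folded times come out in (-1/fpat, 0]. A hypothetical usage sketch (assumes fold and numpy are importable):

import numpy as np

rng = np.random.default_rng(3)
t = np.sort(rng.uniform(0.0, 50.0, 500))           # days
period = 3.7
y = np.sin(2 * np.pi * t / period) + 0.05 * rng.standard_normal(t.size)

T, frequency, power = fold(t, y, periodogram=True)
print(1.0 / frequency[np.argmax(power)])           # recovered period, expected near 3.7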
Example #16
def compute_periodogram(data: np.ndarray, kwargs: Dict = None) -> np.ndarray:
    """
    Computes a given periodogram from the lightcurve
    :param data: Lightcurve dataset
    :return: Periodogram from the dataset
    """
    indx = np.isfinite(data[1])
    df = 1 / (86400 * (np.amax(data[0][indx]) - np.amin(data[0][indx])))  # Hz
    ls = LombScargle(data[0][indx] * 86400, data[1][indx], center_data=True)
    nyq = 1 / (2 * 86400 * np.median(np.diff(data[0][indx])))

    df = fundamental_spacing_integral(df, nyq, ls)

    freq = np.arange(df, nyq, df)
    power = ls.power(freq,
                     normalization='psd',
                     method='fast',
                     assume_regular_frequency=True)

    N = len(ls.t)
    tot_MS = np.sum((ls.y - np.mean(ls.y))**2) / N
    tot_lomb = np.sum(power)
    normfactor = tot_MS / tot_lomb
    freq *= 10**6
    power *= normfactor / (df * 10**6)

    return np.array((rebin(freq, 1), rebin(power, 1)))
Example #17
def __get_freq_psd_from_nn_intervals(nn_intervals, method, sampling_frequency,
                                     interpolation_method, vlf_band, hf_band):
    timestamp_list = __create_timestamp_list(nn_intervals)

    if method == "welch":
        funct = interpolate.interp1d(x=timestamp_list,
                                     y=nn_intervals,
                                     kind=interpolation_method)
        timestamps_interpolation = __create_interpolated_timestamp_list(
            nn_intervals, sampling_frequency)
        nni_interpolation = funct(timestamps_interpolation)
        nni_normalized = nni_interpolation - np.mean(nni_interpolation)
        freq, psd = signal.welch(x=nni_normalized,
                                 fs=sampling_frequency,
                                 window='hann',
                                 nfft=4096)

    elif method == "lomb":
        freq, psd = LombScargle(timestamp_list,
                                nn_intervals,
                                normalization='psd').autopower(
                                    minimum_frequency=vlf_band[0],
                                    maximum_frequency=hf_band[1])

    else:
        raise ValueError(
            "Not a valid method. Choose between 'lomb' and 'welch'")

    return freq, psd
Example #18
def estimate_frequencies(x,
                         y,
                         fmin=None,
                         fmax=None,
                         max_peaks=9,
                         oversample=4.0,
                         optimize_f=True):
    tmax = x.max()
    tmin = x.min()
    dt = np.median(np.diff(x))
    df = 1.0 / (tmax - tmin)
    ny = 0.5 / dt

    if fmin is None:
        fmin = df
    if fmax is None:
        fmax = ny

    freq = np.arange(fmin, fmax, df / oversample)
    power = LombScargle(x, y).power(freq)

    # Find peaks
    peak_inds = (power[1:-1] > power[:-2]) & (power[1:-1] > power[2:])
    peak_inds = np.arange(1, len(power) - 1)[peak_inds]
    peak_inds = peak_inds[np.argsort(power[peak_inds])][::-1]
    peaks = []
    for j in range(max_peaks):
        i = peak_inds[0]
        freq0 = freq[i]
        alias = 2.0 * ny - freq0

        m = np.abs(freq[peak_inds] - alias) > 25 * df
        m &= np.abs(freq[peak_inds] - freq0) > 25 * df

        peak_inds = peak_inds[m]
        peaks.append(freq0)
    peaks = np.array(peaks)

    if optimize_f:

        def chi2(nu):
            arg = 2 * np.pi * nu[None, :] * x[:, None]
            D = np.concatenate(
                [np.cos(arg), np.sin(arg),
                 np.ones((len(x), 1))], axis=1)

            # Solve for the amplitudes and phases of the oscillations
            DTD = np.matmul(D.T, D)
            DTy = np.matmul(D.T, y[:, None])
            w = np.linalg.solve(DTD, DTy)
            model = np.squeeze(np.matmul(D, w))

            chi2_val = np.sum(np.square(y - model))
            return chi2_val

        res = optimize.minimize(chi2, [peaks], method="L-BFGS-B")
        return res.x
    else:
        return peaks
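A hypothetical exercise of estimate_frequencies on a two-mode test signal (assumes the function above and its module-level imports are in scope; max_peaks is lowered to match the number of injected modes and the least-squares refinement is skipped):

import numpy as np

t = np.arange(0.0, 80.0, 0.02)                     # days, regular cadence
y = 0.8 * np.sin(2 * np.pi * 5.1 * t) + 0.5 * np.sin(2 * np.pi * 7.3 * t)

nu = estimate_frequencies(t, y, max_peaks=2, optimize_f=False)
print(np.sort(nu))                                 # roughly [5.1, 7.3] d^-1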
Example #19
def get_periodogram(t, amp=0, clip=80):

    time = t['t (sec)']
    y = t['Amp {} Val'.format(amp)]
    pts = np.abs(y) < clip

    psd = LombScargle(time[pts], y[pts]).power(frequency=freqGrid)
    return psd
Example #20
def visibility_MC(P, RV, t):
    LS = LombScargle(t.value, RV.value)
    frequency, power = LS.autopower()
    false_alarm = LS.false_alarm_level(0.01, method='bootstrap')
    prob = LS.power(1. / P.value)
    if power.max() > false_alarm:
        return True
    else:
        return False
Example #21
def visibility(Planet, Observations):
    LS = LombScargle(Observations.t.value, Observations.RV.value)
    frequency, power = LS.autopower()
    false_alarm = LS.false_alarm_level(0.01, method='bootstrap')
    prob = LS.power(1. / Planet.P.value)
    if power.max() > false_alarm:
        return True
    else:
        return False
Example #22
def _get_freq_psd_from_nn_intervals(nn_intervals: List[float], method: str = WELCH_METHOD,
                                    sampling_frequency: int = 4,
                                    interpolation_method: str = "linear",
                                    vlf_band: namedtuple = VlfBand(0.003, 0.04),
                                    hf_band: namedtuple = HfBand(0.15, 0.40)) -> Tuple:
    """
    Returns the frequency and power of the signal.

    Parameters
    ---------
    nn_intervals : list
        list of Normal to Normal Interval
    method : str
        Method used to calculate the psd. Choices are Welch's FFT or Lomb method.
    sampling_frequency : int
        Frequency at which the signal is sampled. Common values range from 1 Hz to 10 Hz,
        set to 4 Hz by default. No need to specify if Lomb method is used.
    interpolation_method : str
        Kind of interpolation as a string, by default "linear". No need to specify if Lomb
        method is used.
    vlf_band : tuple
        Very low frequency bands for features extraction from power spectral density.
    hf_band : tuple
        High frequency bands for features extraction from power spectral density.

    Returns
    ---------
    freq : list
        Frequency of the corresponding psd points.
    psd : list
        Power Spectral Density of the signal.
    """

    timestamp_list = _create_timestamp_list(nn_intervals)

    if method == WELCH_METHOD:
        # ---------- Interpolation of signal ---------- #
        funct = interpolate.interp1d(x=timestamp_list, y=nn_intervals, kind=interpolation_method)

        timestamps_interpolation = _create_interpolated_timestamp_list(nn_intervals, sampling_frequency)
        nni_interpolation = funct(timestamps_interpolation)

        # ---------- Remove DC Component ---------- #
        nni_normalized = nni_interpolation - np.mean(nni_interpolation)

        #  --------- Compute Power Spectral Density  --------- #
        freq, psd = signal.welch(x=nni_normalized, fs=sampling_frequency, window='hann',
                                 nfft=4096)

    elif method == LOMB_METHOD:
        freq, psd = LombScargle(timestamp_list, nn_intervals,
                                normalization='psd').autopower(minimum_frequency=vlf_band[0],
                                                               maximum_frequency=hf_band[1])
    else:
        raise ValueError("Not a valid method. Choose between 'lomb' and 'welch'")

    return freq, psd
Example #23
def periodogram(filenam):
    """Takes file filename and outputs best period with automatically determined frequency grid."""
    time, magnitude, err = np.loadtxt(filenam, dtype='float', comments='%').T
    frequency, power = LombScargle(time, magnitude,
                                   err).autopower(minimum_frequency=0.01,
                                                  maximum_frequency=20)
    period = 1 / frequency[np.argmax(power)]
    print(period)
    return period
Example #24
def pb_TLS(doplot=False):
  global lc
  global ls_freq
  global ns
  
  ts = lc[:, 0]
  fs  = copy.copy(lc[:, 1])
  if flag_endmatch:
    fs -= ((fs[-1] - fs[0])/(ts[-1] - ts[0]) * (ts - ts[0]) + fs[0])
  lsp_data = LombScargle(ts, fs).power(ls_freq, normalization='standard')
  idxmax = np.argmax(lsp_data)
  
  period_obs = 1.0/ls_freq[idxmax]/365.0
  lspmax_obs = lsp_data[idxmax]
  print "LS data:", (period_obs, lspmax_obs)

  TLS_obs = lspmax_obs
  TLS = np.zeros(ns)
  
  for k in range(ns):
    i = np.random.randint(sample.shape[0])
    ts, fs = genlc_psd_data(sample[i, :])
    if flag_endmatch:
      fs -= ((fs[-1] - fs[0])/(ts[-1] - ts[0]) * (ts - ts[0]) + fs[0])
    lsp = LombScargle(ts, fs).power(ls_freq, normalization='standard')
    idxmax = np.argmax(lsp)
    period = 1.0/ls_freq[idxmax]/365.0
    TLS[k] = lsp[idxmax]

    #plt.plot(1.0/ls_freq, lsp)
    #plt.plot(1.0/ls_freq, lsp_data)
    #plt.show()
   
    
  pb_TLS = np.sum(TLS>TLS_obs)*1.0/ns
  print "TLS:", pb_TLS
  
  if doplot:
    plt.hist(TLS, bins=50)
    plt.axvline(x=TLS_obs, color='r')
    plt.show()
    plt.close()

  return pb_TLS, TLS, TLS_obs
Example #25
def ls_well2(well_id):
    # Set up time-series
    wl_signal = pd.read_csv(data_path + well_id + "_ibp_main.csv",
                            index_col=None,
                            parse_dates=['date_time'])
    wl_signal['t_delta'] = (
        wl_signal.date_time - wl_signal.date_time.iloc[0]
    ).dt.total_seconds()  # Create a cumulative time column
    wl_signal = wl_signal[wl_signal.qual_c == 1]  # Drop flagged data
    wl_signal.dropna(inplace=True)  # Remove rows with nan
    gelev_m = sensor_meta[
        sensor_meta['sensor'] ==
        well_id].ground_elev_ft.drop_duplicates().item() * 0.3048

    # Set up input arrays and model parameters
    x = np.array(wl_signal.t_delta)
    t = x / (3600.)
    y = np.array(wl_signal.WS_elevation_m - gelev_m)  # Choose which column
    #y_detrend = scipy.signal.detrend(y)
    power = LombScargle(t, y).power(
        frequency)  # Frequency set above in rain series analysis

    dayfreq = (frequency[np.argmax(power)]**-1) / 24.0
    annfreq = dayfreq / 365.25
    report = "Max power in " + well_id + " signal at: " + "{0:0.4f}".format(
        annfreq) + " years or " + "{0:0.1f}".format(dayfreq) + " days."

    pn_l = np.log10(power)
    freq_l = np.log10(frequency)
    A = np.vstack([freq_l, np.ones(len(freq_l))]).T
    l_ind = np.where(frequency < bkp_w)
    r_ind = np.where(frequency > bkp_w)

    pn_log = pn_l[l_ind]
    regress = np.linalg.lstsq(A[l_ind],
                              pn_log)  #Slope, intercept of loglog regression
    ml, bl = regress[0]
    #rsq1 = (1 - regress[1] / sum((pn_log - pn_log.mean())**2))[0]

    pn_log = pn_l[r_ind]
    regress = np.linalg.lstsq(A[r_ind],
                              pn_log)  #Slope, intercept of loglog regression
    mr, br = regress[0]
    print(well_id + " slope: " + "{0:0.4f}".format(mr))
    #rsq2 = (1 - regress[1] / sum((pn_log - pn_log.mean())**2))[0]

    #Plot
    plt.figure()
    ax1 = plt.subplot(2, 1, 1)
    ax1.scatter(x, y)
    ax2 = plt.subplot(2, 1, 2)
    ax2.loglog(wavelength_day, power)
    #ax2.loglog(frequency[l_ind],10**(ml*freq_l[l_ind]+bl), 'r-')
    #ax2.loglog(frequency[r_ind],10**(mr*freq_l[r_ind]+br), 'r-')
    plt.suptitle(well_id)
    print(report)
Example #26
def lombs(x, y):
    """ lombs calculates LombScargle for inputs x, y.
    """
    # Calculate curvature.
    curv = curvature(x, y)
    steps = np.sqrt(np.diff(x, axis=0)**2 + np.diff(y, axis=0)**2)[:-1]
    arc = np.cumsum(steps)
    # Calculate LS.
    ls_f, ls_p = LombScargle(arc, curv).autopower()
    return ls_f, ls_p
Example #27
def lscargleTrans(df_main):
    try:
        frequency, power = LombScargle(df_main['mjd'], df_main['flux'])\
                                       .autopower(nyquist_factor=1)
        period = 1 / frequency[np.argmax(power)]
    except ValueError:
        period = 0
        power = np.array([0.0])  # keep `power` defined for the Series built below
    period = pd.Series([period, power.mean()], index=['period', 'pow'])
    #    freq_df = pd.Series(df_main['mjd']/period)%1, )
    return period
Example #28
def do_lombscargle(thedata, time):
    # compute lombscargle
    tvec, dvec, edvec = thedata

    mask = np.isfinite(tvec) & np.isfinite(dvec)
    tvec, dvec, edvec = tvec[mask], dvec[mask], edvec[mask]  # keep uncertainties aligned with the masked data
    tvec = tvec - tvec.min()
    power = LombScargle(tvec, dvec, dy=edvec).power(1.0 / time)

    return 1.0 / time, abs(power)
Example #29
def pow(T, d):
    try:
        c = d.dropna()
        t = np.array(c.index, dtype='datetime64[h]').astype(float)
        if max(t) - min(t) < T / 4: return np.nan
        x = c.to_numpy()
        y = LombScargle(t, x).model(np.linspace(0, T, 100), 1 / T)
        return max(y) - min(y)
    except Exception:
        return np.nan
Example #30
def nonparametric_lomb_scargle(t,
                               y,
                               dy,
                               minimum_frequency=1. / 100.,
                               maximum_frequency=1. / 0.2,
                               samples_per_peak=5,
                               max_nterms=10):
    """
    Non-parametric multi-harmonic Lomb Scargle periodogram

    Uses the Bayesian Information Criterion to automatically
    choose the best number of harmonics to use at each
    trial frequency.

    Parameters
    ----------
    t: array_like
        Observation times.
    y: array_like
        Observations.
    dy: array_like
        Observation uncertainties.
    minimum_frequency: float
        Minimum frequency to search.
    maximum_frequency: float
        Maximum frequency to search.
    samples_per_peak: float
        Oversampling factor.
    max_nterms: int
        Maximum number of terms to use for any fit.
    """
    chi2_0 = chi2(t, y, dy)
    n = len(t)

    autopower_kwargs = dict(minimum_frequency=minimum_frequency,
                            maximum_frequency=maximum_frequency,
                            samples_per_peak=samples_per_peak)

    # compute mhgls periodograms for each harmonic h=1, 2, ..., H
    nterms = 1 + np.arange(max_nterms)
    periodograms = [(LombScargle(t, y, dy,
                                 nterms=h).autopower(**autopower_kwargs))
                    for h in nterms]

    frequencies = periodograms[0][0]
    periodograms = [p for f, p in periodograms]

    # add bic penalties
    periodograms = [(p * chi2_0 - 2 * h * np.log(n)) / (chi2_0 + np.log(n))
                    for p, h in zip(periodograms, nterms)]

    # get the max over all harmonics
    p_np = np.stack(periodograms).T.max(axis=-1)

    return frequencies, p_np
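A hedged usage sketch: the chi2 helper is not shown above, so it is assumed here to be the chi-square of the data about their weighted mean, which matches how chi2_0 is used in the BIC penalty; the data and call below are hypothetical:

import numpy as np

def chi2(t, y, dy):
    # Assumed helper: chi-square of the observations about their weighted mean.
    w = 1.0 / dy**2
    ybar = np.sum(w * y) / np.sum(w)
    return np.sum(((y - ybar) / dy)**2)

rng = np.random.default_rng(4)
t = np.sort(rng.uniform(0, 100, 300))
dy = np.full(t.size, 0.05)
y = (np.sin(2 * np.pi * t / 7.0)
     + 0.3 * np.sin(4 * np.pi * t / 7.0)
     + dy * rng.standard_normal(t.size))

freqs, p_np = nonparametric_lomb_scargle(t, y, dy, max_nterms=3)
print(1.0 / freqs[np.argmax(p_np)])                # best period, expected near 7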