Example #1
    def test_frequency(self):
        """Test if frequency location of peak corresponds to frequency of
        generated input signal.
        """

        # Input parameters
        ampl = 2.
        w = 1.
        phi = 0.5 * np.pi
        nin = 100
        nout = 1000
        p = 0.7 # Fraction of points to discard (points with r >= p are kept)

        # Randomly select a fraction of an array with timesteps
        np.random.seed(2353425)
        r = np.random.rand(nin)
        t = np.linspace(0.01*np.pi, 10.*np.pi, nin)[r >= p]

        # Generate a sine wave at the selected times
        x = ampl * np.sin(w*t + phi)

        # Define the array of frequencies for which to compute the periodogram
        f = np.linspace(0.01, 10., nout)

        # Calculate Lomb-Scargle periodogram
        P = lombscargle(t, x, f)

        # Check that the difference between the located peak frequency and
        # the input frequency is within half a frequency-grid step
        delta = f[1] - f[0]
        assert_(np.abs(w - f[np.argmax(P)]) < (delta/2.))
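These test snippets assume imports along the following lines (a minimal sketch; the exact imports in the original test module are not shown and may differ):

import numpy as np
from scipy.signal import lombscargle
from numpy.testing import assert_, assert_approx_equal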
Example #2
    def test_amplitude(self):
        """Test if height of peak in normalized Lomb-Scargle periodogram
        corresponds to amplitude of the generated input signal.
        """

        # Input parameters
        ampl = 2.
        w = 1.
        phi = 0.5 * np.pi
        nin = 100
        nout = 1000
        p = 0.7 # Fraction of points to discard (points with r >= p are kept)

        # Randomly select a fraction of an array with timesteps
        np.random.seed(2353425)
        r = np.random.rand(nin)
        t = np.linspace(0.01*np.pi, 10.*np.pi, nin)[r >= p]

        # Generate a sine wave at the selected times
        x = ampl * np.sin(w*t + phi)

        # Define the array of frequencies for which to compute the periodogram
        f = np.linspace(0.01, 10., nout)

        # Calculate Lomb-Scargle periodogram
        pgram = lombscargle(t, x, f)

        # Normalize
        pgram = np.sqrt(4 * pgram / t.shape[0])

        # Check that the height of the normalized peak matches the input
        # amplitude to two significant figures
        assert_approx_equal(np.max(pgram), ampl, significant=2)
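The sqrt(4 * pgram / n) normalization works because, for a pure sinusoid of amplitude A sampled at n points, the Lomb-Scargle peak power approaches n * A**2 / 4. A minimal standalone sketch of the same check (all names here are illustrative):

import numpy as np
from scipy.signal import lombscargle

rng = np.random.default_rng(0)
t = np.sort(10.0 * rng.random(500))
x = 2.0 * np.sin(1.5 * t)               # amplitude 2, angular frequency 1.5
w = np.linspace(0.1, 5.0, 1000)         # angular frequency grid
P = lombscargle(t, x, w)
print(np.sqrt(4.0 * P.max() / t.size))  # close to 2.0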
Example #5
def lomb_search(dates,y,pmin,pmax,nfreq=1000,fap_threshold=1e-7,maxvars=100,plotfile='/dev/null'):
    nstars = y.shape[0]
    npoints = y.shape[1]
    logp = np.linspace(np.log10(pmin), np.log10(pmax), nfreq)
    freq = 2*np.pi/(10.0**logp)
    variables = []
    periods = []
    faps = []
    nvars = 0
    for star in range(nstars):
        if nvars < maxvars:
            qq = np.where(np.isfinite(y[star,:]))[0]
            if len(qq)>100:
                print(star)
                print(dates[qq])
                print(y[star,qq])
                lnp = spectral.lombscargle(dates[qq], (y[star,qq]-np.mean(y[star,qq]))/np.std(y[star,qq]), freq)
                lnpmax = np.max(lnp)
                p = 2*np.pi/freq[np.where(lnp == lnpmax)[0][0]]
                #            fap = 1.0 - (1.0 - (1.0-2.0*lnpmax/npoints)**( (npoints-3)/2.0 ) )**nfreq
                fap = ((npoints-3)/2.0)*np.exp(-lnpmax)
                if fap < fap_threshold and not(np.abs(p-1.0) < 0.01) and not(np.abs(p-0.5) < 0.01) and not(np.abs(p-0.33333) < 0.01):
                    print(star, p, np.max(lnp), fap)
                    variables.append(star)
                    periods.append(p)
                    faps.append(fap)
                    plt.figure()
                    plt.subplot(3,1,1)
                    plt.plot(dates[qq],y[star,qq],'b.')
                    ymax = np.percentile(y[star,qq],99)
                    ymin = np.percentile(y[star,qq],1)
                    plt.ylim([ymin,ymax])
                    ax=plt.gca()
                    ax.set_ylim(ax.get_ylim()[::-1])
                    lfap = np.log10(fap)
                    plt.title(str(star)+'     P = '+"%6.3f"%p+' d      $log_{10}$ FAP = '+"%4.1f"%lfap)
                    plt.xlabel('Date')
                    plt.ylabel('Magnitude')
                    plt.subplot(3,1,2)
                    cycle = dates[qq]/p
                    phase = cycle - np.floor(cycle)
                    plt.plot(phase,y[star,qq],'b.',phase+1,y[star,qq],'b.')
                    plt.ylim([ymin,ymax])
                    ax=plt.gca()
                    ax.set_ylim(ax.get_ylim()[::-1])
                    plt.xlabel('PHASE')
                    plt.ylabel('Magnitude')
                    plt.subplot(3,1,3)
                    plt.plot(logp,lnp,'r-')
                    plt.xlabel('$log_{10}$ Period (d)')
                    plt.ylabel('LNP')
                    plt.savefig('var%(star)05d.png'%vars(),orientation='portrait',papertype='a4')
                    plt.close()
                    nvars += 1
    return variables, periods, faps
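The commented-out line above is the usual multi-frequency false-alarm probability, FAP = 1 - (1 - prob_single)**nfreq, while the line actually used is a cheaper small-FAP approximation; the (npoints-3)/2 prefactor is carried over from the original code rather than derived here. A sketch of both, for comparison:

import numpy as np

def fap_exact(zmax, npoints, nfreq):
    # single-frequency tail probability, then correct for nfreq trials
    prob_single = (1.0 - 2.0*zmax/npoints)**((npoints - 3)/2.0)
    return 1.0 - (1.0 - prob_single)**nfreq

def fap_approx(zmax, npoints):
    # small-FAP approximation used in lomb_search above
    return ((npoints - 3)/2.0)*np.exp(-zmax)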
Example #6
def render_frequency_prevalence(gf, period, templ):
    """
    The frequency is in samples/year.
    """

    ff = np.zeros((gf.d.shape[1], gf.d.shape[2]), dtype=np.float64)
    tm = np.arange(0, gf.d.shape[0] / 12.0, 1.0 / 12, dtype=np.float64)
    for i in range(gf.d.shape[1]):
        for j in range(gf.d.shape[2]):
            pg = lombscargle(tm, gf.d[:, i, j].astype(np.float64), np.array([2.0 * np.pi / period]))
            ff[i, j] = np.sqrt(pg[0] * 4.0 / tm.shape[0])

    f = render_component_single(ff, gf.lats, gf.lons, None, None, '%gyr period' % period)
    f.savefig('figs/%s_%dyr_cycle_prevalence.pdf' % (templ, period))
Example #7
def pgram(x, y, peak1):
    fs = np.linspace(5, 45, 10000) # c/d
    ws = 2*np.pi*fs  # lombscargle uses angular frequencies
    pgram = lombscargle(x, y, ws)
    plt.clf()
    plt.subplot(2, 1, 1)
#     plt.errorbar(x, y, yerr=yerr, fmt='k.', capsize=0, ecolor='.8')
    plt.plot(x, y, 'k.')
    plt.subplot(2, 1, 2)
    plt.xlabel(r'$\mu~c/d$')
    plt.axvline(peak1, color='r')
    plt.plot(ws/(2*np.pi), pgram)
    plt.show()
    input('enter')
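Note that scipy.signal's lombscargle takes angular frequencies, which is why the function above multiplies by 2*pi before the call and divides by 2*pi for plotting. Two tiny helpers for those conversions (illustrative, not part of the original code):

import numpy as np

def cycles_to_angular(f):
    # lombscargle expects omega = 2*pi*f
    return 2.0 * np.pi * np.asarray(f)

def angular_to_period(omega):
    # period corresponding to an angular frequency
    return 2.0 * np.pi / np.asarray(omega)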
Example #8
def render_frequency_prevalence(gf, period, templ):
    """
    The frequency is in samples/year.
    """

    ff = np.zeros((gf.d.shape[1], gf.d.shape[2]), dtype=np.float64)
    tm = np.arange(0, gf.d.shape[0] / 12.0, 1.0 / 12, dtype=np.float64)
    for i in range(gf.d.shape[1]):
        for j in range(gf.d.shape[2]):
            pg = lombscargle(tm, gf.d[:, i, j].astype(np.float64),
                             np.array([2.0 * np.pi / period]))
            ff[i, j] = np.sqrt(pg[0] * 4.0 / tm.shape[0])

    f = render_component_single(ff, gf.lats, gf.lons, None, None,
                                '%gyr period' % period)
    f.savefig('figs/%s_%dyr_cycle_prevalence.pdf' % (templ, period))
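Since render_frequency_prevalence evaluates the periodogram at a single angular frequency per grid cell, each ff[i, j] is just the estimated amplitude of that one cycle. A single-series sketch of the same computation (synthetic data; assumes monthly sampling as in the function above):

import numpy as np
from scipy.signal import lombscargle

tm = np.arange(0, 50, 1.0 / 12)                # 50 years of monthly samples
series = 1.3 * np.sin(2.0 * np.pi * tm / 5.0)  # 5-year cycle, amplitude 1.3
pg = lombscargle(tm, series, np.array([2.0 * np.pi / 5.0]))
print(np.sqrt(pg[0] * 4.0 / tm.shape[0]))      # close to 1.3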
Example #9
    def __init__(self, timeData, magnitudeData, freq_low, \
                 freq_high, freq_num):
        """Initializing the LombScargle Object"""
        self.freqs = np.linspace(freq_low, freq_high, freq_num)

        # Convert arrays to Numpy Arrays for lombscargle to understand
        timeArray = np.asarray(timeData)
        magnitudeArray = np.asarray(magnitudeData)

        # Calculate the periodogram and store it
        self.power = spectral.lombscargle(timeArray, magnitudeArray, \
                                          self.freqs)
        self.periodogram = np.column_stack([self.freqs, self.power])

        # The Frequency and LS-Power of the Maximum LS-Power
        self.maxLS = [self.freqs[self.power.argmax(axis=0)], \
                      np.amax(self.power)]
Example #10
File: ls.py Project: eaydin/WWZ
    def __init__(self, timeData, magnitudeData, freq_low, \
                 freq_high, freq_num):
        """Initializing the LombScargle Object"""
        self.freqs = np.linspace(freq_low, freq_high, freq_num)

        # Convert arrays to Numpy Arrays for lombscargle to understand
        timeArray = np.asarray(timeData)
        magnitudeArray = np.asarray(magnitudeData)

        # Calculate the periodogram and store it
        self.power = spectral.lombscargle(timeArray, magnitudeArray, \
                                          self.freqs)
        self.periodogram = np.column_stack([self.freqs, self.power])

        # The Frequency and LS-Power of the Maximum LS-Power
        self.maxLS = [self.freqs[self.power.argmax(axis=0)], \
                      np.amax(self.power)]
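A hypothetical instantiation; the enclosing class name is not shown in the snippet, so LombScargle is an assumption based on the docstring, and times/mags stand in for real observation arrays:

times = [0.0, 1.3, 2.1, 3.8, 5.2, 6.9]
mags = [10.2, 10.5, 10.1, 10.6, 10.2, 10.4]
ls = LombScargle(times, mags, freq_low=0.1, freq_high=5.0, freq_num=1000)  # hypothetical class name
peak_freq, peak_power = ls.maxLS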
Example #11
# if(DEBUG > 10): print(colored(np.sum(peaks_sci_diff)/len(peaks_sci_diff),"green"))

# s0_smooth = spline(s1_data[peaks_sci_s1_idx], s1_t[peaks_sci_s1_idx], s1_t)
# plt.plot(s1_t, s0_smooth, label = "spline (????????????????)" )

####################################################
# COMPUTE PERIOD OF A ROTATION
####################################################
if (COMPUTE_ANGULAR_FREQUENCY_DEGREE_DRIFT):
    # SRC: https://stackoverflow.com/questions/13349181/using-scipy-signal-spectral-lombscargle-for-period-discovery?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa
    from scipy.signal import spectral
    # generate 100 frequencies between 0.01 and 1
    freqs = np.linspace(0.01, 1, 100)

    # computes the Lomb Scargle Periodogram of the time and scaled magnitudes using each frequency as a guess
    periodogram_s1 = spectral.lombscargle(s1_t, s1_data, freqs)
    periodogram_s2 = spectral.lombscargle(s2_t, s2_data, freqs)
    # if(DEBUG > 10): print(periodogram)
    # returns the inverse of the frequency (i.e. the period) of the largest periodogram value

    angular_freq_s1 = 1 / freqs[np.argmax(periodogram_s1)]
    angular_freq_s2 = 1 / freqs[np.argmax(periodogram_s2)]
    if (DEBUG > 10):
        print(args.s1_filename, "Angular frequency:", angular_freq_s1,
              "radians per second")
    if (DEBUG > 10):
        print(args.s2_filename, "Angular frequency:", angular_freq_s2,
              "radians per second")
    if (DEBUG > 10):
        print(args.s1_filename, "Period:",
              (2 * math.pi) / (1 / angular_freq_s1))
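A caution on naming in the block above: freqs is a grid of angular frequencies omega, so 1/freqs[argmax] is 1/omega rather than an angular frequency, and the final print recovers the period as 2*pi/omega. The identity it relies on, spelled out with illustrative values:

import math

omega = 0.25                        # angular frequency, rad/s
inv = 1.0 / omega                   # what the code stores as "angular_freq"
period = (2 * math.pi) / (1 / inv)  # same as 2*pi/omega
assert abs(period - 2 * math.pi / omega) < 1e-12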
Example #12
def process_data(s1,
                 s2,
                 truncate_start,
                 truncate_end,
                 smooth_cut_off_freq=None,
                 smooth_cut_off_freq_w_s0=None,
                 s1_name=None,
                 s2_name=None,
                 compute_periodogram=False):

    # global SMOOTH_CUT_OFF_FREQ
    # SMOOTH_CUT_OFF_FREQ = smooth_cut_off_freq
    ########################################################
    ### ALIGN S1 AND S2 BEFORE XCORR
    ########################################################
    # s1,s2 = align(s1,s2)
    s1, s2 = align(s1, s2)

    s1_t = s1[:, 0]
    s2_t = s2[:, 0]

    s1_data = s1[:, DATA_COL]
    s2_data = s2[:, DATA_COL]

    ###############################################
    ## TRUNCATE
    ###############################################
    if truncate_end != 0:
        TRUNCATE_END = truncate_end
        s1_t = s1_t[:TRUNCATE_END]
        s2_t = s2_t[:TRUNCATE_END]
        s1_data = s1_data[:TRUNCATE_END]
        s2_data = s2_data[:TRUNCATE_END]

    if truncate_start != 0:
        TRUNCATE_START = truncate_start
        s1_t = s1_t[TRUNCATE_START:]
        s2_t = s2_t[TRUNCATE_START:]
        s1_data = s1_data[TRUNCATE_START:]
        s2_data = s2_data[TRUNCATE_START:]

    # s1_data = smoothen_without_shift(s1_data)
    # s2_data = smoothen_without_shift(s2_data)
    # impulse_length = 5000
    if (SMOOTH_WO_SHIFT):
        s1_data = smoothen_without_shift(
            s1_data, smooth_cut_off_freq_w_s0)  #, impulse_length)
        s2_data = smoothen_without_shift(
            s2_data, smooth_cut_off_freq)  #, impulse_length)

    if (NORMALIZE):
        # pass
        s1_data = normalize_regularize(s1_data)
        s2_data = normalize_regularize(s2_data)

    if (TRIM_START_END):
        s1_t = s1_t[TRIM_LENGTH:len(s1_t) - TRIM_LENGTH]
        s2_t = s2_t[TRIM_LENGTH:len(s2_t) - TRIM_LENGTH]
        s1_data = s1_data[TRIM_LENGTH:len(s1_data) - TRIM_LENGTH]
        s2_data = s2_data[TRIM_LENGTH:len(s2_data) - TRIM_LENGTH]
        # print(len(s1_data),len(s2_data))

    if (DEBUG > 10): print("len " + s1_name, len(s1_t), len(s1_data))
    if (DEBUG > 10): print("len " + s2_name, len(s2_t), len(s2_data))

    # print(find_peaks(s1_data,height=1,width=1000))
    # print(find_peaks(s2_data,height=1,width=1000))

    if (not CORRELATION_CIRCULAR):
        # METHOD 1: CORRELATION
        # numpy default correlation, 'zero padding?'
        xcorr = correlate(s1_data, s2_data)
        # xcorr = fftconvolve(s1_data, s2_data)

        time_shift = (len(xcorr) // 2 - xcorr.argmax())
        # print(len(xcorr)/2,xcorr.argmax(),time_shift)
    else:
        # METHOD 2: CORRELATION
        # periodic/circular correlation
        xcorr = periodic_corr_np(s2_data, s1_data)
        xcorr_max = xcorr.argmax()
        # time_shift = xcorr_max
        if xcorr_max <= len(s1_data) / 2:
            time_shift = xcorr_max
        else:
            time_shift = xcorr_max - len(s1_data)

    if (SHOW_PLOT):
        plt.plot(s1_data, label=s1_name)
        plt.plot(s2_data, label=s2_name)
        # plt.plot(wiener(s2_data,1000), label = 'wiener')
        # plt.plot(medfilt(s2_data,5), label = 'medfilt')
        # plt.plot(normalize_regularize(fftconvolve(s1_data, s2_data)), label = 'fftconvolve')
        if (SHOW_PLOT_XCORR):
            plt.plot(normalize_regularize(xcorr), label="xcorr")
        plt.legend()
        plt.show()

    if (DEBUG > 2):
        print(colored("Time shift:" + str(time_shift) + " ms", "yellow"))

    # generate 100 frequencies between 0.01 and 1
    freqs = np.linspace(0.01, 1, 100)

    # computes the Lomb Scargle Periodogram of the time and scaled magnitudes using each frequency as a guess
    if (compute_periodogram):
        periodogram_s0 = spectral.lombscargle(s1_t, s1_data, freqs)
        angular_freq_s0 = 1 / freqs[np.argmax(periodogram_s0)]
        period_s0 = round(((2 * math.pi) / (1 / angular_freq_s0)) * 1000, 2)

        # lag magnitude exceeds half a period: wrap into [0, period)
        if (math.fabs(time_shift * 2) > period_s0):
            time_shift = round(time_shift % period_s0, 1)
        # wrapped lag in the upper half of the period: shift to the negative side
        if ((time_shift * 2) > period_s0):
            time_shift = round(time_shift - period_s0, 1)
    else:
        period_s0 = 1

    return time_shift, period_s0
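The two if statements near the end fold a circular cross-correlation lag into roughly plus or minus half a period. An equivalent, arguably clearer sketch of that wrapping (a sketch under the same assumptions, not the original implementation):

def wrap_lag(time_shift, period):
    # map a circular lag into (-period/2, period/2]
    lag = time_shift % period
    if lag > period / 2:
        lag -= period
    return lag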
Example #13
totalCounts = curve.sum()
totalTime = (jd[len(jd)-1]-jd[0])*86400
countRate = totalCounts/totalTime

#scaled_curve = curve
scaled_curve = (curve-curve.mean())/curve.std()
time = jd

# Create array of frequencies to check for fourier components, function requires angular frequencies
freqs=np.linspace(1,4000,10000)/86400
#freqs = np.logspace(1,3.63,num=10000)
angular_freqs=2*np.pi*freqs

# Create periodogram using frequencies, times, and normalized curve
periodogram = spectral.lombscargle(time, scaled_curve, angular_freqs)

# Calculate eclipse period and frequency, and compare it to expected value
eclipse_period = 2*np.pi/(angular_freqs[np.argmax(periodogram)])
eclipse_frequency = 1/eclipse_period
expected_period = 0.01966127 # in days
print('Eclipse period =', eclipse_period, 'days.')
print('Eclipse frequency =', eclipse_frequency, 'cycles/day.')
print('Percent error = ' + str(100*(eclipse_period-expected_period)/expected_period) + '%')

# Create a figure with light curve in top plot and periodogram in bottom plot
fig = plt.figure()
# Plot light curve
ax = fig.add_subplot(211)
ax.plot(time,scaled_curve,'b.')
ax.set_title('Light Curve')
Example #14
def ls_spectral_estimate(freqs, tsn):
    """
    The frequencies must be in angular "months".
    """
    ls = lombscargle(np.arange(tsn.shape[0], dtype = np.float64), tsn, freqs)
    return np.sqrt(ls * 4.0 / tsn.shape[0])
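Usage sketch: with samples indexed 0..n-1 as the time axis, a cycle of length 12 samples sits at an angular frequency of 2*pi/12, and the returned value is its estimated amplitude (synthetic data, illustrative only):

import numpy as np

tsn = np.sin(2 * np.pi * np.arange(240) / 12.0)  # 20 "years" of monthly data
freqs = np.array([2 * np.pi / 12.0])             # annual cycle in angular "months"
print(ls_spectral_estimate(freqs, tsn))          # close to [1.0]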
Example #15
        x = np.concatenate((x, t))
        y = np.concatenate((y, flux))
        yerr = np.concatenate((yerr, flux_err))

# convert t to seconds
x *= 24.*60.*60.

# the frequency array
nu_maxHz = nu_max(m, r, teff)*1e-6
fs = np.arange(1300e-6, 1700e-6, 1e-7) # Hz
fs = np.linspace(5, 45, 10000) # c/d
ws = 2*np.pi*fs  # lombscargle uses angular frequencies

# convert ys to float64
y2 = np.empty((len(y)))
for i in range(len(y)):
    y2[i] = y[i].astype('float64')

y = y2
pgram = lombscargle(x, y2, ws)

print(np.var(y))
plt.clf()
plt.subplot(2, 1, 1)
plt.errorbar(x, y, yerr=yerr, fmt='k.', capsize=0, ecolor='.8')
plt.subplot(2, 1, 2)
plt.xlabel(r'$\mu Hz$')
plt.plot(ws/(2*np.pi)*1e6, pgram)
plt.savefig('KIC%s' % KID)
print('KIC%s.png' % KID)
Example #16
totalAverageBins = int(exptime/averagingTime)
timestepsPerBin = int(averagingTime/timestep)
# Specify frequencies for which the periodogram algorithm should find transform components.
freqs = np.linspace(1,1000,num=10**4)
angularFreqs=2*np.pi*freqs

binnedCounts, binEdges = np.histogram(timestamps, range=[0,exptime], bins=int(exptime/timestep))
times = binEdges[0:timestepsPerBin]

periodogramStack=np.zeros((totalAverageBins,len(freqs)))
tic = time()
print('Calculating individual Fourier Transforms...')
for bin in range(int(totalAverageBins)):
    counts = binnedCounts[bin*timestepsPerBin:(bin+1)*timestepsPerBin]
    scaledCounts = (counts-counts.mean())/counts.std()
    periodogram = spectral.lombscargle(times, scaledCounts, angularFreqs)
    periodogramStack[bin,:] = periodogram
    completed = 100*(float(bin)+1)/float(totalAverageBins)
    print('%.1f' % completed + '% complete')
print('Total Fourier Transform time: ' + str(time()-tic) + 's')

averageStack = np.zeros(len(freqs))
for i in range(len(freqs)):
    timeInterval = np.zeros(totalAverageBins)
    for j in range(totalAverageBins):
        timeInterval[j] = periodogramStack[j][i]
    averageStack[i] = np.average(timeInterval)

print('Saving data to txt files...')
# Save data to txt files.
g=open(savePath+'frequencyData.txt','w')
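The nested averaging loop above is just a column-wise mean; assuming periodogramStack is the 2-D array built earlier, the same result in one vectorized line would be:

averageStack = np.mean(periodogramStack, axis=0)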
Example #17
def process_data(s1,s2, truncate_start, truncate_end, smooth_cut_off_freq=SMOOTH_CUT_OFF_FREQ, s1_name=None, s2_name=None):

	global SMOOTH_CUT_OFF_FREQ
	SMOOTH_CUT_OFF_FREQ = smooth_cut_off_freq
	########################################################
	### ALIGN S1 AND S2 BEFORE XCORR
	########################################################
	s1,s2 = align(s1,s2)

	s1_t = s1[:,0]
	s2_t = s2[:,0]

	s1_data = s1[:,DATA_COL]
	s2_data = s2[:,DATA_COL]

	###############################################
	## TRUNCATE
	###############################################
	if truncate_end != 0:
		TRUNCATE_END = truncate_end
		s1_t = s1_t[:TRUNCATE_END]
		s2_t = s2_t[:TRUNCATE_END]
		s1_data = s1_data[:TRUNCATE_END]
		s2_data = s2_data[:TRUNCATE_END]

	if truncate_start != 0:
		TRUNCATE_START = truncate_start
		s1_t = s1_t[TRUNCATE_START:]
		s2_t = s2_t[TRUNCATE_START:]
		s1_data = s1_data[TRUNCATE_START:]
		s2_data = s2_data[TRUNCATE_START:]

	# s1_data = smoothen_without_shift(s1_data)
	# s2_data = smoothen_without_shift(s2_data)
	# impulse_length = 5000
	if(SMOOTH_WO_SHIFT):
		s1_data = smoothen_without_shift(s1_data)#, impulse_length)
		s2_data = smoothen_without_shift(s2_data)#, impulse_length)
	if(NORMALIZE):
		# pass
		s1_data = normalize_regularize(s1_data)
		s2_data = normalize_regularize(s2_data)

	if(DEBUG > 10): print("len " + s1_name, len(s1_t), len(s1_data))
	if(DEBUG > 10): print("len " + s2_name, len(s2_t), len(s2_data))

	if(not CORRELATION_CIRCULAR):
		# METHOD 1: CORRELATION
		# numpy default correlation, 'zero padding?'
		xcorr = correlate(s1_data, s2_data)
		time_shift = (len(xcorr)/2 - xcorr.argmax())
	else:
		# METHOD 2: CORRELATION
		# periodic/circular correlation
		xcorr = periodic_corr_np(s2_data, s1_data)
		time_shift = xcorr.argmax()


	if(DEBUG > 2): print(colored("Time shift:" + str(time_shift) + " ms", "yellow"))

	# generate 100 frequencies between 0.01 and 1
	freqs = np.linspace(0.01, 1, 100)

	# computes the Lomb Scargle Periodogram of the time and scaled magnitudes using each frequency as a guess
	periodogram_s0 = spectral.lombscargle(s1_t, s1_data, freqs)
	angular_freq_s0 = 1/freqs[np.argmax(periodogram_s0)]
	period_s0 = round(((2*math.pi)/(1/angular_freq_s0)) * 1000,2)

	return time_shift, period_s0
Example #18
def ls_spectral_estimate(freqs, tsn):
    """
    The frequencies must be in angular "months".
    """
    ls = lombscargle(np.arange(tsn.shape[0], dtype=np.float64), tsn, freqs)
    return np.sqrt(ls * 4.0 / tsn.shape[0])
Example #19
def process_data(s1,s2, truncate_start, truncate_end, smooth_cut_off_freq=SMOOTH_CUT_OFF_FREQ, filename1=None, filename2=None):
	# if(DEBUG > 10): print(np.min(s1[:,0]),np.max(s1[:,0]))
	# if(DEBUG > 10): print(np.min(s2[:,0]),np.max(s2[:,0]))
	global SMOOTH_CUT_OFF_FREQ
	SMOOTH_CUT_OFF_FREQ = smooth_cut_off_freq
	########################################################
	### ALIGN S1 AND S2 BEFORE XCORR
	########################################################
	s1,s2 = align(s1,s2)

	s1_t = s1[:,0]
	s2_t = s2[:,0]

	s1_data = s1[:,DATA_COL]
	s2_data = s2[:,DATA_COL]

	###############################################
	## TRUNCATE
	###############################################
	if truncate_end != 0:
		TRUNCATE_END = truncate_end
		s1_t = s1_t[:TRUNCATE_END]
		s2_t = s2_t[:TRUNCATE_END]
		s1_data = s1_data[:TRUNCATE_END]
		s2_data = s2_data[:TRUNCATE_END]

	if truncate_start != 0:
		TRUNCATE_START = truncate_start
		s1_t = s1_t[TRUNCATE_START:]
		s2_t = s2_t[TRUNCATE_START:]
		s1_data = s1_data[TRUNCATE_START:]
		s2_data = s2_data[TRUNCATE_START:]

	# s1_data = smoothen_without_shift(s1_data)
	# s2_data = smoothen_without_shift(s2_data)
	# impulse_length = 5000
	if(SMOOTH_WO_SHIFT):
		s1_data = smoothen_without_shift(s1_data)#, impulse_length)
		s2_data = smoothen_without_shift(s2_data)#, impulse_length)
	if(NORMALIZE):
		# pass
		s1_data = normalize_regularize(s1_data)
		s2_data = normalize_regularize(s2_data)
		# s1_data = normalize_max_min(s1_data)
		# s2_data = normalize_max_min(s2_data)


	# if(DEBUG > 10): print(np.min(s1[:,0]),np.max(s1[:,0]))
	# if(DEBUG > 10): print(np.min(s2[:,0]),np.max(s2[:,0]))

	# s1_data = s1[:,DATA_COL]-np.min(s1)
	# s2_data = s2[:,DATA_COL]-np.min(s2)
	# s1_data = s1_data[mean_samples-1:]

	# s1_data = running_mean((s1[:,DATA_COL]-np.min(s1)),mean_samples)
	# s2_data = running_mean((s2[:,DATA_COL]-np.min(s2)),mean_samples)

	# s2_data = signal.resample(s2_data, int(len(s2_data/2)))
	# s2_data = s2_data[::2] #decimate(s2_data, 10)

	########################################
	# REGULARIZE DATASETS
	# https://stackoverflow.com/questions/6157791/find-phase-difference-between-two-inharmonic-waves?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa
	########################################
	# s1_data -= s1_data.mean(); s1_data /= (3*s1_data.std())
	# s2_data -= s2_data.mean(); s2_data /= (3*s2_data.std())
		
	##########################################
	# OR NORMALIZE BETWEEN ZERO AND ONE
	# ONE EXTREME CHANGE CAN HIJACK EVERYTHING?
	##########################################
	# dx_s1 = np.max(s1_data) - np.min(s1_data)
	# s1_data = (s1_data-np.min(s1_data))/dx_s1
	# dx_s2 = np.max(s2_data) - np.min(s2_data)
	# s2_data = (s2_data-np.min(s2_data))/dx_s2

	# ########################################

	# s1_t = s1[:,0][mean_samples-1:]
	# s2_t = s2[:,0][mean_samples-1:]

	# ############################################
	# #NORMALIZE TIME
	# ############################################
	# s1_t = s1_t - s1_t[0]
	# s2_t = s2_t - s2_t[0]

	# s2_t = s2_t[::2] #decimate(s2_t, 10)
	# s2_t = signal.resample(s2_t, int(len(s2_t/2)))
	# s2_t = s2_t[:len(s2_data)]

	if(DEBUG > 10): print("len " + filename1, len(s1_t), len(s1_data))
	if(DEBUG > 10): print("len " + filename2, len(s2_t), len(s2_data))



	if(WAVELET):
	#######################################################
	# START OF: WAVELET TEST
	#######################################################
		[s1_cA, s1_cD] = wavedec(s1_data, pywt.Wavelet('dmey'),level=1)
		[s2_cA, s2_cD] = wavedec(s2_data, pywt.Wavelet('dmey'),level=1)

		# t = s1_t[0:len(s1_cD)]

		plt.plot(s1_cA, label='s1_cA')
		plt.plot(s2_cA, label='s2_cA')

		plt.plot(s1_cD, label='s1_cD')
		plt.plot(s2_cD, label='s2_cD')

		# plt.legend()
		# plt.show()

		s1_data = s1_cA
		s2_data = s2_cA

		# print(len(s1_t), len(s1_data))

		s1_t = np.linspace(0,len(s1_data)/500,num=len(s1_data))
		s2_t = np.linspace(0,len(s1_data)/500,num=len(s1_data))


		# ti = np.linspace(0, len(s1_data), 1)
		# dxdt, dydt = interpolate.splev(s1_data,der=1)
		# plt.plot(dxdt, label='dxdt_s1')
		# plt.plot(dydt, label='dydt_s1')

		# ti = np.linspace(0, len(s2_data), 1)
		# dxdt, dydt = interpolate.splev(s2_data,der=1)

		# plt.plot(dxdt, label='dxdt_s2')
		# plt.plot(dydt, label='dydt_s2')
		if(SHOW_PLOT):
			plt.legend()
			plt.show()


	# print(len(s1_t), len(s1_data))

	###########################################
	# CORRELATION
	###########################################

	if(DEBUG > 10): print("len " + filename1, len(s1_t), len(s1_data))
	if(DEBUG > 10): print("len " + filename2, len(s2_t), len(s2_data))

	if(not CORRELATION_CIRCULAR):
		# METHOD 1: CORRELATION
		# numpy default correlation, 'zero padding?'
		xcorr = correlate(s1_data, s2_data)
		time_shift = (len(xcorr)/2 - xcorr.argmax())
	else:
		# METHOD 2: CORRELATION
		# periodic/circular correlation
		xcorr = periodic_corr_np(s2_data, s1_data)
		time_shift = xcorr.argmax()

	# xcorr -= xcorr.mean(); xcorr /= xcorr.std() 
	#downsampling for plotting
	# xcorr = xcorr[1:len(xcorr)]

	# if(DEBUG > 10): print(colored("XCorr: " + str(xcorr), "yellow"))
	# if(DEBUG > 10): print(colored("Time shift (APPROX - IN TERMS OF SAMPLES):" + str(time_shift), "yellow"))
	# if(DEBUG > 10): print(colored("Time shift (COULD BE APPROX):" + str(s1_t[time_shift] - s1_t[0]), "yellow"))
	if(DEBUG > 10): print(colored("Time shift:" + str(time_shift) + " ms", "yellow"))
	# if(DEBUG > 10): print(len(xcorr), len(s1_data))











	# ###########################################
	# # CORRELATION
	# ###########################################

	# xcorr = correlate(s1_cD, s2_cD)
	# time_shift = (int(len(xcorr)/2) - xcorr.argmax())*2

	# # xcorr -= xcorr.mean(); xcorr /= xcorr.std() 
	# #downsampling for plotting
	# # xcorr = xcorr[1:len(xcorr):2]

	
	# # xcorr_t = 
	# plt.plot(xcorr, label='xcorr')
	# # if(DEBUG > 10): print(colored("XCorr: " + str(xcorr), "yellow"))
	# # if(DEBUG > 10): print(colored("Time shift (APPROX - IN TERMS OF SAMPLES):" + str(time_shift), "yellow"))
	# # if(DEBUG > 10): print(colored("Time shift (COULD BE APPROX):" + str(s1_t[time_shift] - s1_t[0]), "yellow"))
	# print(colored("Time shift:" + str(time_shift) + " ms", "yellow"))

	# plt.legend()
	# plt.show()

	# sys.exit()
	# #######################################################
	# # END OF: WAVELET TEST
	# #######################################################

	# plt.figure(1)
	if(SHOW_PLOT):plt.figure(figsize=(13,9))
	# plt.subplot(211)
	# plt.plot(s1[:,0], s1[:,2]-np.min(s1),  label='s1')
	if(SHOW_PLOT):plt.plot(s1_t, s1_data,  label=filename1)
	# plt.plot(s1[:,0][mean_samples-1:], s1_data,  label='s1_avg')
	# plt.plot(s2[:,0], s2[:,2]-np.min(s2),  label='s2')
	if(SHOW_PLOT):plt.plot(s2_t, s2_data,  label=filename2)

	# plt.plot(np.arange(len(xcorr)) + s1_t[0], xcorr, label='xcorr')
	# if(len(s1_t) >= len(s2_t)):
	# 	xcorr_t = s1_t[0:len(xcorr)]
	# else:
	# 	xcorr_t = s2_t[0:len(xcorr)]
	xcorr_t = np.linspace(s1_t[0], s1_t[0] + len(xcorr)/1000, num = len(xcorr))
	xcorr = normalize_regularize(xcorr)
	if(SHOW_XCORR):plt.plot(xcorr_t, xcorr, label='xcorr')


	# #####################################################
	# # filtfilt smooth back amd forth
	# #####################################################
	# # n=100
	# # sig = np.random.randn(n)**3 + 3*np.random.randn(n).cumsum()
	# # x = np.random.randn(10)
	# # b, a = signal.ellip(4, 0.01, 120, 0.125)  # Filter to be applied.
	# b, a = signal.butter(2, 0.0001) # a lowpass Butterworth filter with a cutoff of 0.125 times the Nyquist rate, or 125 Hz, and apply it to x with filtfilt. The result should be approximately xlow, with no phase shift.
	# # b, a = signal.butter(4, 100, 'low', analog=True)
	# fgust = signal.filtfilt(b, a, s1_data, method="gust")
	# fgust -= fgust.mean(); fgust /= (3*fgust.std())
	# plt.plot(s1_t,fgust, label='gust', linestyle="--")

	# plt.show()

	if(DEBUG > 10): print("peak detect")


	if(DEBUG > 10): print("xcorr peaks")
	# peakind = signal.find_peaks_cwt(s1_data, np.arange(1,1000), signal.ricker)
	# # peakind = signal.find_peaks_cwt(s1_data,  wavelet = signal.wavelets.daub, widths=10)
	# if(DEBUG > 10): print(peakind, s1[peakind])#, s1_data[peakind])

	peaks_sci_xcorr_idx, _ = find_peaks(xcorr, width=peak_width)
	prominences = peak_prominences(xcorr, peaks_sci_xcorr_idx)[0]
	if(DEBUG > 2): print("peaks and prominences")
	if(DEBUG > 2): print(peaks_sci_xcorr_idx,end="")
	if(DEBUG > 2): print(colored(np.diff(peaks_sci_xcorr_idx),"cyan"))	
	if(DEBUG > 2): print(colored(prominences,"yellow"),  colored(np.mean(prominences),"red"))


	peaks_peakdetect_xcorr = peakdetect(xcorr, lookahead=peak_width)
	# if(DEBUG > 10): print(peaks_peakdetect_s1)
	if len(peaks_peakdetect_xcorr[0]) == 0: 
		if len(peaks_peakdetect_xcorr[1]) == 0:
			peaks_peakdetect_xcorr = np.array([]) 
		else:
			peaks_peakdetect_xcorr = np.array(peaks_peakdetect_xcorr[1])[:,0]
	else:
		if len(peaks_peakdetect_xcorr[1]) == 0:
			peaks_peakdetect_xcorr = np.array(peaks_peakdetect_xcorr[0])[:,0]
		else:
			peaks_peakdetect_xcorr = np.append(np.array(peaks_peakdetect_xcorr[0])[:,0],np.array(peaks_peakdetect_xcorr[1])[:,0]) # peaks and valleys combined into one array

	peaks_peakdetect_xcorr.sort()
	peaks_peakdetect_xcorr = peaks_peakdetect_xcorr.astype(int)
	# if(DEBUG > 10): print(peaks_peakdetect_s1, end="")
	# if(DEBUG > 10): print(colored(np.diff(peaks_peakdetect_s1),"cyan"))

	if(SHOW_PEAKS_PROMINENCES): plt.plot(xcorr_t[peaks_sci_xcorr_idx], xcorr[peaks_sci_xcorr_idx], "x")








	if(DEBUG > 10): print("s1 peaks")
	# peakind = signal.find_peaks_cwt(s1_data, np.arange(1,1000), signal.ricker)
	# # peakind = signal.find_peaks_cwt(s1_data,  wavelet = signal.wavelets.daub, widths=10)
	# if(DEBUG > 10): print(peakind, s1[peakind])#, s1_data[peakind])

	peaks_sci_s1_idx, _ = find_peaks(s1_data, width=peak_width)
	prominences = peak_prominences(s1_data, peaks_sci_s1_idx)[0]
	if(DEBUG > 2): print("peaks and prominences")
	if(DEBUG > 2): print(peaks_sci_s1_idx,end="")
	if(DEBUG > 2): print(colored(np.diff(peaks_sci_s1_idx),"cyan"))	
	if(DEBUG > 2): print(colored(prominences,"yellow"),  colored(np.mean(prominences),"red"))


	peaks_peakdetect_s1 = peakdetect(s1_data, lookahead=peak_width)
	# if(DEBUG > 10): print(peaks_peakdetect_s1)
	if len(peaks_peakdetect_s1[0]) == 0: 
		if len(peaks_peakdetect_s1[1]) == 0:
			peaks_peakdetect_s1 = np.array([]) 
		else:
			peaks_peakdetect_s1 = np.array(peaks_peakdetect_s1[1])[:,0]
	else:
		if len(peaks_peakdetect_s1[1]) == 0:
			peaks_peakdetect_s1 = np.array(peaks_peakdetect_s1[0])[:,0]
		else:
			peaks_peakdetect_s1 = np.append(np.array(peaks_peakdetect_s1[0])[:,0],np.array(peaks_peakdetect_s1[1])[:,0]) # peaks and valleys combined into one array

	peaks_peakdetect_s1.sort()
	peaks_peakdetect_s1 = peaks_peakdetect_s1.astype(int)
	# if(DEBUG > 10): print(peaks_peakdetect_s1, end="")
	# if(DEBUG > 10): print(colored(np.diff(peaks_peakdetect_s1),"cyan"))

	if(SHOW_PEAKS_PROMINENCES): plt.plot(s1_t[peaks_sci_s1_idx], s1_data[peaks_sci_s1_idx], "x")
	# plt.plot(s1[peakind], s1_data[peakind], "o")
	# plt.plot(s1_t[peaks_peakdetect_s1], s1_data[peaks_peakdetect_s1], "X")

	# if(DEBUG > 10): print(signal.peak_prominences(s1_data,peakind)[0])
	# # plt.plot(s1[:,0][mean_samples-1:][peakind], s1_data[peakind], "x")

	if(DEBUG > 10): print("s2 peaks")
	# peakind = signal.find_peaks_cwt(s2_data, np.arange(1,1000), signal.ricker)
	# if(DEBUG > 10): print(peakind, s2[peakind])

	peaks_sci_s2_idx, _ = find_peaks(s2_data, width=peak_width)
	prominences = peak_prominences(s2_data, peaks_sci_s2_idx)[0]
	if(DEBUG > 2): print("peaks and prominences")
	if(DEBUG > 2): print(peaks_sci_s2_idx, end="")
	if(DEBUG > 2): print(colored(np.diff(peaks_sci_s2_idx),"cyan"))	
	if(DEBUG > 2): print(colored(prominences,"yellow"), colored(np.mean(prominences),"red"))

	# if(DEBUG > 10): print("peak detect")
	peaks_peakdetect_s2 = peakdetect(s2_data, lookahead=peak_width)


	if len(peaks_peakdetect_s2[0]) == 0: 
		if len(peaks_peakdetect_s2[1]) == 0:
			peaks_peakdetect_s2 = np.array([]) 
		else:
			peaks_peakdetect_s2 = np.array(peaks_peakdetect_s2[1])[:,0]
	else:
		if len(peaks_peakdetect_s2[1]) == 0:
			peaks_peakdetect_s2 = np.array(peaks_peakdetect_s2[0])[:,0]
		else:
			peaks_peakdetect_s2 = np.append(np.array(peaks_peakdetect_s2[0])[:,0],np.array(peaks_peakdetect_s2[1])[:,0]) # peaks and valleys combined into one array
	# peaks_peakdetect_s2 = np.append(np.array(peaks_peakdetect_s2[0])[:,0],np.array(peaks_peakdetect_s2[1])[:,0]) # contains both peaks and valleys as separate lists
	peaks_peakdetect_s2.sort()
	peaks_peakdetect_s2 = peaks_peakdetect_s2.astype(int)
	# if(DEBUG > 10): print(peaks_peakdetect_s2, end="")
	# if(DEBUG > 10): print(colored(np.diff(peaks_peakdetect_s2),"cyan"))	

	if(SHOW_PEAKS_PROMINENCES): plt.plot(s2_t[peaks_sci_s2_idx], s2_data[peaks_sci_s2_idx], "x")
	# plt.plot(s2[peakind], s2_data[peakind], "o")
	# plt.plot(s2_t[peaks_peakdetect_s2], s2_data[peaks_peakdetect_s2], "X")

	# Compare the first few peak indices (6 here) to estimate the sample offset between s1 and s2
	try:
		peaks_sci_diff = (peaks_sci_s1_idx[0:6] - peaks_sci_s2_idx[0:6])
		if(DEBUG >2): print(colored(peaks_sci_diff,"red"), np.mean(peaks_sci_diff))
	except:
		if(DEBUG >2): print("ERROR TRYING TO COMPUTE PEAKS DIFF", len(peaks_sci_s1_idx), len(peaks_sci_s2_idx))
	# if(DEBUG > 10): print(colored(np.sum(peaks_sci_diff)/len(peaks_sci_diff),"green"))


	# s0_smooth = spline(s1_data[peaks_sci_s1_idx], s1_t[peaks_sci_s1_idx], s1_t)
	# plt.plot(s1_t, s0_smooth, label = "spline (????????????????)" )

	####################################################
	# COMPUTE PERIOD OF A ROTATION
	####################################################
	if(COMPUTE_ANGULAR_FREQUENCY_DEGREE_DRIFT):
		# SRC: https://stackoverflow.com/questions/13349181/using-scipy-signal-spectral-lombscargle-for-period-discovery?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa
		from scipy.signal import spectral
		# generate 100 frequencies between 0.01 and 1
		freqs = np.linspace(0.01, 1, 100)

		# computes the Lomb Scargle Periodogram of the time and scaled magnitudes using each frequency as a guess
		periodogram_s1 = spectral.lombscargle(s1_t, s1_data, freqs)
		periodogram_s2 = spectral.lombscargle(s2_t, s2_data, freqs)
		# if(DEBUG > 10): print(periodogram)
		# returns the inverse of the frequency (i.e. the period) of the largest periodogram value

		angular_freq_s1 = 1/freqs[np.argmax(periodogram_s1)]
		angular_freq_s2 = 1/freqs[np.argmax(periodogram_s2)]
		if(DEBUG > 10): print(filename1, "Angular frequency:" , angular_freq_s1, "radians per second")
		if(DEBUG > 10): print(filename2, "Angular frequency:" , angular_freq_s2, "radians per second")
		if(DEBUG > 10): print(filename1, "Period:", (2*math.pi)/(1/angular_freq_s1))
		if(DEBUG > 10): print(filename2, "Period:", (2*math.pi)/(1/angular_freq_s2))

		angular_freq_avg = ( (2*math.pi)/(1/angular_freq_s1) + (2*math.pi)/(1/angular_freq_s2) )/2
		angular_freq_max = (2*math.pi)/(1/angular_freq_s1) if (2*math.pi)/(1/angular_freq_s1) > (2*math.pi)/(1/angular_freq_s2) else (2*math.pi)/(1/angular_freq_s2)
		# degree_shift = (time_shift/1000)*(360/(angular_freq_avg))
		degree_shift = (time_shift/1000)*(360/(angular_freq_max))

		degree_shift = degree_shift%360 if math.fabs(degree_shift) > 180 else degree_shift
		degree_shift = round(degree_shift,1)

		# if(DEBUG > 0): print("file s1 \t\t", "file s2 \t\t", "time_shift (s)\t", "angular_freq_s1\t", "angular_freq_s2\t", "degree_shift")
		# if(DEBUG > 0): print(args.s1_filename, "\t", args.s2_filename,"\t", time_shift/1000,"\t", round((2*math.pi)/(1/angular_freq_s1),3), "\t\t",round((2*math.pi)/(1/angular_freq_s2),3), "\t",degree_shift)

		if(DEBUG > 0 and not CSV_OUTPUT):
			global t1, min_time_shift, min_time_shift_row, total_time_shift

			total_time_shift = total_time_shift + time_shift/1000
			time_shift_arr.append(time_shift/1000)
			# t1.add_row([args.s1_filename, args.s2_filename, truncate_start, truncate_end, time_shift/1000, round((2*math.pi)/(1/angular_freq_s1),5),round((2*math.pi)/(1/angular_freq_s2),5), round(angular_freq_s1,5), round(angular_freq_s2,5), colored(degree_shift, "cyan")])
			t1.add_row([filename1, filename2, truncate_start, truncate_end, SMOOTH_CUT_OFF_FREQ, time_shift/1000, round((2*math.pi)/(1/angular_freq_s1),5),round((2*math.pi)/(1/angular_freq_s2),5), round(degree_shift,5)])

			if(min_time_shift == None or min_time_shift > math.fabs(time_shift/1000)):
				min_time_shift = math.fabs(time_shift/1000)
				min_time_shift_row = [filename1, filename2, truncate_start, truncate_end, SMOOTH_CUT_OFF_FREQ, colored(time_shift/1000, "red"), round((2*math.pi)/(1/angular_freq_s1),5),round((2*math.pi)/(1/angular_freq_s2),5), colored(round(degree_shift,5), "red")]

			print( "\n".join(t1.get_string().splitlines()[-2:]) )

		elif(CSV_OUTPUT):
			print(filename1, ",", filename2, ",", truncate_start, ",", truncate_end, ",", SMOOTH_CUT_OFF_FREQ,",", time_shift/1000, "," ,  round((2*math.pi)/(1/angular_freq_s1),5), ",",round((2*math.pi)/(1/angular_freq_s2),5), ",", round(angular_freq_s1,5), ",",  round(angular_freq_s2,5), ",", degree_shift%360)
		# else if(PRINT_TIME_SHIFT__PERIOD_S1__PERIOD_S2_ONLY):
		# 	print(args.s1_filename, ",", args.s2_filename, ",", time_shift/1000, "," ,  round((2*math.pi)/(1/angular_freq_s1),5), ",",round((2*math.pi)/(1/angular_freq_s2),5), ",", degree_shift)


	if(SHOW_PLOT):
		plt.legend()
		plt.show()

	return time_shift/1000, round((2*math.pi)/(1/angular_freq_s1),5), round((2*math.pi)/(1/angular_freq_s2),5)
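The degree_shift computation above is the standard lag-to-phase conversion: a time lag dt against a period T corresponds to (dt / T) * 360 degrees. The same arithmetic as a standalone sketch (names are illustrative):

import math

def lag_to_degrees(time_shift_ms, period_s):
    # phase angle for a time lag, folded when it exceeds half a turn
    deg = (time_shift_ms / 1000.0) * (360.0 / period_s)
    if math.fabs(deg) > 180:
        deg %= 360
    return round(deg, 1)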
Example #20
jd = bin_edges[0:-1]
#jd = bin_edges[0:-1]/86400
print('Finished binning in', time()-tic, 'seconds.')
 
scaled_counts = (counts_per_timestep-counts_per_timestep.mean())/counts_per_timestep.std()

# Create array of frequencies to check for Fourier components, function requires angular frequencies
#freqs=np.logspace(2,5,num=10**3)
freqs= np.logspace(0,5,num=10**3)
#freqs=np.linspace(10**-4,10**-3,num=100)
#freqs = np.linspace(10**2,10**6,num=999901)
#freqs= np.linspace(100,1000000,num=999901)
angular_freqs=2*np.pi*freqs
print('Calculating Fourier components...')
tic = time()
periodogram = spectral.lombscargle(jd, scaled_counts, angular_freqs)
print('Calculated Fourier components in', time()-tic, 'seconds.')

'''
# Calculate eclipse period and frequency, and compare it to expected value
eclipse_period = 2*np.pi/(angular_freqs[np.argmax(periodogram)])
eclipse_frequency = 1/eclipse_period
expected_period = 0.01966127 # in days
print('Eclipse period =', eclipse_period, 'days.')
print('Eclipse frequency =', eclipse_frequency, 'cycles/day.')
print('Percent error = ' + str(100*(eclipse_period-expected_period)/expected_period) + '%')
'''

np.savetxt('/home/pszypryt/sdss_data/20121208/periodogram6.txt',periodogram)
np.savetxt('/home/pszypryt/sdss_data/20121208/frequencies6.txt',freqs)
np.savetxt('/home/pszypryt/sdss_data/20121208/lightcurve6.txt',scaled_counts[10::10**3])