Example #1
def timeseries(start, end, plot=True):
    kwargs = {'verbose': True, 'host': '10.68.10.121', 'port': 8088}
    #kwargs = {'verbose':True,'host':'localhost','port':8088}
    data = TimeSeriesDict.fetch(channels, start, end, **kwargs)
    c = 299792458  # m/sec
    lam = 1064e-9  # m
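    # Calibration note (editorial, inferred from the scalings below): the GIF
    # strain channel is multiplied by the 3000 m arm length and by 1e6 to give
    # displacement in um; the X-arm channel is assumed to be a frequency
    # signal [Hz], converted to displacement via dL = L * dnu / nu, nu = c / lam.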
    #gif = data['K1:VIS-ETMX_GIF_ARM_L_OUT16']
    gif = data['K1:GIF-X_STRAIN_OUT16'] * 3000 * 1e6
    xarm = data['K1:CAL-CS_PROC_XARM_FILT_AOM_OUT16'] * 3000.0 / (
        c / lam) * 1e6  # [um]
    #xarm = data['K1:CAL-CS_PROC_XARM_FILT_TM_OUT16']*3000.0/(c/lam)*1e6 # [um]
    _etmx_seis = data['K1:PEM-SEIS_EXV_GND_X_OUT16']
    _itmx_seis = data['K1:PEM-SEIS_IXV_GND_X_OUT16']
    #etmx_seis = data['K1:PEM-SEIS_EXV_GND_X_OUT_DQ']
    #itmx_seis = data['K1:PEM-SEIS_IXV_GND_X_OUT_DQ']
    diff_seis = _etmx_seis - _itmx_seis
    #diff_seis = etmx_seis - itmx_seis
    #etmx_lvdt = data['K1:VIS-ETMX_IP_BLEND_LVDTL_IN1_DQ']
    #itmx_lvdt = data['K1:VIS-ITMX_IP_BLEND_LVDTL_IN1_DQ']
    #diff_lvdt = etmx_lvdt + itmx_lvdt
    comm_seis = _etmx_seis + _itmx_seis
    #comm_seis = diff_seis
    if plot:
        plt.plot(gif)
        plt.savefig('timeseries.png')
        plt.close()
    return gif, xarm, diff_seis, comm_seis  #,diff_lvdt
Example #2
    def process(self):

        # data span
        start = self.gpstime - self.duration / 2.
        end = self.gpstime + self.duration / 2.

        # get data
        if self.use_nds:
            data = TimeSeriesDict.fetch(self.chanlist, start, end)
        else:
            from glue.datafind import GWDataFindHTTPConnection
            conn = GWDataFindHTTPConnection()
            cache = conn.find_frame_urls(self.ifo[0], '%s_C' % self.ifo,
                                         self.start, self.end, urltype='file')
            if len(cache) == 0:
                data = {}
            else:
                data = TimeSeriesDict.read(cache, self.chanlist, start=start,
                                           end=end, nproc=self.nproc)

        # make plot
        plot, axes = subplots(nrows=self.geometry[0], ncols=self.geometry[1],
                              sharex=True,
                              subplot_kw={'projection': 'timeseries'},
                              FigureClass=TimeSeriesPlot, figsize=[12, 6])
        axes[0,0].set_xlim(start, end)
        for channel, ax in zip(self.chanlist, axes.flat):
            ax.set_epoch(self.gpstime)
            # plot data
            try:
                ax.plot(data[channel])
            except KeyError:
                ax.text(self.gpstime, 0.5, "No data", va='center', ha='center',
                        transform=ax.transData)
            # plot trip indicator
            ylim = ax.get_ylim()
            ax.plot([self.gpstime, self.gpstime], ylim, linewidth=0.5,
                    linestyle='--', color='red')
            ax.set_ylim(*ylim)
            ax.set_xlabel('')
            ax.set_title(channel.texname, fontsize=10)
            ax.xaxis.set_minor_locator(NullLocator())
            for tick in ax.yaxis.get_major_ticks():
                tick.label.set_fontsize(10)
            for tick in ax.xaxis.get_major_ticks():
                tick.label.set_fontsize(16)
        plot.text(0.5, 0.04, 'Time [seconds] from trip (%s)' % self.gpstime,
                  ha='center', va='bottom', fontsize=24)
        plot.text(0.01, 0.5, 'Amplitude %s' % self.unit, ha='left', va='center',
                  rotation='vertical', fontsize=24)

        plot.suptitle('%s %s %s watchdog trip: %s'
                      % (self.ifo, self.chamber, self.sensor, self.gpstime),
                      fontsize=24)

        plot.save(self.outputfile)
        plot.close()
        return self.outputfile
def getData(channels,start,stop,filename):
    data = TimeSeriesDict.fetch(channels,start,stop)
    spec = {}
    for i in channels:
        spec[i] = {}
        spec[i]['sp'],spec[i]['norm'] = specgram(data[i])
        spec[i]['sp_asd'] = spec[i]['sp'].percentile(50)
    data.write('{}.hdf5'.format(filename),overwrite=True)
    np.save(filename,spec)
    return spec 
Example #4
    def __init__(self,
                 channels,
                 filename,
                 start=default_start,
                 end=default_end):
        self.channels = channels
        if path.exists('./data/{}.hdf5'.format(filename)):
            self.data = TimeSeriesDict.read('./data/{}.hdf5'.format(filename))
        else:
            self.data = TimeSeriesDict.fetch(channels, start, end)
            self.data.write('./data/{}.hdf5'.format(filename))
Example #5
    def process(self):

        # data span
        start = self.gpstime - self.duration / 2.
        end = self.gpstime + self.duration / 2.

        # get data
        if self.use_nds:
            data = TimeSeriesDict.fetch(self.chanlist, start, end)
        else:
            from glue.datafind import GWDataFindHTTPConnection
            conn = GWDataFindHTTPConnection()
            cache = conn.find_frame_urls(self.ifo[0], '%s_C' % self.ifo,
                                         self.start, self.end, urltype='file')
            data = TimeSeriesDict.read(cache, self.chanlist, start=start,
                                       end=end, nproc=self.nproc)

        # make plot
        plot, axes = subplots(nrows=self.geometry[0], ncols=self.geometry[1],
                              sharex=True,
                              subplot_kw={'projection': 'timeseries'},
                              FigureClass=TimeSeriesPlot, figsize=[12, 6])
        axes[0,0].set_xlim(start, end)
        for channel, ax in zip(self.chanlist, axes.flat):
            ax.set_epoch(self.gpstime)
            # plot data
            ax.plot(data[channel])
            # plot trip indicator
            ylim = ax.get_ylim()
            ax.plot([self.gpstime, self.gpstime], ylim, linewidth=0.5,
                    linestyle='--', color='red')
            ax.set_ylim(*ylim)
            ax.set_xlabel('')
            ax.set_title(channel.texname, fontsize=10)
            ax.xaxis.set_minor_locator(NullLocator())
            for tick in ax.yaxis.get_major_ticks():
                tick.label.set_fontsize(10)
            for tick in ax.xaxis.get_major_ticks():
                tick.label.set_fontsize(16)
        plot.text(0.5, 0.04, 'Time [seconds] from trip (%s)' % self.gpstime,
                  ha='center', va='bottom', fontsize=24)
        plot.text(0.01, 0.5, 'Amplitude %s' % self.unit, ha='left', va='center',
                  rotation='vertical', fontsize=24)

        plot.suptitle('%s %s %s watchdog trip: %s'
                      % (self.ifo, self.chamber, self.sensor, self.gpstime),
                      fontsize=24)

        plot.save(self.outputfile)
        plot.close()
        return self.outputfile
Example #6
def timeseries(start, end, plot=True):
    kwargs = {'verbose': True, 'host': '10.68.10.121', 'port': 8088}
    #kwargs = {'verbose':True,'host':'localhost','port':8088}
    data = TimeSeriesDict.fetch(channels, start, end, **kwargs)
    c = 299792458  # m/sec
    lam = 1064e-9  # m
    gif = data['K1:GIF-X_STRAIN_OUT16'] * 3000 * 1e6
    xarm = data['K1:CAL-CS_PROC_XARM_FILT_AOM_OUT16'] * 3000.0 / (
        c / lam) * 1e6  # [um]
    # Seismometer
    etmx_seis = data['K1:PEM-SEIS_EXV_GND_X_OUT16']
    itmx_seis = data['K1:PEM-SEIS_IXV_GND_X_OUT16']
    diff_seis = etmx_seis - itmx_seis
    comm_seis = etmx_seis + itmx_seis
    # # ACC
    # etmx_acc_h1 = data['K1:VIS-ETMX_IP_ACCINF_H1_OUT16']
    # etmx_acc_h2 = data['K1:VIS-ETMX_IP_ACCINF_H2_OUT16']
    # etmx_acc_h3 = data['K1:VIS-ETMX_IP_ACCINF_H3_OUT16']
    # P = etmx_acc_mat
    # etmx_acc_l = P[0][0]*etmx_acc_h1 + P[0][1]*etmx_acc_h2 + P[0][2]*etmx_acc_h3 # L
    # etmx_acc_t = P[1][0]*etmx_acc_h1 + P[1][1]*etmx_acc_h2 + P[1][2]*etmx_acc_h3 # T
    # itmx_acc_h1 = data['K1:VIS-ITMX_IP_ACCINF_H1_OUT16']
    # itmx_acc_h2 = data['K1:VIS-ITMX_IP_ACCINF_H2_OUT16']
    # itmx_acc_h3 = data['K1:VIS-ITMX_IP_ACCINF_H3_OUT16']
    # P = itmx_acc_mat
    # itmx_acc_l = P[0][0]*itmx_acc_h1 + P[0][1]*itmx_acc_h2 + P[0][2]*itmx_acc_h3 # L
    # itmx_acc_t = P[1][0]*itmx_acc_h1 + P[1][1]*itmx_acc_h2 + P[1][2]*itmx_acc_h3 # T
    # diff_acc_l = etmx_acc_l + itmx_acc_l
    # diff_acc_t = etmx_acc_t + itmx_acc_t
    # print np.abs(etmx_acc_mat[0,:]).sum()
    # IP ACT
    etmx_act_l = data['K1:VIS-ETMX_IP_DAMP_L_OUT16']
    itmx_act_l = data['K1:VIS-ITMX_IP_DAMP_L_OUT16']
    diff_act_l = -etmx_act_l - itmx_act_l
    diff_acc_l = diff_act_l
    #diff_acc_l = etmx_act_l
    # GAS ACT
    #etmx_gas_f0 = data['K1:VIS-ETMX_F0_SUMOUT_GAS_OUT16']
    #diff_acc_l = etmx_gas_f0
    #
    #pr3 = data['K1:VIS-PR3_TM_OPLEV_SERVO_YAW_OUT16'
    #pr2 = data['K1:VIS-PR3_TM_OPLEV_SERVO_YAW_OUT16']
    pr3 = data['K1:VIS-ETMX_IP_DAMP_Y_OUT16']
    pr2 = data['K1:VIS-ITMX_IP_DAMP_Y_OUT16']

    if plot:
        plt.plot(gif)
        plt.savefig('timeseries.png')
        plt.close()
    return gif, xarm, diff_seis, comm_seis, diff_acc_l, pr3, pr2
def getData(channels, start, stop, filename, fftl=4, ovlp=2):
    if path.exists('{}.hdf5'.format(filename)):
        data = TimeSeriesDict.read('{}.hdf5'.format(filename))
    else:
        data = TimeSeriesDict.fetch(channels, start, stop)
        data.write('{}.hdf5'.format(filename), overwrite=True)
    spec = {}
    for i in channels:
        spec[i] = {}
        spec[i]['sp'], spec[i]['norm'] = specgram(data[i], fftl, ovlp)
        spec[i]['sp_asd'] = spec[i]['sp'].percentile(50)
        if channels.index(i) == 0:
            spec[i]['sp_asd'] = calDARM(spec[i]['sp_asd'][1:])
        if i[10:13] == 'ACC':
            spec[i]['sp_asd'] = calAccel(spec[i]['sp_asd'][1:])
    np.save(filename, spec)
    return spec
Example #8
    def draw(self):

        # data span
        start = self.gpstime - self.duration / 2.
        end = self.gpstime + self.duration / 2.

        # get data
        if self.use_nds:
            data = TimeSeriesDict.fetch(self.chanlist, start, end)
        else:
            from glue.datafind import GWDataFindHTTPConnection
            conn = GWDataFindHTTPConnection()
            cache = conn.find_frame_urls(self.ifo[0],
                                         '%s_R' % self.ifo,
                                         self.start,
                                         self.end,
                                         urltype='file')
            if len(cache) == 0:
                data = {}
            else:
                data = TimeSeriesDict.read(cache,
                                           self.chanlist,
                                           start=start,
                                           end=end)

        # make plot
        plot, axes = subplots(nrows=self.geometry[0],
                              ncols=self.geometry[1],
                              sharex=True,
                              subplot_kw={'xscale': 'auto-gps'},
                              FigureClass=Plot,
                              figsize=[12, 6])
        axes[0, 0].set_xlim(start, end)
        for channel, ax in zip(self.chanlist, axes.flat):
            ax.set_epoch(self.gpstime)
            # plot data
            try:
                ax.plot(data[channel])
            except KeyError:
                ax.text(self.gpstime,
                        0.5,
                        "No data",
                        va='center',
                        ha='center',
                        transform=ax.transData)
            # plot trip indicator
            ax.axvline(self.gpstime,
                       linewidth=0.5,
                       linestyle='--',
                       color='red')
            ax.set_xlabel('')
            ax.set_ylabel('')
            ax.set_title(usetex_tex(channel.name), fontsize=10)
            ax.xaxis.set_minor_locator(NullLocator())
            for tick in ax.yaxis.get_major_ticks():
                tick.label.set_fontsize(10)
            for tick in ax.xaxis.get_major_ticks():
                tick.label.set_fontsize(16)
        plot.text(0.5,
                  0.02,
                  'Time [seconds] from trip (%s)' % self.gpstime,
                  ha='center',
                  va='bottom',
                  fontsize=24)
        plot.text(0.01,
                  0.5,
                  'Amplitude %s' % self.unit,
                  ha='left',
                  va='center',
                  rotation='vertical',
                  fontsize=24)

        plot.suptitle('%s %s %s watchdog trip: %s' %
                      (self.ifo, self.chamber, self.sensor, self.gpstime),
                      fontsize=24)

        plot.save(self.outputfile)
        plot.close()
        return self.outputfile
Example #9
def get_data(channels,
             gpstime,
             duration,
             pad,
             frametype=None,
             source=None,
             dtype='float64',
             nproc=1,
             verbose=False):
    """Retrieve data for a given channel, centered at a given time

    Parameters
    ----------
    channels : `list`
        required data channels
    gpstime : `float`
        GPS time of required data
    duration : `float`
        duration (in seconds) of required data
    pad : `float`
        amount of extra data to read in at the start and end for filtering
    frametype : `str`, optional
        name of frametype in which this channel is stored, by default will
        search for all required frame types
    source : `str`, `list`, optional
        `str` path of a LAL-format cache file or single data file; will
        supersede `frametype` if given, defaults to `None`
    dtype : `str` or `dtype`, optional
        typecode or data-type to which the output `TimeSeries` is cast
    nproc : `int`, optional
        number of parallel processes to use, uses serial process by default
    verbose : `bool`, optional
        print verbose output about NDS progress, default: False

    See Also
    --------
    gwpy.timeseries.TimeSeries.get
        for the underlying method to read from frames or NDS
    gwpy.timeseries.TimeSeries.read
        for the underlying method to read from a local file cache
    """
    # set GPS start and end time
    start = gpstime - duration / 2. - pad
    end = gpstime + duration / 2. + pad
    # construct file cache if none is given
    if source is None:
        source = find_frames(frametype[0], frametype, start, end)
    # read from frames or NDS
    if source:
        return TimeSeriesDict.read(source,
                                   channels,
                                   start=start,
                                   end=end,
                                   nproc=nproc,
                                   verbose=verbose,
                                   dtype=dtype)
    else:
        return TimeSeriesDict.fetch(channels,
                                    start,
                                    end,
                                    verbose=verbose,
                                    dtype=dtype)
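
# A minimal usage sketch for the function above: the channel name, GPS time
# and frametype are illustrative placeholders, not values from the source.
# get_data() falls back to an NDS fetch when no frame files are found.
if __name__ == '__main__':
    data = get_data(['L1:ISI-GND_STS_ITMY_Z_DQ'],
                    gpstime=1234567890, duration=64, pad=4,
                    frametype='L1_R', verbose=True)
    for name, series in data.items():
        print(name, series.span)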
Example #10
def main(chnames,
         start,
         end,
         check_timeseries=False,
         check_velocity=False,
         check_reduction_rate=True):
    # ----------------------------------------
    # TimeSeries Data from nds
    # ----------------------------------------
    datadict = TimeSeriesDict.fetch(chnames,
                                    start,
                                    end,
                                    host='10.68.10.121',
                                    verbose=True,
                                    port=8088)
    try:
        ts_eyv = datadict['K1:PEM-SEIS_EYV_GND_EW_IN1_DQ']
        ts_exv = datadict['K1:PEM-SEIS_EXV_GND_EW_IN1_DQ']
        ts_ixv = datadict['K1:PEM-SEIS_IXV_GND_EW_IN1_DQ']
        ts_bs = datadict['K1:PEM-SEIS_BS_GND_EW_IN1_DQ']
        ts_mcf = datadict['K1:PEM-SEIS_MCF_GND_EW_IN1_DQ']
    except KeyError as e:
        print(
            '''KeyError: Channel name not found, {0}. Please check this channel name.'''
            .format(e))
        exit()
    except Exception as e:
        print('Unexpected Error!!')
        print(e)
        exit()
    if check_timeseries:
        for data in datadict.values():
            plot = data.rms().plot()
            plot.savefig('./{0}/img_TimeSeries_RMS_{1}.png'.format(
                segname, data.name))
            print('./{0}/img_TimeSeries_RMS_{1}.png'.format(
                segname, data.name))
            plot.close()
    # ----------------------------------------
    # Coherence
    # ----------------------------------------
    fftlength = 2**7
    csd3000 = ts_exv.csd(ts_ixv, fftlength=fftlength,
                         overlap=fftlength / 2.0)  # CSD
    csd30 = ts_mcf.csd(ts_bs, fftlength=fftlength,
                       overlap=fftlength / 2.0)  # CSD
    csd60 = ts_mcf.csd(ts_ixv, fftlength=fftlength,
                       overlap=fftlength / 2.0)  # CSD
    exv = ts_exv.asd(fftlength=fftlength, overlap=fftlength / 2.0)  # ASD
    ixv = ts_ixv.asd(fftlength=fftlength, overlap=fftlength / 2.0)  # ASD
    mcf = ts_mcf.asd(fftlength=fftlength, overlap=fftlength / 2.0)  # ASD
    bs = ts_bs.asd(fftlength=fftlength, overlap=fftlength / 2.0)  # ASD
    coh3000 = np.real(csd3000) / exv / ixv  # Re[coh]
    coh30 = np.real(csd30) / mcf / bs  # Re[coh]
    coh60 = np.real(csd60) / mcf / ixv  # Re[coh]
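    # Note: dividing the real part of the CSD by the two ASDs gives the real
    # part of the complex coherence (not the magnitude-squared coherence);
    # this is the quantity compared with the SPAC model below.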

    # ----------------------------------------
    # SPAC model
    # ----------------------------------------
    freq = np.logspace(-3, 2, 10000)
    w = 2.0 * np.pi * freq
    L_ixv2exv = 3000.0
    L_mcf2bs = 22.0
    L_mcf2ixc = 62.0
    L_mcf2ixv = 62.0  # guess
    L_mcf2mce = 22.0
    #cp = 5500.0 # m/sec
    #cr = 2910 # m/sec
    L_KAGRA = 3000.0
    L_Virgo = 4000.0
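    # SPAC (spatial autocorrelation) model: for an isotropic surface-wave
    # field, the real part of the coherence between two points separated by a
    # distance L is J0(2*pi*f*L/c(f)), where c(f) is the phase-velocity model
    # (the KAGRA velocities below are the by-eye fits noted in the comments).
    # 'reduction' is then the suppression of differential motion, 1 - J0.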
    cr_virgo = lambda f: 150.0 + 1000.0 * np.exp(-f / 1.5)  # from M. Beker PhD thesis.
    cr_kagra = lambda f: 800.0 + 3000.0 * np.exp(-f / 1.5)  # by eye
    cr_kagra_x = lambda f: 2200.0 + 3000.0 * np.exp(-f / 1.5)  # by eye
    cp_kagra_x = lambda f: 5400.0 * np.ones(len(f))
    spac = lambda f, c, L: np.real(jv(0, L * 2.0 * np.pi * f / c))
    spac_kagra = lambda f, L: spac(f, cr_kagra(f), L)
    spac_kagra_p = lambda f, L: spac(f, cp_kagra_x(f), L)
    spac_kagra_r = lambda f, L: spac(f, cr_kagra_x(f), L)
    reduction = lambda f, c, L: 1.0 - np.real(jv(0, L * 2.0 * np.pi * f / c))

    # ----------------------------------------
    # ASD
    # ----------------------------------------
    from miyopy.utils.trillium import Trillium
    tr120q = Trillium('120QA')
    trcpt = Trillium('compact')
    v2vel_120 = tr120q.v2vel
    v2vel_cpt = trcpt.v2vel
    c2v = 20.0 / 2**15
    print(c2v)
    amp = 10**(30.0 / 20.0)
    #ixv = v2vel_120(ixv*c2v)/amp
    #exv = v2vel_120(exv*c2v)/amp
    #mcf = v2vel_cpt(mcf*c2v)/amp
    #bs = v2vel_cpt(bs*c2v)/amp
    if True:
        fig, ax = plt.subplots(1, 1, figsize=(8, 6))
        if 'UD' in ts_ixv.name:
            plt.title(segname.replace('_', '\_') + ', ASD' + ', Z')
        elif 'EW' in ts_ixv.name:
            plt.title(segname.replace('_', '\_') + ', ASD' + ', X')
        ax.loglog(ixv, label='ixv')
        ax.loglog(exv, label='exv')
        ax.loglog(mcf, label='mcf')
        ax.loglog(bs, label='bs')
        ax.set_xlim(0.01, 100)
        ax.set_xlabel('Frequency [Hz]')
        ax.set_ylabel('Count')
        ax.legend()
        if 'UD' in ts_ixv.name:
            plt.savefig('./{0}/img_ASD_Z.png'.format(segname))
            print('saved ./{0}/img_ASD_Z.png'.format(segname))
        elif 'EW' in ts_ixv.name:
            plt.savefig('./{0}/img_ASD_X.png'.format(segname))
            print('saved ./{0}/img_ASD_X.png'.format(segname))
        plt.close()

    # ------------------------------------------
    # Spatial autocorrelation (SPAC)
    # ------------------------------------------
    if 'UD' in ts_ixv.name:
        fig, ax = plt.subplots(1, 1, figsize=(8, 6))
        plt.title(segname.replace('_', '\_') + ', SPAC')
        ax.axvspan(0.01, 0.1, alpha=0.2, color='gray')
        ax.axvspan(2, 100, alpha=0.2, color='gray')
        ax.semilogx(coh3000, 'r', label='3000 m ')
        ax.semilogx(freq,
                    spac_kagra(freq, 3000.0),
                    'k--',
                    label='3000 m model')
        ax.semilogx(coh30, 'b', label='22 m')
        ax.semilogx(freq, spac_kagra(freq, 22.0), 'k--', label='22 m model')
        #ax.semilogx(coh60,'g',label='60 m ')
        #ax.semilogx(freq,spac_kagra(freq,62.0),'k--',label='60 m model')
        ax.set_ylim(-1.1, 1.1)
        ax.set_xlim(0.01, 20)
        ax.set_xlabel('Frequency [Hz]')
        ax.set_ylabel('Coherence')
        ax.set_yticks(np.arange(-1.0, 1.1, 0.5))
        ax.text(110,
                -1.1,
                'START : {0}'.format(start),
                rotation=90,
                ha='left',
                va='bottom')
        ax.legend()
        plt.savefig('./{0}/img_SPAC.png'.format(segname))
        print('saved ./{0}/img_SPAC.png'.format(segname))
        plt.close()
    # ------------------------------------------
    # Spatial autocorrelation (SPAC) X
    # ------------------------------------------
    if 'EW' in ts_ixv.name:
        fig, ax = plt.subplots(1, 1, figsize=(8, 6))
        plt.title(segname.replace('_', '\_') + ', X')
        ax.axvspan(0.01, 0.1, alpha=0.2, color='gray')
        ax.axvspan(1.5, 100, alpha=0.2, color='gray')
        ax.semilogx(coh3000, 'r', label='3000 m ')
        ax.semilogx(freq,
                    spac_kagra_r(freq, 3000.0),
                    'k--',
                    label='3000 m model')
        ax.semilogx(freq,
                    spac_kagra_p(freq, 3000.0),
                    'g--',
                    label='3000 m model (p)')
        ax.semilogx(coh30, 'b', label='22 m')
        ax.semilogx(freq, spac_kagra_r(freq, 22.0), 'k--', label='22 m model')
        ax.semilogx(freq,
                    spac_kagra_p(freq, 22.0),
                    'g--',
                    label='22 m model (p)')
        #ax.semilogx(coh60,'g',label='60 m ')
        #ax.semilogx(freq,spac_kagra_x(freq,62.0),'k--',label='60 m model')
        ax.set_ylim(-1.1, 1.1)
        ax.set_xlim(0.01, 20)
        ax.set_xlabel('Frequency [Hz]')
        ax.set_ylabel('Coherence')
        ax.set_yticks(np.arange(-1.0, 1.1, 0.5))
        ax.text(110,
                -1.1,
                'START : {0}'.format(start),
                rotation=90,
                ha='left',
                va='bottom')
        ax.legend()
        plt.savefig('./{0}/img_SPAC_X.png'.format(segname))
        print('saved ./{0}/img_SPAC_X.png'.format(segname))
        plt.close()

    # ------------------------------------------
    # Rayleigh wave velocity in KAGRA and Virgo
    # ------------------------------------------
    if check_velocity:
        fig, ax = plt.subplots(1, 1, figsize=(7, 6))
        plt.title('Phase Velocity of Rayleigh Wave', fontsize=25)
        ax.loglog(freq, cr_virgo(freq), 'k', label='Virgo')
        ax.loglog(freq, cr_kagra(freq), 'r', label='KAGRA (Vertical)')
        ax.loglog(freq, cr_kagra_x(freq), 'r--', label='KAGRA (Horizontal)??')
        ax.loglog(freq, cp_kagra_x(freq), 'r:', label='ref. KAGRA P-wave')
        ax.set_xlabel('Frequency [Hz]')
        ax.set_ylabel('Velocity [m/s]')
        ax.set_ylim(100, 10e3)
        ax.set_xlim(1e-3, 100)
        ax.legend(fontsize=15)
        plt.savefig('./{0}/img_RwaveVelocity.png'.format(segname))
        print('saved ./{0}/img_RwaveVelocity.png'.format(segname))
        plt.close()

    # ----------------------------------
    # Reduction Rate of arm displacement in KAGRA and Virgo
    # ----------------------------------
    if check_reduction_rate:
        fig, ax = plt.subplots(1, 1, figsize=(7, 6))
        plt.title('Displacement Reduction of the Bedrock', fontsize=25)
        ax.loglog(freq,
                  reduction(freq, cr_virgo(freq), L_Virgo),
                  'k',
                  label='Virgo (Z velo.)')
        ax.loglog(freq,
                  reduction(freq, cr_kagra_x(freq), L_KAGRA),
                  'r',
                  label='KAGRA (X velo.)')
        ax.set_xlim(0.01, 20)
        ax.set_ylim(1e-3, 2)
        ax.legend(fontsize=15)
        ax.set_xlabel('Frequency [Hz]')
        ax.set_ylabel('Reduction Rate')
        plt.savefig('./{0}/img_ReductionRate.png'.format(segname))
        print('saved ./{0}/img_ReductionRate.png'.format(segname))
        plt.close()
Example #11
'L1:HPI-ETMX_BLND_IPS_RZ_IN1_DQ.mean, m-trend',
'L1:HPI-ETMX_BLND_IPS_VP_IN1_DQ.mean, m-trend',
'L1:HPI-ETMX_BLND_IPS_X_IN1_DQ.mean, m-trend', 
'L1:HPI-ETMX_BLND_IPS_Y_IN1_DQ.mean, m-trend',
'L1:HPI-ETMX_BLND_IPS_Z_IN1_DQ.mean, m-trend',
'L1:HPI-ETMY_BLND_IPS_HP_IN1_DQ.mean, m-trend',
'L1:HPI-ETMY_BLND_IPS_RX_IN1_DQ.mean, m-trend',
'L1:HPI-ETMY_BLND_IPS_RY_IN1_DQ.mean, m-trend',
'L1:HPI-ETMY_BLND_IPS_RZ_IN1_DQ.mean, m-trend',
'L1:HPI-ETMY_BLND_IPS_VP_IN1_DQ.mean, m-trend',
'L1:HPI-ETMY_BLND_IPS_X_IN1_DQ.mean, m-trend',
'L1:HPI-ETMY_BLND_IPS_Y_IN1_DQ.mean, m-trend',
'L1:HPI-ETMY_BLND_IPS_Z_IN1_DQ.mean, m-trend']

#data = dict()
data = TimeSeriesDict.fetch(channels, start, end, verbose=True)

print "DONE"

#for data in channles:
#    plot_data = data.plot()
#    ax = plot_data.gca()

#    ax.set_epoch(start.gps)
#    ax.set_xlim(start.gps, end.gps)
#    ax.set_ylabel('Amplitude[ ]')
#    ax.set_title(data.channel.texname)
#    ax.set_ylim([20000,30000])
 
#    print "DONE"
#    plot_data.save(data.channel.text)
Example #12
    'K1:VIS-ITMX_IP_DAMP_Y_OUT16',
    'K1:VIS-ITMX_IP_VELDAMP_L_OUT16',
    'K1:VIS-ITMX_IP_VELDAMP_T_OUT16',
    'K1:VIS-ITMX_IP_VELDAMP_Y_OUT16',
]
ifo_ch = [
    'K1:LSC-CARM_SERVO_SLOW_MON_OUT16',
    'K1:IMC-MCL_SERVO_OUT16',
    'K1:VIS-MCE_TM_LOCK_L_OUT16',
    'K1:ALS-X_PDH_SLOW_DAQ_INMON',
    'K1:CAL-CS_PROC_XARM_FREQUENCY_MON',
    'K1:CAL-CS_PROC_XARM_FILT_AOM_OUT16',
]

chname = seis_ch + gif_ch + sus_ch + ifo_ch
#chname = ifo_ch

data = TimeSeriesDict.fetch(chname,
                            start,
                            end,
                            host='10.68.10.121',
                            verbose=True,
                            port=8088,
                            pad=np.nan)

for _data in data.values():
    print('./segment_{0}/{1}.gwf'.format(segnum, _data.name))
    _data.override_unit('ct')
    _data.write('./segment_{0}/{1}.gwf'.format(segnum, _data.name),
                format='gwf.lalframe')
Example #13
(`~gwpy.spectrum.Spectrum`) giving a time-averaged measure of coherence.

The `TimeSeries` method :meth:`~TimeSeries.coherence_spectrogram` performs the
same coherence calculation every ``stride``, giving a time-varying coherence
measure.

"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.timeseries'

# First, we import the `TimeSeriesDict`
from gwpy.timeseries import TimeSeriesDict

# and then fetch both data sets:
data = TimeSeriesDict.fetch(['L1:LSC-SRCL_IN1_DQ', 'L1:LSC-CARM_IN1_DQ'],
                            'Feb 13 2015', 'Feb 13 2015 00:15')

# We can then use the :meth:`~TimeSeries.coherence_spectrogram` method
# of one `TimeSeries` to calculate the time-varying coherence with
# respect to the other, using a 0.5-second FFT length, a
# 0.45-second (90%) overlap, and an 8-second stride:
coh = data['L1:LSC-SRCL_IN1_DQ'].coherence_spectrogram(
    data['L1:LSC-CARM_IN1_DQ'], 8, 0.5, 0.45)

# Finally, we can :meth:`~gwpy.spectrogram.Spectrogram.plot` the
# resulting data
plot = coh.plot()
ax = plot.gca()
ax.set_ylabel('Frequency [Hz]')
ax.set_yscale('log')
ax.set_ylim(10, 8000)
Example #14
start = tconvert('Jul 18 2019 12:00:00 JST')
#start = tconvert('Jul 20 2019 13:00:00 JST')
end = tconvert('Jul 20 2019 15:00:00 JST')
channels = [
    'K1:PEM-SEIS_EYV_GND_X_OUT16', 'K1:PEM-SEIS_EYV_GND_Y_OUT16',
    'K1:PEM-SEIS_EYV_GND_Z_OUT16', 'K1:PEM-SEIS_EXV_GND_X_OUT16',
    'K1:PEM-SEIS_EXV_GND_Y_OUT16', 'K1:PEM-SEIS_EXV_GND_Z_OUT16',
    'K1:PEM-SEIS_IXV_GND_X_OUT16', 'K1:PEM-SEIS_IXV_GND_Y_OUT16',
    'K1:PEM-SEIS_IXV_GND_Z_OUT16'
]

data = TimeSeriesDict.fetch(channels,
                            start,
                            end,
                            host='10.68.10.122',
                            port=8088,
                            verbose=True,
                            pad=0.0)
(eyv_x, eyv_y, eyv_z,
 exv_x, exv_y, exv_z,
 ixv_x, ixv_y, ixv_z) = (data[chan] for chan in channels)

# eyv
specgram(eyv_x, 2**8, 2**6, 2**5)
Example #15
from gwpy.timeseries import TimeSeriesDict

kwargs = {
    'host': '10.68.10.121',
    'port': 8088,
    'verbose': True,
    'start': tconvert('Dec 16 18:00:00 2020 JST'),
    'end': tconvert('Dec 17 06:00:00 2020 JST')
}

chname = [
    'K1:VIS-SRM_IP_LVDTINF_H1_OUT16', 'K1:VIS-SRM_IP_LVDTINF_H2_OUT16',
    'K1:VIS-SRM_IP_LVDTINF_H3_OUT16'
]

data = TimeSeriesDict.fetch(chname, **kwargs)

if __name__ == '__main__':
    fftlen = 2**9
    overlap = fftlen / 2

    chname = [
        'K1:VIS-SRM_IP_LVDTINF_H1_OUT16', 'K1:VIS-SRM_IP_LVDTINF_H2_OUT16',
        'K1:VIS-SRM_IP_LVDTINF_H3_OUT16'
    ]
    labels = ['H1', 'H2', 'H3']

    fig, ax = plt.subplots(2, 2, figsize=(10, 6))
    fig.suptitle('No title')
    plt.subplots_adjust(wspace=0.1, hspace=0.1)
Example #16
from gwpy.timeseries import TimeSeriesDict
import phasespace as ps

chan1 = 'H1:ASC-DSOFT_P_OUT_DQ'
chan2 = 'H1:ASC-DSOFT_Y_OUT_DQ'
startgps = 1128411017
duration = 300
startgps2 = 1128453317

time1 = TimeSeriesDict.fetch([chan1, chan2],
                             startgps,
                             startgps + duration,
                             verbose=True)
time2 = TimeSeriesDict.fetch([chan1, chan2],
                             startgps2,
                             startgps2 + duration,
                             verbose=True)

pit_yaw = ps.phase_space(y_ts=time1[chan1],
                         x_ts=time1[chan2],
                         y_ts_comp=time2[chan1],
                         x_ts_comp=time2[chan2])
scatterhist = pit_yaw.plot_2d_scatter_hist_comparison(timer=32,
                                                      median=False,
                                                      flip=True)
scatterhist.savefig('test.png')
Example #17
front_end_channel_list = []
if options.front_end_channel_list is not None:
    front_end_channels = options.front_end_channel_list.split(',')
    for channel in front_end_channels:
        front_end_channel_list.append((ifo, channel))
    plot_front_end = True
else:
    plot_front_end = False

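# Each entry of the channel lists is assumed to be an (ifo, channel) tuple,
# as built for front_end_channel_list above, so "%s:%s".__mod__ maps each
# tuple to a full 'IFO:CHANNEL' name.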
data = TimeSeriesDict.read(
    options.frame_cache,
    list(map("%s:%s".__mod__, channel_list)),
    start=start,
    end=end)
if plot_front_end:
    front_end_data = TimeSeriesDict.fetch(
        list(map("%s:%s".__mod__, front_end_channel_list)),
        start,
        end)
if plot_additional_hoft:
    additional_hoft_data = TimeSeriesDict.read(
        options.additional_hoft_frames_cache,
        list(map("%s:%s".__mod__, additional_channel_list)),
        start=start,
        end=end)

print(list(map("%s:%s".__mod__, front_end_channel_list)))

segs = DataQualityFlag.query('%s:DMT-CALIBRATED:1' % ifo, start, end)

for n, channel in enumerate(channels):
    plot = TimeSeries.plot(data["%s:%s" % (ifo, channel)])
    ax = plot.gca()
Example #18
            channels_M0.append('L1:SUS-%s_M0_DAMP_%s_INMON.%s,m-trend' % (optic_m0, dof, trend))

for optic_m1 in OPTICS_M1:
    for dof in DOFS:
        for trend in TRENDS:
            channels_M1.append('L1:SUS-%s_M1_DAMP_%s_INMON.%s,m-trend' % (optic_m1, dof, trend))

for optic_m2 in OPTICS_M2:
    for dof2 in DOFS2:
        for trend in TRENDS:
            channels_M2.append('L1:SUS-%s_M2_WIT_%s_DQ.%s,m-trend' % (optic_m2, dof2, trend))
            channels_M3.append('L1:SUS-%s_M3_WIT_%s_DQ.%s,m-trend' % (optic_m2, dof2, trend))



data_m0 = TimeSeriesDict.fetch(channels_M0, start, end, verbose=True)
data_m1 = TimeSeriesDict.fetch(channels_M1, start, end, verbose=True)
data_m2 = TimeSeriesDict.fetch(channels_M2, start, end, verbose=True)
data_m3 = TimeSeriesDict.fetch(channels_M3, start, end, verbose=True)

for optic_m1 in OPTICS_M1:
    print "%s " %(optic_m1)
    for dof in DOFS:
        print "%s  " %(dof)
        data_m1_mean = data_m1['L1:SUS-%s_M1_DAMP_%s_INMON.mean,m-trend' % (optic_m1, dof)]-data_m1['L1:SUS-%s_M1_DAMP_%s_INMON.mean,m-trend' % (optic_m1, dof)].mean().value
        plot_m1_mean = data_m1_mean.plot()
        axP = plot_m1_mean.gca()    
        axP.set_ylabel('Amplitude - Mean Value (urad)')
        axP.set_title('Mean %s M1 %s' %(optic_m1, dof))
        pylab.ylim([-200,200])
#        L = axP.legend(loc='upper right', ncol=1, fancybox=True, shadow=True)
Example #19
from gwpy.timeseries import TimeSeriesDict
from gwsumm.html.markup import page
import phasespace as ps
import numpy as np

startgps = 1128411017
duration = 300
startgps2 = 1128453317
outdir = '/path/to/save/images/'
chans = np.loadtxt('ASC_channels.txt', dtype=str)

time1 = TimeSeriesDict.fetch(np.ravel(chans),
                             startgps,
                             startgps + duration,
                             verbose=True)
time2 = TimeSeriesDict.fetch(np.ravel(chans),
                             startgps2,
                             startgps2 + duration,
                             verbose=True)

page = page()
page.init(css='style.css')
page.div(class_='banner')
page.div(class_='title')
page.strong('Phase space plots: %s - %s' % (startgps, startgps2))
page.div.close()
page.div.close()

page.ul(style='list-style-type:none')
for chan1, chan2 in chans:
    outfile = '%s-%s-%d-%d.png' % (chan1.replace(
Example #20
# and one for plotting the data:
from gwpy.plotter import TimeSeriesPlot

# Next we define the channels we want, namely the 0.03-0.1 Hz ground motion
# band-limited RMS channels (1-second average trends).
# We do this using string-replacement so we can substitute the interferometer
# prefix easily when we need to:
channels = [
    '%s:ISI-BS_ST1_SENSCOR_GND_STS_X_BLRMS_30M_100M.mean,s-trend',
    '%s:ISI-BS_ST1_SENSCOR_GND_STS_Y_BLRMS_30M_100M.mean,s-trend',
    '%s:ISI-BS_ST1_SENSCOR_GND_STS_Z_BLRMS_30M_100M.mean,s-trend',
]

# At last we can fetch 12 hours of data for each interferometer using the
# `TimeSeriesDict.fetch` method:
lho = TimeSeriesDict.fetch([c % 'H1' for c in channels],
                           'Feb 13 2015 16:00', 'Feb 14 2015 04:00')
llo = TimeSeriesDict.fetch([c % 'L1' for c in channels],
                           'Feb 13 2015 16:00', 'Feb 14 2015 04:00')

# Next we can plot the data, with a separate `~gwpy.plotter.Axes` for each
# instrument:
plot = TimeSeriesPlot(lho, llo)
for ifo, ax in zip(['H1', 'L1'], plot.axes):
    ax.legend(['X', 'Y', 'Z'])
    ax.yaxis.set_label_position('right')
    ax.set_ylabel(ifo, rotation=0, va='center', ha='left')
    ax.set_yscale('log')
plot.text(0.1, 0.5, '$0.03$-$0.1$\,Hz motion [nm/s]', rotation=90, fontsize=24,
          ha='center', va='center')
plot.axes[0].set_title('Magnitude 7.1 earthquake impact on LIGO', fontsize=24)
plot.show()
Example #21
chname = [
    'K1:GIF-X_ANGLE_IN1_DQ',
    'K1:GIF-X_LAMP_IN1_DQ',
    'K1:GIF-X_PHASE_IN1_DQ',
    'K1:GIF-X_PPOL_IN1_DQ',
    'K1:GIF-X_P_AMP_IN1_DQ',
    'K1:GIF-X_P_OFFSET_IN1_DQ',
    'K1:GIF-X_ROTATION_IN1_DQ',
    'K1:GIF-X_SPOL_IN1_DQ',
    'K1:GIF-X_STRAIN_IN1_DQ',
    'K1:GIF-X_S_AMP_IN1_DQ',
    'K1:GIF-X_S_OFFSET_IN1_DQ',
    'K1:GIF-X_ZABS_IN1_DQ',
    ]
    
data = TimeSeriesDict.fetch(chname,start,end,
                            host='10.68.10.121',port=8088)

N = 128
angle = data['K1:GIF-X_ANGLE_IN1_DQ']
angle = angle.value[:N]
angle = np.unwrap(angle)
angle = np.rad2deg(angle)
ppol = data['K1:GIF-X_PPOL_IN1_DQ']
p_ave = np.average(ppol.value)
ppol = ppol.value[:N]
spol = data['K1:GIF-X_SPOL_IN1_DQ']
s_ave = np.average(spol.value)
spol = spol.value[:N]
time = np.arange(len(angle))/2048.0
print(time)
Example #22
    chlst = [
        'K1:PEM-IXV_WEATHER_TEMP_OUT_DQ',
        'K1:PEM-IXV_WEATHER_HUMD_OUT_DQ',
        'K1:PEM-IXV_WEATHER_PRES_OUT_DQ',
        'K1:PEM-IYC_WEATHER_TEMP_OUT_DQ',
        'K1:PEM-IYC_WEATHER_HUMD_OUT_DQ',
        'K1:PEM-IYC_WEATHER_PRES_OUT_DQ',
        'K1:FEC-99_STATE_WORD_FE',    
        'K1:FEC-121_STATE_WORD_FE']

    kwargs = {}
    kwargs['verbose'] = True
    kwargs['pad'] = np.nan
    kwargs['port'] = 8088
    kwargs['host'] = '10.68.10.121'
    data = TimeSeriesDict.fetch(chlst,start,end,**kwargs)
    
    no5_temp = data['K1:PEM-IXV_WEATHER_TEMP_OUT_DQ']
    no5_humd = data['K1:PEM-IXV_WEATHER_HUMD_OUT_DQ']
    no5_baro = data['K1:PEM-IXV_WEATHER_PRES_OUT_DQ']
    no6_temp = data['K1:PEM-IYC_WEATHER_TEMP_OUT_DQ']
    no6_humd = data['K1:PEM-IYC_WEATHER_HUMD_OUT_DQ']
    no6_baro = data['K1:PEM-IYC_WEATHER_PRES_OUT_DQ']
    daq_iy0 = data['K1:FEC-99_STATE_WORD_FE']
    daq_ix1 = data['K1:FEC-121_STATE_WORD_FE']        
    daq_iy0_ok = (daq_iy0==0).to_dqflag(round=False)
    daq_ix1_ok = (daq_ix1==0).to_dqflag(round=False)
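    # (daq == 0) yields a boolean StateTimeSeries; to_dqflag() collects the
    # True samples into DataQualityFlag segments, here presumably marking
    # times when the front-end STATE_WORD reported no errors.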
        
    if True:
        plot_timeseries(no5_temp,no6_temp,ylim=[25,30],
                        fname='TimeSeries_temp.png',title='Temperature')
Example #23
print(end.iso, end.gps)



channels_M1 = []
OPTICS_M1 = ['MC1', 'MC2', 'MC3']
DOFS = ['P', 'R', 'Y']


for optic_m1 in OPTICS_M1:
    for dof in DOFS:
        channels_M1.append('L1:SUS-%s_M1_DAMP_%s_INMON.mean,m-trend' % (optic_m1, dof))



data_m1 = TimeSeriesDict.fetch(channels_M1, start, end, verbose=True)


for dof in DOFS:
    print "DOF = %s  " %(dof)
    data_mc1_mean = data_m1['L1:SUS-MC1_M1_DAMP_%s_INMON.mean,m-trend' % (dof)]-data_m1['L1:SUS-MC1_M1_DAMP_%s_INMON.mean,m-trend' % (dof)].mean().value
    data_mc2_mean = data_m1['L1:SUS-MC2_M1_DAMP_%s_INMON.mean,m-trend' % (dof)]-data_m1['L1:SUS-MC2_M1_DAMP_%s_INMON.mean,m-trend' % (dof)].mean().value
    data_mc3_mean = data_m1['L1:SUS-MC3_M1_DAMP_%s_INMON.mean,m-trend' % (dof)]-data_m1['L1:SUS-MC3_M1_DAMP_%s_INMON.mean,m-trend' % (dof)].mean().value
    plot_mc1_mean = data_mc1_mean.plot()
    ax = plot_mc1_mean.gca()
    ax.plot(data_mc2_mean, label='MC2')
    ax.plot(data_mc3_mean, label='MC3')
    ax.set_ylabel('Mean amplitude - Mean Value (urad)')
    ax.set_title('%s' %(dof))
    pylab.ylim([-200,200])
    L = ax.legend(loc='upper right', ncol=1, fancybox=True, shadow=True)
Example #24
        print(dof2, 'QUAD')
        for trend in TRENDS:
            print(trend, 'QUAD')
            l1_channels.append('L1:SUS-%s_L1_WIT_%s_DQ.%s,m-trend' % (optic, dof2, trend))
            l2_channels.append('L1:SUS-%s_L2_WIT_%s_DQ.%s,m-trend' % (optic, dof2, trend))
            print(l1_channels)
            print(l2_channels)
    for dof3 in DEGREE_OF_FREEDOM:
        print(dof3, 'QUAD')
        for trend in TRENDS:
            print(trend, 'QUAD')
            l3_channels.append('L1:SUS-%s_L3_OPLEV_%s_OUT_DQ.%s,m-trend' % (optic, dof3, trend))
            print(l3_channels)

data = dict()
data[topstage] = TimeSeriesDict.fetch(m1_channels, start, end, verbose=True)

if optic in TRIPLE:
    data['M2'] = TimeSeriesDict.fetch(m2_channels, start, end, verbose=True)
    data['M3'] = TimeSeriesDict.fetch(m3_channels, start, end, verbose=True)

else:
    data['L1'] = TimeSeriesDict.fetch(l1_channels, start, end, verbose=True)
    data['L2'] = TimeSeriesDict.fetch(l2_channels, start, end, verbose=True)
    data['L3'] = TimeSeriesDict.fetch(l3_channels, start, end, verbose=True)


for dof in TOPSTAGE_DOFS:
    if optic in QUAD:
        print "%s  QUAD" %(dof)
        stub = 'L1:SUS-%s_%s_DAMP_%s_INMON.%s,m-trend' % (optic, topstage, dof, '%s')
Example #25
    pyplot.ion()

# Before anything else, we import the objects we will need:
from gwpy.time import tconvert
from gwpy.timeseries import TimeSeriesDict
from gwpy.plotter import BodePlot

# and set the times of our query, and the channels we want:
start = tconvert('May 27 2014 04:00')
end = start + 1800
gndchannel = 'L1:ISI-GND_STS_ITMY_Z_DQ'
hpichannel = 'L1:HPI-ITMY_BLND_L4C_Z_IN1_DQ'

# We can call the :meth:`~TimeSeriesDict.fetch` method of the `TimeSeriesDict`
# to retrieve all data in a single operation:
data = TimeSeriesDict.fetch([gndchannel, hpichannel], start, end, verbose=True)
gnd = data[gndchannel]
hpi = data[hpichannel]

# Next, we can call the :meth:`~TimeSeries.average_fft` method to calculate
# an averaged, complex-valued FFT for each `TimeSeries`:
gndfft = gnd.average_fft(100, 50, window='hamming')
hpifft = hpi.average_fft(100, 50, window='hamming')

# Finally, we can divide one by the other to get the transfer function
# (up to the lower Nyquist)
size = min(gndfft.size, hpifft.size)
tf = hpifft[:size] / gndfft[:size]

# The `~gwpy.plotter.BodePlot` knows how to separate a complex-valued
# `~gwpy.spectrum.Spectrum` into magnitude and phase:
from gwpy.timeseries import TimeSeriesDict
from gwpy.plotter import BodePlot

from gwpy import version
__author__ = "Duncan Macleod <*****@*****.**>"
__version__ = version.version

# set the times
start = tconvert('May 27 2014 04:00')
end = start + 1800

gndchannel = 'L1:ISI-GND_STS_ITMY_Z_DQ'
hpichannel = 'L1:HPI-ITMY_BLND_L4C_Z_IN1_DQ'

# get data
data = TimeSeriesDict.fetch([gndchannel, hpichannel], start, end, verbose=True)
gnd = data[gndchannel]
gnd.name = 'Before HEPI (ground)'
hpi = data[hpichannel]
hpi.name = 'After HEPI'

gnd.unit = 'nm/s'
hpi.unit = 'nm/s'

# get FFTs
gndfft = gnd.average_fft(100, 50, window='hamming')
hpifft = hpi.average_fft(100, 50, window='hamming')

# get transfer function (up to lower Nyquist)
size = min(gndfft.size, hpifft.size)
tf = hpifft[:size] / gndfft[:size]