Example no. 1
def connect(host=None, port=None):
    """Open a new datafind connection

    Parameters
    ----------
    host : `str`
        name of datafind server to query

    port : `int`
        port of datafind server on host

    Returns
    -------
    connection : :class:`~glue.datafind.GWDataFindHTTPConnection`
        the new open connection (a ``GWDataFindHTTPSConnection`` when a
        non-default port is given)
    """
    from glue import datafind

    port = port and int(port)
    if port is not None and port != 80:
        cert, key = datafind.find_credential()
        return datafind.GWDataFindHTTPSConnection(host=host,
                                                  port=port,
                                                  cert_file=cert,
                                                  key_file=key)

    return datafind.GWDataFindHTTPConnection(host=host, port=port)
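A minimal usage sketch, assuming a reachable server (the host and port below are illustrative placeholders, not defaults from the source):

# hypothetical server; a non-default port exercises the HTTPS branch
connection = connect(host='datafind.example.org', port=443)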
Example no. 2
def main(centerTime, duration, frameTypes, channelNames, detectors,
         rightascension, declination, FFTlength, sampleFrequency):
    from glue import datafind
    from gwpy.timeseries import TimeSeries

    #----- Start and stop time for this event.
    startTime = centerTime - duration / 2
    stopTime = centerTime + duration / 2

    # zip frameTypes and detectors, and channel names and detectors
    frameType   = dict(zip(detectors,frameTypes))
    channelName = dict(zip(detectors,channelNames))
    data        = dict()
    white_data  = dict()

    # Read in the data
    for iDet in detectors:
        connection = datafind.GWDataFindHTTPConnection()
        cache      = connection.find_frame_urls(iDet.strip('1'), frameType[iDet],
                                                startTime, stopTime, urltype='file')
        data[iDet] = TimeSeries.read(cache, channelName[iDet], format='gwf',
                                     start=startTime, end=stopTime)

    for (iDet, iSeries) in data.items():
        # resample data
        if iSeries.sample_rate.decompose().value != sampleFrequency:
            iSeries = iSeries.resample(sampleFrequency)
        asd = iSeries.asd(FFTlength, FFTlength/2., method='median-mean')
        # Apply ASD to the data to whiten it
        whitened = iSeries.whiten(FFTlength, FFTlength/2., asd=asd) 
        white_data[iDet] = whitened.fft()
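A hedged invocation sketch; every value below is an illustrative placeholder (rightascension and declination are unused in the excerpt shown above):

# hypothetical call; frame types and channel names are placeholders
main(centerTime=1126259462.4, duration=64,
     frameTypes=['H1_HOFT_C00', 'L1_HOFT_C00'],
     channelNames=['H1:GDS-CALIB_STRAIN', 'L1:GDS-CALIB_STRAIN'],
     detectors=['H1', 'L1'],
     rightascension=0.0, declination=0.0,
     FFTlength=4, sampleFrequency=4096)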
Example no. 3
def _query():
    # helper assumed to run in an enclosing scope that defines cert, key,
    # host, port, ifo, frametype, gpsstart, gpsend, urltype and gaps
    if cert is not None:
        dfconn = datafind.GWDataFindHTTPSConnection(
            host=host, port=port, cert_file=cert, key_file=key)
    else:
        dfconn = datafind.GWDataFindHTTPConnection(host=host, port=port)
    return dfconn.find_frame_urls(ifo[0].upper(), frametype, gpsstart,
                                  gpsend, urltype=urltype, on_gaps=gaps)
Example no. 4
def generate_fast_vco(ifo, segment, frames=False, fit=True):
    """
    Parameters:
    -----------
        ifo : start
            interferometer, e.g. 'L1'
        segment : array like
            time segment. first entry start second entry end
        frames : bool
            read from frames or nds2
        fit : bool
            fit from imc-f (default)
            or spline interpolation

    Returns:
    --------
        vco_data : saves file 'L1:IMC-VCO_PREDICTION-st-dur.hdf'
    """
    st = segment[0]
    et = segment[1]
    chan1_pat = '%s:SYS-TIMING_C_FO_A_PORT_11_SLAVE_CFC_FREQUENCY_5'
    chan2_pat = '%s:IMC-F_OUT_DQ'

    if frames:
        connection = datafind.GWDataFindHTTPConnection()
        cache = connection.find_frame_urls(
            ifo[0], '%s_R' % ifo, st, et + 1, urltype='file')
        if fit:
            imc = TimeSeries.read(cache, chan2_pat % ifo, st, et)
        else:
            imc = TimeSeries.read(cache, chan2_pat % ifo, st, st + 1)
        pslvco = TimeSeries.read(cache, chan1_pat % ifo, st, et + 1)
    else:
        if fit:
            imc = TimeSeries.fetch(chan2_pat % ifo, st, et)
        else:
            imc = TimeSeries.fetch(chan2_pat % ifo, st, st + 1)
        pslvco = TimeSeries.fetch(chan1_pat % ifo, st, et + 1)

    pslvco = pslvco[16 + 8::16]

    if fit:
        imc_srate = int(imc.sample_rate.value)
        imc2 = imc[imc_srate // 2::imc_srate]
        data = np.array((imc2.value, pslvco.value)).T
        vco_interp = fit_with_imc(data, imc)
    else:
        vco_interp = interp_spline(pslvco)

    chan = "%s:IMC-VCO_PREDICTION" % (ifo,)
    vco_data = TimeSeries(vco_interp, epoch=st,
                          sample_rate=256,
                          name=chan, channel=chan)

    return vco_data
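A minimal usage sketch (the GPS segment below is an illustrative placeholder):

# hypothetical hour-long segment, read from frames with the IMC-F fit
vco = generate_fast_vco('L1', (1135641617, 1135645217), frames=True, fit=True)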
Example no. 5
def find_frames(site, frametype, gpsstart, gpsend, **kwargs):
    """Find frames for given site and frametype
    """
    # connect
    host = kwargs.pop('host', None)
    port = kwargs.pop('port', None)
    port = port and int(port)
    if port is not None and port != 80:
        cert, key = datafind.find_credential()
        connection = datafind.GWDataFindHTTPSConnection(
            host=host, port=port, cert_file=cert, key_file=key)
    else:
        connection = datafind.GWDataFindHTTPConnection(host=host, port=port)
    # find frames
    kwargs.setdefault('urltype', 'file')
    return connection.find_frame_urls(site[0], frametype, gpsstart, gpsend,
                                      **kwargs)
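A minimal usage sketch (the host and GPS times below are illustrative placeholders):

# hypothetical query for file URLs of raw H1 frames
cache = find_frames('H1', 'H1_R', 1126259400, 1126259500,
                    host='datafind.example.org')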
Example no. 6
def find_types(site=None, match=None):
    """Query the DataFind server for frame types matching the given options
    """
    conn = datafind.GWDataFindHTTPConnection()
    return conn.find_types(site=site, match=match)
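A minimal usage sketch (the match pattern below is an illustrative placeholder):

# hypothetical query: all frame types at the L site whose name contains 'R'
types = find_types(site='L', match='R')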
Example no. 7
# (snippet truncated at the start: times, snrs and freqs are assumed to be
# filled from a tab-separated trigger file, roughly as sketched below;
# st, t and channel are likewise assumed to be defined earlier)
times, snrs, freqs = [], [], []
for line in open(trigger_file):  # trigger_file is a hypothetical name
    times.append(float(line.split('\t')[0]))
    snrs.append(float(line.split('\t')[1]))
    freqs.append(float(line.split('\t')[2]))

# for now let's just assume you're not looking at a lock
# longer than a day...
for i in range(int(86400 / t)):
    new_times = []
    new_freqs = []
    for time, snr, freq in zip(times, snrs, freqs):
        if time > st and time < st + t:
            new_times.append(time)
            new_freqs.append(freq)
    if len(new_times) == 0:
        st += t
        continue
    connection = datafind.GWDataFindHTTPConnection()
    cache = connection.find_frame_urls(
        'L', 'L1_C', st, st + t, urltype='file')
    data = TimeSeries.read(cache, channel, st, st + t)
    data2 = detrend(data)
    data = TimeSeries(
        data2, dx=data.dx, sample_rate=data.sample_rate, x0=data.x0)
    specgram = data.spectrogram2(fftlength=.1, overlap=0.1 * 0.9)
    specgram = specgram.ratio('median')
    plot = specgram.plot(vmin=1, vmax=10, norm='log')
    plot.add_colorbar(label='amplitude relative to median')
    ax = plot.gca()
    ax.set_ylim(40, 7e4)
    ax.set_yscale('log')
    # ax.scatter(new_times,new_freqs,'x',color='r')
    for time, freq in zip(new_times, new_freqs):
        pass  # (loop body truncated in the source)
Example no. 8
def dump_calibrated_data(fname):
    import numpy
    from glue import datafind
    from gwpy.timeseries import TimeSeries

    data = numpy.load(fname)

    # Figure out the times covered by the file from the filename
    # I should start using HDF5 so I can store metadata
    temp = fname.split('.')[0]
    temp = temp.split('-')
    ifo = temp[0]
    st, dur = int(temp[-2]), int(temp[-1])
    et = st + dur

    maxidx = len(data)
    width = 45

    weights = 1. - ((numpy.arange(-width, width) / float(width))**2)

    # The VCO frequencies are integers so we could dither them
    # to avoid quantization error if we wanted to be fancy
    # but it seems to make no difference
    if False:
        from numpy.random import triangular
        data[:, 1] += triangular(-1., 0., 1., size=len(data))

    # Just fit the whole thing at once, to get a single coefficient
    a, b = numpy.polyfit(data[:, 0], data[:, 1], 1)
    print "%.1f %u" % (a, b)

    # Slide through the data fitting PSL to IMC for data around each sample
    coeffs = []
    for idx in range(maxidx):
        idx1 = max(0, idx - width)
        idx2 = min(idx + width, maxidx)
        coeffs.append(
            numpy.polyfit(data[idx1:idx2, 0],
                          data[idx1:idx2, 1],
                          1,
                          w=weights[idx1 - idx + width:idx2 - idx + width]))
    coeffs = numpy.array(coeffs)
    times = numpy.arange(len(coeffs)) + 0.5
    connection = datafind.GWDataFindHTTPConnection()
    cache = connection.find_frame_urls(ifo[0],
                                       '%s_R' % ifo,
                                       st,
                                       et,
                                       urltype='file')

    imc = TimeSeries.read(cache, "%s:IMC-F_OUT_DQ" % ifo, st, et)
    imc = imc[::16384 // 256]
    print(imc)
    samp_times = numpy.arange(len(imc)) / 256.

    coeffs0 = numpy.interp(samp_times, times, coeffs[:, 0])
    coeffs1 = numpy.interp(samp_times, times, coeffs[:, 1]) - 7.6e7

    vco_interp = coeffs0 * imc.value + coeffs1

    chan = "%s:IMC-VCO_PREDICTION" % (ifo, )
    vco_data = TimeSeries(vco_interp,
                          epoch=st,
                          sample_rate=imc.sample_rate.value,
                          name=chan,
                          channel=chan)
    vco_data.write("%s-vcoprediction-%u-%u.hdf" % (ifo, st, dur), format='hdf')
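A hedged usage sketch; the filename below is a placeholder, but it follows the IFO-TAG-START-DURATION pattern that the function parses:

# hypothetical .npy dump of paired IMC-F and VCO-frequency samples
dump_calibrated_data('L1-vco-1135641617-3600.npy')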
Example no. 9
def main():
    # Parse commandline arguments

    opts = parse_commandline()

    ###########################################################################
    #                                   Parse Ini File                        #
    ###########################################################################

    # ---- Create configuration-file-parser object and read parameters file.
    cp = ConfigParser.ConfigParser()
    cp.read(opts.inifile)

    # ---- Read needed variables from [parameters] and [channels] sections.
    alwaysPlotFlag = cp.getint('parameters', 'alwaysPlotFlag')
    sampleFrequency = cp.getint('parameters', 'sampleFrequency')
    blockTime = cp.getint('parameters', 'blockTime')
    searchFrequencyRange = json.loads(
        cp.get('parameters', 'searchFrequencyRange'))
    searchQRange = json.loads(cp.get('parameters', 'searchQRange'))
    searchMaximumEnergyLoss = cp.getfloat('parameters',
                                          'searchMaximumEnergyLoss')
    searchWindowDuration = cp.getfloat('parameters', 'searchWindowDuration')
    whiteNoiseFalseRate = cp.getfloat('parameters', 'whiteNoiseFalseRate')
    plotTimeRanges = json.loads(cp.get('parameters', 'plotTimeRanges'))
    plotFrequencyRange = json.loads(cp.get('parameters', 'plotFrequencyRange'))
    plotNormalizedERange = json.loads(
        cp.get('parameters', 'plotNormalizedERange'))
    frameCacheFile = cp.get('channels', 'frameCacheFile')
    frameTypes = cp.get('channels', 'frameType').split(',')
    channelNames = cp.get('channels', 'channelName').split(',')
    detectorName = channelNames[0].split(':')[0]
    det = detectorName.split('1')[0]

    ###########################################################################
    #                           create output directory                       #
    ###########################################################################

    # if outputDirectory not specified, make one based on center time
    if opts.outDir is None:
        outDir = './scans'
    else:
        outDir = opts.outDir
    outDir += '/'

    # report status
    if not os.path.isdir(outDir):
        if opts.verbose:
            print('creating event directory')
        os.makedirs(outDir)
    if opts.verbose:
        print('outputDirectory:  {0}'.format(outDir))

    ########################################################################
    #     Determine if this is a normal omega scan or a Gravityspy         #
    #    omega scan with unique ID. If Gravity spy then additional         #
    #    files and what not must be generated                              #
    ########################################################################

    IDstring = "{0:.2f}".format(opts.eventTime)

    ###########################################################################
    #               Process Channel Data                                      #
    ###########################################################################

    # find closest sample time to event time
    centerTime = np.floor(opts.eventTime) + np.round(
        (opts.eventTime - np.floor(opts.eventTime)) *
        sampleFrequency) / sampleFrequency

    # determine segment start and stop times
    startTime = round(centerTime - blockTime / 2)
    stopTime = startTime + blockTime

    # This is for ordering the output page by SNR
    loudestEnergyAll = []
    channelNameAll = []
    peakFreqAll = []
    mostSignQAll = []

    for channelName in channelNames:
        if 'STRAIN' in channelName:
            frameType = frameTypes[0]
        else:
            frameType = frameTypes[1]

        # Read in the data
        if opts.NSDF:
            data = TimeSeries.fetch(channelName, startTime, stopTime)
        else:
            connection = datafind.GWDataFindHTTPConnection()
            cache = connection.find_frame_urls(det,
                                               frameType,
                                               startTime,
                                               stopTime,
                                               urltype='file')
            data = TimeSeries.read(cache,
                                   channelName,
                                   format='gwf',
                                   start=startTime,
                                   end=stopTime)

        # resample data
        if data.sample_rate.decompose().value != sampleFrequency:
            data = data.resample(sampleFrequency)

        # Cropping the results before interpolation to save on time and memory
        # perform the q-transform
        try:
            specsgrams = []
            for iTimeWindow in plotTimeRanges:
                durForPlot = iTimeWindow / 2
                try:
                    outseg = Segment(centerTime - durForPlot,
                                     centerTime + durForPlot)
                    qScan = data.q_transform(qrange=(4, 64),
                                             frange=(10, 2048),
                                             gps=centerTime,
                                             search=0.5,
                                             tres=0.002,
                                             fres=0.5,
                                             outseg=outseg,
                                             whiten=True)
                    qValue = qScan.q
                    qScan = qScan.crop(centerTime - iTimeWindow / 2,
                                       centerTime + iTimeWindow / 2)
                except Exception:
                    outseg = Segment(centerTime - 2 * durForPlot,
                                     centerTime + 2 * durForPlot)
                    qScan = data.q_transform(qrange=(4, 64),
                                             frange=(10, 2048),
                                             gps=centerTime,
                                             search=0.5,
                                             tres=0.002,
                                             fres=0.5,
                                             outseg=outseg,
                                             whiten=True)
                    qValue = qScan.q
                    qScan = qScan.crop(centerTime - iTimeWindow / 2,
                                       centerTime + iTimeWindow / 2)
                specsgrams.append(qScan)

            loudestEnergyAll.append(qScan.max().value)
            peakFreqAll.append(qScan.yindex[np.where(
                qScan.value == qScan.max().value)[1]].value[0])
            mostSignQAll.append(qValue)
            channelNameAll.append(channelName)

        except Exception:
            print('bad channel {0}: skipping qScan'.format(channelName))
            continue

        if opts.make_webpage:
            # Set some plotting params
            myfontsize = 15
            mylabelfontsize = 20
            myColor = 'k'
            if detectorName == 'H1':
                title = "Hanford"
            elif detectorName == 'L1':
                title = "Livingston"
            else:
                title = "VIRGO"

            if 1161907217 < startTime < 1164499217:
                title = title + ' - ER10'
            elif startTime > 1164499217:
                title = title + ' - O2a'
            elif 1126400000 < startTime < 1137250000:
                title = title + ' - O1'
            else:
                raise ValueError("Time outside science or engineering run, "
                                 "or more likely code not updated to reflect "
                                 "new science run")

            # Create one image containing all spectrograms
            superFig = Plot(figsize=(27, 6))
            superFig.add_subplot(141, projection='timeseries')
            superFig.add_subplot(142, projection='timeseries')
            superFig.add_subplot(143, projection='timeseries')
            superFig.add_subplot(144, projection='timeseries')
            iN = 0

            for iAx, spec in zip(superFig.axes, specsgrams):
                iAx.plot(spec)

                iAx.set_yscale('log', basey=2)
                iAx.set_xscale('linear')

                xticks = np.linspace(spec.xindex.min().value,
                                     spec.xindex.max().value, 5)
                dur = float(plotTimeRanges[iN])
                xticklabels = [str(i)
                               for i in np.linspace(-dur / 2, dur / 2, 5)]
                iAx.set_xticks(xticks)
                iAx.set_xticklabels(xticklabels)

                iAx.set_xlabel('Time (s)',
                               labelpad=0.1,
                               fontsize=mylabelfontsize,
                               color=myColor)
                iAx.set_ylim(10, 2048)
                iAx.yaxis.set_major_formatter(ScalarFormatter())
                iAx.ticklabel_format(axis='y', style='plain')
                iN = iN + 1

                superFig.add_colorbar(ax=iAx,
                                      cmap='viridis',
                                      label='Normalized energy',
                                      clim=plotNormalizedERange,
                                      pad="3%",
                                      width="5%")

            superFig.suptitle(title,
                              fontsize=mylabelfontsize,
                              color=myColor,
                              x=0.51)
            superFig.save(outDir + channelName.replace(':', '-') + '_' +
                          IDstring + '_spectrogram_' + '.png')

    if opts.make_webpage:

        channelNameAll = [i.replace(':', '-') for i in channelNameAll]
        loudestEnergyAll = [str(i) for i in loudestEnergyAll]
        peakFreqAll = [str(i) for i in peakFreqAll]
        mostSignQAll = [str(i) for i in mostSignQAll]

        # Zip SNR with channelName
        loudestEnergyAll = dict(zip(channelNameAll, loudestEnergyAll))
        peakFreqAll = dict(zip(channelNameAll, peakFreqAll))
        mostSignQAll = dict(zip(channelNameAll, mostSignQAll))

        plots = glob.glob(outDir + '*.png')
        plots = [i.split('/')[-1] for i in plots]
        channelPlots = dict(zip(channelNameAll, plots))

        f1 = open(outDir + 'index.html', 'w')
        env = Environment(loader=FileSystemLoader('../'))
        template = env.get_template('webpage/omegatemplate.html')
        print(template.render(channelNames=channelNameAll,
                              SNR=loudestEnergyAll,
                              Q=mostSignQAll,
                              FREQ=peakFreqAll,
                              ID=IDstring,
                              plots=channelPlots),
              file=f1)
        f1.close()

        for channelName in channelNameAll:
            f2 = open(outDir + '%s.html' % channelName, 'w')
            template = env.get_template('webpage/channeltemplate.html')
            # List plots for given channel
            print(template.render(channelNames=channelNameAll,
                                  thisChannel=channelName,
                                  plots=channelPlots),
                  file=f2)
            f2.close()
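For reference, a minimal ini layout matching the keys read in main() above; every value below is an illustrative placeholder, not a default from the source:

# hypothetical omega-scan configuration; each key is read in main()
[parameters]
alwaysPlotFlag = 1
sampleFrequency = 4096
blockTime = 64
searchFrequencyRange = [10, 2048]
searchQRange = [4, 64]
searchMaximumEnergyLoss = 0.2
searchWindowDuration = 0.5
whiteNoiseFalseRate = 1e-3
plotTimeRanges = [0.5, 1.0, 2.0, 4.0]
plotFrequencyRange = [10, 2048]
plotNormalizedERange = [0, 25.5]

[channels]
frameCacheFile = /path/to/frames.lcf
frameType = H1_HOFT_C00,H1_R
channelName = H1:GDS-CALIB_STRAIN,H1:PEM-EY_MAG_VEA_FLOOR_X_DQ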