Example 1
def fetch(i=0, start=1126259446, delta=32):
    s, e = start + (delta * i), start + (delta * (i + 1))
    print("Fetching data for {0} to {1}".format(pretty(s), pretty(e)))
    hdata = TimeSeries.fetch_open_data('H1', s, e, cache=True)
    ldata = TimeSeries.fetch_open_data('L1', s, e, cache=True)
    print("{0} points for H, {1} points for L".format(len(hdata), len(ldata)))
    return hdata, ldata
Example 2
def fetch_data(ifo,
               event_time,
               duration=8,
               sample_frequency=4096,
               verbose=False,
               **kwargs):
    """Fetch raw data around a glitch

    Parameters:

        ifo (str):
            prefix of the interferometer to fetch data for, e.g. ``'H1'``

        event_time (float):
            GPS time of the glitch around which to fetch data

        duration (int, optional):
            length of the data segment, in seconds

        sample_frequency (int, optional):
            sampling frequency of the returned data, in Hz

        verbose (bool, optional):
            if `True`, print verbose output while fetching data

    Returns:

        a `gwpy.timeseries.TimeSeries`
    """
    # find closest sample time to event time
    center_time = (np.floor(event_time) + np.round(
        (event_time - np.floor(event_time)) * sample_frequency) /
                   sample_frequency)

    # determine segment start and stop times
    start_time = round(center_time - duration / 2)
    stop_time = start_time + duration
    frametype = kwargs.pop('frametype', None)
    frametype = '{0}_HOFT_{1}'.format(ifo, frametype)

    try:
        channel_name = '{0}:GDS-CALIB_STRAIN'.format(ifo)
        data = TimeSeries.get(channel_name,
                              start_time,
                              stop_time,
                              frametype=frametype,
                              verbose=verbose).astype('float64')
    except Exception:
        data = TimeSeries.fetch_open_data(ifo, start_time, stop_time,
                                          verbose=verbose)

    if data.sample_rate.decompose().value != sample_frequency:
        data = data.resample(sample_frequency)

    return data
Example 3
def load_gw(t0, detectorlist):
    straindict = {}
    for ifo in detectorlist:
        straindict[ifo] = TimeSeries.fetch_open_data(ifo,
                                                     t0 - 14,
                                                     t0 + 14,
                                                     cache=False)
    return straindict
Example 4
def load_inject_condition(t_i, t_f, t_inj, inj_type, inj_params=None, local=False, Tc=16, To=2, fw=2048, window='tukey', detector='H', 
						  qtrans=False, qsplit=False, dT=2.0, hp=None, save=False, data_path=None):
	"""Fucntion to load a chunk, inject a waveform and condition, created to enable parallelizing.
	"""
	if local:
		files = get_files(detector)
		try:
			data = TimeSeries.read(files, start=t_i, end=t_f, format='hdf5.losc') # load data locally
		except:
			return

	else:
		# load data from losc
		try:
			data = TimeSeries.fetch_open_data(detector + '1', *(t_i, t_f), sample_rate=fw, verbose=False, cache=True)
		except:
			return

	if np.isnan(data.value).any():
		return

	wf_times = data.times.value

	if inj_type == 'ccsn':
		shift = int((t_inj - (wf_times[0] + Tc/2)) * fw)
		hp = np.roll(hp.value, shift)
		
		hp = TimeSeries(hp, t0=wf_times[0], dt=data.dt)
		try:
			hp = hp.taper()
		except:
			pass

		injected_data = data.inject(hp)

	else:
		injected_data = inject(data, t_inj, inj_type, inj_params)

	cond_data = condition_data(injected_data, To, fw, window, qtrans, qsplit, dT)

	x = []
	times = []

	for dat in cond_data:
		x.append(dat.values)
		times.append(dat.t0)

	x = np.asarray(x)
	times = np.asarray(times)

	idx = find_closest_index(t_inj, times)

	x = x[idx]
	times = times[idx]
	return x, times
Example 5
def get_open_strain_data(
        name, start_time, end_time, outdir, cache=False, buffer_time=0, **kwargs):
    """ A function which accesses the open strain data

    This uses `gwpy` to download the open data and then saves a cached copy for
    later use

    Parameters
    ==========
    name: str
        The name of the detector to get data for
    start_time, end_time: float
        The GPS time of the start and end of the data
    outdir: str
        The output directory to place data in
    cache: bool
        If true, cache the data
    buffer_time: float
        Time to add to the beginning and end of the segment.
    **kwargs:
        Passed to `gwpy.timeseries.TimeSeries.fetch_open_data`

    Returns
    =======
    strain: gwpy.timeseries.TimeSeries
        The object containing the strain data. If the connection to the open-data server
        fails, this function returns `None`.

    """
    from gwpy.timeseries import TimeSeries
    filename = '{}/{}_{}_{}.txt'.format(outdir, name, start_time, end_time)

    if buffer_time < 0:
        raise ValueError("buffer_time < 0")
    start_time = start_time - buffer_time
    end_time = end_time + buffer_time

    if os.path.isfile(filename) and cache:
        logger.info('Using cached data from {}'.format(filename))
        strain = TimeSeries.read(filename)
    else:
        logger.info('Fetching open data from {} to {} with buffer time {}'
                    .format(start_time, end_time, buffer_time))
        try:
            strain = TimeSeries.fetch_open_data(name, start_time, end_time, **kwargs)
            logger.info('Saving cache of data to {}'.format(filename))
            strain.write(filename)
        except Exception as e:
            logger.info("Unable to fetch open data, see debug for detailed info")
            logger.info("Call to gwpy.timeseries.TimeSeries.fetch_open_data returned {}"
                        .format(e))
            strain = None

    return strain
Example 6
def read_gwosc(ifo, GPSstart, GPSend, srate=4096, version=None):
    """
        Read GW OpenScience in order to fetch the data,
        this method uses gwpy
    """
    
    from gwpy.timeseries import TimeSeries
    data    = TimeSeries.fetch_open_data(ifo, GPSstart, GPSend,
                                         sample_rate=srate,
                                         version=version,
                                         verbose=True,
                                         tag='CLN',
                                         format='hdf5',
                                         host='https://www.gw-openscience.org')
        
    s   = np.array(data.value)
    t   = np.arange(len(s))*(1./srate) + GPSstart
    return t , s
Example 7
 def getStrainData(self):
     '''
     gathers data in the interval from a to b centered around the GPS time
     stamp for the merger event
     '''
     #sets the number of cores that the strain data loader can use
     args = {'nproc': 8}
     #checking that the ids check out
     with warnings.catch_warnings():
         #ignores user warnings
         warnings.simplefilter('ignore')
         #downloading and storing in an obj the grav wave strain data
         strainData = TimeSeries.fetch_open_data(self.detector_id,\
                                                 *self.timeInterval,\
                                                 cache = True,\
                                                 verbose = True,\
                                                 **args)
     return strainData
Example 8
File: qscan.py Project: bfarr/gwpy
__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.timeseries'

# First, we identify the GPS time of interest:
gps = 968654558

# and use that to define the start and end times of our required data
duration = 32
start = int(round(gps - duration/2.))
end = start + duration

# next, we import the `TimeSeries` and fetch some open data from
# `LOSC <//losc.ligo.org>`_:
from gwpy.timeseries import TimeSeries
data = TimeSeries.fetch_open_data('H1', start, end)

# and next we generate the `~TimeSeries.q_transform` of these data:
qspecgram = data.q_transform()

# Now, we can plot the resulting `~gwpy.spectrogram.Spectrogram`, focusing on a
# specific window around the interesting time
#
# .. note::
#
#    Using `~gwpy.spectrogram.Spectrogram.crop` is highly recommended at
#    this stage because rendering the high-resolution spectrogram as it is
#    done here is very slow (for experts this is because we're using
#    `~matplotlib.axes.Axes.pcolormesh` and not any sort of image
#    interpolation, mainly to support both linear and log scaling nicely)
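
# The excerpt ends before the plotting step; a plausible continuation
# (assuming the current gwpy plotting API, with axis limits and labels that
# are illustrative rather than taken from the original example) would crop
# the spectrogram around the event and render it:
plot = qspecgram.crop(gps - 0.2, gps + 0.2).plot(figsize=[8, 4])
ax = plot.gca()
ax.set_yscale('log')
ax.set_ylim(20, 500)
ax.colorbar(label='Normalised energy')
plot.show()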
Example 9
As described in :ref:`gwpy-example-frequencyseries-rayleigh`, the Rayleigh
statistic can be used to study non-Gaussianity in a timeseries.
We can study the time variance of these features by plotting a
time-frequency spectrogram where we calculate the Rayleigh statistic for
each time bin.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.spectrogram'

# To demonstrate this, we can load some data from the LIGO Livingston
# interferometer around the time of the GW151226 gravitational wave detection:

from gwpy.timeseries import TimeSeries
gwdata = TimeSeries.fetch_open_data('L1', 'Dec 26 2015 03:37',
                                    'Dec 26 2015 03:47', verbose=True)

# Next, we can calculate a Rayleigh statistic `Spectrogram` using the
# :meth:`~gwpy.timeseries.TimeSeries.rayleigh_spectrogram` method of the
# `~gwpy.timeseries.TimeSeries` and a 5-second stride with a 2-second FFT and
# 1-second overlap (50%):
rayleigh = gwdata.rayleigh_spectrogram(5, fftlength=2, overlap=1)

# and can make a plot using the :meth:`~Spectrogram.plot` method
plot = rayleigh.plot(norm='log', vmin=0.25, vmax=4)
ax = plot.gca()
ax.set_yscale('log')
ax.set_ylim(30, 1500)
ax.set_title('Sensitivity of LIGO-Livingston around GW151226')
plot.add_colorbar(cmap='coolwarm', label='Rayleigh statistic')
plot.show()
Example 10
from gwpy.timeseries import TimeSeries
hoft = TimeSeries.fetch_open_data('H1', 1187007040, 1187009088, tag='C00')
Example 11
def load_condition_save(t_i, t_f, local=False, Tc=16, To=2, fw=2048, window='tukey', detector='H', 
						qtrans=False, qsplit=False, dT=2.0, save=False, data_path=None):
	"""Fucntion to load condition and save chunk, created to enable parallelizing.
	"""

	conditioned_files = []
	if exists(data_path):
		conditioned_files = [join(data_path, f) for f in listdir(data_path) if isfile(join(data_path, f))]
		# print(len(conditioned_files))
	fname = 'conditioned-chunk-' + str(t_i) + '-' + str(t_f) + '.hdf5'
	if join(data_path, fname) in conditioned_files:
		# print(fname)
		return

	if local:
		files = get_files(detector)
		try:
			data = TimeSeries.read(files, start=t_i, end=t_f, format='hdf5.losc') # load data locally
		except:
			return

	else:
		# load data from losc
		try:
			data = TimeSeries.fetch_open_data(detector + '1', *(t_i, t_f), sample_rate=fw, verbose=False, cache=True)
		except:
			return

	if np.isnan(data.value).any():
		return

	cond_data = condition_data(data, To, fw, window, qtrans, qsplit, dT)
	if save:
		values = []
		t0 = []
		times = []
		f0 = []
		frequencies = []
		for dat in cond_data:
			values.append(dat.values)
			t0.append(dat.t0)
			times.append(dat.times)
			f0.append(dat.f0)
			frequencies.append(dat.frequencies)

		values = np.asarray(values)
		t0 = np.asarray(t0)
		times = np.asarray(times)
		f0 = np.asarray(f0)
		frequencies = np.asarray(frequencies)

		if data_path == None:
			data_path = Path('/storage/fast/users/tommaria/data/conditioned_data/16KHZ/' + detector + '1')

		if not exists(data_path):
			makedirs(data_path)

		fname = 'conditioned-chunk-' + str(t_i) + '-' + str(t_f) + '.hdf5'
		with h5py.File(join(data_path,fname), 'w') as f:
			f.create_dataset('values', data=values)
			f.create_dataset('t0', data=t0)
			f.create_dataset('times', data=times)
			f.create_dataset('f0', data=f0)
			f.create_dataset('frequencies', data=frequencies)

		return

	else:
		return cond_data
Example 12
The standard metric of the sensitivity of a gravitational-wave detector
is the distance to which a canonical binary neutron star (BNS) inspiral
(with two 1.4 solar mass components) would be detected with a
signal-to-noise ratio (SNR) of 8.

We can estimate this using :func:`gwpy.astro.inspiral_range` after calculating
the power-spectral density (PSD) of the strain readout for a detector, and
can plot the variation over time by looping over a power spectral density
:class:`~gwpy.spectrogram.Spectrogram`.
"""

# First, we need to load some data, for this we can use the
# `LOSC <https://losc.ligo.org>`_ public data around the GW150914 event:

from gwpy.timeseries import TimeSeries
h1 = TimeSeries.fetch_open_data('H1', 1126257414, 1126261510)
l1 = TimeSeries.fetch_open_data('L1', 1126257414, 1126261510)

# and then calculating the PSD spectrogram:

h1spec = h1.spectrogram(30, fftlength=4)
l1spec = l1.spectrogram(30, fftlength=4)

# To calculate the inspiral range variation, we need to create a
# :class:`~gwpy.timeseries.TimeSeries` in which to store the values, then
# loop over each PSD bin in the spectrogram, calculating the
# :func:`gwpy.astro.inspiral_range` for each one:

import numpy
from gwpy.astro import inspiral_range
h1range = TimeSeries(numpy.zeros(len(h1spec)),
                     dt=h1spec.dt, t0=h1spec.t0, unit='Mpc')
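
# The excerpt breaks off in the middle of this step; the constructor
# arguments above and the loop below are a reconstruction (not the original
# source) of how the per-bin range is typically computed with
# :func:`gwpy.astro.inspiral_range`:
l1range = TimeSeries(numpy.zeros(len(l1spec)),
                     dt=l1spec.dt, t0=l1spec.t0, unit='Mpc')
for i in range(h1range.size):
    h1range[i] = inspiral_range(h1spec[i], fmin=10)
    l1range[i] = inspiral_range(l1spec[i], fmin=10)

# the two range trends can then be plotted together:
plot = h1range.plot(label='LIGO-Hanford', color='gwpy:ligo-hanford')
ax = plot.gca()
ax.plot(l1range, label='LIGO-Livingston', color='gwpy:ligo-livingston')
ax.set_ylabel('BNS inspiral range [Mpc]')
ax.legend()
plot.show()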
Example 13
We have seen how the binary neutron star (BNS) inspiral range of a
gravitational-wave detector can be measured directly from the strain
readout. In this example, we will estimate the average spectral
contribution to BNS range from the strain record surrounding GW170817
using :func:`gwpy.astro.range_spectrogram`.
"""

__author__ = 'Alex Urban <*****@*****.**>'

# First, we need to load some data. As before we can `fetch` the
# `public data <https://www.gw-openscience.org/catalog/>`__
# around the GW170817 BNS merger:

from gwpy.timeseries import TimeSeries
l1 = TimeSeries.fetch_open_data('L1', 1187006834, 1187010930)

# Then, we can calculate a `Spectrogram` of the inspiral range
# amplitude spectrum:

from gwpy.astro import range_spectrogram
l1spec = range_spectrogram(l1, 30, fftlength=4, fmin=15, fmax=500) ** (1./2)

# We can plot this `Spectrogram` to visualise spectral variation in
# LIGO-Livingston's sensitivity in the hour or so surrounding GW170817:

plot = l1spec.plot(figsize=(12, 5))
ax = plot.gca()
ax.set_yscale('log')
ax.set_ylim(15, 500)
ax.set_title('LIGO-Livingston sensitivity to BNS around GW170817')
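
# The excerpt stops here; adding a colorbar and rendering the figure would
# complete it, along these lines (the label text is illustrative):
ax.colorbar(label=r'BNS range amplitude [Mpc/$\sqrt{\mathrm{Hz}}$]')
plot.show()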
Example 14
roll_off = 0.4  # Roll off duration of tukey window in seconds, default is 0.4s
duration = 4  # Analysis segment duration
post_trigger_duration = 2  # Time between trigger time and end of segment
end_time = trigger_time + post_trigger_duration
start_time = end_time - duration

psd_duration = 32 * duration
psd_start_time = start_time - psd_duration
psd_end_time = start_time

# We now use gwpy to obtain analysis and psd data and create the ifo_list
ifo_list = bilby.gw.detector.InterferometerList([])
for det in ["H1", "L1"]:
    logger.info("Downloading analysis data for ifo {}".format(det))
    ifo = bilby.gw.detector.get_empty_interferometer(det)
    data = TimeSeries.fetch_open_data(det, start_time, end_time)
    ifo.strain_data.set_from_gwpy_timeseries(data)

    logger.info("Downloading psd data for ifo {}".format(det))
    psd_data = TimeSeries.fetch_open_data(det, psd_start_time, psd_end_time)
    psd_alpha = 2 * roll_off / duration
    psd = psd_data.psd(fftlength=duration,
                       overlap=0,
                       window=("tukey", psd_alpha),
                       method="median")
    ifo.power_spectral_density = bilby.gw.detector.PowerSpectralDensity(
        frequency_array=psd.frequencies.value, psd_array=psd.value)
    ifo_list.append(ifo)

logger.info("Saving data plots to {}".format(outdir))
bilby.core.utils.check_directory_exists_and_if_not_mkdir(outdir)
Example 15
from gwpy.timeseries import TimeSeries
lho = TimeSeries.fetch_open_data('H1', 1126259458, 1126259467, verbose=True)
Example 16
print(abs(h1segs.active))

# .. currentmodule:: gwpy.timeseries
#
# Working with strain data
# ------------------------
#
# Now, we can loop through the active segments of ``'H1_DATA'`` and fetch the
# strain `TimeSeries` for each segment, calculating a
# :class:`~gwpy.spectrogram.Spectrogram` for each segment.

from gwpy.timeseries import TimeSeries
spectrograms = []
for start, end in h1segs.active:
    h1strain = TimeSeries.fetch_open_data('H1', start, end, verbose=True)
    specgram = h1strain.spectrogram(30, fftlength=4) ** (1/2.)
    spectrograms.append(specgram)

# Finally, we can build a :meth:`~gwpy.spectrogram.Spectrogram.plot`:

from gwpy.plotter import SpectrogramPlot
plot = SpectrogramPlot()
ax = plot.gca()
for specgram in spectrograms:
    ax.plot(specgram)
ax.set_epoch('Sep 16 2010')
ax.set_xlim('Sep 16 2010', 'Sep 17 2010')
ax.set_ylim(40, 2000)
ax.set_yscale('log')
ax.set_ylabel('Frequency [Hz]')
Example 17
from gwpy.timeseries import TimeSeries
from gwpy.plot import Plot
h1 = TimeSeries.fetch_open_data('H1', 1126259457, 1126259467)
h1b = h1.bandpass(50, 250).notch(60).notch(120)
l1 = TimeSeries.fetch_open_data('L1', 1126259457, 1126259467)
l1b = l1.bandpass(50, 250).notch(60).notch(120)
plot = Plot(figsize=(12, 4.8))
ax = plot.gca(xscale='auto-gps')
ax.plot(h1b, color='gwpy:ligo-hanford', label='LIGO-Hanford')
ax.plot(l1b, color='gwpy:ligo-livingston', label='LIGO-Livingston')
ax.set_epoch(1126259462.427)
ax.set_xlim(1126259462, 1126259462.6)
ax.set_ylim(-1e-21, 1e-21)
ax.set_ylabel('Strain noise')
ax.legend()
plot.show()
Example 18
from gwpy.timeseries import TimeSeries
data = TimeSeries.fetch_open_data('L1', 1187008866, 1187008898, tag='C00')
specgram = data.spectrogram2(fftlength=.5, overlap=.25,
                             window='hann') ** (1/2.)
plot = specgram.plot(yscale='log', ylim=(30, 1400))
plot.colorbar(norm='log', clim=(1e-24, 1e-21), label='Strain ASD')
plot.show()
Example 19
to calculate discrete PSDs for each stride. This is fine for long-duration
data, but gives poor resolution when studying short-duration phenomena.

The `~TimeSeries.spectrogram2` method allows for highly-overlapping FFT
calculations to over-sample the frequency content of the input `TimeSeries`
to produce a much more feature-rich output.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.timeseries'

# To demonstrate this, we can download some data associated with the
# gravitational-wave event GW150914:

from gwpy.timeseries import TimeSeries
lho = TimeSeries.fetch_open_data('H1', 1126259458, 1126259467, verbose=True)

# and can :meth:`~TimeSeries.highpass` and :meth:`~TimeSeries.whiten`
# the data to remove low-frequency noise and try to enhance low-amplitude signals
# across the middle of the frequency band:

hp = lho.highpass(20)
white = hp.whiten(4, 2).crop(1126259460, 1126259465)

# .. note::
#
#    We chose to :meth:`~TimeSeries.crop` out the leading and trailing 2
#    seconds of the whitened data series here to remove any filtering
#    artefacts that may have been introduced.

# Now we can call the `~TimeSeries.spectrogram2` method of the whitened data
# to generate an over-dense `~gwpy.spectrogram.Spectrogram`.
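
# The original example is cut off at this point; a plausible sketch of that
# call (the FFT length, overlap, and plot limits are illustrative) is:
specgram = white.spectrogram2(fftlength=0.15, overlap=0.14) ** (1/2.)
plot = specgram.plot(norm='log')
ax = plot.gca()
ax.set_yscale('log')
ax.set_ylim(20, 1500)
ax.colorbar(label='Whitened strain amplitude')
plot.show()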
Example 20
indicates Gaussian behaviour, less than 1 indicates coherent variations,
and greater than 1 indicates incoherent variation.
It is a useful measure of the quality of the strain data being generated
and recorded at a LIGO site.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.frequencyseries'

# To demonstrate, we can download some public LIGO data from the sixth science
# run (S6) for the H1 interferometer:

from gwpy.timeseries import TimeSeries

gwdata = TimeSeries.fetch_open_data('H1',
                                    'September 16 2010 07:00',
                                    'September 16 2010 07:10',
                                    verbose=True)

# Next, we can calculate a Rayleigh statistic `FrequencySeries` using the
# :meth:`~gwpy.timeseries.TimeSeries.rayleigh_spectrum` method of the
# `~gwpy.timeseries.TimeSeries` with a 2-second FFT and 1-second overlap (50%):

rayleigh = gwdata.rayleigh_spectrum(2, 1)

# For easy comparison, we can calculate the spectral sensitivity ASD of the
# strain data and plot both on the same figure:

asd = gwdata.asd(2, 1)
plot = asd.plot()
plot.add_frequencyseries(rayleigh, newax=True, sharex=plot.axes[0])
plot.axes[0].set_xlabel('')
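
# The excerpt stops here; labelling the two sets of axes and rendering would
# finish the figure, for example (limits are illustrative):
asdax, rayax = plot.axes
asdax.set_ylabel(r'ASD [strain/$\sqrt{\mathrm{Hz}}$]')
rayax.set_ylabel('Rayleigh statistic')
rayax.set_xlabel('Frequency [Hz]')
rayax.set_xlim(40, 2000)
plot.show()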
Example 21
File: plot.py Project: stefco/gwpy
One of the most useful methods of visualising gravitational-wave data is to
use a spectrogram, highlighting the frequency-domain content of some data
over a number of time steps.

For this example we can use the public data around the GW150914 detection.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.timeseries'

# First, we import the `TimeSeries` and call
# :meth:`TimeSeries.fetch_open_data` to download the strain
# data for the LIGO-Hanford interferometer
from gwpy.timeseries import TimeSeries
data = TimeSeries.fetch_open_data(
    'H1', 'Sep 14 2015 09:45', 'Sep 14 2015 09:55')

# Next, we can calculate a `~gwpy.spectrogram.Spectrogram` using the
# :meth:`spectrogram` method of the `TimeSeries` over a 2-second stride
# with a 1-second FFT and 0.5-second overlap (50%):
specgram = data.spectrogram(2, fftlength=1, overlap=.5) ** (1/2.)

# .. note::
#    :meth:`TimeSeries.spectrogram` returns a Power Spectral Density (PSD)
#    `~gwpy.spectrogram.Spectrogram` by default, so we use the ``** (1/2.)``
#    to convert this into a (more familiar) Amplitude Spectral Density.

# Finally, we can make a plot using the
# :meth:`~gwpy.spectrogram.Spectrogram.plot` method
plot = specgram.plot(norm='log', vmin=5e-24, vmax=1e-19)
ax = plot.gca()
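
# The example is truncated here; it would typically conclude by scaling the
# frequency axis and adding a colorbar, for instance:
ax.set_yscale('log')
ax.set_ylim(10, 2000)
ax.colorbar(label=r'Strain ASD [1/$\sqrt{\mathrm{Hz}}$]')
plot.show()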
Example 22
from gwpy.timeseries import (TimeSeries, StateVector)
print(TimeSeries.fetch_open_data('H1', 1126259446, 1126259478))
# TimeSeries([  2.17704028e-19,  2.08763900e-19,  2.39681183e-19,
# ...,   3.55365541e-20,  6.33533516e-20,
# 7.58121195e-20]
# unit: Unit(dimensionless),
# t0: 1126259446.0 s,
# dt: 0.000244140625 s,
# name: Strain,
# channel: None)
print(StateVector.fetch_open_data('H1', 1126259446, 1126259478))
# StateVector([127,127,127,127,127,127,127,127,127,127,127,127,
# 127,127,127,127,127,127,127,127,127,127,127,127,
# 127,127,127,127,127,127,127,127]
# unit: Unit(dimensionless),
# t0: 1126259446.0 s,
# dt: 1.0 s,
# name: Data quality,
# channel: None,
# bits: Bits(0: data present
# 1: passes cbc CAT1 test
# 2: passes cbc CAT2 test
# 3: passes cbc CAT3 test
# 4: passes burst CAT1 test
# 5: passes burst CAT2 test
# 6: passes burst CAT3 test,
# channel=None,
# epoch=1126259446.0))

# For the `StateVector`, the naming of the bits will be
# ``format``-dependent, because they are recorded differently by LOSC
Example 23
can wash out transient noise (as is often desired).

The `SpectralVariance` histogram provided by `gwpy.frequencyseries` allows
us to look at the spectral sensitivity in a different manner, displaying
which frequencies sit at which amplitude _most_ of the time, but also
highlighting excursions from normal behaviour.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.frequencyseries'

# To demonstrate this, we can load some data from the LIGO Livingston
# interferometer around the time of the GW151226 gravitational wave detection:

from gwpy.timeseries import TimeSeries
llo = TimeSeries.fetch_open_data('L1', 1135136228, 1135140324, verbose=True)

# We can then call the :meth:`~gwpy.timeseries.TimeSeries.spectral_variance`
# method of the ``llo`` `~gwpy.timeseries.TimeSeries` by calculating an ASD
# every 5 seconds and counting the amount of time each frequency bin spends
# at each ASD value:

variance = llo.spectral_variance(5, fftlength=2, overlap=1, log=True,
                                 low=1e-24, high=1e-19, nbins=100)

# We can then :meth:`~SpectralVariance.plot` the `SpectralVariance`

plot = variance.plot(norm='log', vmin=.5, cmap='plasma')
ax = plot.gca()
ax.grid()
ax.set_xlim(20, 1500)
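
# The excerpt ends here; setting the amplitude limits and axis labels would
# complete the figure (the limits shown are illustrative):
ax.set_ylim(1e-24, 1e-20)
ax.set_xlabel('Frequency [Hz]')
ax.set_ylabel(r'ASD [strain/$\sqrt{\mathrm{Hz}}$]')
plot.show()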
Example 24
from gwpy.timeseries import TimeSeries
h1 = TimeSeries.fetch_open_data('H1', 1126259457, 1126259467)
l1 = TimeSeries.fetch_open_data('L1', 1126259457, 1126259467)
Example 25
File: public.py Project: bfarr/gwpy
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy.  If not, see <http://www.gnu.org/licenses/>.

"""Plotting public LIGO data

I would like to study the gravitational wave strain time-series around the time of an interesting simulated signal during the last science run (S6).

These data are public, so we can load them directly from the web.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.timeseries'

# The `TimeSeries` object has a `classmethod` dedicated to fetching open-access
# data hosted by the LIGO Open Science Center, so we can just import that
# object
from gwpy.timeseries import TimeSeries

# then call the `~TimeSeries.fetch_open_data` method, passing it the prefix
# for the interferometer we want ('L1'), and the GPS start and stop times of
# our query:
data = TimeSeries.fetch_open_data('L1', 968654552, 968654562)

# and then we can make a plot:
plot = data.plot()
plot.set_title('LIGO Livingston Observatory data for GW100916')
plot.set_ylabel('Gravitational-wave strain amplitude')
plot.show()
Example 26
def load_inject_condition(t_i,
                          t_f,
                          t_inj,
                          ra,
                          dec,
                          pol,
                          alpha,
                          inj_type,
                          inj_params=None,
                          local=False,
                          Tc=16,
                          To=2,
                          fw=2048,
                          window='tukey',
                          detector='H',
                          qtrans=False,
                          qsplit=False,
                          dT=2.0,
                          hp=None,
                          save=False,
                          data_path=None):
    """Fucntion to load a chunk, inject a waveform and condition, created to enable parallelizing.
	"""
    vmem = psutil.virtual_memory()
    free_mem = vmem.free >> 20
    avail_mem = vmem.available >> 20
    # if free_mem < 3e5:
    if avail_mem < 3e5:
        return

    if local:
        files = get_files(detector)
        try:
            data = TimeSeries.read(files,
                                   start=t_i,
                                   end=t_f,
                                   format='hdf5.losc')  # load data locally
        except:
            return

    else:
        # load data from losc
        try:
            data = TimeSeries.fetch_open_data(detector + '1',
                                              *(t_i, t_f),
                                              sample_rate=fw,
                                              verbose=False,
                                              cache=True)
        except:
            return

    if np.isnan(data.value).any():
        return

    det_obj = Detector(detector + '1')
    delay = det_obj.time_delay_from_detector(Detector('H1'), ra, dec, t_inj)
    t_inj += delay
    fp, fc = det_obj.antenna_pattern(ra, dec, pol, t_inj)

    wf_times = data.times.value

    hp, hc = gen_inject(wf_times, data.dt, t_inj, alpha, inj_type, inj_params,
                        Tc, fw)
    h = fp * hp + fc * hc
    injected_data = data.inject(h)

    del data
    gc.collect()

    cond_data = condition_data(injected_data, To, fw, window, qtrans, qsplit,
                               dT)

    del injected_data
    gc.collect()

    x = []
    times = []

    for dat in cond_data:
        x.append(dat.values)
        times.append(dat.t0)

    del cond_data
    gc.collect()

    x = np.asarray(x)
    times = np.asarray(times)

    idx = find_closest_index(t_inj, times)

    x = x[idx]
    times = times[idx]

    return x, times
Example 27
detector is the distance to which a binary neutron star (BNS) inspiral
with two 1.4 solar mass components would be detected with a signal-to-noise
ratio (SNR) of 8. We can estimate this using
:func:`gwpy.astro.range_timeseries` directly from the strain readout for
a detector.
"""

__author__ = 'Duncan Macleod <*****@*****.**>'
__credits__ = 'Alex Urban <*****@*****.**>'

# First, we need to load some data. We can `fetch` the
# `public data <https://www.gw-openscience.org/catalog/>`__
# around the GW170817 BNS merger:

from gwpy.timeseries import TimeSeries
h1 = TimeSeries.fetch_open_data('H1', 1187006834, 1187010930, tag='C02')
l1 = TimeSeries.fetch_open_data('L1', 1187006834, 1187010930, tag='C02')

# Then, we can measure the inspiral range directly:

from gwpy.astro import range_timeseries
h1range = range_timeseries(h1, 30, fftlength=4, fmin=10)
l1range = range_timeseries(l1, 30, fftlength=4, fmin=10)

# We can now plot these trends to see the variation in LIGO
# sensitivity over an hour or so surrounding GW170817:

plot = h1range.plot(label='LIGO-Hanford',
                    color='gwpy:ligo-hanford',
                    figsize=(12, 5))
ax = plot.gca()
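
# The excerpt stops at this point; presumably the Livingston trend is added
# to the same axes and the figure labelled, along the lines of:
ax.plot(l1range, label='LIGO-Livingston', color='gwpy:ligo-livingston')
ax.set_ylabel('BNS inspiral range [Mpc]')
ax.set_title('LIGO sensitivity to BNS around GW170817')
ax.legend()
plot.show()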
Example 28
def load_inject_condition_ccsn(t_i,
                               t_f,
                               t_inj,
                               ra,
                               dec,
                               pol,
                               hp,
                               hc,
                               local=False,
                               Tc=16,
                               To=2,
                               fw=2048,
                               window='tukey',
                               detector='H',
                               qtrans=False,
                               qsplit=False,
                               dT=2.0,
                               save=False,
                               data_path=None):
    """Fucntion to load a chunk, inject a waveform and condition, created to enable parallelizing.
	"""
    vmem = psutil.virtual_memory()
    free_mem = vmem.free >> 20
    avail_mem = vmem.available >> 20
    # if free_mem < 3e5:
    if avail_mem < 3e5:
        return

    if local:
        files = get_files(detector)
        try:
            data = TimeSeries.read(files,
                                   start=t_i,
                                   end=t_f,
                                   format='hdf5.losc')  # load data locally
        except:
            return

    else:
        # load data from losc
        try:
            data = TimeSeries.fetch_open_data(detector + '1',
                                              *(t_i, t_f),
                                              sample_rate=fw,
                                              verbose=False,
                                              cache=True)
        except:
            return

    if np.isnan(data.value).any():
        return

    det_obj = Detector(detector + '1')
    delay = det_obj.time_delay_from_detector(Detector('H1'), ra, dec, t_inj)
    t_inj += delay
    fp, fc = det_obj.antenna_pattern(ra, dec, pol, t_inj)

    # wfs_path = Path(git_path + '/shared/ccsn_wfs/' + ccsn_paper)
    # sim_data = [i.strip().split() for i in open(join(wfs_path, ccsn_file)).readlines()]
    # if ccsn_paper == 'radice':
    # 	line_s = 1
    # else:
    # 	line_s = 0

    # D = D_kpc *  3.086e+21 # cm
    # sim_times = np.asarray([float(dat[0]) for dat in sim_data[line_s:]])
    # hp = np.asarray([float(dat[1]) for dat in sim_data[line_s:]]) / D
    # if ccsn_paper == 'abdikamalov':
    # 	hc = np.zeros(hp.shape)
    # else:
    # 	hc = np.asarray([float(dat[2]) for dat in sim_data[line_s:]]) / D

    # dt = sim_times[1] - sim_times[0]
    h = fp * hp + fc * hc
    # h = TimeSeries(h, t0=sim_times[0], dt=dt)

    # h = h.resample(rate=fw, ftype = 'iir', n=20) # downsample to working frequency fw
    # h = h.highpass(frequency=11, filtfilt=True) # filter out frequencies below 20Hz
    # inj_window = scisig.tukey(M=len(h), alpha=0.08, sym=True)
    # h = h * inj_window

    # h = h.pad(int((fw * Tc - len(h)) / 2))

    wf_times = data.times.value

    shift = int((t_inj - (wf_times[0] + Tc / 2)) * fw)
    h = np.roll(h.value, shift)

    h = TimeSeries(h, t0=wf_times[0], dt=data.dt)
    try:
        h = h.taper()
    except:
        pass

    injected_data = data.inject(h)

    del data
    gc.collect()

    cond_data = condition_data(injected_data, To, fw, window, qtrans, qsplit,
                               dT)

    del injected_data
    gc.collect()

    x = []
    times = []

    for dat in cond_data:
        x.append(dat.values)
        times.append(dat.t0)

    del cond_data
    gc.collect()

    x = np.asarray(x)
    times = np.asarray(times)

    idx = find_closest_index(t_inj, times)

    x = x[idx]
    times = times[idx]
    return x, times
Example 29
from gwosc import datasets
from gwpy.timeseries import TimeSeries
gps = datasets.event_gps('GW170817')
data = TimeSeries.fetch_open_data('L1', gps-34, gps+34, tag='C00')
Example 30
print(abs(h1segs.active))

# .. currentmodule:: gwpy.timeseries
#
# Working with strain data
# ------------------------
#
# Now, we can loop through the active segments of ``'H1_DATA'`` and fetch the
# strain `TimeSeries` for each segment, calculating a
# :class:`~gwpy.spectrogram.Spectrogram` for each segment.

from gwpy.timeseries import TimeSeries
spectrograms = []
for start, end in h1segs.active:
    h1strain = TimeSeries.fetch_open_data('H1', start, end, verbose=True)
    specgram = h1strain.spectrogram(30, fftlength=4)**(1 / 2.)
    spectrograms.append(specgram)

# Finally, we can build a :meth:`~gwpy.spectrogram.Spectrogram.plot`:

from gwpy.plot import Plot
plot = Plot(figsize=(12, 6))
ax = plot.gca()
for specgram in spectrograms:
    ax.imshow(specgram)
ax.set_xscale('auto-gps', epoch='Sep 16 2010')
ax.set_xlim('Sep 16 2010', 'Sep 17 2010')
ax.set_ylim(40, 2000)
ax.set_yscale('log')
ax.set_ylabel('Frequency [Hz]')
Example 31
#
# You should have received a copy of the GNU General Public License
# along with GWpy.  If not, see <http://www.gnu.org/licenses/>.
"""Plotting public LIGO data

I would like to study the gravitational wave strain time-series around the
time of an interesting simulated signal during the last science run (S6).

These data are public, so we can load them directly from the web.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.timeseries'

# The `TimeSeries` object has a `classmethod` dedicated to fetching open-access
# data hosted by the LIGO Open Science Center, so we can just import that
# object
from gwpy.timeseries import TimeSeries

# then call the `~TimeSeries.fetch_open_data` method, passing it the prefix
# for the interferometer we want ('L1'), and the GPS start and stop times of
# our query:
data = TimeSeries.fetch_open_data('L1', 968654552, 968654562)

# and then we can make a plot:
plot = data.plot(
    title='LIGO Livingston Observatory data for HW100916',
    ylabel='Gravitational-wave strain amplitude',
)
plot.show()
Example 32
from gwpy.timeseries import TimeSeries
llo = TimeSeries.fetch_open_data('L1', 1135136228, 1135140324, verbose=True)
Example 33
# Demodulation is useful when trying to examine steady sinusoidal
# signals we know to be contained within data. For instance,
# we can download some data from LOSC to look at trends of the
# amplitude and phase of Livingston's calibration line at 331.3 Hz:

from gwpy.timeseries import TimeSeries
data = TimeSeries.fetch_open_data('L1', 1131350417, 1131357617)

# We can demodulate the `TimeSeries` at 331.3 Hz with a stride of once
# per minute:

amp, phase = data.demodulate(331.3, stride=60)

# We can then plot these trends to visualize changes in the amplitude
# and phase of the calibration line:

from gwpy.plotter import TimeSeriesPlot
plot = TimeSeriesPlot(amp, phase, sep=True)
plot.show()
Example 34
# We can design an arbitrarily complicated filter using
# :mod:`gwpy.signal.filter_design`

from gwpy.signal import filter_design
bp = filter_design.bandpass(50, 250, 4096.)
notches = [filter_design.notch(f, 4096.) for f in (60, 120, 180)]
zpk = filter_design.concatenate_zpks(bp, *notches)

# And then can download some data from LOSC to apply it using
# `TimeSeries.filter`:

from gwpy.timeseries import TimeSeries
data = TimeSeries.fetch_open_data('H1', 1126259446, 1126259478)
filtered = data.filter(zpk, filtfilt=True)

# We can plot the original signal, and the filtered version, cutting
# off either end of the filtered data to remove filter-edge artefacts

from gwpy.plotter import TimeSeriesPlot
plot = TimeSeriesPlot(data, filtered[128:-128], sep=True)
plot.show()
Example 35
However, because of the shape of the LIGO sensitivity curve, picking out
features in the most sensitive frequency band (a few hundred Hertz) is
very hard.

We can normalise our `~gwpy.spectrogram.Spectrogram` to highlight those
features.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.timeseries'

# Again, we import the `TimeSeries` and call
# :meth:`TimeSeries.fetch_open_data` to download the strain
# data for the LIGO-Hanford interferometer
from gwpy.timeseries import TimeSeries
data = TimeSeries.fetch_open_data('H1', 'Sep 14 2015 09:45',
                                  'Sep 14 2015 09:55')

# Next, we can calculate a `~gwpy.spectrogram.Spectrogram` using the
# :meth:`spectrogram` method of the `TimeSeries` over a 2-second stride
# with a 1-second FFT and 0.5-second overlap (50%):
specgram = data.spectrogram(2, fftlength=1, overlap=.5)**(1 / 2.)

# and can normalise it against the overall median ASD by calling the
# :meth:`~gwpy.spectrogram.Spectrogram.ratio` method:

normalised = specgram.ratio('median')

# Finally, we can make a plot using the
# :meth:`~gwpy.spectrogram.Spectrogram.plot` method
plot = normalised.plot(norm='log', vmin=.1, vmax=10, cmap='Spectral_r')
ax = plot.gca()
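
# The excerpt ends here; the plot would normally be finished with a log
# frequency axis and a colorbar, for example:
ax.set_yscale('log')
ax.set_ylim(10, 2000)
ax.colorbar(label='Amplitude relative to median')
plot.show()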
Example 36
from gwpy.timeseries import TimeSeries
raw = TimeSeries.fetch_open_data('L1', 1126259446, 1126259478)
data = raw.bandpass(50, 300).notch(60).crop(1126259446+1)
plot = data.plot(xscale='auto-gps')
plot.show()
Example 37
It is used to measure the 'Gaussianity' of those data, where a value of 1
indicates Gaussian behaviour, less than 1 indicates coherent variations,
and greater than 1 indicates incoherent variation.
It is a useful measure of the quality of the strain data being generated
and recorded at a LIGO site.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.frequencyseries'

# To demonstrate this, we can load some data from the LIGO Livingston
# interferometer around the time of the GW151226 gravitational wave detection:

from gwpy.timeseries import TimeSeries
gwdata = TimeSeries.fetch_open_data('L1',
                                    'Dec 26 2015 03:37',
                                    'Dec 26 2015 03:47',
                                    verbose=True)

# Next, we can calculate a Rayleigh statistic `FrequencySeries` using the
# :meth:`~gwpy.timeseries.TimeSeries.rayleigh_spectrum` method of the
# `~gwpy.timeseries.TimeSeries` with a 2-second FFT and 1-second overlap (50%):

rayleigh = gwdata.rayleigh_spectrum(2, 1)

# For easy comparison, we can calculate the spectral sensitivity ASD of the
# strain data and plot both on the same figure:

asd = gwdata.asd(2, 1)
plot = asd.plot(figsize=(8, 6))
plot.add_frequencyseries(rayleigh, newax=True, sharex=plot.axes[0])
asdax, rayax = plot.axes
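
# The excerpt stops here; labelling both sets of axes would complete it,
# for example (limits are illustrative):
asdax.set_ylabel(r'ASD [strain/$\sqrt{\mathrm{Hz}}$]')
rayax.set_ylabel('Rayleigh statistic')
rayax.set_xlabel('Frequency [Hz]')
rayax.set_xlim(30, 1500)
plot.show()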
Example 38
The LIGO Laboratory has publicly released the strain data around the time of
the GW150914 gravitational-wave detection; we can use these to calculate
and display the spectral sensitivity of each of the detectors at that time.
"""

__author__ = "Duncan Macleod <*****@*****.**>"
__currentmodule__ = 'gwpy.frequencyseries'

# In order to generate a `FrequencySeries` we need to import the
# `~gwpy.timeseries.TimeSeries` and use
# :meth:`~gwpy.timeseries.TimeSeries.fetch_open_data` to download the strain
# records:

from gwpy.timeseries import TimeSeries
lho = TimeSeries.fetch_open_data('H1', 1126259446, 1126259478)
llo = TimeSeries.fetch_open_data('L1', 1126259446, 1126259478)

# We can then call the :meth:`~gwpy.timeseries.TimeSeries.asd` method to
# calculate the amplitude spectral density for each
# `~gwpy.timeseries.TimeSeries`:
lhoasd = lho.asd(4, 2)
lloasd = llo.asd(4, 2)

# We can then :meth:`~FrequencySeries.plot` the spectra using the 'standard'
# colour scheme:

plot = lhoasd.plot(label='LIGO-Hanford', color='gwpy:ligo-hanford')
ax = plot.gca()
ax.plot(lloasd, label='LIGO-Livingston', color='gwpy:ligo-livingston')
ax.set_xlim(10, 2000)
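
# The excerpt ends here; typical finishing touches would be amplitude limits,
# a legend and rendering (the limits shown are illustrative):
ax.set_ylim(5e-24, 1e-21)
ax.set_ylabel(r'Strain noise [1/$\sqrt{\mathrm{Hz}}$]')
ax.legend()
plot.show()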
Example 39
def load_gw(t0, detector):
    strain = TimeSeries.fetch_open_data(detector,
                                        t0 - 14,
                                        t0 + 14,
                                        cache=False)
    return strain
Example 40
from gwpy.timeseries import TimeSeries  # import the class
data = TimeSeries.fetch_open_data('L1', 968654500, 968654600)  # fetch data from LOSC
asd = data.asd(4, 2)  # calculate the amplitude spectral density with a 4-second FFT and 50% overlap
plot = asd.plot()  # make plot
ax = plot.gca()  # extract Axes
ax.set_xlabel('Frequency [Hz]')  # set X-axis label
ax.set_ylabel(r'ASD [strain/\rtHz]')  # set Y-axis label (requires latex)
ax.set_xlim(40, 2000)  # set X-axis limits
ax.set_ylim(8e-24, 5e-20)  # set Y-axis limits
ax.set_title('Strain sensitivity of LLO during S6')  # set Axes title
plot.show()  # show me the plot
Example 41
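# The excerpt begins in the middle of a helper function; the imports and
# opening lines below are a reconstruction (not the original source, and
# assuming the modern ``gwpy.plot.Plot``) so the fragment reads as a whole:
import numpy as np

from gwpy.plot import Plot
from gwpy.timeseries import TimeSeries


def distance(a, b):
    am = a.mean()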
    bm = b.mean()
    delta = bm - am
    ad = a + delta
    dist = np.zeros(len(a))
    xf = 1000000
    xfi = 1 / xf
    ad = a.copy()
    bd = b.copy()

    thresh = 10000000
    for i in range(len(a)):
        dist[i] = 1 / np.linalg.norm(a[i] - b[i])
        if dist[i] < -1E-6:
            ad[i] = am
            bd[i] = bm
    return dist, ad, bd


# time=np.arange(0, 100, 0.1);
# a= TimeSeries(np.sin(time))
# b= TimeSeries(np.sin(time*1.5))

# plot=Plot([a,b, dist])
# plot.show()

hdata = TimeSeries.fetch_open_data('H1', 1126259446, 1126259478, cache=True)
ldata = TimeSeries.fetch_open_data('L1', 1126259446, 1126259478, cache=True)
d, hl, ll = distance(hdata, ldata)
print(d)
plot = Plot([hdata, ldata], d, [hl, ll])
plot.show()
Example 42
over time.
One tool for that is the :ref:`spectrogram <gwpy-spectrogram>`, while another
is simply to show percentiles of a number of ASD measurements.

In this example we calculate the median ASD over 2048-seconds surrounding
the GW170817 event, and also the 5th and 95th percentiles of the ASD, and
plot them on a single figure.
"""

__author__ = 'Duncan Macleod <*****@*****.**>'
__currentmodule__ = 'gwpy.timeseries'

# First, as always, we get the data using :meth:`TimeSeries.fetch_open_data`:

from gwpy.timeseries import TimeSeries
hoft = TimeSeries.fetch_open_data('H1', 1187007040, 1187009088, tag='C00')

# Next we calculate a :class:`~gwpy.spectrogram.Spectrogram` by calculating
# a number of ASDs, using the :meth:`~gwpy.timeseries.TimeSeries.spectrogram2`
# method:

sg = hoft.spectrogram2(fftlength=4, overlap=2)**(1 / 2.)

# From this we can trivially extract the median, 5th and 95th percentiles:

median = sg.percentile(50)
min_ = sg.percentile(5)
max_ = sg.percentile(95)

# Finally, we can make a plot, using
# :meth:`~gwpy.plotter.FrequencySeriesAxes.plot_frequencyseries_mmm` to
# draw the median trace together with the 5th/95th percentile band.
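
# The example is cut off here; since the exact ``plot_frequencyseries_mmm``
# call is not shown, a generic sketch of the same figure (limits and labels
# are illustrative) is:
plot = median.plot(label='Median', color='gwpy:ligo-hanford', zorder=100)
ax = plot.gca()
ax.plot(min_, label='5th percentile', linewidth=0.5)
ax.plot(max_, label='95th percentile', linewidth=0.5)
ax.set_xlim(10, 1500)
ax.set_xlabel('Frequency [Hz]')
ax.set_ylabel(r'Strain noise [1/$\sqrt{\mathrm{Hz}}$]')
ax.legend()
plot.show()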