Example #1
import glob

from glue.lal import Cache
from gwdatafind import find_urls

# `app` (application configuration) and `log` (task logger) are module-level
# objects assumed to be provided by the surrounding package.


def create_cache(ifo, start, end):
    """Find .gwf files and create cache. Will first look in the llhoft, and
    if the frames have expired from llhoft, will call gwdatafind.

    Parameters
    ----------
    ifo : str
        Interferometer name (e.g. ``H1``).
    start, end : int or float
        GPS start and end times desired.

    Returns
    -------
    :class:`glue.lal.Cache`

    Example
    -------
    >>> create_cache('H1', 1198800018, 1198800618)
    [<glue.lal.CacheEntry at 0x7fbae6b71278>,
     <glue.lal.CacheEntry at 0x7fbae6ae5b38>,
     <glue.lal.CacheEntry at 0x7fbae6ae5c50>,
     ...
     <glue.lal.CacheEntry at 0x7fbae6b15080>,
     <glue.lal.CacheEntry at 0x7fbae6b15828>]

    """
    pattern = app.conf['llhoft_glob'].format(detector=ifo)
    filenames = glob.glob(pattern)
    cache = Cache.from_urls(filenames)

    try:
        cache_starttime = int(
            list(cache.to_segmentlistdict().values())[0][0][0])
    except IndexError:
        log.exception('Files do not exist in llhoft_glob')
        return cache  # returns empty cache

    if start >= cache_starttime:  # required data is in llhoft
        return cache

    # otherwise, required data has left llhoft
    high_latency = app.conf['high_latency_frame_types'][ifo]
    urls = find_urls(ifo[0], high_latency, start, end)
    if urls:
        return Cache.from_urls(urls)

    # required data not in high latency frames
    low_latency = app.conf['low_latency_frame_types'][ifo]
    urls = find_urls(ifo[0], low_latency, start, end)
    if not urls:  # required data not in low latency frames
        log.error('This data cannot be found, or does not exist.')

    return Cache.from_urls(urls)
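
A typical downstream use of the returned cache is reading strain with GWpy.
A minimal sketch, assuming GWpy is installed; the channel name is the usual
H1 calibrated-strain channel, used here purely for illustration:

from gwpy.timeseries import TimeSeries

cache = create_cache('H1', 1198800018, 1198800618)
if cache:  # an empty cache means no frames were found anywhere
    data = TimeSeries.read(cache, 'H1:GDS-CALIB_STRAIN',
                           start=1198800018, end=1198800618)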
Example #2
import ast

import gwpy.timeseries
import numpy as np
from glue.lal import Cache
from gwdatafind import find_urls

# `connect_gitlab`, `gitlab`, and `config` are assumed to be provided by
# the surrounding package.


def checkifo(event):
    server, repository = connect_gitlab()
    gitlab_events = gitlab.find_events(repository, subset=event)

    for event in gitlab_events:
        if "event time" not in event.event_object.meta:
            print(f"Time not found {event.event_object.name}")
        time = event.event_object.meta['event time']
        gpsstart = time - 600
        gpsend = time + 600
        bits = ['Bit 0', 'Bit 1', 'Bit 2']

        active_ifo = []
        for ifo in ["L1", "H1", "V1"]:
            frametypes = event.event_object.meta['data']['frame-types']
            urls = find_urls(site=f"{ifo[0]}",
                             frametype=frametypes[ifo],
                             gpsstart=gpsstart,
                             gpsend=gpsend)
            datacache = Cache.from_urls(urls)
            if len(datacache) == 0:
                print(f"No {ifo} data found.")
                continue

            if "state vector" in event.meta:
                state_vector_channel = event.meta['state vector']
            else:
                state_vector_channel = ast.literal_eval(
                    config.get("data", "state-vector"))

            state = gwpy.timeseries.StateVector.read(
                datacache,
                state_vector_channel[ifo],
                start=gpsstart,
                end=gpsend,
                pad=0,  # pad gaps so that discontinuous data do not raise errors
            )
            if not np.issubdtype(state.dtype, np.unsignedinteger):
                # if data are not unsigned integers, cast to them now so that
                # we can determine the bit content for the flags
                state = state.astype(
                    "uint32",
                    casting="unsafe",
                    subok=True,
                    copy=False,
                )
            flags = state.to_dqflags()

            segments = flags[bits[0]].active
            for bit in bits:
                segments -= ~flags[bit].active

            if len(segments) > 0:
                active_ifo.append(ifo)
        print(event.event_object.name)
        if event.event_object.meta['interferometers'] != active_ifo:
            print(f"Gitlab data\t{event.event_object.meta['interferometers']}")
            print(f"Recommended IFOS\t{active_ifo}")
        event.event_object.meta['interferometers'] = active_ifo
        event.update_data()
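
The subtlest step above is "segments -= ~flags[bit].active": subtracting the
complement of each bit's active segments is the same as intersecting with
them. A standalone sketch of that step, assuming a GWpy installation; the
channel argument and bit labels are illustrative:

from gwpy.timeseries import StateVector

def active_segments(datacache, channel, gpsstart, gpsend,
                    bits=('Bit 0', 'Bit 1', 'Bit 2')):
    state = StateVector.read(datacache, channel,
                             start=gpsstart, end=gpsend, pad=0)
    flags = state.to_dqflags()  # one DataQualityFlag per bit
    segments = flags[bits[0]].active
    for bit in bits[1:]:
        segments &= flags[bit].active  # intersection, as in checkifo
    return segments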
Example #3
def _query(self, channel, start, end):
    "Do we know where the frame file is?"
    if segment(start, end) in self._remotecoverage:
        return True
    urls = query_LDR(self.host, self.port, channel[0], self.frametype,
                     start, end, urlType="file")
    if urls:
        new = Cache.from_urls(urls, coltype=int)
        new.sort(key=operator.attrgetter("segment"))
        self.add_cache(new)
    return segment(start, end) in self._remotecoverage
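
The early return relies on glue.segments containment semantics: a segment is
"in" a segmentlist only when the list wholly covers it. A quick illustration
with made-up GPS times:

from glue.segments import segment, segmentlist

coverage = segmentlist([segment(0, 100), segment(100, 200)])
coverage.coalesce()  # merge touching segments into [0, 200)
print(segment(50, 150) in coverage)   # True: wholly covered
print(segment(150, 250) in coverage)  # False: extends past coverage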
Example #4
File: losc.py Project: rpfisher/gwpy
from glue.lal import Cache


def _losc_json_cache(metadata, detector, sample_rate=4096,
                     format='hdf5', duration=4096):
    """Parse a :class:`~glue.lal.Cache` from a LOSC metadata packet
    """
    urls = []
    for fmd in metadata:  # loop over file metadata dicts
        # skip over files we don't want
        if (fmd['detector'] != detector or
                fmd['sampling_rate'] != sample_rate or
                fmd['format'] != format or
                fmd['duration'] != duration):
            continue
        urls.append(fmd['url'])
    return Cache.from_urls(urls)
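
An illustrative call with a hand-built metadata packet; the dict keys match
the ones the function filters on, and the URLs are placeholders following the
T050017 file-naming convention that Cache.from_urls knows how to parse:

metadata = [
    {'detector': 'H1', 'sampling_rate': 4096, 'format': 'hdf5',
     'duration': 4096,
     'url': 'file://localhost/tmp/H-H1_LOSC_4_V1-1126256640-4096.hdf5'},
    {'detector': 'L1', 'sampling_rate': 4096, 'format': 'hdf5',
     'duration': 4096,
     'url': 'file://localhost/tmp/L-L1_LOSC_4_V1-1126256640-4096.hdf5'},
]
cache = _losc_json_cache(metadata, 'H1')  # keeps only the H1 entry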
Example #5
def test_lalcache_from_gluecache():
    files = [
        "X-TEST-0-1.gwf",
        "X-TEST-1-1.gwf",
    ]
    gcache = GlueCache.from_urls(files, coltype=LIGOTimeGPS)
    try:
        lcache = lal_utils.lalcache_from_gluecache(gcache)
    finally:
        for fp in files:
            if os.path.isfile(fp):
                os.remove(fp)
    assert lcache.length == len(gcache)
    assert lcache.list.url == (
        "file://localhost{}".format(os.path.abspath(files[0]))
    )
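
GlueCache.from_urls infers each entry's observatory, description, and GPS
segment from the T050017-style file name alone, which is why the files above
never need to exist on disk. A quick look, reusing the same name:

from glue.lal import Cache as GlueCache
from lal import LIGOTimeGPS

gcache = GlueCache.from_urls(["X-TEST-0-1.gwf"], coltype=LIGOTimeGPS)
entry = gcache[0]
print(entry.observatory, entry.description, entry.segment)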
示例#7
0
 def _query(self, channel, start, end):
     "Do we know where the frame file is?"
     if segment(start, end) in self._remotecoverage:
         return True
     urls = query_LDR(self.host,
                      self.port,
                      channel[0],
                      self.frametype,
                      start,
                      end,
                      urlType="file")
     if urls:
         new = Cache.from_urls(urls, coltype=int)
         new.sort(key=operator.attrgetter("segment"))
         self.add_cache(new)
     return segment(start, end) in self._remotecoverage
Example #6
    def test_read_write_gwf(self, api):
        array = self.create(name='TEST')

        # map API to format name
        if api is None:
            fmt = 'gwf'
        else:
            fmt = 'gwf.%s' % api

        # test basic write/read
        try:
            utils.test_read_write(array,
                                  fmt,
                                  extension='gwf',
                                  read_args=[array.name],
                                  assert_equal=utils.assert_quantity_sub_equal,
                                  assert_kw={'exclude': ['channel']})
        except ImportError as e:
            pytest.skip(str(e))

        # test read keyword arguments
        suffix = '-%d-%d.gwf' % (array.t0.value, array.duration.value)
        with tempfile.NamedTemporaryFile(prefix='GWpy-', suffix=suffix) as f:
            array.write(f.name)

            def read_(**kwargs):
                return type(array).read(f, array.name, format='gwf', **kwargs)

            # test start, end
            start, end = array.span.contract(10)
            t = read_(start=start, end=end)
            utils.assert_quantity_sub_equal(t,
                                            array.crop(start, end),
                                            exclude=['channel'])
            assert t.span == (start, end)
            t = read_(start=start)
            utils.assert_quantity_sub_equal(t,
                                            array.crop(start=start),
                                            exclude=['channel'])
            t = read_(end=end)
            utils.assert_quantity_sub_equal(t,
                                            array.crop(end=end),
                                            exclude=['channel'])

            # test dtype
            t = read_(dtype='float32')
            assert t.dtype is numpy.dtype('float32')
            t = read_(dtype={f.name: 'float64'})
            assert t.dtype is numpy.dtype('float64')

            # check errors
            with pytest.raises((ValueError, RuntimeError)):
                read_(start=array.span[1])
            with pytest.raises((ValueError, RuntimeError)):
                read_(end=array.span[0] - 1)

            # check old format prints a deprecation warning
            if api:
                with pytest.warns(DeprecationWarning):
                    type(array).read(f, array.name, format=api)

            # check reading from cache
            try:
                from glue.lal import Cache
            except ImportError:
                pass
            else:
                a2 = self.create(name='TEST', t0=array.span[1], dt=array.dx)
                suffix = '-%d-%d.gwf' % (a2.t0.value, a2.duration.value)
                with tempfile.NamedTemporaryFile(prefix='GWpy-',
                                                 suffix=suffix) as f2:
                    a2.write(f2.name)
                    cache = Cache.from_urls([f.name, f2.name], coltype=int)
                    comb = type(array).read(cache, 'TEST', format=fmt, nproc=2)
                    utils.assert_quantity_sub_equal(comb,
                                                    array.append(
                                                        a2, inplace=False),
                                                    exclude=['channel'])
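
A compact GWF round trip outside the test harness; this sketch assumes one of
GWpy's frame backends (frameCPP or lalframe) is installed, and the channel
name and output path are placeholders:

import numpy
from gwpy.timeseries import TimeSeries

array = TimeSeries(numpy.random.normal(size=1024), sample_rate=1024,
                   t0=1000000000, name='X1:TEST', channel='X1:TEST')
array.write('/tmp/X-TEST-1000000000-1.gwf')
copy = TimeSeries.read('/tmp/X-TEST-1000000000-1.gwf', 'X1:TEST')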
Example #7
def test_aux_channels_from_cache(self):
    cache = Cache.from_urls(AUX_FILES.values())
    channels = triggers.find_auxiliary_channels('omicron', None, None,
                                                cache=cache)
    self.assertListEqual(channels, sorted(AUX_FILES.keys()))
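
The AUX_FILES fixture assumed above maps channel names to Omicron trigger
files whose names encode those channels; a sketch of its likely shape, with
placeholder paths:

AUX_FILES = {
    'L1:GDS-CALIB_STRAIN':
        '/tmp/L1-GDS_CALIB_STRAIN_OMICRON-1126252217-100.xml.gz',
    'L1:ISI-GND_STS_ITMY_Z':
        '/tmp/L1-ISI_GND_STS_ITMY_Z_OMICRON-1126252217-100.xml.gz',
}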
Example #8
def test_open_data_source_glue():
    from glue.lal import Cache
    Cache.entry_class = lal_utils.CacheEntry
    cache = Cache.from_urls([TEST_GWF_FILE])
    return _test_open_data_source(cache)
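
The entry_class assignment swaps the parser glue uses when building cache
entries; assuming lal_utils here refers to the lal.utils module, the same
idea in isolation (the file name is a placeholder):

from glue.lal import Cache
from lal.utils import CacheEntry

Cache.entry_class = CacheEntry  # from_urls now builds lal.utils entries
cache = Cache.from_urls(["X-TEST-0-1.gwf"])
print(type(cache[0]))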