Example 1
def path2cache(rootdir, pathname):
    """
	given a rootdir and a glob-compatible pathname that may contain shell-style wildcards,
	will find all files that match and populate a Cache.
	NOTE: this will only work with files that comply with the T050017 file convention.
	"""
    return [
        CacheEntry.from_T050017(file_)
        for file_ in glob.iglob(os.path.join(rootdir, pathname))
    ]
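The T050017 convention encodes observatory, description, GPS start time, and duration in the file name as OBS-DESC-START-DURATION.ext, which is what from_T050017 parses. A minimal usage sketch, assuming the function above plus glob, os, and a CacheEntry class (here taken from lal.utils) are in scope; the directory and glob pattern are hypothetical:

# Hypothetical call: collect entries for files like /data/H-H1_TEST-1000000000-32.gwf
import glob
import os

from lal.utils import CacheEntry

entries = path2cache("/data", "H-H1_TEST-*.gwf")
for entry in entries:
    # each CacheEntry exposes the parsed observatory, description and GPS segment
    print(entry.observatory, entry.description, entry.segment)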
Example 2
def setup_class(cls):
    cls.FRAMES = {}
    cls._tempdir = tempfile.mkdtemp(prefix='gwsumm-test-data-')
    # download each LOSC frame file into the temporary directory and record
    # it in a per-channel Cache
    for channel in LOSC_DATA:
        cls.FRAMES[channel] = Cache()
        for gwf in LOSC_DATA[channel]:
            target = os.path.join(cls._tempdir, os.path.basename(gwf))
            download(gwf, target)
            cls.FRAMES[channel].append(CacheEntry.from_T050017(target))
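For context, LOSC_DATA is assumed here to be a mapping from channel name to a list of frame-file URLs whose basenames follow the T050017 convention (the real contents are defined elsewhere in the test module); a purely hypothetical sketch of its shape:

# Hypothetical shape of LOSC_DATA; the channel and URL below are placeholders.
LOSC_DATA = {
    'L1:LOSC-STRAIN': [
        'https://example.org/losc/L-L1_LOSC_4_V1-1126259446-32.gwf',
    ],
}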
Example 3
def cache():
    try:
        from lal.utils import CacheEntry
    except ImportError as e:
        pytest.skip(str(e))

    cache = []
    for seg in SEGMENTS:
        d = seg[1] - seg[0]
        f = 'A-B-%d-%d.tmp' % (seg[0], d)
        cache.append(CacheEntry.from_T050017(f, coltype=int))
    return cache
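Because from_T050017 only parses the file name, the .tmp files never need to exist, and coltype=int makes the recovered GPS boundaries plain integers. A small sketch of that assumption (the name is arbitrary):

# Sketch: parse a T050017-style name without touching the filesystem.
from lal.utils import CacheEntry

entry = CacheEntry.from_T050017('A-B-0-1.tmp', coltype=int)
assert entry.observatory == 'A'
assert entry.description == 'B'
assert tuple(entry.segment) == (0, 1)   # GPS interval [0, 1)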
Example 4
    def make_cache():
        try:
            from lal.utils import CacheEntry
        except ImportError as e:
            pytest.skip(str(e))

        segs = SegmentList()
        cache = []
        for seg in [(0, 1), (1, 2), (4, 5)]:
            d = seg[1] - seg[0]
            f = 'A-B-%d-%d.tmp' % (seg[0], d)
            cache.append(CacheEntry.from_T050017(f, coltype=int))
            segs.append(Segment(*seg))
        return cache, segs
Example 5
    def make_cache():
        try:
            from lal.utils import CacheEntry
        except ImportError as e:
            pytest.skip(str(e))

        segs = SegmentList()
        cache = Cache()
        for seg in [(0, 1), (1, 2), (4, 5)]:
            d = seg[1] - seg[0]
            f = 'A-B-%d-%d.tmp' % (seg[0], d)
            cache.append(CacheEntry.from_T050017(f, coltype=int))
            segs.append(Segment(*seg))
        return cache, segs
Example 6
def mock_datafind_connection(framefile):
    try:
        from lal.utils import CacheEntry
    except ImportError as e:
        pytest.skip(str(e))
    from glue import datafind
    ce = CacheEntry.from_T050017(framefile)
    frametype = ce.description
    # create mock up of connection object
    DatafindConnection = mock.create_autospec(
        datafind.GWDataFindHTTPConnection)
    DatafindConnection.find_types.return_value = [frametype]
    DatafindConnection.find_latest.return_value = [ce]
    DatafindConnection.find_frame_urls.return_value = [ce]
    return DatafindConnection
Example 7
File: mocks.py Project: stefco/gwpy
def mock_datafind_connection(framefile):
    try:
        from lal.utils import CacheEntry
    except ImportError as e:
        pytest.skip(str(e))
    from glue import datafind
    ce = CacheEntry.from_T050017(framefile)
    frametype = ce.description
    # create mock up of connection object
    DatafindConnection = mock.create_autospec(
        datafind.GWDataFindHTTPConnection)
    DatafindConnection.find_types.return_value = [frametype]
    DatafindConnection.find_latest.return_value = [ce]
    DatafindConnection.find_frame_urls.return_value = [ce]
    DatafindConnection.host = 'mockhost'
    DatafindConnection.port = 80
    return DatafindConnection
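In both variants of this mock, the advertised frametype is simply the description field parsed from the frame file's T050017 name; a short sketch of that parsing (the path is hypothetical):

# Sketch of the name parsing the mock relies on; the path is made up.
from lal.utils import CacheEntry

ce = CacheEntry.from_T050017('/data/H-H1_TEST-1000000000-32.gwf')
print(ce.observatory)    # 'H'
print(ce.description)    # 'H1_TEST' -> returned by the mocked find_types()
print(ce.segment)        # the GPS interval 1000000000-1000000032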
Example 8
def test_get_mp_cache_segments():
    """Test `gwpy.timeseries.io.cache.get_mp_cache_segments`
    """
    from lal.utils import CacheEntry
    from glue.lal import Cache
    from glue.segmentsUtils import segmentlist_range
    Cache.entry_class = CacheEntry

    # make a cache of 2-second files covering two disjoint 10-second segments
    segments = SegmentList([Segment(0, 10), Segment(20, 30)])
    fsegs = SegmentList([s for seg in segments for
                         s in segmentlist_range(seg[0], seg[1], 2)])
    cache = Cache([CacheEntry.from_T050017(
                       'A-B-{0}-{1}.ext'.format(s[0], abs(s)))
                   for s in fsegs])

    # assert that no multiprocessing just returns the segment
    assert_segmentlist_equal(
        tio_cache.get_mp_cache_segments(cache, 1, Segment(0, 30)),
        SegmentList([Segment(0, 30)]))

    # simple test that segments get divided as expected
    mpsegs = tio_cache.get_mp_cache_segments(cache, 2, Segment(0, 30))
    assert_segmentlist_equal(mpsegs, segments)

    # test that mismatch with files edges is fine
    mpsegs = tio_cache.get_mp_cache_segments(cache, 2, Segment(0, 21))
    assert not mpsegs - SegmentList([Segment(0, 21)])

    # test segment divisions
    mpsegs = tio_cache.get_mp_cache_segments(cache, 4, Segment(0, 30))
    assert_segmentlist_equal(
        mpsegs,
        SegmentList(map(Segment, [(0, 6), (6, 10), (20, 26), (26, 30)]))
    )
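The final assertion is consistent with how the cache is laid out: 20 seconds of data in two disjoint 10-second segments built from 2-second files, so asking for nproc=4 splits each 10-second segment roughly in half along the file boundaries (6 s + 4 s), giving (0, 6), (6, 10), (20, 26) and (26, 30).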
Example 9
#
# Other initializations
#

path_count = 0
seglists = segments.segmentlistdict()

#
# Filter input one line at a time
#

for line in src:
    path, filename = os.path.split(line.strip())
    url = "file://localhost%s" % os.path.abspath(os.path.join(path, filename))
    try:
        cache_entry = CacheEntry.from_T050017(url)
    except ValueError as e:
        if options.include_all:
            cache_entry = CacheEntry(None, None, None, url)
        elif options.force:
            continue
        else:
            raise e
    print(str(cache_entry), file=dst)
    path_count += 1
    if cache_entry.segment is not None:
        seglists |= cache_entry.segmentlistdict.coalesce()

#
# Summary
#
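Each retained path is emitted in the standard five-column LAL cache format (observatory, description, GPS start, duration, URL). A self-contained sketch of the per-line transformation performed above, using a hypothetical input path:

# Standalone sketch of the transformation above; the path is hypothetical.
import os

from lal.utils import CacheEntry

line = "/data/H-H1_TEST-1000000000-32.gwf\n"
path, filename = os.path.split(line.strip())
url = "file://localhost%s" % os.path.abspath(os.path.join(path, filename))
print(CacheEntry.from_T050017(url))
# expected to print something like:
# H H1_TEST 1000000000 32 file://localhost/data/H-H1_TEST-1000000000-32.gwf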
Example 10
def test_file_path_cacheentry():
    from lal.utils import CacheEntry
    path = "/path/to/A-B-0-1.txt"
    assert io_utils.file_path(CacheEntry.from_T050017(path)) == path
Example 11
def framecache_from_event(gid,
                          observatories,
                          frame_types,
                          time_span=500,
                          outdir=".",
                          filename="frame.cache",
                          verbose=False):
    """Get the frame cache for an event given the gracedb event id.

	Args:
		gid (str): The gracedb event id.
		observatories (list): See gwdatafind.
		frame_type (list): See gwdatafind.
		time_span (float): The time span before and after the trigger time.
		outdir (str, default="."): The output directory.
		filename (str, default="frame.cache"): The output filename.
		verbose (bool): Be verbose.

	Returns:
		Dictionary of instruments, trigger_times, gps_start_time,
		gps_end_time, channels_name.

	"""
    assert time_span >= 500., "Please use time_span larger or equal to 500."

    obs2ifo = {"H": "H1", "L": "L1", "V": "V1"}

    observatories = set(observatories)
    frame_types = set(frame_types)

    if len(observatories) != len(frame_types):
        raise ValueError("Must have as many frame_types as observatories.")
    # FIXME: this is not reliable; is there a better way to map frame_type to observatory?
    obs_type_dict = dict([(obs, frame_type) for obs in observatories
                          for frame_type in frame_types
                          if obs == frame_type[0]])

    gracedb_client = gracedb.GraceDb()
    coinc_xmldoc = lvalert_helper.get_coinc_xmldoc(gracedb_client, gid)
    eventid_trigger_dict = dict(
        (row.ifo, row)
        for row in lsctables.SnglInspiralTable.get_table(coinc_xmldoc))
    channel_names_dict = dict([
        (row.value.split("=")[0], row.value)
        for row in lsctables.ProcessParamsTable.get_table(coinc_xmldoc)
        if row.param == "--channel-name"
    ])

    gwdata_metavar_headers = [
        "instruments", "trigger_times", "gps_start_time", "gps_end_time",
        "channels_name"
    ]
    gwdata_metavar_values = []
    urls = []
    for observatory, frame_type in obs_type_dict.items():
        trigger_time = eventid_trigger_dict[obs2ifo[observatory]].end
        gps_start_time = int(trigger_time - time_span)
        gps_end_time = int(trigger_time + time_span)
        gwdata_metavar_values.append(
            (obs2ifo[observatory], trigger_time, gps_start_time, gps_end_time,
             channel_names_dict[obs2ifo[observatory]]))

        urls += gwdatafind.find_urls(observatory, frame_type, gps_start_time,
                                     gps_end_time)

    cache_path = os.path.join(outdir, filename)
    with open(cache_path, "w") as cache:
        for url in urls:
            entry = str(CacheEntry.from_T050017(url))
            cache.write("%s\n" % entry)
            if verbose:
                sys.stderr.write("writing %s to %s\n" % (entry, cache_path))
        if verbose:
            sys.stderr.write("Done.\n")

    return dict(zip(gwdata_metavar_headers, zip(*gwdata_metavar_values)))
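A hedged sketch of how framecache_from_event might be called; the GraceDB id and frame types below are placeholders, and the call needs network access to GraceDB and gwdatafind:

# Hypothetical invocation; "G123456" and the frame types are placeholders.
metadata = framecache_from_event(
    "G123456",
    observatories=["H", "L"],
    frame_types=["H1_HOFT_C00", "L1_HOFT_C00"],
    time_span=500,
    outdir=".",
    verbose=True,
)
print(metadata["instruments"])   # e.g. ('H1', 'L1')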