Example #1
 def get_tmpltbank_data(self):
     """Read inspiral horizon data from the TmpltBank cache
     """
     tmpltsegs = find_cache_segments(self.tmpltbankcache)
     ifo = self.channel.split(':')[0]
     rangechannel = '%s:horizon_distance' % ifo
     sizechannel = '%s:tmpltbank_size' % ifo
     globalv.DATA[rangechannel] = TimeSeriesList()
     globalv.DATA[sizechannel] = TimeSeriesList()
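     # loop over segments, reading data from each file in the cache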
     for seg in tmpltsegs:
         segcache = self.tmpltbankcache.sieve(segment=seg)
         rangedata = []
         sizedata = []
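         # read the horizon distance (SummValue table) and template
         # count (SnglInspiral table) from each file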
         for ce in segcache:
             xmldoc = llwutils.load_filename(ce.path)
             svtable = SummValueTable.get_table(xmldoc)
             svtable.sort(key=lambda row: float(row.comment.split('_')[0]))
             rangedata.append(svtable[0].value * (1.4)**(5 / 6.))
             sizedata.append(len(SnglInspiralTable.get_table(xmldoc)))
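         # package the values for this segment as TimeSeries and store them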
         if rangedata:
             dt = float(abs(segcache[0].segment))
             epoch = segcache[0].segment[0] + dt / 2.
             globalv.DATA[rangechannel].append(
                 TimeSeries(rangedata,
                            sample_rate=1 / dt,
                            epoch=epoch,
                            name=rangechannel))
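              # merge contiguous series where possible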
             try:
                 globalv.DATA[rangechannel].coalesce()
             except ValueError:
                 pass
             globalv.DATA[sizechannel].append(
                 TimeSeries(sizedata,
                            sample_rate=1 / dt,
                            epoch=epoch,
                            name=sizechannel))
             try:
                 globalv.DATA[sizechannel].coalesce()
             except ValueError:
                 pass
Example #2
    def _make_events(self, doc, psd_file, coinc_def):
        # Look up necessary tables.
        coinc_table = CoincTable.get_table(doc)
        coinc_map_table = CoincMapTable.get_table(doc)
        sngl_inspiral_table = SnglInspiralTable.get_table(doc)
        try:
            time_slide_table = TimeSlideTable.get_table(doc)
        except ValueError:
            offsets_by_time_slide_id = None
        else:
            offsets_by_time_slide_id = time_slide_table.as_dict()

        # Indices to speed up lookups by ID.
        key = operator.attrgetter('coinc_event_id')
        event_ids_by_coinc_event_id = {
            coinc_event_id:
                tuple(coinc_map.event_id for coinc_map in coinc_maps)
            for coinc_event_id, coinc_maps
            in itertools.groupby(sorted(coinc_map_table, key=key), key=key)}
        sngl_inspirals_by_event_id = {
            row.event_id: row for row in sngl_inspiral_table}

        # Filter rows by coinc_def if requested.
        if coinc_def is not None:
            coinc_def_table = CoincDefTable.get_table(doc)
            coinc_def_ids = {
                row.coinc_def_id for row in coinc_def_table
                if (row.search, row.search_coinc_type) ==
                (coinc_def.search, coinc_def.search_coinc_type)}
            coinc_table = [
                row for row in coinc_table
                if row.coinc_def_id in coinc_def_ids]

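        # SNR time series for each sngl_inspiral, keyed by event ID.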
        snr_dict = dict(self._snr_series_by_sngl_inspiral(doc))

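        # Map each process ID to the program that created it.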
        process_table = ProcessTable.get_table(doc)
        program_for_process_id = {
            row.process_id: row.program for row in process_table}

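        # Map each process ID to its --reference-psd argument, if any.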
        try:
            process_params_table = ProcessParamsTable.get_table(doc)
        except ValueError:
            psd_filenames_by_process_id = {}
        else:
            psd_filenames_by_process_id = {
                process_param.process_id: process_param.value
                for process_param in process_params_table
                if process_param.param == '--reference-psd'}

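        # Warn if zero-lag coincs are present but the time slide table is missing.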
        ts0 = TimeSlideID(0)
        for time_slide_id in {coinc.time_slide_id for coinc in coinc_table}:
            if offsets_by_time_slide_id is None and time_slide_id == ts0:
                log.warning(
                    'Time slide record is missing for %s, '
                    'guessing that this is zero-lag', time_slide_id)

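        # Warn once per program about the phase convention being applied.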
        for program in {program_for_process_id[coinc.process_id]
                        for coinc in coinc_table}:
            invert_phases = self._phase_convention(program)
            if invert_phases:
                log.warning(
                    'Using anti-FINDCHIRP phase convention; inverting phases. '
                    'This is currently the default and it is appropriate for '
                    'gstlal and MBTA but not pycbc as of observing run 1 '
                    '("O1"). The default setting is likely to change in the '
                    'future.')

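        # Build and yield one event per coincidence.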
        for coinc in coinc_table:
            coinc_event_id = coinc.coinc_event_id
            coinc_event_num = int(coinc_event_id)
            sngls = [sngl_inspirals_by_event_id[event_id] for event_id
                     in event_ids_by_coinc_event_id[coinc_event_id]]
            if offsets_by_time_slide_id is None and coinc.time_slide_id == ts0:
                offsets = defaultdict(float)
            else:
                offsets = offsets_by_time_slide_id[coinc.time_slide_id]

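            # The intrinsic template parameters must agree across detectors.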
            template_args = [
                {key: getattr(sngl, key) for key in self._template_keys}
                for sngl in sngls]
            if any(d != template_args[0] for d in template_args[1:]):
                raise ValueError(
                    'Template arguments are not identical for all detectors!')
            template_args = template_args[0]

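            # Decide whether phases must be inverted for this program.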
            invert_phases = self._phase_convention(
                program_for_process_id[coinc.process_id])

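            # Assemble the per-detector single-event records.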
            singles = tuple(LigoLWSingleEvent(
                self, sngl.ifo, sngl.snr, sngl.coa_phase,
                float(sngl.end + offsets[sngl.ifo]), float(sngl.end),
                psd_file or psd_filenames_by_process_id.get(sngl.process_id),
                snr_dict.get(sngl.event_id), invert_phases)
                for sngl in sngls)

            event = LigoLWEvent(coinc_event_num, singles, template_args)

            yield coinc_event_num, event