Example #1
def iso8601_to_gps(iso8601):
    """
    Convert an ISO 8601 date string to a floating-point GPS time in seconds.

    Parameters
    ----------

    iso8601 : str
        ISO 8601 date string (with fractional seconds)

    Returns
    -------

    gps : float
        Time in seconds since GPS epoch

    Example
    -------

    >>> gps_to_iso8601(1129501781.2)
    '2015-10-21T22:29:24.200000'
    >>> iso8601_to_gps('2015-10-21T22:29:24.2')
    1129501781.2
    """
    iso8601, _, second_fraction = iso8601.partition('.')
    second_fraction = float('0.' + second_fraction)
    tm = time.strptime(iso8601, "%Y-%m-%dT%H:%M:%S")
    gps_seconds = lal.UTCToGPS(tm)
    return gps_seconds + second_fraction
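A minimal, self-contained sketch of the idiom this example relies on. It assumes the SWIG-wrapped LALSuite bindings are importable as `lal`, and that `lal.UTCToGPS` accepts the `time.struct_time` returned by `time.strptime` (as the excerpt itself does); the excerpt omits its module-level `import time` and `import lal`.

import time
import lal

# parse the date-time part, then convert the broken-down UTC time to GPS
# seconds; fractional seconds are handled separately, as in the function above
tm = time.strptime('2015-10-21T22:29:24', "%Y-%m-%dT%H:%M:%S")
print(lal.UTCToGPS(tm) + 0.2)   # 1129501781.2, per the docstring example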
Example #2
def iso8601_to_gps(iso8601):
    """Convert an ISO 8601 date string to a floating-point GPS time in seconds."""
    iso8601, _, second_fraction = iso8601.partition('.')
    second_fraction = float('0.' + second_fraction)
    tm = time.strptime(iso8601, "%Y-%m-%dT%H:%M:%S")
    gps_seconds = lal.UTCToGPS(tm)
    return gps_seconds + second_fraction
Example #3
def utc_midnight(gps):
	"""
	Truncate a LIGOTimeGPS to UTC midnight.
	"""
	# convert to UTC (as list so we can edit it)
	tm = list(lal.GPSToUTC(int(gps)))

	# truncate to midnight
	tm[3] = 0       # hours
	tm[4] = 0       # minutes
	tm[5] = 0       # seconds

	# convert back to LIGOTimeGPS
	return lal.LIGOTimeGPS(lal.UTCToGPS(tuple(tm)))
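A short usage sketch of `utc_midnight`, assuming `lal` is imported in the surrounding module and the function above is defined there; the GPS/UTC pair (989168284 is 2011-05-11 16:57:49 UTC) is taken from the 'tm' struct test later in this listing.

import lal

gps = lal.LIGOTimeGPS(989168284)         # 2011-05-11 16:57:49 UTC
midnight = utc_midnight(gps)

# the hour, minute and second fields are truncated to zero
print(lal.GPSToUTC(int(midnight))[:6])   # expected (2011, 5, 11, 0, 0, 0)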
Example #4
def iso8601_to_gps(iso8601):
    """Convert an ISO 8601 date string to a floating-point GPS time in seconds."""
    date, time = iso8601.split('T')
    year, month, day = (int(datepart) for datepart in date.split('-'))
    hour, minute, second = time.split(':')
    hour = int(hour)
    minute = int(minute)
    second = float(second)
    second_fraction, second = math.modf(second)
    second = int(second)

    tm = [year, month, day, hour, minute, second, -1, -1, -1]
    gps_seconds = lal.UTCToGPS(tm)
    return gps_seconds + second_fraction
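The hand-built list in this variant holds the nine fields of a C `struct tm` in order: year, month, day, hour, minute, second, weekday, day-of-year and DST flag. The trailing `-1, -1, -1` leaves the last three for the library to fill in, following the usual `mktime` convention (an assumption here; the 'tm' struct test below passes fully populated values). A small round-trip sketch, assuming `lal` is importable:

import lal

# nine 'tm' fields: year, mon, mday, hour, min, sec, wday, yday, isdst
tm = [2015, 10, 21, 22, 29, 24, -1, -1, -1]
gps_seconds = lal.UTCToGPS(tm)
print(gps_seconds)                     # 1129501781, cf. the docstring in Example #1
print(lal.GPSToUTC(gps_seconds)[:6])   # back to (2015, 10, 21, 22, 29, 24)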
Example #5
assert (ptr_ptr == sts)
assert (ptr_null_ptr == sts)
assert (null_ptr_ptr == None)
del sts
del ptr_ptr
del ptr_null_ptr
del null_ptr_ptr
lal.CheckMemoryLeaks()
print("PASSED typemaps for strings and double pointers")

# check 'tm' struct conversions
print("checking 'tm' struct conversions ...")
gps0 = 989168284
utc0 = [2011, 5, 11, 16, 57, 49, 2, 131, 0]
assert (lal.GPSToUTC(gps0) == tuple(utc0))
assert (lal.UTCToGPS(utc0) == gps0)
for i in range(0, 10):
    gps = gps0 + i * 86400
    utc = list(utc0)
    utc[2] = utc[2] + i
    utc[6] = (utc[6] + i) % 7
    utc[7] = utc[7] + i
    utc[8] = -1 + (i % 3)
    assert (lal.GPSToUTC(gps)[0:8] == tuple(utc[0:8]))
    assert (lal.UTCToGPS(utc) == gps)
    utc = lal.GPSToUTC(lal.UTCToGPS(utc))
    dt = datetime.datetime(*utc[0:6])
    assert (utc[6] == dt.weekday())
lal.CheckMemoryLeaks()
print("PASSED 'tm' struct conversions")
	def pull(self, rankingstat, fapfar = None, zerolag_rankingstatpdf = None, coinc_sieve = None, flush = False, cluster = False, cap_singles = False, FAR_trialsfactor = 1.0):
		# NOTE:  rankingstat is not used to compute the ranking
		# statistic; it supplies the detector livetime segment
		# lists that determine which triggers are eligible for
		# inclusion in the background model, and it is the
		# destination for triggers identified for inclusion in
		# that model.  self.ln_lr_from_triggers is the ranking
		# statistic function (if set).

		# extract times when instruments were producing SNR.  used
		# to define "on instruments" for coinc tables, as a safety
		# check for impossible triggers, and to identify triggers
		# suitable for use in defining the background PDFs.  will
		# only need segment information for the times for which the
		# queues will yield triggers, so use a bisection search to
		# clip the lists to reduce subsequent operation count.

		age = float(self.time_slide_graph.age)
		snr_segments = segments.segmentlistdict((instrument, ratebinlist[ratebinlist.value_slice_to_index(slice(age, None))].segmentlist()) for instrument, ratebinlist in rankingstat.denominator.triggerrates.items())

		#
		# iterate over coincidences
		#

		gps_time_now = float(lal.UTCToGPS(time.gmtime()))
		newly_reported = []
		flushed = []
		flushed_unused = []
		self.last_coincs.clear()
		max_last_coinc_snr = {}
		for node, events in self.time_slide_graph.pull(newly_reported = newly_reported, flushed = flushed, flushed_unused = flushed_unused, coinc_sieve = coinc_sieve, event_collector = self.backgroundcollector, flush = flush):
			# construct row objects for coinc tables.

			coinc, coincmaps, coinc_inspiral = self.coinc_tables.coinc_rows(self.process_id, node.time_slide_id, events, seglists = snr_segments)

			# some tasks for zero-lag candidates

			if node.is_zero_lag:
				# populate ranking statistic's zero-lag
				# PDFs with triggers from all zero-lag
				# candidates

				for event in events:
					rankingstat.zerolag.increment(event)

			# latency goes in minimum_duration column.  NOTE:
			# latency is nonsense unless running live.  FIXME:
			# add a proper column for latency

			coinc_inspiral.minimum_duration = gps_time_now - float(coinc_inspiral.end)

			# finally, append coinc to tables

			if cluster:
				max_last_coinc_snr.setdefault(node, None)
				if max_last_coinc_snr[node] is None or coinc_inspiral.snr > max_last_coinc_snr[node][3].snr:
					max_last_coinc_snr[node] = (events, coinc, coincmaps, coinc_inspiral)
			else:
				self.coinc_tables.append_coinc(coinc, coincmaps, coinc_inspiral)

				# add events to the zero-lag ranking
				# statistic histogram

				if zerolag_rankingstatpdf is not None and coinc.likelihood is not None:
					zerolag_rankingstatpdf.zero_lag_lr_lnpdf.count[coinc.likelihood,] += 1

				self.last_coincs.add(events, coinc, coincmaps, coinc_inspiral)


		for node in max_last_coinc_snr:
			if max_last_coinc_snr[node] is not None:
				events, coinc, coincmaps, coinc_inspiral = max_last_coinc_snr[node]
				# assign ranking statistic, FAP and FAR
				if self.ln_lr_from_triggers is not None:
					coinc.likelihood = self.ln_lr_from_triggers(events, node.offset_vector)
					if fapfar is not None:
						# FIXME:  add proper columns to
						# store these values in
						coinc_inspiral.combined_far = fapfar.far_from_rank(coinc.likelihood) * FAR_trialsfactor
						if len(events) == 1 and cap_singles and coinc_inspiral.combined_far < 1. / fapfar.livetime:
							coinc_inspiral.combined_far = 1. / fapfar.livetime	
						coinc_inspiral.false_alarm_rate = fapfar.fap_from_rank(coinc.likelihood)
				if zerolag_rankingstatpdf is not None and coinc.likelihood is not None:
					zerolag_rankingstatpdf.zero_lag_lr_lnpdf.count[coinc.likelihood,] += 1

				self.coinc_tables.append_coinc(coinc, coincmaps, coinc_inspiral)
				self.last_coincs.add(events, coinc, coincmaps, coinc_inspiral)
				self.sngl_inspiral_table.extend([sngl_trigger for sngl_trigger in events if sngl_trigger.event_id not in self.clustered_sngl_ids])
				self.clustered_sngl_ids |= set(e.event_id for e in events)


		# add selected singles to the noise model

		if flushed:
			# times when at least 2 instruments were generating
			# SNR.  used to select zero-lag singles for
			# inclusion in the denominator.

			two_or_more_instruments = segmentsUtils.vote(snr_segments.values(), 2)
			# FIXME:  this is needed to work around rounding
			# problems in safety checks below, trying to
			# compare GPS trigger times to float segment
			# boundaries (the boundaries don't have enough
			# precision to know if triggers near the edge are
			# in or out).  it would be better not to have to
			# screw around like this.
			two_or_more_instruments.protract(1e-3)  # 1 ms

			for event in self.backgroundcollector.pull(rankingstat.snr_min, two_or_more_instruments, flushed):
				rankingstat.denominator.increment(event)

		# add any triggers that have been used in coincidences for
		# the first time to the sngl_inspiral table
		# FIXME:  because this information comes from the
		# coincidence code, which is not aware of the clustering,
		# we record a lot of singles that aren't really used for
		# any (retained) coincs.

		if not cluster:
			self.sngl_inspiral_table.extend(newly_reported)

		# save all sngls above the requested sngls SNR threshold.
		# all sngls that participated in coincs are already in the
		# document, so only need to check for ones being flushed
		# and that were never used.

		if self.sngls_snr_threshold is not None:
			self.sngl_inspiral_table.extend(event for event in flushed_unused if event.snr >= self.sngls_snr_threshold)

		# return the triggers that have been flushed
		return flushed
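One small, reusable piece of the method above is how it timestamps candidates for the latency calculation: the current wall-clock time is converted to GPS seconds by passing the UTC `struct_time` from `time.gmtime()` to `lal.UTCToGPS`. A minimal sketch, assuming `lal` and `time` are imported as in the surrounding module:

import time
import lal

# current wall-clock time, expressed as GPS seconds
gps_time_now = float(lal.UTCToGPS(time.gmtime()))
print(gps_time_now)

# in the method above, the latency stored in minimum_duration is then
# gps_time_now minus the candidate's GPS end time, float(coinc_inspiral.end)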
Example #7
def find_daily_cache(start,
                     end,
                     ifo,
                     clustering=None,
                     check_files=False,
                     **kwargs):
    """Find Daily ihope files from the daily runs for the given span

    @param start
        GPS start time for search
    @param end
        GPS end time for search
    @param ifo
        observatory for search
    @param clustering
        tag for clustering stage to search, default: unclustered
    @param check_files
        check that the returned files can be read on disk, default False
    @param kwargs
        additional keyword arguments: 'directory' overrides the default
        base path of the daily ihope archive
    """
    out = Cache()

    # set clustering tag
    if clustering is None or clustering.upper() == 'UNCLUSTERED':
        file_tag = 'INSPIRAL_UNCLUSTERED'
    elif clustering.upper() in ["100MS", "100MILLISEC"]:
        file_tag = 'INSPIRAL_100MILLISEC_CLUSTERED'
    elif clustering.upper() in ["30MS", "30MILLISEC"]:
        file_tag = 'INSPIRAL_30MILLISEC_CLUSTERED'
    elif clustering.upper() in ["16S", "16SECOND"]:
        file_tag = 'INSPIRAL_16SEC_CLUSTERED'
    else:
        raise ValueError("unrecognized clustering tag %r" % clustering)

    # set base directory
    directory = kwargs.pop("directory", os.path.expanduser("~cbc/ihope_daily"))

    # work out days
    span = Segment(start, end)
    start = int(start)
    start_d = lal.UTCToGPS(
        datetime(*lal.GPSToUTC(start)[:6]).replace(hour=0, minute=0,
                                                   second=0).timetuple())
    days = []
    day = start_d
    while day <= end:
        days.append(day)
        day += 86400

    # optimise
    append = out.append
    splitext = os.path.splitext
    isfile = os.path.isfile
    pjoin = os.path.join
    intersects = span.intersects
    from_T050017 = CacheEntry.from_T050017

    # loop over days gathering files
    for day in days:
        utc = datetime(*lal.GPSToUTC(day)[:6])
        day_path = pjoin(directory, utc.strftime("%Y%m"),
                         utc.strftime("%Y%m%d"))
        day_cache = os.path.join(day_path, "%s-%s.cache" % (ifo, file_tag))
        if isfile(day_cache):
            with open(day_cache, "r") as f:
                filenames = Cache.fromfile(f).pfnlist()
        else:
            filenames = glob(
                os.path.join(day_path, ("%s-%s-*.xml.gz" % (ifo, file_tag))))
        for filename in filenames:
            e = from_T050017(filename)
            if intersects(e.segment):
                append(e)

    out.sort(key=lambda e: e.path)
    return out
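The start_d computation above truncates the GPS start time to UTC midnight by round-tripping through `datetime`, the same operation `utc_midnight` in Example #3 performs by zeroing the 'tm' fields directly. A standalone sketch of that idiom, assuming `lal` is importable; the GPS/UTC pair (989168284 is 2011-05-11 16:57:49 UTC) comes from the 'tm' struct test earlier in this listing.

import lal
from datetime import datetime

start = 989168284   # 2011-05-11 16:57:49 UTC
start_d = lal.UTCToGPS(
    datetime(*lal.GPSToUTC(start)[:6]).replace(hour=0, minute=0,
                                               second=0).timetuple())
print(lal.GPSToUTC(start_d)[:6])   # expected (2011, 5, 11, 0, 0, 0)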