Example #1
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
         from ligo.gracedb.exceptions import HTTPError
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab' %
                   str(e), )
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % service_url)
     try:
         self.events[None] = list(connection.superevents(self.query))
         self._query_type = "S"
     except HTTPError:
         self.events[None] = list(connection.events(self.query))
         event_method = connection.event
         eventid_name = "graceid"
         self._query_type = "E"
     else:
         event_method = connection.superevent
         eventid_name = "superevent_id"
         for event in self.events[None]:  # get preferred event parameters
             event.update(
                 connection.event(event["preferred_event"], ).json())
     vprint("Recovered %d events for query %r\n" %
            (len(self.events[None]), self.query))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(
                 event_method(e[eventid_name]).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #2
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
         from ligo.gracedb.exceptions import HTTPError
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab'
                   % str(e),)
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % service_url)
     try:
         self.events[None] = list(connection.superevents(self.query))
         self._query_type = "S"
     except HTTPError:
         self.events[None] = list(connection.events(self.query))
         event_method = connection.event
         eventid_name = "graceid"
         self._query_type = "E"
     else:
         event_method = connection.superevent
         eventid_name = "superevent_id"
         for event in self.events[None]:  # get preferred event parameters
             event.update(connection.event(
                 event["preferred_event"],
             ).json())
     vprint("Recovered %d events for query %r\n"
            % (len(self.events[None]), self.query))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(event_method(
                 e[eventid_name]).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #3
def test_superevents():
    """Basic functionality test of ligo-gracedb

    Connect to the default server, and print the IDs of the first
    10 superevents with a FAR < 1e9

    Notes
    -----
    The whole function needs to be protected against a RequestException
    because there is no network activity until the first superevent
    is pulled out of the ``events`` generator.
    """
    conn = GraceDb(force_noauth=True)
    events = conn.superevents(
        "far<1e9",
        columns=[
            "superevent_id",
            "gw_id",
        ],
    )
    for i, event in enumerate(events):
        if i >= 10:  # stop after 10
            break
        print(
            event["superevent_id"],
            event["gw_id"],
        )
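The protection the docstring above asks for might look like the sketch below. This is only an illustration, assuming the client's HTTP layer raises requests.exceptions.RequestException (true for ligo-gracedb 2.x, which is built on requests); because superevents() returns a lazy generator, the try block must wrap the iteration itself, not just the call.

import sys

import requests
from ligo.gracedb.rest import GraceDb


def test_superevents_protected():
    conn = GraceDb(force_noauth=True)
    events = conn.superevents("far<1e9", columns=["superevent_id", "gw_id"])
    try:
        # no request is sent until the first item is pulled from the generator
        for i, event in enumerate(events):
            if i >= 10:  # stop after 10
                break
            print(event["superevent_id"], event["gw_id"])
    except requests.exceptions.RequestException as exc:
        print("GraceDB query failed: %s" % exc, file=sys.stderr)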
Example #4
def get_event(graceid, ifos=('H1', 'L1')):
    """
    Get event from GraceDb.
    """
    client = GraceDb()
    event = client.event(graceid).json()
    event_dict = {}
    # Coincident detection attributes
    coinc_insp = event['extra_attributes']['CoincInspiral']
    instruments = event['instruments'].split(',')
    mchirp = coinc_insp['mchirp']
    coinc_end_time = coinc_insp['end_time'] + float(
        coinc_insp['end_time_ns']) * 1e-9
    coinc_template_duration = estimate_duration(mchirp)
    coinc_start_time = coinc_end_time - coinc_template_duration
    coinc_dict = {
        'graceid': graceid,
        'mchirp': mchirp,
        'start_time': coinc_start_time,
        'end_time': coinc_end_time,
        'template_duration': coinc_template_duration
    }
    # Single detection attributes
    for i, ifo in enumerate(instruments):
        sngl_insp = event['extra_attributes']['SingleInspiral'][i]
        end_time = sngl_insp['end_time'] + float(
            sngl_insp['end_time_ns']) * 1e-9
        start_time = end_time - sngl_insp['template_duration']
        sngl_dict = {
            'graceid': graceid,
            'mchirp': mchirp,
            'm1': sngl_insp['mass1'],
            'm2': sngl_insp['mass2'],
            's1z': sngl_insp['spin1z'],
            's2z': sngl_insp['spin2z'],
            'start_time': start_time,
            'end_time': end_time,
            'template_duration': sngl_insp['template_duration']
        }
        event_dict[ifo] = sngl_dict
    missing_ifos = sorted(set(ifos) - set(instruments))
    if len(missing_ifos) == len(ifos):
        # All ifos missing, use coinc attributes only
        for ifo in missing_ifos:
            event_dict[ifo] = coinc_dict.copy()
    elif 0 < len(missing_ifos) < len(ifos):
        # Some but not all ifos are missing; reuse an existing ifo's
        # attributes for the missing ones
        existing_ifo = list(set(instruments) - set(missing_ifos))[0]
        for ifo in missing_ifos:
            event_dict[ifo] = event_dict[existing_ifo].copy()
    return event_dict
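estimate_duration() is not shown in this snippet. A plausible stand-in, assuming it is meant to approximate the in-band signal length, is the standard Newtonian chirp time from a fiducial low-frequency cutoff; the constants and the 20 Hz default below are assumptions, not the project's actual implementation.

import math

G = 6.674e-11      # gravitational constant, m^3 kg^-1 s^-2
C = 2.998e8        # speed of light, m s^-1
MSUN = 1.989e30    # solar mass, kg

def estimate_duration(mchirp, f_low=20.0):
    """Newtonian time to coalescence from f_low (Hz) for a chirp mass
    given in solar masses (hypothetical stand-in for the real helper)."""
    mc_seconds = G * mchirp * MSUN / C ** 3  # chirp mass in seconds
    return (5.0 / 256.0) * mc_seconds ** (-5.0 / 3.0) \
        * (math.pi * f_low) ** (-8.0 / 3.0)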
Example #5
def gracedb_to_json(gracedb,
                    cred=None,
                    service_url='https://gracedb.ligo.org/api/',
                    outdir=None):
    """ Script to download a GraceDB candidate

    Parameters
    ----------
    gracedb: str
        The UID of the GraceDB candidate
    cred:
        Credentials for authentication; see ligo.gracedb.rest.GraceDb
    service_url:
        The url of the GraceDB candidate
        GraceDB 'https://gracedb.ligo.org/api/' (default)
        GraceDB-playground 'https://gracedb-playground.ligo.org/api/'
    outdir: str, optional
        If given, a string identifying the location in which to store the json
    """
    logger.info(
        'Starting routine to download GraceDb candidate {}'.format(gracedb))
    from ligo.gracedb.rest import GraceDb

    logger.info('Initialise client and attempt to download')
    logger.info('Fetching from {}'.format(service_url))
    try:
        client = GraceDb(cred=cred, service_url=service_url)
    except IOError:
        raise ValueError(
            'Failed to authenticate with gracedb: check your X509 '
            'certificate is accessible and valid')
    try:
        candidate = client.event(gracedb)
        logger.info('Successfully downloaded candidate')
    except Exception as e:
        raise ValueError(
            "Unable to obtain GraceDB candidate, exception: {}".format(e))

    json_output = candidate.json()

    if outdir is not None:
        check_directory_exists_and_if_not_mkdir(outdir)
        outfilepath = os.path.join(outdir, '{}.json'.format(gracedb))
        logger.info('Writing candidate to {}'.format(outfilepath))
        with open(outfilepath, 'w') as outfile:
            json.dump(json_output, outfile, indent=2)

    return json_output
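A usage sketch for the helper above; the UID 'G184098' is only an illustrative placeholder and the default production service_url is used.

# 'G184098' is a placeholder UID; 'far' is a standard field of the event JSON
candidate = gracedb_to_json('G184098', outdir='candidates')
print(candidate['graceid'], candidate['far'])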
Example #6
def main():
    parser = argparse.ArgumentParser(
        description="Download skymaps from a list of events")
    parser.add_argument(
        "event",
        nargs="+",
        help="A list of gravitational-wave events; each can be either a GID "
             "for a GW event or an SID for a superevent")
    parser.add_argument("--bayestar",
                        action="store_true",
                        help="Use bayestar skymap only")
    parser.add_argument("--verbose",
                        action="store_true",
                        help="Be very verbose")
    args = parser.parse_args()
    # FIXME Make sure that you have a valid proxy
    client = GraceDb()

    for event_id in args.event:
        try:
            download_skymap(event_id,
                            client,
                            args,
                            use_bayestar_only=args.bayestar)
        except Exception:
            if args.verbose:
                print("Failed to download the skymap for {}".format(event_id),
                      file=sys.stderr)
Example #7
    def __init__(self,
                 t0,
                 eventDicts,
                 grouperWin,
                 win,
                 targetRate,
                 group,
                 pipeline,
                 search=None,
                 requireManualReset=False,
                 conf=0.9,
                 graceDB_url='https://gracedb.ligo.org/api/'):
        self.eventDicts = eventDicts  ### pointer to the dictionary of event dicts, needed for determining number of triggers with different gpstimes
        ### record data about the pipeline (equivalently, the lvalert node)
        self.group = group
        self.pipeline = pipeline
        self.search = search

        ### set self.graceid for easy lookup and automatic management
        self.graceid = generate_ThrottleKey(group, pipeline, search)

        self.description = "a throttle on the events approval processor will react to from %s" % (
            self.graceid)

        self.events = []  ### list managed by Throttle task

        self.win = win  ### the window over which we track events
        self.targetRate = targetRate  ### the target rate at which we expect events
        self.conf = conf  ### determines the upper limit on the acceptable number of events in win via a poisson one-sided confidence interval

        self.computeNthr()  ### sets self.Nthr

        self.graceDB = GraceDb(graceDB_url)

        tasks = [
            Throttle(self.events,
                     eventDicts,
                     grouperWin,
                     win,
                     self.Nthr,
                     requireManualReset=requireManualReset
                     )  ### there is only one task!
        ]
        super(PipelineThrottle, self).__init__(t0, tasks)  ### delegate to parent
Example #8
 def __init__(self,
              events,
              eventDicts,
              timeout,
              graceDB_url='https://gracedb.ligo.org/api'):
     self.events = events  ### shared reference to events tracked within Grouper QueueItem
     self.eventDicts = eventDicts  ### shared reference pointing to the local data about events
     self.graceDB = GraceDb(graceDB_url)
     super(DefineGroup, self).__init__(timeout)
Example #9
def main():
	parser = argparse.ArgumentParser(description="Download skymaps from a list of events")
	parser.add_argument("event", nargs="+", help="A list of gravitational-wave events; each can be either a GID for a GW event or an SID for a superevent")
	parser.add_argument("--verbose", action="store_true", help="Be very verbose")
	args = parser.parse_args()
	# FIXME Make sure that you have a valid proxy
	client = GraceDb()

	for event_id in args.event:
		download_skymap(event_id, client, args)
Example #10
def initGraceDb(url):
    '''
    A method that decides whether we want an actual instance of GraceDb or an
    instance of FakeDb, based on the url. Currently that's done by requiring
    the url to begin with 'http' for real GraceDb instances; otherwise we try
    to set up FakeDb, in which case we expect url to be a path.
    '''
    if url.startswith('http'):  ### could be fragile...
        return GraceDb(url)
    else:
        return FakeDb(url)  ### expects url to be a path
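A usage sketch of the dispatch above; the local path is hypothetical.

real = initGraceDb('https://gracedb.ligo.org/api/')  # 'http' prefix -> real GraceDb client
fake = initGraceDb('/home/albert.einstein/fake_db')  # anything else -> FakeDb over a local path (hypothetical)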
Example #11
class VOEventFromEventId(VOEventFromXml):
    def __init__(self):
        self._client = GraceDb()
        self.event_id = ""
        super().__init__()

    def get(self, event_id: str):
        self.event_id = event_id
        voevents = self._get_voevents_json(event_id)
        voevents = self._sort_voevents_newest_first(voevents)
        xml = self._try_get_latest_voevent(voevents)
        super().get(xml)

    def _get_voevents_json(self, event_id: str) -> List[Dict]:
        response = self._client.voevents(event_id)
        json_voevents = response.json()["voevents"]

        return json_voevents

    def _sort_voevents_newest_first(self, voevents_json):
        voevents_json.sort(key=lambda x: x["N"], reverse=True)

        return voevents_json

    def _try_get_latest_voevent(self, voevents: List[Dict[Any, Any]]):
        # For event S190517h the file 'S190517h-3-Initial.xml' was in the
        # voevent file list, but that file doesn't exist. Therefore we loop
        # over all entries until an existing file is found.
        for voevent in voevents:
            url = voevent["links"]["file"]
            try:
                xml = self._client.get(url)
                return xml
            except HTTPError:
                if voevent["N"] == 1:
                    logging.error(f"Can't find VOEvent for event {self.event_id}")
                    raise
                else:
                    logging.warning(f"Failed to get voevent from {url}")

        return ""
Example #12
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab'
                   % str(e),)
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % connection.service_url)
     querystr = '%s %d .. %d' % (self.query, self.start, self.end)
     self.events[None] = list(connection.events(querystr))
     vprint("Recovered %d events for query %r\n"
            % (len(self.events[None]), querystr))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(connection.event(
                 e['graceid']).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #13
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab' %
                   str(e), )
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % connection.service_url)
     querystr = '%s %d .. %d' % (self.query, self.start, self.end)
     self.events[None] = list(connection.events(querystr))
     vprint("Recovered %d events for query %r\n" %
            (len(self.events[None]), querystr))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(
                 connection.event(e['graceid']).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #14
    def get_gracedb(self, gfile, destination):
        """
        Get a file from Gracedb, and store it in the event repository.

        Parameters
        ----------
        gfile : str
           The name of the gracedb file, e.g. `coinc.xml`.
        destination : str
           The location in the repository for this file.
        """


        gid = self.meta['gid']
        client = GraceDb(service_url=config.get("gracedb", "url"))
        file_obj = client.files(gid, gfile)

        with open("download.file", "w") as dest_file:
            dest_file.write(file_obj.read().decode())

        self.repository.add_file("download.file", destination,
                                 commit_message = f"Downloaded {gfile} from GraceDB")
Example #15
    def upload(self, fname, psds, low_frequency_cutoff, testing=True):
        """Upload this trigger to gracedb

        Parameters
        ----------
        fname: str
            The name to give the xml file associated with this trigger
        psds: dict of pycbc.types.FrequencySeries
            An ifo-keyed dictionary of psds to be uploaded in association
            with this trigger.
        low_frequency_cutoff: float
            The low frequency cutoff of the psds.
        testing: bool
            Switch to determine if the upload should be sent to gracedb as a
            test trigger (True) or a production trigger (False)
        """
        from ligo.gracedb.rest import GraceDb
        import lal
        import lal.series

        self.save(fname)
        if testing:
            group = 'Test'
        else:
            group = 'CBC'

        gracedb = GraceDb()
        r = gracedb.createEvent(group, "pycbc", fname, "AllSky").json()
        logging.info("Uploaded event %s.", r["graceid"])

        if self.is_hardware_injection:
            gracedb.writeLabel(r['graceid'], 'INJ')
            logging.info("Tagging event %s as an injection", r["graceid"])

        # Convert our psds to the xml psd format.
        # FIXME: we should not use lal.series!!!
        psds_lal = {}
        for ifo in psds:
            psd = psds[ifo]
            kmin = int(low_frequency_cutoff / psd.delta_f)
            fseries = lal.CreateREAL8FrequencySeries(
                "psd", psd.epoch, low_frequency_cutoff, psd.delta_f,
                lal.StrainUnit**2 / lal.HertzUnit,
                len(psd) - kmin)
            fseries.data.data = psd.numpy()[kmin:] / pycbc.DYN_RANGE_FAC**2.0
            psds_lal[ifo] = fseries

        psd_xmldoc = lal.series.make_psd_xmldoc(psds_lal)
        ligolw_utils.write_filename(psd_xmldoc, "tmp_psd.xml.gz", gz=True)
        gracedb.writeLog(r["graceid"],
                         "PyCBC PSD estimate from the time of event",
                         "psd.xml.gz",
                         open("tmp_psd.xml.gz", "rb").read(), "psd").json()
        logging.info("Uploaded file psd.xml.gz to event %s.", r["graceid"])
Example #16
def gracedb_to_json(gracedb, outdir=None):
    """ Script to download a GraceDB candidate

    Parameters
    ----------
    gracedb: str
        The UID of the GraceDB candidate
    outdir: str, optional
        If given, a string identifying the location in which to store the json
    """
    logger.info(
        'Starting routine to download GraceDb candidate {}'.format(gracedb))
    from ligo.gracedb.rest import GraceDb
    import urllib3

    logger.info('Initialise client and attempt to download')
    try:
        client = GraceDb()
    except FileNotFoundError:
        raise ValueError(
            'Failed to authenticate with gracedb: check your X509 '
            'certificate is accessible and valid')
    try:
        candidate = client.event(gracedb)
        logger.info('Successfully downloaded candidate')
    except urllib3.exceptions.HTTPError:
        raise ValueError("No candidate found")

    json_output = candidate.json()

    if outdir is not None:
        check_directory_exists_and_if_not_mkdir(outdir)
        outfilepath = os.path.join(outdir, '{}.json'.format(gracedb))
        logger.info('Writing candidate to {}'.format(outfilepath))
        with open(outfilepath, 'w') as outfile:
            json.dump(json_output, outfile, indent=2)

    return json_output
Example #17
    def upload(self, fname, psds, low_frequency_cutoff, testing=True):
        """Upload this trigger to gracedb

        Parameters
        ----------
        fname: str
            The name to give the xml file associated with this trigger
        psds: dict of pycbc.types.FrequencySeries
            An ifo-keyed dictionary of psds to be uploaded in association
            with this trigger.
        low_frequency_cutoff: float
            The low frequency cutoff of the psds.
        testing: bool
            Switch to determine if the upload should be sent to gracedb as a
            test trigger (True) or a production trigger (False)
        """
        from ligo.gracedb.rest import GraceDb
        import lal
        import lal.series

        self.save(fname)
        if testing:
            group = 'Test'
        else:
            group = 'CBC'

        gracedb = GraceDb()
        r = gracedb.createEvent(group, "pycbc", fname, "AllSky").json()
        logging.info("Uploaded event %s.", r["graceid"])

        if self.is_hardware_injection:
            gracedb.writeLabel(r['graceid'], 'INJ')
            logging.info("Tagging event %s as an injection", r["graceid"])

        # Convert our psds to the xml psd format.
        # FIXME: we should not use lal.series!!!
        psds_lal = {}
        for ifo in psds:
            psd = psds[ifo]
            kmin = int(low_frequency_cutoff / psd.delta_f)
            fseries = lal.CreateREAL8FrequencySeries(
                "psd", psd.epoch, low_frequency_cutoff, psd.delta_f,
                lal.StrainUnit**2 / lal.HertzUnit, len(psd) - kmin)
            fseries.data.data = psd.numpy()[kmin:] / pycbc.DYN_RANGE_FAC ** 2.0
            psds_lal[ifo] = fseries

        psd_xmldoc = lal.series.make_psd_xmldoc(psds_lal)
        ligolw_utils.write_filename(psd_xmldoc, "tmp_psd.xml.gz", gz=True)
        gracedb.writeLog(r["graceid"],
                         "PyCBC PSD estimate from the time of event",
                         "psd.xml.gz", open("tmp_psd.xml.gz", "rb").read(),
                         "psd").json()
        logging.info("Uploaded file psd.xml.gz to event %s.", r["graceid"])
Example #18
def get_data(query: str):
    grace_client = GraceDb()
    # Example query strings: "gid: GW150914", "GW150914", "is_gw: True", "is_gw"
    # events() returns a lazy iterator; materialise it once, because counting
    # it first would exhaust the iterator before the loop below runs.
    events = list(grace_client.events(query))

    print(len(events))

    results = {}
    for event in events:
        grace_id = event.get(GRACE_ID_KEY)  # GRACE_ID_KEY: module constant, presumably "graceid"
        results.update({grace_id: event})

    return results
Example #19
def histogram(request):
    # if this is a GET request we need to process the form data
    if request.method == 'GET':

        # create a form instance and populate it with data from the request:
        form = PosteriorForm(request.GET)
        # check whether it's valid:
        if form.is_valid():
            graceid = form.cleaned_data['graceid']
            param1 = form.cleaned_data['param1']
            param2 = form.cleaned_data['param2']
            param1_min = form.cleaned_data['param1_min']
            param1_max = form.cleaned_data['param1_max']
            param2_min = form.cleaned_data['param2_min']
            param2_max = form.cleaned_data['param2_max']
            client = GraceDb("https://gracedb-playground.ligo.org/api/")
            #event = client.event(graceid)
            #filename = client.files(graceid, 'event.log')
            ps = EventTable.fetch(
                'gravityspy',
                '\"{0}\"'.format(graceid),
                selection=[
                    '{0}<{1}<{2}'.format(param1_min, param1, param1_max),
                    '{0}<{1}<{2}'.format(param2_min, param2, param2_max)
                ],
                columns=[param1, param2])
            ps = ps.to_pandas().iloc[0:1000]

            with seaborn.axes_style('white'):
                plot = seaborn.jointplot(param1, param2, ps, kind='kde')

            fig = plot.fig
            canvas = FigureCanvas(fig)

            import io
            buf = io.BytesIO()
            canvas.print_png(buf)
            response = HttpResponse(buf.getvalue(), content_type='image/png')
            fig.clear()
            return response
Example #20
def posteriors(request):
    # if this is a GET request we need to process the form data
    if request.method == 'GET':

        # create a form instance and populate it with data from the request:
        form = PosteriorForm(request.GET)
        # check whether it's valid:
        if form.is_valid():
            graceid = form.cleaned_data['graceid']
            param1 = form.cleaned_data['param1']
            param2 = form.cleaned_data['param2']
            param1_min = form.cleaned_data['param1_min']
            param1_max = form.cleaned_data['param1_max']
            param2_min = form.cleaned_data['param2_min']
            param2_max = form.cleaned_data['param2_max']
            client = GraceDb("https://gracedb-playground.ligo.org/api/")
            #event = client.event(graceid)
            #filename = client.files(graceid, 'event.log')
            ps = EventTable.fetch(
                'gravityspy',
                '\"{0}\"'.format(graceid),
                selection=[
                    '{0}<{1}<{2}'.format(param1_min, param1, param1_max),
                    '{0}<{1}<{2}'.format(param2_min, param2, param2_max)
                ],
                columns=[param1, param2])
            ps = ps.to_pandas().iloc[0:1000]
            old = 'posteriors'
            new = 'histogram'
            histogramurl = (request.get_full_path()[::-1].replace(
                old[::-1], new[::-1], 1))[::-1]

            return render(
                request, 'gracedb.html', {
                    'results': ps.iloc[0:1000].to_dict(orient='records'),
                    'histogramurl': histogramurl
                })
        else:
            return render(request, 'form.html', {'form': form})
Example #21
    def upload(self, fname, gracedb_server=None, testing=True,
               extra_strings=None):
        """Upload this trigger to gracedb

        Parameters
        ----------
        fname: str
            The name to give the xml file associated with this trigger
        gracedb_server: string, optional
            URL to the GraceDB web API service for uploading the event.
            If omitted, the default will be used.
        testing: bool
            Switch to determine if the upload should be sent to gracedb as a
            test trigger (True) or a production trigger (False).
        extra_strings: list of str, optional
            Additional messages to post to the GraceDB event log after upload.
        """
        from ligo.gracedb.rest import GraceDb

        # first of all, make sure the event is saved on disk
        # as GraceDB operations can fail later
        self.save(fname)

        if self.snr_series is not None:
            if fname.endswith('.xml.gz'):
                snr_series_fname = fname.replace('.xml.gz', '.hdf')
            else:
                snr_series_fname = fname.replace('.xml', '.hdf')
            for ifo in self.snr_series:
                self.snr_series[ifo].save(snr_series_fname,
                                          group='%s/snr' % ifo)
                self.psds[ifo].save(snr_series_fname,
                                    group='%s/psd' % ifo)

        gid = None
        try:
            # try connecting to GraceDB
            gracedb = GraceDb(gracedb_server) \
                    if gracedb_server is not None else GraceDb()

            # create GraceDB event
            group = 'Test' if testing else 'CBC'
            r = gracedb.createEvent(group, "pycbc", fname, "AllSky").json()
            gid = r["graceid"]
            logging.info("Uploaded event %s", gid)

            if self.is_hardware_injection:
                gracedb.writeLabel(gid, 'INJ')
                logging.info("Tagging event %s as an injection", gid)

            # upload PSDs. Note that the PSDs are already stored in the
            # original event file and we just upload a copy of that same file
            # here. This keeps things as they were in O2 and can be removed
            # after updating the follow-up infrastructure
            psd_fname = 'psd.xml.gz' if fname.endswith('.gz') else 'psd.xml'
            gracedb.writeLog(gid, "PyCBC PSD estimate from the time of event",
                             psd_fname, open(fname, "rb").read(), "psd")
            logging.info("Uploaded PSDs for event %s", gid)

            # add other tags and comments
            gracedb.writeLog(
                    gid, "Using PyCBC code hash %s" % pycbc_version.git_hash)

            extra_strings = [] if extra_strings is None else extra_strings
            for text in extra_strings:
                gracedb.writeLog(gid, text)

            # upload SNR series in HDF format
            if self.snr_series is not None:
                gracedb.writeFile(gid, snr_series_fname)
        except Exception as exc:
            logging.error('Something failed during the upload/annotation of '
                          'event %s on GraceDB. The event may not have been '
                          'uploaded!', fname)
            logging.error(str(exc))

        return gid
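A hedged usage sketch: 'cand' stands for an instance of whatever candidate class defines upload(); the playground URL and the log text are illustrative only.

import logging

gid = cand.upload('event.xml.gz',
                  gracedb_server='https://gracedb-playground.ligo.org/api/',
                  testing=True,
                  extra_strings=['Follow-up of a low-latency trigger'])
if gid is None:
    # upload() logs GraceDB errors itself and returns None; the event
    # is still saved locally in event.xml.gz
    logging.warning('GraceDB upload failed')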
Example #22
def setup_roq(cp):
    """
    Generates cp objects with the different ROQs applied
    """
    use_roq = False
    if cp.has_option('paths', 'roq_b_matrix_directory') or cp.has_option('paths', 'computeroqweights'):
        if not cp.has_option('analysis', 'roq'):
            print("Warning: If you are attempting to enable ROQ by specifying "
                  "roq_b_matrix_directory or computeroqweights, please use "
                  "analysis.roq in your config file in future. Enabling ROQ.")
            cp.set('analysis', 'roq', 'True')  # configparser values must be strings
    if not cp.getboolean('analysis', 'roq'):
        yield cp
        return  # in a generator, 'raise StopIteration()' is a RuntimeError under PEP 479
    from numpy import genfromtxt, array
    path=cp.get('paths','roq_b_matrix_directory')
    if not os.path.isdir(path):
        print("The ROQ directory %s does not seem to exist\n"%path)
        sys.exit(1)
    use_roq=True
    roq_paths=os.listdir(path)
    roq_params={}
    roq_force_flow = None

    if cp.has_option('lalinference','roq_force_flow'):
        roq_force_flow = cp.getfloat('lalinference','roq_force_flow')
        print("WARNING: Forcing the f_low to ", str(roq_force_flow), "Hz")
        print("WARNING: Overwriting user choice of flow, srate, seglen, and (mc_min, mc_max and q-min) or (mass1_min, mass1_max, mass2_min, mass2_max)")

    def key(item): # to order the ROQ bases
        return float(item[1]['seglen'])

    coinc_xml_obj = None
    row=None

    # Get file object of coinc.xml
    if opts.gid is not None:
        from ligo.gracedb.rest import GraceDb
        gid=opts.gid
        cwd=os.getcwd()
        if cp.has_option('analysis', 'service-url'):
            client = GraceDb(cp.get('analysis', 'service-url'))
        else:
            client = GraceDb()
        coinc_xml_obj = ligolw_utils.load_fileobj(client.files(gid, "coinc.xml"), contenthandler = lsctables.use_in(ligolw.LIGOLWContentHandler))[0]
    elif cp.has_option('input', 'coinc-xml'):
        coinc_xml_obj = ligolw_utils.load_fileobj(open(cp.get('input', 'coinc-xml'), "rb"), contenthandler = lsctables.use_in(ligolw.LIGOLWContentHandler))[0]

    # Get sim_inspiral from injection file
    if cp.has_option('input','injection-file'):
        print("Only 0-th event in the XML table will be considered while running with ROQ\n")
        row = lsctables.SimInspiralTable.get_table(
                  ligolw_utils.load_filename(cp.get('input','injection-file'),contenthandler=lsctables.use_in(ligolw.LIGOLWContentHandler))
              )[0]

    roq_bounds = pipe_utils.Query_ROQ_Bounds_Type(path, roq_paths)
    if roq_bounds == 'chirp_mass_q':
        print('ROQ has bounds in chirp mass and mass-ratio')
        mc_priors, trigger_mchirp = pipe_utils.get_roq_mchirp_priors(
            path, roq_paths, roq_params, key, coinc_xml_obj=coinc_xml_obj, sim_inspiral=row
        )
    elif roq_bounds == 'component_mass':
        print('ROQ has bounds in component masses')
        # get component mass bounds, then compute the chirp mass that can be safely covered
        # further below we pass along the component mass bounds to the sampler, not the tighter chirp-mass, q bounds
        m1_priors, m2_priors, trigger_mchirp = pipe_utils.get_roq_component_mass_priors(
            path, roq_paths, roq_params, key, coinc_xml_obj=coinc_xml_obj, sim_inspiral=row
        )
        mc_priors = {}
        for (roq,m1_prior), (roq2,m2_prior) in zip(m1_priors.items(), m2_priors.items()):
            mc_priors[roq] = sorted([pipe_utils.mchirp_from_components(m1_prior[1], m2_prior[0]), pipe_utils.mchirp_from_components(m1_prior[0], m2_prior[1])])

    if cp.has_option('lalinference','trigger_mchirp'):
        trigger_mchirp=float(cp.get('lalinference','trigger_mchirp'))
    roq_mass_freq_scale_factor = pipe_utils.get_roq_mass_freq_scale_factor(mc_priors, trigger_mchirp, roq_force_flow)
    if roq_mass_freq_scale_factor != 1.:
        print('WARNING: Rescaling ROQ basis, please ensure it is allowed with the model used.')

    # If the true chirp mass is unknown, add variations over the mass bins
    if opts.gid is not None or (opts.injections is not None or cp.has_option('input','injection-file')) or cp.has_option('lalinference','trigger_mchirp') or cp.has_option('input', 'coinc-xml'):

        for mc_prior in mc_priors:
            mc_priors[mc_prior] = array(mc_priors[mc_prior])
        # find mass bin containing the trigger
        trigger_bin = None
        for roq in roq_paths:
            if mc_priors[roq][0]*roq_mass_freq_scale_factor <= trigger_mchirp <= mc_priors[roq][1]*roq_mass_freq_scale_factor:
                trigger_bin = roq
                print('Prior in Mchirp will be ['+str(mc_priors[roq][0]*roq_mass_freq_scale_factor)+','+str(mc_priors[roq][1]*roq_mass_freq_scale_factor)+'] to contain the trigger Mchirp '+str(trigger_mchirp))
                break
        roq_paths = [trigger_bin]
    else:
        for mc_prior in mc_priors:
            mc_priors[mc_prior] = array(mc_priors[mc_prior])*roq_mass_freq_scale_factor

    # write the master configparser
    cur_basedir = cp.get('paths','basedir')
    masterpath=os.path.join(cur_basedir,'config.ini')
    with open(masterpath,'w') as cpfile:
        cp.write(cpfile)

    for roq in roq_paths:
        this_cp = configparser.ConfigParser()
        this_cp.optionxform = str
        this_cp.read(masterpath)
        basedir = this_cp.get('paths','basedir')
        for dirs in 'basedir','daglogdir','webdir':
            val = this_cp.get('paths',dirs)
            newval = os.path.join(val,roq)
            mkdirs(newval)
            this_cp.set('paths',dirs,newval)
        this_cp.set('paths','roq_b_matrix_directory',os.path.join(cp.get('paths','roq_b_matrix_directory'),roq))
        flow=roq_params[roq]['flow'] / roq_mass_freq_scale_factor
        srate=2.*roq_params[roq]['fhigh'] / roq_mass_freq_scale_factor
        #if srate > 8192:
        #    srate = 8192

        seglen=roq_params[roq]['seglen'] * roq_mass_freq_scale_factor
        # params.dat uses the convention q>1 so our q_min is the inverse of their qmax
        this_cp.set('engine','srate',str(srate))
        this_cp.set('engine','seglen',str(seglen))
        if this_cp.has_option('lalinference','flow'):
            tmp=this_cp.get('lalinference','flow')
            tmp=eval(tmp)
            ifos=tmp.keys()
        else:
            tmp={}
            ifos=eval(this_cp.get('analysis','ifos'))
        for i in ifos:
            tmp[i]=flow
            this_cp.set('lalinference','flow',str(tmp))
        if roq_bounds == 'chirp_mass_q':
            mc_min=mc_priors[roq][0]*roq_mass_freq_scale_factor
            mc_max=mc_priors[roq][1]*roq_mass_freq_scale_factor
            # params.dat uses the convention q>1 so our q_min is the inverse of their qmax
            q_min=1./float(roq_params[roq]['qmax'])
            this_cp.set('engine','chirpmass-min',str(mc_min))
            this_cp.set('engine','chirpmass-max',str(mc_max))
            this_cp.set('engine','q-min',str(q_min))
            this_cp.set('engine','comp-min', str(max(roq_params[roq]['compmin'] * roq_mass_freq_scale_factor, mc_min * pow(1+q_min, 1./5.) * pow(q_min, 2./5.))))
            this_cp.set('engine','comp-max', str(mc_max * pow(1+q_min, 1./5.) * pow(q_min, -3./5.)))
        elif roq_bounds == 'component_mass':
            m1_min = m1_priors[roq][0]
            m1_max = m1_priors[roq][1]
            m2_min = m2_priors[roq][0]
            m2_max = m2_priors[roq][1]
            this_cp.set('engine','mass1-min',str(m1_min))
            this_cp.set('engine','mass1-max',str(m1_max))
            this_cp.set('engine','mass2-min',str(m2_min))
            this_cp.set('engine','mass2-max',str(m2_max))
        yield this_cp
    return  # generator ends here; 'raise StopIteration()' would be a RuntimeError under PEP 479
Example #23
    alert = json.loads(alert_message)
    if alert["alert_type"] != "new":
        if opts.verbose:
            print "alert_type!=\"new\", skipping"
        sys.exit(0) ### not a new alert

    gdb_id = alert['uid']
    if opts.verbose:
        print "New event detectected : %s"%gdb_id

### set up the connection to gracedb
if opts.gracedb_url:
    if opts.verbose:
        print("connecting to GraceDb : %s" % opts.gracedb_url)
    gracedb = GraceDb(opts.gracedb_url)
else:
    if opts.verbose:
        print("connecting to GraceDb")
    gracedb = GraceDb()

try:
    gdb_entry = json.loads(gracedb.event(gdb_id).read())
except Exception:
    import traceback
    traceback.print_exc()
    sys.exit(1)

### get parameters about event type from gracedb
group = gdb_entry['group']
pipeline = gdb_entry['pipeline']
Example #24
state_vector_channel = {"L1": "L1:DCS-CALIB_STATE_VECTOR_C01",
                        "H1": "H1:DCS-CALIB_STATE_VECTOR_C01",
                        "V1": "V1:DQ_ANALYSIS_STATE_VECTOR"}

frametypes= {"L1": "L1_HOFT_CLEAN_SUB60HZ_C01",
             "H1": "H1_HOFT_CLEAN_SUB60HZ_C01",
             "V1": "V1Online"}

server = gitlab.gitlab.Gitlab(config.get("gitlab", "url"), private_token=config.get("gitlab", "token"))
repository = server.projects.get(config.get("olivaw", "tracking_repository"))

#
# Get GraceDB stuff
#
from ligo.gracedb.rest import GraceDb, HTTPError
client = GraceDb(service_url=config.get("gracedb", "url"))
r = client.ping()

#superevent_iterator = client.superevents('O3B_CBC_CATALOG')
#superevent_ids = [superevent['superevent_id'] for superevent in superevent_iterator]


CALIBRATION_NOTE = """
## Calibration envelopes
The following calibration envelopes have been found.
```yaml
---
{}
---
```
"""
Example #25
if tag and opts.gracedb_id:
    filetag = "_%s_%s" % (tag, opts.gracedb_id)
elif opts.gracedb_id:
    filetag = "_%s" % (opts.gracedb_id)
elif tag:
    filetag = "_%s" % (tag)
else:
    filetag = ""

realtimedir = config.get('general', 'realtimedir')
gdbdir = config.get('gdb general', 'main_gdb_dir')

if not opts.skip_gracedb_upload:
    if config.has_option('gdb general', 'gdb_url'):
        gracedb = GraceDb(config.get('gdb general', 'gdb_url'))
    else:
        gracedb = GraceDb()

if config.has_option(opts.classifier, 'plotting_label'):
    plotting_label = config.get(opts.classifier, 'plotting_label')
else:
    plotting_label = opts.classifier

#===================================================================================================

rank_channame = idq.channame(ifo, opts.classifier, "%s_rank" % tag)
fap_channame = idq.channame(ifo, opts.classifier, "%s_fap" % tag)
fapUL_channame = idq.channame(ifo, opts.classifier, "%s_fapUL" % tag)

#===================================================================================================
Example #26
	#Check if in online run mode
	if run_mode == 'Online':

		ifos = run_dic['coincidence'][coin_group]['ifos']
		segdir = run_dic['seg dir']
		
		if coin_mode == '0lag':		
			email_flag = run_dic['run mode']['email flag']
			if email_flag:
				email_addresses = run_dic['run mode']['email addresses']
			
			gdb_flag = run_dic['run mode']['gdb flag']
			if gdb_flag:
				if run_dic['run mode']['gdb server'] == 'Production':
					gdb = GraceDb()
				elif run_dic['run mode']['gdb server'] == 'Test':
					gdb = GraceDb('https://gracedb-test.ligo.org/api/')
		
		elif coin_mode == 'sig_train':
			bindir = run_dic['config']['LIB bin dir']
			LIB_window = run_dic['prior ranges']['LIB window']

		#===============================================================

		#Initialize dictionary
		dictionary = {}

		#Find trigtimes and timeslides and add to dictionary
		if coin_mode == '0lag':		
			timeslide_array = np.genfromtxt('%s/PostProc/LIB_trigs/%s/%s/LIB_0lag_timeslides_%s.txt'%(segdir,coin_group, coin_mode, coin_group)).reshape(-1,len(ifos))
Example #27
#!/usr/bin/env python3
from ligo.gracedb.rest import GraceDb

client = GraceDb()

# Retrieve an iterator for events matching a query.
events = client.events("gstlal ER5 far < 1.0e-4")

# For each event in the search results, add the graceid
# and chirp mass to a dictionary.
results = {}
for event in events:
    graceid = event["graceid"]
    mchirp = event["extra_attributes"]["CoincInspiral"]["mchirp"]
    results.update({graceid: mchirp})

# For each super event in the search results, add the superevent_id
# and chirp mass to a dictionary.
superevents = client.superevents("gstlal ER5 far < 1.0e-4")
s_results = {}
for superevent in superevents:
    superevent_id = superevent["superevent_id"]
    mchirp = superevent["extra_attributes"]["CoincInspiral"]["mchirp"]
    s_results.update({superevent_id: mchirp})
Example #28
if tag and opts.gracedb_id:
    filetag = "_%s_%s"%(tag, opts.gracedb_id)
elif opts.gracedb_id:
    filetag = "_%s"%(opts.gracedb_id)
elif tag:
    filetag = "_%s"%(tag)
else:
    filetag = ""

realtimedir = config.get('general','realtimedir')
gdbdir = config.get('gdb general','main_gdb_dir')

if not opts.skip_gracedb_upload:
    if config.has_option('gdb general', 'gdb_url'):
        gracedb = GraceDb(config.get('gdb general', 'gdb_url'))
    else:
        gracedb = GraceDb()

if config.has_option(opts.classifier, 'plotting_label'):
    plotting_label = config.get(opts.classifier, 'plotting_label')
else:
    plotting_label = opts.classifier

#===================================================================================================

rank_channame  = idq.channame(ifo, opts.classifier, "%s_rank"%tag)
fap_channame   = idq.channame(ifo, opts.classifier, "%s_fap"%tag)
fapUL_channame = idq.channame(ifo, opts.classifier, "%s_fapUL"%tag)

#===================================================================================================
Example #29
    type='string',
    help='url of GraceDB e.g. ')


(opts, args) = parser.parse_args()

if not opts.ifo:
    opts.ifo = input('ifo = ')

if opts.tag != '':
    opts.tag = opts.tag + '-'

if not opts.skip_gracedb_upload:
    # initialize instance of gracedb interface
    if opts.gdb_url:
        gracedb = GraceDb(opts.gdb_url)
    else:
        gracedb = GraceDb()
    # check that gracedb id is given
    if not opts.gracedb_id:
        print "GraceDB ID must be specified for enabling correct uploading of the data. Please use --gracedb-id option."
        sys.exit(1)



# #########################################
# ## Find relevant files
# #########################################

if opts.verbose:
    print('Finding relevant *glitch*.xml files')
gdbdir = config.get('gdb general','main_gdb_dir')

gwchannel = config.get('general', 'gwchannel')

if not opts.KWsignifThr:
    opts.KWsignifThr.append( config.getfloat('general', 'gw_kwsignif_thr') )

GWkwconfig = idq.loadkwconfig(config.get('data_discovery', 'GWkwconfig'))
GWkwbasename = GWkwconfig['basename']
GWgdsdir = config.get('data_discovery', 'GWgdsdir')

kwstride = int(float(GWkwconfig['stride']))

if not opts.skip_gracedb_upload:
    if config.has_option('gdb general', 'gdb_url'):
        gracedb = GraceDb(config.get('gdb general', 'gdb_url'))
    else:
        gracedb = GraceDb()

if config.has_option(opts.classifier, 'plotting_label'):
    plotting_label = config.get(opts.classifier, 'plotting_label')
else:
    plotting_label = opts.classifier

#===================================================================================================

### science segments
if opts.ignore_science_segments:
    if opts.verbose:
        print('analyzing data regardless of science segments')
    scisegs = [[opts.start, opts.end]]  ### set segs to be this stride range
    if "X509_USER_PROXY" in os.environ:
        del os.environ['X509_USER_PROXY']

    ### get cert and key from ini file
    robot_cert = config.get('ldg_certificate', 'robot_certificate')
    robot_key = config.get('ldg_certificate', 'robot_key')

    ### set cert and key
    os.environ['X509_USER_CERT'] = robot_cert
    os.environ['X509_USER_KEY'] = robot_key


### initialize instance of gracedb interface
if config.has_option("gdb general","gdb_url"):
    gdb_url = config.get('gdb general', 'gdb_url')
    gracedb = GraceDb(gdb_url)
else:
    gdb_url = None
    gracedb = GraceDb()

### connect to gracedb and get event gps time
try: 
    gdb_entry = json.loads(gracedb.event(gdb_id).read())
except:
    traceback.print_exc()
    logger.info("    Error: Connection to GraceDB failed!")
    logger.info("    Exiting.")
    sys.exit(1)

#========================
# get parameters about event type from gracedb
if not opts.ifo:
    opts.ifo = input('ifo = ')

if opts.tag:
    opts.tag = "_%s"%opts.tag
		
if (opts.plotting_gps_start is None) or (opts.plotting_gps_start > opts.start):
    opts.plotting_gps_start = opts.start

if (opts.plotting_gps_end is None) or (opts.plotting_gps_end < opts.end):
    opts.plotting_gps_end = opts.end

if not opts.skip_gracedb_upload:
    # initialize instance of gracedb interface
    if opts.gdb_url:
        gracedb = GraceDb(opts.gdb_url)
    else:
        gracedb = GraceDb()
    # check that gracedb id is given
    if not opts.gracedb_id:
        print "GraceDB ID must be specified for enabling correct uploading of the data. Please use --gracedb-id option."
        sys.exit(1)

#===================================================================================================

rank_channame = idq.channame(opts.ifo, opts.classifier, "%s_rank"%opts.tag)
fap_channame = idq.channame(opts.ifo, opts.classifier, "%s_fap"%opts.tag)
fapUL_channame = idq.channame(opts.ifo, opts.classifier, "%s_fapUL"%opts.tag)

#===================================================================================================
Example #33
def parseAlert(queue, queueByGraceID, alert, t0, config):
    '''
    the way approval_processorMP digests lvalerts

    --> check if this alert is a command and delegate to parseCommand

    1) instantiates GraceDB client
    2) pulls childConfig settings
    3) makes sure we have the logger
    4) get lvalert specifics
    5) ensure we have the event_dict for the graceid = lvalert['uid']
    6) take proper action depending on the lvalert info coming in and the current state of the event_dict
    '''

    #-------------------------------------------------------------------
    # process commands sent via lvalert_commandMP
    #-------------------------------------------------------------------

    if alert['uid'] == 'command':  ### this is a command message!
        return parseCommand(queue, queueByGraceID, alert, t0)  ### delegate to parseCommand and return

    #-------------------------------------------------------------------
    # extract relevant config parameters and set up necessary data structures
    #-------------------------------------------------------------------

    # instantiate GraceDB client from the childConfig
    client = config.get('general', 'client')
    g = GraceDb(client)

    # get other childConfig settings; save in configdict
    voeventerror_email = config.get('general', 'voeventerror_email')
    force_all_internal = config.get('general', 'force_all_internal')
    preliminary_internal = config.get('general', 'preliminary_internal')
    forgetmenow_timeout = config.getfloat('general', 'forgetmenow_timeout')
    approval_processorMPfiles = config.get('general',
                                           'approval_processorMPfiles')
    hardware_inj = config.get('labelCheck', 'hardware_inj')
    wait_for_hardware_inj = config.getfloat('labelCheck',
                                            'wait_for_hardware_inj')
    default_farthresh = config.getfloat('farCheck', 'default_farthresh')
    time_duration = config.getfloat('injectionCheck', 'time_duration')
    humanscimons = config.get('operator_signoffCheck', 'humanscimons')

    ### extract options about advocates
    advocates = config.get('advocate_signoffCheck', 'advocates')
    advocate_text = config.get('advocate_signoffCheck', 'advocate_text')
    advocate_email = config.get('advocate_signoffCheck', 'advocate_email')

    ### extract options for GRB alerts
    em_coinc_text = config.get('GRB_alerts', 'em_coinc_text')
    coinc_text = config.get('GRB_alerts', 'coinc_text')
    grb_email = config.get('GRB_alerts', 'grb_email')
    notification_text = config.get('GRB_alerts', 'notification_text')

    ### extract options about idq
    ignore_idq = config.get('idq_joint_fapCheck', 'ignore_idq')
    default_idqthresh = config.getfloat('idq_joint_fapCheck',
                                        'default_idqthresh')
    idq_pipelines = config.get('idq_joint_fapCheck', 'idq_pipelines')
    idq_pipelines = idq_pipelines.replace(' ', '')
    idq_pipelines = idq_pipelines.split(',')

    skymap_ignore_list = config.get('have_lvem_skymapCheck',
                                    'skymap_ignore_list')

    ### set up configdict (passed to local data structure: eventDicts)
    configdict = makeConfigDict(config)

    # set up logging
    ### FIXME: why not open the logger each time parseAlert is called?
    ###        that would allow you to better control which loggers are necessary and minimize the number of open files.
    ###        it also minimizes the possibility of something accidentally being written to loggers because they were left open.
    ###        what's more, this is a natural place to set up multiple loggers, one for all data and one for data pertaining only to this graceid

    global logger
    if 'logger' in globals():  # check to see if we have logger
        logger = globals()['logger']
    else:  # if not, set one up
        logger = loadLogger(config)
        logger.info(
            '\n{0} ************ approval_processorMP.log RESTARTED ************\n'
            .format(convertTime()))

    #-------------------------------------------------------------------
    # extract relevant info about this alert
    #-------------------------------------------------------------------

    # get alert specifics and event_dict information
    graceid = alert['uid']
    alert_type = alert['alert_type']
    description = alert['description']
    filename = alert['file']

    #-------------------------------------------------------------------
    # ensure we have an event_dict and ForgetMeNow tracking this graceid
    #-------------------------------------------------------------------

    if alert_type == 'new':  ### new event -> we must first create event_dict and set up ForgetMeNow queue item for G events

        ### create event_dict
        event_dict = EventDict()  # a new, blank instance of the EventDict class
        if is_external_trigger(alert):  # this is an external GRB trigger
            # populate this event_dict with grb trigger info from the lvalert
            event_dict.grb_trigger_setup(alert['object'], graceid, g, config, logger)
        else:
            # populate this event_dict with information from the lvalert
            event_dict.setup(alert['object'], graceid, configdict, g, config, logger)
        eventDicts[graceid] = event_dict  # add the instance to the global eventDicts
        eventDictionaries[graceid] = event_dict.data  # add the dictionary to the global eventDictionaries

        ### ForgetMeNow queue item
        item = ForgetMeNow(t0, forgetmenow_timeout, graceid, eventDicts, queue,
                           queueByGraceID, logger)
        queue.insert(item)  # add queue item to the overall queue

        ### set up queueByGraceID
        newSortedQueue = utils.SortedQueue()  # create sorted queue for event candidate
        newSortedQueue.insert(item)  # put ForgetMeNow queue item into the sorted queue
        queueByGraceID[item.graceid] = newSortedQueue  # add queue item to the queueByGraceID
        saveEventDicts(approval_processorMPfiles)  # trying to see if expirationtime is updated from None

        message = '{0} -- {1} -- Created event dictionary for {1}.'.format(
            convertTime(), graceid)
        if loggerCheck(event_dict.data, message) == False:
            logger.info(message)
            g.writeLog(graceid,
                       'AP: Created event dictionary.',
                       tagname='em_follow')

    else:  ### not a new alert -> we may already be tracking this graceid

        if graceid in eventDicts:  ### we're already tracking it

            # get event_dict with expirationtime key updated for the rest of parseAlert
            event_dict = eventDicts[graceid]

            # find ForgetMeNow corresponding to this graceid and update expiration time
            for item in queueByGraceID[graceid]:
                if item.name == ForgetMeNow.name:  # selects the queue item that is a ForgetMeNow instance
                    item.setExpiration(t0)  # updates the expirationtime key
                    queue.resort()  ### may be expensive, but needed to guarantee the queue remains sorted
                    queueByGraceID[graceid].resort()
                    break
            else:  ### we couldn't find a ForgetMeNow for this event! Something is wrong!
                os.system(
                    'echo \'ForgetMeNow KeyError\' | mail -s \'ForgetMeNow KeyError {0}\' {1}'
                    .format(graceid, advocate_email))
                ### Reed thinks this is necessary as a safety net: we want the
                ### process to terminate if things are not set up correctly, forcing us to fix it
                raise KeyError('could not find ForgetMeNow for %s' % graceid)

        else:  # event_dict for this candidate does not exist; create it with up-to-date information
            event_dict = EventDict()  # a new, blank instance of the EventDict class
            if is_external_trigger(alert):
                event_dict.grb_trigger_setup(next(g.events(graceid)), graceid, g, config, logger)
            else:
                # fill in event_dict using the queried event candidate dictionary
                event_dict.setup(next(g.events(graceid)), graceid, configdict, g, config, logger)
                event_dict.update()  # update the event_dict with signoffs and iDQ info
            eventDicts[graceid] = event_dict  # add this instance to the global eventDicts
            eventDictionaries[graceid] = event_dict.data  # add the dictionary to the global eventDictionaries

            # create ForgetMeNow queue item and add to overall queue and queueByGraceID
            item = ForgetMeNow(t0, forgetmenow_timeout, graceid, eventDicts,
                               queue, queueByGraceID, logger)
            queue.insert(item)  # add queue item to the overall queue

            ### set up queueByGraceID
            newSortedQueue = utils.SortedQueue()  # create sorted queue for new event candidate
            newSortedQueue.insert(item)  # put ForgetMeNow queue item into the sorted queue
            queueByGraceID[item.graceid] = newSortedQueue  # add queue item to the queueByGraceID

            message = '{0} -- {1} -- Created event dictionary for {1}.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Created event dictionary.',
                           tagname='em_follow')

    #--------------------
    # ignore alerts that are not relevant, like simulation or MDC events
    #--------------------

    # if the graceid starts with 'M' for MDCs or 'S' for Simulation, ignore
    if re.match('M', graceid) or re.match('S', graceid):  ### FIXME: we want to make this a config-file option!
        message = '{0} -- {1} -- Mock data challenge or simulation. Ignoring.'.format(
            convertTime(), graceid)
        if loggerCheck(event_dict.data, message) == False:
            logger.info(message)
            g.writeLog(graceid,
                       'AP: Mock data challenge or simulation. Ignoring.',
                       tagname='em_follow')
        else:
            pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    #--------------------
    # take care of external GRB triggers
    #--------------------
    if is_external_trigger(alert):  # for now, everything related to external triggers is grouped together below
        # we only care about labels, json file uploads, and log messages about possible coincidence with gravitational waves
        if alert_type == 'label':
            record_label(event_dict.data, description)
        if alert_type == 'update':
            # is this a comment containing coinc info that needs to be parsed?
            if 'comment' in alert['object']:
                comment = alert['object']['comment']
                if 'Significant event in on-source' in comment:  # got comment structure from Dipongkar
                    coinc_pipeline, coinc_fap = record_coinc_info(
                        event_dict.data, comment, alert, logger)
                    # begin creating the dictionary that will turn into json file
                    message_dict = {}
                    # populate text field for the GCN circular-to-be
                    message_dict['message'] = coinc_text.format(
                        graceid, coinc_fap)
                    message_dict['loaded_to_gracedb'] = 0
                    # make json string and write it to a temporary file
                    message_dict = json.dumps(message_dict)
                    with open('/tmp/coinc_{0}.json'.format(graceid), 'w') as tmpfile:
                        tmpfile.write(message_dict)
                    # make sure to load with a comment that we look for to check off that it's been loaded into gracedb
                    # was it an online or offline pipeline?
                    if 'Online' in coinc_pipeline:
                        event_dict.data['grb_online_json'] = message_dict
                        g.writeLog(
                            graceid,
                            'GRB-GW Coincidence JSON file: grb_online_json',
                            '/tmp/coinc_{0}.json'.format(graceid),
                            tagname='em_follow')
                    elif 'Offline' in coinc_pipeline:
                        event_dict.data['grb_offline_json'] = message_dict
                        g.writeLog(
                            graceid,
                            'GRB-GW Coincidence JSON file: grb_offline_json',
                            '/tmp/coinc_{0}.json'.format(graceid),
                            tagname='em_follow')
                    os.remove('/tmp/coinc_{0}.json'.format(graceid))
                    ### alert via email
                    os.system(
                        'echo \'{0}\' | mail -s \'Coincidence JSON created for {1}\' {2}'
                        .format(notification_text, graceid, grb_email))
                # is this the json file loaded into GraceDb?
                if 'GRB-GW Coincidence JSON file' in comment:
                    # if it is, find out which type of json it was and mark it as loaded
                    json_type = re.findall('file: (.*)', comment)[0]
                    message_dict = json.loads(event_dict.data[json_type])  # convert the stored string to a dictionary
                    message_dict['loaded_to_gracedb'] = 1
                    # when we send to observers, we will set message_dict['sent_to_observers'] = 1
            else:
                pass
        saveEventDicts(approval_processorMPfiles)
        return 0
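    # note: external-trigger alerts never reach the state-machine logic below;
    # they are handled entirely (labels and coincidence JSON bookkeeping) in this branch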

    #--------------------
    # append the checks that must be satisfied in the preliminary_to_initial state before moving on
    #--------------------

    if humanscimons == 'yes':
        preliminary_to_initial.append('operator_signoffCheck')
    if advocates == 'yes':
        preliminary_to_initial.append('advocate_signoffCheck')

    #--------------------
    # update information based on the alert_type
    # includes extracting information from the alert
    # may also include generating VOEvents and issuing them
    #--------------------

    # actions for each alert_type
    currentstate = event_dict.data['currentstate']  ### actions depend on the current state

    ### NOTE: we handle alert_type=="new" above as well and this conditional is slightly redundant...
    if alert_type == 'new':

        #----------------
        ### pass event through PipelineThrottle
        #----------------

        ### check if a PipelineThrottle exists for this node
        group = event_dict.data['group']
        pipeline = event_dict.data['pipeline']
        search = event_dict.data['search']
        key = generate_ThrottleKey(group, pipeline, search=search)
        if key in queueByGraceID:  ### a throttle already exists
            if len(queueByGraceID[key]) > 1:
                raise ValueError(
                    'too many QueueItems in SortedQueue for pipelineThrottle key=%s'
                    % key)
            item = queueByGraceID[key][0]  ### we expect exactly one item in this SortedQueue

        else:  ### we need to make a throttle!
            # pull PipelineThrottle parameters from the config
            if config.has_section(key):
                throttleWin = config.getfloat(key, 'throttleWin')
                targetRate = config.getfloat(key, 'targetRate')
                requireManualReset = config.getboolean(key, 'requireManualReset')
                conf = config.getfloat(key, 'conf')

            else:
                throttleWin = config.getfloat('default_PipelineThrottle',
                                              'throttleWin')
                targetRate = config.getfloat('default_PipelineThrottle',
                                             'targetRate')
                requireManualReset = config.getboolean('default_PipelineThrottle',
                                                       'requireManualReset')
                conf = config.getfloat('default_PipelineThrottle', 'conf')
            item = PipelineThrottle(t0,
                                    throttleWin,
                                    targetRate,
                                    group,
                                    pipeline,
                                    search=search,
                                    requireManualReset=requireManualReset,
                                    conf=conf,
                                    graceDB_url=client)

            queue.insert(item)  ### add to overall queue

            newSortedQueue = utils.SortedQueue()  # create sorted queue for this throttle
            newSortedQueue.insert(item)  # put the PipelineThrottle queue item into the sorted queue
            queueByGraceID[item.graceid] = newSortedQueue  # add the sorted queue to queueByGraceID

        item.addEvent(graceid, t0)  ### add new event to throttle
        ### this takes care of labeling in gracedb as necessary

        if item.isThrottled():
            ### send some warning message?
            return 0  ### we're done here because we're ignoring this event -> exit from parseAlert
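#        The commented block below is the prototype Grouper logic: it mirrors
#        the PipelineThrottle bookkeeping above (one SortedQueue per groupTag
#        in queueByGraceID) and is retained here until Grouper is implemented.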

#        #----------------
#        ### pass data to Grouper
#        #----------------
#        raise Warning("Grouper is not implemented yet! we're currently using a temporary groupTag and prototype code")

#        '''
#        need to extract groupTag from group_pipeline[_search] mapping.
#            These associations should be specified in the config file, so we'll have to specify this somehow.
#            probably just a "Grouper" section, with (option = value) pairs that look like (groupTag = nodeA nodeB nodeC ...)
#        '''
#        groupTag = 'TEMPORARY'

#        ### check to see if Grouper exists for this groupTag
#        if queueByGraceID.has_key(groupTag): ### at least one Grouper already exists

#            ### determine if any of the existing Groupers are still accepting new triggers
#            for item in queueByGraceID[groupTag]:
#                if item.isOpen():
#                    break ### this Grouper is still open, so we'll just use it
#            else: ### no Groupers are open, so we need to create one
#                item = Grouper(t0, grouperWin, groupTag, eventDicts, graceDB_url=client) ### create the actual QueueItem

#                queue.insert( item ) ### insert it in the overall queue

#                newSortedQueue = utils.SortedQueue() ### set up the SortedQueue for queueByGraceID
#                newSortedQueue.insert(item)
#                queueByGraceID[groupTag] = newSortedQueue

#        else: ### we need to make a Grouper
#            grouperWin = config.getfloat('grouper', 'grouperWin')
#            item = Grouper(t0, grouperWin, groupTag, eventDicts, graceDB_url=client) ### create the actual QueueItem

#            queue.insert( item ) ### insert it in the overall queue

#            newSortedQueue = utils.SortedQueue() ### set up the SortedQueue for queueByGraceID
#            newSortedQueue.insert(item)
#            queueByGraceID[groupTag] = newSortedQueue

#        item.addEvent( graceid ) ### add this graceid to the item

        return 0  ### we're done here. When Grouper makes a decision, we'll tick through the rest of the processes with a "selected" label

    elif alert_type == 'label':
        record_label(event_dict.data, description)

        if description == 'PE_READY':  ### PE_READY label was just applied. We may need to send an update alert

            message = '{0} -- {1} -- Sending update VOEvent.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(
                    graceid,
                    'AP: Received PE_READY label. Sending update VOEvent.',
                    tagname='em_follow')
                process_alert(event_dict.data, 'update', g, config, logger)

            else:
                pass

            message = '{0} -- {1} -- State: {2} --> complete.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: State: {0} --> complete.'.format(currentstate),
                           tagname='em_follow')
                event_dict.data['currentstate'] = 'complete'

            else:
                pass

        elif description == 'EM_READY':  ### EM_READY label was just applied. We may need to send an initial alert
            message = '{0} -- {1} -- Sending initial VOEvent.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(
                    graceid,
                    'AP: Received EM_READY label. Sending initial VOEvent.',
                    tagname='em_follow')
                process_alert(event_dict.data, 'initial', g, config, logger)

            else:
                pass

            message = '{0} -- {1} -- State: {2} --> initial_to_update.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: State: {0} --> initial_to_update.'.format(
                               currentstate),
                           tagname='em_follow')
                event_dict.data['currentstate'] = 'initial_to_update'

            else:
                pass

        elif description == "EM_Throttled":  ### the event is throttled and we need to turn off all processing for it

            event_dict.data['currentstate'] = 'throttled'  ### update current state

            ### check if we need to send retractions
            voevents = event_dict.data['voevents']
            if len(voevents) > 0:
                if 'retraction' not in sorted(voevents)[-1]:
                    # there are existing VOEvents we've sent, but no retraction alert
                    process_alert(event_dict.data, 'retraction', g, config,
                                  logger)

            ### update ForgetMeNow expiration to handle all the clean-up?
            ### we probably do NOT want to change the clean-up schedule because we'll still likely receive a lot of alerts about this guy
            ### therefore, we just retain the local data and ignore him, rather than erasing the local data and having to query to reconstruct it repeatedly as new alerts come in
#            for item in queueByGraceID[graceid]: ### update expiration of the ForgetMeNow so it is immediately processed next.
#                if item.name == ForgetMeNow.name:
#                    time.setExpiration(-np.infty )
#                                                                ### FIXME: this can break the order in SortedQueue's. We need to pop and reinsert or call a manual resort
#                    queue.resort() ### may be expensive but is needed to guarantee that queue remains sorted
#                    queueByGraceID[graceid].resort()
#                    break
#            else:
#                raise ValueError('could not find ForgetMeNow QueueItem for graceid=%s'%graceid)

        elif description == "EM_Selected":  ### this event was selected by a Grouper
            raise NotImplementedError(
                'write logic to handle \"Selected\" labels')

        elif description == "EM_Superseded":  ### this event was superceded by another event within Grouper
            raise NotImplementedError(
                'write logic to handle \"Superseded" labels')

        elif (
                checkLabels(description.split(), config) > 0
        ):  ### some other label was applied. We may need to issue a retraction notice.
            event_dict.data['currentstate'] = 'rejected'

            ### check to see if we need to send a retraction
            voevents = event_dict.data['voevents']
            if len(voevents) > 0:
                if 'retraction' not in sorted(voevents)[-1]:
                    # there are existing VOEvents we've sent, but no retraction alert
                    process_alert(event_dict.data, 'retraction', g, config,
                                  logger)

        saveEventDicts(approval_processorMPfiles)  ### save the updated event dicts to disk
        return 0

    ### FIXME: Reed left off commenting here...

    elif alert_type == 'update':
        # first, the case that we have a new lvem skymap
        if filename.endswith('.fits.gz') or filename.endswith('.fits'):
            if 'lvem' in alert['object']['tag_names']:  # we only care about skymaps tagged lvem for sharing with MOU partners
                # in the past, we used to care who submitted skymaps; keeping this functionality just in case
                submitter = alert['object']['issuer']['display_name']
                record_skymap(event_dict.data, filename, submitter, logger)
            else:
                pass
        # interested in iDQ information or other updates
        else:
            if 'comment' in alert['object']:
                comment = alert['object']['comment']
                if re.match('minimum glitch-FAP', comment):  # is this iDQ glitch-FAP information?
                    record_idqvalues(event_dict.data, comment, logger)
                elif re.match('resent VOEvent', comment):  # did another running instance of approval_processorMP resend a VOEvent?
                    response = re.findall(r'resent VOEvent (.*) in (.*)',
                                          comment)  # extract which VOEvent was re-sent
                    event_dict.data[response[0][1]].append(response[0][0])
                    saveEventDicts(approval_processorMPfiles)
                elif 'EM-Bright probabilities computed from detection pipeline' in comment:  # got comment structure from Shaon G.
                    record_em_bright(event_dict.data, comment, logger)
                elif 'Temporal coincidence with external trigger' in comment:  # got comment structure from Alex U.
                    exttrig, coinc_far = record_coinc_info(
                        event_dict.data, comment, alert, logger)
                    # create dictionary that will become json file
                    message_dict = {}
                    grb_instrument = eventDictionaries[exttrig]['pipeline']
                    message_dict['message'] = em_coinc_text.format(
                        exttrig, grb_instrument, graceid, coinc_far)
                    message_dict['loaded_to_gracedb'] = 0
                    message_dict = json.dumps(message_dict)
                    # update the event dictionaries for both the gw and external triggers
                    eventDictionaries[exttrig]['em_coinc_json'] = message_dict  # updates the external trigger's event_dict.data
                    event_dict.data['em_coinc_json'] = message_dict  # updates the gw trigger's event_dict.data
                    # load the json file to the gw gracedb page
                    with open('/tmp/coinc_{0}.json'.format(graceid), 'w') as tmpfile:
                        tmpfile.write(message_dict)
                    g.writeLog(graceid,
                               'GRB-GW Coincidence JSON file: em_coinc_json',
                               '/tmp/coinc_{0}.json'.format(graceid),
                               tagname='em_follow')
                    os.remove('/tmp/coinc_{0}.json'.format(graceid))
                    # load json file to the external trigger page
                    with open('/tmp/coinc_{0}.json'.format(exttrig), 'w') as tmpfile:
                        tmpfile.write(message_dict)
                    g.writeLog(exttrig,
                               'GRB-GW Coincidence JSON file: em_coinc_json',
                               '/tmp/coinc_{0}.json'.format(exttrig),
                               tagname='em_follow')
                    os.remove('/tmp/coinc_{0}.json'.format(exttrig))
                    ### alert via email
                    os.system(
                        'echo \'{0}\' | mail -s \'Coincidence JSON created for {1}\' {2}'
                        .format(notification_text, exttrig, grb_email))
                    saveEventDicts(approval_processorMPfiles)
                elif 'GRB-GW Coincidence JSON file' in comment:  # the comment that accompanies a loaded coinc json file
                    message_dict = json.loads(event_dict.data['em_coinc_json'])  # convert the stored string to a dictionary
                    message_dict['loaded_to_gracedb'] = 1
                    saveEventDicts(approval_processorMPfiles)
                else:
                    pass

    elif alert_type == 'signoff':
        signoff_object = alert['object']
        record_signoff(event_dict.data, signoff_object)

    #---------------------------------------------
    # run checks specific to currentstate of the event candidate
    #---------------------------------------------

    passedcheckcount = 0

    if currentstate == 'new_to_preliminary':
        # wait in case we don't have the INJ label right away
        time.sleep(wait_for_hardware_inj)
        queried_dict = g.events(graceid).next()  # query gracedb for the graceid
        event_dict.data['labels'] = queried_dict['labels'].keys()  # get the latest labels before running checks
        for Check in new_to_preliminary:
            getattr(event_dict, Check)()  # run the check method named by the string
            checkresult = event_dict.data[Check + 'result']
            if checkresult is None:
                pass
            elif checkresult == False:
                # because in 'new_to_preliminary' state, no need to apply DQV label
                message = '{0} -- {1} -- Failed {2} in currentstate: {3}.'.format(
                    convertTime(), graceid, Check, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Failed {0} in currentstate: {1}.'.format(
                                   Check, currentstate),
                               tagname='em_follow')
                else:
                    pass
                message = '{0} -- {1} -- State: {2} --> rejected.'.format(
                    convertTime(), graceid, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(
                        graceid,
                        'AP: State: {0} --> rejected.'.format(currentstate),
                        tagname='em_follow')
                    event_dict.data['currentstate'] = 'rejected'
                else:
                    pass
                saveEventDicts(approval_processorMPfiles)
                return 0
            elif checkresult == True:
                passedcheckcount += 1
        if passedcheckcount == len(new_to_preliminary):
            message = '{0} -- {1} -- Passed all {2} checks.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Passed all {0} checks.'.format(currentstate),
                           tagname='em_follow')
            else:
                pass
            message = '{0} -- {1} -- Sending preliminary VOEvent.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Sending preliminary VOEvent.',
                           tagname='em_follow')
                process_alert(event_dict.data, 'preliminary', g, config,
                              logger)
            else:
                pass
            message = '{0} -- {1} -- State: {2} --> preliminary_to_initial.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: State: {0} --> preliminary_to_initial.'.format(
                               currentstate),
                           tagname='em_follow')
                event_dict.data['currentstate'] = 'preliminary_to_initial'
            else:
                pass
            labels = event_dict.data['labels']
            # notify the operators if we haven't previously processed this event
            instruments = event_dict.data['instruments']
            for instrument in instruments:
                if instrument in str(labels):
                    pass
                else:
                    message = '{0} -- {1} -- Labeling {2}OPS.'.format(
                        convertTime(), graceid, instrument)
                    if loggerCheck(event_dict.data, message) == False:
                        logger.info(message)
                        g.writeLog(graceid,
                                   'AP: Labeling {0}OPS.'.format(instrument),
                                   tagname='em_follow')
                        g.writeLabel(graceid, '{0}OPS'.format(instrument))
                    else:
                        pass
            # notify the advocates if we haven't previously processed this event
            if 'ADV' in str(labels):
                pass
            else:
                message = '{0} -- {1} -- Labeling ADVREQ.'.format(
                    convertTime(), graceid)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Labeling ADVREQ.',
                               tagname='em_follow')
                    g.writeLabel(graceid, 'ADVREQ')
                    os.system(
                        'echo \'{0}\' | mail -s \'{1} passed criteria for follow-up\' {2}'
                        .format(advocate_text, graceid, advocate_email))
                    # expose event to LV-EM
                    url_perm_base = g.service_url + urllib.quote(
                        'events/{0}/perms/gw-astronomy:LV-EM:Observers/'.
                        format(graceid))
                    for perm in ['view', 'change']:
                        url = url_perm_base + perm
                        #g.put(url)
                else:
                    pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    elif currentstate == 'preliminary_to_initial':
        for Check in preliminary_to_initial:
            getattr(event_dict, Check)()  # run the check method named by the string
            checkresult = event_dict.data[Check + 'result']
            if checkresult is None:
                pass
            elif checkresult == False:
                message = '{0} -- {1} -- Failed {2} in currentstate: {3}.'.format(
                    convertTime(), graceid, Check, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Failed {0} in currentstate: {1}.'.format(
                                   Check, currentstate),
                               tagname='em_follow')
                else:
                    pass
                message = '{0} -- {1} -- State: {2} --> rejected.'.format(
                    convertTime(), graceid, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(
                        graceid,
                        'AP: State: {0} --> rejected.'.format(currentstate),
                        tagname='em_follow')
                    event_dict.data['currentstate'] = 'rejected'
                else:
                    pass
                # need to set DQV label so long as it isn't the operator_signoffCheck or advocate_signoffCheck
                if 'signoffCheck' in Check:
                    message = '{0} -- {1} -- Not labeling DQV because signoffCheck is separate from explicit data quality checks.'.format(
                        convertTime(), graceid)
                    if loggerCheck(event_dict.data, message) == False:
                        logger.info(message)
                        g.writeLog(
                            graceid,
                            'AP: Not labeling DQV because signoffCheck is separate from explicit data quality checks.',
                            tagname='em_follow')
                    else:
                        pass
                else:
                    message = '{0} -- {1} -- Labeling DQV.'.format(
                        convertTime(), graceid)
                    if loggerCheck(event_dict.data, message) == False:
                        logger.info(message)
                        g.writeLog(graceid,
                                   'AP: Labeling DQV.',
                                   tagname='em_follow')
                        g.writeLabel(graceid, 'DQV')
                    else:
                        pass
                saveEventDicts(approval_processorMPfiles)
                return 0
            elif checkresult == True:
                passedcheckcount += 1
                if Check == 'have_lvem_skymapCheck':  # we want to send skymaps out as quickly as possible, even if humans have not vetted the event
                    process_alert(
                        event_dict.data, 'preliminary', g, config, logger
                    )  # if it turns out we've sent this alert with this skymap before, the process_alert function will just not send this repeat
        if passedcheckcount == len(preliminary_to_initial):
            message = '{0} -- {1} -- Passed all {2} checks.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Passed all {0} checks.'.format(currentstate),
                           tagname='em_follow')
            else:
                pass
            message = '{0} -- {1} -- Labeling EM_READY.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Labeling EM_READY.',
                           tagname='em_follow')
                g.writeLabel(graceid, 'EM_READY')
            else:
                pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    elif currentstate == 'initial_to_update':
        for Check in initial_to_update:
            getattr(event_dict, Check)()  # run the check method named by the string
            checkresult = event_dict.data[Check + 'result']
            if checkresult is None:
                pass
            elif checkresult == False:
                # need to set DQV label
                message = '{0} -- {1} -- Failed {2} in currentstate: {3}.'.format(
                    convertTime(), graceid, Check, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Failed {0} in currentstate: {1}.'.format(
                                   Check, currentstate),
                               tagname='em_follow')
                else:
                    pass
                message = '{0} -- {1} -- State: {2} --> rejected.'.format(
                    convertTime(), graceid, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(
                        graceid,
                        'AP: State: {0} --> rejected.'.format(currentstate),
                        tagname='em_follow')
                    event_dict.data['currentstate'] = 'rejected'
                else:
                    pass
                message = '{0} -- {1} -- Labeling DQV.'.format(
                    convertTime(), graceid)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Labeling DQV.',
                               tagname='em_follow')
                    g.writeLabel(graceid, 'DQV')
                else:
                    pass
                saveEventDicts(approval_processorMPfiles)
                return 0
            elif checkresult == True:
                passedcheckcount += 1
        if passedcheckcount == len(initial_to_update):
            message = '{0} -- {1} -- Passed all {2} checks.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Passed all {0} checks.'.format(currentstate),
                           tagname='em_follow')
            else:
                pass
            message = '{0} -- {1} -- Labeling PE_READY.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Labeling PE_READY.',
                           tagname='em_follow')
                g.writeLabel(graceid, 'PE_READY')
            else:
                pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    else:
        saveEventDicts(approval_processorMPfiles)
        return 0
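The parseAlert function above is, structurally, a dispatcher keyed first on the alert type and then on the event's current state. Below is a minimal, self-contained sketch of that control flow; every handler here is a hypothetical stand-in for the logic above, not part of the original code.

import json

def handle_new(graceid, alert):  # throttle/group the event (see the PipelineThrottle logic above)
    return 0

def handle_label(graceid, description):  # EM_READY, PE_READY, DQV, ... drive the state machine
    return 0

def handle_update(graceid, obj):  # skymaps, iDQ comments, coincidence JSON bookkeeping
    return 0

def handle_signoff(graceid, obj):  # operator and advocate signoffs
    return 0

def parse_alert(alert_json):
    """Dispatch a single lvalert packet the way parseAlert above does."""
    alert = json.loads(alert_json)
    graceid = alert['uid']
    alert_type = alert['alert_type']
    if alert_type == 'new':
        return handle_new(graceid, alert)
    elif alert_type == 'label':
        return handle_label(graceid, alert.get('description'))
    elif alert_type == 'update':
        return handle_update(graceid, alert.get('object'))
    elif alert_type == 'signoff':
        return handle_signoff(graceid, alert.get('object'))
    return 0

print(parse_alert(json.dumps(
    {'alert_type': 'label', 'uid': 'G123456', 'description': 'EM_READY'})))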
Example #34
    def upload(self, fname, gracedb_server=None, testing=True,
               extra_strings=None):
        """Upload this trigger to gracedb

        Parameters
        ----------
        fname: str
            The name to give the xml file associated with this trigger
        gracedb_server: string, optional
            URL to the GraceDB web API service for uploading the event.
            If omitted, the default will be used.
        testing: bool
            Switch to determine if the upload should be sent to gracedb as a
            test trigger (True) or a production trigger (False).
        extra_strings: list of str, optional
            Additional text strings to attach to the GraceDB event as log
            messages.
        """
        from ligo.gracedb.rest import GraceDb
        import matplotlib
        matplotlib.use('Agg')
        import pylab

        # first of all, make sure the event is saved on disk
        # as GraceDB operations can fail later
        self.save(fname)

        if self.snr_series is not None:
            if fname.endswith('.xml.gz'):
                snr_series_fname = fname.replace('.xml.gz', '.hdf')
            else:
                snr_series_fname = fname.replace('.xml', '.hdf')
            snr_series_plot_fname = snr_series_fname.replace('.hdf',
                                                             '_snr.png')
            psd_series_plot_fname = snr_series_fname.replace('.hdf',
                                                             '_psd.png')
            pylab.figure()
            for ifo in sorted(self.snr_series):
                curr_snrs = self.snr_series[ifo]
                curr_snrs.save(snr_series_fname, group='%s/snr' % ifo)
                pylab.plot(curr_snrs.sample_times, abs(curr_snrs),
                           c=ifo_color(ifo), label=ifo)
                if ifo in self.ifos:
                    snr = self.coinc_results['foreground/%s/%s' %
                                             (ifo, 'snr')]
                    endt = self.coinc_results['foreground/%s/%s' %
                                              (ifo, 'end_time')]
                    pylab.plot([endt], [snr], c=ifo_color(ifo), marker='x')

            pylab.legend()
            pylab.xlabel('GPS time (s)')
            pylab.ylabel('SNR')
            pylab.savefig(snr_series_plot_fname)
            pylab.close()

            pylab.figure()
            for ifo in sorted(self.snr_series):
                # Undo dynamic range factor
                curr_psd = self.psds[ifo].astype(numpy.float64)
                curr_psd /= pycbc.DYN_RANGE_FAC ** 2.0
                curr_psd.save(snr_series_fname, group='%s/psd' % ifo)
                # Can't plot log(0) so start from point 1
                pylab.loglog(curr_psd.sample_frequencies[1:],
                             curr_psd[1:]**0.5, c=ifo_color(ifo), label=ifo)
            pylab.legend()
            pylab.xlim([10, 1300])
            pylab.ylim([3E-24, 1E-20])
            pylab.xlabel('Frequency (Hz)')
            pylab.ylabel('ASD')
            pylab.savefig(psd_series_plot_fname)
            pylab.close()

        if self.probabilities is not None:
            if fname.endswith('.xml.gz'):
                prob_fname = fname.replace('.xml.gz', '_probs.json')
            else:
                prob_fname = fname.replace('.xml', '_probs.json')
            prob_plot_fname = prob_fname.replace('.json', '.png')

            prob_plot = {k: v for (k, v) in self.probabilities.items()
                         if v != 0.0}
            labels, sizes = zip(*prob_plot.items())
            colors = [source_color(label) for label in labels]
            fig, ax = pylab.subplots()
            ax.pie(sizes, labels=labels, colors=colors, autopct='%1.1f%%',
                   textprops={'fontsize': 15})
            ax.axis('equal')
            fig.savefig(prob_plot_fname)
            pylab.close()

        gid = None
        try:
            # try connecting to GraceDB
            gracedb = GraceDb(gracedb_server) \
                    if gracedb_server is not None else GraceDb()

            # create GraceDB event
            group = 'Test' if testing else 'CBC'
            r = gracedb.createEvent(group, "pycbc", fname, "AllSky").json()
            gid = r["graceid"]
            logging.info("Uploaded event %s", gid)

            if self.is_hardware_injection:
                gracedb.writeLabel(gid, 'INJ')
                logging.info("Tagging event %s as an injection", gid)

            # upload PSDs. Note that the PSDs are already stored in the
            # original event file and we just upload a copy of that same file
            # here. This keeps things as they were in O2 and can be removed
            # after updating the follow-up infrastructure
            psd_fname = 'psd.xml.gz' if fname.endswith('.gz') else 'psd.xml'
            gracedb.writeLog(gid, "PyCBC PSD estimate from the time of event",
                             psd_fname, open(fname, "rb").read(), "psd")
            logging.info("Uploaded PSDs for event %s", gid)

            # add info for tracking code version
            version_str = 'Using PyCBC version {}{} at {}'
            version_str = version_str.format(
                    pycbc_version.version,
                    ' (release)' if pycbc_version.release else '',
                    os.path.dirname(pycbc.__file__))
            gracedb.writeLog(gid, version_str)

            extra_strings = [] if extra_strings is None else extra_strings
            for text in extra_strings:
                gracedb.writeLog(gid, text, tag_name=['analyst_comments'])

            # upload SNR series in HDF format and plots
            if self.snr_series is not None:
                gracedb.writeLog(gid, 'SNR timeseries HDF file upload',
                                 filename=snr_series_fname)
                gracedb.writeLog(gid, 'SNR timeseries plot upload',
                                 filename=snr_series_plot_fname,
                                 tag_name=['background'],
                                 displayName=['SNR timeseries'])
                gracedb.writeLog(gid, 'PSD plot upload',
                                 filename=psd_series_plot_fname,
                                 tag_name=['psd'], displayName=['PSDs'])

            # upload source probabilities in json format and plot
            if self.probabilities is not None:
                gracedb.writeLog(gid, 'source probabilities JSON file upload',
                                 filename=prob_fname, tag_name=['em_follow'])
                logging.info('Uploaded source probabilities for event %s', gid)
                gracedb.writeLog(gid, 'source probabilities plot upload',
                                 filename=prob_plot_fname,
                                 tag_name=['em_follow'])
                logging.info('Uploaded source probabilities pie chart for '
                             'event %s', gid)

        except Exception as exc:
            logging.error('Something failed during the upload/annotation of '
                          'event %s on GraceDB. The event may not have been '
                          'uploaded!', fname)
            logging.error(str(exc))

        return gid
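A hedged usage sketch for the upload method above. The enclosing class and its attributes (snr_series, probabilities, is_hardware_injection, ...) belong to PyCBC Live's candidate-event machinery, so the candidate object below is assumed rather than constructed here, and the file name and server URL are illustrative:

# candidate = ...  # a populated PyCBC Live candidate/event object
# gid = candidate.upload(
#     'H1L1-Live-1234567890.xml.gz',                      # illustrative name
#     gracedb_server='https://gracedb-playground.ligo.org/api/',
#     testing=True,                                       # 'Test' group, not 'CBC'
#     extra_strings=['example analyst comment'],
# )
# if gid is not None:
#     print('uploaded as', gid)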
        print "reading lvalert message from stdin"
    alert = sys.stdin.read()

    if opts.Verbose:
        print "    %s"%(alert)
    alert = json.loads(alert)

    if alert['alert_type'] != 'new': ### ignore alerts that aren't new
        if opts.Verbose:
            print "ignoring alert"
        sys.exit(0)

    opts.graceid = alert['uid']

### extract things about the event
gdb = GraceDb( gracedb_url )
event = gdb.event( opts.graceid ).json()

gps = event['gpstime']
far = event['far']
if farThr < far:
    if opts.Verbose:
        print "ignoring alert due to high FAR (%.3e > %.3e)"%(far, farThr)
    sys.exit(0)

if opts.verbose:
    print "generating OmegaScans for : %s\n    gps : %.6f"%(opts.graceid, gps)

#-------------------------------------------------

### report to GraceDB that we've started follow-up
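The fragment above reads an lvalert packet from stdin and discards anything that is not a new event or that fails a FAR threshold. A compact, runnable restatement of that filtering (the packet and threshold here are illustrative only):

import json

packet = json.dumps({'alert_type': 'new', 'uid': 'G123456'})  # illustrative packet
farThr = 1e-6  # illustrative FAR threshold in Hz; the real one comes from options

alert = json.loads(packet)
if alert['alert_type'] != 'new':  # ignore alerts that aren't new
    raise SystemExit(0)
graceid = alert['uid']
print('processing', graceid)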
Example #36
class GraceDbTab(get_tab('default')):
    """Custom tab displaying a summary of GraceDb results.
    """
    type = 'gracedb'

    def __init__(self, name, url='https://gracedb.ligo.org',
                 query='External', columns=['gpstime', 'date', 'pipeline'],
                 headers=['GPS time', 'UTC time', 'Source'], rank='gpstime',
                 **kwargs):
        super(GraceDbTab, self).__init__(name, **kwargs)
        self.url = url
        self.query = '{} {} .. {}'.format(
            query,
            int(self.start),
            int(self.end),
        )
        self.events = dict()
        self.headers = headers
        self.columns = columns
        self.rank = rank

    @classmethod
    def from_ini(cls, config, section, **kwargs):
        """Define a new `GraceDbTab` from a `ConfigParser`.
        """
        for key in ['url', 'query', 'rank']:
            try:
                kwargs.setdefault(
                    key, re_quote.sub('', config.get(section, key)))
            except NoOptionError:
                pass
        for key in ['columns', 'headers']:
            try:
                raw = config.get(section, key)
                val = eval(raw)
            except NoOptionError:
                continue
            except (SyntaxError, NameError, TypeError):
                val = [x.strip() for x in raw.split(',')]
            kwargs.setdefault(key, val)
        return super(GraceDbTab, cls).from_ini(config, section, **kwargs)

    def process(self, config=GWSummConfigParser(), **kwargs):
        try:
            from ligo.gracedb.rest import GraceDb
            from ligo.gracedb.exceptions import HTTPError
        except ImportError as e:
            e.args = ('%s, this module is required to generate a GraceDbTab'
                      % str(e),)
            raise
        # query gracedb
        service_url = '%s/api/' % self.url
        self.connection = GraceDb(service_url=service_url)
        self.exception = HTTPError
        vprint('Connected to gracedb at %s\n' % service_url)
        try:
            self.events[None] = list(self.connection.superevents(self.query))
            self._query_type = 'S'
        except self.exception:
            self.events[None] = list(self.connection.events(self.query))
            event_method = self.connection.event
            eventid_name = 'graceid'
            self._query_type = 'E'
        else:
            event_method = self.connection.superevent
            eventid_name = 'superevent_id'
            for event in self.events[None]:  # get preferred event parameters
                event.update(self.connection.event(
                    event['preferred_event'],
                ).json())
        vprint('Recovered %d events for query %r\n'
               % (len(self.events[None]), self.query))
        if 'labels' in self.columns:
            for e in self.events[None]:
                e['labels'] = ', '.join(event_method(
                    e[eventid_name]).json()['labels'])
            vprint('Downloaded labels\n')
        return super(GraceDbTab, self).process(config=config, **kwargs)

    def process_state(self, state, **kwargs):
        def in_state(event):
            return int(event['gpstime']) in state.active
        self.events[str(state)] = list(filter(in_state, self.events[None]))
        reverse = self.rank not in ['gpstime', 'far']
        self.events[str(state)].sort(key=lambda x: x[self.rank],
                                     reverse=reverse)
        vprint('    Selected %d events\n' % len(self.events[str(state)]))

    def write_state_html(self, state):
        """Write the '#main' HTML content for this `GraceDbTab`.
        """
        page = markup.page()
        # build table of events
        page.table(class_='table table-sm table-hover table-striped mt-2',
                   id_='gracedb')
        # thead
        page.thead()
        page.tr()
        for head in self.headers:
            page.th(head)
        page.tr.close()
        page.thead.close()
        # tbody
        page.tbody()
        for event in sorted(self.events[str(state)],
                            key=lambda e: e['gpstime']):
            context = None
            try:
                labs = set(event['labels'].split(', '))
            except (AttributeError, KeyError):
                pass
            else:
                for ctx, labels in LABELS.items():
                    if (
                            ctx == 'success' and labs.union(labels) == labs or
                            labs.intersection(labels)
                    ):
                        context = ctx
                        break
            if context:
                page.tr(class_='table-%s' % context)
            else:
                page.tr()
            for col in self.columns:
                if col == 'date':
                    gpskey = 't_0' if 'superevent_id' in event else 'gpstime'
                    page.td(from_gps(event[gpskey]).strftime(
                        '%B %d %Y %H:%M:%S.%f',
                    )[:-3])
                    continue
                elif col.lower() == 'dqr' and 'superevent_id' in event:
                    page.td()
                    sid = event['superevent_id']
                    href = ('{0}/apiweb/superevents/{1}/files/'
                            'dqr.html'.format(self.url, sid))
                    try:
                        self.connection.get(href)
                    except self.exception:
                        page.p('&mdash;')
                    else:
                        title = 'Data-quality report for {}'.format(sid)
                        page.a('DQR', title=title, href=href, target='_blank',
                               rel='external', class_='btn btn-info btn-sm')
                    page.td.close()
                    continue
                elif col.lower() == 'dqr':
                    page.td()
                    page.p('&mdash;')
                    page.td.close()
                    continue
                try:
                    v = event[col]
                except KeyError:
                    try:
                        v = event['extra_attributes']['GRB'][col]
                        assert v is not None
                    except (KeyError, AssertionError):
                        page.td('-')
                        continue
                if col in ('graceid', 'superevent_id', 'preferred_event'):
                    page.td()
                    tag = 'superevents' if col == 'superevent_id' else 'events'
                    href = '{}/{}/view/{}'.format(self.url, tag, v)
                    title = 'GraceDB {} page for {}'.format(tag[:-1], v)
                    page.a(v, title=title, href=href, target='_blank',
                           rel='external', class_='btn btn-info btn-sm')
                    page.td.close()
                elif col not in ('gpstime', 't_0') and isinstance(v, float):
                    page.td('%.3g' % v)
                elif col == 'labels':
                    page.td(', '.join(
                        ['<samp>%s</samp>' % l for l in sorted(labs)]))
                else:
                    page.td(str(v))
            page.tr.close()
        page.tbody.close()
        page.table.close()
        if len(self.events[str(state)]) == 0:
            page.p('No events were recovered for this state.')
        else:
            page.button(
                'Export to CSV',
                class_='btn btn-outline-secondary btn-table mt-2',
                **{'data-table-id': 'gracedb', 'data-filename': 'gracedb.csv'})

        # query doc
        qurl = '{}/search/?query={}&query_type={}&results_format=S'.format(
            self.url,
            self.query.replace(' ', '+'),
            getattr(self, '_query_type', 'E'),
        )
        qlink = markup.oneliner.a(
            'here',
            href=qurl,
            target='_blank',
        )
        page.p('The above table was generated from a query to {} with the '
               'form <code>{}</code>. To view the results of the same query '
               'via the GraceDB web interface, click {}.'.format(
                   self.url, self.query, qlink), class_='mt-2')

        # reference the labelling
        page.h4('Labelling reference')
        page.p('Events in the above table may have a context based on '
               'their labels as follows:')
        for c, labels in LABELS.items():
            c = (c if c == 'warning' else '%s text-white' % c)
            labstr = ', '.join(['<samp>%s</samp>' % l for l in sorted(labels)])
            page.p(labstr, class_='bg-%s pl-2' % c, style='width: auto;')

        # write to file
        idx = self.states.index(state)
        with open(self.frames[idx], 'w') as fobj:
            fobj.write(str(page))
        return self.frames[idx]
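The tab's GraceDB query string is simply the configured query plus the tab's GPS span, as built in __init__ above. A tiny runnable illustration (the GPS times are made up):

# mirrors the '{} {} .. {}' format used in GraceDbTab.__init__ above
query = '{} {} .. {}'.format('External', 1238166018, 1269363618)  # illustrative GPS span
print(query)  # External 1238166018 .. 1269363618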
Example #37
from ligo.gracedb.rest import GraceDb
from datetime import datetime
from optparse import OptionParser
from sys import argv
import numpy
import os
import pickle
import json

g = GraceDb()

now = datetime.now()

usage = "retrieve_data.py [--options] arg1 arg2 arg3 arg4 arg5 arg6 arg7"
description = "collects several pieces of data from important events on GraceDB and compiles it into an array"

if __name__ == "__main__":
    parser = OptionParser(usage=usage, description=description)
    parser.add_option("-v", "--verbose", default=False, action="store_true")
    parser.add_option(
        "-s",
        "--gpsstart",
        dest="gpsstart",
Example #38
gdbdir = config.get('gdb general', 'main_gdb_dir')

gwchannel = config.get('general', 'gwchannel')

if not opts.KWsignifThr:
    opts.KWsignifThr.append(config.getfloat('general', 'gw_kwsignif_thr'))

GWkwconfig = idq.loadkwconfig(config.get('data_discovery', 'GWkwconfig'))
GWkwbasename = GWkwconfig['basename']
GWgdsdir = config.get('data_discovery', 'GWgdsdir')

kwstride = int(float(GWkwconfig['stride']))

if not opts.skip_gracedb_upload:
    if config.has_option('gdb general', 'gdb_url'):
        gracedb = GraceDb(config.get('gdb general', 'gdb_url'))
    else:
        gracedb = GraceDb()

if config.has_option(opts.classifier, 'plotting_label'):
    plotting_label = config.get(opts.classifier, 'plotting_label')
else:
    plotting_label = opts.classifier

#===================================================================================================

### science segments
if opts.ignore_science_segments:
    if opts.verbose:
        print 'analyzing data regardless of science segments'
    scisegs = [[opts.start, opts.end]]  ### set segs to be this stride range
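The fragment above picks the GraceDb service URL from an optional config entry. A compact, runnable restatement of that pattern (the section and option names mirror the fragment; the URL is illustrative):

try:  # Python 2, matching the fragment above
    from ConfigParser import SafeConfigParser
except ImportError:  # Python 3
    from configparser import ConfigParser as SafeConfigParser
from ligo.gracedb.rest import GraceDb

config = SafeConfigParser()
config.add_section('gdb general')
config.set('gdb general', 'gdb_url', 'https://gracedb.ligo.org/api/')  # illustrative

if config.has_option('gdb general', 'gdb_url'):
    gracedb = GraceDb(config.get('gdb general', 'gdb_url'))
else:
    gracedb = GraceDb()  # fall back to the default service URL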
Example #39
def get_event(params):

    g = GraceDb()
    eventString = params["event"]
    events = g.events('%s' % eventString)
    event = [x for x in events][0]

    keys = [
        'graceid', 'gpstime', 'extra_attributes', 'group', 'links', 'created',
        'far', 'instruments', 'labels', 'nevents', 'submitter', 'search',
        'likelihood'
    ]
    fileorder = [
        'LALInference_skymap.fits.gz', 'bayestar.fits.gz', 'BW_skymap.fits',
        'LIB_skymap.fits.gz', 'skyprobcc_cWB.fits'
    ]
    #fileorder = ['LALInference3d.fits.gz','bayestar3d.fits.gz','bayestar.fits.gz']

    eventinfo = {}
    for key in keys:
        if key not in event: continue
        eventinfo[key] = event[key]
    eventinfo['gpstime'] = float(eventinfo['gpstime'])
    if eventinfo['far'] is None:
        eventinfo['far'] = np.nan

    triggerfile = "%s/%s.txt" % (params["outputDir"], eventinfo['graceid'])
    skymapfile = '%s/%s.fits' % (params["outputDir"], eventinfo['graceid'])
    #if os.path.isfile(triggerfile) and os.path.isfile(skymapfile):
    #    print "Already have info for %s... continuing."%event["graceid"]
    #    return
    print "Getting info for %s" % event["graceid"]

    mjds = [-1, -1]
    timediff = -1
    if 'CoincInspiral' in event['extra_attributes']:
        eventinfo['coinc'] = event['extra_attributes']['CoincInspiral']
    if 'SingleInspiral' in event['extra_attributes']:
        eventinfo['singles'] = {}
        for single in event['extra_attributes']['SingleInspiral']:
            eventinfo['singles'][single['ifo']] = single
            eventinfo['singles'][single['ifo']]['gpstime'] = \
                single['end_time'] + 10**-9 * single['end_time_ns']

        if ("H1" in eventinfo['singles']) and ("L1" in eventinfo['singles']):
            eventinfo["H1_L1_difference"] = eventinfo['singles']['H1'][
                "gpstime"] - eventinfo['singles']['L1']["gpstime"]
            t = Time([
                eventinfo['singles']['H1']["gpstime"],
                eventinfo['singles']['L1']["gpstime"]
            ],
                     format='gps',
                     scale='utc')
            mjds = t.mjd
            timediff = eventinfo["H1_L1_difference"]

    if 'MultiBurst' in event['extra_attributes']:
        eventinfo['burst'] = event['extra_attributes']['MultiBurst']

        single_ifo_times = eventinfo['burst']['single_ifo_times'].split(",")
        ifos = eventinfo['burst']['ifos'].split(",")

        if len(ifos) > 1 and len(single_ifo_times) > 1:
            ifo1 = ifos[0]
            gps1 = float(single_ifo_times[0])

            ifo2 = ifos[1]
            gps2 = float(single_ifo_times[1])

            eventinfo['burst'][ifo1] = {}
            eventinfo['burst'][ifo1]['gpstime'] = gps1

            eventinfo['burst'][ifo2] = {}
            eventinfo['burst'][ifo2]['gpstime'] = gps2

            if ("H1" in eventinfo['burst']) and ("L1" in eventinfo['burst']):
                eventinfo["H1_L1_difference"] = eventinfo['burst']['H1'][
                    "gpstime"] - eventinfo['burst']['L1']["gpstime"]
                t = Time([
                    eventinfo['burst']['H1']["gpstime"],
                    eventinfo['burst']['L1']["gpstime"]
                ],
                         format='gps',
                         scale='utc')
                mjds = t.mjd
                timediff = eventinfo["H1_L1_difference"]

    try:
        print "Looking for EM bright file..."
        r = g.files(eventinfo['graceid'],
                    "Source_Classification_%s.json" % eventinfo['graceid'])
        emfile = open('embright.json', 'w')
        emfile.write(r.read())
        emfile.close()

        with open('embright.json') as data_file:
            emdata = json.load(data_file)

        os.system('rm embright.json')

        embright_keys = [
            "Prob remnant_mass_greater_than 0M_sun", "Prob EMbright"
        ]
        ns_keys = ["Prob Mass2_less_than 3M_sun", "Prob NS2"]

        embright_prob = -1
        for key in embright_keys:
            if key not in emdata: continue
            embright_prob = emdata[key]
            break
        ns_prob = -1
        for key in ns_keys:
            if key not in emdata: continue
            ns_prob = emdata[key]
            break

        eventinfo['embright'] = {}
        eventinfo['embright']['embright'] = embright_prob
        eventinfo['embright']['ns'] = ns_prob

    except:
        print "No EM bright file..."

    try:
        print "Looking for cWB file..."
        r = g.files(eventinfo['graceid'],
                    "trigger_%.4f.txt" % eventinfo['gpstime'])
        # r = g.files(eventinfo['graceid'], "eventDump.txt")
        cwbfile = open('trigger.txt', 'w')
        cwbfile.write(r.read())
        cwbfile.close()

        eventinfo['burst'] = {}
        lines = [line.rstrip('\n') for line in open('trigger.txt')]
        for line in lines:
            lineSplit = line.split(":")
            if len(lineSplit) < 2: continue
            key = lineSplit[0]
            value = filter(None, lineSplit[1].split(" "))
            eventinfo['burst'][key] = value

        ifo1 = eventinfo['burst']['ifo'][0]
        gps1 = float(eventinfo['burst']['time'][0])

        ifo2 = eventinfo['burst']['ifo'][1]
        gps2 = float(eventinfo['burst']['time'][1])

        eventinfo['burst'][ifo1] = {}
        eventinfo['burst'][ifo1]['gpstime'] = gps1

        eventinfo['burst'][ifo2] = {}
        eventinfo['burst'][ifo2]['gpstime'] = gps2

        if ("H1" in eventinfo['burst']) and ("L1" in eventinfo['burst']):
            eventinfo["H1_L1_difference"] = eventinfo['burst']['H1'][
                "gpstime"] - eventinfo['burst']['L1']["gpstime"]
            t = Time([
                eventinfo['burst']['H1']["gpstime"],
                eventinfo['burst']['L1']["gpstime"]
            ],
                     format='gps',
                     scale='utc')
            mjds = t.mjd
            timediff = eventinfo["H1_L1_difference"]

    except:
        print "No cWB file..."

    ra = 0
    dec = 0

    r = []
    for lvfile in fileorder:
        try:
            r = g.files(eventinfo['graceid'], lvfile)
            break
        except Exception:
            continue
    if not r:
        print("Download of skymaps file for %s failed..."
              % eventinfo['graceid'])
    else:
        with open(skymapfile, 'wb') as skymap:
            skymap.write(r.read())

    return skymapfile, eventinfo
    ### unset ligo-proxy just in case
    if "X509_USER_PROXY" in os.environ:
        del os.environ['X509_USER_PROXY']

    ### get cert and key from ini file
    robot_cert = config.get('ldg_certificate', 'robot_certificate')
    robot_key = config.get('ldg_certificate', 'robot_key')

    ### set cert and key
    os.environ['X509_USER_CERT'] = robot_cert
    os.environ['X509_USER_KEY'] = robot_key

### initialize instance of gracedb interface
if config.has_option("gdb general", "gdb_url"):
    gdb_url = config.get('gdb general', 'gdb_url')
    gracedb = GraceDb(gdb_url)
else:
    gdb_url = None
    gracedb = GraceDb()

### connect to gracedb and get event gps time
try:
    gdb_entry = json.loads(gracedb.event(gdb_id).read())
except Exception:
    traceback.print_exc()
    logger.info("    Error: Connection to GraceDB failed!")
    logger.info("    Exiting.")
    sys.exit(1)

#========================
# get parameters about event type from gracedb
logger = logging.getLogger(opts.logTag)
logger.setLevel(opts.logLevel)

logname = utils.genLogname(opts.logDir, 'event_supervisor')

for handler in [logging.FileHandler(logname), logging.StreamHandler()]:
    handler.setFormatter(utils.genFormatter())
    logger.addHandler(handler)

logger.debug("writing log into : %s" % logname)

#-------------------------------------------------

### set up standard options for QueueItems
if opts.gracedb_url.startswith("https"):  ### assume this is a remote server
    gdb = GraceDb(opts.gracedb_url)
    logger.info("GraceDb : %s" % opts.gracedb_url)

else:  ### assume this is a FakeDb directory
    gdb = FakeDb(opts.gracedb_url)
    logger.info("FakeDb : %s" % opts.gracedb_url)

#-------------------------------------------------

logger.info("graceid : %s" % graceid)

#-------------------------------------------------

### load config file
logger.info("loading config from : %s" % configname)
config = SafeConfigParser()
Example #42
    def upload(self, fname, gracedb_server=None, testing=True,
               extra_strings=None):
        """Upload this trigger to gracedb

        Parameters
        ----------
        fname: str
            The name to give the xml file associated with this trigger
        gracedb_server: string, optional
            URL to the GraceDB web API service for uploading the event.
            If omitted, the default will be used.
        testing: bool
            Switch to determine if the upload should be sent to gracedb as a
            test trigger (True) or a production trigger (False).
        extra_strings: list of str, optional
            Additional text messages to attach to the GraceDB event log.
        """
        from ligo.gracedb.rest import GraceDb
        import matplotlib
        matplotlib.use('Agg')
        import pylab

        # first of all, make sure the event is saved on disk
        # as GraceDB operations can fail later
        self.save(fname)

        if self.snr_series is not None:
            if fname.endswith('.xml.gz'):
                snr_series_fname = fname.replace('.xml.gz', '.hdf')
            else:
                snr_series_fname = fname.replace('.xml', '.hdf')
            snr_series_plot_fname = snr_series_fname.replace('.hdf',
                                                             '_snr.png')
            psd_series_plot_fname = snr_series_fname.replace('.hdf',
                                                             '_psd.png')
            pylab.figure()
            for ifo in self.snr_series:
                curr_snrs = self.snr_series[ifo]
                curr_snrs.save(snr_series_fname, group='%s/snr' % ifo)
                pylab.plot(curr_snrs.sample_times, abs(curr_snrs),
                           c=ifo_color(ifo), label=ifo)
                if ifo in self.ifos:
                    snr = self.coinc_results['foreground/%s/%s' %
                                             (ifo, 'snr')]
                    endt = self.coinc_results['foreground/%s/%s' %
                                              (ifo, 'end_time')]
                    pylab.plot([endt], [snr], c=ifo_color(ifo), marker='x')

            pylab.legend()
            pylab.xlabel('GPS time (s)')
            pylab.ylabel('SNR')
            pylab.savefig(snr_series_plot_fname)
            pylab.close()

            pylab.figure()
            for ifo in self.snr_series:
                # Undo dynamic range factor
                curr_psd = self.psds[ifo].astype(numpy.float64)
                curr_psd /= pycbc.DYN_RANGE_FAC ** 2.0
                curr_psd.save(snr_series_fname, group='%s/psd' % ifo)
                # Can't plot log(0) so start from point 1
                pylab.loglog(curr_psd.sample_frequencies[1:],
                             curr_psd[1:]**0.5, c=ifo_color(ifo), label=ifo)
            pylab.legend()
            pylab.xlim([20, 2000])
            pylab.ylim([1E-24, 1E-21])
            pylab.xlabel('Frequency (Hz)')
            pylab.ylabel('ASD')
            pylab.savefig(psd_series_plot_fname)
            pylab.close()

        gid = None
        try:
            # try connecting to GraceDB
            gracedb = GraceDb(gracedb_server) \
                    if gracedb_server is not None else GraceDb()

            # create GraceDB event
            group = 'Test' if testing else 'CBC'
            r = gracedb.createEvent(group, "pycbc", fname, "AllSky").json()
            gid = r["graceid"]
            logging.info("Uploaded event %s", gid)

            if self.is_hardware_injection:
                gracedb.writeLabel(gid, 'INJ')
                logging.info("Tagging event %s as an injection", gid)

            # upload PSDs. Note that the PSDs are already stored in the
            # original event file and we just upload a copy of that same file
            # here. This keeps things as they were in O2 and can be removed
            # after updating the follow-up infrastructure
            psd_fname = 'psd.xml.gz' if fname.endswith('.gz') else 'psd.xml'
            gracedb.writeLog(gid, "PyCBC PSD estimate from the time of event",
                             psd_fname, open(fname, "rb").read(), "psd")
            logging.info("Uploaded PSDs for event %s", gid)

            # add other tags and comments
            gracedb.writeLog(
                    gid, "Using PyCBC code hash %s" % pycbc_version.git_hash)

            extra_strings = [] if extra_strings is None else extra_strings
            for text in extra_strings:
                gracedb.writeLog(gid, text)

            # upload SNR series in HDF format and plots
            if self.snr_series is not None:
                gracedb.writeLog(gid, 'SNR timeseries HDF file upload',
                                 filename=snr_series_fname)
                gracedb.writeLog(gid, 'SNR timeseries plot upload',
                                 filename=snr_series_plot_fname,
                                 tag_name=['background'],
                                 displayName=['SNR timeseries'])
                gracedb.writeLog(gid, 'PSD plot upload',
                                 filename=psd_series_plot_fname,
                                 tag_name=['psd'], displayName=['PSDs'])

        except Exception as exc:
            logging.error('Something failed during the upload/annotation of '
                          'event %s on GraceDB. The event may not have been '
                          'uploaded!', fname)
            logging.error(str(exc))

        return gid
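
# A minimal usage sketch of the upload() method above, assuming `event` is an
# instance of the surrounding class; the filename and log strings here are
# illustrative, not taken from the original example.
gid = event.upload('H1L1-coinc.xml.gz',
                   gracedb_server='https://gracedb.ligo.org/api/',
                   testing=True,
                   extra_strings=['Sketch upload, not a real trigger'])
if gid is not None:
    print('Created GraceDB event %s' % gid)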
Example #43
### read in config file
if opts.verbose:
    print("reading config from : %s" % args[0])
config = SafeConfigParser()
config.read( args[0] )

#-------------------------------------------------

### figure out where we're writing segment files locally
output_dir = config.get('general', 'output-dir')
if not os.path.exists(output_dir):
    os.makedirs( output_dir )

### find which GraceDb we're using and pull out parameters of this event
if config.has_option('general', 'gracedb_url'):
    gracedb = GraceDb( config.get('general', 'gracedb_url') )
else:
    gracedb = GraceDb()

event = gracedb.event( opts.graceid ).json() ### query for this event
gpstime = float(event['gpstime'])
if opts.verbose:
    print("processing %s -> %.6f" % (opts.graceid, gpstime))

### find which segDB we're using
if config.has_option('general', 'segdb-url'):
    segdb_url = config.get('general', 'segdb-url')
else:
    segdb_url = 'https://segments.ligo.org'
if opts.verbose:
    print("searching for segments in : %s" % segdb_url)
Example #44
def setup_roq(cp):
    """
    Generates cp objects with the different ROQs applied
    """
    use_roq = False
    if cp.has_option('paths', 'roq_b_matrix_directory') or cp.has_option(
            'paths', 'computeroqweights'):
        if not cp.has_option('analysis', 'roq'):
            print("Warning: If you are attempting to enable ROQ by specifying "
                  "roq_b_matrix_directory or computeroqweights, please use "
                  "analysis.roq in your config file in future. Enabling ROQ.")
            # configparser option values must be strings
            cp.set('analysis', 'roq', 'True')
    if not cp.getboolean('analysis', 'roq'):
        yield cp
        return  # PEP 479: raising StopIteration inside a generator is an error
    from numpy import genfromtxt, array
    path = cp.get('paths', 'roq_b_matrix_directory')
    if not os.path.isdir(path):
        print("The ROQ directory %s does not seem to exist\n" % path)
        sys.exit(1)
    use_roq = True
    roq_paths = os.listdir(path)
    roq_params = {}
    roq_force_flow = None

    if cp.has_option('lalinference', 'roq_force_flow'):
        roq_force_flow = cp.getfloat('lalinference', 'roq_force_flow')
        print("WARNING: Forcing the f_low to ", str(roq_force_flow), "Hz")
        print(
            "WARNING: Overwriting user choice of flow, srate, seglen, and (mc_min, mc_max and q-min) or (mass1_min, mass1_max, mass2_min, mass2_max)"
        )

    def key(item):  # to order the ROQ bases
        return float(item[1]['seglen'])

    coinc_xml_obj = None
    row = None

    # Get file object of coinc.xml
    if opts.gid is not None:
        from ligo.gracedb.rest import GraceDb
        gid = opts.gid
        cwd = os.getcwd()
        if cp.has_option('analysis', 'service-url'):
            client = GraceDb(cp.get('analysis', 'service-url'))
        else:
            client = GraceDb()
        coinc_xml_obj = ligolw_utils.load_fileobj(
            client.files(gid, "coinc.xml"),
            contenthandler=lsctables.use_in(ligolw.LIGOLWContentHandler))[0]
    elif cp.has_option('input', 'coinc-xml'):
        coinc_xml_obj = ligolw_utils.load_fileobj(
            open(cp.get('input', 'coinc-xml'), "rb"),
            contenthandler=lsctables.use_in(ligolw.LIGOLWContentHandler))[0]

    # Get sim_inspiral from injection file
    if cp.has_option('input', 'injection-file'):
        print("Only 0-th event in the XML table will be considered "
              "while running with ROQ\n")
        row = lsctables.SimInspiralTable.get_table(
            ligolw_utils.load_filename(cp.get('input', 'injection-file'),
                                       contenthandler=lsctables.use_in(
                                           ligolw.LIGOLWContentHandler)))[0]

    roq_bounds = pipe_utils.Query_ROQ_Bounds_Type(path, roq_paths)
    if roq_bounds == 'chirp_mass_q':
        print('ROQ has bounds in chirp mass and mass-ratio')
        mc_priors, trigger_mchirp = pipe_utils.get_roq_mchirp_priors(
            path,
            roq_paths,
            roq_params,
            key,
            coinc_xml_obj=coinc_xml_obj,
            sim_inspiral=row)
    elif roq_bounds == 'component_mass':
        print('ROQ has bounds in component masses')
        # get component mass bounds, then compute the chirp mass that can be safely covered
        # further below we pass along the component mass bounds to the sampler, not the tighter chirp-mass, q bounds
        m1_priors, m2_priors, trigger_mchirp = pipe_utils.get_roq_component_mass_priors(
            path,
            roq_paths,
            roq_params,
            key,
            coinc_xml_obj=coinc_xml_obj,
            sim_inspiral=row)
        mc_priors = {}
        for (roq, m1_prior), (roq2, m2_prior) in zip(m1_priors.items(),
                                                     m2_priors.items()):
            mc_priors[roq] = sorted([
                pipe_utils.mchirp_from_components(m1_prior[1], m2_prior[0]),
                pipe_utils.mchirp_from_components(m1_prior[0], m2_prior[1])
            ])
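        # Note: mchirp_from_components presumably implements the standard
        # chirp-mass definition, mchirp = (m1*m2)**(3/5) / (m1+m2)**(1/5);
        # sorting the two cross pairings above yields a usable
        # [mc_min, mc_max] range for each basis.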

    if cp.has_option('lalinference', 'trigger_mchirp'):
        trigger_mchirp = float(cp.get('lalinference', 'trigger_mchirp'))
    roq_mass_freq_scale_factor = pipe_utils.get_roq_mass_freq_scale_factor(
        mc_priors, trigger_mchirp, roq_force_flow)
    if roq_mass_freq_scale_factor != 1.:
        print('WARNING: Rescaling ROQ basis, please ensure it is allowed with '
              'the model used.')

    # If the true chirp mass is unknown, add variations over the mass bins
    if (opts.gid is not None
            or opts.injections is not None
            or cp.has_option('input', 'injection-file')
            or cp.has_option('lalinference', 'trigger_mchirp')
            or cp.has_option('input', 'coinc-xml')):

        for mc_prior in mc_priors:
            mc_priors[mc_prior] = array(mc_priors[mc_prior])
        # find mass bin containing the trigger
        trigger_bin = None
        for roq in roq_paths:
            mc_lo = mc_priors[roq][0] * roq_mass_freq_scale_factor
            mc_hi = mc_priors[roq][1] * roq_mass_freq_scale_factor
            if mc_lo <= trigger_mchirp <= mc_hi:
                trigger_bin = roq
                print('Prior in Mchirp will be [%s,%s] to contain the '
                      'trigger Mchirp %s' % (mc_lo, mc_hi, trigger_mchirp))
                break
        roq_paths = [trigger_bin]
    else:
        for mc_prior in mc_priors:
            mc_priors[mc_prior] = array(
                mc_priors[mc_prior]) * roq_mass_freq_scale_factor

    # write the master configparser
    cur_basedir = cp.get('paths', 'basedir')
    masterpath = os.path.join(cur_basedir, 'config.ini')
    with open(masterpath, 'w') as cpfile:
        cp.write(cpfile)

    for roq in roq_paths:
        this_cp = configparser.ConfigParser()
        this_cp.optionxform = str
        this_cp.read(masterpath)
        basedir = this_cp.get('paths', 'basedir')
        for dirs in 'basedir', 'daglogdir', 'webdir':
            val = this_cp.get('paths', dirs)
            newval = os.path.join(val, roq)
            mkdirs(newval)
            this_cp.set('paths', dirs, newval)
        this_cp.set(
            'paths', 'roq_b_matrix_directory',
            os.path.join(cp.get('paths', 'roq_b_matrix_directory'), roq))
        flow = roq_params[roq]['flow'] / roq_mass_freq_scale_factor
        srate = 2. * roq_params[roq]['fhigh'] / roq_mass_freq_scale_factor
        #if srate > 8192:
        #    srate = 8192

        seglen = roq_params[roq]['seglen'] * roq_mass_freq_scale_factor
        this_cp.set('engine', 'srate', str(srate))
        this_cp.set('engine', 'seglen', str(seglen))
        if this_cp.has_option('lalinference', 'flow'):
            tmp = this_cp.get('lalinference', 'flow')
            tmp = eval(tmp)
            ifos = tmp.keys()
        else:
            tmp = {}
            ifos = eval(this_cp.get('analysis', 'ifos'))
        for i in ifos:
            tmp[i] = flow
            this_cp.set('lalinference', 'flow', str(tmp))
        if roq_bounds == 'chirp_mass_q':
            mc_min = mc_priors[roq][0] * roq_mass_freq_scale_factor
            mc_max = mc_priors[roq][1] * roq_mass_freq_scale_factor
            # params.dat uses the convention q>1 so our q_min is the inverse of their qmax
            q_min = 1. / float(roq_params[roq]['qmax'])
            this_cp.set('engine', 'chirpmass-min', str(mc_min))
            this_cp.set('engine', 'chirpmass-max', str(mc_max))
            this_cp.set('engine', 'q-min', str(q_min))
            this_cp.set(
                'engine', 'comp-min',
                str(
                    max(
                        roq_params[roq]['compmin'] *
                        roq_mass_freq_scale_factor,
                        mc_min * pow(1 + q_min, 1. / 5.) *
                        pow(q_min, 2. / 5.))))
            this_cp.set(
                'engine', 'comp-max',
                str(mc_max * pow(1 + q_min, 1. / 5.) * pow(q_min, -3. / 5.)))
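            # The comp-min/comp-max values above follow from inverting the
            # chirp-mass definition at fixed mass ratio q = m2/m1 <= 1:
            #     m2 = mc * (1 + q)**(1/5) * q**(2/5)
            #     m1 = mc * (1 + q)**(1/5) * q**(-3/5)
            # evaluated at (mc_min, q_min) and (mc_max, q_min) respectively.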
        elif roq_bounds == 'component_mass':
            m1_min = m1_priors[roq][0]
            m1_max = m1_priors[roq][1]
            m2_min = m2_priors[roq][0]
            m2_max = m2_priors[roq][1]
            this_cp.set('engine', 'mass1-min', str(m1_min))
            this_cp.set('engine', 'mass1-max', str(m1_max))
            this_cp.set('engine', 'mass2-min', str(m2_min))
            this_cp.set('engine', 'mass2-max', str(m2_max))
        yield this_cp
    return  # PEP 479: raising StopIteration inside a generator is an error
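
# A hedged driver sketch for the setup_roq() generator above; `cp` is assumed
# to be a ConfigParser already populated with the sections the function reads.
for roq_cp in setup_roq(cp):
    # each yielded config has its paths redirected into a per-basis directory
    print('ROQ run configured in %s' % roq_cp.get('paths', 'basedir'))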
Example #45
<tr><td rowspan=2>bandwidth (Hz)</td><td>H1</td><td align=right>{bwH}</td><td align=right>{bwHlow}</td><td align=right>{bwHhigh}</td></tr> \
<tr><td>L1</td><td align=right>{bwL}</td><td align=right>{bwLlow}</td><td align=right>{bwLhigh}</td></tr> \
<tr><td rowspan=2>duration (s)</td><td>H1</td><td align=right>{durH}</td><td align=right>{durHlow}</td><td align=right>{durHhigh}</td></tr> \
<tr><td>L1</td><td align=right>{durL}</td><td align=right>{durLlow}</td><td align=right>{durLhigh}</td></tr></table> \
'.format(freqH=freq[0][0], freqHlow=freq[0][1], freqHhigh=freq[0][2],
         freqL=freq[1][0], freqLlow=freq[1][1], freqLhigh=freq[1][2],
         bwH=bandwidth[0][0], bwHlow=bandwidth[0][1], bwHhigh=bandwidth[0][2],
         bwL=bandwidth[1][0], bwLlow=bandwidth[1][1], bwLhigh=bandwidth[1][2],
         durH=dur_c[0], durL=dur_c[1],
         durHlow=dur_low[0], durLlow=dur_low[1],
         durHhigh=dur_high[0], durLhigh=dur_high[1])

BFtable = '<table> \
<tr><th colspan=2>BWB Bayes Factors</th></tr> \
<tr><td>lnBSG</td><td align=right>{BSG}+/-{errBSG}</td></tr> \
<tr><td>lnBSN</td><td align=right>{BSN}+/-{errBSN}</td></tr> \
</table>'.format(BSG=BSG,BSN=BSN,errBSG=err_SG,errBSN=err_SN)

# Sky map
skyname = glob.glob('skymap*.fits')[0]

os.system('cp {sky} BW_skymap.fits'.format(sky=skyname)) # (change name so it's clear on GraceDB which skymap is ours)

#skytag = ["sky_loc","lvem"]
skytag = ["sky_loc"]

# Actually send info to gracedb
gracedb = GraceDb()
#gracedb.writeLog(graceid, "BayesWave Skymap image", filename='plots/skymap.png', tagname='sky_loc')
gracedb.writeLog(graceid, "BayesWave skymap FITS", filename='BW_skymap.fits', tagname=skytag)
gracedb.writeLog(graceid, "<a href='https://ldas-jobs.ligo.caltech.edu/~meg.millhouse/O1/zero_lag/job_{0}'>BWB Follow-up results</a>".format(graceid), tagname='pe')
gracedb.writeLog(graceid,paramtable,tagname='pe')
gracedb.writeLog(graceid,BFtable,tagname='pe')

os.chdir('..')
os.system('cp -r '+dirname+' /home/meg.millhouse/public_html/O1/zero_lag/job_'+graceid)
Example #46
#!/usr/bin/env python3
from ligo.gracedb.rest import GraceDb, HTTPError

client = GraceDb()

try:
    r = client.ping()
except HTTPError as e:
    print(e.message)
    raise SystemExit(1)  # r would be undefined below if the ping failed

print("Response code: {}".format(r.status))
print("Response content: {}".format(r.json()))
Example #47
stride = opts.stride
overlap = opts.overlap
lag = opts.lag
FAR_thresh = opts.FAR_thresh
back_dic_path = opts.background_dic
back_livetime = opts.background_livetime
signal_kde_coords = opts.signal_kde_coords
signal_kde_values = opts.signal_kde_values
noise_kde_coords = opts.noise_kde_coords
noise_kde_values = opts.noise_kde_values

#===============================================================

#Initialize GraceDb
if gdb_flag:
    gdb = GraceDb()

#Initialize dictionary
dictionary = {}

#Initialize peparser
#peparser=bppu.PEOutputParser('common')

#Find trigtimes and timeslides and add to dictionary
timeslide_array = np.genfromtxt(
    '%s/PostProc/LIB_trigs/LIB_%s_timeslides_%s.txt'
    % (rundir, lag, "".join(ifos))).reshape(-1, len(ifos))
trigtime_array = np.genfromtxt(
    '%s/PostProc/LIB_trigs/LIB_%s_times_%s.txt'
    % (rundir, lag, "".join(ifos))).reshape(-1, 1)
for event in range(len(trigtime_array)):
    dictionary[event] = {}
    dictionary[event]['gpstime'] = str(trigtime_array[event, 0])
    dictionary[event]['timeslides'] = {}
    for i, ifo in enumerate(ifos):
Example #48
    def upload(self, fname, psds, low_frequency_cutoff,
               testing=True,
               extra_strings=None,
               ):
        """Upload this trigger to gracedb

        Parameters
        ----------
        fname: str
            The name to give the xml file associated with this trigger
        pds: dict of pybc.types.FrequencySeries
            A ifo keyed dictionary of psds to be uploaded in association
        with this trigger.
        low_frequency_cutoff: float
            The low frequency cutoff of the psds.
        testing: bool
            Switch to determine if the upload should be sent to gracedb as a
        test trigger (True) or a production trigger (False)
        """
        from ligo.gracedb.rest import GraceDb

        # first of all, make sure the event and PSDs are saved on disk
        # as GraceDB operations can fail later

        self.save(fname)

        psds_lal = {}
        for ifo in psds:
            psd = psds[ifo]
            kmin = int(low_frequency_cutoff / psd.delta_f)
            fseries = lal.CreateREAL8FrequencySeries(
                "psd", psd.epoch, low_frequency_cutoff, psd.delta_f,
                lal.StrainUnit**2 / lal.HertzUnit, len(psd) - kmin)
            fseries.data.data = psd.numpy()[kmin:] / pycbc.DYN_RANGE_FAC ** 2.0
            psds_lal[ifo] = fseries
        psd_xmldoc = make_psd_xmldoc(psds_lal)
        psd_xml_path = os.path.splitext(fname)[0] + '-psd.xml.gz'
        ligolw_utils.write_filename(psd_xmldoc, psd_xml_path, gz=True)

        if self.upload_snr_series:
            snr_series_fname = os.path.splitext(fname)[0] + '.hdf'
            for ifo in self.snr_series:
                self.snr_series[ifo].save(snr_series_fname,
                                          group='%s/snr' % ifo)
                self.snr_series_psd[ifo].save(snr_series_fname,
                                              group='%s/psd' % ifo)

        # try connecting to GraceDB
        try:
            gracedb = GraceDb(self.gracedb_server) \
                    if self.gracedb_server is not None else GraceDb()
        except Exception as exc:
            logging.error('Cannot connect to GraceDB')
            logging.error(str(exc))
            logging.error('Carrying on, but event %s will NOT be uploaded!', fname)
            return None

        # create GraceDB event
        group = 'Test' if testing else 'CBC'
        try:
            r = gracedb.createEvent(group, "pycbc", fname, "AllSky").json()
        except Exception as exc:
            logging.error('Cannot create GraceDB event')
            logging.error(str(exc))
            logging.error('Carrying on, but event %s will NOT be uploaded!', fname)
            return None
        logging.info("Uploaded event %s", r["graceid"])

        if self.is_hardware_injection:
            try:
                gracedb.writeLabel(r['graceid'], 'INJ')
                logging.info("Tagging event %s as an injection", r["graceid"])
            except Exception as exc:
                logging.error("Cannot tag event %s as an injection",
                              r["graceid"])
                logging.error(str(exc))

        # upload PSDs
        try:
            gracedb.writeLog(r["graceid"],
                             "PyCBC PSD estimate from the time of event",
                             "psd.xml.gz", open(psd_xml_path, "rb").read(),
                             "psd").json()
            logging.info("Uploaded PSDs for event %s", r["graceid"])
        except Exception as exc:
            logging.error("Cannot upload PSDs for event %s", r["graceid"])
            logging.error(str(exc))

        # add other tags and comments
        try:
            gracedb.writeLog(r["graceid"],
                "Using PyCBC code hash %s" % pycbc_version.git_hash).json()
            extra_strings = [] if extra_strings is None else extra_strings
            for text in extra_strings:
                gracedb.writeLog(r["graceid"], text).json()
        except Exception as exc:
            logging.error("Cannot write comments for event %s", r["graceid"])
            logging.error(str(exc))

        # upload SNR series in HDF format
        if self.upload_snr_series:
            try:
                gracedb.writeFile(r['graceid'], snr_series_fname)
            except Exception as exc:
                logging.error("Cannot upload HDF SNR series for event %s",
                              r["graceid"])
                logging.error(str(exc))

        return r['graceid']
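
# A hedged call sketch for this upload() variant, assuming `event` is an
# instance of the surrounding class and `psds` is the ifo-keyed dict of
# pycbc.types.FrequencySeries described in the docstring; all names here
# are illustrative.
gid = event.upload('coinc.xml.gz', psds,
                   low_frequency_cutoff=20.0,
                   testing=True)
print('GraceDB id: %s' % gid)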