예제 #1
0
def get_data(query: str):
    """Query GraceDb for events matching *query*, indexed by grace id.

    Parameters
    ----------
    query : str
        A GraceDb event search string (e.g. ``"gid: GW150914"``).

    Returns
    -------
    dict
        Mapping of grace id -> event dictionary for every match.
    """
    grace_client = GraceDb()
    # BUGFIX: use the caller-supplied query; the original issued four
    # hard-coded queries and silently kept only the last result.
    events = grace_client.events(query)

    # NOTE(review): if count_iterable consumes the iterator it will
    # exhaust `events` before the loop below — confirm it copies first.
    print(count_iterable(events))

    results = {}
    for event in events:
        grace_id = event.get(GRACE_ID_KEY)
        results[grace_id] = event

    return results
예제 #2
0
 def process(self, config=None, **kwargs):
     """Query GraceDb for events matching ``self.query`` and process them.

     Tries a superevent query first; if the server rejects it with an
     HTTP error, falls back to a plain event query.  Finally delegates
     to the parent ``process`` implementation.

     Parameters
     ----------
     config : `GWSummConfigParser`, optional
         configuration to forward to the parent; a fresh parser is
         created when omitted.
     """
     # None sentinel instead of a GWSummConfigParser() default, which
     # would be constructed once at definition time and shared between
     # all calls (mutable default argument).
     if config is None:
         config = GWSummConfigParser()
     try:
         from ligo.gracedb.rest import GraceDb
         from ligo.gracedb.exceptions import HTTPError
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab' %
                   str(e), )
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % service_url)
     try:
         self.events[None] = list(connection.superevents(self.query))
         self._query_type = "S"
     except HTTPError:
         # not a valid superevent query; retry as a plain event query
         self.events[None] = list(connection.events(self.query))
         event_method = connection.event
         eventid_name = "graceid"
         self._query_type = "E"
     else:
         event_method = connection.superevent
         eventid_name = "superevent_id"
         for event in self.events[None]:  # get preferred event parameters
             event.update(
                 connection.event(event["preferred_event"], ).json())
     vprint("Recovered %d events for query %r\n" %
            (len(self.events[None]), self.query))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(
                 event_method(e[eventid_name]).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
예제 #3
0
파일: gracedb.py 프로젝트: gwpy/gwsumm
 def process(self, config=None, **kwargs):
     """Query GraceDb for events matching ``self.query`` and process them.

     A superevent query is attempted first; an HTTP error triggers a
     fallback to a plain event query.  Delegates to the parent
     ``process`` once events are recovered.

     Parameters
     ----------
     config : `GWSummConfigParser`, optional
         configuration to forward to the parent; a fresh parser is
         created when omitted.
     """
     # None sentinel avoids the shared mutable default argument
     # (GWSummConfigParser() would be built once at definition time).
     if config is None:
         config = GWSummConfigParser()
     try:
         from ligo.gracedb.rest import GraceDb
         from ligo.gracedb.exceptions import HTTPError
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab'
                   % str(e),)
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % service_url)
     try:
         self.events[None] = list(connection.superevents(self.query))
         self._query_type = "S"
     except HTTPError:
         # not a valid superevent query; retry as a plain event query
         self.events[None] = list(connection.events(self.query))
         event_method = connection.event
         eventid_name = "graceid"
         self._query_type = "E"
     else:
         event_method = connection.superevent
         eventid_name = "superevent_id"
         for event in self.events[None]:  # get preferred event parameters
             event.update(connection.event(
                 event["preferred_event"],
             ).json())
     vprint("Recovered %d events for query %r\n"
            % (len(self.events[None]), self.query))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(event_method(
                 e[eventid_name]).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
예제 #4
0
 def process(self, config=None, **kwargs):
     """Query GraceDb for events in ``[self.start, self.end)`` and process them.

     Parameters
     ----------
     config : `GWSummConfigParser`, optional
         configuration to forward to the parent; a fresh parser is
         created when omitted.
     """
     # None sentinel avoids the shared mutable default argument
     # (GWSummConfigParser() would be built once at definition time).
     if config is None:
         config = GWSummConfigParser()
     try:
         from ligo.gracedb.rest import GraceDb
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab' %
                   str(e), )
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % connection.service_url)
     # append the tab's GPS interval to the configured search terms
     querystr = '%s %d .. %d' % (self.query, self.start, self.end)
     self.events[None] = list(connection.events(querystr))
     vprint("Recovered %d events for query %r\n" %
            (len(self.events[None]), querystr))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(
                 connection.event(e['graceid']).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
예제 #5
0
 def process(self, config=None, **kwargs):
     """Query GraceDb for events in ``[self.start, self.end)`` and process them.

     Parameters
     ----------
     config : `GWSummConfigParser`, optional
         configuration to forward to the parent; a fresh parser is
         created when omitted.
     """
     # None sentinel avoids the shared mutable default argument
     # (GWSummConfigParser() would be built once at definition time).
     if config is None:
         config = GWSummConfigParser()
     try:
         from ligo.gracedb.rest import GraceDb
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab'
                   % str(e),)
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % connection.service_url)
     # append the tab's GPS interval to the configured search terms
     querystr = '%s %d .. %d' % (self.query, self.start, self.end)
     self.events[None] = list(connection.events(querystr))
     vprint("Recovered %d events for query %r\n"
            % (len(self.events[None]), querystr))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(connection.event(
                 e['graceid']).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
예제 #6
0
def get_event(params):
    """Fetch a GraceDb event plus its skymap and ancillary files.

    Parameters
    ----------
    params : dict
        Must contain ``"event"`` (a GraceDb query string / graceid) and
        ``"outputDir"`` (directory where the skymap FITS file is written).

    Returns
    -------
    tuple
        ``(skymapfile, eventinfo)`` where *skymapfile* is the path the
        skymap was (or would have been) written to and *eventinfo* is a
        dict of event metadata.
    """
    g = GraceDb()
    event_string = params["event"]
    events = g.events('%s' % event_string)
    event = [x for x in events][0]

    keys = [
        'graceid', 'gpstime', 'extra_attributes', 'group', 'links', 'created',
        'far', 'instruments', 'labels', 'nevents', 'submitter', 'search',
        'likelihood'
    ]
    # Preferred skymap files, most trusted first; the first one that
    # downloads successfully is used.  (The original defined this list
    # twice; only one definition is needed.)
    fileorder = [
        'LALInference_skymap.fits.gz', 'bayestar.fits.gz', 'BW_skymap.fits',
        'LIB_skymap.fits.gz', 'skyprobcc_cWB.fits'
    ]

    eventinfo = {}
    for key in keys:
        if key not in event:
            continue
        eventinfo[key] = event[key]
    eventinfo['gpstime'] = float(eventinfo['gpstime'])
    if eventinfo['far'] is None:
        eventinfo['far'] = np.nan

    skymapfile = '%s/%s.fits' % (params["outputDir"], eventinfo['graceid'])
    print("Getting info for %s" % event["graceid"])

    # NOTE(review): mjds/timediff are computed below but never returned;
    # presumably kept for debugging — confirm before removing.
    mjds = [-1, -1]
    timediff = -1
    if 'CoincInspiral' in event['extra_attributes']:
        eventinfo['coinc'] = event['extra_attributes']['CoincInspiral']
    if 'SingleInspiral' in event['extra_attributes']:
        eventinfo['singles'] = {}
        for single in event['extra_attributes']['SingleInspiral']:
            eventinfo['singles'][single['ifo']] = single
            # combine integer seconds + nanoseconds into a float GPS time
            eventinfo['singles'][single['ifo']]['gpstime'] = (
                single['end_time'] + 10**-9 * single['end_time_ns'])

        if ("H1" in eventinfo['singles']) and ("L1" in eventinfo['singles']):
            eventinfo["H1_L1_difference"] = (
                eventinfo['singles']['H1']["gpstime"] -
                eventinfo['singles']['L1']["gpstime"])
            t = Time(
                [eventinfo['singles']['H1']["gpstime"],
                 eventinfo['singles']['L1']["gpstime"]],
                format='gps', scale='utc')
            mjds = t.mjd
            timediff = eventinfo["H1_L1_difference"]

    if 'MultiBurst' in event['extra_attributes']:
        eventinfo['burst'] = event['extra_attributes']['MultiBurst']

        single_ifo_times = eventinfo['burst']['single_ifo_times'].split(",")
        ifos = eventinfo['burst']['ifos'].split(",")

        if len(ifos) > 1 and len(single_ifo_times) > 1:
            ifo1 = ifos[0]
            gps1 = float(single_ifo_times[0])

            ifo2 = ifos[1]
            gps2 = float(single_ifo_times[1])

            eventinfo['burst'][ifo1] = {}
            eventinfo['burst'][ifo1]['gpstime'] = gps1

            eventinfo['burst'][ifo2] = {}
            eventinfo['burst'][ifo2]['gpstime'] = gps2

            if ("H1" in eventinfo['burst']) and ("L1" in eventinfo['burst']):
                eventinfo["H1_L1_difference"] = (
                    eventinfo['burst']['H1']["gpstime"] -
                    eventinfo['burst']['L1']["gpstime"])
                t = Time(
                    [eventinfo['burst']['H1']["gpstime"],
                     eventinfo['burst']['L1']["gpstime"]],
                    format='gps', scale='utc')
                mjds = t.mjd
                timediff = eventinfo["H1_L1_difference"]

    try:
        print("Looking for EM bright file...")
        r = g.files(eventinfo['graceid'],
                    "Source_Classification_%s.json" % eventinfo['graceid'])
        with open('embright.json', 'w') as emfile:
            emfile.write(r.read())

        with open('embright.json') as data_file:
            emdata = json.load(data_file)

        os.remove('embright.json')

        embright_keys = [
            "Prob remnant_mass_greater_than 0M_sun", "Prob EMbright"
        ]
        ns_keys = ["Prob Mass2_less_than 3M_sun", "Prob NS2"]

        # BUGFIX: the original tested membership in the key list itself
        # (always true while iterating that same list) and read ns_prob
        # from embright_keys; look each key up in the downloaded data
        # and iterate the correct list instead.
        embright_prob = -1
        for key in embright_keys:
            if key not in emdata:
                continue
            embright_prob = emdata[key]
            break
        ns_prob = -1
        for key in ns_keys:
            if key not in emdata:
                continue
            ns_prob = emdata[key]
            break

        eventinfo['embright'] = {}
        eventinfo['embright']['embright'] = embright_prob
        eventinfo['embright']['ns'] = ns_prob

    except Exception:
        # best-effort: EM bright classification is optional
        print("No EM bright file...")

    try:
        print("Looking for cWB file...")
        r = g.files(eventinfo['graceid'],
                    "trigger_%.4f.txt" % eventinfo['gpstime'])
        with open('trigger.txt', 'w') as cwbfile:
            cwbfile.write(r.read())

        eventinfo['burst'] = {}
        with open('trigger.txt') as fobj:
            lines = [line.rstrip('\n') for line in fobj]
        for line in lines:
            line_split = line.split(":")
            if len(line_split) < 2:
                continue
            # list(...) so the values stay indexable on Python 3, where
            # filter() returns a lazy iterator
            eventinfo['burst'][line_split[0]] = list(
                filter(None, line_split[1].split(" ")))

        ifo1 = eventinfo['burst']['ifo'][0]
        gps1 = float(eventinfo['burst']['time'][0])

        ifo2 = eventinfo['burst']['ifo'][1]
        gps2 = float(eventinfo['burst']['time'][1])

        eventinfo['burst'][ifo1] = {}
        eventinfo['burst'][ifo1]['gpstime'] = gps1

        eventinfo['burst'][ifo2] = {}
        eventinfo['burst'][ifo2]['gpstime'] = gps2

        if ("H1" in eventinfo['burst']) and ("L1" in eventinfo['burst']):
            eventinfo["H1_L1_difference"] = (
                eventinfo['burst']['H1']["gpstime"] -
                eventinfo['burst']['L1']["gpstime"])
            t = Time(
                [eventinfo['burst']['H1']["gpstime"],
                 eventinfo['burst']['L1']["gpstime"]],
                format='gps', scale='utc')
            mjds = t.mjd
            timediff = eventinfo["H1_L1_difference"]

    except Exception:
        # best-effort: cWB trigger file is optional
        print("No cWB file...")

    # Try each skymap file in preference order; keep the first download
    # that succeeds.
    r = []
    for lvfile in fileorder:
        try:
            r = g.files(eventinfo['graceid'], lvfile)
            break
        except Exception:
            continue
    if r == []:
        print("Download of skymaps file for %s failed..." %
              eventinfo['graceid'])
    else:
        with open(skymapfile, 'w') as skymap:
            skymap.write(r.read())

    return skymapfile, eventinfo
예제 #7
0
#!/usr/bin/env python3
from ligo.gracedb.rest import GraceDb

client = GraceDb()

# Run an event search and collect each matching event's chirp mass,
# keyed by its graceid.
events = client.events("gstlal ER5 far < 1.0e-4")

results = {}
for ev in events:
    results[ev["graceid"]] = (
        ev["extra_attributes"]["CoincInspiral"]["mchirp"])

# Repeat the same bookkeeping for superevents, keyed by superevent_id.
superevents = client.superevents("gstlal ER5 far < 1.0e-4")
s_results = {}
for sev in superevents:
    s_results[sev["superevent_id"]] = (
        sev["extra_attributes"]["CoincInspiral"]["mchirp"])
예제 #8
0
class GraceDbTab(get_tab('default')):
    """Custom tab displaying a summary of GraceDb results.
    """
    type = 'gracedb'

    def __init__(self, name, url='https://gracedb.ligo.org',
                 query='External', columns=None, headers=None, rank='gpstime',
                 **kwargs):
        """Create a new `GraceDbTab`.

        Parameters
        ----------
        name : `str`
            name of this tab
        url : `str`
            base URL of the GraceDb server
        query : `str`
            GraceDb search terms; the tab's GPS interval is appended
        columns : `list` of `str`, optional
            event fields to display, defaults to
            ``['gpstime', 'date', 'pipeline']``
        headers : `list` of `str`, optional
            table column headings, defaults to
            ``['GPS time', 'UTC time', 'Source']``
        rank : `str`
            event field used to sort events within each state
        """
        super(GraceDbTab, self).__init__(name, **kwargs)
        # None sentinels avoid the shared-mutable-default-argument trap:
        # every instance gets its own fresh list.
        if columns is None:
            columns = ['gpstime', 'date', 'pipeline']
        if headers is None:
            headers = ['GPS time', 'UTC time', 'Source']
        self.url = url
        self.query = '{} {} .. {}'.format(
            query,
            int(self.start),
            int(self.end),
        )
        self.events = dict()
        self.headers = headers
        self.columns = columns
        self.rank = rank

    @classmethod
    def from_ini(cls, config, section, **kwargs):
        """Define a new `GraceDbTab` from a `ConfigParser`.
        """
        for key in ['url', 'query', 'rank']:
            try:
                kwargs.setdefault(
                    key, re_quote.sub('', config.get(section, key)))
            except NoOptionError:
                pass
        for key in ['columns', 'headers']:
            try:
                raw = config.get(section, key)
                # NOTE(review): eval() executes arbitrary code from the
                # config file; only safe with trusted configuration
                # (ast.literal_eval would be the hardened alternative).
                val = eval(raw)
            except NoOptionError:
                continue
            except (SyntaxError, NameError, TypeError):
                # not a python literal; treat as a comma-separated list
                val = [x.strip().rstrip() for x in raw.split(',')]
            kwargs.setdefault(key, val)
        return super(GraceDbTab, cls).from_ini(config, section, **kwargs)

    def process(self, config=None, **kwargs):
        """Query GraceDb for events matching ``self.query``.

        A superevent query is tried first; an HTTP error triggers a
        fallback to a plain event query.  Delegates to the parent
        ``process`` once events are recovered.

        Parameters
        ----------
        config : `GWSummConfigParser`, optional
            configuration to forward to the parent; a fresh parser is
            created when omitted (avoids a mutable default argument).
        """
        if config is None:
            config = GWSummConfigParser()
        try:
            from ligo.gracedb.rest import GraceDb
            from ligo.gracedb.exceptions import HTTPError
        except ImportError as e:
            e.args = ('%s, this module is required to generate a GraceDbTab'
                      % str(e),)
            raise
        # query gracedb
        service_url = '%s/api/' % self.url
        self.connection = GraceDb(service_url=service_url)
        self.exception = HTTPError
        vprint('Connected to gracedb at %s\n' % service_url)
        try:
            self.events[None] = list(self.connection.superevents(self.query))
            self._query_type = 'S'
        except self.exception:
            # not a valid superevent query; retry as a plain event query
            self.events[None] = list(self.connection.events(self.query))
            event_method = self.connection.event
            eventid_name = 'graceid'
            self._query_type = 'E'
        else:
            event_method = self.connection.superevent
            eventid_name = 'superevent_id'
            for event in self.events[None]:  # get preferred event parameters
                event.update(self.connection.event(
                    event['preferred_event'],
                ).json())
        vprint('Recovered %d events for query %r\n'
               % (len(self.events[None]), self.query))
        if 'labels' in self.columns:
            for e in self.events[None]:
                e['labels'] = ', '.join(event_method(
                    e[eventid_name]).json()['labels'])
            vprint('Downloaded labels\n')
        return super(GraceDbTab, self).process(config=config, **kwargs)

    def process_state(self, state, **kwargs):
        """Select and sort the recovered events for a single *state*."""
        def in_state(event):
            # keep events whose GPS time falls in this state's segments
            return int(event['gpstime']) in state.active
        self.events[str(state)] = list(filter(in_state, self.events[None]))
        # gpstime/far rank ascending; everything else descending
        reverse = self.rank not in ['gpstime', 'far']
        self.events[str(state)].sort(key=lambda x: x[self.rank],
                                     reverse=reverse)
        vprint('    Selected %d events\n' % len(self.events[str(state)]))

    def write_state_html(self, state):
        """Write the '#main' HTML content for this `GraceDbTab`.
        """
        page = markup.page()
        # build table of events
        page.table(class_='table table-sm table-hover table-striped mt-2',
                   id_='gracedb')
        # thead
        page.thead()
        page.tr()
        for head in self.headers:
            page.th(head)
        page.tr.close()
        page.thead.close()
        # tbody
        page.tbody()
        for event in sorted(self.events[str(state)],
                            key=lambda e: e['gpstime']):
            context = None
            # BUGFIX: initialise per event so a missing 'labels' field
            # cannot leave `labs` unbound (or stale from the previous
            # event) when the 'labels' column is rendered below.
            labs = set()
            try:
                labs = set(event['labels'].split(', '))
            except (AttributeError, KeyError):
                pass
            else:
                # pick a bootstrap context class from the label sets
                for ctx, labels in LABELS.items():
                    if (
                            ctx == 'success' and labs.union(labels) == labs or
                            labs.intersection(labels)
                    ):
                        context = ctx
                        break
            if context:
                page.tr(class_='table-%s' % context)
            else:
                page.tr()
            for col in self.columns:
                if col == 'date':
                    # superevents store their reference time as 't_0'
                    gpskey = 't_0' if 'superevent_id' in event else 'gpstime'
                    page.td(from_gps(event[gpskey]).strftime(
                        '%B %d %Y %H:%M:%S.%f',
                    )[:-3])
                    continue
                elif col.lower() == 'dqr' and 'superevent_id' in event:
                    page.td()
                    sid = event['superevent_id']
                    href = ('{0}/apiweb/superevents/{1}/files/'
                            'dqr.html'.format(self.url, sid))
                    # only link the DQR if the file actually exists
                    try:
                        self.connection.get(href)
                    except self.exception:
                        page.p('&mdash;')
                    else:
                        title = 'Data-quality report for {}'.format(sid)
                        page.a('DQR', title=title, href=href, target='_blank',
                               rel='external', class_='btn btn-info btn-sm')
                    page.td.close()
                    continue
                elif col.lower() == 'dqr':
                    page.td()
                    page.p('&mdash;')
                    page.td.close()
                    continue
                try:
                    v = event[col]
                except KeyError:
                    # fall back to GRB extra attributes, else a dash
                    try:
                        v = event['extra_attributes']['GRB'][col]
                        assert v is not None
                    except (KeyError, AssertionError):
                        page.td('-')
                        continue
                if col in ('graceid', 'superevent_id', 'preferred_event'):
                    page.td()
                    tag = 'superevents' if col == 'superevent_id' else 'events'
                    href = '{}/{}/view/{}'.format(self.url, tag, v)
                    title = 'GraceDB {} page for {}'.format(tag[:-1], v)
                    page.a(v, title=title, href=href, target='_blank',
                           rel='external', class_='btn btn-info btn-sm')
                    page.td.close()
                elif col not in ('gpstime', 't_0') and isinstance(v, float):
                    page.td('%.3g' % v)
                elif col == 'labels':
                    page.td(', '.join(
                        ['<samp>%s</samp>' % lab for lab in sorted(labs)]))
                else:
                    page.td(str(v))
            page.tr.close()
        page.tbody.close()
        page.table.close()
        if len(self.events[str(state)]) == 0:
            page.p('No events were recovered for this state.')
        else:
            page.button(
                'Export to CSV',
                class_='btn btn-outline-secondary btn-table mt-2',
                **{'data-table-id': 'gracedb', 'data-filename': 'gracedb.csv'})

        # query doc
        qurl = '{}/search/?query={}&query_type={}&results_format=S'.format(
            self.url,
            self.query.replace(' ', '+'),
            getattr(self, '_query_type', 'E'),
        )
        qlink = markup.oneliner.a(
            'here',
            href=qurl,
            target='_blank',
        )
        page.p('The above table was generated from a query to {} with the '
               'form <code>{}</code>. To view the results of the same query '
               'via the GraceDB web interface, click {}.'.format(
                   self.url, self.query, qlink), class_='mt-2')

        # reference the labelling
        page.h4('Labelling reference')
        page.p('Events in the above table may have a context based on '
               'its labels as follows:')
        for c, labels in LABELS.items():
            c = (c if c == 'warning' else '%s text-white' % c)
            labstr = ', '.join(
                ['<samp>%s</samp>' % lab for lab in sorted(labels)])
            page.p(labstr, class_='bg-%s pl-2' % c, style='width: auto;')

        # write to file
        idx = self.states.index(state)
        with open(self.frames[idx], 'w') as fobj:
            fobj.write(str(page))
        return self.frames[idx]
예제 #9
0
def parseAlert(queue, queueByGraceID, alert, t0, config):
    '''
    the way approval_processorMP digests lvalerts

    --> check if this alert is a command and delegate to parseCommand

    1) instantiates GraceDB client
    2) pulls childConfig settings
    3) makes sure we have the logger
    4) get lvalert specifics
    5) ensure we have the event_dict for the graceid = lvalert['uid']
    6) take proper action depending on the lvalert info coming in and currentstate of the event_dict 
    '''

    #-------------------------------------------------------------------
    # process commands sent via lvalert_commandMP
    #-------------------------------------------------------------------

    if alert['uid'] == 'command':  ### this is a command message!
        return parseCommand(queue, queueByGraceID, alert,
                            t0)  ### delegate to parseCommand and return

    #-------------------------------------------------------------------
    # extract relevant config parameters and set up necessary data structures
    #-------------------------------------------------------------------

    # instantiate GraceDB client from the childConfig
    client = config.get('general', 'client')
    g = GraceDb(client)

    # get other childConfig settings; save in configdict
    voeventerror_email = config.get('general', 'voeventerror_email')
    force_all_internal = config.get('general', 'force_all_internal')
    preliminary_internal = config.get('general', 'preliminary_internal')
    forgetmenow_timeout = config.getfloat('general', 'forgetmenow_timeout')
    approval_processorMPfiles = config.get('general',
                                           'approval_processorMPfiles')
    hardware_inj = config.get('labelCheck', 'hardware_inj')
    wait_for_hardware_inj = config.getfloat('labelCheck',
                                            'wait_for_hardware_inj')
    default_farthresh = config.getfloat('farCheck', 'default_farthresh')
    time_duration = config.getfloat('injectionCheck', 'time_duration')
    humanscimons = config.get('operator_signoffCheck', 'humanscimons')

    ### extract options about advocates
    advocates = config.get('advocate_signoffCheck', 'advocates')
    advocate_text = config.get('advocate_signoffCheck', 'advocate_text')
    advocate_email = config.get('advocate_signoffCheck', 'advocate_email')

    ### extract options for GRB alerts
    em_coinc_text = config.get('GRB_alerts', 'em_coinc_text')
    coinc_text = config.get('GRB_alerts', 'coinc_text')
    grb_email = config.get('GRB_alerts', 'grb_email')
    notification_text = config.get('GRB_alerts', 'notification_text')

    ### extract options about idq
    ignore_idq = config.get('idq_joint_fapCheck', 'ignore_idq')
    default_idqthresh = config.getfloat('idq_joint_fapCheck',
                                        'default_idqthresh')
    idq_pipelines = config.get('idq_joint_fapCheck', 'idq_pipelines')
    idq_pipelines = idq_pipelines.replace(' ', '')
    idq_pipelines = idq_pipelines.split(',')

    skymap_ignore_list = config.get('have_lvem_skymapCheck',
                                    'skymap_ignore_list')

    ### set up configdict (passed to local data structure: eventDicts)
    configdict = makeConfigDict(config)

    # set up logging
    ### FIXME: why not open the logger each time parseAlert is called?
    ###        that would allow you to better control which loggers are necessary and minimize the number of open files.
    ###        it also minimizes the possibility of something accidentally being written to loggers because they were left open.
    ###        what's more, this is a natural place to set up multiple loggers, one for all data and one for data pertaining only to this graceid

    global logger
    if globals().has_key('logger'):  # check to see if we have logger
        logger = globals()['logger']
    else:  # if not, set one up
        logger = loadLogger(config)
        logger.info(
            '\n{0} ************ approval_processorMP.log RESTARTED ************\n'
            .format(convertTime()))

    #-------------------------------------------------------------------
    # extract relevant info about this alert
    #-------------------------------------------------------------------

    # get alert specifics and event_dict information
    graceid = alert['uid']
    alert_type = alert['alert_type']
    description = alert['description']
    filename = alert['file']

    #-------------------------------------------------------------------
    # ensure we have an event_dict and ForgetMeNow tracking this graceid
    #-------------------------------------------------------------------

    if alert_type == 'new':  ### new event -> we must first create event_dict and set up ForgetMeNow queue item for G events

        ### create event_dict
        event_dict = EventDict(
        )  # create a new instance of EventDict class which is a blank event_dict
        if is_external_trigger(
                alert) == True:  # this is an external GRB trigger
            event_dict.grb_trigger_setup(
                alert['object'], graceid, g, config, logger
            )  # populate this event_dict with grb trigger info from lvalert
        else:
            event_dict.setup(
                alert['object'], graceid, configdict, g, config, logger
            )  # populate this event_dict with information from lvalert
        eventDicts[
            graceid] = event_dict  # add the instance to the global eventDicts
        eventDictionaries[
            graceid] = event_dict.data  # add the dictionary to the global eventDictionaries

        ### ForgetMeNow queue item
        item = ForgetMeNow(t0, forgetmenow_timeout, graceid, eventDicts, queue,
                           queueByGraceID, logger)
        queue.insert(item)  # add queue item to the overall queue

        ### set up queueByGraceID
        newSortedQueue = utils.SortedQueue(
        )  # create sorted queue for event candidate
        newSortedQueue.insert(
            item)  # put ForgetMeNow queue item into the sorted queue
        queueByGraceID[
            item.
            graceid] = newSortedQueue  # add queue item to the queueByGraceID
        saveEventDicts(
            approval_processorMPfiles
        )  # trying to see if expirationtime is updated from None

        message = '{0} -- {1} -- Created event dictionary for {1}.'.format(
            convertTime(), graceid)
        if loggerCheck(event_dict.data, message) == False:
            logger.info(message)
            g.writeLog(graceid,
                       'AP: Created event dictionary.',
                       tagname='em_follow')
        else:
            pass

    else:  ### not a new alert -> we may already be tracking this graceid

        if eventDicts.has_key(graceid):  ### we're already tracking it

            # get event_dict with expirationtime key updated for the rest of parseAlert
            event_dict = eventDicts[graceid]

            # find ForgetMeNow corresponding to this graceid and update expiration time
            for item in queueByGraceID[graceid]:
                if item.name == ForgetMeNow.name:  # selects the queue item that is a ForgetMeNow instance
                    item.setExpiration(t0)  # updates the expirationtime key
                    queue.resort(
                    )  ### may be expensive, but is needed to guarantee that queue remains sorted
                    queueByGraceID[graceid].resort()
                    break
            else:  ### we couldn't find a ForgetMeNow for this event! Something is wrong!
                os.system(
                    'echo \'ForgetMeNow KeyError\' | mail -s \'ForgetMeNow KeyError {0}\' {1}'
                    .format(graceid, advocate_email))
                raise KeyError(
                    'could not find ForgetMeNow for %s' % graceid
                )  ### Reed thinks this is necessary as a safety net.
                ### we want the process to terminate if things are not set up correctly to force us to fix it

        else:  # event_dict for event candidate does not exist. we need to create it with up-to-date information
            event_dict = EventDict(
            )  # create a new instance of the EventDict class which is a blank event_dict
            if is_external_trigger(alert) == True:
                event_dict.grb_trigger_setup(
                    g.events(graceid).next(), graceid, g, config, logger)
            else:
                event_dict.setup(
                    g.events(graceid).next(), graceid, configdict, g, config,
                    logger
                )  # fill in event_dict using queried event candidate dictionary
                event_dict.update(
                )  # update the event_dict with signoffs and iDQ info
            eventDicts[
                graceid] = event_dict  # add this instance to the global eventDicts
            eventDictionaries[
                graceid] = event_dict.data  # add the dictionary to the global eventDictionaries

            # create ForgetMeNow queue item and add to overall queue and queueByGraceID
            item = ForgetMeNow(t0, forgetmenow_timeout, graceid, eventDicts,
                               queue, queueByGraceID, logger)
            queue.insert(item)  # add queue item to the overall queue

            ### set up queueByGraceID
            newSortedQueue = utils.SortedQueue(
            )  # create sorted queue for new event candidate
            newSortedQueue.insert(
                item)  # put ForgetMeNow queue item into the sorted queue
            queueByGraceID[
                item.
                graceid] = newSortedQueue  # add queue item to the queueByGraceID

            message = '{0} -- {1} -- Created event dictionary for {1}.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Created event dictionary.',
                           tagname='em_follow')
            else:
                pass

    #--------------------
    # ignore alerts that are not relevant, like simulation or MDC events
    #--------------------

    # if the graceid starts with 'M' for MDCs or 'S' for Simulation, ignore
    if re.match('M', graceid) or re.match(
            'S',
            graceid):  ### FIXME: we want to make this a config-file option!
        message = '{0} -- {1} -- Mock data challenge or simulation. Ignoring.'.format(
            convertTime(), graceid)
        if loggerCheck(event_dict.data, message) == False:
            logger.info(message)
            g.writeLog(graceid,
                       'AP: Mock data challenge or simulation. Ignoring.',
                       tagname='em_follow')
        else:
            pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    #--------------------
    # take care of external GRB triggers
    #--------------------
    if is_external_trigger(
            alert
    ) == True:  # for now, grouped everything related to external triggers together below
        # if it's not a log message updating us about possible coincidence with gravitational-waves OR labels OR json file uploads we are not interested
        if alert_type == 'label':
            record_label(event_dict.data, description)
        if alert_type == 'update':
            # is this a comment containing coinc info that needs to be parsed?
            if 'comment' in alert['object'].keys():
                comment = alert['object']['comment']
                if 'Significant event in on-source' in comment:  # got comment structure from Dipongkar
                    coinc_pipeline, coinc_fap = record_coinc_info(
                        event_dict.data, comment, alert, logger)
                    # begin creating the dictionary that will turn into json file
                    message_dict = {}
                    # populate text field for the GCN circular-to-be
                    message_dict['message'] = coinc_text.format(
                        graceid, coinc_fap)
                    message_dict['loaded_to_gracedb'] = 0
                    # make json string and file
                    message_dict = json.dumps(message_dict)
                    tmpfile = open('/tmp/coinc_{0}.json'.format(graceid), 'w')
                    tmpfile.write(message_dict)
                    tmpfile.close()
                    # make sure to load with a comment that we look for to check off that it's been loaded into gracedb
                    # was it an online or offline pipeline?
                    if 'Online' in coinc_pipeline:
                        event_dict.data['grb_online_json'] = message_dict
                        g.writeLog(
                            graceid,
                            'GRB-GW Coincidence JSON file: grb_online_json',
                            '/tmp/coinc_{0}.json'.format(graceid),
                            tagname='em_follow')
                    elif 'Offline' in coinc_pipeline:
                        event_dict.data['grb_offline_json'] = message_dict
                        g.writeLog(
                            graceid,
                            'GRB-GW Coincidence JSON file: grb_offline_json',
                            '/tmp/coinc_{0}.json'.format(graceid),
                            tagname='em_follow')
                    os.remove('/tmp/coinc_{0}.json'.format(graceid))
                    ### alert via email
                    os.system(
                        'echo \{0}\' | mail -s \'Coincidence JSON created for {1}\' {2}'
                        .format(notification_text, graceid, grb_email))
                # is this the json file loaded into GraceDb?
                if 'GRB-GW Coincidence JSON file' in comment:
                    # if it is, find out which type of json it was and then message_dict['loaded_to_gracedb'] = 1
                    json_type = re.findall('file: (.*)', comment)[0]
                    message_dict = event_dict.data[json_type]
                    message_dict = json.loads(
                        message_dict)  # converts string to dictionary
                    message_dict['loaded_to_gracedb'] = 1
                    # when we send to observers, message_dict['sent_to_observers'] = 1
            else:
                pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    #--------------------
    # Appending which checks must be satisfied in preliminary_to_initial state before moving on
    #--------------------

    if humanscimons == 'yes':
        preliminary_to_initial.append('operator_signoffCheck')
    if advocates == 'yes':
        preliminary_to_initial.append('advocate_signoffCheck')

    #--------------------
    # update information based on the alert_type
    # includes extracting information from the alert
    # may also include generating VOEvents and issuing them
    #--------------------

    # actions for each alert_type
    currentstate = event_dict.data[
        'currentstate']  ### actions depend on the current state

    ### NOTE: we handle alert_type=="new" above as well and this conditional is slightly redundant...
    if alert_type == 'new':

        #----------------
        ### pass event through PipelineThrottle
        #----------------

        ### check if a PipelineThrottle exists for this node
        group = event_dict.data['group']
        pipeline = event_dict.data['pipeline']
        search = event_dict.data['search']
        key = generate_ThrottleKey(group, pipeline, search=search)
        if queueByGraceID.has_key(key):  ### a throttle already exists
            if len(queueByGraceID[key]) > 1:
                raise ValueError(
                    'too many QueueItems in SortedQueue for pipelineThrottle key=%s'
                    % key)
            item = queueByGraceID[key][
                0]  ### we expect there to be only one item in this SortedQueue

        else:  ### we need to make a throttle!
            # pull PipelineThrottle parameters from the config
            if config.has_section(key):
                throttleWin = config.getfloat(key, 'throttleWin')
                targetRate = config.getfloat(key, 'targetRate')
                requireManualReset = config.get(key, 'requireManualReset')
                conf = config.getfloat(key, 'conf')

            else:
                throttleWin = config.getfloat('default_PipelineThrottle',
                                              'throttleWin')
                targetRate = config.getfloat('default_PipelineThrottle',
                                             'targetRate')
                requireManualReset = config.get('default_PipelineThrottle',
                                                'requireManualReset')
                conf = config.getfloat('default_PipelineThrottle', 'conf')
            item = PipelineThrottle(t0,
                                    throttleWin,
                                    targetRate,
                                    group,
                                    pipeline,
                                    search=search,
                                    requireManualReset=False,
                                    conf=0.9,
                                    graceDB_url=client)

            queue.insert(item)  ### add to overall queue

            newSortedQueue = utils.SortedQueue(
            )  # create sorted queue for event candidate
            newSortedQueue.insert(
                item)  # put ForgetMeNow queue item into the sorted queue
            queueByGraceID[
                item.
                graceid] = newSortedQueue  # add queue item to the queueByGraceID

        item.addEvent(graceid, t0)  ### add new event to throttle
        ### this takes care of labeling in gracedb as necessary

        if item.isThrottled():
            ### send some warning message?
            return 0  ### we're done here because we're ignoring this event -> exit from parseAlert

#        #----------------
#        ### pass data to Grouper
#        #----------------
#        raise Warning("Grouper is not implemented yet! we're currently using a temporate groupTag and prototype code")

#        '''
#        need to extract groupTag from group_pipeline[_search] mapping.
#            These associations should be specified in the config file, so we'll have to specify this somehow.
#            probably just a "Grouper" section, with (option = value) pairs that look like (groupTag = nodeA nodeB nodeC ...)
#        '''
#        groupTag = 'TEMPORARY'

#        ### check to see if Grouper exists for this groupTag
#        if queueByGraceID.has_key(groupTag): ### at least one Grouper already exists

#            ### determine if any of the existing Groupers are still accepting new triggers
#            for item in queueByGraceID[groupTag]:
#                if item.isOpen():
#                    break ### this Grouper is still open, so we'll just use it
#            else: ### no Groupers are open, so we need to create one
#                item = Grouper(t0, grouperWin, groupTag, eventDicts, graceDB_url=client) ### create the actual QueueItem

#                queue.insert( item ) ### insert it in the overall queue

#                newSortedQueue = utils.SortedQueue() ### set up the SortedQueue for queueByGraceID
#                newSortedQueue.insert(item)
#                queueByGraceID[groupTag] = newSortedQueue

#        else: ### we need to make a Grouper
#            grouperWin = config.getfloat('grouper', 'grouperWin')
#            item = Grouper(t0, grouperWin, groupTag, eventDicts, graceDB_url=client) ### create the actual QueueItem

#            queue.insert( item ) ### insert it in the overall queue

#            newSortedQueue = utils.SortedQueue() ### set up the SortedQueue for queueByGraceID
#            newSortedQueue.insert(item)
#            queueByGraceID[groupTag] = newSortedQueue

#        item.addEvent( graceid ) ### add this graceid to the item

        return 0  ### we're done here. When Grouper makes a decision, we'll tick through the rest of the processes with a "selected" label

    elif alert_type == 'label':
        record_label(event_dict.data, description)

        if description == 'PE_READY':  ### PE_READY label was just applied. We may need to send an update alert

            message = '{0} -- {1} -- Sending update VOEvent.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(
                    graceid,
                    'AP: Received PE_READY label. Sending update VOEvent.',
                    tagname='em_follow')
                process_alert(event_dict.data, 'update', g, config, logger)

            else:
                pass

            message = '{0} -- {1} -- State: {2} --> complete.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: State: {0} --> complete.'.format(currentstate),
                           tagname='em_follow')
                event_dict.data['currentstate'] = 'complete'

            else:
                pass

        elif description == 'EM_READY':  ### EM_READY label was just applied. We may need to send an initial alert
            message = '{0} -- {1} -- Sending initial VOEvent.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(
                    graceid,
                    'AP: Received EM_READY label. Sending initial VOEvent.',
                    tagname='em_follow')
                process_alert(event_dict.data, 'initial', g, config, logger)

            else:
                pass

            message = '{0} -- {1} -- State: {2} --> initial_to_update.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: State: {0} --> initial_to_update.'.format(
                               currentstate),
                           tagname='em_follow')
                event_dict.data['currentstate'] = 'initial_to_update'

            else:
                pass

        elif description == "EM_Throttled":  ### the event is throttled and we need to turn off all processing for it

            event_dict.data[
                'currentstate'] = 'throttled'  ### update current state

            ### check if we need to send retractions
            voevents = event_dict.data['voevents']
            if len(voevents) > 0:
                if 'retraction' not in sorted(voevents)[-1]:
                    # there are existing VOEvents we've sent, but no retraction alert
                    process_alert(event_dict.data, 'retraction', g, config,
                                  logger)

            ### update ForgetMeNow expiration to handle all the clean-up?
            ### we probably do NOT want to change the clean-up schedule because we'll still likely receive a lot of alerts about this guy
            ### therefore, we just retain the local data and ignore him, rather than erasing the local data and having to query to reconstruct it repeatedly as new alerts come in
#            for item in queueByGraceID[graceid]: ### update expiration of the ForgetMeNow so it is immediately processed next.
#                if item.name == ForgetMeNow.name:
#                    time.setExpiration(-np.infty )
#                                                                ### FIXME: this can break the order in SortedQueue's. We need to pop and reinsert or call a manual resort
#                    queue.resort() ### may be expensive but is needed to guarantee that queue remains sorted
#                    queueByGraceID[graceid].resort()
#                    break
#            else:
#                raise ValueError('could not find ForgetMeNow QueueItem for graceid=%s'%graceid)

        elif description == "EM_Selected":  ### this event was selected by a Grouper
            raise NotImplementedError(
                'write logic to handle \"Selected\" labels')

        elif description == "EM_Superseded":  ### this event was superceded by another event within Grouper
            raise NotImplementedError(
                'write logic to handle \"Superseded" labels')

        elif (
                checkLabels(description.split(), config) > 0
        ):  ### some other label was applied. We may need to issue a retraction notice.
            event_dict.data['currentstate'] = 'rejected'

            ### check to see if we need to send a retraction
            voevents = event_dict.data['voevents']
            if len(voevents) > 0:
                if 'retraction' not in sorted(voevents[-1]):
                    # there are existing VOEvents we've sent, but no retraction alert
                    process_alert(event_dict.data, 'retraction', g, config,
                                  logger)

        saveEventDicts(
            approval_processorMPfiles)  ### save the updated eventDict to disk
        return 0

    ### FIXME: Reed left off commenting here...

    elif alert_type == 'update':
        # first the case that we have a new lvem skymap
        if (filename.endswith('.fits.gz') or filename.endswith('.fits')):
            if 'lvem' in alert['object'][
                    'tag_names']:  # we only care about skymaps tagged lvem for sharing with MOU partners
                submitter = alert['object']['issuer'][
                    'display_name']  # in the past, we used to care who submitted skymaps; keeping this functionality just in case
                record_skymap(event_dict.data, filename, submitter, logger)
            else:
                pass
        # interested in iDQ information or other updates
        else:
            if 'comment' in alert['object'].keys():
                comment = alert['object']['comment']
                if re.match(
                        'minimum glitch-FAP', comment
                ):  # looking to see if it's iDQ glitch-FAP information
                    record_idqvalues(event_dict.data, comment, logger)
                elif re.match(
                        'resent VOEvent', comment
                ):  # looking to see if another running instance of approval_processorMP sent a VOEvent
                    response = re.findall(
                        r'resent VOEvent (.*) in (.*)',
                        comment)  # extracting which VOEvent was re-sent
                    event_dict.data[response[0][1]].append(response[0][0])
                    saveEventDicts(approval_processorMPfiles)
                elif 'EM-Bright probabilities computed from detection pipeline' in comment:  # got comment structure from Shaon G.
                    record_em_bright(event_dict.data, comment, logger)
                elif 'Temporal coincidence with external trigger' in comment:  # got comment structure from Alex U.
                    exttrig, coinc_far = record_coinc_info(
                        event_dict.data, comment, alert, logger)
                    # create dictionary that will become json file
                    message_dict = {}
                    grb_instrument = eventDictionaries[exttrig]['pipeline']
                    message_dict['message'] = em_coinc_text.format(
                        exttrig, grb_instrument, graceid, coinc_far)
                    message_dict['loaded_to_gracedb'] = 0
                    message_dict = json.dumps(message_dict)
                    # update event dictionaries for both the gw and external trigger
                    eventDictionaries[exttrig][
                        'em_coinc_json'] = message_dict  # this updates the external trigger event_dict.data
                    event_dict.data[
                        'em_coinc_json'] = message_dict  # this updates the gw trigger event_dict.data
                    # load json file to the gw gracedb page
                    tmpfile = open('/tmp/coinc_{0}.json'.format(graceid), 'w')
                    tmpfile.write(message_dict)
                    tmpfile.close()
                    g.writeLog(graceid,
                               'GRB-GW Coincidence JSON file: em_coinc_json',
                               '/tmp/coinc_{0}.json'.format(graceid),
                               tagname='em_follow')
                    os.remove('/tmp/coinc_{0}.json'.format(graceid))
                    # load json file to the external trigger page
                    tmpfile = open('/tmp/coinc_{0}.json'.format(exttrig), 'w')
                    tmpfile.write(message_dict)
                    tmpfile.close()
                    g.writeLog(exttrig,
                               'GRB-GW Coincidence JSON file: em_coinc_json',
                               '/tmp/coinc_{0}.json'.format(exttrig),
                               tagname='em_follow')
                    os.remove('/tmp/coinc_{0}.json'.format(exttrig))
                    ### alert via email
                    os.system(
                        'echo \{0}\' | mail -s \'Coincidence JSON created for {1}\' {2}'
                        .format(notification_text, exttrig, grb_email))
                    saveEventDicts(approval_processorMPfiles)
                elif 'GRB-GW Coincidence JSON file' in comment:  # this is the comment that accompanies a loaded coinc json file
                    message_dict = event_dict.data['em_coinc_json']
                    message_dict = json.loads(
                        message_dict)  # converts string to dictionary
                    message_dict['loaded_to_gracedb'] = 1
                    saveEventDicts(approval_processorMPfiles)
                else:
                    pass

    elif alert_type == 'signoff':
        signoff_object = alert['object']
        record_signoff(event_dict.data, signoff_object)

    #---------------------------------------------
    # run checks specific to currentstate of the event candidate
    #---------------------------------------------

    passedcheckcount = 0

    if currentstate == 'new_to_preliminary':
        time.sleep(
            wait_for_hardware_inj
        )  #this is for those cases where we dont have the INJ label right away
        queried_dict = g.events(graceid).next()  #query gracedb for the graceid
        event_dict.data['labels'] = queried_dict['labels'].keys(
        )  #get the latest labels before running checks
        for Check in new_to_preliminary:
            eval('event_dict.{0}()'.format(Check))
            checkresult = event_dict.data[Check + 'result']
            if checkresult == None:
                pass
            elif checkresult == False:
                # because in 'new_to_preliminary' state, no need to apply DQV label
                message = '{0} -- {1} -- Failed {2} in currentstate: {3}.'.format(
                    convertTime(), graceid, Check, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Failed {0} in currentstate: {1}.'.format(
                                   Check, currentstate),
                               tagname='em_follow')
                else:
                    pass
                message = '{0} -- {1} -- State: {2} --> rejected.'.format(
                    convertTime(), graceid, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(
                        graceid,
                        'AP: State: {0} --> rejected.'.format(currentstate),
                        tagname='em_follow')
                    event_dict.data['currentstate'] = 'rejected'
                else:
                    pass
                saveEventDicts(approval_processorMPfiles)
                return 0
            elif checkresult == True:
                passedcheckcount += 1
        if passedcheckcount == len(new_to_preliminary):
            message = '{0} -- {1} -- Passed all {2} checks.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Passed all {0} checks.'.format(currentstate),
                           tagname='em_follow')
            else:
                pass
            message = '{0} -- {1} -- Sending preliminary VOEvent.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Sending preliminary VOEvent.',
                           tagname='em_follow')
                process_alert(event_dict.data, 'preliminary', g, config,
                              logger)
            else:
                pass
            message = '{0} -- {1} -- State: {2} --> preliminary_to_initial.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: State: {0} --> preliminary_to_initial.'.format(
                               currentstate),
                           tagname='em_follow')
                event_dict.data['currentstate'] = 'preliminary_to_initial'
            else:
                pass
            labels = event_dict.data['labels']
            # notify the operators if we haven't previously processed this event
            instruments = event_dict.data['instruments']
            for instrument in instruments:
                if instrument in str(labels):
                    pass
                else:
                    message = '{0} -- {1} -- Labeling {2}OPS.'.format(
                        convertTime(), graceid, instrument)
                    if loggerCheck(event_dict.data, message) == False:
                        logger.info(message)
                        g.writeLog(graceid,
                                   'AP: Labeling {0}OPS.'.format(instrument),
                                   tagname='em_follow')
                        g.writeLabel(graceid, '{0}OPS'.format(instrument))
                    else:
                        pass
            # notify the advocates if we haven't previously processed this event
            if 'ADV' in str(labels):
                pass
            else:
                message = '{0} -- {1} -- Labeling ADVREQ.'.format(
                    convertTime(), graceid)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Labeling ADVREQ.',
                               tagname='em_follow')
                    g.writeLabel(graceid, 'ADVREQ')
                    os.system(
                        'echo \'{0}\' | mail -s \'{1} passed criteria for follow-up\' {2}'
                        .format(advocate_text, graceid, advocate_email))
                    # expose event to LV-EM
                    url_perm_base = g.service_url + urllib.quote(
                        'events/{0}/perms/gw-astronomy:LV-EM:Observers/'.
                        format(graceid))
                    for perm in ['view', 'change']:
                        url = url_perm_base + perm
                        #g.put(url)
                else:
                    pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    elif currentstate == 'preliminary_to_initial':
        for Check in preliminary_to_initial:
            eval('event_dict.{0}()'.format(Check))
            checkresult = event_dict.data[Check + 'result']
            if checkresult == None:
                pass
            elif checkresult == False:
                message = '{0} -- {1} -- Failed {2} in currentstate: {3}.'.format(
                    convertTime(), graceid, Check, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Failed {0} in currentstate: {1}.'.format(
                                   Check, currentstate),
                               tagname='em_follow')
                else:
                    pass
                message = '{0} -- {1} -- State: {2} --> rejected.'.format(
                    convertTime(), graceid, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(
                        graceid,
                        'AP: State: {0} --> rejected.'.format(currentstate),
                        tagname='em_follow')
                    event_dict.data['currentstate'] = 'rejected'
                else:
                    pass
                # need to set DQV label so long as it isn't the operator_signoffCheck or advocate_signoffCheck
                if 'signoffCheck' in Check:
                    message = '{0} -- {1} -- Not labeling DQV because signoffCheck is separate from explicit data quality checks.'.format(
                        convertTime(), graceid)
                    if loggerCheck(event_dict.data, message) == False:
                        logger.info(message)
                        g.writeLog(
                            graceid,
                            'AP: Not labeling DQV because signoffCheck is separate from explicit data quality checks.',
                            tagname='em_follow')
                    else:
                        pass
                else:
                    message = '{0} -- {1} -- Labeling DQV.'.format(
                        convertTime(), graceid)
                    if loggerCheck(event_dict.data, message) == False:
                        logger.info(message)
                        g.writeLog(graceid,
                                   'AP: Labeling DQV.',
                                   tagname='em_follow')
                        g.writeLabel(graceid, 'DQV')
                    else:
                        pass
                saveEventDicts(approval_processorMPfiles)
                return 0
            elif checkresult == True:
                passedcheckcount += 1
                if Check == 'have_lvem_skymapCheck':  # we want to send skymaps out as quickly as possible, even if humans have not vetted the event
                    process_alert(
                        event_dict.data, 'preliminary', g, config, logger
                    )  # if it turns out we've sent this alert with this skymap before, the process_alert function will just not send this repeat
        if passedcheckcount == len(preliminary_to_initial):
            message = '{0} -- {1} -- Passed all {2} checks.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Passed all {0} checks.'.format(currentstate),
                           tagname='em_follow')
            else:
                pass
            message = '{0} -- {1} -- Labeling EM_READY.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Labeling EM_READY.',
                           tagname='em_follow')
                g.writeLabel(graceid, 'EM_READY')
            else:
                pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    elif currentstate == 'initial_to_update':
        for Check in initial_to_update:
            eval('event_dict.{0}()'.format(Check))
            checkresult = event_dict.data[Check + 'result']
            if checkresult == None:
                pass
            elif checkresult == False:
                # need to set DQV label
                message = '{0} -- {1} -- Failed {2} in currentstate: {3}.'.format(
                    convertTime(), graceid, Check, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Failed {0} in currentstate: {1}.'.format(
                                   Check, currentstate),
                               tagname='em_follow')
                else:
                    pass
                message = '{0} -- {1} -- State: {2} --> rejected.'.format(
                    convertTime(), graceid, currentstate)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(
                        graceid,
                        'AP: State: {0} --> rejected.'.format(currentstate),
                        tagname='em_follow')
                    event_dict.data['currentstate'] = 'rejected'
                else:
                    pass
                message = '{0} -- {1} -- Labeling DQV.'.format(
                    convertTime(), graceid)
                if loggerCheck(event_dict.data, message) == False:
                    logger.info(message)
                    g.writeLog(graceid,
                               'AP: Labeling DQV.',
                               tagname='em_follow')
                    g.writeLabel(graceid, 'DQV')
                else:
                    pass
                saveEventDicts(approval_processorMPfiles)
                return 0
            elif checkresult == True:
                passedcheckcount += 1
        if passedcheckcount == len(initial_to_update):
            message = '{0} -- {1} -- Passed all {2} checks.'.format(
                convertTime(), graceid, currentstate)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Passed all {0} checks.'.format(currentstate),
                           tagname='em_follow')
            else:
                pass
            message = '{0} -- {1} -- Labeling PE_READY.'.format(
                convertTime(), graceid)
            if loggerCheck(event_dict.data, message) == False:
                logger.info(message)
                g.writeLog(graceid,
                           'AP: Labeling PE_READY.',
                           tagname='em_follow')
                g.writeLabel(graceid, 'PE_READY')
            else:
                pass
        saveEventDicts(approval_processorMPfiles)
        return 0

    else:
        saveEventDicts(approval_processorMPfiles)
        return 0
예제 #10
0
        "-q",
        "--pipeline2",
        dest="pipeline2",
        default="mbtaonline",
        type="string",
        help="the pipeline we want when finding events in GraceDB",
    )
    parser.add_option("-u", "--username", dest="username", default="None", type="string", help="LIGO username")
    parser.add_option("-w", "--password", dest="password", default="None", type="string", help="LIGO password")
    opts, args = parser.parse_args()

    boundaries = [opts.gpsstart, opts.gpsend, opts.far]
    pipeline = [opts.pipeline1, opts.pipeline2]
    username = opts.username
    password = opts.password
    all_events = [l for l in g.events(" %s .. %s far < %s" % tuple(boundaries)) if (l["pipeline"].lower() in pipeline)]
    # chooses events from GraceDb that are under a certain FAR threshold, in between two GPS times, and certain pipelines

    results = []

    def open_url_wget(url, un=None, pw=None, args=None):
        """Download *url* with ``wget``, optionally using HTTP basic auth.

        Parameters
        ----------
        url : str
            URL to fetch.
        un, pw : str or None
            LIGO username/password; credentials are only passed to wget
            when *both* are given.
        args : list of str, optional
            Extra command-line arguments for wget.  The caller's list is
            never modified.

        Returns
        -------
        int
            The wget process exit code (0 on success).
        """
        import subprocess

        # Fix: the original used the mutable default ``args=[]`` and then
        # mutated it with ``+=``, so the auth flags accumulated across calls
        # (and a caller-supplied list was modified in place).  Copy instead.
        args = list(args) if args is not None else []
        if un is not None and pw is not None:
            # NOTE(review): passing the password on the command line exposes
            # it to other local users via the process table — confirm this
            # is acceptable for the deployment environment.
            args += ["--user", un, "--password", pw, "--no-check-certificate"]
        retcode = subprocess.call(["wget"] + [url] + args)
        return retcode

    string = "%10s | %20s | %20s | %45s | %15s | %40s | %20s | %40s" % (
        "graceid",
# Build the GraceDB search string: CBC events, excluding injections,
# between the two GPS times (time1/time2 defined earlier in the script).
event_search = 'CBC ~Inj ' + str(time1) + ' .. ' + str(time2)

try:
    from ligo.gracedb.rest import GraceDb
except ImportError:
    # Fix: the original used the Python-2-only ``print >> sys.stderr``
    # statement, which is a SyntaxError under Python 3.  ``sys.stderr.write``
    # behaves identically under both interpreters.
    sys.stderr.write(
        "warning: gracedb import failed, program will crash if gracedb "
        "uploads are attempted\n")

# set the gracedb server you want to download the events
# playground is for testing, containing replay events back to Nov. 20, 2018
# main database: https://gracedb.ligo.org/api/" includes real candidates
main_database = "gracedb"  # either "gracedb" or "gracedb-playground"

gracedb_service_url = "https://" + main_database + ".ligo.org/api/"
gracedb_client = GraceDb(gracedb_service_url)

# REST query; returns an iterator of event dictionaries.
events = gracedb_client.events(event_search)

count = 0
# NOTE(review): this snippet appears truncated in the scrape -- each event
# file is opened for writing but nothing is ever written to ``f``, and the
# extracted values below are assigned without being used.  Confirm against
# the original script before relying on its output.
for event in events:
    gid = event['graceid']
    with open('data/' + str(gid), 'w') as f:
        count += 1
        print(count)  # progress indicator
        # Fix: compare to None with ``is`` rather than ``==`` (identity,
        # not equality, is the correct test for the None singleton).
        if event['superevent'] is None:
            superevent = u'none'
        else:
            superevent = event['superevent']
        endtime = event['gpstime']
        # CoincInspiral attributes: chirp mass, combined FAR, coinc SNR
        chirpmass = event['extra_attributes']['CoincInspiral']['mchirp']
        farc = event['extra_attributes']['CoincInspiral']['combined_far']
        snrc = event['extra_attributes']['CoincInspiral']['snr']
예제 #12
0
	parser=OptionParser(usage = usage, description = description)
	parser.add_option('-v', '--verbose', default = False, action = "store_true")
	parser.add_option('-s', '--gpsstart', dest = "gpsstart", default = 0, type = "float", help = "the gps start time used when finding events in GraceDB")
	parser.add_option('-e', '--gpsend', dest = "gpsend", default = 0, type = "float", help = "the gps end time used when finding events in GraceDB")
	parser.add_option('-f', '--far', dest = "far",  default = 0, type = "float", help = "the largest FAR value desired when finding events in GraceDB")
	parser.add_option('-p', '--pipeline1', dest = "pipeline1", default = 'gstlal', type = "string", help = "the pipeline we want when finding events in GraceDB")
	parser.add_option('-q', '--pipeline2', dest = "pipeline2", default = 'mbtaonline', type = "string", help = "the pipeline we want when finding events in GraceDB")
	parser.add_option('-u', '--username', dest = "username", default = 'None', type = "string", help = "LIGO username")
	parser.add_option('-w', '--password', dest = "password", default = 'None', type = "string", help = "LIGO password")
	opts, args = parser.parse_args()

	boundaries = [opts.gpsstart, opts.gpsend, opts.far]
	pipeline = [opts.pipeline1, opts.pipeline2]
	username = opts.username
	password = opts.password
	all_events = [ l for l in g.events(' %s .. %s far < %s'%tuple(boundaries)) if (l['pipeline'].lower() in pipeline ) ]
#chooses events from GraceDb that are under a certain FAR threshold, in between two GPS times, and certain pipelines

	results = []

	def open_url_wget(url, un=None, pw=None, args=None):
		"""Download *url* with ``wget``, optionally using HTTP basic auth.

		``un``/``pw`` are only passed to wget when both are given.
		Returns the wget process exit code (0 on success).
		"""
		import subprocess

		# Fix: the original used the mutable default ``args=[]`` and mutated
		# it with ``+=``, so auth flags accumulated across calls and a
		# caller-supplied list was modified in place.  (The original's mixed
		# tab/space indentation is also normalised here.)
		args = list(args) if args is not None else []
		if un is not None and pw is not None:
			args += ["--user", un, "--password", pw, "--no-check-certificate"]
		retcode = subprocess.call(['wget'] + [url] + args)
		return retcode

	string = "%10s | %20s | %20s | %45s | %15s | %40s | %20s | %40s"%("graceid", "far", "lalstart", "lalfinish", "snr", "lalsnr", "mchirp", "lalmchirp")
#creates a header string to organize the array
예제 #13
0
# 1 day either side of known GW events:
event_type = 'CBC 1126173062 .. 1126345862'  #GW150914
#event_type = 'gstlal pycbc spiir MBTAOnline 1128592500 .. 1128765300'#GW151012
#event_type = 'CBC 1135049950 .. 1135222750'#GW151226
#event_type = 'CBC 1167473536 .. 1167646336'#GW170104
#event_type = 'CBC 1180836094 .. 1181008894'#GW170608
#event_type = 'CBC 1185303407 .. 1185476207'#GW170729
#event_type = 'gstlal MBTAOnline spiir 1186216119 .. 1186388919'#GW170809
#event_type = 'gstlal MBTAOnline spiir 1186655461 .. 1186828261'#GW170814
#event_type = 'gstlal MBTAOnline spiir 1186922482 .. 1187095282'#GW170817
#event_type = 'gstlal MBTAOnline spiir 1186971927 .. 1187144727'#GW170818
#event_type = 'CBC 1187442856 .. 1187615656'#GW170823
event_name = event_type.replace(' ',
                                '_')  # to get rid of those pesky white spaces
print('  for events satisfying: ' + event_type)
events = gracedb_client.events(
    event_type)  # number of events consistent with spiir Nov 20 - Nov 31

# Record every (graceid, pipeline) pair and download each event's coinc.xml.
with open('graceid-pipeline_' + event_name, 'w') as f:
    for event in events:
        gid = event['graceid']
        pipe = event['pipeline']
        f.write(gid + ' ' + pipe + '\n')
        fname = "%s.xml" % gid
        # Fix: the original used bare open()/close(), leaking the file
        # handle if files()/read() raised; a context manager guarantees
        # the file is closed on every path.
        with open(fname, 'w+') as fout:
            content = gracedb_client.files(gid, filename="coinc.xml")
            fout.write(content.read())

# Log which server and query produced this batch, for reproducibility.
with open("name_log", "w") as f:
    f.write(main_database + "\n")
    f.write(event_name)
예제 #14
0
os.system(rm_command)

# Instantiate client
g = GraceDb()
#g = GraceDb(url)
#g = GraceDbBasic()
#g = GraceDbBasic(url)

eventString = get_eventstring(opts)
if opts.doGPSLoop:
    opts.doGPS = True
    # NOTE(review): this loop never sleeps between iterations, so it will
    # poll the GraceDB server continuously -- confirm whether a delay
    # between queries was intended.
    while True:
        # Query a sliding 24-hour window ending "now".
        endtime = Time(datetime.utcnow(), scale='utc')
        starttime = Time(endtime.gps - 86400.0, format='gps', scale='utc')
        opts.startGPS = starttime.gps
        opts.endGPS = endtime.gps

        eventString = get_eventstring(opts)
        # REST API returns an iterator
        events = g.events('%s' % eventString)

        download_events(events)
else:
    eventString = get_eventstring(opts)
    # Fix: ``print eventString`` is Python-2-only print-statement syntax
    # (a SyntaxError under Python 3); the call form works on both.
    print(eventString)
    # REST API returns an iterator
    events = g.events('%s' % eventString)

    download_events(events)