Example #1
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
         from ligo.gracedb.exceptions import HTTPError
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab' %
                   str(e), )
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % service_url)
     try:
         self.events[None] = list(connection.superevents(self.query))
         self._query_type = "S"
     except HTTPError:
         self.events[None] = list(connection.events(self.query))
         event_method = connection.event
         eventid_name = "graceid"
         self._query_type = "E"
     else:
         event_method = connection.superevent
         eventid_name = "superevent_id"
         for event in self.events[None]:  # get preferred event parameters
             event.update(
                 connection.event(event["preferred_event"], ).json())
     vprint("Recovered %d events for query %r\n" %
            (len(self.events[None]), self.query))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(
                 event_method(e[eventid_name]).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #2
from ligo.gracedb.rest import GraceDb


def test_superevents():
    """Basic functionality test of ligo-gracedb

    Connect to the default server, and print the IDs of the first
    10 superevents with a FAR < 1e9

    Notes
    -----
    The whole function needs to be protected against a RequestException
    because there is no network activity until the first superevent
    is pulled out of the ``<events>`` generator.
    """
    conn = GraceDb(force_noauth=True, )
    events = conn.superevents(
        "far<1e9",
        columns=[
            "superevent_id",
            "gw_id",
        ],
    )
    for i, event in enumerate(events):
        if i >= 10:  # stop after 10
            break
        print(
            event["superevent_id"],
            event["gw_id"],
        )
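The docstring above notes that no HTTP request is made until the superevents generator is first advanced, so any protection has to wrap the loop itself. A minimal sketch of that idea, assuming the client is backed by requests (so requests.exceptions.RequestException is the relevant base class; the exact exception type may differ between ligo-gracedb versions, and print_first_superevents is an illustrative name):

from ligo.gracedb.rest import GraceDb
from requests.exceptions import RequestException  # assumption: requests-based transport


def print_first_superevents(max_events=10):
    """Print up to ``max_events`` superevent IDs, tolerating network failures."""
    conn = GraceDb(force_noauth=True)
    events = conn.superevents("far<1e9", columns=["superevent_id"])
    try:
        # The first HTTP request happens here, when the generator is advanced,
        # which is why the loop sits inside the try block.
        for i, event in enumerate(events):
            if i >= max_events:
                break
            print(event["superevent_id"])
    except RequestException as exc:
        print(f"GraceDB query failed: {exc}")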
Example #3
File: gracedb.py Project: gwpy/gwsumm
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
         from ligo.gracedb.exceptions import HTTPError
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab'
                   % str(e),)
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % service_url)
     try:
         self.events[None] = list(connection.superevents(self.query))
         self._query_type = "S"
     except HTTPError:
         self.events[None] = list(connection.events(self.query))
         event_method = connection.event
         eventid_name = "graceid"
         self._query_type = "E"
     else:
         event_method = connection.superevent
         eventid_name = "superevent_id"
         for event in self.events[None]:  # get preferred event parameters
             event.update(connection.event(
                 event["preferred_event"],
             ).json())
     vprint("Recovered %d events for query %r\n"
            % (len(self.events[None]), self.query))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(event_method(
                 e[eventid_name]).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #4
#!/usr/bin/env python3
from ligo.gracedb.rest import GraceDb

client = GraceDb()

# Retrieve an iterator for events matching a query.
events = client.events("gstlal ER5 far < 1.0e-4")

# For each event in the search results, add the graceid
# and chirp mass to a dictionary.
results = {}
for event in events:
    graceid = event["graceid"]
    mchirp = event["extra_attributes"]["CoincInspiral"]["mchirp"]
    results.update({graceid: mchirp})

# For each superevent in the search results, add the superevent_id
# and chirp mass to a dictionary.
superevents = client.superevents("gstlal ER5 far < 1.0e-4")
s_results = {}
for superevent in superevents:
    superevent_id = superevent["superevent_id"]
    mchirp = superevent["extra_attributes"]["CoincInspiral"]["mchirp"]
    s_results.update({superevent_id: mchirp})
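The event loop above can also be written as a dict comprehension over the lazy iterator; a minimal variant using the same client, query, and keys:

# Same graceid -> chirp mass mapping, built in one expression.
results = {
    event["graceid"]: event["extra_attributes"]["CoincInspiral"]["mchirp"]
    for event in client.events("gstlal ER5 far < 1.0e-4")
}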
Example #5
class GraceDbTab(get_tab('default')):
    """Custom tab displaying a summary of GraceDb results.
    """
    type = 'gracedb'

    def __init__(self, name, url='https://gracedb.ligo.org',
                 query='External', columns=['gpstime', 'date', 'pipeline'],
                 headers=['GPS time', 'UTC time', 'Source'], rank='gpstime',
                 **kwargs):
        super(GraceDbTab, self).__init__(name, **kwargs)
        self.url = url
        self.query = '{} {} .. {}'.format(
            query,
            int(self.start),
            int(self.end),
        )
        self.events = dict()
        self.headers = headers
        self.columns = columns
        self.rank = rank

    @classmethod
    def from_ini(cls, config, section, **kwargs):
        """Define a new `GraceDbTab` from a `ConfigParser`.
        """
        for key in ['url', 'query', 'rank']:
            try:
                kwargs.setdefault(
                    key, re_quote.sub('', config.get(section, key)))
            except NoOptionError:
                pass
        for key in ['columns', 'headers']:
            try:
                raw = config.get(section, key)
                val = eval(raw)
            except NoOptionError:
                continue
            except (SyntaxError, NameError, TypeError):
                val = [x.strip().rstrip() for x in raw.split(',')]
            kwargs.setdefault(key, val)
        return super(GraceDbTab, cls).from_ini(config, section, **kwargs)

    def process(self, config=GWSummConfigParser(), **kwargs):
        try:
            from ligo.gracedb.rest import GraceDb
            from ligo.gracedb.exceptions import HTTPError
        except ImportError as e:
            e.args = ('%s, this module is required to generate a GraceDbTab'
                      % str(e),)
            raise
        # query gracedb
        service_url = '%s/api/' % self.url
        self.connection = GraceDb(service_url=service_url)
        self.exception = HTTPError
        vprint('Connected to gracedb at %s\n' % service_url)
        try:
            self.events[None] = list(self.connection.superevents(self.query))
            self._query_type = 'S'
        except self.exception:
            self.events[None] = list(self.connection.events(self.query))
            event_method = self.connection.event
            eventid_name = 'graceid'
            self._query_type = 'E'
        else:
            event_method = self.connection.superevent
            eventid_name = 'superevent_id'
            for event in self.events[None]:  # get preferred event parameters
                event.update(self.connection.event(
                    event['preferred_event'],
                ).json())
        vprint('Recovered %d events for query %r\n'
               % (len(self.events[None]), self.query))
        if 'labels' in self.columns:
            for e in self.events[None]:
                e['labels'] = ', '.join(event_method(
                    e[eventid_name]).json()['labels'])
            vprint('Downloaded labels\n')
        return super(GraceDbTab, self).process(config=config, **kwargs)

    def process_state(self, state, **kwargs):
        def in_state(event):
            return int(event['gpstime']) in state.active
        self.events[str(state)] = list(filter(in_state, self.events[None]))
        reverse = self.rank not in ['gpstime', 'far']
        self.events[str(state)].sort(key=lambda x: x[self.rank],
                                     reverse=reverse)
        vprint('    Selected %d events\n' % len(self.events[str(state)]))

    def write_state_html(self, state):
        """Write the '#main' HTML content for this `GraceDbTab`.
        """
        page = markup.page()
        # build table of events
        page.table(class_='table table-sm table-hover table-striped mt-2',
                   id_='gracedb')
        # thead
        page.thead()
        page.tr()
        for head in self.headers:
            page.th(head)
        page.tr.close()
        page.thead.close()
        # tbody
        page.tbody()
        for event in sorted(self.events[str(state)],
                            key=lambda e: e['gpstime']):
            context = None
            try:
                labs = set(event['labels'].split(', '))
            except (AttributeError, KeyError):
                pass
            else:
                for ctx, labels in LABELS.items():
                    if (
                            ctx == 'success' and labs.union(labels) == labs or
                            labs.intersection(labels)
                    ):
                        context = ctx
                        break
            if context:
                page.tr(class_='table-%s' % context)
            else:
                page.tr()
            for col in self.columns:
                if col == 'date':
                    gpskey = 't_0' if 'superevent_id' in event else 'gpstime'
                    page.td(from_gps(event[gpskey]).strftime(
                        '%B %d %Y %H:%M:%S.%f',
                    )[:-3])
                    continue
                elif col.lower() == 'dqr' and 'superevent_id' in event:
                    page.td()
                    sid = event['superevent_id']
                    href = ('{0}/apiweb/superevents/{1}/files/'
                            'dqr.html'.format(self.url, sid))
                    try:
                        self.connection.get(href)
                    except self.exception:
                        page.p('&mdash;')
                    else:
                        title = 'Data-quality report for {}'.format(sid)
                        page.a('DQR', title=title, href=href, target='_blank',
                               rel='external', class_='btn btn-info btn-sm')
                    page.td.close()
                    continue
                elif col.lower() == 'dqr':
                    page.td()
                    page.p('&mdash;')
                    page.td.close()
                    continue
                try:
                    v = event[col]
                except KeyError:
                    try:
                        v = event['extra_attributes']['GRB'][col]
                        assert v is not None
                    except (KeyError, AssertionError):
                        page.td('-')
                        continue
                if col in ('graceid', 'superevent_id', 'preferred_event'):
                    page.td()
                    tag = 'superevents' if col == 'superevent_id' else 'events'
                    href = '{}/{}/view/{}'.format(self.url, tag, v)
                    title = 'GraceDB {} page for {}'.format(tag[:-1], v)
                    page.a(v, title=title, href=href, target='_blank',
                           rel='external', class_='btn btn-info btn-sm')
                    page.td.close()
                elif col not in ('gpstime', 't_0') and isinstance(v, float):
                    page.td('%.3g' % v)
                elif col == 'labels':
                    page.td(', '.join(
                        ['<samp>%s</samp>' % l for l in sorted(labs)]))
                else:
                    page.td(str(v))
            page.tr.close()
        page.tbody.close()
        page.table.close()
        if len(self.events[str(state)]) == 0:
            page.p('No events were recovered for this state.')
        else:
            page.button(
                'Export to CSV',
                class_='btn btn-outline-secondary btn-table mt-2',
                **{'data-table-id': 'gracedb', 'data-filename': 'gracedb.csv'})

        # query doc
        qurl = '{}/search/?query={}&query_type={}&results_format=S'.format(
            self.url,
            self.query.replace(' ', '+'),
            getattr(self, '_query_type', 'E'),
        )
        qlink = markup.oneliner.a(
            'here',
            href=qurl,
            target='_blank',
        )
        page.p('The above table was generated from a query to {} with the '
               'form <code>{}</code>. To view the results of the same query '
               'via the GraceDB web interface, click {}.'.format(
                   self.url, self.query, qlink), class_='mt-2')

        # reference the labelling
        page.h4('Labelling reference')
        page.p('Events in the above table may have a context based on '
               'its labels as follows:')
        for c, labels in LABELS.items():
            c = (c if c == 'warning' else '%s text-white' % c)
            labstr = ', '.join(['<samp>%s</samp>' % l for l in sorted(labels)])
            page.p(labstr, class_='bg-%s pl-2' % c, style='width: auto;')

        # write to file
        idx = self.states.index(state)
        with open(self.frames[idx], 'w') as fobj:
            fobj.write(str(page))
        return self.frames[idx]
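The from_ini classmethod above accepts either a Python-literal list or a plain comma-separated string for the columns and headers options, falling back to a comma split when eval fails. A standalone sketch of that fallback, using a hypothetical [tab-gracedb] section whose values are illustrative, not taken from a real configuration:

from configparser import ConfigParser

# Hypothetical section showing the two accepted spellings:
# a Python literal (handled by eval) and a plain comma-separated list.
config = ConfigParser()
config.read_string("""
[tab-gracedb]
columns = ['gpstime', 'date', 'pipeline', 'labels']
headers = GPS time, UTC time, Source, Labels
""")

for key in ('columns', 'headers'):
    raw = config.get('tab-gracedb', key)
    try:
        val = eval(raw)  # Python-literal form, e.g. the columns option
    except (SyntaxError, NameError, TypeError):
        # comma-separated form, e.g. the headers option
        val = [x.strip() for x in raw.split(',')]
    print(key, val)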
Example #6
class Events(object):
    """
    A dictionary with all superevents from the Grace database.
    """

    def __init__(self):
        self.client = GraceDb()
        self.data = {}
        self.loop = asyncio.get_event_loop()
        self.loop.create_task(self._periodic_event_updater())

    def update_all(self):
        """
        Get the latest events from the Grace database.

        Returns
        -------
        None

        See Also
        --------
        https://gracedb.ligo.org/latest/
        """
        events = self.client.superevents(query="-ADVNO", orderby=["-created"])
        self.data = {}

        logging.info("Updating all events. This might take a minute.")
        start = time.time()
        i = 0  # default in case the query returns no events
        for i, event in enumerate(events, 1):
            self._add_to_event_data(event)

        end = time.time()
        logging.info(f"Updating {i} events took {round(end - start, 2)} s.")

    def update_events_last_week(self):
        logging.info("Updating all events until 1 week ago. This might take a minute.")
        start = time.time()
        events = self.client.superevents(
            query="created: 1 week ago .. now -ADVNO", orderby=["-created"]
        )

        i = 0  # default in case the query returns no events
        for i, event in enumerate(events, 1):
            logging.info(f"Updating event {event['superevent_id']}")
            self._add_to_event_data(event)

        end = time.time()
        logging.info(f"Updating {i} events took {round(end - start, 2)} s.")

    def update_single(self, event_id: str):
        """
        Update and store the data of a single event in the event dictionary.

        Parameters
        ----------
        event_id : str
            ID of the superevent to update.

        Returns
        -------
        None
        """
        # Make sure the event id has the right upper and lower case format
        _event_id = event_id[0].upper() + event_id[1:].lower()

        start = time.time()
        try:
            event = self.client.superevent(_event_id)
            event = event.json()
        except (ligo.gracedb.exceptions.HTTPError, urllib.error.HTTPError) as e:
            logging.error(f"Could not find event {_event_id}. Exception: {e}")
            return
        end = time.time()
        logging.info(
            f"Updating single event from database took {round(end - start, 2)} s."
        )

        self._add_to_event_data(event)

    def _add_to_event_data(self, event):
        event_id = event.pop("superevent_id")
        event["created"] = dateutil.parser.parse(event["created"])
        self.data[event_id] = event

        self._add_event_info_from_voevent(event_id)

    def _add_event_info_from_voevent(self, event_id: str):
        voevent = VOEventFromEventId()
        try:
            voevent.get(event_id)
            self._add_event_distance(voevent)
            self._add_event_classification(voevent)
            self._add_instruments(voevent)
        except (ligo.gracedb.exceptions.HTTPError, urllib.error.HTTPError) as e:
            logging.warning(
                f"Couldn't get info from VOEvent file with event id {event_id}"
                f"Exception: {e}"
            )

    def _add_event_distance(self, voevent: VOEvent):
        """
        Add distance and its standard deviation to the event dictionary.

        Parameters
        ----------
        voevent : VOEvent

        Returns
        -------
        None
        """
        self.data[voevent.id]["distance_mean_Mly"] = voevent.distance
        self.data[voevent.id]["distance_std_Mly"] = voevent.distance_std

    def _add_event_classification(self, voevent: VOEvent):
        """
        Adds the event type to the events dictionary.

        Possible event types are binary black hole merger, black hole neutron
        star merger etc.

        Returns
        -------
        None
        """
        self.data[voevent.id]["event_types"] = voevent.p_astro
        self.data[voevent.id]["most_likely"] = self.get_likely_event_type(voevent.id)

    def _add_instruments(self, voevent: VOEvent):
        """

        Parameters
        ----------
        voevent

        Returns
        -------

        """
        self.data[voevent.id]["instruments_short"] = voevent.seen_by_short
        self.data[voevent.id]["instruments_long"] = voevent.seen_by_long

    async def _periodic_event_updater(self):
        """
        Fetches all the events from the GraceDB database.

        Returns
        -------
        None

        """
        while True:
            await asyncio.sleep(delay=36000)

            logging.info("Refreshing event database.")
            self.update_all()

    def get_likely_event_type(self, event_id: str) -> str:
        """
        Return the most likely event type of a certain event.

        Parameters
        ----------
        event_id : str
            The event ID you want to know the event type of.

        Returns
        -------
        str
            Most likely event type.
        """
        try:
            event_types = self.data[event_id]["event_types"]
            most_likely, _ = sorted(
                event_types.items(), key=lambda value: value[1], reverse=True
            )[0]
        except AttributeError:
            logging.error(f"Failed to get most likely event of {event_id}")
            return ""

        return most_likely

    @property
    def latest(self) -> Dict[str, dict]:
        """
        Return the latest event from the Grace database.

        Returns
        -------
        dict
            Latest event.
        """
        for id, info in self.data.items():
            return {id: info}

        return {"": dict()}

    def picture(self, event_id: str) -> str:
        """
        Return local path of an image from a specific event.

        Image priority is as follows: 1) LALInference 2) skymap 3) bayestar.png.

        Parameters
        ----------
        event_id : str
            The name of the event you want to have a picture of.

        Returns
        -------
        str
            Local path of the image.
        """
        files = self.client.files(event_id).json()

        for fname in ["LALInference", "skymap", "bayestar"]:
            link = get_latest_file_url(files, fname, ".png")
            if len(link) > 0:
                break

        if len(link) == 0:
            raise FileNotFoundError
        else:
            img = ImageFromUrl(link)

        return img.path
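get_likely_event_type above simply ranks the p_astro probabilities stored under event_types; the same ranking in isolation, with made-up numbers purely for illustration:

# Hypothetical p_astro-style classification; the values are invented.
event_types = {"BNS": 0.02, "NSBH": 0.08, "BBH": 0.87, "Terrestrial": 0.03}

most_likely, probability = sorted(
    event_types.items(), key=lambda item: item[1], reverse=True
)[0]
print(most_likely, probability)  # -> BBH 0.87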
Example #7
import os
import json
import requests
from astropy.time import Time
from bs4 import BeautifulSoup
from ligo.gracedb.rest import GraceDb


def getSuperevents(export=False,dirOut=None,fileOut=None,indent=2,verbose=False,knownEvents={},forceUpdate=False,datelim=999,logFile=None):
    """Get GWTC from GWOSC json files and add parameters.
    Inputs:
        * export [boolean, optional]: set for export to JSON file output. Default=False
        * dirOut [string, optional]: directory to export to (if export=True). Default='data/'
        * fileOut [string, optional]: file to export to (if export=True). Default='GWTC.json'
        * indent [integer, optional]: json indent for exported file. Default=2
        * verbose [boolean, optional]: set for verbose output. Default=False
        * knownEvents [object]: timestamps of existing events. Default=empty (ignore known events)
        * forceUpdate [boolean, optional]: set to force output of all events, not just updated ones. Default=False
        * datelim [integer, optional]: number of days old to skip events for. Default=999
        * lodGile [string, optional]: logFile to output logging to. Default=None (no logging)
    Outputs:
        * [object] object containing data (can be read by gwcatpy.importGWTC)
    """
    service_url = 'https://gracedb.ligo.org/api/'
    if verbose: print('Retrieving GraceDB data from {}'.format(service_url))
    client = GraceDb(service_url,force_noauth=True)
    if verbose: print('Limiting to {} days'.format(datelim))
    # Retrieve an iterator for events matching a query.
    events = client.superevents('far < 1.0e-4')
    # if verbose: print('retrieved {} events'.format(len(events)))
    # For each event in the search results, add the graceid
    # and chirp mass to a dictionary.
    results = {}
    links = {}

    if logFile:
        if os.path.exists(logFile):
            os.remove(logFile)
            print('Removing log file: {}'.format(logFile))
        else:
            print("Log file doesn't exist: {}".format(logFile))
        print('Writing GraceDB log to: {}'.format(logFile))
        logF=open(logFile,'a')

    for event in events:
        sid = event['superevent_id']
        tEvent=Time(event['t_0'],format='gps')
        tNow=Time.now()
        dtEvent=(tNow-tEvent).jd
        if dtEvent>datelim:
            print('Too old ({} days). Skipping {}'.format(dtEvent,sid))
            continue
        # if sid in knownEvents:
        #     tOld=Time(knownEvents[sid])
        #     try:
        #         tNew=Time(cdate)
        #     except:
        #         tNew=Time.now()
        #     if tNew <= tOld:
        #         if verbose:print('no import needed for {}: [{}<={}]'.format(sid,tNew.isot,tOld.isot))
        #         continue
        #     else:
        #         if verbose:print('importing for {}: [{}>{}]'.format(sid,tNew.isot,tOld.isot))
        evOut=event
        evOut['meta']={'retrieved':Time.now().isot,'src':service_url}

        voreq=client.voevents(sid)
        voevents=json.loads(voreq.content)
        evOut['voevents']=voevents

        volist=voevents['voevents']
        good_voevents = [voe for voe in volist if voe['voevent_type'] != 'RE']
        retraction_list = [voe for voe in volist if voe['voevent_type'] == 'RE']
        if len(retraction_list)>0:
            print('Event {} retracted. Skipping'.format(sid))
            cdate=Time(' '.join(retraction_list[-1]['created'].split(' ')[0:2]))
            evOut['meta']['type']='Retraction'
        else:
            evOut['meta']['type']='Candidate'

            Ngood=len(good_voevents)
            validXML=False
            nvo=Ngood
            while validXML==False and nvo>0:
                thisvo=good_voevents[nvo-1]
                cdate=Time(' '.join(thisvo['created'].split(' ')[0:2]))
                if sid in knownEvents:
                    tOld=Time(knownEvents[sid])
                    tNew=cdate
                    if tNew <= tOld:
                        if forceUpdate:
                            if verbose:print('forcing update for {}: [{}<={}]'.format(sid,tNew.isot,tOld.isot))
                            update=True
                        else:
                            if verbose:print('no update needed for {}: [{}<={}]'.format(sid, tNew.isot,tOld.isot))
                            update=False
                            validXML=True
                    else:
                        if verbose:print('getting files for {}: [{}>{}]'.format(sid,tNew.isot,tOld.isot))
                        update=True
                else:
                    update=True

                thisvoe_url = thisvo['links']['file']
                vonum=thisvo['N']

                evOut['xmlfile']=[os.path.split(thisvoe_url)[-1],thisvoe_url]
                if update:
                    if logFile:
                        logF.write(sid+'\n')

                    # parse XML
                    xml={}
                    if verbose: print('  parsing {}'.format(evOut['xmlfile'][0]))
                    xmlurl=evOut['xmlfile'][1]
                    xmlreq=requests.get(xmlurl)
                    soup=BeautifulSoup(xmlreq.text,'lxml')
                    try:
                        params=soup.what.find_all('param',recursive=False)
                        validXML=True
                    except:
                        print('problem with {}: {}'.format(sid,evOut['xmlfile'][0]))
                    if validXML:
                        for p in params:
                            xml[p.attrs['name']]=p.attrs['value']
                        groups=soup.what.find_all('group',recursive=False)
                        for g in groups:
                            gt=g.attrs['type']
                            xml[gt]={}
                            gparams=g.find_all('param',recursive=False)
                            for gp in gparams:
                                xml[gt][gp.attrs['name']]=gp.attrs['value']
                        if 'GW_SKYMAP' in xml:
                            if 'skymap_fits' in xml['GW_SKYMAP']:
                                mapfile=xml['GW_SKYMAP']['skymap_fits']
                                evOut['mapfile']=[os.path.split(mapfile)[-1],mapfile]
                        evOut['xml']=xml
                nvo-=1
            evOut['meta']['validXML']=validXML

        # create meta data
        evOut['meta']['created_date']=cdate.isot

        results[sid]=evOut

    if logFile:
        logF.close()
    if verbose: print('Retrieved data for {} events'.format(len(results)))

    cat={'meta':{'retrieved':Time.now().isot,'src':service_url},'data':results}

    if export:
        if dirOut==None:
            dirOut='../../data/'
        if fileOut==None:
            fileOut='gracedb.json'
        if verbose: print('Exporting to {}'.format(os.path.join(dirOut,fileOut)))
        fOut=open(os.path.join(dirOut,fileOut),'w')
        json.dump(cat,fOut,indent=indent)
        fOut.close()

    return(cat)

# def getMap(sid,fileout=None,dirOut=None,getLAL=False):
#     if getLAL:
#         filename = 'bayestar.fits'
#     else:
#         filename = 'LALInference.fits'
#     if fileout==None:
#         outFilename = '{}_{}'.format(sid,filename)
#     if dirOut==None:
#         dirOut='../../data/'
#     print('downloading {} for superevent {}'.format(filename,sid))
#     clFits=GraceDbBasic(service_url)
#     fout=open(os.path.join(dirOut,outFilename),'wb')
#     r = clFits.files(sid,filename)
#     fout.write(r.read())
#     fout.close()
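A hypothetical call to getSuperevents, assuming the module-level dependencies used by the function body (GraceDb, astropy's Time, requests, BeautifulSoup, json, os) are imported as above; the paths and date limit are illustrative only:

# Fetch candidate superevents from the last 30 days, export them to JSON,
# and record the IDs of updated events in a log file.
cat = getSuperevents(
    export=True,
    dirOut='data/',
    fileOut='gracedb.json',
    verbose=True,
    datelim=30,
    logFile='gracedb_update.log',
)
print('{} superevents retrieved'.format(len(cat['data'])))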
Example #8
"""
Make the event issues on the gitlab issue tracker from gracedb.
"""

import asimov
from asimov.event import Event
from asimov import config

from asimov import gitlab

from ligo.gracedb.rest import GraceDb, HTTPError

client = GraceDb(service_url=config.get("gracedb", "url"))
r = client.ping()

superevent_iterator = client.superevents('O3B_CBC_CATALOG')
superevent_ids = [
    superevent['superevent_id'] for superevent in superevent_iterator
]

server = gitlab.gitlab.Gitlab(config.get("gitlab", "url"),
                              private_token=config.get("gitlab", "token"))
repository = server.projects.get(config.get("olivaw", "tracking_repository"))

gitlab_events = gitlab.find_events(repository)

super_events = set(superevent_ids) - {event.title for event in gitlab_events}

# Add the new events
for superevent in list(super_events):