Example #1
0
File: voevent.py  Project: Roald87/GraceDB
class VOEventFromEventId(VOEventFromXml):
    """Fetch and parse the newest available VOEvent XML for a GraceDB event id."""

    def __init__(self):
        self._client = GraceDb()
        self.event_id = ""
        super().__init__()

    def get(self, event_id: str):
        """Download the newest retrievable VOEvent for *event_id* and hand the
        XML to the base-class parser.

        Raises:
            HTTPError: if none of the listed VOEvent files can be downloaded.
        """
        self.event_id = event_id
        voevents = self._get_voevents_json(event_id)
        voevents = self._sort_voevents_newest_first(voevents)
        xml = self._try_get_latest_voevent(voevents)
        super().get(xml)

    def _get_voevents_json(self, event_id: str) -> List[Dict]:
        """Return the raw list of VOEvent records (JSON dicts) for the event."""
        response = self._client.voevents(event_id)
        return response.json()["voevents"]

    def _sort_voevents_newest_first(self, voevents_json: List[Dict]) -> List[Dict]:
        """Sort in place by sequence number 'N', highest (newest) first, and return the list."""
        voevents_json.sort(key=lambda voevent: voevent["N"], reverse=True)
        return voevents_json

    def _try_get_latest_voevent(self, voevents: List[Dict[Any, Any]]):
        # For event S190517h the file 'S190517h-3-Initial.xml' was in the
        # voevent file list even though it doesn't exist on the server.
        # Therefore fall back through the list until an existing file is found.
        for voevent in voevents:
            url = voevent["links"]["file"]
            try:
                return self._client.get(url)
            except HTTPError:
                if voevent["N"] == 1:
                    logging.error(f"Can't find VOEvent for event {self.event_id}")
                    # Re-raise the caught HTTPError so the caller keeps the
                    # original response/traceback (the old `raise HTTPError`
                    # raised a fresh, argument-less exception and lost both).
                    raise
                logging.warning(f"Failed to get voevent from {url}")

        return ""
Example #2
0
def getSuperevents(export=False,dirOut=None,fileOut=None,indent=2,verbose=False,knownEvents=None,forceUpdate=False,datelim=999,logFile=None):
    """Get GWTC from GWOSC json files and add parameters.
    Inputs:
        * export [boolean, optional]: set for export to JSON file output. Default=False
        * dirOut [string, optional]: directory to export to (if export=True). Default='../../data/'
        * fileOut [string, optional]: file to export to (if export=True). Default='gracedb.json'
        * indent [integer, optional]: json indent for exported file. Default=2
        * verbose [boolean, optional]: set for verbose output. Default=False
        * knownEvents [object, optional]: timestamps of existing events. Default=None (ignore known events)
        * forceUpdate [boolean, optional]: set to force output of all events, not just updated ones. Default=False
        * datelim [integer, optional]: number of days old to skip events for. Default=999
        * logFile [string, optional]: logFile to output logging to. Default=None (no logging)
    Outputs:
        * [object] object containing data (can be read by gwcatpy.importGWTC)
    """
    # Avoid the shared-mutable-default pitfall: an empty dict means
    # "no known events", exactly as the old `knownEvents={}` default did.
    if knownEvents is None:
        knownEvents = {}
    service_url = 'https://gracedb.ligo.org/api/'
    if verbose: print('Retrieving GraceDB data from {}'.format(service_url))
    client = GraceDb(service_url,force_noauth=True)
    if verbose: print('Limiting to {} days'.format(datelim))
    # Retrieve an iterator for events matching a query.
    events = client.superevents('far < 1.0e-4')
    # For each event in the search results, collect the superevent record
    # plus parsed VOEvent parameters into `results`, keyed by superevent id.
    results = {}
    links = {}

    logF = None
    if logFile:
        if os.path.exists(logFile):
            os.remove(logFile)
            print('Removing log file: {}'.format(logFile))
        else:
            print("Log file doesn't exist: {}".format(logFile))
        print('Writing GraceDB log to: {}'.format(logFile))
        logF=open(logFile,'a')

    for event in events:
        sid = event['superevent_id']
        tEvent=Time(event['t_0'],format='gps')
        tNow=Time.now()
        dtEvent=(tNow-tEvent).jd
        if dtEvent>datelim:
            print('Too old ({} days). Skipping {}'.format(dtEvent,sid))
            continue
        evOut=event
        evOut['meta']={'retrieved':Time.now().isot,'src':service_url}

        voreq=client.voevents(sid)
        voevents=json.loads(voreq.content)
        evOut['voevents']=voevents

        volist=voevents['voevents']
        good_voevents = [voe for voe in volist if voe['voevent_type'] != 'RE']
        retraction_list = [voe for voe in volist if voe['voevent_type'] == 'RE']
        # Fallback creation date: previously `cdate` stayed unbound (NameError
        # below) when a candidate event had no usable voevents.
        cdate=Time.now()
        if len(retraction_list)>0:
            print('Event {} retracted. Skipping'.format(sid))
            cdate=Time(' '.join(retraction_list[-1]['created'].split(' ')[0:2]))
            evOut['meta']['type']='Retraction'
        else:
            evOut['meta']['type']='Candidate'

            # Walk the non-retraction voevents newest-first until one yields
            # parseable XML (some listed files don't exist on the server).
            Ngood=len(good_voevents)
            validXML=False
            nvo=Ngood
            while validXML==False and nvo>0:
                thisvo=good_voevents[nvo-1]
                cdate=Time(' '.join(thisvo['created'].split(' ')[0:2]))
                if sid in knownEvents:
                    tOld=Time(knownEvents[sid])
                    tNew=cdate
                    if tNew <= tOld:
                        if forceUpdate:
                            if verbose:print('forcing update for {}: [{}<={}]'.format(sid,tNew.isot,tOld.isot))
                            update=True
                        else:
                            if verbose:print('no update needed for {}: [{}<={}]'.format(sid, tNew.isot,tOld.isot))
                            update=False
                            validXML=True
                    else:
                        if verbose:print('getting files for {}: [{}>{}]'.format(sid,tNew.isot,tOld.isot))
                        update=True
                else:
                    update=True

                thisvoe_url = thisvo['links']['file']
                vonum=thisvo['N']

                evOut['xmlfile']=[os.path.split(thisvoe_url)[-1],thisvoe_url]
                if update:
                    if logFile:
                        logF.write(sid+'\n')

                    # parse XML
                    xml={}
                    if verbose: print('  parsing {}'.format(evOut['xmlfile'][0]))
                    xmlurl=evOut['xmlfile'][1]
                    xmlreq=requests.get(xmlurl)
                    soup=BeautifulSoup(xmlreq.text,'lxml')
                    try:
                        params=soup.what.find_all('param',recursive=False)
                        validXML=True
                    except Exception:
                        print('problem with {}: {}'.format(sid,evOut['xmlfile'][0]))
                    if validXML:
                        for p in params:
                            xml[p.attrs['name']]=p.attrs['value']
                        groups=soup.what.find_all('group',recursive=False)
                        for g in groups:
                            gt=g.attrs['type']
                            xml[gt]={}
                            # was `recursice=False` (typo): the misspelled kwarg
                            # was silently ignored, making this search recursive.
                            gparams=g.find_all('param',recursive=False)
                            for gp in gparams:
                                xml[gt][gp.attrs['name']]=gp.attrs['value']
                        if 'GW_SKYMAP' in xml:
                            if 'skymap_fits' in xml['GW_SKYMAP']:
                                mapfile=xml['GW_SKYMAP']['skymap_fits']
                                evOut['mapfile']=[os.path.split(mapfile)[-1],mapfile]
                        evOut['xml']=xml
                nvo-=1
            evOut['meta']['validXML']=validXML

        # create meta data
        evOut['meta']['created_date']=cdate.isot

        results[sid]=evOut

    if logFile:
        logF.close()
    if verbose: print('Retrieved data for {} events'.format(len(results)))

    cat={'meta':{'retrieved':Time.now().isot,'src':service_url},'data':results}

    if export:
        if dirOut is None:
            dirOut='../../data/'
        if fileOut is None:
            fileOut='gracedb.json'
        if verbose: print('Exporting to {}'.format(os.path.join(dirOut,fileOut)))
        # Context manager guarantees the export file is closed even if
        # json.dump raises.
        with open(os.path.join(dirOut,fileOut),'w') as fOut:
            json.dump(cat,fOut,indent=indent)

    return(cat)

# def getMap(sid,fileout=None,dirOut=None,getLAL=False):
#     if getLAL:
#         filename = 'bayestar.fits'
#     else:
#         filename = 'LALInference.fits'
#     if fileout==None:
#         outFilename = '{}_{}'.format(sid,filename)
#     if dirOut==None:
#         dirOut='../../data/'
#     print('downloading {} for superevent {}'.format(filename,sid))
#     clFits=GraceDbBasic(service_url)
#     fout=open(os.path.join(dirOut,outFilename),'wb')
#     r = clFits.files(sid,filename)
#     fout.write(r.read())
#     fout.close()