Example #1
File: gracedb.py Project: gwpy/gwsumm
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
         from ligo.gracedb.exceptions import HTTPError
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab'
                   % str(e),)
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % service_url)
     try:
         self.events[None] = list(connection.superevents(self.query))
         self._query_type = "S"
     except HTTPError:
         self.events[None] = list(connection.events(self.query))
         event_method = connection.event
         eventid_name = "graceid"
         self._query_type = "E"
     else:
         event_method = connection.superevent
         eventid_name = "superevent_id"
         for event in self.events[None]:  # get preferred event parameters
             event.update(connection.event(
                 event["preferred_event"],
             ).json())
     vprint("Recovered %d events for query %r\n"
            % (len(self.events[None]), self.query))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(event_method(
                 e[eventid_name]).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
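For reference, a minimal standalone sketch of the same superevent-first, event-fallback query pattern used above; the service URL and query string are placeholders, not values taken from the project:

from ligo.gracedb.rest import GraceDb
from ligo.gracedb.exceptions import HTTPError

client = GraceDb(service_url='https://gracedb.ligo.org/api/')
query = 'External 1240215503 .. 1240301903'  # hypothetical query string
try:
    # try a superevent query first, as in process() above
    events = list(client.superevents(query))
except HTTPError:
    # fall back to a plain event query if the superevent query is rejected
    events = list(client.events(query))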
Example #2
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
         from ligo.gracedb.exceptions import HTTPError
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab' %
                   str(e), )
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % service_url)
     try:
         self.events[None] = list(connection.superevents(self.query))
         self._query_type = "S"
     except HTTPError:
         self.events[None] = list(connection.events(self.query))
         event_method = connection.event
         eventid_name = "graceid"
         self._query_type = "E"
     else:
         event_method = connection.superevent
         eventid_name = "superevent_id"
         for event in self.events[None]:  # get preferred event parameters
             event.update(
                 connection.event(event["preferred_event"], ).json())
     vprint("Recovered %d events for query %r\n" %
            (len(self.events[None]), self.query))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(
                 event_method(e[eventid_name]).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #3
def get_event(graceid, ifos=['H1', 'L1']):
    """
    Get event from GraceDb.
    """
    client = GraceDb()
    event = client.event(graceid).json()
    event_dict = {}
    # Coincident detection attributes
    coinc_insp = event['extra_attributes']['CoincInspiral']
    instruments = event['instruments'].split(',')
    mchirp = coinc_insp['mchirp']
    coinc_end_time = coinc_insp['end_time'] + float(
        coinc_insp['end_time_ns']) * 1e-9
    coinc_template_duration = estimate_duration(mchirp)
    coinc_start_time = coinc_end_time - coinc_template_duration
    coinc_dict = {
        'graceid': graceid,
        'mchirp': mchirp,
        'start_time': coinc_start_time,
        'end_time': coinc_end_time,
        'template_duration': coinc_template_duration
    }
    # Single detection attributes
    for i, ifo in enumerate(instruments):
        sngl_insp = event['extra_attributes']['SingleInspiral'][i]
        end_time = sngl_insp['end_time'] + float(
            sngl_insp['end_time_ns']) * 1e-9
        start_time = end_time - sngl_insp['template_duration']
        sngl_dict = {
            'graceid': graceid,
            'mchirp': mchirp,
            'm1': sngl_insp['mass1'],
            'm2': sngl_insp['mass2'],
            's1z': sngl_insp['spin1z'],
            's2z': sngl_insp['spin2z'],
            'start_time': start_time,
            'end_time': end_time,
            'template_duration': sngl_insp['template_duration']
        }
        event_dict[ifo] = sngl_dict
    missing_ifos = sorted(set(ifos) - set(instruments))
    if len(missing_ifos) == len(ifos):
        # All ifos missing, use coinc attributes only
        for ifo in missing_ifos:
            event_dict[ifo] = coinc_dict.copy()
    elif len(missing_ifos) > 0 and len(missing_ifos) < len(ifos):
        # One but not all ifos are missing; use existing ifo attributes for the missing ones
        existing_ifo = list(set(instruments) - set(missing_ifos))[0]
        for ifo in missing_ifos:
            event_dict[ifo] = event_dict[existing_ifo].copy()
    return event_dict
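A hedged usage sketch of the helper above; the GraceDB ID is a placeholder, and valid GraceDB credentials plus the estimate_duration() helper are assumed to be available:

# hypothetical call; requires network access and GraceDB authentication
params = get_event('G000000', ifos=['H1', 'L1'])
for ifo, attrs in params.items():
    print(ifo, attrs['mchirp'], attrs['start_time'], attrs['end_time'])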
Example #4
File: utils.py Project: yi-fan-wang/bilby
def gracedb_to_json(gracedb,
                    cred=None,
                    service_url='https://gracedb.ligo.org/api/',
                    outdir=None):
    """ Script to download a GraceDB candidate

    Parameters
    ----------
    gracedb: str
        The UID of the GraceDB candidate
    cred:
        Credentials for authentications, see ligo.gracedb.rest.GraceDb
    service_url:
        The url of the GraceDB candidate
        GraceDB 'https://gracedb.ligo.org/api/' (default)
        GraceDB-playground 'https://gracedb-playground.ligo.org/api/'
    outdir: str, optional
        If given, a string identifying the location in which to store the json
    """
    logger.info(
        'Starting routine to download GraceDb candidate {}'.format(gracedb))
    from ligo.gracedb.rest import GraceDb

    logger.info('Initialise client and attempt to download')
    logger.info('Fetching from {}'.format(service_url))
    try:
        client = GraceDb(cred=cred, service_url=service_url)
    except IOError:
        raise ValueError(
            'Failed to authenticate with gracedb: check your X509 '
            'certificate is accessible and valid')
    try:
        candidate = client.event(gracedb)
        logger.info('Successfully downloaded candidate')
    except Exception as e:
        raise ValueError(
            "Unable to obtain GraceDB candidate, exception: {}".format(e))

    json_output = candidate.json()

    if outdir is not None:
        check_directory_exists_and_if_not_mkdir(outdir)
        outfilepath = os.path.join(outdir, '{}.json'.format(gracedb))
        logger.info('Writing candidate to {}'.format(outfilepath))
        with open(outfilepath, 'w') as outfile:
            json.dump(json_output, outfile, indent=2)

    return json_output
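A hedged usage sketch of gracedb_to_json(); the candidate ID is a placeholder and a valid X.509 credential is assumed:

# hypothetical call against the playground server
candidate = gracedb_to_json(
    'G000000',
    service_url='https://gracedb-playground.ligo.org/api/',
    outdir='event_data',
)
print(candidate['pipeline'], candidate['gpstime'])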
Example #5
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab' %
                   str(e), )
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % connection.service_url)
     querystr = '%s %d .. %d' % (self.query, self.start, self.end)
     self.events[None] = list(connection.events(querystr))
     vprint("Recovered %d events for query %r\n" %
            (len(self.events[None]), querystr))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(
                 connection.event(e['graceid']).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #6
 def process(self, config=GWSummConfigParser(), **kwargs):
     try:
         from ligo.gracedb.rest import GraceDb
     except ImportError as e:
         e.args = ('%s, this module is required to generate a GraceDbTab'
                   % str(e),)
         raise
     # query gracedb
     service_url = '%s/api/' % self.url
     connection = GraceDb(service_url=service_url)
     vprint("Connected to gracedb at %s\n" % connection.service_url)
     querystr = '%s %d .. %d' % (self.query, self.start, self.end)
     self.events[None] = list(connection.events(querystr))
     vprint("Recovered %d events for query %r\n"
            % (len(self.events[None]), querystr))
     if 'labels' in self.columns:
         for e in self.events[None]:
             e['labels'] = ', '.join(connection.event(
                 e['graceid']).json()['labels'])
         vprint("Downloaded labels\n")
     return super(GraceDbTab, self).process(config=config, **kwargs)
Example #7
def gracedb_to_json(gracedb, outdir=None):
    """ Script to download a GraceDB candidate

    Parameters
    ----------
    gracedb: str
        The UID of the GraceDB candidate
    outdir: str, optional
        If given, a string identifying the location in which to store the json
    """
    logger.info(
        'Starting routine to download GraceDb candidate {}'.format(gracedb))
    from ligo.gracedb.rest import GraceDb
    import urllib3

    logger.info('Initialise client and attempt to download')
    try:
        client = GraceDb()
    except FileNotFoundError:
        raise ValueError(
            'Failed to authenticate with gracedb: check your X509 '
            'certificate is accessible and valid')
    try:
        candidate = client.event(gracedb)
        logger.info('Successfully downloaded candidate')
    except urllib3.exceptions.HTTPError:
        raise ValueError("No candidate found")

    json_output = candidate.json()

    if outdir is not None:
        check_directory_exists_and_if_not_mkdir(outdir)
        outfilepath = os.path.join(outdir, '{}.json'.format(gracedb))
        logger.info('Writing candidate to {}'.format(outfilepath))
        with open(outfilepath, 'w') as outfile:
            json.dump(json_output, outfile, indent=2)

    return json_output
    with open('GraceDb_ID-catalog_ID.json') as dicfile:
        dic_id = json.load(dicfile)
    return (dic_id[superevent_id])


parser = argparse.ArgumentParser()
parser.add_argument('--event', help='GraceDB event ID.')
args = parser.parse_args()

directories = ['xml_files', 'fits_files', 'event_data']
for dir in directories:
    if not os.path.exists(dir):
        os.mkdir(dir)

g = GraceDb()
ev = g.event(args.event).json()
catalog_id = gracedb_to_catalog_id(ev['superevent'])
pipeline = ev['pipeline']
mchirp = ev['extra_attributes']['CoincInspiral']['mchirp']

# get effective distances and coincident SNR for PyCBC events
if pipeline == 'pycbc':
    coinc_snr = ev['extra_attributes']['CoincInspiral']['snr']
    if args.event == 'G347304':
        eff_dist_dic = {'H1': 487.567, 'L1': 351.244}
    else:
        ifos = [
            ifos['ifo'] for ifos in ev['extra_attributes']['SingleInspiral']
        ]
        eff_dist = [
            ifos['eff_distance'] if 'eff_distance' in ifos else '-'
    alert = sys.stdin.read()

    if opts.Verbose:
        print "    %s"%(alert)
    alert = json.loads(alert)

    if alert['alert_type'] != 'new': ### ignore alerts that aren't new
        if opts.Verbose:
            print "ignoring alert"
        sys.exit(0)

    opts.graceid = alert['uid']

### extract things about the event
gdb = GraceDb( gracedb_url )
event = gdb.event( opts.graceid ).json()

gps = event['gpstime']
far = event['far']
if farThr < far:
    if opts.Verbose:
        print "ignoring alert due to high FAR (%.3e > %.3e)"%(far, farThr)
    sys.exit(0)

if opts.verbose:
    print "generating OmegaScans for : %s\n    gps : %.6f"%(opts.graceid, gps)

#-------------------------------------------------

### report to GraceDB that we've started follow-up
if upload_or_verbose:
    ### set cert and key
    os.environ['X509_USER_CERT'] = robot_cert
    os.environ['X509_USER_KEY'] = robot_key

### initialize instance of gracedb interface
if config.has_option("gdb general", "gdb_url"):
    gdb_url = config.get('gdb general', 'gdb_url')
    gracedb = GraceDb(gdb_url)
else:
    gdb_url = None
    gracedb = GraceDb()

### connect to gracedb and get event gps time
try:
    gdb_entry = json.loads(gracedb.event(gdb_id).read())
except:
    traceback.print_exc()
    logger.info("    Error: Connection to GraceDB failed!")
    logger.info("    Exiting.")
    sys.exit(1)

#========================
# get parameters about event type from gracedb
#========================
group = gdb_entry['group']
pipeline = gdb_entry['pipeline']
if gdb_entry.has_key('search'):
    search = gdb_entry['search']
    event_type = "%s_%s_%s" % (group, pipeline, search)
else:
Example #11
def create(name, oldname=None, gid=None, superevent=None, repo=None):
    """
    Create a new event record in the ledger.

    Parameters
    ----------
    name : str
       The name of the event to be recorded in the issue tracker
    oldname : str, optional
        The old superevent ID of the event.
    gid : str, optional
        The GraceDB event ID to pull event data from.
    superevent : str, optional
        The ID of the superevent to be used from GraceDB
    repo : str, optional
        The git repository to associate with the event.
    """
    import pathlib

    if gid or superevent:
        from ligo.gracedb.rest import GraceDb, HTTPError
        client = GraceDb(service_url=config.get("gracedb", "url"))
        r = client.ping()
    if superevent:
        data = client.superevent(superevent).json()
        event_data = client.event(data['preferred_event']).json()
        gid = data['preferred_event']
        interferometers = event_data['instruments'].split(",")
    elif gid:
        event_data = client.event(gid).json()
        interferometers = event_data['instruments'].split(",")
    else:
        event_data = None
        interferometers = []

    if gid or superevent:
        event_url = f"{config.get('gracedb', 'url')}/events/{gid}/view/"

    if not repo:
        repo = None
        #repo = f"[email protected]:pe/O3/{name}"

    event = Event(
        name=name,
        repository=repo,
        calibration={},
        interferometers=interferometers,
    )

    if oldname:
        event.meta['old superevent'] = oldname
    if gid:
        event.meta['event time'] = event_data['gpstime']
        event.meta['gid'] = gid

    working_dir = os.path.join(config.get('general', 'rundir_default'), name)

    event.meta['working directory'] = working_dir
    pathlib.Path(working_dir).mkdir(parents=True, exist_ok=True)

    if config.get("ledger", "engine") == "gitlab":
        _, repository = connect_gitlab()
        from pkg_resources import resource_filename
        issue_template = resource_filename('asimov', 'gitlabissue.md')
        gitlab.EventIssue.create_issue(repository,
                                       event,
                                       issue_template=issue_template)

    elif config.get("ledger", "engine") == "yamlfile":
        ledger = Ledger(config.get("ledger", "location"))
        ledger.add_event(event)
        ledger.save()
    ### set cert and key
    os.environ['X509_USER_CERT'] = robot_cert
    os.environ['X509_USER_KEY'] = robot_key


### initialize instance of gracedb interface
if config.has_option("gdb general","gdb_url"):
    gdb_url = config.get('gdb general', 'gdb_url')
    gracedb = GraceDb( gdb_url )
else:
    gdb_url = None
    gracedb = GraceDb()

### connect to gracedb and get event gps time
try: 
    gdb_entry = json.loads(gracedb.event(gdb_id).read())
except:
    traceback.print_exc()
    logger.info("    Error: Connection to GraceDB failed!")
    logger.info("    Exiting.")
    sys.exit(1)

#========================
# get parameters about event type from gracedb
#========================
group = gdb_entry['group']
pipeline = gdb_entry['pipeline']
if gdb_entry.has_key('search'):
    search = gdb_entry['search']
    event_type = "%s_%s_%s"%(group, pipeline, search)
else:
Example #13
config.read(args[0])

#-------------------------------------------------

### figure out where we're writing segment files locally
output_dir = config.get('general', 'output-dir')
if not os.path.exists(output_dir):
    os.makedirs(output_dir)

### find which GraceDb we're using and pull out parameters of this event
if config.has_option('general', 'gracedb_url'):
    gracedb = GraceDb(config.get('general', 'gracedb_url'))
else:
    gracedb = GraceDb()

event = gracedb.event(opts.graceid).json()  ### query for this event
gpstime = float(event['gpstime'])
if opts.verbose:
    print "processing %s -> %.6f" % (opts.graceid, gpstime)

### find which segDB we're using
if config.has_option('general', 'segdb-url'):
    segdb_url = config.get('general', 'segdb-url')
else:
    segdb_url = 'https://segments.ligo.org'
if opts.verbose:
    print "searching for segments in : %s" % segdb_url

### figure out global tags and queryTags
g_tags = config.get('general', 'tags').split()
g_qtags = config.get('general', 'queryTags').split()
             'uid'        : graceid, ### generate an alert from graceid? Should already be a dictionary by this point...
             'object'     : {'group'    : 'Test',
                             'pipeline' : 'gstlal',
                            }, ### omit the search
            }

    #------- Notify
    name = 'notify'
    tests.append( (name, alert) )

#------------------------

if opts.basic:

    ### set up inputs
    event = gdb.event(graceid).json()
    alert = {
             'alert_type' : 'new',
             'uid'        : graceid, ### generate an alert from graceid? Should already be a dictionary by this point...
             'object'     : {'group'    : event['group'],
                             'pipeline' : event['pipeline'],
                            },
            }

    #------- EventCreation
    name = 'event creation'
#    raise NotImplementedError('need to test event creation for each possible pipeline separately')
    """
    EventCreationItem
        cWBTriggerCheck
        oLIBTriggerCheck
Example #15
config.read( args[0] )

#-------------------------------------------------

### figure out where we're writing segment files locally
output_dir = config.get('general', 'output-dir')
if not os.path.exists(output_dir):
    os.makedirs( output_dir )

### find which GraceDb we're using and pull out parameters of this event
if config.has_option('general', 'gracedb_url'):
    gracedb = GraceDb( config.get('general', 'gracedb_url') )
else:
    gracedb = GraceDb()

event = gracedb.event( opts.graceid ).json() ### query for this event
gpstime = float(event['gpstime'])
if opts.verbose:
    print "processing %s -> %.6f"%(opts.graceid, gpstime)

### find which segDB we're using
if config.has_option('general', 'segdb-url'):
    segdb_url = config.get('general', 'segdb-url')
else:
    segdb_url = 'https://segments.ligo.org'
if opts.verbose:
    print "searching for segments in : %s"%segdb_url

### figure out global tags and queryTags
g_tags  = config.get('general', 'tags').split()
g_qtags = config.get('general', 'queryTags').split()
Example #16
confs = config.get('stats', 'conf').split()
areas = config.get('stats', 'area').split()

#-------------------------------------------------
# BEGIN THE ANALYSIS
#-------------------------------------------------

### download the FITS files and run snglFITS

localnames = []
for graceid in opts.graceid:
    if opts.verbose:
        print('downloading FITS files associated with %s'%graceid)

    ### figure out which IFOs participated
    ifos = gracedb.event( graceid ).json()['instruments'].split(',')

    ### format like I like them in this repo...
    ifos = [ifo[0] for ifo in ifos] ### eg: H1 -> H

    for filename in [filename for filename in gracedb.files(graceid).json().keys() if filename.endswith('.fits') or filename.endswith('.fits.gz')]:
        localname = os.path.join(outdir, "%s-%s"%(graceid, filename))
        if opts.verbose:
            print('downloading %s:%s -> %s'%(graceid, filename, localname) )        

        file_obj = open(localname, 'w')
        file_obj.write( gracedb.files( graceid, filename ).read() )
        file_obj.close()

        localnames.append( localname )
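The download loop above could also be sketched as a standalone helper with a context manager; opening the output file in binary mode is an assumption made here so that gzipped FITS payloads are written unmodified:

import os

def download_fits(gracedb, graceid, outdir):
    """Hypothetical helper: download every FITS file attached to a GraceDB event."""
    localnames = []
    for filename in gracedb.files(graceid).json().keys():
        if not filename.endswith(('.fits', '.fits.gz')):
            continue
        localname = os.path.join(outdir, '%s-%s' % (graceid, filename))
        with open(localname, 'wb') as file_obj:  # binary mode for gzipped sky maps
            file_obj.write(gracedb.files(graceid, filename).read())
        localnames.append(localname)
    return localnames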
Example #17
class GraceDbTab(get_tab('default')):
    """Custom tab displaying a summary of GraceDb results.
    """
    type = 'gracedb'

    def __init__(self, name, url='https://gracedb.ligo.org',
                 query='External', columns=['gpstime', 'date', 'pipeline'],
                 headers=['GPS time', 'UTC time', 'Source'], rank='gpstime',
                 **kwargs):
        super(GraceDbTab, self).__init__(name, **kwargs)
        self.url = url
        self.query = '{} {} .. {}'.format(
            query,
            int(self.start),
            int(self.end),
        )
        self.events = dict()
        self.headers = headers
        self.columns = columns
        self.rank = rank

    @classmethod
    def from_ini(cls, config, section, **kwargs):
        """Define a new `GraceDbTab` from a `ConfigParser`.
        """
        for key in ['url', 'query', 'rank']:
            try:
                kwargs.setdefault(
                    key, re_quote.sub('', config.get(section, key)))
            except NoOptionError:
                pass
        for key in ['columns', 'headers']:
            try:
                raw = config.get(section, key)
                val = eval(raw)
            except NoOptionError:
                continue
            except (SyntaxError, NameError, TypeError):
                val = [x.strip().rstrip() for x in raw.split(',')]
            kwargs.setdefault(key, val)
        return super(GraceDbTab, cls).from_ini(config, section, **kwargs)

    def process(self, config=GWSummConfigParser(), **kwargs):
        try:
            from ligo.gracedb.rest import GraceDb
            from ligo.gracedb.exceptions import HTTPError
        except ImportError as e:
            e.args = ('%s, this module is required to generate a GraceDbTab'
                      % str(e),)
            raise
        # query gracedb
        service_url = '%s/api/' % self.url
        self.connection = GraceDb(service_url=service_url)
        self.exception = HTTPError
        vprint('Connected to gracedb at %s\n' % service_url)
        try:
            self.events[None] = list(self.connection.superevents(self.query))
            self._query_type = 'S'
        except self.exception:
            self.events[None] = list(self.connection.events(self.query))
            event_method = self.connection.event
            eventid_name = 'graceid'
            self._query_type = 'E'
        else:
            event_method = self.connection.superevent
            eventid_name = 'superevent_id'
            for event in self.events[None]:  # get preferred event parameters
                event.update(self.connection.event(
                    event['preferred_event'],
                ).json())
        vprint('Recovered %d events for query %r\n'
               % (len(self.events[None]), self.query))
        if 'labels' in self.columns:
            for e in self.events[None]:
                e['labels'] = ', '.join(event_method(
                    e[eventid_name]).json()['labels'])
            vprint('Downloaded labels\n')
        return super(GraceDbTab, self).process(config=config, **kwargs)

    def process_state(self, state, **kwargs):
        def in_state(event):
            return int(event['gpstime']) in state.active
        self.events[str(state)] = list(filter(in_state, self.events[None]))
        reverse = self.rank not in ['gpstime', 'far']
        self.events[str(state)].sort(key=lambda x: x[self.rank],
                                     reverse=reverse)
        vprint('    Selected %d events\n' % len(self.events[str(state)]))

    def write_state_html(self, state):
        """Write the '#main' HTML content for this `GraceDbTab`.
        """
        page = markup.page()
        # build table of events
        page.table(class_='table table-sm table-hover table-striped mt-2',
                   id_='gracedb')
        # thead
        page.thead()
        page.tr()
        for head in self.headers:
            page.th(head)
        page.tr.close()
        page.thead.close()
        # tbody
        page.tbody()
        for event in sorted(self.events[str(state)],
                            key=lambda e: e['gpstime']):
            context = None
            try:
                labs = set(event['labels'].split(', '))
            except (AttributeError, KeyError):
                pass
            else:
                for ctx, labels in LABELS.items():
                    if (
                            ctx == 'success' and labs.union(labels) == labs or
                            labs.intersection(labels)
                    ):
                        context = ctx
                        break
            if context:
                page.tr(class_='table-%s' % context)
            else:
                page.tr()
            for col in self.columns:
                if col == 'date':
                    gpskey = 't_0' if 'superevent_id' in event else 'gpstime'
                    page.td(from_gps(event[gpskey]).strftime(
                        '%B %d %Y %H:%M:%S.%f',
                    )[:-3])
                    continue
                elif col.lower() == 'dqr' and 'superevent_id' in event:
                    page.td()
                    sid = event['superevent_id']
                    href = ('{0}/apiweb/superevents/{1}/files/'
                            'dqr.html'.format(self.url, sid))
                    try:
                        self.connection.get(href)
                    except self.exception:
                        page.p('&mdash;')
                    else:
                        title = 'Data-quality report for {}'.format(sid)
                        page.a('DQR', title=title, href=href, target='_blank',
                               rel='external', class_='btn btn-info btn-sm')
                    page.td.close()
                    continue
                elif col.lower() == 'dqr':
                    page.td()
                    page.p('&mdash;')
                    page.td.close()
                    continue
                try:
                    v = event[col]
                except KeyError:
                    try:
                        v = event['extra_attributes']['GRB'][col]
                        assert v is not None
                    except (KeyError, AssertionError):
                        page.td('-')
                        continue
                if col in ('graceid', 'superevent_id', 'preferred_event'):
                    page.td()
                    tag = 'superevents' if col == 'superevent_id' else 'events'
                    href = '{}/{}/view/{}'.format(self.url, tag, v)
                    title = 'GraceDB {} page for {}'.format(tag[:-1], v)
                    page.a(v, title=title, href=href, target='_blank',
                           rel='external', class_='btn btn-info btn-sm')
                    page.td.close()
                elif col not in ('gpstime', 't_0') and isinstance(v, float):
                    page.td('%.3g' % v)
                elif col == 'labels':
                    page.td(', '.join(
                        ['<samp>%s</samp>' % l for l in sorted(labs)]))
                else:
                    page.td(str(v))
            page.tr.close()
        page.tbody.close()
        page.table.close()
        if len(self.events[str(state)]) == 0:
            page.p('No events were recovered for this state.')
        else:
            page.button(
                'Export to CSV',
                class_='btn btn-outline-secondary btn-table mt-2',
                **{'data-table-id': 'gracedb', 'data-filename': 'gracedb.csv'})

        # query doc
        qurl = '{}/search/?query={}&query_type={}&results_format=S'.format(
            self.url,
            self.query.replace(' ', '+'),
            getattr(self, '_query_type', 'E'),
        )
        qlink = markup.oneliner.a(
            'here',
            href=qurl,
            target='_blank',
        )
        page.p('The above table was generated from a query to {} with the '
               'form <code>{}</code>. To view the results of the same query '
               'via the GraceDB web interface, click {}.'.format(
                   self.url, self.query, qlink), class_='mt-2')

        # reference the labelling
        page.h4('Labelling reference')
        page.p('Events in the above table may have a context based on '
               'its labels as follows:')
        for c, labels in LABELS.items():
            c = (c if c == 'warning' else '%s text-white' % c)
            labstr = ', '.join(['<samp>%s</samp>' % l for l in sorted(labels)])
            page.p(labstr, class_='bg-%s pl-2' % c, style='width: auto;')

        # write to file
        idx = self.states.index(state)
        with open(self.frames[idx], 'w') as fobj:
            fobj.write(str(page))
        return self.frames[idx]
Example #18
    sys.exit(0)
elif 'minimum glitch-FAP for' not in alert['object']['comment']:
    print "not a iDQ glitch-FAP update"
    print alert['object']['comment']
    sys.exit(0)

gid = alert['uid']
print "graceid : " + gid

labels = [label['name'] for label in gdb.labels(gid).json()['labels']]
print "labels : " + ", ".join(labels)
if "DQV" in labels:
    print "already labeled DQV"
    sys.exit(0)

event = gdb.event(gid).json()
print "pipeline : " + event['pipeline']
if event['pipeline'].lower() not in allowed_pipelines:
    print "  not allowed to label this pipeline"
    sys.exit(1)

logs = gdb.logs(gid).json()['log']
result = dict((ifo, 1) for ifo in ifos)
for log in logs:
    comment = log['comment']
    if "minimum glitch-FAP for" in comment:
        gFAP = float(comment.split()[-1])
        for ifo in ifos:
            if ifo in comment:
                result[ifo] = gFAP
                break
        'object': {
            'group': 'Test',
            'pipeline': 'gstlal',
        },  ### omit the search
    }

    #------- Notify
    name = 'notify'
    tests.append((name, alert))

#------------------------

if opts.basic:

    ### set up inputs
    event = gdb.event(graceid).json()
    alert = {
        'alert_type': 'new',
        'uid':
        graceid,  ### generate an alert from graceid? Should already be a dictionary by this point...
        'object': {
            'group': event['group'],
            'pipeline': event['pipeline'],
        },
    }

    #------- EventCreation
    name = 'event creation'
    #    raise NotImplementedError('need to test event creation for each possible pipeline separately')
    """
    EventCreationItem
Example #20
    superevent['superevent_id'] for superevent in superevent_iterator
]

server = gitlab.gitlab.Gitlab(config.get("gitlab", "url"),
                              private_token=config.get("gitlab", "token"))
repository = server.projects.get(config.get("olivaw", "tracking_repository"))

gitlab_events = gitlab.find_events(repository)

super_events = set(superevent_ids) - {event.title for event in gitlab_events}

# Add the new events
for superevent in list(super_events):

    data = client.superevent(superevent).json()
    event_data = client.event(data['preferred_event']).json()

    event_url = f"https://catalog-dev.ligo.org/events/{data['preferred_event']}/view/"

    event = Event(name=superevent,
                  repository=f"[email protected]:pe/O3/{superevent}",
                  gid=data['preferred_event'],
                  gid_url=event_url,
                  calibration={},
                  interferometers=event_data['instruments'].split(","),
                  disable_repo=True)
    gitlab.EventIssue.create_issue(repository,
                                   event,
                                   issue_template="scripts/outline.md")