def processevent(event='', pretend=True):
    """
    Called externally by the voevent_handler script when a new VOEvent is received. Return True if
    the event was parsed by this handler, False if it was another type of event that should be
    examined by a different handler.

    :param event: A string containing the XML string in VOEvent format
    :param pretend: Boolean, True if we don't want to actually schedule the observations.
    :return: Boolean, True if this handler processed this event, False to pass it to another handler function.
    """
    if sys.version_info.major == 2:
        # event arrives as a unicode string but loads requires a non-unicode string.
        v = voeventparse.loads(str(event))
    else:
        v = voeventparse.loads(event.encode('latin-1'))
    log.info("Working on: %s" % v.attrib['ivorn'])
    isgw = is_gw(v)
    log.debug("GW? {0}".format(isgw))
    if isgw:
        handle_gw(v, pretend=(pretend or GW_PRETEND))
    log.info("Finished.")
    return isgw  # True if we're handling this event, False if we're rejecting it
def processevent(event='', pretend=True):
    """
    Called externally by the voevent_handler script when a new VOEvent is received. Return True if
    the event was parsed by this handler, False if it was another type of event that should be
    examined by a different handler.

    :param event: A string containing the XML string in VOEvent format
    :param pretend: Boolean, True if we don't want to actually schedule the observations.
    :return: Boolean, True if this handler processed this event, False to pass it to another handler function.
    """
    if sys.version_info.major == 2:
        # event arrives as a unicode string but loads requires a non-unicode string.
        v = voeventparse.loads(str(event))
    else:
        v = voeventparse.loads(event.encode('latin-1'))

    # only respond to SWIFT and MAXI events
    ivorn = v.attrib['ivorn']
    if not (('SWIFT' in ivorn) or ('MAXI' in ivorn)):
        return False

    log.info("Working on: %s" % ivorn)
    isflarestar = is_flarestar(v)
    log.debug("Flare Star ? {0}".format(isflarestar))
    if isflarestar:
        handle_flarestar(v, pretend=pretend)
    log.info("Finished.")
    return isflarestar  # True if we're handling this event, False if we're rejecting it
def main():
    args = handle_args()
    logger = setup_logging(args.logfile_path)
    dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params, args.dbname)
    if not db_utils.check_database_exists(dburl):
        raise RuntimeError("Database not found")

    if six.PY3:
        stdin = sys.stdin.buffer.read()
    else:
        stdin = sys.stdin.read()  # Py2
    v = voeventparse.loads(stdin)
    session = Session(bind=create_engine(dburl))
    try:
        conv.safe_insert_voevent(session, v)
        session.commit()
    except:
        logger.exception(
            "Could not insert packet with ivorn {} into {}".format(
                v.attrib['ivorn'], args.dbname))
    logger.info("Loaded packet with ivorn {} into {}".format(
        v.attrib['ivorn'], args.dbname))
    return 0
def heartbeat_packets(start=default_start_dt,
                      interval=timedelta(minutes=15),
                      n_packets=24,
                      role=vp.definitions.roles.test):
    """
    Create Voevents with varying ivorns and values of ``Who.Date``.

    Args:
        start(datetime.datetime): Start time.
        interval(datetime.timedelta): Heartbeat interval.
        n_packets(int): Number of packets to generate.
        role(str): VOEvent role to assign (defaults to 'test').

    Returns:
        packets: A list of VOEvent packets.
    """
    packets = []
    for ts in packetgen.timerange(start, start + n_packets * interval, interval):
        packets.append(
            packetgen.create_test_packet(testpacket_identity,
                                         author_date=ts,
                                         role=role))
    # NB Whitespacing of loaded (parsed) vs custom-built VOEvents is different:
    # http://lxml.de/FAQ.html#why-doesn-t-the-pretty-print-option-reformat-my-xml-output
    # So, to enable exact ``dumps`` matching (for equality testing)
    # we take the fake voevents on a save/load round-trip before we return
    packets = [vp.loads(vp.dumps(v)) for v in packets]
    return packets
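# A minimal usage sketch (illustrative, not from the source): generate the
# default run of 24 fake heartbeat packets, 15 minutes apart, and list their
# IVORNs. Assumes the module-level helpers used above (packetgen,
# default_start_dt, testpacket_identity) are importable from the same module.
def example_list_heartbeat_ivorns():
    packets = heartbeat_packets()
    print(len(packets))  # 24 with the default arguments
    for v in packets:
        print(v.attrib['ivorn'])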
def create_swift_feeds(hash_cache_path, look_back_ndays):
    """
    Create swift feeds for recent BAT_GRB alerts
    """
    s = session_registry()
    logger.debug("Checking database {} for recent Swift BAT GRB alerts".format(
        s.bind.engine.url.database))
    now = datetime.datetime.utcnow()
    q = s.query(models.Voevent).filter(
        models.Voevent.stream == "nasa.gsfc.gcn/SWIFT",
        models.Voevent.ivorn.like('%{}%'.format("BAT_GRB")),
        models.Voevent.role == 'observation')
    if look_back_ndays is not None:
        threshold_tstamp = now - datetime.timedelta(days=look_back_ndays)
        q = q.filter(models.Voevent.author_datetime > threshold_tstamp)
    feeds = []
    for entry in q:
        v = vp.loads(entry.xml.encode())
        alert = BatGrb(v)
        if not (alert.startracker_lost() or alert.tgt_in_flight_cat()
                or alert.tgt_in_ground_cat()):
            feeds.append(SwiftFeed(v, hash_cache_path=hash_cache_path))
            logger.debug("Created feed for packet {}".format(v.attrib['ivorn']))
    return feeds
def test_tarball_round_trip(named_temporary_file, fixture_db_session):
    voevent_etrees = fake.heartbeat_packets()
    # with open(assasn_non_ascii_packet_filepath, 'rb') as f:
    #     voevent_etrees.append(vp.load(f))
    s = fixture_db_session
    for etree in voevent_etrees:
        s.add(models.Voevent.from_etree(etree))
    s.flush()
    voevent_dbrows = s.query(models.Voevent.ivorn, models.Voevent.xml).all()
    assert len(voevent_dbrows) == len(voevent_etrees)
    voevent_rowgen = list(models.Voevent.from_etree(v) for v in voevent_etrees)
    assert voevent_dbrows[0].ivorn == voevent_rowgen[0].ivorn
    assert voevent_dbrows[0].xml == voevent_rowgen[0].xml
    assert type(voevent_dbrows[0].xml) == type(voevent_rowgen[0].xml)
    assert type(voevent_rowgen[0].xml) == six.binary_type

    # Therefore it's crucial to test with an actual round-tripped dataset,
    # the 'voevent_dbrows' from above:
    fname = named_temporary_file.name
    filestore.write_tarball(voevent_dbrows, fname)

    loaded_voevents = [
        vp.loads(s.xml) for s in filestore.tarfile_xml_generator(fname)
    ]

    def to_strings(voeventlist):
        return [vp.dumps(v) for v in voeventlist]

    def to_ivorn(voeventlist):
        return [v.attrib['ivorn'] for v in voeventlist]

    assert (to_ivorn(voevent_etrees) == to_ivorn(loaded_voevents))
    assert (to_strings(voevent_etrees) == to_strings(loaded_voevents))
def main():
    if six.PY2:
        stdin = sys.stdin.read()
    else:
        # Py3:
        stdin = sys.stdin.buffer.read()
    v = voeventparse.loads(stdin)
    handle_voevent(v)
    return 0
def test_load_of_voe_v2(self):
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        vff = vp.load(f)
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        vfs = vp.loads(f.read())
    self.assertEqual(objectify.dump(vff), objectify.dump(vfs))
    self.assertEqual(vfs.tag, 'VOEvent')
    self.assertEqual(vfs.attrib['ivorn'],
                     'ivo://nasa.gsfc.gcn/SWIFT#BAT_GRB_Pos_532871-729')
def _get_packet_type(payload):
    """Get the packet type from a VOEvent payload."""
    voevent = vp.loads(payload)
    top_params = vp.get_toplevel_params(voevent)
    try:
        packet_type = int(top_params['Packet_Type']['value'])
    except KeyError:
        # If it's a VOEvent but not a GCN it won't have a packet type (e.g. Gaia alerts)
        packet_type = None
    return packet_type
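# A minimal usage sketch (illustrative, not from the source): read a VOEvent
# XML file from disk and report its GCN packet type. The file path is a
# placeholder, and _get_packet_type is assumed to be available at module level
# as shown above.
def example_print_packet_type(path='example_voevent.xml'):
    with open(path, 'rb') as f:
        payload = f.read()
    # Returns None for VOEvents that are not GCN notices (e.g. Gaia alerts).
    print('GCN Packet_Type:', _get_packet_type(payload))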
def process_voevent_celerytask(bytestring):
    """
    Process the voevent using the 'voevent_logic' i.e. the function
    defined in `fourpisky.scripts.process_voevent`.
    """
    v = voeventparse.loads(bytestring)
    logger.debug("Load for processing: " + v.attrib['ivorn'])
    voevent_logic(v)
    logger.info("Processed:" + v.attrib['ivorn'])
def __init__(self, payload):
    # Store the creation time
    self.creation_time = Time.now()

    # Store the payload
    self.payload = payload

    # Load the payload using voeventparse
    self.voevent = vp.loads(self.payload)

    # Get key attributes:
    # IVORN
    self.ivorn = self.voevent.attrib['ivorn']
    # Using the official IVOA terms (ivo://authorityID/resourceKey#local_ID):
    self.authorityID = self.ivorn.split('/')[2]
    self.resourceKey = self.ivorn.split('/')[3].split('#')[0]
    self.local_ID = self.ivorn.split('/')[3].split('#')[1]
    # Using some easier terms to understand:
    self.authority = self.authorityID
    self.publisher = self.resourceKey
    self.title = self.local_ID

    # Role (observation/test/...)
    self.role = self.voevent.attrib['role']

    # Event time
    event_time = vp.convenience.get_event_time_as_utc(self.voevent, index=0)
    if event_time:
        self.time = Time(event_time)
    else:
        # Some test events don't have times
        self.time = None

    # Contact email
    try:
        self.contact = self.voevent.Who.Author.contactEmail
    except AttributeError:
        self.contact = None

    # GCN packet type
    self.packet_type = self._get_packet_type(payload)

    # Set default attributes
    # The subclasses for "interesting" events will overwrite these
    self.notice = 'Unknown'
    self.type = 'Unknown'
    self.source = 'Unknown'
    self.position = None
    self.coord = None
    self.target = None
    self.skymap = None
    self.properties = {}
    self.strategy = None
def test_event(filepath='../test_events/MS190410a-1-Preliminary.xml',
               test_time=Time('2018-4-03 12:00:00')):
    pretend = True
    log.info('Running test event from %s' % (filepath))
    log.info('Mock time: %s' % (test_time))

    payload = astropy.utils.data.get_file_contents(filepath)
    if sys.version_info.major == 2:
        # event arrives as a unicode string but loads requires a non-unicode string.
        v = voeventparse.loads(str(payload))
    else:
        v = voeventparse.loads(payload.encode('latin-1'))

    params = {
        elem.attrib['name']: elem.attrib['value']
        for elem in v.iterfind('.//Param')
    }
    return
def main():
    stdin = sys.stdin.read()
    v = voeventparse.loads(stdin.encode())
    ivorn = v.attrib['ivorn']

    if ('SWIFT' in ivorn):
        if is_grb(v):
            handle_grb(v)

    if ('SWIFT' in ivorn) or ('MAXI' in ivorn):
        if is_flare_star(v):
            handle_flare_star(v)

    return 0
def QueueWorker():
    """
    Worker thread to process incoming message packets in the EventQueue. It is spawned on startup,
    and runs continuously, blocking on EventQueue.get() if there's nothing to process. When an item
    is 'put' on the queue, the EventQueue.get() returns and the event is processed.

    Only exits if the global EXITING is set to True externally, to trigger a clean shutdown.
    """
    global EXITING
    global IVORN_LIST
    try:
        while not EXITING:
            eventxml = EventQueue.get()
            if sys.version_info.major == 2:
                # event arrives as a unicode string but loads requires a non-unicode string.
                v = voeventparse.loads(str(eventxml))
            else:
                v = voeventparse.loads(eventxml.encode('latin-1'))
            if v.attrib['ivorn'] in IVORN_LIST:
                DEFAULTLOGGER.info(
                    "Already seen event %s, discarding. Current queue size is %d" %
                    (v.attrib['ivorn'], EventQueue.qsize()))
            else:
                DEFAULTLOGGER.info(
                    "Processing event %s. Current queue size is %d" %
                    (v.attrib['ivorn'], EventQueue.qsize()))
                IVORN_LIST.append(v.attrib['ivorn'])
                for hfunc in EVENTHANDLERS:
                    handled = hfunc(event=eventxml, pretend=PRETEND)
                    if handled:  # One of the handlers accepted this event
                        break  # Don't try any more event handlers.
            EventQueue.task_done()
    except Exception:
        DEFAULTLOGGER.error(
            "Exception in QueueWorker. Restarting in 10 sec: %s" %
            (traceback.format_exc(),))
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=EXCEPTION_NOTIFY_LIST,
                            subject='Exception in QueueWorker loop',
                            msg_text=EXCEPTION_EMAIL_TEMPLATE % traceback.format_exc())
def load_from_tarfile(session, tarfile_path, check_for_duplicates,
                      pkts_per_commit=1000):
    """
    Iterate through xml files in a tarball and attempt to load into database.

    .. warning:: Very slow with duplicate checking enabled.

    Returns:
        tuple: (n_parsed, n_loaded) - Total number of packets parsed from the
            tarball, and number successfully loaded.
    """
    tf_stream = tarfile_xml_generator(tarfile_path)
    logger.info("Loading: " + tarfile_path)
    n_parsed = 0
    n_loaded = 0
    for tarinf in tf_stream:
        try:
            v = vp.loads(tarinf.xml, check_version=False)
            if v.attrib['version'] != '2.0':
                logger.debug(
                    'Packet: {} is not VO-schema version 2.0.'.format(
                        tarinf.name))
            n_parsed += 1
        except:
            logger.exception('Error loading file {}, skipping'.format(
                tarinf.name))
            continue
        try:
            new_row = Voevent.from_etree(v)
            if check_for_duplicates:
                if ivorn_present(session, new_row.ivorn):
                    logger.debug(
                        "Ignoring duplicate ivorn: {} in file {}".format(
                            new_row.ivorn, tarinf.name))
                    continue
            session.add(new_row)
            n_loaded += 1
        except:
            logger.exception(
                'Error converting file {} to database row, skipping'.format(
                    tarinf.name))
            continue
        if n_loaded % pkts_per_commit == 0:
            session.commit()
    session.commit()
    logger.info("Successfully parsed {} packets, of which loaded {}.".format(
        n_parsed, n_loaded))
    return n_parsed, n_loaded
def __call__(self, event):
    """
    Add an event to the celery processing queue
    """
    log.debug("Passing to voeventdb: %s" % (event.attrib['ivorn'],))
    session = Session(bind=dbengine)
    try:
        v = voeventparse.loads(event.text)
        dbconv.safe_insert_voevent(session, v)
        session.commit()
    except Exception as e:
        log.warn("Could not insert packet with ivorn {} into database {}".format(
            v.attrib['ivorn'], voeventdb_dbname))
        self.deferred.errback(e)
    log.info("Loaded {} into database {}".format(
        v.attrib['ivorn'], voeventdb_dbname))
def process_gcn(payload, root): """ Process gcn function """ # Print the alert print('Got VOEvent:') print(payload) path = './temp/' time_r = time.time() filenameout = path + 'event_' + str(time_r) + '.json' print("filenameout: ", filenameout) # save the input xml file file_to_save = open(filenameout, 'wb') file_to_save.write(payload) file_to_save.close() voevent = vp.loads(payload)
def test_namespace_variations(self):
    # NB, not enclosing root element in a namespace is invalid under schema
    # But this has been seen in the past (isolated bug case?)
    # Anyway, handled easily enough
    with open(datapaths.no_namespace_test_packet, 'rb') as f:
        vff = vp.load(f)
    self.assertFalse(vp.valid_as_v2_0(vff))
    self.assertEqual(vff.tag, 'VOEvent')
    self.assertEqual(vff.attrib['ivorn'],
                     'ivo://com.dc3/dc3.broker#BrokerTest-2014-02-24T15:55:27.72')

    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        xml_str = f.read()
    xml_str = xml_str.replace(b'voe', b'foobar_ns')
    # print xml_str
    vfs = vp.loads(xml_str)
    vp.assert_valid_as_v2_0(vfs)
    self.assertEqual(vfs.tag, 'VOEvent')
    self.assertEqual(vfs.attrib['ivorn'],
                     'ivo://nasa.gsfc.gcn/SWIFT#BAT_GRB_Pos_532871-729')
def main():
    args = handle_args()
    logger = setup_logging(args.logfile_path)
    dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params, args.dbname)
    if not db_utils.check_database_exists(dburl):
        raise RuntimeError("Database not found")
    stdin = sys.stdin.read()
    v = voeventparse.loads(stdin)
    session = Session(bind=create_engine(dburl))
    try:
        conv.safe_insert_voevent(session, v)
        session.commit()
    except:
        logger.exception("Could not insert packet with ivorn {} into {}".format(
            v.attrib['ivorn'], args.dbname))
    logger.info("Loaded packet with ivorn {} into {}".format(
        v.attrib['ivorn'], args.dbname))
    return 0
def __call__(self, event):
    """
    Add an event to the celery processing queue
    """
    v = None
    try:
        session = Session(bind=dbengine)
        v = voeventparse.loads(event.raw_bytes)
        dbconv.safe_insert_voevent(session, v)
        session.commit()
    except Exception as e:
        if v is None:
            log.warn("Could not parse event-bytes as voevent")
        else:
            log.warn(
                "Could not insert packet with ivorn {} into database {}".format(
                    v.attrib['ivorn'], voeventdb_dbname))
        self.deferred.errback(e)
    log.info("Loaded {} into database {}".format(v.attrib['ivorn'],
                                                 voeventdb_dbname))
def ingest_voevent_celerytask(bytestring):
    """
    Ingest the voevent into a local instance of voeventdb.
    """
    v = voeventparse.loads(bytestring)
    logger.debug("Load for ingest: " + v.attrib['ivorn'])
    session = Session(bind=dbengine)
    try:
        dbconv.safe_insert_voevent(session, v)
        session.commit()
    except:
        if (v.attrib['role'] == voeventparse.definitions.roles.test and
                v.attrib['ivorn'].startswith('ivo://nasa.gsfc.gcn/INTEGRAL')):
            logger.warning(
                "Ignoring mismatched duplicate-ivorn test events from "
                "NASA-INTEGRAL stream")
        else:
            logger.exception(
                "Could not insert packet with ivorn {} into {}".format(
                    v.attrib['ivorn'], voeventdb_dbname))
    logger.info("Ingested:" + v.attrib['ivorn'])
def test_tarball_round_trip(named_temporary_file, fixture_db_session):
    voevent_etrees = fake.heartbeat_packets()
    # with open(assasn_non_ascii_packet_filepath, 'rb') as f:
    #     voevent_etrees.append(vp.load(f))
    s = fixture_db_session
    for etree in voevent_etrees:
        s.add(models.Voevent.from_etree(etree))
    s.flush()
    voevent_dbrows = s.query(models.Voevent.ivorn, models.Voevent.xml).all()
    assert len(voevent_dbrows) == len(voevent_etrees)
    voevent_rowgen = list(models.Voevent.from_etree(v) for v in voevent_etrees)
    assert voevent_dbrows[0].ivorn == voevent_rowgen[0].ivorn
    assert voevent_dbrows[0].xml == voevent_rowgen[0].xml

    # Here's the crux
    # A newly instantiated model will store a string type same as Python 2;
    # bytestring stores as bytestring, unicode as unicode.
    # However, after a round-trip to the database, proper typing has been
    # asserted and the bytestring is returned as unicode!
    assert type(voevent_dbrows[0].xml) != type(voevent_rowgen[0].xml)
    assert type(voevent_rowgen[0].xml) == str
    assert type(voevent_dbrows[0].xml) == unicode

    # Therefore it's crucial to test with an actual round-tripped dataset,
    # the 'voevent_dbrows' from above:
    fname = named_temporary_file.name
    filestore.write_tarball(voevent_dbrows, fname)

    loaded_voevents = [vp.loads(s.xml)
                       for s in filestore.tarfile_xml_generator(fname)]

    def to_strings(voeventlist):
        return [vp.dumps(v) for v in voeventlist]

    def to_ivorn(voeventlist):
        return [v.attrib['ivorn'] for v in voeventlist]

    assert (to_ivorn(voevent_etrees) == to_ivorn(loaded_voevents))
    assert (to_strings(voevent_etrees) == to_strings(loaded_voevents))
def process_gcn(payload, root):
    alerts_path = config.get('ALERT FILES', 'PATH')  # event alert file path
    fits_path = config.get('EVENT FILES', 'PATH')  # event FITS file path
    is_test = config.getboolean('GENERAL', 'TEST') if config.has_option(
        'GENERAL', 'TEST') else False

    # Respond only to 'test'/'observation' events
    if is_test:
        role = 'test'
    else:
        role = 'observation'
    if root.attrib['role'] != role:
        logging.info('Not {}, aborting.'.format(role))
        return

    ivorn = root.attrib['ivorn']
    filename = ntpath.basename(ivorn).split('#')[1]
    log = init_log(filename)

    # Is retracted?
    if gcn.handlers.get_notice_type(root) == gcn.notice_types.LVC_RETRACTION:
        # Save alert to file
        with open(alerts_path + filename + '.xml', "wb") as f:
            f.write(payload)
        log.info("Event {} retracted, doing nothing.".format(filename))
        send_mail(subject="[GW@Wise] {}".format(filename.split('-')[0]),
                  text="GCN/LVC retraction {} received, doing nothing.".format(
                      filename),
                  html=format_html("<b>Alert retracted.</b><br>"),
                  files=[alerts_path + filename + '.xml'])
        return

    v = vp.loads(payload)

    # Read all of the VOEvent parameters from the "What" section
    params = {
        elem.attrib['name']: elem.attrib['value']
        for elem in v.iterfind('.//Param')
    }

    # Respond only to 'CBC' (compact binary coalescence candidates) events.
    # Change 'CBC' to 'Burst' to respond to only unmodeled burst events.
    if params['Group'] != 'CBC':
        log.info('Not CBC, aborting.')
        return

    # Respond only to specific merger types
    if ((config.getfloat("GENERAL", "BNS_MIN") < float(params["BNS"])) |
        (config.getfloat("GENERAL", "NSBH_MIN") < float(params["NSBH"])) |
        (config.getfloat("GENERAL", "MASSGAP_MIN") < float(params["MassGap"])) |
        (config.getfloat("GENERAL", "BBH_MIN") < float(params["BBH"])) |
        (config.getfloat("GENERAL", "HASNS_MIN") < float(params["HasNS"])) |
        (config.getfloat("GENERAL", "HASREMNANT_MIN") < float(params["HasRemnant"]))) & \
       (config.getfloat("GENERAL", "TERRESTRIAL_MAX") >= float(params["Terrestrial"])) & \
       (config.getfloat("GENERAL", "FAR_MAX") >= float(params["FAR"]) * 60 * 60 * 24 * 365):
        pass
    else:
        log.info("Uninteresting alert, aborting.")
        return

    # Save alert to file
    with open(alerts_path + filename + '.xml', "wb") as f:
        f.write(payload)
    log.info("GCN/LVC alert {} received, started processing.".format(ivorn))

    # Read VOEvent attributes
    keylist = ['ivorn', 'role', 'version']
    for key in keylist:
        params[key] = v.attrib[key]

    # Read Who
    params['author_ivorn'] = v.Who.Author.contactName
    params['date_ivorn'] = v.Who.Date

    # Read WhereWhen
    params['observatorylocation_id'] = \
        v.WhereWhen.ObsDataLocation.ObservatoryLocation.attrib['id']
    params['astrocoordsystem_id'] = \
        v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoordSystem.attrib['id']
    params['isotime'] = \
        v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Time.TimeInstant.ISOTime

    # Read How
    description = ""
    for item in v.How.iterfind('Description'):
        description = description + ", " + item
    params['how_description'] = description

    # Insert VOEvent to the database
    mysql_update.insert_voevent('voevent_lvc', params, log)

    # Download the HEALPix sky map FITS file.
    tmp_path = download_file(params['skymap_fits'], cache=False)
    skymap_path = fits_path + filename + "_" + ntpath.basename(
        params['skymap_fits'])
    shutil.move(tmp_path, skymap_path)

    # Respond only to alerts with reasonable localization
    credzones = [
        0.5, 0.9,
        config.getfloat("GENERAL", "AREA_CREDZONE"),
        config.getfloat("TILE", "CREDZONE")
    ]
    area = get_sky_area(skymap_path, credzone=credzones)
    if area[2] > config.getfloat("GENERAL", "AREA_MAX"):
        log.info(
            f"""{credzones[2]} area is {area[2]} > {config.get("GENERAL", "AREA_MAX")} deg^2, aborting.""")
        send_mail(
            subject="[GW@Wise] {}".format(params["GraceID"]),
            text=f"""Attached {filename} GCN/LVC alert received, but {credzones[2]} area is {area[2]} > \
{config.get("GENERAL", "AREA_MAX")} deg^2, aborting.""",
            html=format_alert(params, area[0:1]),
            files=[alerts_path + filename + '.xml'],
            log=log)
        return

    # Send alert email
    send_mail(
        subject="[GW@Wise] {}".format(params["GraceID"]),
        text="Attached {} GCN/LVC alert received, started processing.".format(
            filename),
        html=format_alert(params, area[0:2]),
        files=[alerts_path + filename + '.xml'],
        log=log)

    if area[3] > config.getfloat("TILE", "AREA_MAX"):
        # Create the galaxy list
        galaxies, ra, dec = galaxy_list.find_galaxy_list(skymap_path, log=log)

        # Save galaxy list to csv file and send it
        ascii.write(galaxies, "galaxy_list.csv", format="csv", overwrite=True,
                    names=["GladeID", "RA", "Dec", "Dist", "Bmag", "Score",
                           "Distance factor"])
        send_mail(
            subject="[GW@Wise] {} Galaxy list".format(params["GraceID"]),
            text="{} GCN/LVC alert galaxy list is attached.".format(filename),
            files=["galaxy_list.csv"],
            log=log)

        # Create Wise plan
        wise.process_galaxy_list(galaxies, alertname=ivorn.split('/')[-1],
                                 ra_event=ra, dec_event=dec, log=log)
    else:
        # Tile the credible region
        wise.process_tiles(skymap_path, alertname=ivorn.split('/')[-1], log=log)

    # Finish and delete logger
    log.info("Done.")
    close_log(log)
def process_gcn(payload, root):
    alerts_path = config.get('ALERT FILES', 'PATH')  # event alert file path
    fits_path = config.get('EVENT FILES', 'PATH')  # event FITS file path
    is_test = config.getboolean('GENERAL', 'TEST') if config.has_option(
        'GENERAL', 'TEST') else False

    # Respond only to 'test'/'observation' events
    if is_test:
        role = 'test'
    else:
        role = 'observation'
    if root.attrib['role'] != role:
        logging.info('Not {}, aborting.'.format(role))
        return

    ivorn = root.attrib['ivorn']
    filename = ntpath.basename(ivorn).split('#')[1]
    log = init_log(filename)

    # Is retracted?
    if gcn.handlers.get_notice_type(root) == gcn.notice_types.LVC_RETRACTION:
        # Save alert to file
        with open(alerts_path + filename + '.xml', "wb") as f:
            f.write(payload)
        log.info("Event {} retracted, doing nothing.".format(ivorn))
        send_mail(
            subject="[GW@Wise] LVC event retracted",
            text="Attached GCN/LVC retraction {} received, doing nothing.".format(ivorn),
            files=[alerts_path + filename + '.xml'])
        return

    v = vp.loads(payload)

    # Read all of the VOEvent parameters from the "What" section
    params = {
        elem.attrib['name']: elem.attrib['value']
        for elem in v.iterfind('.//Param')
    }

    # Respond only to 'CBC' (compact binary coalescence candidates) events.
    # Change 'CBC' to 'Burst' to respond to only unmodeled burst events.
    if params['Group'] != 'CBC':
        log.info('Not CBC, aborting.')
        return

    # Save alert to file
    with open(alerts_path + filename + '.xml', "wb") as f:
        f.write(payload)
    log.info("GCN/LVC alert {} received, started processing.".format(ivorn))

    # Read VOEvent attributes
    keylist = ['ivorn', 'role', 'version']
    for key in keylist:
        params[key] = v.attrib[key]

    # Read Who
    params['author_ivorn'] = v.Who.Author.contactName
    params['date_ivorn'] = v.Who.Date

    # Read WhereWhen
    params['observatorylocation_id'] = \
        v.WhereWhen.ObsDataLocation.ObservatoryLocation.attrib['id']
    params['astrocoordsystem_id'] = \
        v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoordSystem.attrib['id']
    params['isotime'] = \
        v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Time.TimeInstant.ISOTime

    # Read How
    description = ""
    for item in v.How.iterfind('Description'):
        description = description + ", " + item
    params['how_description'] = description

    # Insert VOEvent to the database
    mysql_update.insert_voevent('voevent_lvc', params, log)

    # Send alert email
    send_mail(
        subject="[GW@Wise] LVC alert received",
        text="Attached GCN/LVC alert {} received, started processing.".format(ivorn),
        files=[alerts_path + filename + '.xml'])

    # Download the HEALPix sky map FITS file.
    tmp_path = download_file(params['skymap_fits'])
    skymap_path = fits_path + filename + "_" + ntpath.basename(
        params['skymap_fits'])
    shutil.move(tmp_path, skymap_path)

    # Create the galaxy list
    galaxies, ra, dec = galaxy_list.find_galaxy_list(skymap_path, log=log)

    # Create Wise plan
    wise.process_galaxy_list(galaxies, filename=ivorn.split('/')[-1],
                             ra_event=ra, dec_event=dec, log=log)

    # Finish and delete logger
    log.info("Done.")
    close_log(log)
def main():
    stdin = sys.stdin.read()
    v = voeventparse.loads(stdin)
    handle_voevent(v)
    return 0
def cli():
    fourpisky.log_config.setup_logging("process_voevent")
    stdin_binary = click.get_binary_stream('stdin')
    v = voeventparse.loads(stdin_binary.read())
    voevent_logic(v)
    return 0
def handler():
    """ Convert VOEvent from stdin to parquet file, and store it.

    The user will have to define the following constants:
        EVENTDIR: str
        USEHDFS: bool
        HOST: str
        PORT: int
        USER: str
    They are stored in the fink_voevent/vo_writer.py file.

    Usage as a Comet handler:
        twistd -n comet --verbose --local-ivo=ivo://fink-broker/$(hostname)\
            --remote=voevent.4pisky.org --cmd=fink_voevent/vo_writer.py

    Usage as a standard script:
        cat a_voevent_from_disk | fink_voevent/vo_writer.py
    """
    # Check if the outdir exists
    if not vo.check_dir_exist(EVENTDIR, USEHDFS):
        print("EVENTDIR={} does not exist".format(EVENTDIR))
        print("Create it or edit fink_broker/vo_writer.py")
        sys.exit()

    # Read the data from the stdin (string)
    packet_data = sys.stdin.buffer.read()

    # Load as XML
    xml_packet_data = voeventparse.loads(packet_data)

    # Extract the ivorn
    ivorn = xml_packet_data.attrib['ivorn']

    # Skip if the event is a test
    if not vo.is_observation(xml_packet_data):
        print('test/utility received - {}'.format(ivorn))
        return 0

    # Extract information about position and time
    coords = voeventparse.get_event_position(xml_packet_data)
    time_utc = str(voeventparse.get_event_time_as_utc(xml_packet_data))

    # Store useful information for coincidence in a DataFrame
    df = pd.DataFrame.from_dict({
        'ivorn': [ivorn],
        'ra': [coords.ra],
        'dec': [coords.dec],
        'err': [coords.err],
        'units': [coords.units],
        'timeUTC': [time_utc],
        'raw_event': packet_data})

    # Filename for the event is based on the ivorn.
    fn = '{}/{}.parquet'.format(EVENTDIR, vo.string_to_filename(ivorn))

    # Get the connector
    if USEHDFS:
        fs = vo.get_hdfs_connector(HOST, PORT, USER)
    else:
        fs = None

    vo.write_dataframe(df, outpath=fn, fs=fs)
    return 0
def main():
    s = sys.stdin.read()
    v = voeventparse.loads(s)
    voevent_logic(v)
    return 0