def test_for_packet_mangling(self):
    """
    Check that applying prettystr to a packet does not change it.
    """
    packet = self.swift_grb_v2_packet
    self.assertTrue(vp.valid_as_v2_0(packet))
    serialised_before = vp.dumps(packet)
    # Render the packet; this must be a pure read-only operation.
    vp.prettystr(packet)
    self.assertTrue(vp.valid_as_v2_0(packet))
    serialised_after = vp.dumps(packet)
    self.assertEqual(serialised_before, serialised_after)
def cli():
    """Fire a test VOEvent at the celery processing task and return 0."""
    click.echo("Attempting celery task")
    packet = fourpisky.voevent.create_4pisky_test_trigger_voevent()
    serialised = voeventparse.dumps(packet)
    process_voevent_celerytask.delay(serialised)
    click.echo("Task fired")
    return 0
def heartbeat_packets(start=default_start_dt, interval=timedelta(minutes=15),
                      n_packets=24, role=vp.definitions.roles.test):
    """
    Create Voevents with varying ivorns and values of ``Who.Date``.

    Args:
        start(datetime.datetime): Timestamp for the first packet.
        interval(datetime.timedelta): Heartbeat interval between packets.
        n_packets(int): Number of packets to generate.
        role(str): VOEvent role assigned to every generated packet.

    Returns:
        list: A list of VOEvent packets.
    """
    packets = []
    for ts in packetgen.timerange(start, start + n_packets * interval,
                                  interval):
        packets.append(
            packetgen.create_test_packet(testpacket_identity,
                                         author_date=ts,
                                         role=role))
    # NB Whitespacing of loaded (parsed) vs custom-built VOEvents is different:
    # http://lxml.de/FAQ.html#why-doesn-t-the-pretty-print-option-reformat-my-xml-output
    # So, to enable exact ``dumps`` matching (for equality testing)
    # we take the fake voevents on a save/load round-trip before we return
    packets = [vp.loads(vp.dumps(v)) for v in packets]
    return packets
def from_etree(root, received=None):
    """
    Init a Voevent row from an LXML etree loaded with voevent-parse.

    Args:
        root: Root element of the VOEvent, as loaded by voevent-parse.
        received (datetime.datetime): Timezone-aware receipt timestamp.
            Defaults to "now" (UTC), evaluated per call.

    Returns:
        Voevent: Populated row object (coords dropped on parse failure).
    """
    # NOTE: the original default ``received=pytz.UTC.localize(datetime.utcnow())``
    # was evaluated once at import time, so every row sharing the default got
    # the same stale timestamp. Compute it per call instead.
    if received is None:
        received = pytz.UTC.localize(datetime.utcnow())
    ivorn = root.attrib['ivorn']
    # Stream- Everything except before the '#' separator,
    # with the prefix 'ivo://' removed:
    stream = ivorn.split('#')[0][6:]
    row = Voevent(
        ivorn=ivorn,
        role=root.attrib['role'],
        version=root.attrib['version'],
        stream=stream,
        xml=vp.dumps(root),
        received=received,
    )
    row.author_datetime = _grab_xpath(root, 'Who/Date',
                                      converter=iso8601.parse_date)
    row.author_ivorn = _grab_xpath(root, 'Who/AuthorIVORN')
    row.cites = Cite.from_etree(root)
    if not _has_bad_coords(root, stream):
        # Best-effort coordinate parse: log and continue on any failure,
        # never reject the whole packet (narrowed from a bare ``except:``).
        try:
            row.coords = Coord.from_etree(root)
        except Exception:
            logger.exception(
                'Error loading coords for ivorn {}, coords dropped.'.
                    format(ivorn))
    return row
def from_etree(root, received=None):
    """
    Init a Voevent row from an LXML etree loaded with voevent-parse.

    Args:
        root: Root element of the VOEvent, as loaded by voevent-parse.
        received (datetime.datetime): Timezone-aware receipt timestamp.
            Defaults to "now" (UTC), evaluated per call.

    Returns:
        Voevent: Populated row object (coords dropped on parse failure).
    """
    # NOTE: the original default ``received=pytz.UTC.localize(datetime.utcnow())``
    # was evaluated once at import time, so every row sharing the default got
    # the same stale timestamp. Compute it per call instead.
    if received is None:
        received = pytz.UTC.localize(datetime.utcnow())
    ivorn = root.attrib['ivorn']
    # Stream- Everything except before the '#' separator,
    # with the prefix 'ivo://' removed:
    stream = ivorn.split('#')[0][6:]
    row = Voevent(ivorn=ivorn,
                  role=root.attrib['role'],
                  version=root.attrib['version'],
                  stream=stream,
                  xml=vp.dumps(root),
                  received=received,
                  )
    row.author_datetime = _grab_xpath(root, 'Who/Date',
                                      converter=iso8601.parse_date)
    row.author_ivorn = _grab_xpath(root, 'Who/AuthorIVORN')
    row.cites = Cite.from_etree(root)
    if not _has_bad_coords(root, stream):
        # Best-effort coordinate parse: log and continue on any failure,
        # never reject the whole packet (narrowed from a bare ``except:``).
        try:
            row.coords = Coord.from_etree(root)
        except Exception:
            logger.exception(
                'Error loading coords for ivorn {}, coords dropped.'.format(
                    ivorn)
            )
    return row
def heartbeat_packets(start=default_start_dt, interval=timedelta(minutes=15),
                      n_packets=24, role=vp.definitions.roles.test):
    """
    Create Voevents with varying ivorns and values of ``Who.Date``.

    Args:
        start(datetime.datetime): Timestamp for the first packet.
        interval(datetime.timedelta): Heartbeat interval between packets.
        n_packets(int): Number of packets to generate.
        role(str): VOEvent role assigned to every generated packet.

    Returns:
        list: A list of VOEvent packets.
    """
    packets = []
    for ts in packetgen.timerange(start, start + n_packets * interval,
                                  interval):
        packets.append(packetgen.create_test_packet(testpacket_identity,
                                                    author_date=ts,
                                                    role=role))
    # NB Whitespacing of loaded (parsed) vs custom-built VOEvents is different:
    # http://lxml.de/FAQ.html#why-doesn-t-the-pretty-print-option-reformat-my-xml-output
    # So, to enable exact ``dumps`` matching (for equality testing)
    # we take the fake voevents on a save/load round-trip before we return
    packets = [vp.loads(vp.dumps(v)) for v in packets]
    return packets
def test_ingest_packet():
    """Run the ingest script as a subprocess and check one row is added."""
    # Must create corpusdb first:
    assert db_utils.check_database_exists(testdb_corpus_url)
    engine = sqlalchemy.create_engine(testdb_corpus_url)
    session = sqlalchemy.orm.Session(bind=engine)

    from voeventdb.server import __path__ as root_path
    root_path = root_path[0]
    script_path = os.path.join(root_path, 'bin', 'voeventdb_ingest_packet.py')
    print("Testing script at ", script_path)
    print("Using executable:", sys.executable)

    n_before = session.query(Voevent).count()
    ingest_cmd = [
        script_path,
        '-d={}'.format(testdb_corpus_url.database),
        '-l={}'.format('/tmp/vdbingest-test.log'),
    ]
    proc = subprocess.Popen(ingest_cmd, stdin=subprocess.PIPE)
    packet_bytes = voeventparse.dumps(fake.heartbeat_packets(n_packets=1)[0])
    proc.communicate(packet_bytes)
    proc.wait()
    assert proc.returncode == 0
    assert session.query(Voevent).count() == n_before + 1
def write_voevent(v, outname='new_voevent_example.xml'):
    """ Takes VOEvent object and writes as xml. """
    pretty_xml = minidom.parseString(vp.dumps(v)).toprettyxml(indent=" ")
    with open(outname, 'w') as outfile:
        outfile.write(pretty_xml)
    print("Wrote your voevent to ", os.path.abspath(outname))
def test_xml_retrieval(self, simple_populated_db):
    """Round-trip the newest packet's XML; a bad ivorn must raise HTTPError."""
    dbinf = simple_populated_db
    ivorns = apiv1.list_ivorn(order=apiv1.OrderValues.id)
    fetched_xml = apiv1.packet_xml(ivorns[-1])
    assert fetched_xml == vp.dumps(dbinf.insert_packets[-1])
    # Now try a non-existent ivorn:
    with pytest.raises(requests.HTTPError):
        apiv1.packet_xml("ivo://foo/bar")
def __init__(self, fixture_db_session):
    """
    Build a packet corpus fixture: one real GRB packet plus fake heartbeat
    packets, wire citations between them, then insert all but the last
    packet into the session so one known ivorn stays absent.
    """
    s = fixture_db_session
    packets = [swift_bat_grb_655721]
    packets.extend(fake.heartbeat_packets(role=vp.definitions.roles.test))
    # Second batch starts a day later with a different role, so streams /
    # roles / timestamps all vary across the corpus.
    extra_packets = fake.heartbeat_packets(
        start=fake.default_start_dt + timedelta(hours=24),
        role=vp.definitions.roles.utility)
    # Packets referenced by other packets:
    # self.cited = set()
    # Count times ivorn referenced by other packets in db:
    self.cite_counts = defaultdict(int)
    # Packets containing at least one cite entry
    self.followup_packets = []
    # Total number of citations (one packet may have multiple cite entries)
    self.n_citations = 0
    c0 = packets[0].attrib['ivorn']
    c1 = packets[1].attrib['ivorn']
    # One reference in ep0
    self.add_reference(extra_packets[0], c0)
    # Two references in ep1
    self.add_reference(extra_packets[1], c0)
    self.add_reference(extra_packets[1], c1)
    # Now cite ep[0], making it both cites / cited_by
    c2 = extra_packets[0].attrib['ivorn']
    self.add_reference(extra_packets[2], c2)
    # Add a citation to an external packet (one not inserted in the db)
    self.add_reference(extra_packets[3],
                       swift_xrt_grb_655721.attrib['ivorn'])
    packets.extend(extra_packets)
    self.packet_dict = {pkt.attrib['ivorn']: pkt for pkt in packets}
    self.insert_packets = packets[:-1]
    self.insert_packets_dumps = [vp.dumps(v) for v in self.insert_packets]
    # Stream = ivorn up to '#', with the 'ivo://' prefix (6 chars) stripped.
    self.streams = [
        v.attrib['ivorn'].split('#')[0][6:] for v in self.insert_packets
    ]
    self.stream_set = list(set(self.streams))
    self.roles = [v.attrib['role'] for v in self.insert_packets]
    self.role_set = list(set(self.roles))
    self.remaining_packet = packets[-1]
    # Insert all but the last packet, this gives us a useful counter-example
    s.add_all((Voevent.from_etree(p) for p in self.insert_packets))
    self.n_inserts = len(self.insert_packets)
    self.inserted_ivorns = [p.attrib['ivorn'] for p in self.insert_packets]
    self.absent_ivorn = self.remaining_packet.attrib['ivorn']
def __init__(self, fixture_db_session):
    """
    Build a packet corpus fixture: one real GRB packet plus fake heartbeat
    packets, wire citations between them, then insert all but the last
    packet into the session so one known ivorn stays absent.
    """
    s = fixture_db_session
    packets = [swift_bat_grb_655721]
    packets.extend(fake.heartbeat_packets(role=vp.definitions.roles.test))
    # Second batch starts a day later with a different role, so streams /
    # roles / timestamps all vary across the corpus.
    extra_packets = fake.heartbeat_packets(
        start=fake.default_start_dt + timedelta(hours=24),
        role=vp.definitions.roles.utility)
    # Packets referenced by other packets:
    # self.cited = set()
    # Count times ivorn referenced by other packets in db:
    self.cite_counts = defaultdict(int)
    # Packets containing at least one cite entry
    self.followup_packets = []
    # Total number of citations (one packet may have multiple cite entries)
    self.n_citations = 0
    c0 = packets[0].attrib['ivorn']
    c1 = packets[1].attrib['ivorn']
    # One reference in ep0
    self.add_reference(extra_packets[0], c0)
    # Two references in ep1
    self.add_reference(extra_packets[1], c0)
    self.add_reference(extra_packets[1], c1)
    # Now cite ep[0], making it both cites / cited_by
    c2 = extra_packets[0].attrib['ivorn']
    self.add_reference(extra_packets[2], c2)
    # Add a citation to an external packet (one not inserted in the db)
    self.add_reference(extra_packets[3],
                       swift_xrt_grb_655721.attrib['ivorn'])
    packets.extend(extra_packets)
    self.packet_dict = {pkt.attrib['ivorn']: pkt for pkt in packets}
    self.insert_packets = packets[:-1]
    self.insert_packets_dumps = [vp.dumps(v) for v in self.insert_packets]
    # Stream = ivorn up to '#', with the 'ivo://' prefix (6 chars) stripped.
    self.streams = [v.attrib['ivorn'].split('#')[0][6:]
                    for v in self.insert_packets]
    self.stream_set = list(set(self.streams))
    self.roles = [v.attrib['role'] for v in self.insert_packets]
    self.role_set = list(set(self.roles))
    self.remaining_packet = packets[-1]
    # Insert all but the last packet, this gives us a useful counter-example
    s.add_all((Voevent.from_etree(p) for p in self.insert_packets))
    self.n_inserts = len(self.insert_packets)
    self.inserted_ivorns = [p.attrib['ivorn'] for p in self.insert_packets]
    self.absent_ivorn = self.remaining_packet.attrib['ivorn']
def test_dumps(self):
    """
    Note, the processed output does not match the raw input -
    because I have added the UTF-8 encoding declaration.
    So we match the convenience routines against an
    etree.tostring processed version of the original.
    """
    raw_root = objectify.parse(datapaths.swift_bat_grb_pos_v2).getroot()
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        parsed_packet = vp.load(f)
    expected = etree.tostring(raw_root,
                              pretty_print=False,
                              xml_declaration=True,
                              encoding='UTF-8')
    actual = vp.dumps(parsed_packet)
    self.assertEqual(expected, actual)
def test_ingest_packet():
    """Pipe one fake packet into the ingest script; row count must grow by 1."""
    # Must create corpusdb first:
    assert db_utils.check_database_exists(testdb_corpus_url)
    engine = sqlalchemy.create_engine(testdb_corpus_url)
    s = sqlalchemy.orm.Session(bind=engine)

    from voeventdb.server import __path__ as root_path
    root_path = root_path[0]
    script_path = os.path.join(root_path, "bin", "voeventdb_ingest_packet.py")
    print("Testing script at ", script_path)
    print("Using executable:", sys.executable)

    n_before = s.query(Voevent).count()
    cmd = [
        script_path,
        "-d={}".format(testdb_corpus_url.database),
        "-l={}".format("/tmp/vdbingest-test.log"),
    ]
    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
    proc.communicate(
        voeventparse.dumps(fake.heartbeat_packets(n_packets=1)[0]))
    proc.wait()
    assert proc.returncode == 0
    assert s.query(Voevent).count() == n_before + 1
def get_value(self, v, param_data, item, event_type):
    '''
    Extract the value of item from VOEvent.

    :param v: VOEvent xml
    :param param_data: all param data from VOEvent file
    :param item: single dictionary item from mapping
    :param event_type: event type of VOEvent, including citation if
        applicable, e.g. ('new', None)
    :type v: lxml.objectify.ObjectifiedElement, str
    :type param_data: orderedmultidict.orderedmultidict.omdict
    :type item: dict
    :type event_type: tuple

    :returns: value for item
    :rtype: int, float, str, bool, NoneType
    '''
    # Dispatch on the mapping item's declared 'type'; unknown types
    # fall through to the final ``return None``.
    itemtype = item.get('type')
    if itemtype == 'ivorn':
        if (event_type[0] == 'supersedes'):
            if event_type[1]:
                # type supersedes with a valid ivorn citation
                return event_type[1]
            else:
                # type supersedes with no ivorn citation, use event ivorn
                return self.get_attrib(v, item.get('name'))
        else:
            return self.get_attrib(v, item.get('name'))
    elif itemtype == 'Param':
        return self.get_param(param_data, item.get('param_group'),
                              item.get('param_name'))
    elif itemtype == 'ISOTime':
        try:
            return self.get_utc_time_str(v)
        except AttributeError:
            # for type 'retraction' there is no time defined
            return None
    elif itemtype == 'authortime':
        try:
            # Mapping uses dotted paths; convert to an xpath, e.g.
            # 'Who.Date' -> './/Who/Date', and reformat the timestamp.
            timestr = v.xpath('.//' + item.get('voevent').replace('.', '/'))[0]
            return parser.parse(str(timestr)).strftime('%Y-%m-%d %H:%M:%S')
        except IndexError:
            # xpath matched nothing
            return None
    elif itemtype == 'XML':
        # Full serialized packet.
        return vp.dumps(v)
    elif itemtype == 'voevent':
        try:
            return v.xpath('.//' + item.get('voevent').replace('.', '/'))[0]
        except IndexError:
            return None
    elif itemtype == 'Coord':
        return self.get_coord(v, item.get('name'))
    elif itemtype == 'verify':
        # get importance attribute from <Why> section
        importance = v.Why.attrib.get(item.get('name'))
        # for high importance set verified=True, else False
        try:
            if (float(importance) >= 0.95):
                # high importance, so default to verified
                return True
            else:
                return False
        except TypeError:
            # importance attribute missing (None) -> not verified
            return False
    else:
        return None
def _NewVOEvent(self, dm, dm_err, width, snr, flux, ra, dec, semiMaj, semiMin,
                ymw16, name, importance, utc, gl, gb, gain,
                dt=TSAMP.to(u.ms).value,
                delta_nu_MHz=(BANDWIDTH / NCHAN).to(u.MHz).value,
                nu_GHz=1.37, posang=0, test=None):
    """
    Create a VOEvent

    :param float dm: Dispersion measure (pc cm**-3)
    :param float dm_err: Error on DM (pc cm**-3)
    :param float width: Pulse width (ms)
    :param float snr: Signal-to-noise ratio
    :param float flux: flux density (mJy)
    :param float ra: Right ascension (deg)
    :param float dec: Declination (deg)
    :param float semiMaj: Localisation region semi-major axis (arcmin)
    :param float semiMin: Localisation region semi-minor axis (arcmin)
    :param float ymw16: YMW16 DM (pc cm**-3)
    :param str name: Source name
    :param float importance: Trigger importance (0-1)
    :param str utc: UTC arrival time in ISOT format
    :param float gl: Galactic longitude (deg)
    :param float gb: Galactic latitude (deg)
    :param float gain: Telescope gain (K Jy**-1)
    :param float dt: Telescope time resolution (ms)
    :param float delta_nu_MHz: Telescope frequency channel width (MHz)
    :param float nu_GHz: Telescope centre frequency (GHz)
    :param float posang: Localisation region position angle (deg)
    :param bool test: Whether to send a test event or observation event
    """
    z = dm / 1000.0  # May change
    errDeg = semiMaj / 60.0

    # Parse UTC (fixed-width ISOT slicing: YYYY-MM-DDThh:mm:ss...)
    utc_YY = int(utc[:4])
    utc_MM = int(utc[5:7])
    utc_DD = int(utc[8:10])
    utc_hh = int(utc[11:13])
    utc_mm = int(utc[14:16])
    utc_ss = float(utc[17:])
    t = Time(utc, scale='utc', format='isot')
    # IERS server is down, avoid using it
    t.delta_ut1_utc = 0
    mjd = t.mjd

    ivorn = ''.join([name, str(utc_hh), str(utc_mm), '/', str(mjd)])

    # use default value for test flag if not set
    if test is None:
        test = self.test

    # Set role to either test or real observation
    if test:
        self.logger.info("Event type is test")
        v = vp.Voevent(stream='nl.astron.apertif/alert', stream_id=ivorn,
                       role=vp.definitions.roles.test)
    else:
        self.logger.info("Event type is observation")
        v = vp.Voevent(stream='nl.astron.apertif/alert', stream_id=ivorn,
                       role=vp.definitions.roles.observation)

    # Author origin information
    vp.set_who(v, date=datetime.datetime.utcnow(), author_ivorn="nl.astron")
    # Author contact information
    vp.set_author(v, title="ARTS FRB alert system",
                  contactName="Leon Oostrum",
                  contactEmail="*****@*****.**",
                  shortName="ALERT")
    # Parameter definitions

    # Apertif-specific observing configuration
    beam_sMa = vp.Param(name="beam_semi-major_axis", unit="MM",
                        ucd="instr.beam;pos.errorEllipse;phys.angSize.smajAxis",
                        ac=True, value=semiMaj)
    beam_sma = vp.Param(name="beam_semi-minor_axis", unit="MM",
                        ucd="instr.beam;pos.errorEllipse;phys.angSize.sminAxis",
                        ac=True, value=semiMin)
    beam_rot = vp.Param(name="beam_rotation_angle", value=str(posang),
                        unit="Degrees",
                        ucd="instr.beam;pos.errorEllipse;instr.offset",
                        ac=True)
    tsamp = vp.Param(name="sampling_time", value=str(dt), unit="ms",
                     ucd="time.resolution", ac=True)
    bw = vp.Param(name="bandwidth", value=str(delta_nu_MHz), unit="MHz",
                  ucd="instr.bandwidth", ac=True)
    nchan = vp.Param(name="nchan", value=str(NCHAN), dataType="int",
                     ucd="meta.number;em.freq;em.bin", unit="None")
    cf = vp.Param(name="centre_frequency", value=str(1000 * nu_GHz),
                  unit="MHz", ucd="em.freq;instr", ac=True)
    npol = vp.Param(name="npol", value="2", dataType="int", unit="None")
    bits = vp.Param(name="bits_per_sample", value="8", dataType="int",
                    unit="None")
    # NOTE: rebinds the ``gain`` argument to a Param of the same name.
    gain = vp.Param(name="gain", value=str(gain), unit="K/Jy", ac=True)
    tsys = vp.Param(name="tsys", value=str(TSYS.to(u.Kelvin).value), unit="K",
                    ucd="phot.antennaTemp", ac=True)
    backend = vp.Param(name="backend", value="ARTS")
    # beam = vp.Param(name="beam", value= )

    v.What.append(vp.Group(params=[beam_sMa, beam_sma, beam_rot, tsamp, bw,
                                   nchan, cf, npol, bits, gain, tsys,
                                   backend],
                           name="observatory parameters"))

    # Event parameters
    DM = vp.Param(name="dm", ucd="phys.dispMeasure", unit="pc/cm^3",
                  ac=True, value=str(dm))
    DM_err = vp.Param(name="dm_err", ucd="stat.error;phys.dispMeasure",
                      unit="pc/cm^3", ac=True, value=str(dm_err))
    Width = vp.Param(name="width", ucd="time.duration;src.var.pulse",
                     unit="ms", ac=True, value=str(width))
    SNR = vp.Param(name="snr", ucd="stat.snr", unit="None", ac=True,
                   value=str(snr))
    Flux = vp.Param(name="flux", ucd="phot.flux", unit="Jy", ac=True,
                    value=str(flux))
    Flux.Description = "Calculated from radiometer equation. Not calibrated."
    Gl = vp.Param(name="gl", ucd="pos.galactic.lon", unit="Degrees", ac=True,
                  value=str(gl))
    Gb = vp.Param(name="gb", ucd="pos.galactic.lat", unit="Degrees", ac=True,
                  value=str(gb))

    # v.What.append(vp.Group(params=[DM, Width, SNR, Flux, Gl, Gb], name="event parameters"))
    v.What.append(vp.Group(params=[DM, DM_err, Width, SNR, Flux, Gl, Gb],
                           name="event parameters"))

    # Advanced parameters (note, change script if using a differeing MW model)
    mw_dm = vp.Param(name="MW_dm_limit", unit="pc/cm^3", ac=True,
                     value=str(ymw16))
    mw_model = vp.Param(name="galactic_electron_model", value="YMW16")
    redshift_inferred = vp.Param(name="redshift_inferred", ucd="src.redshift",
                                 unit="None", value=str(z))
    redshift_inferred.Description = "Redshift estimated using z = DM/1000.0"

    v.What.append(vp.Group(params=[mw_dm, mw_model, redshift_inferred],
                           name="advanced parameters"))

    # WhereWhen: sky position with error circle, plus arrival time.
    vp.add_where_when(v,
                      coords=vp.Position2D(ra=ra, dec=dec, err=errDeg,
                                           units='deg',
                                           system=vp.definitions.sky_coord_system.utc_fk5_geo),
                      obs_time=datetime.datetime(utc_YY, utc_MM, utc_DD,
                                                 utc_hh, utc_mm, int(utc_ss),
                                                 tzinfo=pytz.UTC),
                      observatory_location="WSRT")

    # Why
    vp.add_why(v, importance=importance)
    v.Why.Name = name

    # Only write the event to disk if it validates against VOEvent v2.0.
    if vp.valid_as_v2_0(v):
        with open('{}.xml'.format(utc), 'wb') as f:
            voxml = vp.dumps(v)
            xmlstr = minidom.parseString(voxml).toprettyxml(indent=" ")
            f.write(xmlstr.encode())
        self.logger.info(vp.prettystr(v.Who))
        self.logger.info(vp.prettystr(v.What))
        self.logger.info(vp.prettystr(v.WhereWhen))
        self.logger.info(vp.prettystr(v.Why))
    else:
        self.logger.error("Unable to write file {}.xml".format(name))
def handle_neutrino(v, pretend=False):
    """
    Handles the parsing of the VOEvent and generates observations.

    :param v: string in VOEvent XML format
    :param pretend: Boolean, True if we don't want to schedule observations
        (automatically switches to True for test events)
    :return: None
    """
    # Any non-observation role is treated as a rehearsal.
    if v.attrib['role'] != "observation":
        log.info("Attribute role != 'observation'. Setting pretend=True")
        pretend = True
    if PRETEND:
        log.info("Global PRETEND is True, setting pretend=True")
        pretend = True

    # Fetch params from the What section
    params = voeventparse.convenience.get_toplevel_params(v)
    try:
        is_real = params.get("isRealAlert")["value"]
    except:
        # Parameter absent -> assume the alert is real.
        is_real = True
    if not is_real:
        log.info("Parameter isRealAlert is not True, setting pretend=True")
        pretend = True

    if 'Antares' in v.attrib['ivorn']:
        trig_id = params.get("TrigID")["value"]
        # Determine if the event satisfies trigger criteria
        # Note: this should ultimately be made more complex than selecting
        # simply on ranking
        ranking = int(params.get("ranking")["value"])
        if ranking < MINIMUM_RANKING:
            log.info("Event ranking %s below trigger threshold %s. Not triggering." % (ranking, MINIMUM_RANKING))
            handlers.send_email(from_address='*****@*****.**',
                                to_addresses=DEBUG_NOTIFY_LIST,
                                subject='DEBUG Neutrino alert for: %s - below minimum ranking to trigger' % trig_id,
                                msg_text=DEBUG_EMAIL_TEMPLATE % ("Event ranking %s below trigger threshold %s. Not triggering." % (ranking, MINIMUM_RANKING)),
                                attachments=[('voevent.xml', voeventparse.dumps(v))])
            return
        # Cache the event (keyed by trigger id) so repeat alerts for the
        # same trigger reuse one Neutrino object.
        if trig_id not in xml_cache:
            neutrino = Neutrino(event=v)
            neutrino.voe_source = "ANTARES"
            neutrino.trigger_id = trig_id
            log.info("Trigger id: {}".format(trig_id))
            if pretend:
                neutrino.info("****This is a test event****")
            xml_cache[trig_id] = neutrino
        else:
            neutrino = xml_cache[trig_id]
    elif 'ICECUBE' in v.attrib['ivorn']:
        trig_id = params.get("AMON_ID")["value"]
        if trig_id not in xml_cache:
            neutrino = Neutrino(event=v)
            neutrino.voe_source = "ICECUBE"
            neutrino.trigger_id = trig_id
            log.info("Trigger id: {}".format(trig_id))
            if pretend:
                neutrino.info("****This is a test event****")
            xml_cache[trig_id] = neutrino
        else:
            neutrino = xml_cache[trig_id]
    else:
        # Unknown source: notify and bail out.
        log.debug("Not an ICECUBE or ANTARES neutrino.")
        log.debug("Not Triggering")
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=DEBUG_NOTIFY_LIST,
                            subject='DEBUG Neutrino alert - Not an ICECUBE or ANTARES event, not triggering',
                            msg_text=DEBUG_EMAIL_TEMPLATE % ("Unknown event type, not triggering"),
                            attachments=[('voevent.xml', voeventparse.dumps(v))])
        return

    position = voeventparse.convenience.get_event_position(v)
    log.info("Neutrino detected at: RA={:.2f}, Dec={:.2f} ({:.2f} deg error circle)".format(position.ra, position.dec, position.err))
    neutrino.add_pos((position.ra, position.dec, position.err))

    req_time_min = 30

    # Check for scheduled observations
    obslist = triggerservice.obslist(obstime=req_time_min * 60)
    if obslist is not None and len(obslist) > 0:
        neutrino.debug("Currently observing:")
        neutrino.debug(str(obslist))
        # Check if we are currently observing *this* neutrino
        obs = str(obslist[0][1])  # in case the obslist is returning unicode strings
        neutrino.debug("Current observation: {0}, current trigger: {1}".format(obs, trig_id))
        if trig_id in obs:
            neutrino.info("Already observing this Neutrino")
            # Check the difference in position (-2: the previous position,
            # before the one we just appended above)
            last_pos = neutrino.get_pos(-2)
            neutrino.info("Old position: RA {0}, Dec {1}, err {2}".format(*last_pos))
            pos_diff = SkyCoord(ra=last_pos[0], dec=last_pos[1], unit=astropy.units.degree, frame='icrs').separation(
                SkyCoord(ra=position.ra, dec=position.dec, unit=astropy.units.degree, frame='icrs')).degree
            neutrino.info("New position is {0} deg from previous".format(pos_diff))
            # Continue the current observation when the position difference
            # is less than REPOINTING_LIMIT
            if pos_diff < REPOINTING_LIMIT:
                neutrino.info("(less than constraint of {0} deg)".format(REPOINTING_LIMIT))
                neutrino.info("Not triggering")
                return
            neutrino.info("(greater than constraint of {0}deg)".format(REPOINTING_LIMIT))
            neutrino.info("Update current observation.")
        else:
            neutrino.info("Not currently observing this Neutrino")
    else:
        neutrino.debug("Current schedule empty")

    emaildict = {'triggerid': neutrino.trigger_id,
                 'trigtime': Time.now().iso,
                 'ra': position.ra,
                 'dec': position.dec}
    email_text = EMAIL_TEMPLATE % emaildict
    email_subject = EMAIL_SUBJECT_TEMPLATE % neutrino.trigger_id
    # Do the trigger
    result = neutrino.trigger_observation(ttype=neutrino.voe_source,
                                          obsname=trig_id,
                                          time_min=req_time_min,
                                          pretend=pretend,
                                          project_id=PROJECT_ID,
                                          secure_key=SECURE_KEY,
                                          email_tolist=NOTIFY_LIST,
                                          email_text=email_text,
                                          email_subject=email_subject,
                                          creator='VOEvent_Auto_Trigger: Neutrino=%s' % __version__,
                                          voevent=voeventparse.dumps(v))
    if result is None:
        # Trigger failed:
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=DEBUG_NOTIFY_LIST,
                            subject='DEBUG Neutrino alert - Trigger failed',
                            msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in neutrino.loglist]),
                            attachments=[('voevent.xml', voeventparse.dumps(v))])
def voevent_etree_to_ivorn_xml_tuple(voevent):
    """
    Pair a VOEvent's ivorn with its serialized XML.

    Args:
        voevent (etree): Root of an lxml.etree loaded with voeventparse.
    """
    ivorn = voevent.attrib["ivorn"]
    xml = voeventparse.dumps(voevent)
    return ivorn, xml
def handle_gw(v, pretend=False, calc_time=None):
    """
    Handles the parsing of the VOEvent and generates observations.

    :param v: string in VOEvent XML format
    :param pretend: Boolean, True if we don't want to schedule observations
        (automatically switches to True for test events)
    :param calc_time: astropy.time.Time object for calculations
    :return: None
    """
    is_test = v.attrib['role'] == 'test'
    if is_test:
        # There's a 'test' event every hour, and half of these are followed
        # by a retraction.
        # Some events, at random, generate a 'pretend' trigger.
        if random.random() < TEST_PROB:
            log.info('Test event, pretending to trigger.')
            pretend = True
        else:
            log.info('Test event, not triggering.')
            return
        event_debug_list = DEBUG_NOTIFY_LIST
    else:
        # This is a real event, send debug email to entire mailing list
        event_debug_list = NOTIFY_LIST

    # Flatten all <Param> elements into a name -> value dict.
    params = {
        elem.attrib['name']: elem.attrib['value']
        for elem in v.iterfind('.//Param')
    }
    trig_id = params['GraceID']
    debug_email_subject = DEBUG_EMAIL_SUBJECT_TEMPLATE % trig_id
    # Cache the event by trigger id; follow-up alerts reuse the GW object.
    if trig_id not in xml_cache:
        gw = GW(event=v)
        gw.trigger_id = trig_id
        gw.info("Received trigger %s" % trig_id)
        if is_test:
            gw.info("****This is a test event****")
        xml_cache[trig_id] = gw
    else:
        gw = xml_cache[trig_id]
        gw.add_event(v)

    # Packet_Type 164 is an event retraction.
    if params['Packet_Type'] == "164":
        gw.info("Alert is an event retraction. Not triggering.")
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=event_debug_list,
                            subject=debug_email_subject,
                            msg_text=DEBUG_EMAIL_TEMPLATE % "Alert is an event retraction. Not triggering.",
                            attachments=[('voevent.xml', voeventparse.dumps(v))
                                         ])
        return

    # Only trigger when a neutron star is plausibly involved.
    if 'HasNS' not in params:
        msg = "HasNS not in params. Not triggering."
        gw.debug(msg)
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=event_debug_list,
                            subject=debug_email_subject,
                            msg_text=DEBUG_EMAIL_TEMPLATE % msg,
                            attachments=[('voevent.xml', voeventparse.dumps(v))
                                         ])
        return
    elif float(params['HasNS']) < HAS_NS_THRESH:
        msg = "P_HasNS (%.2f) below threshold (%.2f). Not triggering." % (
            float(params['HasNS']), HAS_NS_THRESH)
        gw.debug(msg)
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=event_debug_list,
                            subject=debug_email_subject,
                            msg_text=DEBUG_EMAIL_TEMPLATE % msg,
                            attachments=[('voevent.xml', voeventparse.dumps(v))
                                         ])
        return

    if 'skymap_fits' not in params:
        gw.debug("No skymap in VOEvent. Not triggering.")
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=event_debug_list,
                            subject=debug_email_subject,
                            msg_text=DEBUG_EMAIL_TEMPLATE % "No skymap in VOEvent. Not triggering.",
                            attachments=[('voevent.xml', voeventparse.dumps(v))
                                         ])
        return

    try:
        gw.load_skymap(params['skymap_fits'], calc_time=calc_time)
    except:
        # Skymap may not be served yet; retry once after a minute.
        gw.debug("Failed to load skymap. Retrying in 1 minute")
        time.sleep(60)
        gw.load_skymap(params['skymap_fits'], calc_time=calc_time)

    RADecgrid, delays, power = gw.get_mwapointing_grid(returndelays=True,
                                                       returnpower=True,
                                                       minprob=MIN_PROB)
    if RADecgrid is None:
        gw.info("No pointing from skymap, not triggering")
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=event_debug_list,
                            subject=debug_email_subject,
                            msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in gw.loglist]),
                            attachments=[('voevent.xml', voeventparse.dumps(v))
                                         ])
        return
    ra, dec = RADecgrid.ra, RADecgrid.dec
    gw.info("Pointing at %s, %s" % (ra, dec))
    gw.info("Pointing contains %.3f of the localisation" % (power))
    gw.add_pos((ra.deg, dec.deg, 0.0))
    req_time_s = OBS_LENGTH

    # Is the telescope already on this event?
    obslist = triggerservice.obslist(obstime=req_time_s)
    currently_observing = False
    if obslist is not None and len(obslist) > 0:
        gw.debug("Currently observing:")
        gw.debug(str(obslist))
        obs = str(obslist[0][1])
        gw.debug("obs {0}, trig {1}".format(obs, trig_id))
        if obs == trig_id:
            currently_observing = True
            gw.info("Already observing this GW event")
            # -2: previous stored position (one before the add_pos above).
            last_pos = gw.get_pos(-2)
            last_ra = last_pos[0]
            last_dec = last_pos[1]
            gw.info("Old position: RA {0}, Dec {1}".format(last_ra, last_dec))
            if (abs(ra.deg - last_ra) < 5.0) and (abs(dec.deg - last_dec) < 5.0):
                gw.info("New pointing very close to old pointing. Not triggering.")
                handlers.send_email(
                    from_address='*****@*****.**',
                    to_addresses=event_debug_list,
                    subject=debug_email_subject,
                    msg_text=DEBUG_EMAIL_TEMPLATE % "New pointing same as old pointing. Not triggering.",
                    attachments=[('voevent.xml', voeventparse.dumps(v))])
                return
            else:
                gw.info("New pointing far from old pointing. Updating and triggering.")

    time_string = v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Time.TimeInstant.ISOTime.text
    merger_time = Time(time_string)
    delta_T = Time.now() - merger_time
    delta_T_sec = delta_T.sec
    if not currently_observing:
        # If this event is not currently being observed, check whether time
        # since merger exceeds max response time
        if delta_T_sec > MAX_RESPONSE_TIME:
            log_message = "Time since merger (%d s) greater than max response time (%d s). Not triggering" % (
                delta_T_sec, MAX_RESPONSE_TIME)
            gw.info(log_message)
            handlers.send_email(from_address='*****@*****.**',
                                to_addresses=event_debug_list,
                                subject=debug_email_subject,
                                msg_text=DEBUG_EMAIL_TEMPLATE % log_message,
                                attachments=[('voevent.xml', voeventparse.dumps(v))])
            return

    # Check if this event has been triggered on before
    if gw.first_trig_time is not None:
        # If it has been triggered, update the required time for the
        # updated observation
        gw.info("This event has already been triggered.")
        req_time_s -= (Time.now() - gw.first_trig_time).sec
        gw.info("Required observing time: %.0f s" % (req_time_s))

    emaildict = {
        'triggerid': gw.trigger_id,
        'trigtime': Time.now().iso,
        'ra': ra.to_string(unit=astropy.units.hour, sep=':'),
        'dec': dec.to_string(unit=astropy.units.deg, sep=':')
    }
    email_text = EMAIL_TEMPLATE % emaildict
    gw.info(email_text)
    gw.info("Template GCN text:")
    gcn_text = GCN_TEMPLATE % (trig_id, Time.now().iso, delta_T_sec, ra.deg,
                               dec.deg, power)
    gw.info(gcn_text)
    email_subject = EMAIL_SUBJECT_TEMPLATE % gw.trigger_id
    # Do the trigger
    gw.info("Sending trigger.")
    result = gw.trigger_observation(
        ttype="LVC",
        obsname=trig_id,
        time_min=req_time_s / 60,
        pretend=(pretend or GW_PRETEND),
        project_id=PROJECT_ID,
        secure_key=SECURE_KEY,
        email_tolist=NOTIFY_LIST,
        email_text=email_text,
        email_subject=email_subject,
        creator='VOEvent_Auto_Trigger: GW_LIGO=%s' % __version__,
        voevent=voeventparse.dumps(v))
    if result is None:
        # Trigger failed: report the accumulated log.
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=event_debug_list,
                            subject=debug_email_subject,
                            msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in gw.loglist]),
                            attachments=[('voevent.xml', voeventparse.dumps(v))
                                         ])
def to_strings(voeventlist):
    """Serialize every VOEvent in the list to its XML representation."""
    serialized = []
    for voevent in voeventlist:
        serialized.append(vp.dumps(voevent))
    return serialized
def test_process_voevent_script_2():
    """Feed a test packet to the example stdin-processing script."""
    script_path = os.path.join(examples_folder,
                               'process_voevent_from_stdin_2.py')
    packet = fourpiskytools.voevent.create_test_packet(example_identity)
    exit_status = run_command([script_path], vp.dumps(packet))
    assert exit_status == 0
def handle_grb(v, pretend=False):
    """
    Handles the actual VOEvent parsing, generating observations if appropriate.

    Identifies SWIFT vs Fermi alerts from the ivorn, caches events per
    trigger id in the module-level ``xml_cache``, decides whether to trigger,
    checks the current observing schedule for conflicts/precedence, and
    finally submits the observation.  Failure/decline paths all send a debug
    email with the raw VOEvent attached.

    :param v: string in VOEvent XML format
    :param pretend: Boolean, True if we don't want to actually schedule the observations.
    :return: None
    """
    log.debug("processing GRB {0}".format(v.attrib['ivorn']))
    # trigger = False
    if 'SWIFT' in v.attrib['ivorn']:
        # compute the trigger id
        trig_id = "SWIFT_" + v.attrib['ivorn'].split('_')[-1].split('-')[0]
        # #The following should never be hit because of the checks made in is_grb.
        # grbid = v.find(".//Param[@name='GRB_Identified']").attrib['value']
        # if grbid != 'true':
        #     log.debug("SWIFT alert but not a GRB")
        #     handlers.send_email(from_address='*****@*****.**',
        #                         to_addresses=DEBUG_NOTIFY_LIST,
        #                         subject='GRB_fermi_swift debug notification for trigger: %s' % trig_id,
        #                         msg_text=DEBUG_EMAIL_TEMPLATE % "SWIFT alert but not a GRB",
        #                         attachments=[('voevent.xml', voeventparse.dumps(v))])
        #
        #     return
        log.debug("SWIFT GRB trigger detected")
        this_trig_type = "SWIFT"

        # If the star tracker looses it's lock then we can't trust any of the
        # locations so we ignore this alert.
        startrack_lost_lock = v.find(
            ".//Param[@name='StarTrack_Lost_Lock']").attrib['value']
        # convert 'true' to True, and everything else to false
        startrack_lost_lock = startrack_lost_lock.lower() == 'true'
        log.debug("StarLock OK? {0}".format(not startrack_lost_lock))
        if startrack_lost_lock:
            log.debug("The SWIFT star tracker lost it's lock")
            handlers.send_email(
                from_address='*****@*****.**',
                to_addresses=DEBUG_NOTIFY_LIST,
                subject='GRB_fermi_swift debug notification for trigger: %s' %
                trig_id,
                msg_text=DEBUG_EMAIL_TEMPLATE %
                "SWIFT alert for GRB, but with StarTrack_Lost_Lock",
                attachments=[('voevent.xml', voeventparse.dumps(v))])
            return

        # cache the event using the trigger id
        if trig_id not in xml_cache:
            grb = GRB(event=v)
            grb.trigger_id = trig_id
            xml_cache[trig_id] = grb
        else:
            grb = xml_cache[trig_id]
            grb.add_event(v)

        # Integration time below LONG_SHORT_LIMIT classifies the burst as
        # short; short/long choose different VCS-mode flags.  Both paths
        # set trigger=True for SWIFT.
        trig_time = float(
            v.find(".//Param[@name='Integ_Time']").attrib['value'])
        if trig_time < LONG_SHORT_LIMIT:
            grb.debug("Probably a short GRB: t={0} < 2".format(trig_time))
            grb.short = True
            grb.vcsmode = SWIFT_SHORT_TRIGGERS_IN_VCSMODE
            trigger = True
        else:
            grb.debug("Probably a long GRB: t={0} > 2".format(trig_time))
            grb.short = False
            grb.vcsmode = SWIFT_LONG_TRIGGERS_IN_VCSMODE
            trigger = True
    elif "Fermi" in v.attrib['ivorn']:
        log.debug("Fermi GRB notice detected")
        # cache the event using the trigger id
        trig_id = "Fermi_" + v.attrib['ivorn'].split('_')[-2]
        this_trig_type = v.attrib['ivorn'].split('_')[1]  # Flt, Gnd, or Fin
        if trig_id not in xml_cache:
            grb = GRB(event=v)
            grb.trigger_id = trig_id
            xml_cache[trig_id] = grb
        else:
            grb = xml_cache[trig_id]
            grb.add_event(v)

        # Not all alerts have trigger times.
        # eg Fermi#GBM_Gnd_Pos
        if this_trig_type == 'Flt':
            trig_time = float(
                v.find(".//Param[@name='Trig_Timescale']").attrib['value'])
            if trig_time < LONG_SHORT_LIMIT:
                grb.short = True
                grb.debug("Possibly a short GRB: t={0}".format(trig_time))
            else:
                msg = "Probably not a short GRB: t={0}".format(trig_time)
                grb.debug(msg)
                grb.debug("Not Triggering")
                handlers.send_email(
                    from_address='*****@*****.**',
                    to_addresses=DEBUG_NOTIFY_LIST,
                    subject='GRB_fermi_swift debug notification for trigger: %s'
                    % trig_id,
                    msg_text=DEBUG_EMAIL_TEMPLATE %
                    '\n'.join([str(x) for x in grb.loglist]),
                    attachments=[('voevent.xml', voeventparse.dumps(v))])
                return  # don't trigger
            most_likely = int(
                v.find(".//Param[@name='Most_Likely_Index']").attrib['value'])
            # ignore things that don't have GRB as best guess
            # NOTE(review): index 4 presumably maps to "GRB" in the GBM
            # Most_Likely classification table — confirm against GCN docs.
            if most_likely == 4:
                grb.debug("MOST_LIKELY = GRB")
                prob = int(
                    v.find(
                        ".//Param[@name='Most_Likely_Prob']").attrib['value'])
                # ignore things that don't reach our probability threshold
                if prob > FERMI_POBABILITY_THRESHOLD:
                    grb.debug("Prob(GRB): {0}% > {1}".format(
                        prob, FERMI_POBABILITY_THRESHOLD))
                    trigger = True
                else:
                    msg = "Prob(GRB): {0}% <{1}".format(
                        prob, FERMI_POBABILITY_THRESHOLD)
                    grb.debug(msg)
                    grb.debug("Not Triggering")
                    handlers.send_email(
                        from_address='*****@*****.**',
                        to_addresses=DEBUG_NOTIFY_LIST,
                        subject=
                        'GRB_fermi_swift debug notification for trigger: %s' %
                        trig_id,
                        msg_text=DEBUG_EMAIL_TEMPLATE %
                        '\n'.join([str(x) for x in grb.loglist]),
                        attachments=[('voevent.xml', voeventparse.dumps(v))])
                    return
            else:
                msg = "MOST_LIKELY != GRB"
                grb.debug(msg)
                grb.debug("Not Triggering")
                handlers.send_email(
                    from_address='*****@*****.**',
                    to_addresses=DEBUG_NOTIFY_LIST,
                    subject='GRB_fermi_swift debug notification for trigger: %s'
                    % trig_id,
                    msg_text=DEBUG_EMAIL_TEMPLATE %
                    '\n'.join([str(x) for x in grb.loglist]),
                    attachments=[('voevent.xml', voeventparse.dumps(v))])
                return
        else:
            # for Gnd/Fin we trigger if we already triggered on the Flt position
            grb.debug("Gnd/Flt message -> reverting to Flt trigger")
            trigger = grb.triggered
    else:
        msg = "Not a Fermi or SWIFT GRB."
        log.debug(msg)
        log.debug("Not Triggering")
        handlers.send_email(from_address='*****@*****.**',
                            to_addresses=DEBUG_NOTIFY_LIST,
                            subject='GRB_fermi_swift debug notification',
                            msg_text=DEBUG_EMAIL_TEMPLATE % msg,
                            attachments=[('voevent.xml',
                                          voeventparse.dumps(v))])
        return

    if not trigger:
        grb.debug("Not Triggering")
        handlers.send_email(
            from_address='*****@*****.**',
            to_addresses=DEBUG_NOTIFY_LIST,
            subject='GRB_fermi_swift debug notification for trigger: %s' %
            trig_id,
            msg_text=DEBUG_EMAIL_TEMPLATE %
            '\n'.join([str(x) for x in grb.loglist]),
            attachments=[('voevent.xml', voeventparse.dumps(v))])
        return

    # get current position
    ra, dec, err = handlers.get_position_info(v)
    # add it to the list of positions
    grb.add_pos((ra, dec, err))
    grb.debug("RA {0}, Dec {1}, err {2}".format(ra, dec, err))

    # Default observation length is 30 min, shortened for VCS-mode triggers.
    if not grb.vcsmode:
        req_time_min = 30
    else:
        grb.debug('Reducing request time to %d for VCS observation' %
                  SWIFT_SHORT_VCS_TIME)
        req_time_min = SWIFT_SHORT_VCS_TIME

    # check repointing just for tests
    # last_pos = grb.get_pos(-2)
    # if None not in last_pos:
    #     grb.info("Old position: RA {0}, Dec {1}, err {2}".format(*last_pos))
    #
    #     pos_diff = SkyCoord(ra=last_pos[0], dec=last_pos[1], unit=astropy.units.degree, frame='icrs').separation(
    #         SkyCoord(ra=ra, dec=dec, unit=astropy.units.degree, frame='icrs')).degree
    #     if pos_diff < REPOINTING_LIMIT:
    #         grb.info("New position is {0} deg from previous (less than constraint of {1} deg)".format(pos_diff,
    #                                                                                                   REPOINTING_LIMIT))
    #         grb.info("Not triggering")
    #         handlers.send_email(from_address='*****@*****.**',
    #                             to_addresses=DEBUG_NOTIFY_LIST,
    #                             subject='GRB_fermi_swift debug notification',
    #                             msg_text=DEBUG_EMAIL_TEMPLATE % '\n'.join([str(x) for x in grb.loglist]),
    #                             attachments=[('voevent.xml', voeventparse.dumps(v))])
    #         return
    #     else:
    #         grb.info("New position is {0} deg from previous (greater than constraint of {1} deg".format(pos_diff,
    #                                                                                                     REPOINTING_LIMIT))
    #         grb.info("Attempting trigger")
    # end tests

    # look at the schedule
    obslist = triggerservice.obslist(obstime=1800)
    if obslist is not None and len(obslist) > 0:
        grb.debug("Currently observing:")
        grb.debug(str(obslist))
        # are we currently observing *this* GRB?
        obs = str(
            obslist[0][1])  # in case the obslist is returning unicode strings
        obs_group_id = obslist[0][
            5]  # The group ID of the first observation in the list returned
        grb.debug("obs {0}, trig {1}".format(obs, trig_id))

        # Same GRB trigger from same telescope
        if trig_id in obs:
            # if obs == trig_id:
            # update the schedule!
            grb.info("Already observing this GRB")
            last_pos = grb.get_pos(-2)
            grb.info(
                "Old position: RA {0}, Dec {1}, err {2}".format(*last_pos))
            pos_diff = SkyCoord(ra=last_pos[0],
                                dec=last_pos[1],
                                unit=astropy.units.degree,
                                frame='icrs').separation(
                                    SkyCoord(ra=ra,
                                             dec=dec,
                                             unit=astropy.units.degree,
                                             frame='icrs')).degree
            grb.info("New position is {0} deg from previous".format(pos_diff))
            if pos_diff < REPOINTING_LIMIT:
                # New localisation is close to where we already point: skip.
                grb.info("(less than constraint of {0} deg)".format(
                    REPOINTING_LIMIT))
                grb.info("Not triggering")
                handlers.send_email(
                    from_address='*****@*****.**',
                    to_addresses=DEBUG_NOTIFY_LIST,
                    subject='GRB_fermi_swift debug notification',
                    msg_text=DEBUG_EMAIL_TEMPLATE %
                    '\n'.join([str(x) for x in grb.loglist]),
                    attachments=[('voevent.xml', voeventparse.dumps(v))])
                return
            grb.info(
                "(greater than constraint of {0}deg)".format(REPOINTING_LIMIT))
            if "SWIFT" in trig_id:
                grb.info("Updating SWIFT observation with new coords")
                pass
            elif "Fermi" in trig_id:
                # Fermi position precedence: Fin > Gnd > Flt.  A lower-grade
                # position never replaces a higher-grade one.
                prev_type = grb.last_trig_type
                if this_trig_type == 'Flt' and (prev_type in ['Gnd', 'Fin']):
                    msg = "{0} positions have precedence over {1}".format(
                        prev_type, this_trig_type)
                    grb.info(msg)
                    grb.info("Not triggering")
                    handlers.send_email(
                        from_address='*****@*****.**',
                        to_addresses=DEBUG_NOTIFY_LIST,
                        subject=
                        'GRB_fermi_swift debug notification for trigger: %s' %
                        trig_id,
                        msg_text=DEBUG_EMAIL_TEMPLATE %
                        '\n'.join([str(x) for x in grb.loglist]),
                        attachments=[('voevent.xml', voeventparse.dumps(v))])
                    return
                elif this_trig_type == 'Gnd' and prev_type == 'Fin':
                    msg = "{0} positions have precedence over {1}".format(
                        prev_type, this_trig_type)
                    grb.info(msg)
                    grb.info("Not triggering")
                    handlers.send_email(
                        from_address='*****@*****.**',
                        to_addresses=DEBUG_NOTIFY_LIST,
                        subject=
                        'GRB_fermi_swift debug notification for trigger: %s' %
                        trig_id,
                        msg_text=DEBUG_EMAIL_TEMPLATE %
                        '\n'.join([str(x) for x in grb.loglist]),
                        attachments=[('voevent.xml', voeventparse.dumps(v))])
                    return
                else:
                    grb.info("Triggering {0} to replace {1}".format(
                        this_trig_type, prev_type))

            # shorten the observing time requested so we are ~30mins total (for non VCS).
            # If this is a VCS mode observation, don't shorten the time - if the previous trigger was
            # in VCS mode, we won't be able to interrupt it, and if it wasn't, we still want the normal
            # length of a VCS trigger.
            if (grb.first_trig_time is not None) and not grb.vcsmode:
                req_time_min = 30 - (Time.now() -
                                     grb.first_trig_time).sec // 60
                grb.debug('Set requested time to %d' % req_time_min)
        # if we are observing a SWIFT trigger but not the trigger we just received
        elif 'SWIFT' in obs:
            if "SWIFT" in trig_id:
                # A short SWIFT burst may interrupt a long one; look up the
                # cached classification of the burst currently being observed.
                if obs in xml_cache:
                    prev_short = xml_cache[obs].short
                else:
                    prev_short = False  # best bet if we don't know
                grb.info("Curently observing a SWIFT trigger")
                if grb.short and not prev_short:
                    grb.info("Interrupting with a short SWIFT GRB")
                else:
                    grb.info("Not interrupting previous observation")
                    handlers.send_email(
                        from_address='*****@*****.**',
                        to_addresses=DEBUG_NOTIFY_LIST,
                        subject=
                        'GRB_fermi_swift debug notification for trigger: %s' %
                        trig_id,
                        msg_text=DEBUG_EMAIL_TEMPLATE %
                        '\n'.join([str(x) for x in grb.loglist]),
                        attachments=[('voevent.xml', voeventparse.dumps(v))])
                    return
            else:
                grb.info("Not interrupting previous obs")
                handlers.send_email(
                    from_address='*****@*****.**',
                    to_addresses=DEBUG_NOTIFY_LIST,
                    subject='GRB_fermi_swift debug notification for trigger: %s'
                    % trig_id,
                    msg_text=DEBUG_EMAIL_TEMPLATE %
                    '\n'.join([str(x) for x in grb.loglist]),
                    attachments=[('voevent.xml', voeventparse.dumps(v))])
                return
        # if we are observing a FERMI trigger but not the trigger we just received
        elif 'Fermi' in obs:
            # SWIFT > Fermi
            if "SWIFT" in trig_id:
                grb.info("Replacing a Fermi trigger with a SWIFT trigger")
            else:
                grb.info(
                    "Currently observing a different Fermi trigger, not interrupting"
                )
                handlers.send_email(
                    from_address='*****@*****.**',
                    to_addresses=DEBUG_NOTIFY_LIST,
                    subject='GRB_fermi_swift debug notification for trigger: %s'
                    % trig_id,
                    msg_text=DEBUG_EMAIL_TEMPLATE %
                    '\n'.join([str(x) for x in grb.loglist]),
                    attachments=[('voevent.xml', voeventparse.dumps(v))])
                return
        else:
            grb.info("Not currently observing any GRBs")
    else:
        grb.debug("Current schedule empty")

    emaildict = {
        'triggerid': grb.trigger_id,
        'trigtime': Time.now().iso,
        'ra': Angle(grb.ra[-1],
                    unit=astropy.units.deg).to_string(unit=astropy.units.hour,
                                                      sep=':'),
        'dec': Angle(grb.dec[-1],
                     unit=astropy.units.deg).to_string(unit=astropy.units.deg,
                                                       sep=':'),
        'err': grb.err[-1]
    }
    email_text = EMAIL_TEMPLATE % emaildict
    email_subject = EMAIL_SUBJECT_TEMPLATE % grb.trigger_id
    # Do the trigger
    result = grb.trigger_observation(
        ttype=this_trig_type,
        obsname=trig_id,
        time_min=req_time_min,
        pretend=pretend,
        project_id=PROJECT_ID,
        secure_key=SECURE_KEY,
        email_tolist=NOTIFY_LIST,
        email_text=email_text,
        email_subject=email_subject,
        creator='VOEvent_Auto_Trigger: GRB_Fermi_swift=%s' % __version__,
        voevent=voeventparse.dumps(v))
    # trigger_observation returning None indicates failure: notify debuggers.
    if result is None:
        handlers.send_email(
            from_address='*****@*****.**',
            to_addresses=DEBUG_NOTIFY_LIST,
            subject='GRB_fermi_swift debug notification for trigger: %s' %
            trig_id,
            msg_text=DEBUG_EMAIL_TEMPLATE %
            '\n'.join([str(x) for x in grb.loglist]),
            attachments=[('voevent.xml', voeventparse.dumps(v))])
def NewVOEvent(dm, dm_err, width, snr, flux, ra, dec, semiMaj, semiMin,
               ymw16, name, importance, utc, gl, gb):
    """
    Build, validate and write an Apertif/ALERT FRB VOEvent packet.

    Creates a VOEvent containing observatory configuration, the event
    measurements, derived quantities (inferred redshift, MW DM limit) and the
    sky position, then writes it to ``<utc>.xml`` if it validates as VOEvent
    v2.0 and prints the main sections.

    :param dm: Dispersion measure (pc/cm^3).
    :param dm_err: DM uncertainty (currently unused; kept for interface
        compatibility — see commented-out DM_err param below).
    :param width: Pulse width (ms).
    :param snr: Signal-to-noise ratio.
    :param flux: Flux estimate (Jy), from the radiometer equation.
    :param ra: Right ascension (deg).
    :param dec: Declination (deg).
    :param semiMaj: Beam semi-major axis (arcmin; converted to deg for the
        position error).
    :param semiMin: Beam semi-minor axis.
    :param ymw16: Galactic DM limit from the YMW16 electron-density model.
    :param name: Source name, used in the ivorn and the Why section.
    :param importance: Event importance passed to ``vp.add_why``.
    :param utc: Event time as an ISO-like string ``YYYY-MM-DD hh:mm:ss...``.
    :param gl: Galactic longitude (deg).
    :param gb: Galactic latitude (deg).
    :return: None.  Side effect: writes ``<utc>.xml`` on success.
    """
    z = dm / 1200.0  # May change
    errDeg = semiMaj / 60.0  # arcmin -> deg

    # Parse UTC
    utc_YY = int(utc[:4])
    utc_MM = int(utc[5:7])
    utc_DD = int(utc[8:10])
    utc_hh = int(utc[11:13])
    utc_mm = int(utc[14:16])
    utc_ss = float(utc[17:])
    # Time() also sanity-checks the utc string format; mjd itself is unused.
    t = Time('T'.join([utc[:10], utc[11:]]), scale='utc', format='isot')
    mjd = t.mjd

    now = Time.now()
    mjd_now = now.mjd

    ivorn = ''.join([name, str(utc_hh), str(utc_mm), '/', str(mjd_now)])

    v = vp.Voevent(stream='nl.astron.apertif/alert', stream_id=ivorn,
                   role=vp.definitions.roles.test)
    # v = vp.Voevent(stream='nl.astron.apertif/alert', stream_id=ivorn, role=vp.definitions.roles.observation)

    # Author origin information
    vp.set_who(v, date=datetime.datetime.utcnow(), author_ivorn="nl.astron")
    # Author contact information
    vp.set_author(v, title="ASTRON ALERT FRB Detector",
                  contactName="Leon Oostrum",
                  contactEmail="*****@*****.**",
                  shortName="ALERT")

    # Parameter definitions
    # Apertif-specific observing configuration %%TODO: update parameters as necessary for new obs config
    beam_sMa = vp.Param(name="beam_semi-major_axis", unit="MM",
                        ucd="instr.beam;pos.errorEllipse;phys.angSize.smajAxis",
                        ac=True, value=semiMaj)
    beam_sma = vp.Param(name="beam_semi-minor_axis", unit="MM",
                        ucd="instr.beam;pos.errorEllipse;phys.angSize.sminAxis",
                        ac=True, value=semiMin)
    beam_rot = vp.Param(name="beam_rotation_angle", value=0.0, unit="Degrees",
                        ucd="instr.beam;pos.errorEllipse;instr.offset",
                        ac=True)
    tsamp = vp.Param(name="sampling_time", value=0.0496, unit="ms",
                     ucd="time.resolution", ac=True)
    bw = vp.Param(name="bandwidth", value=300.0, unit="MHz",
                  ucd="instr.bandwidth", ac=True)
    nchan = vp.Param(name="nchan", value="1536", dataType="int",
                     ucd="meta.number;em.freq;em.bin", unit="None")
    cf = vp.Param(name="centre_frequency", value=1400.0, unit="MHz",
                  ucd="em.freq;instr", ac=True)
    npol = vp.Param(name="npol", value="2", dataType="int", unit="None")
    bits = vp.Param(name="bits_per_sample", value="8", dataType="int",
                    unit="None")
    gain = vp.Param(name="gain", value=1.0, unit="K/Jy", ac=True)
    tsys = vp.Param(name="tsys", value=75.0, unit="K",
                    ucd="phot.antennaTemp", ac=True)
    backend = vp.Param(name="backend", value="ARTS")
    # beam = vp.Param(name="beam", value= )
    v.What.append(vp.Group(params=[beam_sMa, beam_sma, beam_rot, tsamp, bw,
                                   nchan, cf, npol, bits, gain, tsys, backend],
                           name="observatory parameters"))

    # Event parameters
    DM = vp.Param(name="dm", ucd="phys.dispMeasure", unit="pc/cm^3",
                  ac=True, value=dm)
    # DM_err = vp.Param(name="dm_err", ucd="stat.error;phys.dispMeasure", unit="pc/cm^3", ac=True, value=dm_err)
    Width = vp.Param(name="width", ucd="time.duration;src.var.pulse",
                     unit="ms", ac=True, value=width)
    SNR = vp.Param(name="snr", ucd="stat.snr", unit="None", ac=True,
                   value=snr)
    Flux = vp.Param(name="flux", ucd="phot.flux", unit="Jy", ac=True,
                    value=flux)
    Flux.Description = "Calculated from radiometer equation. Not calibrated."
    Gl = vp.Param(name="gl", ucd="pos.galactic.lon", unit="Degrees", ac=True,
                  value=gl)
    Gb = vp.Param(name="gb", ucd="pos.galactic.lat", unit="Degrees", ac=True,
                  value=gb)
    v.What.append(vp.Group(params=[DM, Width, SNR, Flux, Gl, Gb],
                           name="event parameters"))
    # v.What.append(vp.Group(params=[DM, DM_err, Width, SNR, Flux, Gl, Gb], name="event parameters"))

    # Advanced parameters (note, change script if using a differeing MW model)
    mw_dm = vp.Param(name="MW_dm_limit", unit="pc/cm^3", ac=True, value=ymw16)
    mw_model = vp.Param(name="galactic_electron_model", value="YMW16")
    redshift_inferred = vp.Param(name="redshift_inferred", ucd="src.redshift",
                                 unit="None", value=z)
    redshift_inferred.Description = "Redshift estimated using z = DM/1200.0 (Ioka 2003)"
    v.What.append(vp.Group(params=[mw_dm, mw_model, redshift_inferred],
                           name="advanced parameters"))

    # WhereWhen
    vp.add_where_when(v,
                      coords=vp.Position2D(
                          ra=ra, dec=dec, err=errDeg, units='deg',
                          system=vp.definitions.sky_coord_system.utc_fk5_geo),
                      obs_time=datetime.datetime(utc_YY, utc_MM, utc_DD,
                                                 utc_hh, utc_mm, int(utc_ss),
                                                 tzinfo=pytz.UTC),
                      observatory_location="WSRT")

    # Why
    # FIX: was `importance=imp`, a NameError — the parameter is `importance`.
    vp.add_why(v, importance=importance)
    v.Why.Name = name

    if vp.valid_as_v2_0(v):
        with open('%s.xml' % utc, 'wb') as f:
            voxml = vp.dumps(v)
            xmlstr = minidom.parseString(voxml).toprettyxml(indent=" ")
            # FIX: file is opened in binary mode; toprettyxml returns str,
            # so encode before writing (was a TypeError on Python 3).
            f.write(xmlstr.encode('utf-8'))
        print(vp.prettystr(v.Who))
        print(vp.prettystr(v.What))
        print(vp.prettystr(v.WhereWhen))
        print(vp.prettystr(v.Why))
    else:
        # FIX: was a Python 2 print statement (SyntaxError under Python 3);
        # the rest of this function already uses print().
        print("Unable to write file %s.xml" % name)
def voevent_etree_to_ivorn_xml_tuple(voevent):
    """
    Pair a VOEvent's ivorn with its serialized XML.

    Args:
        voevent (etree): Root of an lxml.etree loaded with voeventparse.

    Returns:
        tuple: ``(ivorn, xml_bytes)`` for the given packet.
    """
    ivorn = voevent.attrib['ivorn']
    xml = voeventparse.dumps(voevent)
    return ivorn, xml
def handle_flarestar(v, pretend=False):
    """
    Handles the actual VOEvent parsing, generating observations if appropriate.

    Declination-filters the event, caches it per trigger id in ``xml_cache``,
    skips re-triggering if the star is already being observed, and otherwise
    submits a fixed-length observation.  Decline/failure paths email the
    debug list with the raw VOEvent attached.

    :param v: string in VOEvent XML format
    :param pretend: Boolean, True if we don't want to actually schedule the observations.
    :return: None
    """
    ivorn = v.attrib['ivorn']
    log.debug("processing Flare Star {0}".format(ivorn))
    name = v.Why.Inference.Name
    trig_id = v.find(".//Param[@name='TrigID']").attrib['value']
    c = voeventparse.get_event_position(v)
    # NOTE(review): the message hard-codes "+10 degrees" while the test uses
    # DEC_LIMIT — confirm the two agree, or the email text will be misleading.
    if c.dec > DEC_LIMIT:
        msg = "Flare Star {0} above declination cutoff of +10 degrees".format(
            name)
        log.debug(msg)
        log.debug("Not triggering")
        # NOTE(review): subject reuses the 'GRB_fermi_swift' label from the
        # GRB handler — presumably a copy-paste; confirm before changing.
        handlers.send_email(
            from_address='*****@*****.**',
            to_addresses=DEBUG_NOTIFY_LIST,
            subject='GRB_fermi_swift debug notification for trigger: %s' %
            trig_id,
            msg_text=DEBUG_EMAIL_TEMPLATE % msg,
            attachments=[('voevent.xml', voeventparse.dumps(v))])
        return

    # cache the event using the trigger id
    if trig_id not in xml_cache:
        fs = FlareStar(event=v)
        fs.trigger_id = trig_id
        xml_cache[trig_id] = fs
    else:
        fs = xml_cache[trig_id]
        fs.add_event(v)

    ra = c.ra
    dec = c.dec
    # Flare star positions are treated as exact: error radius 0.
    fs.add_pos((ra, dec, 0.))
    fs.debug("Flare Star {0} is detected at RA={1}, Dec={2}".format(
        name, ra, dec))

    req_time_min = 30

    # look at the schedule
    obslist = triggerservice.obslist(obstime=1800)
    if obslist is not None and len(obslist) > 0:
        fs.debug("Currently observing:")
        fs.debug(str(obslist))
        # are we currently observing *this* GRB?
        obs = str(
            obslist[0][1])  # in case the obslist is returning unicode strings
        fs.debug("obs {0}, trig {1}".format(obs, trig_id))
        # Same GRB trigger from same telescope
        if obs == trig_id:
            fs.info("already observing this star")
            fs.info("not triggering again")
            handlers.send_email(
                from_address='*****@*****.**',
                to_addresses=DEBUG_NOTIFY_LIST,
                subject='GRB_fermi_swift debug notification for trigger: %s' %
                trig_id,
                msg_text=DEBUG_EMAIL_TEMPLATE %
                '\n'.join([str(x) for x in fs.loglist]),
                attachments=[('voevent.xml', voeventparse.dumps(v))])
            return
    else:
        fs.debug("Current schedule empty")

    fs.debug("Triggering")
    # label as SWIFT or MAXI for the trigger type
    ttype = v.attrib['ivorn'].split('/')[-1].split('#')[0]

    emaildict = {
        'triggerid': fs.trigger_id,
        'trigtime': Time.now().iso,
        'ra': Angle(fs.ra[-1],
                    unit=astropy.units.deg).to_string(unit=astropy.units.hour,
                                                      sep=':'),
        'dec': Angle(fs.dec[-1],
                     unit=astropy.units.deg).to_string(unit=astropy.units.deg,
                                                       sep=':'),
        'name': name
    }
    email_text = EMAIL_TEMPLATE % emaildict
    email_subject = EMAIL_SUBJECT_TEMPLATE % fs.trigger_id
    # Do the trigger
    result = fs.trigger_observation(
        ttype=ttype,
        obsname=trig_id,
        time_min=req_time_min,
        pretend=pretend,
        project_id=PROJECT_ID,
        secure_key=SECURE_KEY,
        email_tolist=NOTIFY_LIST,
        email_text=email_text,
        email_subject=email_subject,
        creator='VOEvent_Auto_Trigger: FlareStar_swift_maxi=%s' % __version__,
        voevent=voeventparse.dumps(v))
    # trigger_observation returning None indicates failure: notify debuggers.
    if result is None:
        handlers.send_email(
            from_address='*****@*****.**',
            to_addresses=DEBUG_NOTIFY_LIST,
            subject='GRB_fermi_swift debug notification for trigger: %s' %
            trig_id,
            msg_text=DEBUG_EMAIL_TEMPLATE %
            '\n'.join([str(x) for x in fs.loglist]),
            attachments=[('voevent.xml', voeventparse.dumps(v))])