def test_add_How(self):
    descriptions = ['One sentence.', 'Another.']
    vp.add_how(self.v, descriptions)
    self.assertEqual(len(self.v.How.Description), 2)
    self.assertEqual(descriptions,
                     [self.v.How.Description[0], self.v.How.Description[1]])
    refs = [vp.Reference('http://www.saltycrane.com/blog/2011/07/'
                         'example-parsing-xml-lxml-objectify/'),
            vp.Reference('http://github.com/timstaley/voevent-parse')]
    vp.add_how(self.v, references=refs)
    self.assertEqual(len(self.v.How.Reference), len(refs))
    self.assertEqual([r.attrib['uri'] for r in refs],
                     [r.attrib['uri'] for r in self.v.How.Reference])
    self.assertTrue(vp.valid_as_v2_0(self.v))
def set_how(self):
    ''' Add How section to voevent object. '''
    # Describe the reference/telescope here
    # TODO: reference of telescope?
    vp.add_how(self.v,
               descriptions=self.event['telescope'],
               references=vp.Reference(""))
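A minimal sketch of how the TODO above might be resolved, passing a real reference URI to ``vp.add_how`` instead of an empty one; the ``telescope_url`` key and fallback URI are assumptions, not part of the original event dict:

# Hypothetical follow-up to the TODO above: supply an actual telescope reference.
# 'telescope_url' is an assumed field; the fallback URI is purely illustrative.
vp.add_how(self.v,
           descriptions=self.event['telescope'],
           references=vp.Reference(
               uri=self.event.get('telescope_url', 'http://example.org/telescope')))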
def generate_voevent(self, feed_id):
    event_data = self.event_id_data_map[feed_id]
    stream_id = self.feed_id_to_stream_id(feed_id)
    v = create_skeleton_4pisky_voevent(substream=self.substream,
                                       stream_id=stream_id,
                                       role=vp.definitions.roles.observation,
                                       date=datetime.datetime.utcnow())
    vp.add_how(v, references=[vp.Reference(uri=self.url)])
    v.How.Description = "Parsed from Swift burst-analysis listings by 4PiSky-Bot."
    # Simply copy the WhereWhen from the trigger event:
    v.WhereWhen = self.trigger_event.WhereWhen
    v.What.append(vp.Param("TrigID", value=self.trigger_id, ucd="meta.id"))
    vp.add_citations(v, event_ivorns=[
        vp.EventIvorn(ivorn=self.trigger_event.attrib['ivorn'],
                      cite_type=vp.definitions.cite_types.followup)
    ])
    if SwiftFeedKeys.duration in feed_id:
        duration_data = event_data[SwiftFeedKeys.duration]
        battblocks_failed = duration_data['battblocks_failed']
        battblocks_param = vp.Param('battblocks_failed',
                                    value=battblocks_failed,
                                    ucd='meta.code.error')
        battblocks_param.Description = """\
If 'battblocks_failed' is 'True', the source-page contains the
'battblocks failed' warning. This means the duration analysis is bad,
usually because the source is not actually a GRB burst.
"""
        duration_params = []
        if not battblocks_failed:
            duration_params.extend([
                vp.Param(k, value=duration_data.get(k), unit='s',
                         ucd='time.duration')
                for k in (SwiftFeedKeys.t90, SwiftFeedKeys.t50)
            ])
            duration_params.extend([
                vp.Param(k, value=duration_data.get(k), unit='s',
                         ucd='meta.code.error;time.duration')
                for k in (SwiftFeedKeys.t90_err, SwiftFeedKeys.t50_err)
            ])
        duration_params.append(battblocks_param)
        v.What.append(
            vp.Group(params=duration_params, name=SwiftFeedKeys.duration))
    return v
# In[ ]:

## See how much element creation that routine just saved us(!):
# print(vp.prettystr(v.WhereWhen))


# ##Adding the ``How``##
# We should also describe how this transient was detected, and refer to the
# name that Gaia have assigned it. Note that we can provide multiple
# descriptions (and/or references) here:

# In[ ]:

vp.add_how(v, descriptions=['Scraped from the Gaia website',
                            'This is Gaia14adi'],
           references=vp.Reference("http://gsaweb.ast.cam.ac.uk/alerts/"))


# ##And finally, ``Why``##
# Finally, we can provide some information about why this event might be
# scientifically interesting. Gaia haven't provided a classification, but we
# can at least incorporate the textual description:

# In[ ]:

vp.add_why(v)
v.Why.Description = "Fading source on top of 2MASS Galaxy (offset from bulge)"


# ##Check and save##
# Finally - and importantly, as discussed in the
# [VOEvent notes](http://voevent.readthedocs.org/en/latest/parse.html) -
# let's make sure that this event is really valid according to our schema:

# In[ ]:

vp.valid_as_v2_0(v)
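# A minimal sketch of the 'save' half of this step, writing the validated
# packet to disk with voevent-parse's ``vp.dump``; the filename here is just
# an assumed example:

# In[ ]:

with open('gaia_alert_example.xml', 'wb') as f:  # vp.dump expects a binary-mode file
    vp.dump(v, f)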
def generate_voevent(self, feed_id):
    rowdict = self.event_id_data_map[feed_id]
    params = rowdict['param']
    urls = rowdict['url']
    stream_id = self.feed_id_to_stream_id(feed_id)
    v = create_skeleton_4pisky_voevent(substream=self.substream,
                                       stream_id=stream_id,
                                       role=vp.definitions.roles.observation,
                                       date=datetime.datetime.utcnow()
                                       )
    vp.add_how(v, references=vp.Reference(uri=self.url))
    v.How.Description = "Parsed from ASASSN listings page by 4PiSky-Bot."
    timestamp_dt = asassn_timestamp_str_to_datetime(
        params[AsassnKeys.detection_timestamp])
    posn_sc = SkyCoord(params['ra'], params['dec'],
                       unit=(u.hourangle, u.deg))
    # Couldn't find a formal analysis of positional accuracy, but
    # http://dx.doi.org/10.1088/0004-637X/788/1/48
    # states the angular resolution as 16 arcseconds, so we'll go with that.
    err_radius_estimate = 16 * u.arcsec
    posn_simple = vp.Position2D(ra=posn_sc.ra.deg,
                                dec=posn_sc.dec.deg,
                                err=err_radius_estimate.to(u.deg).value,
                                units=vp.definitions.units.degrees,
                                system=vp.definitions.sky_coord_system.utc_icrs_geo,
                                )
    vp.add_where_when(
        v,
        coords=posn_simple,
        obs_time=timestamp_dt,
        observatory_location=vp.definitions.observatory_location.geosurface)
    asassn_params = [vp.Param(key, params[key])
                     for key in (AsassnKeys.id_asassn,
                                 AsassnKeys.id_other,
                                 AsassnKeys.detection_timestamp,
                                 AsassnKeys.ra,
                                 AsassnKeys.dec,
                                 AsassnKeys.spec_class,
                                 AsassnKeys.comment,
                                 )
                     if key in params
                     ]
    if AsassnKeys.mag_v in params:
        asassn_params.append(
            vp.Param(AsassnKeys.mag_v, params[AsassnKeys.mag_v],
                     unit='mag', ucd="phot.mag",
                     ))
    if AsassnKeys.id_other in urls:
        asassn_params.append(
            vp.Param(AsassnKeys.id_other,
                     urls[AsassnKeys.id_other][0][0]))
    asassn_urls = [vp.Param(key, urls[key][0][1]) for key in urls]
    v.What.append(vp.Group(params=asassn_params,
                           name=self.text_params_groupname))
    v.What.append(vp.Group(params=asassn_urls,
                           name=self.url_params_groupname))
    return v
                         units='deg',
                         system=vp.definitions.sky_coord_system.utc_fk5_geo),
    obs_time=datetime.datetime(2013, 1, 31, 12, 5, 30, tzinfo=pytz.utc),
    observatory_location=vp.definitions.observatory_location.geosurface)

# Prettyprint some sections for desk-checking:
print("\n***Here is your WhereWhen:***\n")
print(vp.prettystr(v.WhereWhen))
print("\n***And your What:***\n")
print(vp.prettystr(v.What))

# You would normally describe or reference your telescope / instrument here:
vp.add_how(v, descriptions='Discovered via 4PiSky',
           references=vp.Reference('http://4pisky.org'))

# The 'Why' section is optional; it allows for speculation on the probable
# astrophysical cause:
vp.add_why(v, importance=0.5,
           inferences=vp.Inference(probability=0.1,
                                   relation='identified',
                                   name='GRB121212A',
                                   concept='process.variation.burst;em.radio'))

# We can also cite earlier VOEvents:
vp.add_citations(
    v,
    vp.EventIvorn(
        ivorn='ivo://astronomy.physics.science.org/super_exciting_events#101',
        cite_type=vp.definitions.cite_types.followup))
def create_voevent(jsonfile=None, deployment=False, **kwargs):
    """ template syntax for voeventparse creation of voevent """

    required = ['internalname', 'mjds', 'dm', 'width', 'snr', 'ra', 'dec',
                'radecerr']
    preferred = ['fluence', 'p_flux', 'importance', 'dmerr']

    # set values
    dd = kwargs.copy()
    if jsonfile is not None:  # as made by caltechdata.set_metadata
        # Load the trigger metadata from the JSON file ('trigger' was left
        # undefined in the original snippet).
        import json
        with open(jsonfile) as fp:
            trigger = json.load(fp)
        for k, val in trigger.items():
            if k in required + preferred:
                dd[k] = val

    assert all([k in dd for k in required]), \
        f'Input keys {list(dd.keys())} not complete (requires {required})'

    # TODO: set this correctly
    dt = time.Time(dd['mjds'], format='mjd').to_datetime(timezone=pytz.utc)

    # create voevent instance
    role = vp.definitions.roles.observation if deployment else vp.definitions.roles.test
    v = vp.Voevent(stream='',  # TODO: check
                   stream_id=1,
                   role=role)

    vp.set_who(v,
               date=datetime.datetime.utcnow(),
               author_ivorn="voevent.dsa-110.caltech.org")  # TODO: check
    vp.set_author(v,
                  title="DSA-110 Testing Node",
                  contactName="Casey Law",
                  contactEmail="*****@*****.**")

    params = []
    dm = vp.Param(name="dm",
                  value=str(dd['dm']),
                  unit="pc/cm^3",
                  ucd="phys.dispMeasure;em.radio.750-1500MHz",
                  dataType='float',
                  ac=True)
    dm.Description = 'Dispersion Measure'
    params.append(dm)

    width = vp.Param(name="width",
                     value=str(dd['width']),
                     unit="ms",
                     ucd="time.duration;src.var.pulse",
                     dataType='float',
                     ac=True)
    width.Description = 'Temporal width of burst'
    params.append(width)

    snr = vp.Param(name="snr",
                   value=str(dd['snr']),
                   ucd="stat.snr",
                   dataType='float',
                   ac=True)
    snr.Description = 'Signal to noise ratio'
    params.append(snr)

    if 'fluence' in dd:
        fluence = vp.Param(name='fluence',
                           value=str(dd['fluence']),
                           unit='Jansky ms',
                           ucd='em.radio.750-1500MHz',  # TODO: check
                           dataType='float',
                           ac=False)
        fluence.Description = 'Fluence'
        params.append(fluence)

    if 'p_flux' in dd:
        p_flux = vp.Param(name='peak_flux',
                          value=str(dd['p_flux']),
                          unit='Janskys',
                          ucd='em.radio.750-1500MHz',
                          dataType='float',
                          ac=True)
        p_flux.Description = 'Peak Flux'
        params.append(p_flux)

    if 'dmerr' in dd:
        dmerr = vp.Param(name="dm_error",
                         value=str(dd['dmerr']),
                         unit="pc/cm^3",
                         ucd="phys.dispMeasure;em.radio.750-1500MHz",
                         dataType='float',
                         ac=True)
        dmerr.Description = 'Dispersion Measure error'
        params.append(dmerr)

    v.What.append(vp.Group(params=params, name='event parameters'))

    vp.add_where_when(
        v,
        coords=vp.Position2D(ra=str(dd['ra']),
                             dec=str(dd['dec']),
                             err=str(dd['radecerr']),
                             units='deg',
                             system=vp.definitions.sky_coord_system.utc_fk5_geo),
        obs_time=dt,
        observatory_location='OVRO')

    print("\n***Here is your WhereWhen:***\n")
    print(vp.prettystr(v.WhereWhen))
    print("\n***And your What:***\n")
    print(vp.prettystr(v.What))

    vp.add_how(v,
               descriptions='Discovered with DSA-110',
               references=vp.Reference('http://deepsynoptic.org'))

    if 'importance' in dd:
        vp.add_why(v, importance=str(dd['importance']))
    else:
        vp.add_why(v)
    v.Why.Name = str(dd['internalname'])

    vp.assert_valid_as_v2_0(v)

    return v
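A hedged usage sketch of the function above; every value below is an illustrative assumption rather than a real DSA-110 detection, and deployment=False keeps the packet role as 'test':

# Illustrative call with made-up values (not a real detection).
example_voevent = create_voevent(deployment=False,
                                 internalname='candidate-test-001',  # assumed name
                                 mjds=59000.123,
                                 dm=350.2, dmerr=0.5,
                                 width=1.2, snr=12.5,
                                 ra=150.1, dec=35.4, radecerr=0.01,
                                 importance=0.4)
print(vp.prettystr(example_voevent.Why))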
def generate_voevent(self, feed_id):
    event_data = self.event_id_data_map[feed_id]
    stream_id = self.feed_id_to_stream_id(feed_id)
    v = create_skeleton_4pisky_voevent(substream=self.substream,
                                       stream_id=stream_id,
                                       role=vp.definitions.roles.observation,
                                       date=datetime.datetime.utcnow()
                                       )
    gsaw_event_url = 'http://gsaweb.ast.cam.ac.uk/alerts/alert/' + feed_id
    vp.add_how(v, references=[vp.Reference(uri=self.url),
                              vp.Reference(uri=gsaw_event_url)
                              ])
    v.How.Description = "Parsed from GAIA Science Alerts listings by 4PiSky-Bot."
    posn_sc = SkyCoord(event_data[GaiaKeys.ra], event_data[GaiaKeys.dec],
                       unit=(u.deg, u.deg))

    # Astrometric accuracy is a guesstimate,
    # http://gsaweb.ast.cam.ac.uk/alerts/tableinfo states that:
    # "The sky position may either refer to a source in Gaia's own
    # catalogue, or to a source in an external catalogue (e.g. SDSS) used as
    # a reference for combining Gaia observations. Where the position comes
    # from Gaia's catalogue, it is derived from a single, Gaia observation
    # at the triggering point of the alert; this is not an astrometric
    # measurement to the full precision of the Gaia main mission."
    #
    # We assume a 'worst-case' scenario of 100mas from SDSS at mag r=22, cf
    # http://classic.sdss.org/dr7/products/general/astrometry.html
    err_radius_estimate = 0.1 * u.arcsec

    posn_simple = vp.Position2D(ra=posn_sc.ra.deg,
                                dec=posn_sc.dec.deg,
                                err=err_radius_estimate.to(u.deg).value,
                                units=vp.definitions.units.degrees,
                                system=vp.definitions.sky_coord_system.utc_icrs_geo,
                                )

    # NB GAIA values are in Barycentric co-ordinate time
    # (http://en.wikipedia.org/wiki/Barycentric_Coordinate_Time)
    observation_time_tcb = astropy.time.Time(
        event_data[GaiaKeys.obs_timestamp], scale='tcb')
    # We convert to UTC, in keeping with other feeds:
    observation_time_utc_dt = observation_time_tcb.utc.datetime
    observation_time_utc_dt = observation_time_utc_dt.replace(tzinfo=pytz.UTC)

    vp.add_where_when(
        v,
        coords=posn_simple,
        obs_time=observation_time_utc_dt,
        observatory_location=vp.definitions.observatory_location.geosurface)

    gaia_params = [vp.Param('Name', event_data[GaiaKeys.name])]
    gaia_params.extend([vp.Param(key.strip(), event_data[key])
                        for key in (GaiaKeys.alert_class,
                                    GaiaKeys.obs_timestamp,
                                    GaiaKeys.pub_timestamp,
                                    GaiaKeys.ra,
                                    GaiaKeys.dec,
                                    GaiaKeys.comment,
                                    )
                        ])
    gaia_params.extend([vp.Param(key.strip(), event_data[key],
                                 unit='mag', ucd='phot.mag')
                        for key in (GaiaKeys.mag_alert,
                                    GaiaKeys.mag_historic,
                                    GaiaKeys.mag_historic_std_dev,
                                    )
                        ])
    v.What.append(vp.Group(params=gaia_params,
                           name=self.text_params_groupname))
    return v