def test_load_of_voe_v1(self):
    with self.assertRaises(ValueError):
        with open(datapaths.swift_xrt_pos_v1, 'rb') as f:
            vff = vp.load(f)
    # Can override version checking, at own risk!
    with open(datapaths.swift_xrt_pos_v1, 'rb') as f:
        vff = vp.load(f, check_version=False)
def test_low_dec(self):
    with open(datapaths.swift_bat_grb_pos_v2) as f:
        good_src = voeventparse.load(f)
    with open(datapaths.swift_bat_grb_low_dec) as f:
        bad_src = voeventparse.load(f)
    good_fk5 = convert_voe_coords_to_eqposn(
        voeventparse.pull_astro_coords(good_src))
    bad_fk5 = convert_voe_coords_to_eqposn(
        voeventparse.pull_astro_coords(bad_src))
    self.assertIsNone(filters.ami.reject(good_fk5))
    self.assertIsNotNone(filters.ami.reject(bad_fk5))
def test_low_dec(self):
    with open(datapaths.swift_bat_grb_pos_v2) as f:
        good_src = voeventparse.load(f)
    with open(datapaths.swift_bat_grb_low_dec) as f:
        bad_src = voeventparse.load(f)
    good_fk5 = convert_voe_coords_to_eqposn(
        voeventparse.get_event_position(good_src))
    bad_fk5 = convert_voe_coords_to_eqposn(
        voeventparse.get_event_position(bad_src))
    self.assertIsNone(filters.ami.reject(good_fk5))
    self.assertIsNotNone(filters.ami.reject(bad_fk5))
def setUp(self):
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        self.swift_grb_v2_packet = vp.load(f)
    with open(datapaths.moa_lensing_event_path, 'rb') as f:
        self.moa_packet = vp.load(f)
    with open(datapaths.gaia_alert_16aac_direct, 'rb') as f:
        self.gaia_noname_param_packet = vp.load(f)
    with open(datapaths.asassn_scraped_example, 'rb') as f:
        self.assasn_scraped_packet = vp.load(f)
    self.blank = vp.Voevent(stream='voevent.foo.bar/TEST',
                            stream_id='100',
                            role='test')
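# Aside (not from the original test): a blank packet like self.blank above can
# be fleshed out with voeventparse's setter helpers before use. A minimal
# sketch - the stream name and author details here are placeholders only:
import datetime
import voeventparse as vp

v = vp.Voevent(stream='voevent.foo.bar/TEST', stream_id='100', role='test')
vp.set_who(v, date=datetime.datetime.utcnow(),
           author_ivorn='voevent.foo.bar/example_author')
vp.set_author(v, title='Example author', shortName='FooBar')
print(vp.valid_as_v2_0(v))  # check how far the skeleton packet gets us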
def parse_VOEvent(self, voevent, mapping):
    '''
    Parse a VOEvent XML file.

    :param voevent: VOEvent XML file
    :param mapping: mapping from mapping.json
    :type voevent: lxml.objectify.ObjectifiedElement, str
    :type mapping: dict
    :returns: mapping (mapping from mapping.json with values filled),
              event_type (event_type and citation if applicable)
    :rtype: dict, tuple
    '''
    # load the VOEvent xml file
    try:
        v = vp.load(voevent)
    except AttributeError:
        f = open(voevent, "rb")
        v = vp.load(f)
        f.close()

    # assert that the xml file is a valid VOEvent
    vp.assert_valid_as_v2_0(v)

    # Check if the event is a new VOEvent
    # For a new VOEvent there should be no citations
    try:
        event_type = (v.xpath('Citations')[0].EventIVORN.attrib['cite'],
                      v.xpath('Citations')[0].EventIVORN.text)
    except IndexError:
        event_type = ('new', None)
    self.logger.info("Event of type: {}".format(event_type))

    # use the mapping to get the required data from the VOEvent xml;
    # if a path is not found in the xml it gets an empty list, which is
    # removed in the next step
    # puts all params into dict param_data[group][param_name]
    try:
        param_data = vp.get_grouped_params(v)
    except AttributeError:
        # <What> section is not needed for retractions
        param_data = None

    for table in mapping.keys():  # iterate over all tables
        for idx, item in enumerate(mapping[table]):
            # Add values from XML to dictionary
            mapping[table][idx]['value'] = self.get_value(
                v, param_data, item, event_type)
            if item.get('description'):
                note = self.get_description(v, item)
                if note:
                    mapping[table][idx]['note'] = note

    return mapping, event_type
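# For reference (an illustrative aside, not part of parse_VOEvent):
# vp.get_grouped_params returns a nested mapping keyed first by Group name and
# then by Param name, each Param being a dict of its XML attributes, so values
# are reached as grouped[group][name]['value']. A small standalone sketch;
# 'some_packet.xml' is a placeholder filename:
import voeventparse as vp

with open('some_packet.xml', 'rb') as f:
    v = vp.load(f)
grouped = vp.get_grouped_params(v)
for group, params in grouped.items():
    for name, attribs in params.items():
        print(group, name, attribs.get('value'))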
def load_voevent(self):
    """ Load VOE """
    if self.isfile:
        with open(self.VO_event, 'rb') as f:
            self.v = vp.load(f)
    else:
        self.v = self.VO_event
def test_dump(self):
    """Check that writing to a file actually works as expected"""
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        packet = vp.load(f)
    with tempfile.TemporaryFile(mode='w+b') as f:
        vp.dump(packet, f)
def get_voevent(inname):
    """ Use voeventparse to read voevent. """
    with open(inname, 'rb') as fp:
        ve = voeventparse.load(fp)
    return ve
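# A symmetric writer to go with get_voevent can be handy; a minimal sketch
# (the function name put_voevent is ours, not part of voeventparse):
def put_voevent(ve, outname):
    """ Use voeventparse to write a voevent back to disk.
    vp.dump emits encoded bytes, so open the file in binary mode. """
    with open(outname, 'wb') as fp:
        voeventparse.dump(ve, fp)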
def test_load_of_voe_v2(self):
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        vff = vp.load(f)
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        vfs = vp.loads(f.read())
    self.assertEqual(objectify.dump(vff), objectify.dump(vfs))
    self.assertEqual(vfs.tag, 'VOEvent')
    self.assertEqual(vfs.attrib['ivorn'],
                     'ivo://nasa.gsfc.gcn/SWIFT#BAT_GRB_Pos_532871-729')
def test_swift_grb_v2_fk5(self):
    with open(datapaths.swift_bat_grb_pos_v2) as f:
        swift_grb_v2 = voeventparse.load(f)
    known_swift_grb_posn = ephem.Equatorial(74.741200 / DEG_PER_RADIAN,
                                            25.313700 / DEG_PER_RADIAN,
                                            epoch=ephem.J2000)
    voe_coords = voeventparse.pull_astro_coords(swift_grb_v2)
    extracted_posn = convert_voe_coords_to_eqposn(voe_coords)
    self.assertEqual(extracted_posn.ra, known_swift_grb_posn.ra)
    self.assertEqual(extracted_posn.dec, known_swift_grb_posn.dec)
def test_validation_routine(self):
    """
    Now we perform the same validation tests, but applied via the
    convenience functions.
    """
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        v = vp.load(f)
    self.assertTrue(vp.valid_as_v2_0(v))
    v.Who.BadChild = 42
    self.assertFalse(vp.valid_as_v2_0(v))
    del v.Who.BadChild
    self.assertTrue(vp.valid_as_v2_0(v))
def test_initial_case(self):
    with open(datapaths.swift_bat_grb_pos_v2) as f:
        swift_alert = BatGrb(vp.load(f))
    request_status = {'sent_time': datetime.datetime.utcnow(),
                      'acknowledged': False,
                      }
    v = vo_subs.create_ami_followup_notification(swift_alert,
                                                 stream_id=001,
                                                 request_status=request_status)
    vp.assert_valid_as_v2_0(v)
    with open('/tmp/test_voevent.xml', 'w') as f:
        vp.dump(v, f)
def test_initial_case(self):
    with open(datapaths.swift_bat_grb_pos_v2) as f:
        swift_alert = BatGrb(vp.load(f))
    current_utc_time = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC)
    request_status = {
        'sent_time': current_utc_time,
        'acknowledged': False,
    }
    v = vo_subs.create_ami_followup_notification(
        swift_alert, stream_id=1, request_status=request_status)
    vp.assert_valid_as_v2_0(v)
    with open('/tmp/test_voevent.xml', 'wb') as f:
        vp.dump(v, f)
def test_dumps(self):
    """
    Note, the processed output does not match the raw input -
    because I have added the UTF-8 encoding declaration.
    So we match the convenience routines against an etree.tostring
    processed version of the original.
    """
    swift_grb_v2_raw = objectify.parse(
        datapaths.swift_bat_grb_pos_v2).getroot()
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        swift_grb_v2_voeparsed = vp.load(f)
    raw = etree.tostring(swift_grb_v2_raw,
                         pretty_print=False,
                         xml_declaration=True,
                         encoding='UTF-8')
    processed = vp.dumps(swift_grb_v2_voeparsed)
    self.assertEqual(raw, processed)
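# Related aside, not part of the original test: vp.dumps returns an encoded
# byte string (hence the comparison against etree.tostring output above).
# Decode it explicitly when a text (unicode) string is wanted:
import voeventparse as vp

with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:   # same fixture as above
    packet = vp.load(f)
xml_bytes = vp.dumps(packet)            # bytes, UTF-8 encoded
xml_text = xml_bytes.decode('UTF-8')    # str, if text is needed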
def get_data_from_voevent(self, voevent_file: HTTPResponse) -> dict:
    voevent = vp.load(voevent_file)
    voevent_data = {}
    classification_group = voevent.What.find(
        ".//Group[@type='Classification']")
    for datum in ['BNS', 'NSBH', 'BBH', 'MassGap', 'Terrestrial']:
        datum_element = classification_group.find(
            f".//Param[@name='{datum}']")
        if datum_element is not None:
            voevent_data[datum] = datum_element.attrib['value']
    properties_group = voevent.What.find(".//Group[@type='Properties']")
    for datum in ['HasNS', 'HasRemnant']:
        datum_element = properties_group.find(f".//Param[@name='{datum}']")
        if datum_element is not None:
            voevent_data[datum] = datum_element.attrib['value']
    return voevent_data
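# The repeated find-then-check pattern above can be factored into a small
# helper; a sketch (the helper name _param_value is ours, not a library call):
def _param_value(parent, name):
    """Return the 'value' attribute of the named Param under parent, or None."""
    elem = parent.find(f".//Param[@name='{name}']")
    return elem.attrib['value'] if elem is not None else None

# e.g. probability = _param_value(classification_group, 'BNS')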
def test_namespace_variations(self):
    # NB, not enclosing root element in a namespace is invalid under schema
    # But this has been seen in the past (isolated bug case?)
    # Anyway, handled easily enough
    with open(datapaths.no_namespace_test_packet, 'rb') as f:
        vff = vp.load(f)
    self.assertFalse(vp.valid_as_v2_0(vff))
    self.assertEqual(vff.tag, 'VOEvent')
    self.assertEqual(vff.attrib['ivorn'],
                     'ivo://com.dc3/dc3.broker#BrokerTest-2014-02-24T15:55:27.72')

    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        xml_str = f.read()
    xml_str = xml_str.replace(b'voe', b'foobar_ns')
    # print xml_str
    vfs = vp.loads(xml_str)
    vp.assert_valid_as_v2_0(vfs)
    self.assertEqual(vfs.tag, 'VOEvent')
    self.assertEqual(vfs.attrib['ivorn'],
                     'ivo://nasa.gsfc.gcn/SWIFT#BAT_GRB_Pos_532871-729')
def _get_data_from_voevent(self, alert):
    voevent_data = {}
    try:
        event_trigger_number = alert['alert_identifier'].split('_')[0]
        gracedb_superevent = self.gracedb_client.superevents[
            event_trigger_number]
        latest_voevent = gracedb_superevent.voevents.get()[-1]
        voevent_file = gracedb_superevent.files[
            latest_voevent['filename']].get()
        voevent = vp.load(voevent_file)
        for param in voevent.What.Param:
            if param.attrib['name'] in ['FAR', 'Instruments']:
                voevent_data[param.attrib['name']] = param.attrib['value']
        classification_group = voevent.What.find(
            ".//Group[@type='Classification']")
        if classification_group is not None:
            # Retractions don't have classifications
            for param in classification_group.findall('Param'):
                voevent_data[param.attrib['name']] = param.attrib['value']
        properties_group = voevent.What.find(".//Group[@type='Properties']")
        if properties_group is not None:
            for param in properties_group.findall('Param'):
                voevent_data[param.attrib['name']] = param.attrib['value']
        filename_parts = latest_voevent['filename'].split('.')[0].split('-')
        voevent_data[
            'data_version'] = f'{filename_parts[-1]} {filename_parts[-2]}'
        voevent_data.update(
            self._get_confidence_regions(gracedb_superevent))
    except requests.exceptions.HTTPError as httpe:
        logger.error(
            f'Unable to parse VO Event for alert {alert["event_trig_num"]}: {httpe}'
        )
    return voevent_data
def _ingest_packet(sock, ivorn, handler, log): """Ingest one VOEvent Transport Protocol packet and act on it, first sending the appropriate response and then calling the handler if the payload is a VOEvent.""" # Receive payload payload = _recv_packet(sock) log.debug("received packet of %d bytes", len(payload)) log.debug("payload is:\n%s", payload) # Parse payload and act on it try: root = parse_from_string(payload) except XMLSyntaxError: log.exception( "oops...failed to parse XML, base64-encoded payload is:\n%s", base64.b64encode(payload)) raise else: if root.tag == "{http://telescope-networks.org/schema/Transport/v1.1}Transport": if "role" not in root.attrib: log.error("receieved transport message without a role") elif root.attrib["role"] == "iamalive": log.debug("received iamalive message") _send_packet( sock, _form_response("iamalive", root.find("Origin").text, ivorn, _get_now_iso8601())) log.debug("sent iamalive response") else: log.error( "received transport message with unrecognized role: %s", root.attrib["role"]) elif root.tag in ("{http://www.ivoa.net/xml/VOEvent/v1.1}VOEvent", "{http://www.ivoa.net/xml/VOEvent/v2.0}VOEvent"): log.info("received VOEvent") ivorn = root.attrib['ivorn'] filename = urllib.quote_plus(ivorn) with open(filename, "w") as f: f.write(payload) logging.getLogger('gcn.handlers.archive').info( "archived %s", ivorn) with open(filename) as f: v = vp.load(f) v ###################################################################################LVC ONLY################################################################################ if v.find(".//Param[@name='AlertType']" ).attrib['value'] == "Preliminary": keylist1 = ['ivorn', 'role', 'version'] dict1 = {} for key in keylist1: dict1[key] = v.attrib[key] #Source dict1.update({ 'author_ivorn': v.Who.AuthorIVORN, 'shortname': v.Who.Author.shortName, 'contactname': v.Who.Author.contactName, 'contactemail': v.Who.Author.contactEmail, 'date': v.Who.Date, 'who_description': v.Who.Description }) #xmlns dict1.update({ 'xmlns_voe': "http://www.ivoa.net/xml/VOEvent/v2.0", 'xmlns_xsi': "http://www.w3.org/2001/XMLSchema-instance", 'xsi_schemalocation': "http://www.ivoa.net/xml/VOEvent/v2.0 http://www.ivoa.net/xml/VOEvent/VOEvent-v2.0.xsd" }) #Parameters #print type(v.find(".//Param[@name='Group']").attrib['value']) dict1.update({ 'packet_type': v.find(".//Param[@name='Packet_Type']").attrib['value'], 'pkt_ser_num': v.find(".//Param[@name='Pkt_Ser_Num']").attrib['value'], 'alert_type': v.find(".//Param[@name='AlertType']").attrib['value'], 'graceid': v.find(".//Param[@name='GraceID']").attrib['value'], 'id_letter': v.find(".//Param[@name='ID_Letter']").attrib['value'], 'trig_id': v.find(".//Param[@name='TrigID']").attrib['value'], 'trigger_tjd': v.find(".//Param[@name='Trigger_TJD']").attrib['value'], 'trigger_sod': v.find(".//Param[@name='Trigger_SOD']").attrib['value'], 'eventpage': v.find(".//Param[@name='EventPage']").attrib['value'], 'search': v.find(".//Param[@name='Search']").attrib['value'], 'pipeline': v.find(".//Param[@name='Pipeline']").attrib['value'], 'internal': v.find(".//Param[@name='Internal']").attrib['value'], 'far': v.find(".//Param[@name='FAR']").attrib['value'], 'chirpmass': v.find(".//Param[@name='ChirpMass']").attrib['value'], 'eta': v.find(".//Param[@name='Eta']").attrib['value'], 'maxdistance': v.find(".//Param[@name='MaxDistance']").attrib['value'], 'trigger_id': v.find(".//Param[@name='Trigger_ID']").attrib['value'], 'misc_flags': v.find(".//Param[@name='Misc_flags']").attrib['value'], 'lvc_internal': 
v.find(".//Param[@name='LVC_Internal']").attrib['value'], 'test': v.find(".//Param[@name='Test']").attrib['value'], 'retraction': v.find(".//Param[@name='Retraction']").attrib['value'], 'internal_test': v.find(".//Param[@name='InternalTest']").attrib['value'], 'num_det_participated': v.find(".//Param[@name='Num_Det_participated']" ).attrib['value'], 'lho_participated': v.find( ".//Param[@name='LHO_participated']").attrib['value'], 'llo_participated': v.find( ".//Param[@name='LLO_participated']").attrib['value'], 'virgo_participated': v.find(".//Param[@name='Virgo_participated']"). attrib['value'], 'geo600_participated': v.find(".//Param[@name='GEO600_participated']" ).attrib['value'], 'kagra_participated': v.find(".//Param[@name='KAGRA_participated']" ).attrib['value'], 'lio_participated': v.find( ".//Param[@name='LIO_participated']").attrib['value'], 'sequence_number': v.find( ".//Param[@name='Sequence_number']").attrib['value'], '_group': v.find(".//Param[@name='Group']").attrib['value'], 'skymap_url_fits_basic': "" }) #ObservationInfo dict1.update({ 'observatorylocation_id': v.WhereWhen.ObsDataLocation.ObservatoryLocation. attrib['id'], 'astrocoordsystem_id': v.WhereWhen.ObsDataLocation.ObservationLocation. AstroCoordSystem.attrib['id'], 'timeunit': v.WhereWhen.ObsDataLocation.ObservationLocation. AstroCoords.Time.attrib['unit'], 'isotime': v.WhereWhen.ObsDataLocation.ObservationLocation. AstroCoords.Time.TimeInstant.ISOTime, 'how_description': v.How.Description, 'reference_uri': 'http://gcn.gsfc.nasa.gov/gcn/ligo.html', 'importance': v.Why.attrib['importance'], 'inference_probability': v.Why.Inference.attrib['probability'], 'concept': v.Why.Inference.Concept }) #print dict1 hostname, username, passwd, database = lsc.mysqldef.getconnection( "lcogt2") conn = lsc.mysqldef.dbConnect(hostname, username, passwd, database) lsc.mysqldef.insert_values(conn, "voevent_lvc", dict1) #The following is to act only on Initial and Update Notices elif v.find( ".//Param[@name='AlertType']" ).attrib['value'] == "Initial" or v.find( ".//Param[@name='AlertType']").attrib['value'] == "Update": keylist1 = ['ivorn', 'role', 'version'] dict1 = {} for key in keylist1: dict1[key] = v.attrib[key] #Source dict1.update({ 'author_ivorn': v.Who.AuthorIVORN, 'shortname': v.Who.Author.shortName, 'contactname': v.Who.Author.contactName, 'contactemail': v.Who.Author.contactEmail, 'date': v.Who.Date, 'who_description': v.Who.Description }) #xmlns dict1.update({ 'xmlns_voe': "http://www.ivoa.net/xml/VOEvent/v2.0", 'xmlns_xsi': "http://www.w3.org/2001/XMLSchema-instance", 'xsi_schemalocation': "http://www.ivoa.net/xml/VOEvent/v2.0 http://www.ivoa.net/xml/VOEvent/VOEvent-v2.0.xsd" }) #Parameters dict1.update({ 'packet_type': v.find(".//Param[@name='Packet_Type']").attrib['value'], 'pkt_ser_num': v.find(".//Param[@name='Pkt_Ser_Num']").attrib['value'], 'alert_type': v.find(".//Param[@name='AlertType']").attrib['value'], 'graceid': v.find(".//Param[@name='GraceID']").attrib['value'], 'id_letter': v.find(".//Param[@name='ID_Letter']").attrib['value'], 'trig_id': v.find(".//Param[@name='TrigID']").attrib['value'], 'trigger_tjd': v.find(".//Param[@name='Trigger_TJD']").attrib['value'], 'trigger_sod': v.find(".//Param[@name='Trigger_SOD']").attrib['value'], 'eventpage': v.find(".//Param[@name='EventPage']").attrib['value'], 'search': v.find(".//Param[@name='Search']").attrib['value'], 'pipeline': v.find(".//Param[@name='Pipeline']").attrib['value'], 'internal': v.find(".//Param[@name='Internal']").attrib['value'], 'far': 
v.find(".//Param[@name='FAR']").attrib['value'], 'chirpmass': v.find(".//Param[@name='ChirpMass']").attrib['value'], 'eta': v.find(".//Param[@name='Eta']").attrib['value'], 'maxdistance': v.find(".//Param[@name='MaxDistance']").attrib['value'], 'trigger_id': v.find(".//Param[@name='Trigger_ID']").attrib['value'], 'misc_flags': v.find(".//Param[@name='Misc_flags']").attrib['value'], 'lvc_internal': v.find(".//Param[@name='LVC_Internal']").attrib['value'], 'test': v.find(".//Param[@name='Test']").attrib['value'], 'retraction': v.find(".//Param[@name='Retraction']").attrib['value'], 'internal_test': v.find(".//Param[@name='InternalTest']").attrib['value'], 'num_det_participated': v.find(".//Param[@name='Num_Det_participated']" ).attrib['value'], 'lho_participated': v.find( ".//Param[@name='LHO_participated']").attrib['value'], 'llo_participated': v.find( ".//Param[@name='LLO_participated']").attrib['value'], 'virgo_participated': v.find(".//Param[@name='Virgo_participated']"). attrib['value'], 'geo600_participated': v.find(".//Param[@name='GEO600_participated']" ).attrib['value'], 'kagra_participated': v.find(".//Param[@name='KAGRA_participated']" ).attrib['value'], 'lio_participated': v.find( ".//Param[@name='LIO_participated']").attrib['value'], 'sequence_number': v.find( ".//Param[@name='Sequence_number']").attrib['value'], '_group': v.find(".//Param[@name='Group']").attrib['value'], 'skymap_url_fits_basic': v.find(".//Param[@name='SKYMAP_URL_FITS_BASIC']" ).attrib['value'] }) #ObservationInfo dict1.update({ 'observatorylocation_id': v.WhereWhen.ObsDataLocation.ObservatoryLocation. attrib['id'], 'astrocoordsystem_id': v.WhereWhen.ObsDataLocation.ObservationLocation. AstroCoordSystem.attrib['id'], 'timeunit': v.WhereWhen.ObsDataLocation.ObservationLocation. AstroCoords.Time.attrib['unit'], 'isotime': v.WhereWhen.ObsDataLocation.ObservationLocation. 
AstroCoords.Time.TimeInstant.ISOTime, 'how_description': v.How.Description, 'reference_uri': 'http://gcn.gsfc.nasa.gov/gcn/ligo.html', 'importance': v.Why.attrib['importance'], 'inference_probability': v.Why.Inference.attrib['probability'], 'concept': v.Why.Inference.Concept }) #'group': v.find(".//Param[@name='Group']").attrib['value'] #print dict1 hostname, username, passwd, database = lsc.mysqldef.getconnection( "lcogt2") conn = lsc.mysqldef.dbConnect(hostname, username, passwd, database) lsc.mysqldef.insert_values(conn, "voevent_lvc", dict1) #wget import os command = 'wget --auth-no-challenge ' + v.find( ".//Param[@name='SKYMAP_URL_FITS_BASIC']" ).attrib[ 'value'] + ' -O' + ' /home/svasylyev/ligoevent_fits/' + v.find( ".//Param[@name='GraceID']" ).attrib['value'] + '_' + v.find( ".//Param[@name='AlertType']" ).attrib['value'] + '.fits.gz' #print command os.system(command) import galaxy_list galaxy_map = galaxy_list.find_galaxy_list( '/home/svasylyev/ligoevent_fits/' + v.find(".//Param[@name='GraceID']").attrib['value'] + '_' + v.find(".//Param[@name='AlertType']").attrib['value'] + '.fits.gz') print galaxy_map else: print "Unknown Alert Type" ####################################################################################LVC ONLY################################################################################ if 'ivorn' not in root.attrib: log.error("received voevent message without ivorn") else: _send_packet( sock, _form_response("ack", root.attrib["ivorn"], ivorn, _get_now_iso8601())) log.debug("sent receipt response") if handler is not None: try: handler(payload, root) except: log.exception("exception in payload handler") else: log.error("received XML document with unrecognized root tag: %s", root.tag)
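# Aside: the wget shell-out above could equally be done in-process. A minimal
# sketch assuming the same SKYMAP_URL_FITS_BASIC / GraceID / AlertType Params
# and output directory; note the skymap URL may require GraceDB credentials
# (the original wget call used --auth-no-challenge for that), so pass
# auth=... to requests.get if needed:
import os
import requests

def fetch_skymap(v, outdir='/home/svasylyev/ligoevent_fits'):
    """Sketch: download the FITS skymap referenced by the VOEvent packet v."""
    url = v.find(".//Param[@name='SKYMAP_URL_FITS_BASIC']").attrib['value']
    graceid = v.find(".//Param[@name='GraceID']").attrib['value']
    alert_type = v.find(".//Param[@name='AlertType']").attrib['value']
    outpath = os.path.join(outdir, '%s_%s.fits.gz' % (graceid, alert_type))
    resp = requests.get(url)
    resp.raise_for_status()
    with open(outpath, 'wb') as fh:
        fh.write(resp.content)
    return outpath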
from __future__ import absolute_import, unicode_literals

import voeventparse
from voeventdb.server.tests.resources.datapaths import (
    assasn_non_ascii_packet_filepath,
    gaia_16bsf_filepath,
    konus_lc_filepath,
    swift_bat_grb_pos_v2_filepath,
    swift_bat_grb_655721_filepath,
    swift_xrt_grb_655721_filepath,
)

with open(swift_bat_grb_pos_v2_filepath, 'rb') as f:
    swift_bat_grb_pos_v2_etree = voeventparse.load(f)

# NB xrt_grb_655721 cites -> bat_grb_655721
with open(swift_bat_grb_655721_filepath, 'rb') as f:
    swift_bat_grb_655721 = voeventparse.load(f)
with open(swift_xrt_grb_655721_filepath, 'rb') as f:
    swift_xrt_grb_655721 = voeventparse.load(f)
with open(konus_lc_filepath, 'rb') as f:
    konus_lc = voeventparse.load(f)
with open(gaia_16bsf_filepath, 'rb') as f:
    gaia_16bsg = voeventparse.load(f)
* lxml documentation at http://lxml.de/objectify.html
* VOEvent standard at http://www.ivoa.net/documents/VOEvent/
* VOEvent schema file at http://www.ivoa.net/xml/VOEvent/VOEvent-v2.0.xsd
"""
from __future__ import print_function
import pprint
import copy

import voeventparse
from voeventparse.fixtures.datapaths import swift_bat_grb_pos_v2

pp = pprint.PrettyPrinter()

with open(swift_bat_grb_pos_v2, 'rb') as f:
    v = voeventparse.load(f)

# Basic attribute access
print("Ivorn:", v.attrib['ivorn'])
print("Role:", v.attrib['role'])
print("AuthorIVORN:", v.Who.AuthorIVORN)
print("Short name:", v.Who.Author.shortName)
print("Contact:", v.Who.Author.contactEmail)

# Copying by value, and validation:
print("Original valid as v2.0? ", voeventparse.valid_as_v2_0(v))
v_copy = copy.copy(v)
print("Copy valid? ", voeventparse.valid_as_v2_0(v_copy))

# Changing values:
v_copy.Who.Author.shortName = 'BillyBob'
def test_bad_duration_swift_grb():
    with open(datapaths.swift_bat_grb_bad_duration_analysis, 'rb') as f:
        bad_duration_voevent = vp.load(f)
    voevent = parse_from_voevent(bad_duration_voevent)
    params = vp.get_grouped_params(voevent)
def setUp(self):
    with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
        self.swift_grb_v2_packet = vp.load(f)
import os

import voeventparse as vp
from fourpisky.feeds import SwiftFeed
from fourpisky.feeds.swift import SwiftFeedKeys
from fourpisky.tests.resources import datapaths
import pytest

with open(datapaths.swift_bat_grb_pos_v2, 'rb') as f:
    good_bat_grb_voevent = vp.load(f)


def test_content_fetch():
    feed = SwiftFeed(good_bat_grb_voevent)
    assert len(feed.content)


def parse_from_voevent(voevent):
    feed = SwiftFeed(voevent)
    events = feed.parse_content_to_event_data_list()
    assert len(events) == 1
    feed_id = list(feed.event_id_data_map.keys())[0]
    voevent = feed.generate_voevent(feed_id)
    vp.assert_valid_as_v2_0(voevent)
    if True:
        # Write to file for desk-checking:
        tmpdir = '/tmp/fps_feed_test/swift'
        if not os.path.isdir(tmpdir):
            os.makedirs(tmpdir)
        outpath = os.path.join(tmpdir, '{}.xml'.format(feed_id))
def lvc_insert(root, payload): ivorn = root.attrib['ivorn'] filename = urllib.quote_plus(ivorn) with open(filename, "w") as f: f.write(payload) logging.getLogger('gcn.handlers.archive').info("archived %s", ivorn) with open(filename) as f: v = vp.load(f) ###################################################################################LVC ONLY############################################################################# if "LVC" in ivorn: dict1 = {} #Parameters paramlist = ['packet_type','pkt_ser_num','alert_type','graceid', 'id_letter','trig_id','trigger_tjd','trigger_sod','eventpage','search','pipeline','internal','far','chirpmass','eta','maxdistance','trigger_id','misc_flags', 'lvc_internal','test','retraction','internal_test','num_det_participated','lho_participated','llo_participated','virgo_participated','geo600_participated', 'kagra_participated','lio_participated','sequence_number','_group','probhasns','probhasremnant','hardwareinj','vetted','openalert','temporalcoinc'] valuelist = ['Packet_Type','Pkt_Ser_Num','AlertType','GraceID','ID_Letter','TrigID','Trigger_TJD','Trigger_SOD', 'EventPage','Search','Pipeline','Internal','FAR','ChirpMass','ETA','MaxDistance','Trigger_ID','Misc_flags', 'LVC_Internal','Test','Retraction','InternalTest','Num_Det_participated','LHO_participated', 'LLO_participated','Virgo_participated','GEO600_participated','KAGRA_participated','LIO_participated','Sequence_number', 'Group','ProbHasNS','ProbHasRemnant','HardwareInj','Vetted','OpenAlert','TemporalCoinc'] dict1 = {key: (v.find(".//Param[@name='"+ value +"']").attrib['value'] if v.find(".//Param[@name='"+ value +"']") is not None else None) for key, value in zip(paramlist, valuelist)} dict1.update({'skymap_url_fits_basic': ""}) keylist1 = ['ivorn','role','version'] for key in keylist1: dict1[key] = v.attrib[key] #Source dict1.update({'author_ivorn': v.Who.AuthorIVORN,'shortname': v.Who.Author.shortName,'contactname': v.Who.Author.contactName,'contactemail': v.Who.Author.contactEmail,'date': v.Who.Date,'who_description': v.Who.Description}) #xmlns dict1.update({'xmlns_voe': "http://www.ivoa.net/xml/VOEvent/v2.0",'xmlns_xsi': "http://www.w3.org/2001/XMLSchema-instance",'xsi_schemalocation': "http://www.ivoa.net/xml/VOEvent/v2.0 http://www.ivoa.net/xml/VOEvent/VOEvent-v2.0.xsd"}) #ObservationInfo dict1.update({'observatorylocation_id': v.WhereWhen.ObsDataLocation.ObservatoryLocation.attrib['id'],'astrocoordsystem_id': v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoordSystem.attrib['id'],'timeunit': v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Time.attrib['unit'],'isotime': v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Time.TimeInstant.ISOTime,'how_description': v.How.Description,'reference_uri': 'http://gcn.gsfc.nasa.gov/gcn/ligo.html','importance': v.Why.attrib['importance'],'inference_probability': v.Why.Inference.attrib['probability'],'concept': v.Why.Inference.Concept}) #to act on both Initial and Update notices if v.find(".//Param[@name='AlertType']").attrib['value'] == "Initial" or v.find(".//Param[@name='AlertType']").attrib['value'] == "Update" : dict1.update({'skymap_url_fits_basic': v.find(".//Param[@name='SKYMAP_URL_FITS_BASIC']").attrib['value']}) #insert into table insert_values("voevent_lvc", dict1) if (v.find(".//Param[@name='AlertType']").attrib['value'] == "Initial" or v.find(".//Param[@name='AlertType']").attrib['value'] == "Update") and not v.find(".//Param[@name='ID_Letter']").attrib['value'] == "M" : #remove 'and not 
v.find(".//Param[@name='ID_Letter']").attrib['value'] == "M"' in order to save LVC M-series (or test events that occur every 10 min) to lvc_galaxies table #wget command command = 'wget --auth-no-challenge ' + v.find(".//Param[@name='SKYMAP_URL_FITS_BASIC']").attrib['value'] + ' -O' + ' /supernova/ligoevent_fits/' + v.find(".//Param[@name='GraceID']").attrib['value'] + '_' + v.find(".//Param[@name='AlertType']").attrib['value'] + '.fits.gz' #print command os.system(command) #fetch FITS file galaxy_map = galaxy_list.find_galaxy_list('/supernova/ligoevent_fits/' + v.find(".//Param[@name='GraceID']").attrib['value'] + '_' + v.find(".//Param[@name='AlertType']").attrib['value'] + '.fits.gz') #print galaxy_map #prints out the coordinates in form [RA, DEC, Distance to obj(in Mpc), Bmag, probability score] else: pass #######################################################################LVC ONLY ^######################################################################### elif "ICECUBE" in ivorn: keylist1 = ['ivorn','role','version'] dict1 = {} for key in keylist1: dict1[key] = v.attrib[key] #Source dict1.update({'author_ivorn': v.Who.AuthorIVORN,'shortname': v.Who.Author.shortName,'contactname': v.Who.Author.contactName,'contactemail': v.Who.Author.contactEmail,'date': v.Who.Date,'who_description': v.Who.Description}) #xmlns dict1.update({'xmlns_voe': "http://www.ivoa.net/xml/VOEvent/v2.0",'xmlns_xsi': "http://www.w3.org/2001/XMLSchema-instance",'xsi_schemalocation': "http://www.ivoa.net/xml/VOEvent/v2.0 http://www.ivoa.net/xml/VOEvent/VOEvent-v2.0.xsd"}) #parameters dict1.update({'packet_type': v.find(".//Param[@name='Packet_Type']").attrib['value'],'pkt_ser_num': v.find(".//Param[@name='Pkt_Ser_Num']").attrib['value'],'trig_id': v.find(".//Param[@name='TrigID']").attrib['value'],'event_tjd': v.find(".//Param[@name='Event_TJD']").attrib['value'],'event_sod': v.find(".//Param[@name='Event_SOD']").attrib['value'],'nevents': v.find(".//Param[@name='Nevents']").attrib['value'],'stream': v.find(".//Param[@name='Stream']").attrib['value'],'rev': v.find(".//Param[@name='Rev']").attrib['value'],'false_pos': v.find(".//Param[@name='False_pos']").attrib['value'],'pvalue': v.find(".//Param[@name='pvalue']").attrib['value'],'deltat': v.find(".//Param[@name='deltaT']").attrib['value'],'sigmat': v.find(".//Param[@name='sigmaT']").attrib['value'],'charge': v.find(".//Param[@name='charge']").attrib['value'],'signalness': v.find(".//Param[@name='signalness']").attrib['value'],'hesetypeindex': v.find(".//Param[@name='hesetypeindex']").attrib['value'],'trigger_id': v.find(".//Param[@name='Trigger_ID']").attrib['value'],'misc_flags': v.find(".//Param[@name='Misc_flags']").attrib['value'],'subtype': v.find(".//Param[@name='SubType']").attrib['value'],'test': v.find(".//Param[@name='Test']").attrib['value'],'radec_valid': v.find(".//Param[@name='RADec_valid']").attrib['value'],'retraction': v.find(".//Param[@name='Retraction']").attrib['value'],'internal_test': v.find(".//Param[@name='InternalTest']").attrib['value']}) #ObservationInfo dict1.update({'observatorylocation_id': v.WhereWhen.ObsDataLocation.ObservatoryLocation.attrib['id'],'astrocoordsystem_id': v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoordSystem.attrib['id'],'timeunit': v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Time.attrib['unit'],'isotime': v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Time.TimeInstant.ISOTime,'ra0': v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Position2D.Value2.C1,'dec0': 
v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Position2D.Value2.C2,'error2radius': v.WhereWhen.ObsDataLocation.ObservationLocation.AstroCoords.Position2D.Error2Radius, 'how_description': v.How.Description,'reference_uri': 'http://gcn.gsfc.nasa.gov/gcn/ligo.html','importance': v.Why.attrib['importance'],'inference_probability': v.Why.Inference.attrib['probability'],'concept': v.Why.Inference.Concept}) #insert into table insert_values("voevent_amon", dict1) else: pass print "DONE"
def send_data2DB(filename, candidates, Nb_cuts, owncloud_path, VOE_path,
                 usrpwd_path, FoV=60, coords_type='pix', corner_cut=32,
                 debug=False, fmt='png'):
    """Send candidates information to database"""

    # Load data and header
    data, header = fits.getdata(filename, header=True)
    dateObs = header['DATE-OBS']
    Tstart = Time(dateObs, format='fits', scale='utc')
    Tend = Tstart + TimeDelta(float(header['EXPOSURE']), format='sec')
    # Try to get airmass from header, else set it to -1
    try:
        Airmass = header['AIRMASS']
    except:
        Airmass = -1

    # Do not consider candidates found at the image edge
    imsize = data.shape
    print(imsize, header['NAXIS1'], header['NAXIS2'])
    # Get the physical pixels of the original size if the image was split
    # into different quadrants.
    for i, candidate in enumerate(candidates):
        quadrant_idx = candidate['quadrant']
        quadrant, index_i, index_j = quadrant_idx.split('_')
        quadrant = quadrant[1:]
        candidates['Xpos'][i] = candidate['Xpos'] + int(
            imsize[0] / Nb_cuts[0]) * int(index_j)
        candidates['Ypos'][i] = candidate['Ypos'] + int(
            imsize[1] / Nb_cuts[1]) * int(index_i)
    print(candidates)
    candidates.write('test_all.oc', format='ascii.commented_header',
                     overwrite=True)
    mask = (candidates['Xpos'] > corner_cut) & \
           (candidates['Ypos'] > corner_cut) & \
           (candidates['Xpos'] < imsize[1] - corner_cut) & \
           (candidates['Ypos'] < imsize[0] - corner_cut)
    candidates_cut = candidates[mask]
    print(candidates_cut)

    # Get information about the current alert from the xml file containing
    # the observation plan
    with open(VOE_path, 'rb') as f:
        obsplan = vp.load(f)
    dict_event = {}
    dict_event['event_type'] = obsplan.find(
        ".//Param[@name='Event_type']").attrib['value']
    dict_event['event_name'] = obsplan.find(
        ".//Param[@name='Event_ID']").attrib['value']
    dict_event['event_status'] = obsplan.find(
        ".//Param[@name='Event_status']").attrib['value']
    dict_event['revision'] = obsplan.find(
        ".//Param[@name='Revision']").attrib['value']
    dict_event['telescope'] = obsplan.find(
        ".//Param[@name='Name_tel']").attrib['value']

    # Get user email address and password to log in to
    # https://grandma-fa-interface.lal.in2p3.fr
    with open(usrpwd_path) as f:
        usrpwd = json.load(f)

    # Set up the output repository path to store sub-images
    outputDir = owncloud_path + '/' + dict_event['event_type'] + '/' + \
        dict_event['event_name'] + '/' + dict_event['event_status'] + \
        '_' + dict_event['revision'] + '/OTs/'

    # Create a sub image centered on each candidate found, and gather information
    #tile_id_list = [1] * len(candidates_cut)
    #filter_list = candidates_cut['filter_DB']
    #Tstart_list = [Tstart.fits] * len(candidates_cut)
    #Tend_list = [Tend.fits] * len(candidates_cut)
    #Airmass_list = [Airmass] * len(candidates_cut)
    Fits_path = []
    for i, row in enumerate(candidates_cut):
        name = dict_event['telescope'] + '_' + \
            str(round(float(row['_RAJ2000']), 5)) + '_' + \
            str(round(float(row['_DEJ2000']), 5)) + '_' + \
            dateObs + '.' + fmt
        name = 'test' + str(i) + '.' + fmt
        Fits_path.append(name)
        if coords_type == 'world':
            OT_coords = [row['_RAJ2000'], row['_DEJ2000']]
        elif coords_type == 'pix':
            OT_coords = [row['Ypos'], row['Xpos']]
        make_sub_image(filename, OT_coords, coords_type=coords_type,
                       output_name=outputDir + name, size=[128, 128],
                       FoV=FoV, fmt=fmt)

    alias = ['new'] * len(candidates_cut)
    new = [1] * len(candidates_cut)
    tile_id_list = [3] * len(candidates_cut)
    RA_list = candidates_cut['_RAJ2000']
    Dec_list = candidates_cut['_DEJ2000']
    filter_list = candidates_cut['filter_DB']
    Tstart_list = [Tstart.fits] * len(candidates_cut)
    Tend_list = [Tend.fits] * len(candidates_cut)
    Mag_list = candidates_cut['mag_calib']
    Mag_err_list = candidates_cut['mag_calib_err']
    Magsys_list = candidates_cut['magsys']
    Airmass_list = [Airmass] * len(candidates_cut)

    candidates_2DB = Table(
        [alias, new, tile_id_list, RA_list, Dec_list, filter_list,
         Tstart_list, Tend_list, Mag_list, Mag_err_list, Magsys_list,
         Airmass_list, Fits_path],
        names=['alias', 'new', 'tile_id', 'RA', 'DEC', 'filter', 'Tstart',
               'Tend', 'Magnitude', 'Magnitude_error', 'Magsys', 'Airmass',
               'fits_name'])

    # Set url to report tile or galaxy observations
    url = "https://grandma-fa-interface.lal.in2p3.fr/obs_report_OT.php"
    #url = "http://localhost/test2/obs_report_OT.php"

    # Loop over the observations
    for i in range(len(candidates_2DB)):
        data2DB = {}
        for col in candidates_2DB.colnames:
            data2DB[col] = candidates_2DB[col][i]

        # Add obsplan info to data dictionary
        for key, value in dict_event.items():
            data2DB[key] = value

        # Add username and password to data dictionary
        for key, value in usrpwd.items():
            data2DB[key] = value

        # Add compulsory keys
        data2DB["method"] = "POST"
        data2DB["submit"] = "ok"
        response = requests.post(url, data=data2DB)

        if debug:
            print('\nDEBUG:\n')
            print('Data sent to DB:')
            print(data2DB)
            print('\n\n')
            print('Request response text:')
            print(response.text)
            print('\n\n')
            print('Request response status code:')
            print(response.status_code)
            print('\n\n')
            print('Request response history:')
            print(response.history)
def test_init(self):
    with open(datapaths.swift_bat_grb_pos_v2) as f:
        trigger = swift.BatGrb(voeventparse.load(f))
def reject_packet(pkt):
    with open(pkt, 'rb') as f:
        v = voeventparse.load(f)
    alert = swift.BatGrb(v)
    return alert.reject()
def reject_packet(pkt):
    with open(pkt) as f:
        v = voeventparse.load(f)
    alert = swift.BatGrb(v)
    return alert.reject()
#!/usr/bin/env python
# Script to test loading of VOEvents, process them and send them to the broker

# import and prepare setup
import sys

import voeventparse as vp
import ToO_manager as too
#import ToO_manager_David as too

#fileo = open('MS190425b-1-Preliminary.xml')
filename = str(sys.argv[1])
fileo = open(filename)
v = vp.load(fileo)

too.online_processing(v, role_filter='test')
#too.online_processing(v, role_filter='observation')
def load_voevent(VO_event):
    with open(VO_event, 'rb') as f:
        v = vp.load(f)
    return v
# In[ ]:

# Uncomment the following and hit enter:
# ?vp.load

# Alternatively, you can always [read the docs](http://voevent-parse.readthedocs.org),
# which include autogenerated
# [API specs](http://voevent-parse.rtfd.org/en/master/reference.html#voeventparse.voevent.load).
#
# Ok, let's load up a [voevent (click here to see the raw XML)](voevent.xml):

# In[ ]:

with open('voevent.xml') as f:
    v = vp.load(f)

# **IPython Tip #2**: We also get tab-completion. Simply start typing the name
# of a function (or even just the '.' operator) and hit tab to see valid
# possible options - this is handy for exploring VOEvent packets:

# In[ ]:

# Uncomment the following and hit tab:
# v.

# ## Accessing data ##
#
# ### Text-values ###
#
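# A small aside, not part of the original notebook: voeventparse also provides
# a pretty-printer, which is handy when poking around a packet interactively:

# In[ ]:

print(vp.prettystr(v.Who))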
# Assumed imports for this snippet; the module-level `db` handle and the
# Retrieve_*_ID helpers are expected to be defined elsewhere in the host module.
import json

import voeventparse
from astropy.time import Time


def VOEvent_decoder(XML_file):
    with open(XML_file, 'rb') as f:
        v = voeventparse.load(f)

    # Basic attribute access
    Ivorn = v.attrib['ivorn']
    VOE_role = v.attrib['role']
    AuthorIVORN = v.Who.AuthorIVORN
    VOE_date = v.Who.Date
    Name_sender = v.Who.Author.shortName
    Phone_sender = v.Who.Author.contactPhone
    Mail_sender = v.Who.Author.contactEmail

    # Copying by value, and validation:
    # print("Original valid as v2.0? ", voeventparse.valid_as_v2_0(v))
    # v_copy = copy.copy(v)
    # print("Copy valid? ", voeventparse.valid_as_v2_0(v_copy))

    #######################################################
    # And now, parse the VOEvent
    #######################################################
    # c = voeventparse.get_event_position(v)
    # print("Coords:", c)

    # =========================================================================
    # Retrieve the WHAT Params
    # =========================================================================
    toplevel_params = voeventparse.get_toplevel_params(v)
    # print("Params:", toplevel_params)
    # Each <Param> value only needs to be read once (the original looped over
    # the param dict here, re-assigning the same values on every iteration).
    Event_ID = toplevel_params['Event_ID']['value']
    Event_type = toplevel_params['Event_type']['value']
    Event_inst = toplevel_params['Event_inst']['value']
    Loc_url = toplevel_params['Loc_url']['value']
    BA_name = toplevel_params['FA']['value']
    Prob = toplevel_params['Prob']['value']
    Quicklook_url = toplevel_params['Quicklook_url']['value']
    Distance = toplevel_params['Distance']['value']
    Err_distance = toplevel_params['Err_distance']['value']
    fifty_cr_skymap = toplevel_params['50cr_skymap']['value']
    ninety_cr_skymap = toplevel_params['90cr_skymap']['value']
    FAR = toplevel_params['FAR']['value']
    Group = toplevel_params['Group']['value']
    Pipeline = toplevel_params['Pipeline']['value']
    Obs_req = toplevel_params['Obs_req']['value']

    grouped_params = voeventparse.get_grouped_params(v)
    # print("Group Params:", grouped_params)
    Event_status = grouped_params['Status']['Event_status']['value']
    Revision = grouped_params['Status']['Revision']['value']
    Prob_BNS = grouped_params['Classification']['BNS']['value']
    Prob_NSBH = grouped_params['Classification']['NSBH']['value']
    Prob_BBH = grouped_params['Classification']['BBH']['value']
    Prob_Terrestrial = grouped_params['Classification']['Terrestrial']['value']
    Prob_NS = grouped_params['Properties']['HasNS']['value']
    Prob_EM = grouped_params['Properties']['HasRemnant']['value']
    Name_svom_tel = grouped_params['Set_up_OS']['Name_tel']['value']
    FOV_svom_tel = grouped_params['Set_up_OS']['FOV']['value']
    FOV_coverage_svom_tel = grouped_params['Set_up_OS']['FOV_coverage']['value']
    Mag_limit_svom_tel = grouped_params['Set_up_OS']['Mag_limit']['value']
    exposure_svom_tel = grouped_params['Set_up_OS']['exposure']['value']
    Slew_rate_svom_tel = grouped_params['Set_up_OS']['Slew_rate']['value']
    Readout_svom_tel = grouped_params['Set_up_OS']['Readout']['value']
    Filters_svom_tel = grouped_params['Set_up_OS']['Filters_tel']['value']
    Latitude_svom_tel = grouped_params['Set_up_OS']['Latitude']['value']
    Longitude_svom_tel = grouped_params['Set_up_OS']['Longitude']['value']
    Elevation_svom_tel = grouped_params['Set_up_OS']['Elevation']['value']

    # Observation-plan table embedded in the <What> section
    obs_plan_RA_unit = v.What.Table.Field[1].attrib['unit']
    obs_plan_dec_unit = v.What.Table.Field[2].attrib['unit']
    obs_plan_Grid_ID = []
    obs_plan_RA_center = []
    obs_plan_dec_center = []
    obs_plan_OS_grade = []
    for par in v.What.Table.Data.TR:
        obs_plan_Grid_ID.append(par.TD[0])
        obs_plan_RA_center.append(par.TD[1])
        obs_plan_dec_center.append(par.TD[2])
        obs_plan_OS_grade.append(par.TD[3])

    # =========================================================================
    # Retrieve the WHERE & WHEN Params
    # =========================================================================
    obs_location = v.WhereWhen.ObsDataLocation.ObservationLocation
    trigger_Collab = v.WhereWhen.ObsDataLocation.ObservatoryLocation.attrib['id']
    AstroCoordSystem = obs_location.AstroCoordSystem.attrib['id']
    Time_unit = obs_location.AstroCoords.Time.attrib['unit']
    Trigger_date = obs_location.AstroCoords.Time.TimeInstant.ISOTime
    Trigger_date_jd = Time(str(Trigger_date) + '.00')
    Trigger_date_jd_start = Trigger_date_jd.jd
    Trigger_date = str(Trigger_date).replace('T', ' ')
    Trigger_pos_unit = obs_location.AstroCoords.Position2D.attrib['unit']
    Trigger_RA = obs_location.AstroCoords.Position2D.Value2.C1
    Trigger_dec = obs_location.AstroCoords.Position2D.Value2.C2
    Trigger_poserr = obs_location.AstroCoords.Position2D.Error2Radius

    # =========================================================================
    # Retrieve the WHY Params
    # =========================================================================
    alert_importance = v.Why.attrib['importance']

    # =========================================================================
    # First retrieve some params in the DB to fill the trigger table
    # =========================================================================
    ID_SVOM_ba_shift = Retrieve_BA_ID(db, Trigger_date_jd_start)
    ID_external_trigger_type = Retrieve_trigger_type_ID(db, Event_type, VOE_role)
    ID_external_trigger_telescope = Retrieve_telescope_type_ID(db, Event_inst)

    d = {}
    d["Alert_type"] = VOE_role
    d["Event_ID"] = Event_ID
    d["Event_type"] = Event_type
    d["Event_inst"] = Event_inst
    d["Loc_url"] = Loc_url
    d["BA_name"] = BA_name
    d["Prob"] = Prob
    d["Quicklook_url"] = Quicklook_url
    d["Distance"] = Distance
    d["Err_distance"] = Err_distance
    d["fifty_cr_skymap"] = fifty_cr_skymap
    d["ninety_cr_skymap"] = ninety_cr_skymap
    d["FAR"] = FAR
    d["Group"] = Group
    d["Pipeline"] = Pipeline
    d["Obs_req"] = Obs_req
    d["Event_status"] = Event_status
    d["Revision"] = Revision
    d["Prob_BNS"] = Prob_BNS
    d["Prob_NSBH"] = Prob_NSBH
    d["Prob_BBH"] = Prob_BBH
    d["Prob_Terrestrial"] = Prob_Terrestrial
    d["Prob_NS"] = Prob_NS
    d["Prob_EM"] = Prob_EM
    d["Name_svom_tel"] = Name_svom_tel
    d["FOV_svom_tel"] = FOV_svom_tel
    d["FOV_coverage_svom_tel"] = FOV_coverage_svom_tel
    d["Mag_limit_svom_tel"] = Mag_limit_svom_tel
    d["exposure_svom_tel"] = exposure_svom_tel
    d["Slew_rate_svom_tel"] = Slew_rate_svom_tel
    d["Readout_svom_tel"] = Readout_svom_tel
    d["Filters_svom_tel"] = Filters_svom_tel
    d["Latitude_svom_tel"] = Latitude_svom_tel
    d["Longitude_svom_tel"] = Longitude_svom_tel
    d["Elevation_svom_tel"] = Elevation_svom_tel
    d["obs_plan_RA_unit"] = obs_plan_RA_unit
    d["obs_plan_dec_unit"] = obs_plan_dec_unit
    # Convert the objectified table cells to strings so that json.dumps below
    # can serialise them; the lists returned by the function keep the raw elements.
    d["obs_plan_Grid_ID"] = [str(x) for x in obs_plan_Grid_ID]
    d["obs_plan_RA_center"] = [str(x) for x in obs_plan_RA_center]
    d["obs_plan_dec_center"] = [str(x) for x in obs_plan_dec_center]
    d["obs_plan_OS_grade"] = [str(x) for x in obs_plan_OS_grade]
    d["trigger_Collab"] = trigger_Collab
    d["AstroCoordSystem"] = AstroCoordSystem
    d["Time_unit"] = Time_unit
    d["Trigger_date"] = str(Trigger_date)
    d["Trigger_date_jd"] = str(Trigger_date_jd_start)
    d["Trigger_pos_unit"] = Trigger_pos_unit
    d["Trigger_RA"] = str(Trigger_RA)
    d["Trigger_dec"] = str(Trigger_dec)
    d["Trigger_poserr"] = str(Trigger_poserr)
    d["alert_importance"] = str(alert_importance)
    d["ID_SVOM_ba_shift"] = ID_SVOM_ba_shift
    d["ID_external_trigger_type"] = ID_external_trigger_type
    d["ID_external_trigger_telescope"] = ID_external_trigger_telescope

    result_qry_json = json.dumps(d, ensure_ascii=False)

    return (result_qry_json, obs_plan_RA_unit, obs_plan_dec_unit,
            obs_plan_Grid_ID, obs_plan_RA_center, obs_plan_dec_center,
            obs_plan_OS_grade)
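# A minimal usage sketch for VOEvent_decoder above. The file name is a placeholder,
# and the module-level `db` handle plus the Retrieve_*_ID helpers must already be
# defined, as noted in the function itself.

result_json, ra_unit, dec_unit, grid_ids, ra_centers, dec_centers, grades = \
    VOEvent_decoder("svom_gw_alert.xml")  # hypothetical input file

payload = json.loads(result_json)  # back to a dict for inspection
print(payload["Event_ID"], payload["Trigger_RA"], payload["Trigger_dec"])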
# In[ ]:

# Uncomment the following and hit enter:
# ?vp.load

# Alternatively, you can always [read the docs](http://voevent-parse.readthedocs.org),
# which include autogenerated
# [API specs](http://voevent-parse.rtfd.org/en/master/reference.html#voeventparse.voevent.load).
#
# Ok, let's load up a [voevent (click here to see the raw XML)](voevent.xml):

# In[ ]:

with open('voevent.xml', 'rb') as f:
    v = vp.load(f)

# **IPython Tip #2**: We also get tab-completion. Simply start typing the name of a
# function (or even just the '.' operator) and hit tab to see valid possible options -
# this is handy for exploring VOEvent packets:

# In[ ]:

# Uncomment the following and hit tab:
# v.

# ## Accessing data ##
#
# ### Text-values ###
#
# **XML Tip #1**:
# An XML packet is a tree structure composed of [elements](http://www.w3schools.com/xml/xml_elements.asp).
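# The tutorial cell above stops just before the "Accessing data" section, so here is a
# minimal sketch of reading values from a loaded packet. It assumes the same voevent.xml
# and the `vp` alias used above; `ivorn`, `role` and `Who.Date` are standard VOEvent 2.0
# content, but a given packet may omit optional elements.

import voeventparse as vp

with open('voevent.xml', 'rb') as f:
    v = vp.load(f)

# Packet-level XML attributes live in the .attrib mapping:
print(v.attrib['ivorn'], v.attrib['role'])

# Child elements are reached by attribute access on the objectified tree;
# str() (or .text) gives the element's text value:
print(str(v.Who.Date))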
# Assumed imports for this snippet; get_corner_coords, make_sub_image and get_obsplan
# are expected to come from the host package, and Polygon/Point are assumed to be
# shapely.geometry classes.
import json

import numpy as np
import requests
import voeventparse as vp
from astropy.io import fits
from astropy.table import Table
from astropy.time import Time, TimeDelta
from shapely.geometry import Point, Polygon


def send_data2DB(
    filename,
    candidates,
    Nb_cuts,
    owncloud_path,
    VOE_path,
    usrpwd_path,
    FoV=60,
    coords_type="world",
    corner_cut=32,
    debug=False,
    fmt="png",
    subFiles=None,
):
    """Send candidates information to database"""
    # Load original image header to retrieve date of observation and airmass.
    # This information might have been lost in the analysis images.
    header = fits.getheader(filename)
    dateObs = header["DATE-OBS"]
    Tstart = Time(dateObs, format="fits", scale="utc")
    try:
        exposure = float(header["EXPOSURE"])
        Tend = Tstart + TimeDelta(exposure, format="sec")
    except BaseException:
        Tend = Time(dateObs, format="fits", scale="utc")
        exposure = Tend - Tstart

    # Try to get airmass from header, else set it to -1
    try:
        Airmass = header["AIRMASS"]
    except BaseException:
        Airmass = -1

    # Compute the image corner RA, Dec coordinates
    ra, dec = get_corner_coords(filename)
    pix_im_coord = np.array([ra, dec]).T
    im_poly = Polygon([tuple(co) for co in pix_im_coord])

    # Do not consider candidates found in the image edge
    # imsize = data.shape
    # print (imsize, header['NAXIS1'], header['NAXIS2'])
    # Get the physical pixels of the original size if image were split into
    # different quadrants.
    """
    for i, candidate in enumerate(candidates):
        quadrant_idx = candidate['quadrant']
        if quadrant_idx == 'None':
            quadrant = None
            index_i = 0
            index_j = 0
        else:
            quadrant, index_i, index_j = quadrant_idx.split('_')
            quadrant = quadrant[1:]

        candidates['Xpos'][i] = candidate['Xpos'] + (int(imsize[0]/Nb_cuts[0]) * int(index_j))
        candidates['Ypos'][i] = candidate['Ypos'] + (int(imsize[1]/Nb_cuts[1]) * int(index_i))
    # print (candidates)
    mask = (candidates['Xpos'] > corner_cut) & \
           (candidates['Ypos'] > corner_cut) & \
           (candidates['Xpos'] < imsize[1] - corner_cut) & \
           (candidates['Ypos'] < imsize[0] - corner_cut)
    candidates_cut = candidates[mask]
    """

    # Get information about the current alert from the xml file containing the
    # observation plan
    with open(VOE_path, "rb") as f:
        obsplan = vp.load(f)
    dict_event = {}
    dict_event["event_type"] = obsplan.find(".//Param[@name='Event_type']").attrib["value"]
    dict_event["event_name"] = obsplan.find(".//Param[@name='Event_ID']").attrib["value"]
    dict_event["event_status"] = obsplan.find(".//Param[@name='Event_status']").attrib["value"]
    dict_event["revision"] = obsplan.find(".//Param[@name='Revision']").attrib["value"]
    dict_event["telescope"] = obsplan.find(".//Param[@name='Name_tel']").attrib["value"]
    tiles_info = get_obsplan(obsplan)

    # Get user email address and password to log in to
    # https://grandma-fa-interface.lal.in2p3.fr
    with open(usrpwd_path) as f:
        usrpwd = json.load(f)

    # Set up the output repository path to store sub-images
    outputDir = (owncloud_path + "/" + dict_event["event_type"] + "/" +
                 dict_event["event_name"] + "/" + dict_event["event_status"] +
                 "_" + dict_event["revision"] + "/OTs/")

    # Create a sub-image centered on each candidate found, and gather information
    # tile_id_list = [1] * len(candidates)
    # filter_list = candidates['filter_DB']
    # Tstart_list = [Tstart.fits] * len(candidates)
    # Tend_list = [Tend.fits] * len(candidates)
    # Airmass_list = [Airmass] * len(candidates)
    ImFits_path = []
    RefFits_path = []
    SubFits_path = []
    tile_id_list = []

    if subFiles:
        mask = candidates["FlagSub"] == "Y"
        candidates = candidates[mask]
        # NOTE: hard-coded sky window below; this looks like leftover debugging code.
        masktest = ((candidates["_RAJ2000"] < 244.01) &
                    (candidates["_RAJ2000"] > 244.0) &
                    (candidates["_DEJ2000"] < 22.27) &
                    (candidates["_DEJ2000"] > 22.26))
        candidates = candidates[masktest]

    for i, row in enumerate(candidates):
        name = (dict_event["telescope"] + "_" +
                str(round(float(row["_RAJ2000"]), 5)) + "_" +
                str(round(float(row["_DEJ2000"]), 5)) + "_" +
                dateObs + "." + fmt)
        OT_coords_wcs = [row["_RAJ2000"], row["_DEJ2000"]]
        OT_coords_pix = [row["Ypos"], row["Xpos"]]
        # Extract the region given wcs coordinates.
        # If subtraction was performed, images are realigned and some cut on the
        # edges might be performed, so the physical pixel positions do not match
        # exactly the original image. Astrometry is performed for the original
        # image and the subtracted image, so no problem.
        make_sub_image(
            row["OriginalIma"],
            OT_coords_wcs,
            coords_type="world",
            output_name=outputDir + name,
            size=[128, 128],
            FoV=FoV,
            fmt=fmt,
        )
        ImFits_path.append(name)

        if subFiles:
            name_ref = (dict_event["telescope"] + "_" +
                        str(round(float(row["_RAJ2000"]), 5)) + "_" +
                        str(round(float(row["_DEJ2000"]), 5)) + "_" +
                        dateObs + "_ref." + fmt)
            name_sub = (dict_event["telescope"] + "_" +
                        str(round(float(row["_RAJ2000"]), 5)) + "_" +
                        str(round(float(row["_DEJ2000"]), 5)) + "_" +
                        dateObs + "_sub." + fmt)
            RefFits_path.append(name_ref)
            SubFits_path.append(name_sub)
            # WCS coordinates are used here as well.
            make_sub_image(
                row["RefIma"],
                OT_coords_wcs,
                coords_type="world",
                output_name=outputDir + name_ref,
                size=[128, 128],
                FoV=FoV,
                fmt=fmt,
            )
            make_sub_image(
                row["filenames"],
                OT_coords_wcs,
                coords_type="world",
                output_name=outputDir + name_sub,
                size=[128, 128],
                FoV=FoV,
                fmt=fmt,
            )
        else:
            RefFits_path.append("")
            SubFits_path.append("")

        # Find the tile whose centre falls inside the image footprint.
        # Stop at the first match, assuming tiles do not overlap.
        tile_id = 0  # by default
        for tile in tiles_info:
            # The original indexed the whole table here; use the current row instead.
            tile_center = Point(tile["RA"], tile["Dec"])
            if tile_center.intersects(im_poly):
                tile_id = tile["Id"]
                break
        tile_id_list.append(tile_id)

    alias = ["new"] * len(candidates)
    new = [1] * len(candidates)
    RA_list = candidates["_RAJ2000"]
    Dec_list = candidates["_DEJ2000"]
    filter_list = candidates["filter_DB"]
    Tstart_list = [Tstart.fits] * len(candidates)
    Tend_list = [Tend.fits] * len(candidates)
    exp_list = [exposure] * len(candidates)
    Mag_list = candidates["mag_calib"]
    Mag_err_list = candidates["mag_calib_err"]
    Magsys_list = candidates["magsys"]
    Airmass_list = [Airmass] * len(candidates)

    candidates_2DB = Table(
        [
            alias, new, tile_id_list, RA_list, Dec_list, filter_list,
            Tstart_list, Tend_list, exp_list, Mag_list, Mag_err_list,
            Magsys_list, Airmass_list, ImFits_path, RefFits_path, SubFits_path,
        ],
        names=[
            "alias", "new", "tile_id", "RA", "DEC", "filter", "Tstart",
            "Tend", "Exposure", "Magnitude", "Magnitude_error", "Magsys",
            "Airmass", "im_fits_name", "ref_fits_name", "sub_fits_name",
        ],
    )

    # Set url to report tile or galaxy observations.
    # NOTE: the original left both assignments commented out, which makes the POST
    # below fail with a NameError; the production endpoint is enabled here.
    url = "https://grandma-fa-interface.lal.in2p3.fr/obs_report_OT.php"
    # url = "http://localhost/grandma/obs_report_OT.php"

    # Loop over the observations
    for i in range(len(candidates_2DB)):
        data2DB = {}
        for col in candidates_2DB.colnames:
            data2DB[col] = candidates_2DB[col][i]
        # Add obsplan info to data dictionary
        for key, value in dict_event.items():
            data2DB[key] = value
        # Add username and password to data dictionary
        for key, value in usrpwd.items():
            data2DB[key] = value
        # Add compulsory keys
        data2DB["method"] = "POST"
        data2DB["submit"] = "ok"

        response = requests.post(url, data=data2DB)
        if response.status_code == 200:
            print("Data sent successfully to database.")
            forced_debug = False
        else:
            print("Data not sent to database. See information below.")
            forced_debug = True

        if debug or forced_debug:
            print("\nDEBUG:\n")
            print("Data sent to DB:")
            print(data2DB)
            print("\n\n")
            print("Request response text:")
            print(response.text)
            print("\n\n")
            print("Request response status code:")
            print(response.status_code)
            print("\n\n")
            print("Request response history:")
            print(response.history)
            forced_debug = False
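# For reference, a sketch of a call site for send_data2DB. Every path below is a
# placeholder, and the candidates table is assumed to carry the columns the function
# reads (_RAJ2000, _DEJ2000, mag_calib, mag_calib_err, magsys, filter_DB, OriginalIma, ...).

from astropy.table import Table

candidates = Table.read("candidates.ecsv")  # hypothetical detection-pipeline output
send_data2DB(
    filename="image.fits",
    candidates=candidates,
    Nb_cuts=(2, 2),
    owncloud_path="/path/to/owncloud",
    VOE_path="obsplan_voevent.xml",
    usrpwd_path="credentials.json",
    debug=True,
)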