def pub_sig_tier(detector_data):
    # note: the incoming detector_data is discarded, so format_msg() builds
    # the message from a None payload
    detector_data = None
    stream = Stream(persist=False)
    msg = format_msg(detector_data, "ST")
    with stream.open(set_topic("O"), "w") as s:
        s.write(msg)
        print(f"Publishing OBS message:\n{msg}")
def pub_alert(detector_data):
    # note: the incoming detector_data is discarded, so format_msg() builds
    # the message from a None payload
    detector_data = None
    stream = Stream(persist=False)
    msg = format_msg(detector_data, "A")
    with stream.open(set_topic("A"), "w") as s:
        s.write(msg)
        print(f"Publishing ALERT message:\n{msg}")
def run(self):
    """
    Run the model for the integration test.

    :return: none
    """
    t1 = threading.Thread(target=self.readNumMsg, args=(self.topic,))
    t1.start()

    m = subprocess.Popen([
        'python3', '../hop/apps/SNalert/model.py',
        '--f', './../config.env', '--no-auth'
    ])

    startTime = time.monotonic()
    # randomly publish messages until the test duration has elapsed
    while time.monotonic() - startTime < self.totalTime:
        # randomTime = random.randint(self.minTime, self.maxTime)
        randomTime = exponentialDistribution(self.mean)
        start2 = time.monotonic()
        # busy-wait until randomTime seconds have elapsed
        while True:
            if time.monotonic() - start2 > randomTime:
                break
        # write message with current time
        now = datetime.datetime.utcnow().strftime(
            os.getenv("TIME_STRING_FORMAT"))
        # newFileName = self.writeMessage(now)
        stream = Stream(auth=self.auth)
        with stream.open(os.getenv("TESTING_TOPIC"), "w") as s:
            s.write(self.writeMessage(now))

    m.kill()
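# A minimal sketch of the exponentialDistribution() helper called above,
# assuming it draws a random delay (in seconds) with the given mean, i.e.
# the inter-arrival time of a Poisson process; the project's own
# definition may differ.
import random

def exponentialDistribution(mean):
    # expovariate takes the rate lambda = 1/mean
    return random.expovariate(1.0 / mean)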
def sub_obs():
    stream = Stream(persist=True)
    with stream.open(set_topic("O"), "r") as s:
        for message in s:
            # print(f"saw an OBS at: {time_str()} from {message['detector_id']}")
            slack_alert.send_slack_msg('O', message)
            save_message_obs(message, message['detector_id'])
def pub_heartbeat(detector_data):
    stream = Stream(persist=True)
    # while True:
    with stream.open(set_topic("H"), "w") as s:
        # detector_data = None needs to be called every time it's iterated!
        obs_msg = format_msg(detector_data, "H")
        s.write(obs_msg)
        time.sleep(60)
def sub_alrt():
    stream = Stream(persist=True)
    with stream.open(set_topic("A"), "r") as s:
        for message in s:
            # print(f"saw an ALERT at: {time_str()} from {message['detector_id']}")
            slack_alert.send_slack_msg('A', message)
            save_message_alert(message)
def logMsgs(self):
    # stream = Stream(persist=True, auth=self.auth, start_at=StartPosition.EARLIEST)
    stream = Stream(persist=True, auth=self.auth)
    with stream.open(self.topic, "r") as s:
        # persist=True so it doesn't stop listening to the topic
        for msg in s:
            t = threading.Thread(target=self.countMsgThread,
                                 args=(msg.asdict()['content'],))
            t.start()
def submit(self, request, *args, **kwargs):
    auth = Auth(settings.HOPSKOTCH_CONSUMER_CONFIGURATION['sasl.username'],
                settings.HOPSKOTCH_CONSUMER_CONFIGURATION['sasl.password'])
    stream = Stream(auth=auth)
    topic = request.data.pop('topic')
    with stream.open(
            f'kafka://{settings.HOPSKOTCH_SERVER}:{settings.HOPSKOTCH_PORT}/{topic}',
            'w') as s:
        s.write(request.data)
    return HttpResponse(f'Successfully submitted alert to {topic}.')
def main(args):
    """Generate synthetic observation/heartbeat messages."""
    # set up logging
    verbosity = [logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(
        level=verbosity[min(args.verbose, 2)],
        format="%(asctime)s | model : %(levelname)s : %(message)s",
    )

    # load environment variables
    load_dotenv(dotenv_path=args.env_file)

    # choose set of detector/location pairs
    if args.detector:
        detectors = [Detector(*args.detector.split(":"))]
    else:
        detectors = [
            Detector("DETECTOR 1", "Houston"),
            Detector("DETECTOR 2", "Seattle"),
            Detector("DETECTOR 3", "Los Angeles"),
        ]

    # configure and open observation stream
    logger.info("starting up stream")
    stream = Stream(auth=(not args.no_auth))
    source = stream.open(os.getenv("OBSERVATION_TOPIC"), "w")

    # generate messages
    logger.info(f"publishing messages to {os.getenv('OBSERVATION_TOPIC')}")
    try:
        # send one message, then keep sending if persist is specified
        message = generate_message(
            os.getenv("TIME_STRING_FORMAT"),
            detectors,
            alert_probability=args.alert_probability,
        )
        source.write(message)
        time.sleep(args.rate)
        while args.persist:
            message = generate_message(
                os.getenv("TIME_STRING_FORMAT"),
                detectors,
                alert_probability=args.alert_probability,
            )
            source.write(message)
            time.sleep(args.rate)
    except KeyboardInterrupt:
        pass
    finally:
        logger.info("shutting down")
        source.close()
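# A hypothetical CLI wiring for the generator's main() above. The flag
# names and defaults are assumptions; only the attribute names (verbose,
# env_file, detector, no_auth, alert_probability, rate, persist) are
# dictated by what main() reads.
import argparse

def parse_generator_args(argv=None):
    parser = argparse.ArgumentParser(description="publish synthetic messages")
    parser.add_argument("-v", "--verbose", action="count", default=0)
    parser.add_argument("--env-file", dest="env_file", default=None)
    parser.add_argument("--detector", default=None, help="NAME:LOCATION pair")
    parser.add_argument("--no-auth", dest="no_auth", action="store_true")
    parser.add_argument("--alert-probability", dest="alert_probability",
                        type=float, default=0.1)
    parser.add_argument("--rate", type=float, default=0.5,
                        help="seconds to wait between messages")
    parser.add_argument("--persist", action="store_true")
    return parser.parse_args(argv)

if __name__ == "__main__":
    main(parse_generator_args())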
def main(args):
    """Measure latency from SNEWS events."""
    # set up logging
    verbosity = [logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(
        level=verbosity[min(args.verbose, 2)],
        format="%(asctime)s | latency : %(levelname)s : %(message)s",
    )

    # load environment variables
    load_dotenv(dotenv_path=args.env_file)

    # map choices to measurements
    topics = {
        "alert": os.getenv("ALERT_TOPIC"),
        "observation": os.getenv("OBSERVATION_TOPIC"),
    }

    # configure and open stream
    logger.info("starting up")
    stream = Stream(auth=(not args.no_auth), persist=True)
    source = stream.open(topics[args.measurement], "r")

    # track latency measurements over a sliding window
    latencies = deque(maxlen=args.num_points)

    # read messages and report latency
    logger.info(f"listening to messages from {topics[args.measurement]}")
    try:
        for message, metadata in source.read(batch_size=1, metadata=True):
            # calculate current latency from the broker timestamp (epoch ms)
            message_timestamp = metadata.timestamp
            current_timestamp = int(
                (datetime.utcnow() - datetime(1970, 1, 1)).total_seconds() * 1000)
            latency = (current_timestamp - message_timestamp) / 1000

            # calculate mean latency
            latencies.append(latency)
            mean_latency = numpy.around(numpy.mean(list(latencies)), 3)

            logger.info(
                f"current latency: {latency}s, mean latency: {mean_latency}s")
    except KeyboardInterrupt:
        pass
    finally:
        logger.info("shutting down")
        source.close()
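# A self-contained restatement of the latency arithmetic above, assuming
# metadata.timestamp is milliseconds since the Unix epoch (Kafka's message
# timestamp convention); the function name here is illustrative.
from collections import deque
from datetime import datetime

def latency_seconds(message_timestamp_ms):
    now_ms = int((datetime.utcnow() - datetime(1970, 1, 1)).total_seconds() * 1000)
    return (now_ms - message_timestamp_ms) / 1000

window = deque(maxlen=10)  # sliding window of the most recent measurements
window.append(latency_seconds(1600000000000))  # e.g. a message stamped 2020-09-13 UTC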
def submit_upstream_alert(self, target=None, observation_record=None, **kwargs):
    """
    Submits target and observation record as Hopskotch alerts.

    :param target: ``Target`` object to be converted to an alert and submitted upstream
    :type target: ``Target``

    :param observation_record: ``ObservationRecord`` object to be converted to an alert and submitted upstream
    :type observation_record: ``ObservationRecord``

    :param \\**kwargs: See below

    :Keyword Arguments:
        * *topic* (``str``): Hopskotch topic to submit the alert to.

    :returns: True or False depending on success of message submission
    :rtype: bool

    :raises AlertSubmissionException: If topic is not provided to the function and a default
                                      is not provided in settings
    """
    creds = settings.BROKERS['SCIMMA']
    stream = Stream(auth=Auth(creds['hopskotch_username'], creds['hopskotch_password']))
    stream_url = creds['hopskotch_url']

    topic = kwargs.get('topic') if kwargs.get('topic') else creds['default_hopskotch_topic']
    if not topic:
        raise AlertSubmissionException(f'Topic must be provided to submit alert to {self.name}')

    try:
        with stream.open(f'kafka://{stream_url}:9092/{topic}', 'w') as s:
            if target:
                message = {'type': 'target',
                           'target_name': target.name,
                           'ra': target.ra,
                           'dec': target.dec}
                s.write(message)
            if observation_record:
                message = {'type': 'observation',
                           'status': observation_record.status,
                           'parameters': observation_record.parameters,
                           'target_name': observation_record.target.name,
                           'ra': observation_record.target.ra,
                           'dec': observation_record.target.dec,
                           'facility': observation_record.facility}
                s.write(message)
    except KafkaException as e:
        raise AlertSubmissionException(f'Submission to Hopskotch failed: {e}')

    return True
def pub_test(detector_data):
    # the passed-in detector_data is unused; a fixed test message is
    # published instead
    detector_data = None
    stream = Stream(persist=False)
    msg = {
        "message_id": 'DS_20k_test_obs',
        "detector_id": 'DS_20k',
        "sent_time": "21/06/06 14:21:37",
        "neutrino_time": datetime.now().strftime("%H:%M:%S"),
        "machine_time": datetime.now().strftime("%H:%M:%S"),
        "location": "test",
        "p_value": 0,
        "status": "test",
        "content": "test"
    }
    with stream.open(set_topic("O"), "w") as s:
        s.write(msg)
        print(f"Publishing OBS message:\n{msg}")
def readNumMsg(self, topic):
    """
    Count the alert messages read from the given topic.

    :param topic: topic to subscribe to
    :return: none
    """
    # gcnFormat = "json"
    stream = Stream(persist=True, auth=self.auth)
    # print("===")
    # print(topic)
    with stream.open(topic, "r") as s:
        # persist=True so it doesn't stop listening to the topic
        for msg in s:
            print("====")
            # if gcn_dict['header']['subject'] == "TEST":
            #     self.count += 1
            self.count += 1
def oneDetectorThread(self, uuid):
    # lock = threading.Lock()
    print(uuid)
    # print(timeout)
    startTime = time.monotonic()
    # randomly publish messages
    while time.monotonic() - startTime < self.totalTime:
        # print(time.monotonic() - startTime)
        # print(self.totalTime)
        # msg = self.writeMessage(uuid)
        stream = Stream(auth=self.auth)
        with stream.open(self.topic, "w") as s:
            msg = self.writeMessage(uuid)
            s.write(msg)
            with self.lock:
                self.numMsgPublished += 1
                self.idsWritten.add(msg["header"]["MESSAGE ID"])
def handle(self, *args, **options):
    for alert_type, topic in self.topic_mapping.items():
        stream = Stream(auth=Auth(
            settings.HOPSKOTCH_CONSUMER_CONFIGURATION['sasl.username'],
            settings.HOPSKOTCH_CONSUMER_CONFIGURATION['sasl.password']))
        with stream.open(f'kafka://dev.hop.scimma.org:9092/{topic}', 'w') as s:
            scraped_alerts = ScrapedAlert.objects.filter(
                alert_type=alert_type).order_by('timestamp')
            for alert in scraped_alerts:
                if alert.alert_type == 'lvc_circular':
                    try:
                        circular = GCNCircular.load(
                            alert.alert_data.read().decode('utf-8'))
                        s.write(circular)
                    except Exception:
                        # skip circulars that fail to parse
                        pass
                else:
                    s.write(alert.alert)
class Model(object):
    def __init__(self, args):
        """
        The constructor of the model class.

        :param args: the command line arguments
        """
        # load environment variables
        load_dotenv(dotenv_path=args.env_file)

        self.args = args
        self.gcnFormat = "json"
        self.coinc_threshold = int(os.getenv("COINCIDENCE_THRESHOLD"))
        self.msg_expiration = int(os.getenv("MSG_EXPIRATION"))
        self.db_server = os.getenv("DATABASE_SERVER")
        # caveat: bool() of any non-empty string is True, so any set value
        # of NEW_DATABASE (including "False") enables this
        self.drop_db = bool(os.getenv("NEW_DATABASE"))
        self.regularMsgSchema = msgSchema.regularMsgSchema

        logger.info(f"setting up decider at: {self.db_server}")
        self.myDecider = decider.Decider(
            self.coinc_threshold,
            self.msg_expiration,
            os.getenv("TIME_STRING_FORMAT"),
            os.getenv("DATABASE_SERVER"),
            self.drop_db,
        )
        if self.drop_db:
            logger.info("clearing out decider cache")
        self.deciderUp = False

        # specify topics
        self.observation_topic = os.getenv("OBSERVATION_TOPIC")
        self.alert_topic = os.getenv("ALERT_TOPIC")

        # open up stream connections
        self.stream = Stream(auth=(not args.no_auth), persist=True)
        self.source = self.stream.open(self.observation_topic, "r")
        self.sink = self.stream.open(self.alert_topic, "w")

        # message types and processing algorithms
        self.mapping = {
            SNEWSObservation.__name__: self.processObservationMessage,
            SNEWSHeartbeat.__name__: self.processHeartbeatMessage,
        }

    def run(self):
        """
        Execute the model.

        :return: none
        """
        self.deciderUp = True
        logger.info("starting decider")
        logger.info(f"processing messages from {self.observation_topic}")
        for msg, meta in self.source.read(batch_size=1, metadata=True,
                                          autocommit=False):
            self.processMessage(msg)
            self.source.mark_done(meta)

    def close(self):
        """
        Close stream connections.
        """
        logger.info("shutting down")
        self.deciderUp = False
        self.source.close()
        self.sink.close()

    def addObservationMsg(self, message):
        self.myDecider.addMessage(message)

    def processMessage(self, message):
        message_type = type(message).__name__
        logger.debug(f"processing {message_type}")
        if message_type in self.mapping:
            self.mapping[message_type](message)

    def processObservationMessage(self, message):
        self.addObservationMsg(message)
        alert = self.myDecider.deciding()
        if alert:
            # publish alert message to ALERT_TOPIC
            logger.info("found coincidence, sending alert")
            self.sink.write(self.writeAlertMsg())

    def processHeartbeatMessage(self, message):
        pass

    def writeAlertMsg(self):
        return SNEWSAlert(
            message_id=str(uuid.uuid4()),
            sent_time=datetime.datetime.utcnow().strftime(
                os.getenv("TIME_STRING_FORMAT")),
            machine_time=datetime.datetime.utcnow().strftime(
                os.getenv("TIME_STRING_FORMAT")),
            content="SNEWS Alert: a coincidence between detectors has been observed.",
        )
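# A minimal driver sketch for the Model class above, mirroring the flags
# the integration test passes to model.py ('--f', '--no-auth'); the
# project's real entry point may differ.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--f", dest="env_file", default="config.env",
                        help="path to the environment/config file")
    parser.add_argument("--no-auth", dest="no_auth", action="store_true",
                        help="disable stream authentication")
    args = parser.parse_args()

    model = Model(args)
    try:
        model.run()      # blocks, processing observation messages
    except KeyboardInterrupt:
        pass
    finally:
        model.close()    # close both stream connections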
dir_path = os.path.dirname(os.path.abspath(__file__)) + '/'
print(dir_path)

ROLE = "test"
if len(sys.argv) > 2:
    filename = str(sys.argv[1])
    print('config filename is ' + filename)
    ROLE = str(sys.argv[2])
    print('Run with role ' + ROLE)
else:
    print('you need to provide a configuration file and a role (test, observation)')
    sys.exit()

if not os.path.isabs(filename):
    # assume the path is relative to the current directory
    filename = dir_path + filename

with open(filename) as filein:
    server_config = json.load(filein)

filter_func = filter_notices([NoticeType.LVC_TEST])

stream = Stream(persist=True)
with stream.open(server_config['scimma'], "r") as s:
    for message in filter(filter_func, s):
        print(message)
        process_gcn(message, root)

# gcn.listen(host=server_config['in_host'], port=server_config['in_port'], handler=process_gcn)