def handle_message():
    """Demonstrate Message construction, equality, and every
    (de)serialization round-trip the API offers."""
    print("handle message")
    first = Message(body="hello world!".encode("utf-8"), header={"h1": "val1"})
    second = Message(body="hello world!".encode("utf-8"), header={"h1": "val1"})
    assert first == second
    # binary serialize / deserialize round-trip
    second = deserialize(first.serialize())
    assert first == second
    # deserialize straight from a raw JSON string
    third = deserialize('{"body": "hello world!", "header": {"h1": "val1"}}')
    assert first == third
    # stringify / destringify round-trip
    fourth = destringify(first.stringify())
    assert first == fourth
    # jsonify yields a plain dict; dejsonify restores the Message
    as_dict = first.jsonify()
    assert isinstance(as_dict, dict)
    fifth = dejsonify(as_dict)
    assert first == fifth
    print("...handle message OK!")
def publishSToMq(self, arguments, event_types, summaries, summaries_data):
    """Publish per-event summary messages to the message queue.

    Builds one Message per event type present in *summaries_data* and adds
    it to ``self.mq``; messages whose serialized body exceeds the configured
    size limit are discarded (and the discard is logged).
    """
    # self.maxMQmessageSize is expressed in KB; convert to bytes
    size_limit = self.maxMQmessageSize * 1000
    for event, event_summaries in summaries_data.items():
        # skip events that produced no summaries
        if not event_summaries:
            continue
        msg_head = {
            'input-source': arguments['input_source'],
            'input-destination': arguments['input_destination'],
            'org_metadata_key': arguments['org_metadata_key'],
            'event-type': event,
            'rsv-timestamp': "%s" % time.time(),
            'summaries': 1,
            'destination': '/topic/perfsonar.summary.' + event
        }
        msg_body = {'meta': arguments, 'summaries': event_summaries}
        size_summ = self.total_size(event_summaries)
        # serialize once and reuse: the original called json.dumps(msg_body)
        # twice (once for the Message body, once for the size check)
        body_json = json.dumps(msg_body)
        msg = Message(body=body_json, header=msg_head)
        size_msg = msg.size()
        # if size of the message is larger than the limit, discard it
        if (size_msg > size_limit or sys.getsizeof(body_json) > size_limit
                or size_summ > size_limit):
            self.add2log("Size of message body bigger than limit, discarding")
            continue
        # add to mq
        try:
            self.mq.add_message(msg)
        except Exception as e:
            self.add2log("Failed to add message to mq %s, exception was %s"
                         % (self.dq, e))
def publishSToMq(self, arguments, event_types, summaries, summaries_data):
    """Publish one summary Message per event type to the message queue.

    Oversized messages (serialized body, raw body size, or summary payload
    above the configured limit) are logged and skipped.
    """
    # the max size limit is configured in KB but python expects it in bytes
    size_limit = self.maxMQmessageSize * 1000
    for event, event_summaries in summaries_data.items():
        if not event_summaries:
            # nothing to publish for this event type
            continue
        msg_head = {
            'input-source': arguments['input_source'],
            'input-destination': arguments['input_destination'],
            'org_metadata_key': arguments['org_metadata_key'],
            'event-type': event,
            'rsv-timestamp': "%s" % time.time(),
            'summaries': 1,
            'destination': '/topic/perfsonar.summary.' + event
        }
        msg_body = {'meta': arguments, 'summaries': event_summaries}
        size_summ = self.total_size(event_summaries)
        # hoisted: json.dumps(msg_body) was previously computed twice
        body_json = json.dumps(msg_body)
        msg = Message(body=body_json, header=msg_head)
        size_msg = msg.size()
        # discard messages larger than the configured limit
        if (size_msg > size_limit
                or sys.getsizeof(body_json) > size_limit
                or size_summ > size_limit):
            self.add2log(
                "Size of message body bigger than limit, discarding")
            continue
        # add to mq
        try:
            self.mq.add_message(msg)
        except Exception as e:
            self.add2log(
                "Failed to add message to mq %s, exception was %s" %
                (self.dq, e))
def construct_msg(session, bodysize, timezone):
    """Build a Message with random header fields and body sections.

    The body's ``actual_data`` line embeds *session* when one is given so
    messages from a test session can be identified. Returns the Message, or
    None if construction raised MessageError.
    """
    statusl = ['OK', 'WARNING', 'MISSING', 'CRITICAL', 'UNKNOWN', 'DOWNTIME']
    try:
        msg = Message()
        msg.header = dict()
        msg.body = str()
        msg.header.update({'service': generator.rndb64(10)})
        msg.header.update({'hostname': generator.rndb64(10)})
        msg.header.update({'metric': generator.rndb64(10)})
        msg.header.update({'monitoring_host': generator.rndb64(10)})
        msg.header.update({
            'timestamp': str(
                datetime.datetime.now(timezone).strftime('%Y-%m-%dT%H:%M:%SZ'))
        })
        msg.header.update({'status': random.choice(statusl)})
        msg.body += 'summary: %s\n' % generator.rndb64(20)
        msg.body += 'message: %s\n' % generator.rndb64(bodysize)
        msg.body += 'vofqan: %s\n' % generator.rndb64(10)
        if session:
            msg.body += 'actual_data: *** SESSION %s *** %s\n' % (
                session, generator.rndb64(10))
        else:
            msg.body += 'actual_data: %s\n' % generator.rndb64(10)
        msg.body += 'voname: %s\n' % generator.rndb64(3)
        msg.body += 'roc: %s\n' % generator.rndb64(3)
    except MessageError as e:
        # bug fix: file.write() takes a single string; the original passed
        # the format string and repr(e) as two arguments, raising TypeError
        sys.stderr.write('Error constructing message - %s\n' % repr(e))
    else:
        return msg
def printUnseenMessages(self):
    """Print the current user's unread messages, then mark them read."""
    unseen = Message(self.cursor).unseen(self.email)
    # header line depends on whether anything is waiting
    if len(unseen) > 0:
        print("\nYou have new messages:\n")
    else:
        print("\nYou have no new messages!\n")
    for row in unseen:
        # row layout: [?, date, sender, content, subject] per the indices used
        print("FROM: {0} | REGARDING: {1} | DATE: {2}\nCONTENT: {3}\n".
              format(row[2], row[4], row[1], row[3]))
    Message(self.cursor).markRead(self.email)
def enqueue(dirq, destination, event):
    """Add *event* to the directory queue at *dirq* as a text message.

    Stamps the event with the current time when it lacks a 'timestamp'
    key (note: this mutates the caller's dict), JSON-encodes it, and tags
    the message with the local hostname and *destination*.
    """
    mq_header = {
        'measurement_agent': socket.gethostname(),
        'destination': destination
    }
    # idiomatic membership test (was: `not in event.keys()`)
    if 'timestamp' not in event:
        event['timestamp'] = time.time()
    mq_body = json.dumps(event)
    msg = Message(body=mq_body, header=mq_header)
    msg.is_text = True
    mq = DQS(path=dirq)
    mq.add_message(msg)
def test_message_compression(self):
    """ Test message compression.

    For every available compressor, jsonify() a 10kB body and check the
    encoded body shrank by at least 10%.
    """
    print("checking message compression")
    length = 10000
    body = 'a' * length
    ok = list()
    for module in COMPRESSORS:
        msg = Message(body=body, header={'l': 'ff'})
        jsonified = msg.jsonify({'compression': module})
        # assert_ is a long-deprecated alias removed in Python 3.12;
        # assertTrue is the supported spelling
        self.assertTrue(len(jsonified['body']) < length * 0.9,
                        "message should have been compressed with %s" % module)
        ok.append(module)
    print("...message compression ok for %s" % ",".join(ok))
def message_member(self, user_input):
    """Send a message to the member who posted request *user_input*.

    Re-prompts (recursively) on invalid input instead of crashing.
    """
    # guard with isdigit() so non-numeric input falls through to the
    # re-prompt branch instead of int() raising ValueError (this matches
    # the sibling ride-messaging flow)
    if (user_input.isdigit() and int(user_input) in self.requests_dict):
        handler = Message(self.cursor)
        self.cursor.execute("SELECT email FROM requests WHERE rid = :user_input",
                            {'user_input': user_input})
        email = self.cursor.fetchone()[0]
        message_body = input("Please enter the message you want to send " +
                             email + "\n")
        handler.new(self.email, email, message_body, user_input)
        print("Successfully sent " + email + " with message: \n" +
              message_body)
        self.find_requests_by_location(self.location)
    else:
        user_input = input("Invalid entry. Please enter a valid request number: ")
        self.message_member(user_input)
def publishRToMq(self, arguments, event_types, datapoints):
    """Publish raw datapoints to the message queue, one message per
    allowed event type that actually has data."""
    for event, points in datapoints.items():
        # filter events for mq (must be subset of the probe's filter)
        if event not in self.allowedEvents:
            continue
        # skip events that have no datapoints
        if not points:
            continue
        # compose msg
        msg_head = {
            'input-source': arguments['input_source'],
            'input-destination': arguments['input_destination'],
            'org_metadata_key': arguments['org_metadata_key'],
            # including the timestart of the smallest measurement
            'ts_start': min(points.keys()),
            'event-type': event,
            'rsv-timestamp': "%s" % time.time(),
            'summaries': 0,
            'destination': '/topic/perfsonar.raw.' + event
        }
        msg_body = {'meta': arguments, 'datapoints': points}
        msg = Message(body=json.dumps(msg_body), header=msg_head)
        # add to mq
        try:
            self.mq.add_message(msg)
        except Exception as e:
            self.add2log(
                "Failed to add message to mq %s, exception was %s" %
                (self.dq, e))
def test_queue_queue(self):
    """ Test queue 2 queue. """
    print("checking queue 2 queue use case")
    src_path = self.path + "/mq1"
    dst_path = self.path + "/mq2"
    src = DQS(path=src_path)
    count = 10
    # seed the source queue and remember every body we sent
    expected = ["hello world %s" % (i, ) for i in range(count)]
    for body in expected:
        src.add_message(Message(body=body))
    self.assertEqual(count, src.count())
    cmd = "python bin/amqpclt --incoming-queue path=%s" \
          " --outgoing-queue path=%s --remove --loglevel debug" \
          % (src_path, dst_path)
    (ret, out, err) = proc.timed_process(cmd.split())
    self.assertEqual(0, ret, "out: %s\nerr: %s" % (out, err))
    # everything sent must arrive in the destination queue exactly once
    dst = DQS(path=dst_path)
    for name in dst:
        if dst.lock(name):
            expected.remove(dst.get_message(name).body)
    self.assertEqual(count, dst.count())
    self.assertEqual(0, len(expected))
    print("checking queue 2 queue use case OK")
def test_full_chain(self):
    """ Test kombu full chain.

    End-to-end: seed a directory queue, relay it through a broker with
    amqpclt+kombu, consume it back into a second queue, and verify every
    message made the round trip.
    """
    print("checking kombu full chain")
    try:
        import kombu
    except ImportError:
        # the chain cannot run without kombu; treat as a skip, not a failure
        print("kombu is not available, skipping it")
        return
    mq1_path = self.path + "/mq1"
    mq2_path = self.path + "/mq2"
    mq1 = DQS(path=mq1_path)
    count = 10
    # random destination so concurrent runs don't share a broker queue
    dest = "/queue/test%s" % (rndstr(10), )
    bodies = list()
    for i in range(count):
        body = "hello world %s" % (i, )
        bodies.append(body)
        msg = Message(body=body)
        msg.header = {"destination": dest}
        mq1.add_message(msg)
    self.assertEqual(count, mq1.count())
    # step 1: drain mq1 into the broker (--remove empties the source queue)
    cmd1 = "python bin/amqpclt --incoming-queue path=%s" \
           " --outgoing-broker-uri %s " \
           " --outgoing-broker-module kombu " \
           " --outgoing-broker-auth plain,name=guest,pass=guest" \
           " --remove --loglevel debug" \
           % (mq1_path, self.broker)
    (ret, out, err) = proc.timed_process(cmd1.split())
    self.assertEqual(0, ret, "out: %s\nerr: %s" % (out, err))
    # step 2: consume the same destination from the broker into mq2
    cmd2 = "python bin/amqpclt --incoming-broker-uri %s" \
           " --incoming-broker-module kombu" \
           " --incoming-broker-auth plain,name=guest,pass=guest" \
           " --subscribe destination=%s" \
           " --outgoing-queue path=%s --count %d --reliable " \
           "--loglevel debug" \
           % (self.broker, dest, mq2_path, count)
    (ret, out, err) = proc.timed_process(cmd2.split())
    self.assertEqual(0, ret, "out: %s\nerr: %s" % (out, err))
    # every body sent must come back exactly once
    mq2 = DQS(path=mq2_path)
    for i in mq2:
        if mq2.lock(i):
            bodies.remove(mq2.get_message(i).body)
    self.assertEqual(count, mq2.count())
    self.assertEqual(0, len(bodies))
    # step 1 used --remove, so the source queue must now be empty
    self.assertEqual(0, mq1.count())
    print("checking kombu fullchain OK")
def __sendMessages(self):
    """
    Prepare and send messages created from objects in postprocessing
    queue (meta + raw data).

    Runs until self.__stopped is set: pops events from the
    postprocessing queue, wraps each in a STOMP message, and sends it,
    transparently reconnecting to the broker on connection loss.
    """
    while not self.__stopped:
        # wait for an object to process; the timeout lets the loop
        # re-check __stopped periodically
        try:
            data = self.__postprocessing_queue.get(True, self.QUEUE_TIMEOUT)
        except Queue.Empty:
            # NOTE(review): Queue.Empty is the Python 2 spelling — this
            # module appears to target py2; confirm before porting
            continue
        # prepare and send messages
        event_type = data['event']['event-type']
        message = Message(body=json.dumps(data['event']),
                          header={
                              'destination': '/topic/perfsonar.' + event_type,
                              'time': "%s" % time.time(),
                              'source_host': data['measurement']['source'],
                              'destination_host': data['measurement']['destination']
                          })
        # send the message; retry loop handles broker disconnects
        while not self.__stopped:
            self.log.debug("Sending message: %s" % message.header)
            try:
                self.__connection.send(message.body, **message.header)
            except stomp.exception.NotConnectedException:
                self.log.warn(
                    "Could not send a message. Trying to reconnect...")
                # wait until new connection is established
                while not self.__stopped:
                    try:
                        self.__connectToBroker()
                    except stomp.exception.ConnectFailedException:
                        self.log.error("Failed to reconnect.")
                    else:
                        # reconnected: leave the reconnect loop and retry send
                        break
            # message was succesfully sent - break the loop
            else:
                break
        # profiling
        # NOTE(review): assumes messages_count already holds a key for this
        # event type (KeyError otherwise) — confirm initialization elsewhere
        self.messages_count[
            event_type] = self.messages_count[event_type] + 1
def build_msg(args, *headers):
    """Assemble a text Message from positional header values plus any body
    sections set on *args*.

    *headers* must be (timestamp, service, hostname, testname, status,
    nagioshost); nagioshost is unpacked but unused here. Returns the
    Message with ``text`` set to True.
    """
    msg = Message()
    msg.header = dict()
    msg.body = str()
    timestamp, service, hostname, testname, status, nagioshost = headers
    msg.header.update({'execution_time': timestamp})
    msg.header.update({'service_flavour': service})
    msg.header.update({'node_name': hostname})
    msg.header.update({'test_name': testname})
    msg.header.update({'status': status})
    # append one "section: value" line per body attribute that is set;
    # getattr() replaces the original exec()-on-built-strings, which is
    # both a code-injection hazard and needlessly slow
    for bs in ['details', 'vo', 'site', 'roc', 'urlhistory', 'urlhelp']:
        value = getattr(args, bs)
        if value:
            msg.body += '%s: %s\n' % (bs, value)
    msg.text = True
    return msg
def message_member(self, user_input):
    """Send a message to the driver of ride *user_input*; re-prompt
    (recursively) when the input is not a valid ride number."""
    # valid only when numeric AND a known ride number
    if user_input.isdigit() and int(user_input) in self.rides_dict:
        handler = Message(self.cursor)
        self.cursor.execute(
            "SELECT driver FROM rides WHERE rno = :user_input",
            {'user_input': user_input})
        email = self.cursor.fetchone()[0]
        message_body = input("Please enter the message you want to send " +
                             email + "\n")
        print("Successfully sent " + email + " with message: \n" +
              message_body)
        handler.new(self.email, email, message_body, user_input)
        print('')
    else:
        retry = input(
            "Invalid entry. To message the poster of a ride, please enter the ride number: "
        )
        self.message_member(retry)
def construct_msg(session, bodysize, timezone, schemapath):
    """Build a random monitoring message and return it Avro-serialized
    against the schema at *schemapath*.

    When *session* is given it is embedded in the header so test messages
    can be identified. Returns None if construction raises MessageError.
    """
    statusl = ['OK', 'WARNING', 'MISSING', 'CRITICAL', 'UNKNOWN', 'DOWNTIME']
    try:
        msg = Message()
        msg.header = dict()
        msg.body = str()
        if session:
            msg.header.update(
                {'*** SESSION ***': '*** {0} ***'.format(session)})
        msg.header.update({'service': generator.rndb64(10)})
        msg.header.update({'hostname': generator.rndb64(10)})
        msg.header.update({'metric': generator.rndb64(10)})
        msg.header.update({'monitoring_host': generator.rndb64(10)})
        msg.header.update({
            'timestamp': str(
                datetime.datetime.now(timezone).strftime('%Y-%m-%dT%H:%M:%SZ'))
        })
        msg.header.update({'status': random.choice(statusl)})
        msg.body += 'summary: %s\n' % generator.rndb64(20)
        msg.body += 'message: %s\n' % generator.rndb64(bodysize)
        msg.body += 'vofqan: %s\n' % generator.rndb64(10)
        msg.body += 'actual_data: %s\n' % generator.rndb64(10)
        msg.body += 'voname: %s\n' % generator.rndb64(3)
        msg.body += 'roc: %s\n' % generator.rndb64(3)
        # flatten header + parsed body into one dict for Avro
        plainmsg = dict()
        plainmsg.update(msg.header)
        plainmsg.update(body2dict(msg.body))
        plainmsg.update(tags=tag2dict(msg.body))
        return avro_serialize(plainmsg, schemapath)
    except MessageError as e:
        # bug fix: write() takes one string; the original passed the format
        # string and repr(e) as separate arguments, raising TypeError.
        # (The original trailing `else: return msg` was unreachable since
        # the try block always returns; on error this now returns None.)
        sys.stderr.write('Error constructing message - %s\n' % repr(e))
def __init__(self, db, config, modem):
    """ Initialize the database tables for voice messages. """
    if config["DEBUG"]:
        print("Initializing VoiceMail")
    self.db = db
    self.config = config
    self.modem = modem
    # Event shared with the Message class so DB changes can be observed here
    self.message_event = threading.Event()
    self.config["MESSAGE_EVENT"] = self.message_event
    # LEDs that signal presence and count of waiting messages
    self.message_indicator = MessageIndicator(
        self.config.get("GPIO_LED_MESSAGE_PIN", GPIO_MESSAGE),
        self.config.get("GPIO_LED_MESSAGE_BRIGHTNESS", 100))
    count_pins = self.config.get("GPIO_LED_MESSAGE_COUNT_PINS",
                                 GPIO_MESSAGE_COUNT_PINS)
    count_kwargs = self.config.get("GPIO_LED_MESSAGE_COUNT_KWARGS",
                                   GPIO_MESSAGE_COUNT_KWARGS)
    self.message_count_indicator = MessageCountIndicator(*count_pins,
                                                         **count_kwargs)
    # DB access layer for voice messages
    self.messages = Message(db, config)
    # Background thread that refreshes the indicators on message events
    self._stop_event = threading.Event()
    self._thread = threading.Thread(target=self._event_handler)
    self._thread.name = "voice_mail_event_handler"
    self._thread.start()
    # Pulse the indicator if an unplayed msg is waiting
    self.reset_message_indicator()
    if self.config["DEBUG"]:
        print("VoiceMail initialized")
def build_msg(args, *headers):
    """Assemble a text Message from positional header values plus any body
    sections set on *args*.

    *headers* must be (timestamp, service, hostname, metric, status,
    nagioshost). Returns the Message with ``text`` set to True.
    """
    msg = Message()
    msg.header = dict()
    msg.body = str()
    timestamp, service, hostname, metric, status, nagioshost = headers
    msg.header.update({'timestamp': timestamp})
    msg.header.update({'service': service})
    msg.header.update({'hostname': hostname})
    msg.header.update({'metric': metric})
    msg.header.update({'status': status})
    msg.header.update({'monitoring_host': nagioshost})
    # append one "section: value" line per body attribute that is set;
    # getattr() replaces the original exec()-on-built-strings, which is
    # both a code-injection hazard and needlessly slow
    for bs in [
            'summary', 'message', 'vofqan', 'voname', 'roc', 'actual_data',
            'site'
    ]:
        value = getattr(args, bs)
        if value:
            msg.body += '%s: %s\n' % (bs, value)
    msg.text = True
    return msg
def test_message_creation(self):
    """ Test message creation. """
    print("checking message creation")
    # mutate one instance's header...
    first = Message()
    first.header["should not"] = "appear"
    # ...then verify a fresh instance does not share that state
    first = Message()
    self.assertEqual(first.header, dict(),
                     "message header should be empty: %s" % first)
    # construction with a body and a mixed-key header must also work
    populated = Message(body='dfhkdfgkfd',
                        header={'l': 'ff', 'lid': 56, 567: 34, })
    populated.size()
    print("...message creation ok")
def handle(self):
    """Handle one incoming datagram: normalize the payload to a single
    line and enqueue it on the local message queue, counting successes.

    All failures are logged; nothing propagates past this handler.
    """
    try:
        data = self.request[0].strip()
        socket = self.request[1]
        #glogger.info("%s wrote:" % (self.client_address[0]))
        # collapse newlines so the payload is one JSON line
        jsonDict = data.replace('\n', ' ')
        msg = Message(body=jsonDict)
        try:
            mqid = gmq.add_message(msg)
            #glogger.info("msg added as %s" % mqid)
        except Exception as err:
            glogger.error("failing upload to local queue. Error: %s" % (err))
            # bug fix: the original did `raise Exception`, which raises the
            # bare class and discards the actual error; a bare `raise`
            # re-raises the original exception with its traceback
            raise
        global gcounter
        gcounter += 1
    except Exception as err:
        glogger.error("Error: %s" % (err))
def get(self):
    """ Get a message.

    Returns (Message, delivery_tag) when a message is available; in
    non-reliable mode the tag is None. Returns the string
    "no messages received" and None when the buffer stays empty.
    """
    # refill the local buffer from the connection only when it is empty
    if len(self._msgbuf) == 0:
        self._drain_events()
    if len(self._msgbuf) == 0:
        return "no messages received", None
    (info, header, body) = self._msgbuf.pop(0)
    # text payloads are decoded to unicode based on the content type
    if header.get("content_type") is not None and \
       (header["content_type"].startswith("text/")
            or "charset=" in header["content_type"]):
        body = body.decode("utf-8")
    headers = header["application_headers"]
    # NOTE(review): the encode-then-catch-UnicodeDecodeError pattern below
    # is Python 2 str/unicode normalization; under Python 3 str.encode
    # raises UnicodeEncodeError instead — confirm target version before
    # changing anything here
    for header_name, header_value in headers.items():
        try:
            headers[header_name] = header_value.encode("utf-8")
        except UnicodeDecodeError:
            headers[header_name] = header_value.decode("utf-8")
    msg = Message(header=headers, body=body)
    if self._config["reliable"]:
        # reliable mode: remember the tag so the caller can ack it later
        self._pending.append(info.get("delivery_tag"))
        return msg, info.get("delivery_tag")
    else:
        return msg, None
def get(self):
    """ Get a message.

    Returns (Message, delivery_tag) when a message is available; in
    non-reliable mode the tag is None. Returns the string
    "no messages received" and None when the buffer stays empty.
    """
    # pump the connection for events only when the local buffer is empty
    if len(self._msgbuf) == 0:
        self._connection.process_data_events()
    if len(self._msgbuf) == 0:
        return "no messages received", None
    (method, header, body) = self._msgbuf.pop(0)
    # text payloads are decoded to unicode based on the content type
    if header.content_type is not None and \
       (header.content_type.startswith("text/")
            or "charset=" in header.content_type):
        body = body.decode("utf-8")
    headers = header.headers
    # NOTE(review): the encode-then-catch-UnicodeDecodeError pattern below
    # is Python 2 str/unicode normalization; under Python 3 str.encode
    # raises UnicodeEncodeError instead — confirm target version before
    # changing anything here
    for header_name, header_value in headers.items():
        try:
            headers[header_name] = header_value.encode("utf-8")
        except UnicodeDecodeError:
            headers[header_name] = header_value.decode("utf-8")
    msg = Message(header=header.headers, body=body)
    if self._config["reliable"]:
        # reliable mode: remember the tag so the caller can ack it later
        self._pending.append(method.delivery_tag)
        return msg, method.delivery_tag
    else:
        return msg, None
class VoiceMail:
    # Voice-mail subsystem: records caller messages, persists them through
    # the Message DB layer, and drives the message-indicator LEDs.

    def __init__(self, db, config, modem):
        """ Initialize the database tables for voice messages. """
        if config["DEBUG"]:
            print("Initializing VoiceMail")
        self.db = db
        self.config = config
        self.modem = modem
        # Create a message event shared with the Message class used to monitor changes
        self.message_event = threading.Event()
        self.config["MESSAGE_EVENT"] = self.message_event
        # Initialize the message indicators (LEDs)
        self.message_indicator = MessageIndicator(
            self.config.get("GPIO_LED_MESSAGE_PIN", GPIO_MESSAGE),
            self.config.get("GPIO_LED_MESSAGE_BRIGHTNESS", 100))
        pins = self.config.get("GPIO_LED_MESSAGE_COUNT_PINS",
                               GPIO_MESSAGE_COUNT_PINS)
        kwargs = self.config.get("GPIO_LED_MESSAGE_COUNT_KWARGS",
                                 GPIO_MESSAGE_COUNT_KWARGS)
        self.message_count_indicator = MessageCountIndicator(*pins, **kwargs)
        # Create the Message object used to interface with the DB
        self.messages = Message(db, config)
        # Start the thread that monitors the message events and updates the indicators
        self._stop_event = threading.Event()
        self._thread = threading.Thread(target=self._event_handler)
        self._thread.name = "voice_mail_event_handler"
        self._thread.start()
        # Pulse the indicator if an unplayed msg is waiting
        self.reset_message_indicator()
        if self.config["DEBUG"]:
            print("VoiceMail initialized")

    def stop(self):
        """ Stops the voice mail thread and releases hardware resources. """
        self._stop_event.set()
        self._thread.join()
        self.message_indicator.close()
        self.message_count_indicator.close()

    def _event_handler(self):
        """ Thread function that updates the message indicators upon a message event. """
        while not self._stop_event.is_set():
            # Get the number of unread messages
            # (the 2s timeout lets the loop re-check _stop_event regularly)
            if self.message_event.wait(2.0):
                if self.config["DEBUG"]:
                    print("Message Event triggered")
                self.reset_message_indicator()

    def voice_messaging_menu(self, call_no, caller):
        """ Play a voice message menu and respond to the choices.

        Offers up to 3 attempts: '1' records a message, '0' ends the
        call, anything else replays the menu.
        """
        # Build some common paths
        voice_mail = self.config.get_namespace("VOICE_MAIL_")
        voice_mail_menu_file = voice_mail['menu_file']
        invalid_response_file = voice_mail['invalid_response_file']
        goodbye_file = voice_mail['goodbye_file']
        # Indicate the user is in the menu
        self.message_indicator.blink()
        tries = 0
        wait_secs = 8   # Candidate for configuration
        rec_msg = False
        while tries < 3:
            self.modem.play_audio(voice_mail_menu_file)
            success, digit = self.modem.wait_for_keypress(wait_secs)
            if not success:
                break
            if digit == '1':
                self.record_message(call_no, caller)
                rec_msg = True  # prevent a duplicate reset_message_indicator
                break
            elif digit == '0':
                # End this call
                break
            else:
                # Try again--up to a limit
                self.modem.play_audio(invalid_response_file)
                tries += 1
        self.modem.play_audio(goodbye_file)
        if not rec_msg:
            self.reset_message_indicator()

    def record_message(self, call_no, caller):
        """ Records a message.

        Returns the new message ID on success, or None when the modem
        fails to record.
        """
        # Build the filename used for a potential message
        path = self.config["VOICE_MAIL_MESSAGE_FOLDER"]
        filepath = os.path.join(path, "{}_{}_{}_{}.wav".format(
            call_no,
            caller["NMBR"],
            caller["NAME"].replace('_', '-'),
            datetime.now().strftime("%m%d%y_%H%M")))
        # Play instructions to caller
        leave_msg_file = self.config["VOICE_MAIL_LEAVE_MESSAGE_FILE"]
        self.modem.play_audio(leave_msg_file)
        # Show recording in progress
        self.message_indicator.turn_on()
        if self.modem.record_audio(filepath):
            # Save to Message table (message.add will update the indicator)
            msg_no = self.messages.add(call_no, filepath)
            # Return the messageID on success
            return msg_no
        else:
            self.reset_message_indicator()
            # Return failure
            return None

    def delete_message(self, msg_no):
        """ Removes the message record and associated wav file. """
        # Remove message and file (message.delete will update the indicator)
        return self.messages.delete(msg_no)

    def reset_message_indicator(self):
        # Refresh the LEDs from the unplayed count: pulse + show the count
        # (a lit decimal point means "10 or more").
        unplayed_count = self.messages.get_unplayed_count()
        if self.config["DEBUG"]:
            print("Resetting Message Indicator to show {} unplayed messages".format(unplayed_count))
        if unplayed_count > 0:
            self.message_indicator.pulse()
            if unplayed_count < 10:
                self.message_count_indicator.display(unplayed_count)
                self.message_count_indicator.decimal_point = False
            else:
                self.message_count_indicator.display(9)
                self.message_count_indicator.decimal_point = True
        else:
            self.message_indicator.turn_off()
            self.message_count_indicator.display(' ')
            self.message_count_indicator.decimal_point = False
def book_ride(self):
    """Interactively book a member on a ride.

    Validates ride number, member email, pickup/dropoff locations, cost,
    and seat count; warns (and asks for confirmation) when the booking
    would overbook the ride; inserts a bookings row and messages the
    booked member with the details.
    """
    rno = input("Please enter a ride number: ")
    # ride number must be numeric and one of the displayed rides
    while (not rno.isdigit() or not (int(rno) in self.rides_dict.keys())):
        rno = input(
            "Please enter a valid ride number from the rides displayed!: ")
    member = input(
        "Please enter the email of the member you want to book on the ride: "
    )
    while (not self.verify_email(member)):
        member = input("Please enter a valid member email: ")
    pickup = input("Please enter pick up location code: ")
    while (not self.verify_location(pickup)):
        pickup = input("Please enter a valid pickup location code: ")
    dropoff = input("Please enter drop off location code: ")
    while (not self.verify_location(dropoff)):
        dropoff = input("Please enter a valid dropoff location code: ")
    cost = input("Please enter the cost for ride: ")
    while not cost.isdigit():
        cost = input("Please enter a valid cost: ")
    seats = input("Please enter the number of seats for ride: ")
    while not seats.isdigit():
        seats = input("Please enter a valid number for seats: ")
    # overbooking guard: loop until seats fit, or the user confirms the
    # overbooking explicitly with 'y'
    # NOTE(review): rides_dict[rno][-1] is presumably the seats available
    # on the ride — confirm against where rides_dict is built
    while (int(seats) > self.rides_dict[int(rno)][-1]):
        overbook = input(
            "Warning: the ride is being overbooked, are you sure you want to continue (y/n): "
        )
        if overbook == 'y':
            break
        else:
            seats = input("Please enter a valid number for seats: ")
            while not seats.isdigit():
                seats = input("Please enter a valid number for seats: ")
    # get unique booking number
    bno = self.generate_bno()
    # parameterized insert — user input is bound, not interpolated
    # NOTE(review): no commit here; presumably the caller/connection
    # handles transaction commit — confirm
    self.cursor.execute(
        '''INSERT INTO bookings VALUES (:bno, :member, :rno, :cost,
        :seats, :pickup, :dropoff) ''', {
            'bno': bno,
            'member': member,
            'rno': rno,
            'cost': cost,
            'seats': seats,
            'pickup': pickup,
            'dropoff': dropoff
        })
    msg = '''Hi {0}, you've been booked on a ride (ride no: {1}).
            Your booking reference number is {2}. You have booked {3}
            seats at ${4} per seat. Your pickup location is {5} and
            dropoff is {6}'''.format(
        member, rno, bno, seats, cost, pickup, dropoff)
    Message(self.cursor).new(self.user, member, msg, rno)
    print("Ride successfully booked, message sent to the user!")