def test_parse():
    """parse_message yields None for non-price input and a float for prices."""
    # Inputs that must be rejected: None, plain words, words before the
    # number, bare integers (too large / negative), and non-string input.
    rejected = (None, 'lorem', 'lorem 19.99', '99999', '-99999', 1)
    for bad_input in rejected:
        assert parse_message(bad_input) is None
    # Accepted: decimal with dot, decimal with comma, trailing text.
    assert parse_message('19.99') == 19.99
    assert parse_message('19,99') == 19.99
    assert parse_message('19.99 lorem') == 19.99
def test_parsing_should_return_original_message_as_json_happy(self):
    """
    A base64 string passed to parse_message should be transformed back
    to its original JSON (dict) form.
    """
    # BUG FIX: the payload used to contain a stray backslash ("IlR\oZS...")
    # which is both an invalid string escape and an illegal base64 character,
    # so decoding could never produce the expected dict. Corrected to
    # "IlRoZS..." which decodes to '"The force is strong with this one..."'.
    encoded_data = "eyJib2R5IjogeyJtZXNzYWdlIjogIlRoZSBmb3JjZSBpcyBzdHJvbmcgd2l0aCB0aGlzIG9uZS4uLiJ9LCAiaGVhZGVycyI6IHsiYWxwaGEiOiAiYmV0YSIsICJDb250ZW50LVR5cGUiOiAiYXBwbGljYXRpb24vanNvbiJ9LCAicXVlcnlfcGFyYW1zIjogeyJtaSI6ICJwaWFjaXRvIn0sICJwYXRoX3BhcmFtcyI6IHsiZG9tYWluIjogInBpcGVkcmVhbSJ9fQ=="
    decoded_data = parse_message(encoded_data)
    expected_data = {
        'body': {
            'message': 'The force is strong with this one...'
        },
        'headers': {
            'alpha': 'beta',
            'Content-Type': 'application/json'
        },
        'query_params': {
            'mi': 'piacito'
        },
        'path_params': {
            'domain': 'pipedream'
        }
    }
    self.assertEqual(decoded_data, expected_data)
def handle_request(self, conn, addr):
    """Serve one client connection until quit/finish, EOF, or a recv timeout.

    parse_message is handed the bound ``conn.recv`` callable (project
    convention, see the other call sites) and returns a (command, data)
    pair or a falsy value on EOF.
    """
    while True:
        conn.settimeout(self.TIMEOUT)
        try:
            message = parse_message(conn.recv)
        except socket.timeout:
            break
        if not message:
            # Peer closed the connection or sent nothing usable.
            break
        command, data = message
        if command == 'quit':
            conn.sendall(make_message('ackquit'))
            break
        if command == 'finish':
            # 'finish' also asks the whole server to stop accepting.
            conn.sendall(make_message('ackfinish'))
            self.is_stop = True
            break
        # Non-terminal commands: build the reply, then send it below.
        if command == 'connect':
            reply = make_message('connected', 'HELLO')
        elif command == 'ping':
            reply = make_message('pong')
        elif command == 'pingd':
            reply = make_message('pongd', data)
        else:
            reply = make_message('unknowncommand')
        conn.sendall(reply)
    conn.close()
    logger.info('Close thread connection...')
def test_parsing_should_return_original_message_as_json_fail(self):
    """
    Passing an invalid base64 string should not break the function;
    it should only return False.
    """
    bogus_input = "fake_base64_string_will_not_parse_to_json_isa"
    self.assertEqual(parse_message(bogus_input), False)
def handle(update, context):
    """Telegram message handler: parse an amount from the incoming text,
    update the per-chat payment ledger, and reply with the debt split.

    update/context are python-telegram-bot callback arguments; the running
    total per user is kept in ``context.chat_data['state']['payments']``.
    """
    chat_id = update.effective_chat.id
    user_id = update.effective_user.id
    if DEBUG:
        # In debug mode, first name is used as user id, and you can fake
        # multiple users by typing a name after the amount, e.g '19.99 Bob'
        arr = update.effective_message.text.split(" ")
        user_id = update.effective_user.first_name
        if len(arr) > 1:
            user_id = arr[1]
    # added_cost is None when the text is not a parseable amount.
    added_cost = parse_message(update.effective_message.text)
    if (added_cost is not None):
        # Lazily initialise per-user language and per-chat ledger state.
        if 'lang' not in context.user_data:
            context.user_data['lang'] = 'en'
        if 'state' not in context.chat_data:
            context.chat_data['state'] = {'payments': dict()}
        if user_id not in context.chat_data['state']['payments']:
            context.chat_data['state']['payments'][user_id] = float(0)
        # Negative totals are clamped to zero (you can't be owed for
        # spending less than nothing).
        new_costs = context.chat_data['state']['payments'][user_id] + added_cost
        if new_costs < 0:
            new_costs = 0
        context.chat_data['state']['payments'][user_id] = new_costs
        # split_costs returns, per debtor, a mapping creditor -> amount owed.
        results = split_costs(context.chat_data['state']['payments'])
        status_strings = []
        for debtor in results:
            for creditor, amount in results[debtor].items():
                if DEBUG:
                    debtor_name = debtor
                    creditor_name = creditor
                else:
                    # Resolve Telegram user ids to first names for display.
                    debtor_name = context.bot.get_chat_member(
                        chat_id, debtor).user.first_name
                    creditor_name = context.bot.get_chat_member(
                        chat_id, creditor).user.first_name
                status_strings.append(
                    _t(context, 'status').format(debtor_name, creditor_name,
                                                 str(round(amount, 2))))
        # Empty status list means everyone is even.
        reply_message = '\n'.join(status_strings) or _t(context, 'even')
    else:
        reply_message = _t(context, 'error')
    context.bot.sendMessage(chat_id, reply_message)
def start_raid(self, peer_id, controllers):
    """Active-raid loop: keep posting messages into the conversation until an
    admin sends "stopRaid" or the bot is kicked.

    NOTE(review): ``controllers`` is accepted but never read in this body —
    confirm whether callers still need it.
    """
    working = True
    while working:
        # Drain pending long-poll events, watching for a stop request.
        for event in self.longpoll.check():
            # print(event)
            from_id = event.object['message']['from_id']
            message_text = event.object['message']['text']
            if event.type == VkBotEventType.MESSAGE_NEW:
                parsed_text = utils.parse_message(message_text,
                                                  self.community_info)
                if parsed_text == "stopRaid":
                    log_info('{} requested raid stop'.format(from_id))
                    admin_check_answer = check_admin(from_id)
                    if admin_check_answer:
                        # Admin confirmed: stop looping and restore the
                        # "startRaid" keyboard.
                        log_info('Request approved. Starting raid.')
                        working = False
                        self.vk.messages.send(
                            peer_id=peer_id,
                            message="Raid is stopped",
                            random_id=get_random_id(),
                            keyboard=keyboard_gen.gen("startRaid"))
                    else:
                        log_info("Request denied. {} isn't in admin list.".
                                 format(from_id))
                        self.vk.messages.send(
                            peer_id=peer_id,
                            message="No access. Sosi xyi nishiy nn.",
                            random_id=get_random_id())
        if working:
            # Still raiding: post the next random raid message, then wait.
            # NOTE(review): reconstructed indentation places this once per
            # long-poll cycle (after the event loop) — confirm against VCS.
            try:
                self.vk.messages.send(
                    peer_id=peer_id,
                    message=random.choice(config.msgs),
                    random_id=get_random_id(),
                    attachment="wall-199568112_16",
                    keyboard=keyboard_gen.gen("arturyud.in vto.pe"))
                time.sleep(config.message_delay)
            except vk_api.exceptions.ApiError:
                # Most likely kicked from the conversation; give up cleanly.
                working = False
                log_error('Kicked from conversation, stopping...')
def process(self, text, options):
    """Parse *text* into (tags, body) and post it through each plugin named
    in ``options.plugins``.

    options.settings names the settings profile to load; plugins that are
    unknown or not configured are skipped with a warning.
    """
    plugins = self.plugins
    settings = self.load_settings(options.settings)
    log.info('Load `%s` settings.', options.settings)
    log.debug('Settings dict %s', settings)
    plugins_instances = plugins.setup_plugins(settings)
    tags, text = parse_message(text)
    log.debug("Parsed text: tags: %s; text: %s", tags, text)
    for key in options.plugins:
        p = plugins_instances.get(key)
        if p is None:
            # BUG FIX: .get() returns None for an unknown plugin name, which
            # previously crashed on p.is_configured() with AttributeError.
            log.warning('Plugin %s is not available', key)
            continue
        if p.is_configured():
            p.add_post(text, tags)
        else:
            log.warning('Plugin %s not valid configuration', key)
def start_listening(self):
    """Main long-poll loop: greet the chat when the bot is invited, answer
    "ver" with the version message, and start a raid on an admin's
    "startRaid" request."""
    for event in self.longpoll.listen():
        peer_id = event.object['message']['peer_id']
        from_id = event.object['message']['from_id']
        message_text = event.object['message']['text']
        if event.type != VkBotEventType.MESSAGE_NEW:
            continue
        if message_text == '':
            # Empty text means a service action (e.g. bot invited to chat).
            action_type = event.object['message']['action']['type']
            if action_type == 'chat_invite_user':
                self.vk.messages.send(
                    peer_id=peer_id,
                    message=config.ver_msg +
                    "\nTo start raid admin must click any button below!",
                    random_id=get_random_id(),
                    keyboard=keyboard_gen.gen("startRaid"))
                log_info('Entered conversation')
        parsed_text = utils.parse_message(message_text, self.community_info)
        if parsed_text == "ver":
            self.vk.messages.send(peer_id=peer_id,
                                  message=config.ver_msg,
                                  random_id=get_random_id())
        if parsed_text == "startRaid":
            log_info('{} requested raid start.'.format(from_id))
            if check_admin(from_id):
                log_info('Request approved. Starting raid.')
                self.start_raid(peer_id=peer_id,
                                controllers=config.controllers)
            else:
                # BUG FIX: the '{}' placeholder was logged verbatim because
                # .format(from_id) was missing; now consistent with the
                # identical log line in start_raid().
                log_info("Request denied. {} isn't in admin list."
                         .format(from_id))
                self.vk.messages.send(
                    peer_id=peer_id,
                    message="No access. Sosi xyi nishiy nn.",
                    random_id=get_random_id())
def test_parse_message_empty(self):
    """An empty body parses to an empty string."""
    self.assertEqual(utils.parse_message(""), "")
def test_parse_message_punctuation(self):
    """Punctuation and whitespace collapse into comma-separated words."""
    message = "hey,girl, hey! \n\n What's up??? :)"
    expected = "hey,girl,hey,what's,up"
    self.assertEqual(utils.parse_message(message), expected)
def test_parse_message_fwd_empty(self):
    """A body that is nothing but a forwarded chunk parses to ""."""
    forwarded_only = "\n\n-----Original Message-----\n\n SEND TO ALL YOUR FRIENDS..."
    self.assertEqual(utils.parse_message(forwarded_only), "")
def test_parse_message_fwd(self):
    """Everything after the forwarded-message marker is discarded."""
    body = ("hey,girl, hey! \n\nLook at this :) \n\n"
            "-----Original Message-----\n\nSEND TO ALL YOUR FRIENDS...")
    expected = "hey,girl,hey,look,at,this"
    self.assertEqual(utils.parse_message(body), expected)
def process_archive(self, peer, sender, mail_options, recips, rcptopts, data):
    """Archives email meta data using a Backend.

    Validates the raw mail, extracts From/To/Cc/Subject/Date and the
    attachment list, consults the hash cache to avoid archiving the same
    message twice, hands the metadata to the backend, then relays the mail
    to the next hop via sendmail().

    NOTE(review): Python 2 era code (StringIO/rfc822 Message, dict.has_key,
    MultiFile); local name `hash` shadows the builtin. Left untouched.
    """
    LOG(E_INFO, '%s: Sender is <%s> - Recipients (Envelope): %s' %
        (self.type, sender, ','.join(recips)))
    size = len(data)
    if size < MINSIZE:
        return self.do_exit(550, 'Invalid Mail')
    # Ensure the payload is newline-terminated before parsing/relaying.
    if not data.endswith(NL):
        data = data + NL
    args = {}
    aid = None
    mid = None
    stream = StringIO(data)
    msg = Message(stream)
    # Null return path (bounces): relay without archiving.
    if sender == '':
        LOG(E_INFO, '%s: Null return path mail, not archived' % (self.type))
        return self.sendmail('<>', mail_options, recips, rcptopts, data, aid)
    ## Check if I have msgid in my cache
    mid = msg.get('message-id', self.new_mid())
    hash = hash_headers(msg.get)
    if self.hashdb.has_key(hash):
        # Already archived: reuse the stored archive id, only add the header.
        LOG(E_TRACE, '%s: Message-id: %s' % (self.type, mid))
        aid = self.hashdb[hash]
        LOG(E_TRACE,
            '%s: Message already has year/pid pair, only adding header'
            % self.type)
        return self.sendmail(sender, mail_options, recips, rcptopts,
                             self.add_aid(data, msg, aid), aid, hash)
    args['m_mid'] = mid
    args['hash'] = hash
    ## Check for duplicate headers
    dupe = dupe_check(msg.headers)
    if dupe is not None:
        LOG(E_ERR, '%s: Duplicate header %s' % (self.type, dupe))
        return self.do_exit(552, 'Duplicate header %s' % dupe)
    ## Extraction of From field
    m_from = msg.getaddrlist('From')
    if len(m_from) == 1:
        m_from = safe_parseaddr(m_from[0][1])
    else:
        m_from = None
    ## Empty or invalid 'From' field, try to use sender
    if m_from is None:
        LOG(E_ERR, '%s: no From header in mail using sender' % self.type)
        m_from = safe_parseaddr(sender)
    ## No luck
    if m_from is None:
        return self.do_exit(552, 'Mail has not suitable From/Sender')
    args['m_from'] = m_from
    ## Extract 'To' field
    m_to = []
    for h in msg.getaddrlist('To'):
        rec = safe_parseaddr(h[1])
        if rec is None:
            continue
        m_to.append(rec)
    ## Empty 'To' field use recipients
    if len(m_to) == 0:
        LOG(E_ERR, '%s: no To header in mail using recipients' % self.type)
        for recipient in recips:
            rec = safe_parseaddr(recipient)
            if rec is None:
                continue
            m_to.append(rec)
    if len(m_to) == 0:
        return self.do_exit(552, 'Mail has not suitable To/Recipient')
    ## Extract 'Cc' field
    for h in msg.getaddrlist('Cc'):
        rec = safe_parseaddr(h[1])
        if rec is None:
            continue
        m_to.append(rec)
    ## Cleanup: remove duplicates
    recs = []
    for rec in m_to:
        if rec not in recs:
            recs.append(rec)
    args['m_rec'] = recs
    ## Extract 'Subject' field
    m_sub = mime_decode_header(msg.get('Subject', 'No Subject'))
    # Subject matching the configured pattern: relay without archiving.
    if subjpattern is not None and m_sub.find(subjpattern) != -1:
        LOG(E_INFO, '%s: Subject pattern matched, not archived' % self.type)
        return self.sendmail(sender, mail_options, recips, rcptopts,
                             self.remove_aid(data, msg))
    args['m_sub'] = m_sub
    ## Whitelist check: From, To and Sender (envelope)
    checklist = [m_from] + m_to
    ss = safe_parseaddr(sender)
    if ss is not None:
        checklist.append(ss)
    for check in checklist:
        # Whitelist holds local-parts (text before the '@').
        if check.split('@', 1)[0] in whitelist:
            LOG(E_INFO, '%s: Mail to: %s in whitelist, not archived'
                % (self.type, check))
            return self.sendmail(sender, mail_options, recips, rcptopts,
                                 self.remove_aid(data, msg))
    ## Sender size limit check - in kb
    if dbchecker is not None and dbchecker.quota_check(m_from, size >> 10):
        return self.do_exit(422, 'Sender quota execeded')
    args['m_size'] = size
    ## Extract 'Date' field
    m_date = None
    if self.datefromemail:
        m_date = msg.getdate('Date')
        # Reject unparseable/invalid Date headers, fall back to "now".
        try:
            mktime(m_date)
        except:
            m_date = None
    if m_date is None:
        m_date = localtime(time())
    args['m_date'] = m_date
    # Collect attachment descriptions; multipart mails are split part by part.
    m_attach = []
    if msg.maintype != 'multipart':
        m_parse = parse_message(msg)
        if m_parse is not None:
            m_attach.append(m_parse)
    else:
        filepart = MultiFile(stream)
        filepart.push(msg.getparam('boundary'))
        try:
            while filepart.next():
                submsg = Message(filepart)
                subpart = parse_message(submsg)
                if subpart is not None:
                    m_attach.append(subpart)
        except:
            LOG(E_ERR, '%s: Error in multipart splitting' % self.type)
    args['m_attach'] = m_attach
    if dbchecker is not None:
        ## Collect data for mb lookup
        addrs = []
        for addr in [m_from] + m_to:
            addrs.append(addr)
        args['m_mboxes'] = dbchecker.mblookup(addrs)
    else:
        args['m_mboxes'] = []
    year, pid, error = self.backend.process(args)
    # year == 0 signals a backend failure; pid then carries the SMTP code.
    if year == 0:
        LOG(E_ERR, '%s: Backend Error: %s' % (self.type, error))
        return self.do_exit(pid, error)
    ## Adding X-Archiver-ID: header
    aid = '%d-%d' % (year, pid)
    data = self.add_aid(data, msg, aid)
    LOG(E_TRACE, '%s: inserting %s msg in hashdb' % (self.type, aid))
    self.hashdb[hash] = aid
    self.hashdb.sync()
    ## Next hop
    LOG(E_TRACE, '%s: backend worked fine' % self.type)
    LOG(E_TRACE, '%s: passing data to nexthop: %s:%s'
        % (self.type, self.output_address, self.output_port))
    return self.sendmail(sender, mail_options, recips, rcptopts, data, aid,
                         hash)
def incoming_entry(self, message):
    """Handle an incoming SMS: record it, ensure the sender is subscribed,
    parse it as an answer to the current poll question, and reply."""
    # make a poll.Message out of the rapidsms.models.Message
    # because utils.parse_message expects a poll.Message
    mess = p.Message.objects.create(is_outgoing=False,
                                    connection=str(message.connection),
                                    text=message.text)
    # ensure that the caller is subscribed
    r, created = p.Respondant.subscribe(str(message.connection))
    # if no question is currently running, then
    # we can effectively ignore the incoming sms,
    # but should notify the caller anyway
    ques = p.Question.current()
    if ques is None:
        message.respond(STR["no_question"])
        self.handled = True
        # NOTE(review): there is no early return here, so execution falls
        # through and parse_message is called with ques=None — confirm
        # whether utils.parse_message tolerates a None question.
    # try to parse the message
    # pass along the rapidsms.models.Message.backend with the
    # poll.Message object so that parse_message can check that
    # the respondant is subscribed
    parsed = utils.parse_message(mess, ques)
    # send an appropriate response to the caller
    if parsed:
        graph.graph_entries(ques)
        message.respond(STR["thanks"])
        self.handled = True
    else:
        message.respond(STR["thanks_unparseable"])
        self.handled = True

# BROADCAST FUNCTIONS ----------------------------------------------------------
# NOTE(review): the functions and statements below reference `self`, so in
# the original file they were presumably nested inside an app method (e.g.
# start()) whose header is not visible in this chunk — confirm nesting
# against the original source before relying on this reconstruction.

def broadcast_question(question):
    """Blast *question* (plus its answer choices, unless free-text) to every
    active respondant; returns a human-readable summary string."""
    # gather active respondants
    respondants = p.Respondant.objects.filter(is_active=True)
    sending = 0
    # message to be blasted
    broadcast = question.text
    # unless this is a free text question,
    # add the answer choices to the broadcast message
    if question.type != 'F':
        answers = p.Answer.objects.filter(question=question.pk)
        for a in answers:
            broadcast = broadcast + '\n ' + a.choice + ' - ' + a.text
    # blast the broadcast message to our active respondants
    # and increment the counter
    for r in respondants:
        r.connection.backend.send(r.connection.identity, broadcast)
        sending += 1
        # NOTE(review): reconstructed as a per-send progress line; confirm
        # whether it belonged inside or after the loop in the original.
        self.info('[broadcaster] Blasted to %d of %d numbers...'
                  % (sending, len(respondants)))
    # save number broadcasted to db
    question.sent_to = sending
    question.save()
    return '[broadcaster] Blasted %s to %d numbers with %d failures' % \
        (broadcast, sending, (len(respondants) - sending))

def broadcaster(seconds=60, wake_hour=8, sleep_hour=21):
    """Background loop: every *seconds*, broadcast the current question if it
    has not been sent yet, but only during waking hours."""
    self.info("Starting Broadcaster...", "init")
    while True:
        # only broadcast while people are awake. otherwise, we'll be sending
        # people messages at 12:01AM, which probably won't go down so well
        hour = time.localtime()[3]
        if wake_hour < hour < sleep_hour:
            # if the current question has not been sent,
            # broadcaster will broadcast it
            q = p.Question.current()
            if q:
                # if this question hasn't been 'sent_to' anyone yet,
                # we can assume that it should be broadcast now
                if not q.sent_to:
                    self.info("Broadcasting new question")
                    broadcast_question(q)
                # already sent, so we have nothing to do
                # i don't think we really need to log this...
                #else: app.log("Current question was already broadcast")
            else:
                # when in production, there should probably ALWAYS
                # be an active question; otherwise, anyone texting
                # in will receive an error -- so highlight this
                # as a warning in the screen log
                self.info("No current question", "warn")
        # we are outside of waking hours, so do nothing
        else:
            self.info("Broadcast disabled from %d:00 to %d:00" %
                      (sleep_hour, wake_hour))
        # wait until it's time
        # to check again (60s)
        time.sleep(seconds)

# BROADCAST THREAD -------------------------------------------------------------
self.info("[broadcaster] Starting up...")
# interval to check for broadcasting (in seconds)
broadcast_interval = 30
# start a thread for broadcasting
thread.start_new_thread(broadcaster, (broadcast_interval,))
def test_parse_message_fwd(self):
    """Forwarded content after the marker must not appear in the result."""
    original = ("hey,girl, hey! \n\nLook at this :) \n\n"
                "-----Original Message-----\n\nSEND TO ALL YOUR FRIENDS...")
    self.assertEqual(utils.parse_message(original),
                     "hey,girl,hey,look,at,this")
def process_archive(self, peer, sender, mail_options, recips, rcptopts, data):
    """Archives email meta data using a Backend.

    Validates the raw mail, extracts From/To/Cc/Subject/Date and the
    attachment list, consults the hash cache to avoid archiving the same
    message twice, hands the metadata to the backend, then relays the mail
    to the next hop via sendmail().

    NOTE(review): Python 2 era code (StringIO/rfc822 Message, dict.has_key,
    MultiFile); local name `hash` shadows the builtin. Left untouched.
    """
    LOG(E_INFO, "%s: Sender is <%s> - Recipients (Envelope): %s" %
        (self.type, sender, ",".join(recips)))
    size = len(data)
    if size < MINSIZE:
        return self.do_exit(550, "Invalid Mail")
    # Ensure the payload is newline-terminated before parsing/relaying.
    if not data.endswith(NL):
        data = data + NL
    args = {}
    aid = None
    mid = None
    stream = StringIO(data)
    msg = Message(stream)
    # Null return path (bounces): relay without archiving.
    if sender == "":
        LOG(E_INFO, "%s: Null return path mail, not archived" % (self.type))
        return self.sendmail("<>", mail_options, recips, rcptopts, data, aid)
    ## Check if I have msgid in my cache
    mid = msg.get("message-id", self.new_mid())
    hash = hash_headers(msg.get)
    if self.hashdb.has_key(hash):
        # Already archived: reuse the stored archive id, only add the header.
        LOG(E_TRACE, "%s: Message-id: %s" % (self.type, mid))
        aid = self.hashdb[hash]
        LOG(E_TRACE,
            "%s: Message already has year/pid pair, only adding header"
            % self.type)
        return self.sendmail(sender, mail_options, recips, rcptopts,
                             self.add_aid(data, msg, aid), aid, hash)
    args["m_mid"] = mid
    args["hash"] = hash
    ## Check for duplicate headers
    dupe = dupe_check(msg.headers)
    if dupe is not None:
        LOG(E_ERR, "%s: Duplicate header %s" % (self.type, dupe))
        return self.do_exit(552, "Duplicate header %s" % dupe)
    ## Extraction of From field
    m_from = msg.getaddrlist("From")
    if len(m_from) == 1:
        m_from = safe_parseaddr(m_from[0][1])
    else:
        m_from = None
    ## Empty or invalid 'From' field, try to use sender
    if m_from is None:
        LOG(E_ERR, "%s: no From header in mail using sender" % self.type)
        m_from = safe_parseaddr(sender)
    ## No luck
    if m_from is None:
        return self.do_exit(552, "Mail has not suitable From/Sender")
    args["m_from"] = m_from
    ## Extract 'To' field
    m_to = []
    for h in msg.getaddrlist("To"):
        rec = safe_parseaddr(h[1])
        if rec is None:
            continue
        m_to.append(rec)
    ## Empty 'To' field use recipients
    if len(m_to) == 0:
        LOG(E_ERR, "%s: no To header in mail using recipients" % self.type)
        for recipient in recips:
            rec = safe_parseaddr(recipient)
            if rec is None:
                continue
            m_to.append(rec)
    if len(m_to) == 0:
        return self.do_exit(552, "Mail has not suitable To/Recipient")
    ## Extract 'Cc' field
    for h in msg.getaddrlist("Cc"):
        rec = safe_parseaddr(h[1])
        if rec is None:
            continue
        m_to.append(rec)
    ## Cleanup: remove duplicates
    recs = []
    for rec in m_to:
        if rec not in recs:
            recs.append(rec)
    args["m_rec"] = recs
    ## Extract 'Subject' field
    m_sub = mime_decode_header(msg.get("Subject", "No Subject"))
    # Subject matching the configured pattern: relay without archiving.
    if subjpattern is not None and m_sub.find(subjpattern) != -1:
        LOG(E_INFO, "%s: Subject pattern matched, not archived" % self.type)
        return self.sendmail(sender, mail_options, recips, rcptopts,
                             self.remove_aid(data, msg))
    args["m_sub"] = m_sub
    ## Whitelist check: From, To and Sender (envelope)
    checklist = [m_from] + m_to
    ss = safe_parseaddr(sender)
    if ss is not None:
        checklist.append(ss)
    for check in checklist:
        # Whitelist holds local-parts (text before the '@').
        if check.split("@", 1)[0] in whitelist:
            LOG(E_INFO, "%s: Mail to: %s in whitelist, not archived"
                % (self.type, check))
            return self.sendmail(sender, mail_options, recips, rcptopts,
                                 self.remove_aid(data, msg))
    ## Sender size limit check - in kb
    if dbchecker is not None and dbchecker.quota_check(m_from, size >> 10):
        return self.do_exit(422, "Sender quota execeded")
    args["m_size"] = size
    ## Extract 'Date' field
    m_date = None
    if self.datefromemail:
        m_date = msg.getdate("Date")
        # Reject unparseable/invalid Date headers, fall back to "now".
        try:
            mktime(m_date)
        except:
            m_date = None
    if m_date is None:
        m_date = localtime(time())
    args["m_date"] = m_date
    # Collect attachment descriptions; multipart mails are split part by part.
    m_attach = []
    if msg.maintype != "multipart":
        m_parse = parse_message(msg)
        if m_parse is not None:
            m_attach.append(m_parse)
    else:
        filepart = MultiFile(stream)
        filepart.push(msg.getparam("boundary"))
        try:
            while filepart.next():
                submsg = Message(filepart)
                subpart = parse_message(submsg)
                if subpart is not None:
                    m_attach.append(subpart)
        except:
            LOG(E_ERR, "%s: Error in multipart splitting" % self.type)
    args["m_attach"] = m_attach
    if dbchecker is not None:
        ## Collect data for mb lookup
        addrs = []
        for addr in [m_from] + m_to:
            addrs.append(addr)
        args["m_mboxes"] = dbchecker.mblookup(addrs)
    else:
        args["m_mboxes"] = []
    year, pid, error = self.backend.process(args)
    # year == 0 signals a backend failure; pid then carries the SMTP code.
    if year == 0:
        LOG(E_ERR, "%s: Backend Error: %s" % (self.type, error))
        return self.do_exit(pid, error)
    ## Adding X-Archiver-ID: header
    aid = "%d-%d" % (year, pid)
    data = self.add_aid(data, msg, aid)
    LOG(E_TRACE, "%s: inserting %s msg in hashdb" % (self.type, aid))
    self.hashdb[hash] = aid
    self.hashdb.sync()
    ## Next hop
    LOG(E_TRACE, "%s: backend worked fine" % self.type)
    LOG(E_TRACE, "%s: passing data to nexthop: %s:%s"
        % (self.type, self.output_address, self.output_port))
    return self.sendmail(sender, mail_options, recips, rcptopts, data, aid,
                         hash)
def recive(self):
    """Read and parse the next message from this object's socket.

    Note: parse_message is handed the bound ``recv`` method itself (not the
    received bytes) — project convention shared with the server side.
    """
    return parse_message(self.socket.recv)