class Account(Contextual):
    """A mail account bound to a single ``Inbox`` instance.

    The credentials are kept on the instance and an inbox is opened
    immediately for the (optional) *host*.
    """

    def __init__(self, username, password, host=None):
        # Retain raw credentials; the inbox receives this account object
        # so it can read them back when connecting.
        self.password = password
        self.username = username
        self.inbox = Inbox(self, host)

    def close(self):
        """Release the account by closing its underlying inbox."""
        self.inbox.close()
def handle(self, *args, **options):
    """Run a blocking SMTP sink that stores each received message.

    Looks like a Django management-command entry point — *args/**options
    come from the command framework (TODO confirm against the caller).
    """
    def on_message(to, sender, subject, body):
        # Commented-out debug dump of the raw body (Python 2 era):
        # fn = "mymsg_{}.txt".format(get_random_string(5))
        # with open(fn, "wb") as f:
        #     f.write(body)
        # print "written {} bytes to {}.".format(len(body), fn)
        # Split the raw MIME body into its parts and persist it.
        text, html, images = parse_message(body)
        msg = add_email(sender, to, subject, text, html, images)
        # TODO: send confirmation email
        # TODO: send email to managers
    # NOTE(review): port is passed before host here — confirm that matches
    # this project's Inbox signature.
    inbox = Inbox(on_message, settings.TURTLES_SMTP_PORT, settings.TURTLES_SMTP_HOST)
    inbox.serve()  # blocks until the process is stopped
def __init__(self):
    """Open the sync database and set up queue/bookkeeping state.

    Connection parameters come from the ``env`` module; ``limitRow``
    caps how many rows a single processing pass fetches.
    """
    self.syncDB = DatabaseConnection(env.DB_HOST, env.DB_UNAME,
                                     env.DB_PASSWORD, env.DB_NAME)
    # self.statusDB = DatabaseConnection(
    #     env.DB_HOST, env.DB_UNAME, env.DB_PASSWORD, env.DB_NAME)
    self.limitRow = env.LIMIT_PROC_ROW
    # fix: Outbox(self.syncDB) was assigned to self.outbox twice;
    # the redundant second construction has been removed.
    self.outbox = Outbox(self.syncDB)
    self.systemlog = SystemLog()
    self.inbox = Inbox(self.syncDB)
    self.clientIdStartFrom = 10
    self.updateToZeroHistory = set()   # codes of rows parked at PK 0
    self.PKFileName = 'pk'             # on-disk persistence for the set above
    self.nextPriToProcess = dict()     # table -> next PRI value to process
    self.PRIFileName = 'pri'           # on-disk persistence for the dict above
def main():
    """Run an SMTP sink that turns Caltex receipt emails into spreadsheet rows."""
    inbox = Inbox()
    parser = Parser()
    telegram = TelegramAPI()

    # Async callback when email is received
    @inbox.collate
    def handle(to, sender, body):
        # Decode the quoted-printable body; drop non-ASCII rather than fail.
        try:
            str_body = quopri.decodestring(body)
            str_body = str_body.decode("ascii", "ignore")
        except Exception as e:
            log.plog(e)
            return
        # Caltex Singapore's receipt email body for CaltexGO
        if ("Thank You - Successful Payment (" not in str_body):
            # Ignore emails if conditions not met
            # e.g. using a very specific toaddr like [email protected]
            print(str_body)
            return
        try:
            # Parses the fixed-format email to extract date, refill, cost per litre
            ddmmyy, refilled, costperlitre = parser.extract_info(str_body)
        except ParsingException as e:
            log.plog("{} (sender: {})".format(e, sender))
            return
        try:
            # Uses Gspread to get previous mileage
            sheets_api = SheetsAPI()
            prev_mileage = sheets_api.get_prev_mileage()
            # Uses Telegram bot to prompt user for current mileage
            mileage = telegram.prompt_for_mileage(prev_mileage)
        except ParsingException as e:
            # NOTE(review): only ParsingException is caught here although the
            # calls are Sheets/Telegram I/O — confirm that is intentional.
            print(e)
            return
        # Uses Gspread to access Google Sheets API to update cells
        sheets_api = SheetsAPI()
        sheets_api.update_row(ddmmyy, mileage, refilled, costperlitre)
        log.plog("update_row: {} {} {} {}".format(ddmmyy, mileage, refilled,
                                                  costperlitre))

    # Blocks serving SMTP on all interfaces.
    inbox.serve(address='0.0.0.0', port=4467)
def __init__(self, dbhost, dbusername, dbpass, dbname, sinkaddr, skey, ivkey):
    """Bind a ZeroMQ PULL sink and open DB-backed inbox/outbox queues.

    skey/ivkey look like a symmetric cipher key and IV used elsewhere
    (TODO confirm — not used in this constructor beyond storage).
    """
    self.key = skey
    self.iv = ivkey
    self.context = zmq.Context()
    # PULL socket bound locally; remote peers PUSH messages to sinkaddr.
    self.receiver = self.context.socket(zmq.PULL)
    self.receiver.bind(sinkaddr)
    self.syslog = SystemLog()
    self.db = DatabaseConnection(dbhost, dbusername, dbpass, dbname)
    self.inbox = Inbox(self.db)
    self.outbox = Outbox(self.db)
from inbox import Inbox
from smtplib import SMTP

inbox = Inbox()

SMTP_HOST = 'smtp.mydomain.com'
SMTP_INBOX = '*****@*****.**'
STOP_AT_COUNT = 1


@inbox.collate
def handle(to, sender, subject, body):
    """Forward every received message to SMTP_INBOX, up to ~100 messages."""
    # fix: without this declaration, `STOP_AT_COUNT += 1` raises
    # UnboundLocalError because the name becomes local to the function.
    global STOP_AT_COUNT
    print('Forwarding Mail')
    STOP_AT_COUNT += 1
    conn = SMTP(SMTP_HOST, 25, 'localhost')
    conn.ehlo_or_helo_if_needed()
    conn.sendmail(sender, SMTP_INBOX, body)
    conn.quit()
    # fix: >= instead of == so the guard still fires even if the counter
    # ever skips past 100.
    if STOP_AT_COUNT >= 100:
        print('Enough is enough. Closing now.')
        exit()


print('starting server')
inbox.serve(address='0.0.0.0', port=25)
# NOTE(review): this chunk starts mid-definition — the enclosing asyncore
# dispatcher class and its __init__ header are outside this view.
        # Standard asyncore TCP listener setup: create, reuse-addr, bind, listen.
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind((host, port))
        self.listen(5)
        self.storage = storage

    def handle_accept(self):
        """Accept one inbound connection and hand it to an HttpHandler."""
        pair = self.accept()
        if pair is None:
            pass  # spurious wakeup: nothing to accept
        else:
            sock, addr = pair
            handler = HttpHandler(sock, self.storage)


inbox = Inbox()
storage = StorageHandler()


@inbox.collate
def handle(to, sender, body):
    # Parse the raw RFC-2822 body and normalise it into a plain dict.
    parser = email.parser.Parser()
    mail = parser.parsestr(body)
    message = {}
    message['to'] = to
    message['sender'] = unicode(sender)  # Python 2 code (unicode builtin)
    message['subject'] = mail['subject']
    message['received'] = time.strftime("%Y-%m-%d %H:%M:%S")
    message['content'] = ""
    message['attachments'] = []
    # NOTE(review): truncated here — the loop body over MIME parts is
    # outside this view.
    for part in mail.walk():
from inbox import Inbox
from smtplib import SMTP

inbox = Inbox()

# fix: host and port are now separate values; the old code passed
# 'localhost:1025' as the host AND 25 as the port, which smtplib cannot
# resolve (the colon form is only parsed when port == 0).
SMTP_HOST = 'localhost'
SMTP_PORT = 1025
SMTP_USERNAME = '******'
SMTP_PASSWORD = '******'
body = 'hi'  # NOTE: unused; shadowed by the handler parameter


@inbox.collate
def handle(to, sender, body):
    """
    Forward a message via an authenticated SMTP connection with starttls.
    """
    conn = SMTP(SMTP_HOST, SMTP_PORT, 'localhost')
    conn.starttls()
    conn.ehlo_or_helo_if_needed()
    conn.login(SMTP_USERNAME, SMTP_PASSWORD)
    conn.sendmail(sender, to, body)
    conn.quit()


inbox.serve(address='localhost', port=1025)
#!/usr/bin/env python
"""
Need to install Logbook and inbox to get the smtpsink to work
"""
from inbox import Inbox

PORT = 4467
ADDR = '0.0.0.0'

inbox = Inbox()


@inbox.collate
def handle(*args, **kwargs):
    """Append every received message's fields to the 'email_log' file."""
    # fix: use a context manager so the log file is flushed and closed
    # (the old code leaked the file handle on every message).
    with open('email_log', 'a') as outfile:
        for arg in args:
            outfile.write("{0}\n".format(arg))
        for key, arg in kwargs.items():
            outfile.write("{0}: {1}".format(key, arg))
        outfile.write('*' * 30)


# fix: announce BEFORE serving — serve() blocks, so the old trailing
# print statement was never reached.
print("serving on {0}:{1}".format(ADDR, PORT))
# Bind directly.
inbox.serve(address=ADDR, port=PORT)
# NOTE(review): this chunk starts mid-method — the enclosing class (MyZabbix?)
# and the loop producing metric_name/metric_value are outside this view.
            # Skip the pseudo-metric named after the prototype class itself.
            if metric_name == prototype_class:
                continue
            else:
                self.zabbix.add(
                    'smtp.trap.subject.match[{},{},{}]'.format(
                        prototype_class, prototype_name, metric_name),
                    metric_value)
                self.zabbix.add(
                    'smtp.trap.subject.match.subject[{},{}]'.format(
                        prototype_class, prototype_name), subject)
                self.zabbix.add(
                    'smtp.trap.subject.match.body[{},{}]'.format(
                        prototype_class, prototype_name), body)


inbox = Inbox()


@inbox.collate
def handle(to, sender, subject, body, zabbix_server=config.zabbix_address,
           zabbix_port=config.zabbix_port,
           decode_html=config.server_decode_html):
    # One Zabbix host per recipient: the local-part of the address is the
    # host name the trap is filed under.
    for recipient in to:
        host = recipient.partition('@')[0]
        logger.info('host is %s' % host)
        # NOTE(review): truncated here — the rest of the handler is outside
        # this view.
        myzabbix = MyZabbix(zabbix_server, zabbix_port, host)
def __init__(self, username, password, host=None):
    """Keep the credentials and open an inbox for this account on *host*."""
    self.password = password
    self.username = username
    self.inbox = Inbox(self, host)
import MySQLdb as mdb
from smtplib import SMTP
import requests
from backports import configparser as ConfigParser
import os
#import ConfigParser, os
import re
import datetime
import sys
import hashlib
from sender import Mail, Message
import time
import tempfile
import uuid

# NOTE(review): Inbox is not imported in the visible import block — unless it
# arrives via an import outside this view, this line raises NameError.
inbox = Inbox()


def checkChain(rule, to, sender, subject):
    """Evaluate one filter rule against a message's sender and To headers.

    rule is indexed positionally: rule[2] = sender regex, rule[3] = To regex,
    rule[4] = flag "all To headers must match" (TODO confirm schema).
    """
    check = True  # Status of this Rule
    # Check if Sender Match
    if rule[2] != None and re.match(rule[2], sender) is None:
        check = False
    # Check if To Match
    oneToFails = False  # If it gets True, min one To header doesn't match
    oneToMatch = False  # If it gets True, min one To header matches
    for mail in to:
        if rule[3] != None and re.match(rule[3], mail) is None:
            oneToFails = True
        elif rule[3] != None and re.match(rule[3], mail) is not None:
            oneToMatch = True
    # NOTE(review): truncated here — the rest of checkChain is outside this view.
# inbox.py instantiates a standalone mail server based in python and calls the handle function
# when it receives a new message. We then "handle" the message by saving it's text to a .txt file
# and a .json blob file.
#
# Todo:
# Handle encoding better
# Figure out how to handle attachments
# Stability and watchdogs

from inbox import Inbox
from config import *
import time, json
import os

# Create the inbox.py object
inbox = Inbox()


# Our message handling function
# fix: the decorator must sit directly above the def it wraps; it previously
# decorated a bare assignment, which is a SyntaxError.
@inbox.collate
def handle(rawdata, to, sender, subject, mailhtml, mailplain, attachments):
    """Append each mail to index.html and save it as its own .html file."""
    # fix: timestamp per message — previously captured once at import time,
    # so every mail got the same (stale) timestamp.
    timenow = time.time()
    entry = sender + "-" + subject + "-" + str(int(timenow))
    # Write new mails to index.html
    if not os.path.exists("/home/ubuntu/newspoc/index.html"):
        open("/home/ubuntu/newspoc/index.html", "w").close()
    with open("/home/ubuntu/newspoc/index.html", "a") as index:
        # fix: write through the context-manager variable (was `file.write`,
        # a NameError at this point).
        index.write("<a href='" + entry + "'>" + entry + "</a>\n")
    print("Added " + entry + " to index.html")
    # Write the components to the .html file separated by newlines (ok but a little more readable)
    with open("/home/ubuntu/newspoc/" + entry + ".html", "w") as file:
        file.write("TO:\n" + str(to) + "\n")
class Sync:
    """Processes queued replication messages (tb_sync_inbox/outbox) between a
    master and its slaves: inserts, updates, deletes, primary-key rewrites,
    acknowledgements and client registration.

    NOTE(review): SQL throughout is built with f-strings/format from message
    payloads — vulnerable to SQL injection if any payload is untrusted.
    """

    def __init__(self):
        self.syncDB = DatabaseConnection(env.DB_HOST, env.DB_UNAME,
                                         env.DB_PASSWORD, env.DB_NAME)
        # self.statusDB = DatabaseConnection(
        #     env.DB_HOST, env.DB_UNAME, env.DB_PASSWORD, env.DB_NAME)
        self.limitRow = env.LIMIT_PROC_ROW
        # NOTE(review): self.outbox is assigned twice (again below) — the
        # first construction is redundant.
        self.outbox = Outbox(self.syncDB)
        self.systemlog = SystemLog()
        self.inbox = Inbox(self.syncDB)
        self.outbox = Outbox(self.syncDB)
        self.clientIdStartFrom = 10
        self.updateToZeroHistory = set([])  # codes of rows parked at PK 0
        self.PKFileName = 'pk'              # persistence file for the set above
        self.nextPriToProcess = dict()      # table -> next PRI value to process
        self.PRIFileName = 'pri'            # persistence file for the dict above

    def getClient(self):
        """Return every registered sync client row."""
        sql = "select * from tb_sync_client"
        return self.syncDB.executeFetchAll(sql)

    def _getPrimaryKeyColumn(self, table):
        """Return the name of *table*'s primary-key column (information_schema)."""
        db_name = env.DB_NAME
        sql = """ select COLUMN_NAME from information_schema.COLUMNS where TABLE_SCHEMA='{}' and TABLE_NAME='{}' and COLUMN_KEY='PRI' """.format(db_name, table)
        res = self.syncDB.executeFetchOne(sql)
        return res['data']['COLUMN_NAME']

    def setPriority(self, id, table, priority):
        """Set the priority column of row *id* in *table* (lower = sooner)."""
        db_name = env.DB_NAME
        sql = """ select COLUMN_NAME from information_schema.COLUMNS where TABLE_SCHEMA='{}' and TABLE_NAME='{}' and COLUMN_KEY='PRI' """.format(db_name, table)
        res = self.syncDB.executeFetchOne(sql)
        if (res['execute_status']):
            primary_key = res['data']['COLUMN_NAME']
            # update primary key
            sql = "update {} set priority={} where {}={}"
            update = self.syncDB.executeCommit(
                sql.format(table, priority, primary_key, id))
            if (update):
                # update PK success
                print("Updated Priority")
            else:
                self.systemlog.insert(
                    "Sync.setPriority",
                    json.dumps(self.syncDB.getLastCommitError()))

    def processInsert(self, data):
        """Execute an INS message's query and queue the resulting PK exchange."""
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        # (was: notify that the message is being processed)
        # self.sendStatusUpdate(data, 'PROC')
        insert = self.syncDB.executeCommit(data['query'])
        rowId = self.syncDB.lastRowId
        if (insert):
            # (was: only the master sends NEEDPK to the slave)
            # if(env.MASTER_MODE):
            #     self.sendStatusUpdate(data, 'NEEDPK')
            print("Status: OK")
            # set result primary key to table inbox
            insert = self.inbox.update(
                data={
                    'result_primary_key': rowId,
                },
                where_clause={'inbox_id': data['inbox_id']})
            # if the msg is sent from master
            # update primary key right away
            if (data['master_status'] == 1):
                # insert into the inbox so it is handled in a later pass
                inbox = {
                    'row_id': rowId,
                    'table_name': data['table_name'],
                    'msg_type': 'PRI',
                    'msg_id': 0,
                    'query': data['row_id'],
                    'client_unique_id': data['client_unique_id'],
                    'master_status': 0,
                    'priority': 1
                }
                if (not self.inbox.insert(inbox)):
                    print(self.syncDB.getLastCommitError())
            elif (env.MASTER_MODE):
                # the master sends the resulting insert PK back to the
                # slave that originated the insert message
                msg = {
                    'row_id': data['row_id'],  # local row id
                    'table_name': data['table_name'],
                    'msg_type': 'PRI',
                    'query': rowId,  # receiver outbox_id
                    'client_unique_id': data['client_unique_id'],
                    'msg_id': 0,
                    'priority': 1
                }
                tes = self.outbox.insert(msg)
                # print(tes)
                if (not tes):
                    print(self.syncDB.getLastCommitError())
            self.setAsProcessed(data['inbox_id'])
        else:
            # downgrade priority to 3 so the message is retried later
            self.setPriority(data['inbox_id'], 'tb_sync_inbox', 3)
            print('error, downgrade priority')
        return True

    def getZeroPKHistory(self):
        """Load the zero-PK history set from disk (if the file has content)."""
        file = open(self.PKFileName, 'r')
        file_value = file.read()
        if (file_value):
            self.updateToZeroHistory = set(literal_eval(file_value))
        file.close()

    def getPriToProcess(self):
        """Load the table -> next-PRI map from disk (if the file has content)."""
        file = open(self.PRIFileName, 'r')
        file_value = file.read()
        if (file_value):
            self.nextPriToProcess = literal_eval(file_value)
        file.close()

    def updateZeroPKHistory(self):
        """Persist the zero-PK history set to disk."""
        file = open(self.PKFileName, 'w')
        file.write(str(list(self.updateToZeroHistory)))
        file.close()

    def updatePriToProcess(self):
        """Persist the table -> next-PRI map to disk."""
        file = open(self.PRIFileName, 'w')
        file.write(str(self.nextPriToProcess))
        file.close()

    def processPrimaryKey(self, data):
        """Handle a PRI message: rewrite a local row's PK to the master's value."""
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        if (data['row_id'] == int(data['query'])):
            # PK already matches — nothing to rewrite.
            self.setAsProcessed(data['inbox_id'])
            print("Status: OK Same PK")
            return True
        self.getPriToProcess()
        print(self.nextPriToProcess)
        if (data['table_name'] in self.nextPriToProcess):
            if (int(data['query']) != self.nextPriToProcess[data['table_name']]):
                # Not this table's next expected PRI — defer.
                print(
                    f"Status: {data['query']}/{self.nextPriToProcess[data['table_name']]}"
                )
                return True
        self.getZeroPKHistory()
        # check whether this PRI is in the zero-update history
        row_id = data['row_id']
        if (len(self.updateToZeroHistory) > 0):
            # look for a matching history entry
            code = f"{data['table_name']}{data['row_id']}"
            zeroExecMode = False
            if (code in self.updateToZeroHistory):
                print("Mode: 0 Exec")
                data['row_id'] = 0
                res = self.doUpdatePK(data)
                if (res):
                    self.updateToZeroHistory.remove(code)
            else:
                # skip
                res = self.doUpdatePK(data)
        else:
            # execute the update straight away
            res = self.doUpdatePK(data)
        print("Status: ", end="")
        print("OK") if res else print("ERROR")
        self.updateZeroPKHistory()
        # (the PK-column lookup happens inside doUpdatePK)
        # print(db_name)

    def doUpdatePK(self, data):
        """Rewrite row_id -> query in data['table_name'] and fan out fixups.

        On failure, parks the row at PK 0 and records it in the zero-PK
        history so a later pass can complete the rewrite.
        """
        db_name = env.DB_NAME
        sql = """ select COLUMN_NAME from information_schema.COLUMNS where TABLE_SCHEMA='{}' and TABLE_NAME='{}' and COLUMN_KEY='PRI' """.format(db_name, data['table_name'])
        res = self.syncDB.executeFetchOne(sql)
        if (res['execute_status']):
            primary_key = res['data']['COLUMN_NAME']
            update_from = data['row_id']
            update_to = data['query']
            print(f"From: {update_from} To: {update_to}")
            sql = "update {} set {}={} where {}={}"
            update = self.syncDB.executeCommit(
                sql.format(data['table_name'], primary_key, update_to,
                           primary_key, update_from))
            if (update):
                # mark the outbox status as done
                if (update_from == 0):
                    self.nextPriToProcess.pop(data['table_name'])
                else:
                    self.nextPriToProcess[data['table_name']] = update_from
                self.updatePriToProcess()
                if (data['msg_id'] == 0):
                    # PRI message generated by the slave — fetch the INS message
                    insMsg = self.syncDB.executeFetchOne(
                        f"select * from tb_sync_inbox where row_id={data['query']} and msg_type='INS' and table_name='{data['table_name']}'"
                    )
                    self.sendStatusUpdate(insMsg['data'], 'DONE')
                else:
                    # PRI message received from the master
                    updateQ = f"update tb_sync_outbox set status='done' where table_name='{data['table_name']}' and msg_type='INS' and row_id = {data['row_id']}"
                    self.syncDB.executeCommit(updateQ)
                # update PK success
                # check other pending messages that still use the old PK
                # and rewrite them to the new one
                if (not env.MASTER_MODE):
                    check = "select * from tb_sync_outbox where (status = 'waiting' or status='canceled') and (msg_type = 'DEL' or msg_type='UPD') and row_id = {}"
                    res = self.syncDB.executeFetchAll(
                        check.format(data['row_id']))
                    if (res['execute_status']):
                        # rewrite to the correct PK
                        for msg in res['data']:
                            query = "update tb_sync_outbox set row_id={}, status='waiting' where outbox_id={}"
                            updated = self.syncDB.executeCommit(
                                query.format(data['query'], msg['outbox_id']))
                            if (not updated):
                                print(self.syncDB.getLastCommitError()['msg'])
                    else:
                        print("CHECK PESAN LAIN ERROR: {}".format(
                            res['error_data']['msg']))
                self.setAsProcessed(data['inbox_id'])
                return True
            else:
                # update failed: record in zero history and retry later
                self.setPriority(data['inbox_id'], 'tb_sync_inbox', 3)
                allowToAdd = True
                for item in self.updateToZeroHistory:
                    if (data['table_name'] in item):
                        allowToAdd = False
                        break
                if (allowToAdd):
                    self.nextPriToProcess[data['table_name']] = update_from
                    self.updatePriToProcess()
                    code = f"{data['table_name']}{data['row_id']}"
                    self.updateToZeroHistory.add(code)
                    # park the row at PK 0 so the rewrite can be retried
                    update = self.syncDB.executeCommit(
                        sql.format(data['table_name'], primary_key, 0,
                                   primary_key, update_from))
                return False
                # (goal: change the primary key to 0)

    def processUpdate(self, data):
        """Apply an UPD message if it is newer than the current row."""
        # self.sendStatusUpdate(data, "PROC")
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        # check whether this message is newer than the current data
        primary_key = self._getPrimaryKeyColumn(data['table_name'])
        row_data = self.syncDB.executeFetchOne(
            f"select * from {data['table_name']} where {primary_key}={data['row_id']}"
        )
        print(
            f"{row_data['data']['last_action_at']} : {data['first_time_occur_at']}"
        )
        if (row_data['data']['last_action_at'] < data['first_time_occur_at']):
            # the message carries newer data — apply it
            execute = self.syncDB.executeCommit(data['query'])
            if (not execute):
                print("Status: ERROR")
            else:
                self.setAsProcessed(data['inbox_id'])
                self.sendStatusUpdate(data, "DONE")
                print("Status: OK")
        else:
            # the message carries stale data — acknowledge without applying
            self.setAsProcessed(data['inbox_id'])
            self.sendStatusUpdate(data, "DONE")
            print("Status: OLD DATA")

    def processDelete(self, data):
        """Apply a DEL message unless a pending PRI still targets the row."""
        # self.sendStatusUpdate(data, "PROC")
        # check whether a PRI-type inbox entry exists for this primary key;
        # if so skip (the PK rewrite must happen first), otherwise delete.
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        checkQuery = """ select count(inbox_id) as total from tb_sync_inbox where msg_type = 'PRI' and status = 'waiting' and table_name = '{}' and query = '{}' """
        result = self.syncDB.executeFetchOne(
            checkQuery.format(data['table_name'], data['query']))
        if (result['execute_status']):
            if (result['data']['total'] > 0):
                print('Skip, total PRI: {}'.format(result['data']['total']))
            else:
                dltQuery = "delete from {} where {}={}"
                pkColumnName = self._getPrimaryKeyColumn(data['table_name'])
                delete = self.syncDB.executeCommit(
                    dltQuery.format(data['table_name'], pkColumnName,
                                    data['row_id']))
                if (delete):
                    self.sendStatusUpdate(data, "DONE")
                    self.setAsProcessed(data['inbox_id'])
                    print("Status: OK")
                else:
                    self.setPriority(data['inbox_id'], 'tb_sync_inbox', 3)
                    print("Status: ERROR")

    def processAck(self, data):
        """Handle an ACK: advance the acknowledged outbox row's status."""
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        obox = self.syncDB.executeFetchOne(
            f"select * from tb_sync_outbox where outbox_id = {data['query']}")
        ack = True
        if (obox['data']):
            if (obox['data']['msg_type'] == 'INS'):
                status = 'need_pk_update'
            else:
                status = 'arrived'
            ack = self.outbox.update(data={'status': status},
                                     where_clause={'outbox_id': data['query']})
            # ack = self.syncDB.executeCommit(
            #     f"update tb_sync_outbox set status='{status}' where outbox_id={data['query']}")
            # ackQuery = "update tb_sync_outbox set is_arrived=1, status='arrived' where outbox_id = {}".format(
            #     data['query'])
            # ack = self.syncDB.executeCommit(ackQuery)
        if (not ack):
            self.outbox.update(data={'status': 'error'},
                               where_clause={'outbox_id': data['msg_id']})
            print("Status: ERROR")
            # errorQuery = 'update tb_sync_outbox set is_error=1 where outbox_id = {}'.format(
            #     data['msg_id'])
            # self.syncDB.executeCommit(errorQuery)
            # self.systemlog.insert("processACK", "Gagal update ACK ID#{} ERROR: {}".format(
            #     data['inbox_id'], self.statusDB.getLastCommitError()['msg']))
        else:
            self.setAsProcessed(data['inbox_id'])
            print("Status: OK")

    def processReg(self, data):
        """Handle a REG message: register a new client (master only)."""
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        if (env.MASTER_MODE):
            time.sleep(0.2)
            # payload format: "key:value#key:value#..."
            regData = data['query'].split('#')
            reg = {}
            for item in regData:
                attributes = item.split(':')
                reg[attributes[0]] = attributes[1]
            # check whether the IP address is already registered
            checkQuery = f"select count(*) as total from tb_sync_client where client_ip = '{reg['ip_address']}'"
            check = self.syncDB.executeFetchOne(checkQuery)
            if (check['data']['total'] > 0):
                outbox = {
                    'row_id': 0,
                    'table_name': '',
                    'msg_type': 'REG',
                    'msg_id': 0,
                    'query': f"status:ERROR#reason:IP Address sudah digunakan#for:{data['msg_id']}",
                    'client_unique_id': 0,
                    'client_ip': reg['ip_address'],
                    'client_port': reg['port'],
                    'client_key': reg['secret_key'],
                    'client_iv': reg['iv_key']
                }
                self.outbox.insert(outbox)
                self.setAsProcessed(data['inbox_id'])
            else:
                # allocate the next client_unique_id
                client_id_check_q = "select ifnull(max(client_unique_id), 0) as id from tb_sync_client"
                client_id = self.syncDB.executeFetchOne(client_id_check_q)
                if (client_id['data']['id'] == 0):
                    client_id = self.clientIdStartFrom
                else:
                    client_id = client_id['data']['id'] + 1
                sql = f"insert into tb_sync_client(client_unique_id, client_key, client_iv, client_port, client_ip) values({client_id}, '{reg['secret_key']}', '{reg['iv_key']}', {reg['port']}, '{reg['ip_address']}')"
                inserted = self.syncDB.executeCommit(sql)
                if (not inserted):
                    self.setPriority(data['inbox_id'], 'tb_sync_inbox', 3)
                else:
                    outbox = {
                        'row_id': 0,
                        'table_name': '',
                        'msg_type': 'REG',
                        'msg_id': 0,
                        'query': f"status:OK#id:{client_id}#for:{data['msg_id']}",
                        'client_unique_id': client_id
                    }
                    self.outbox.insert(outbox)
                    self.setAsProcessed(data['inbox_id'])
                    print("Status: OK")
        else:
            # this node is not the master — reject the registration
            outbox = {
                'row_id': 0,
                'table_name': '',
                'msg_type': 'REG',
                'msg_id': 0,
                'query': f"status:ERROR#reason:Host bukan master#for:{data['msg_id']}",
                'client_unique_id': 0,
                'client_ip': reg['ip_address'],
                'client_port': reg['port'],
                'client_key': reg['secret_key'],
                'client_iv': reg['iv_key']
            }
            self.outbox.insert(outbox)
            self.setAsProcessed(data['inbox_id'])
            print(f'Status: ERROR')

    def getData(self):
        """Fetch the next batch of waiting inbox rows, plus due PRI messages."""
        self.syncDB.connect()
        sql = "(select * from tb_sync_inbox where status = 'waiting' and msg_type <> 'PRI' order by priority asc, inbox_id asc, occur_at asc)"
        if (self.limitRow > 0):
            sql += f' limit {self.limitRow}'
        self.getPriToProcess()
        additionalQuery = ""
        excludeTables = ""
        # add a query fetching the PRI that must be processed per table
        if (len(self.nextPriToProcess) > 0):
            for item in self.nextPriToProcess:
                additionalQuery += f" union (select * from tb_sync_inbox where status = 'waiting' and msg_type = 'PRI' and table_name = '{item}' and query = '{self.nextPriToProcess[item]}' order by first_time_occur_at asc, priority asc)"
                if (excludeTables != ''):
                    excludeTables += f" or table_name <> '{item}'"
                else:
                    excludeTables += f"table_name <> '{item}'"
        # build a query taking one PRI per table, except excluded tables
        if (excludeTables == ''):
            # take one pending PK message per table
            # additionalQuery = ''
            additionalQuery += f" union (SELECT * FROM tb_sync_inbox WHERE msg_type = 'PRI' AND STATUS='waiting' GROUP BY table_name ORDER BY first_time_occur_at ASC, priority ASC)"
        else:
            additionalQuery += f" union (select * from tb_sync_inbox where status = 'waiting' and msg_type = 'PRI' and ({excludeTables}) group by table_name order by first_time_occur_at asc, priority asc)"
        # print(sql + additionalQuery)
        data = self.syncDB.executeFetchAll(sql + additionalQuery, False)
        self.syncDB.close()
        return data

    def getStatusInbox(self):
        """Fetch waiting ACK/DONE rows.

        NOTE(review): unlike getData, the limit clause here is missing the
        'limit' keyword — `sql += f' {self.limitRow}'` appends a bare number,
        which is almost certainly a bug.
        """
        sql = "select * from tb_sync_inbox where status = 'waiting' and (msg_type = 'ACK' or msg_type = 'DONE') order by priority asc, inbox_id asc, occur_at asc"
        if (self.limitRow > 0):
            sql += f' {self.limitRow}'
        data = self.syncDB.executeFetchAll(sql)
        return data

    def getSyncInbox(self):
        """Fetch waiting data-sync rows (INS/UPD/DEL/REG/PRI).

        NOTE(review): same missing 'limit' keyword as getStatusInbox.
        """
        sql = "select * from tb_sync_inbox where status = 'waiting' and (msg_type = 'INS' or msg_type = 'UPD' or msg_type = 'DEL' or msg_type = 'REG' or msg_type = 'PRI') order by priority asc, inbox_id asc, occur_at asc"
        if (self.limitRow > 0):
            sql += f' {self.limitRow}'
        data = self.syncDB.executeFetchAll(sql)
        return data

    def setAsProcessed(self, id, status='done'):
        """Mark inbox row *id* with *status* (default 'done')."""
        # NOTE(review): `set` shadows the builtin here (harmless but unidiomatic).
        set = self.inbox.update(data={'status': status},
                                where_clause={'inbox_id': id})
        # query = 'update tb_sync_inbox set is_process=1 where inbox_id = {}'.format(
        #     id)
        # print(set)

    def sendStatusUpdate(self, data, status):
        """Queue an outbox status message (*status*) back to the message's sender."""
        return self.outbox.insert({
            'row_id': data['row_id'],  # local row id
            'table_name': data['table_name'],
            'msg_type': status,
            'query': data['msg_id'],  # receiver outbox_id
            'client_unique_id': data['client_unique_id'],
            'msg_id': 0,
            'priority': 1
        })

    def updateOutboxStatus(self, id, status, inbox_id):
        """Set outbox row *id* to *status*; mark or re-prioritise the inbox row."""
        upd = self.syncDB.executeCommit(
            f"update tb_sync_outbox set status='{status}' where outbox_id={id}"
        )
        if (upd):
            self.setAsProcessed(inbox_id)
        else:
            self.setPriority(inbox_id, 'tb_sync_inbox', 3)

    def canProcessMsg(self, data):
        """Return whether a data message may be processed now.

        NOTE(review): when env.MASTER_MODE is true this falls off the end and
        returns None (falsy); also the trailing print is unreachable.
        """
        watchedMsgType = ['INS', 'UPD', 'DEL']
        if (data['msg_type'] not in watchedMsgType):
            return True
        # check whether any watched-type message is still unfinished before
        # this inbox_id; a slave must ensure all of its outbox messages have
        # been processed by the master before executing its inbox
        if (not env.MASTER_MODE):
            previousMsgs = self.syncDB.executeFetchOne(
                "select count(*) as total from tb_sync_outbox where (msg_type = 'INS' or msg_type='UPD' or msg_type='DEL') and status <> 'done'"
            )
            if (previousMsgs['data']['total'] > 0):
                return False
            else:
                return True
            print(previousMsgs)

    def process(self, inbox):
        """Dispatch each fetched inbox row to its per-type handler.

        NOTE(review): the DONE branch uses self.statusDB, which is commented
        out in __init__ — reaching it raises AttributeError.
        """
        if (inbox):
            for item in inbox:
                # process non-INS/UPD/DEL messages first; do not process a
                # main message while unfinished INS/UPD/DEL messages remain;
                # for INS/UPD/DEL, check for deferred messages
                # NOTE(review): delayMsgInboxQ is built but never used.
                delayMsgInboxQ = "select count(*) from tb_sync_inbox where status "
                print("[{}] -> #{}".format(
                    datetime.datetime.now().strftime("%d-%m-%Y %H:%M:%S"),
                    item['msg_id']),
                      end=" ")
                msgType = item['msg_type']
                if (msgType == 'INS'):
                    self.processInsert(item)
                elif (msgType == 'UPD'):
                    self.processUpdate(item)
                elif (msgType == 'DEL'):
                    self.processDelete(item)
                elif (msgType == 'ACK'):
                    self.processAck(item)
                elif (msgType == "PRI"):
                    self.processPrimaryKey(item)
                elif (msgType == 'REG'):
                    self.processReg(item)
                elif (msgType == 'PROC'):
                    print(
                        self.updateOutboxStatus(item['query'], "processing",
                                                item['inbox_id']))
                elif (msgType == 'NEEDPK'):
                    print(
                        self.updateOutboxStatus(item['query'], "need_pk_update",
                                                item['inbox_id']))
                elif (msgType == 'DONE'):
                    done = self.statusDB.executeCommit(
                        f"update tb_sync_outbox set status = 'done' where outbox_id = {item['query']}"
                    )
                    if (done):
                        print(
                            self.statusDB.executeCommit(
                                f"update tb_sync_inbox set status='done' where inbox_id={item['inbox_id']}"
                            ))
                    else:
                        print("False")
                    # print(self.updateOutboxStatus(
                    #     item['query'], "done", item['inbox_id']))
                else:
                    self.syncDB.insError("Msg type not found for id=" +
                                         str(item['inbox_id']))
            # print(f"finish at: {time.time()}")
            # append a wall-clock marker for each completed batch
            file = open("proctime.text", 'a')
            file.write(f"{time.time()}\n")
            file.close()
        else:
            # nothing to do — back off briefly
            time.sleep(0.3)
import MySQLdb as mdb
from smtplib import SMTP
import requests
from backports import configparser as ConfigParser
import os
#import ConfigParser, os
import re
import datetime
import sys
import hashlib
from sender import Mail, Message
import time
import tempfile
import uuid

# NOTE(review): Inbox is not imported in the visible import block — unless it
# arrives via an import outside this view, this line raises NameError.
inbox = Inbox()


def checkChain(rule, to, sender, subject):
    """Evaluate one filter rule against a message's sender and To headers.

    rule is indexed positionally: rule[2] = sender regex, rule[3] = To regex,
    rule[4] = flag "all To headers must match" (TODO confirm schema).
    """
    check = True  # Status of this Rule
    # Check if Sender Match
    if rule[2] != None and re.match(rule[2], sender) is None:
        check = False
    # Check if To Match
    oneToFails = False  # If it gets True, min one To header doesn't match
    oneToMatch = False  # If it gets True, min one To header matches
    for mail in to:
        if rule[3] != None and re.match(rule[3], mail) is None:
            oneToFails = True
        elif rule[3] != None and re.match(rule[3], mail) is not None:
            oneToMatch = True
    # Rule says all To headers must match but they don't, so
    # NOTE(review): truncated here — the consequence branch is outside this view.
    if rule[4] == True and oneToFails == True:
#pip install inbox #dasinbox.py 0.0.0.0 4467 from inbox import Inbox inbox = Inbox() @inbox.collate def handle(to, sender, subject, body): ... # Bind directly. inbox.serve(address='0.0.0.0', port=4467) if __name__ == '__main__': inbox.dispatch()
#
# Basic idea:
# inbox.py instantiates a standalone mail server based in python and calls the handle function
# when it receives a new message. We then "handle" the message by saving it's text to a .txt file
# and a .json blob file.
#
# Todo:
# Handle encoding better
# Figure out how to handle attachments
# Stability and watchdogs

from inbox import Inbox
import time, json

#Create the inbox.py object
inbox = Inbox()


@inbox.collate
#Our message handling function
def handle(to, sender, subject, body):
    """Save each incoming message as both a .txt and a .json file."""
    # fix: take the timestamp once — calling time.time() separately for each
    # file could yield two different filenames for the same message.
    base = "/home/ubuntu/newspoc/" + sender + "-" + subject + "-" + str(int(time.time()))
    #Write the components to the .txt file separated by newlines (ok but a little more readable)
    with open(base + ".txt", "w") as file:
        file.write(str(to) + "\n")
        file.write(str(sender) + "\n")
        file.write(str(subject) + "\n")
        file.write(body + "\n")
    #Write the components to the .json file, better for processing later but doesn't solve encoding
    with open(base + ".json", "w") as jsonfile:
        jsonfile.write(json.dumps({"to": to, "sender": sender,
                                   "subject": subject, "body": body}))

#Start the inbox.py server on our local ip address
# import smtpd # import asyncore # class CustomSMTPServer(smtpd.SMTPServer): # def process_message(self, peer, mailfrom, rcpttos, data): # print 'Receiving message from:', peer # print 'Message addressed from:', mailfrom # print 'Message addressed to :', rcpttos # print 'Message length :', len(data) # return # server = CustomSMTPServer(('127.0.0.1', 1025), None) # asyncore.loop() from inbox import Inbox inbox = Inbox() @inbox.collate def handle(to, sender, subject, body): print sender print to print subject print body # Bind directly. inbox.serve(address='0.0.0.0', port=465)
from re import match
from inbox import Inbox
from crm.settings import ATTACHMENTS_DIR, STATIC_URL_PATH, SENDGRID_API_KEY, SUPPORT_EMAIL
from crm.mailer import sendemail, parse_email_body
from crm.db import RootModel, db
from crm import app
from crm.apps.user.models import User
from crm.apps.contact.models import Contact
from crm.apps.message.models import Message, MessageState
from crm.apps.link.models import Link
import click

# Address patterns: "<5-char objid>_<root obj type>@<domain>" and the
# plain support mailbox.
PATTERN_TO_ROOTOBJ = r'(?P<objid>\w{5})_(?P<rootobjtype>\w+)@(?P<domain>.+)'
PATTERN_SUPPORT_EMAIL = r'support@(?P<domain>.+)'

inbox = Inbox()


@inbox.collate
def handle_mail(to, sender, subject, body):
    """
    Fired on every new email received

    @param to [str]: receivers list. [should be in format $uid_roottypeobj@$domain].
    @param sender str: sender email. [should be in CRM database users/contacts emails]
    @param subject str: subject
    @param body email.Message: email message object.

    If sender is not in recognized senders (contacts/users emails)
    an email will be sent back to him to contact support.

    If sender is in recognized senders: we get the correct object receiving
    the message and attach the email text body to its messages.

    If receiver is SUPPORT_EMAIL: an email will be sent to it using sendgrid.
    """
    # NOTE(review): truncated here — the function body is outside this view.
import json

# NOTE(review): this chunk appears truncated at the top — argparse, email,
# base64, Slacker, Inbox and collect_sub_messages are used below but not
# imported in the visible lines.
parser = argparse.ArgumentParser(
    description='Posts first image attachment of emails to a slack channel')
parser.add_argument('-c', metavar='<config file path>', dest='config',
                    help='The location of your config file', required=True)
args = parser.parse_args()

# Load up the config from our JSON file...
config = {}
with open(args.config, 'r') as f:
    config = json.load(f)

slack = Slacker(config['slack_key'])

inbox = Inbox()


@inbox.collate
def handle(to, sender, subject, body):
    # Flatten the MIME tree, then pull out the plain text (decoding base64
    # transfer encoding when present).
    msg = email.message_from_string(body)
    msg_parts = collect_sub_messages(msg, [])
    image_contents = ""
    msg_txt = ""
    for part in msg_parts:
        partType = part.get_content_type()
        if partType == "text/plain":
            encoding = part.get_all("content-transfer-encoding")
            if encoding is not None and 'base64' in encoding:
                msg_txt = base64.b64decode(part.get_payload())
    # NOTE(review): truncated here — the rest of the handler is outside this view.