class Log:
    """Read-side accessor for the sync outbox table (`tb_sync_outbox`)."""

    # Legacy class-level defaults; __init__ replaces the live connection
    # with values taken from `env`, so these are effectively unused.
    db = None
    host = 'localhost'
    username = '******'
    password = '******'
    database = 'db_ta_monitor'

    def __init__(self):
        # Row cap applied to log queries; a value <= 0 disables the cap.
        self.rowLimit = env.LOG_ROW_LIMIT
        self.db = DatabaseConnection(
            env.DB_HOST, env.DB_UNAME, env.DB_PASSWORD, env.DB_NAME)

    # ///////////////////////////////////////////
    def getUnproceessLog(self):
        """Return outbox rows still in 'waiting' state, oldest/highest priority first.

        NOTE(review): the commented-out variant also retried 'sent' rows whose
        retry_again_at has elapsed; the active query deliberately does not.
        """
        # query = f"""select * from tb_sync_outbox
        # where (status = 'waiting') or
        # (status = 'sent' and retry_again_at <= now())
        # order by first_time_occur_at asc, priority asc"""
        query = f"""select * from tb_sync_outbox where (status = 'waiting') order by first_time_occur_at asc, priority asc"""
        if env.LOG_ROW_LIMIT > 0:
            query = query + f' limit {env.LOG_ROW_LIMIT}'
        return self.db.executeFetchAll(sql=query)
# One-off maintenance script: find KRS (study-plan) rows whose course offering
# (`tb_mktawar`) belongs to a different program than the student, then prepare
# queries to repoint them at the matching offering.
# NOTE(review): `host` and `username` are referenced below but defined earlier
# in the original file (outside this chunk).
password = '******'
database = 'db_simak'
rowLimit = '200'  # NOTE(review): string, not int — confirm consumers expect that
db = DatabaseConnection(host, username, password, database)

# Students whose 2019/semester-1 KRS references an offering from a
# different program_id than their own.
sql = """ SELECT tb_mahasiswa.mahasiswa_id, nim, nama, tb_mahasiswa.program_id FROM tb_mahasiswa JOIN tb_krs ON tb_krs.mahasiswa_id = tb_mahasiswa.mahasiswa_id JOIN tb_mktawar ON tb_krs.`mktawar_id` = tb_mktawar.`mktawar_id` WHERE tb_krs.`tahunajaran`=2019 AND tb_krs.semester=1 AND tb_mktawar.`program_id` <> tb_mahasiswa.program_id and tb_krs.flag = 1 GROUP BY tb_mahasiswa.`mahasiswa_id` """
res = db.executeFetchAll(sql)
print("Memproses {} data".format(len(res)))

# Per-student KRS detail for the same term (format with mahasiswa_id).
queryKrs = """ Select krs_id, tb_krs.mktawar_id, program_id, tb_mktawar.matakuliah_id, kelas from tb_krs join tb_mktawar on tb_mktawar.mktawar_id = tb_krs.mktawar_id where mahasiswa_id = {} and tb_krs.tahunajaran=2019 and tb_krs.semester=1 and tb_krs.flag=1"""
krs = None

# Equivalent offering in the student's own program
# (format with program_id, matakuliah_id, kelas).
queryPenawaranTerkait = """ select mktawar_id, kelas, program_id, matakuliah_id from tb_mktawar where tahunajaran=2019 and semester=1 and program_id = {} and matakuliah_id= {} and kelas='{}' and flag=1 """

# Repoint a KRS row at the corrected offering (format with mktawar_id, krs_id).
queryUpdateKrs = "update tb_krs set mktawar_id = {} where krs_id = {}"
class Sync:
    """Inbox-side processor of the DB replication protocol.

    Consumes messages from `tb_sync_inbox` (INS/UPD/DEL/PRI/ACK/REG/
    PROC/NEEDPK/DONE) and applies them to the local database, replying
    through `tb_sync_outbox`. Master/slave behavior is switched by
    `env.MASTER_MODE`.

    NOTE(review): most SQL in this class is built by string interpolation —
    an injection risk if any message field can carry untrusted text.
    """

    def __init__(self):
        self.syncDB = DatabaseConnection(env.DB_HOST, env.DB_UNAME, env.DB_PASSWORD, env.DB_NAME)
        # self.statusDB = DatabaseConnection(
        #     env.DB_HOST, env.DB_UNAME, env.DB_PASSWORD, env.DB_NAME)
        # NOTE(review): self.statusDB is still used by process() for DONE
        # messages below — with the line above commented out that path
        # raises AttributeError.
        self.limitRow = env.LIMIT_PROC_ROW  # max rows fetched per cycle (<=0 disables)
        self.outbox = Outbox(self.syncDB)
        self.systemlog = SystemLog()
        self.inbox = Inbox(self.syncDB)
        self.outbox = Outbox(self.syncDB)  # NOTE(review): duplicate of the assignment above
        self.clientIdStartFrom = 10  # first client_unique_id handed out by REG
        self.updateToZeroHistory = set([])  # "<table><row_id>" codes parked at PK 0
        self.PKFileName = 'pk'  # file persisting updateToZeroHistory
        self.nextPriToProcess = dict()  # table -> next PK value whose PRI may run
        self.PRIFileName = 'pri'  # file persisting nextPriToProcess

    def getClient(self):
        """Return every registered sync client row."""
        sql = "select * from tb_sync_client"
        return self.syncDB.executeFetchAll(sql)

    def _getPrimaryKeyColumn(self, table):
        """Look up `table`'s primary-key column name via information_schema."""
        db_name = env.DB_NAME
        sql = """ select COLUMN_NAME from information_schema.COLUMNS where TABLE_SCHEMA='{}' and TABLE_NAME='{}' and COLUMN_KEY='PRI' """.format(db_name, table)
        res = self.syncDB.executeFetchOne(sql)
        return res['data']['COLUMN_NAME']

    def setPriority(self, id, table, priority):
        """Set `priority` on the row of `table` whose primary key equals `id`.

        Used to downgrade failed messages so they retry later. Logs to the
        system log when the update fails.
        """
        db_name = env.DB_NAME
        sql = """ select COLUMN_NAME from information_schema.COLUMNS where TABLE_SCHEMA='{}' and TABLE_NAME='{}' and COLUMN_KEY='PRI' """.format(db_name, table)
        res = self.syncDB.executeFetchOne(sql)
        if (res['execute_status']):
            primary_key = res['data']['COLUMN_NAME']
            # update the priority column keyed by the primary key
            sql = "update {} set priority={} where {}={}"
            update = self.syncDB.executeCommit(
                sql.format(table, priority, primary_key, id))
            if (update):
                # priority update succeeded
                print("Updated Priority")
            else:
                self.systemlog.insert(
                    "Sync.setPriority",
                    json.dumps(self.syncDB.getLastCommitError()))

    def processInsert(self, data):
        """Execute an INS message's query locally and arrange PK reconciliation.

        On success, records the new local row id into the inbox row and either
        queues a local PRI (message came from master) or sends the resulting
        PK back to the originating slave (master mode). On failure, downgrades
        the inbox row's priority for retry. Always returns True.
        """
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        # announce that the message is being processed
        # self.sendStatusUpdate(data, 'PROC')
        insert = self.syncDB.executeCommit(data['query'])
        rowId = self.syncDB.lastRowId  # auto-increment id produced by the insert
        if (insert):
            # only the master sends NEEDPK to the slave
            # if(env.MASTER_MODE):
            #     self.sendStatusUpdate(data, 'NEEDPK')
            print("Status: OK")
            # store the resulting primary key on the inbox row
            insert = self.inbox.update(
                data={
                    'result_primary_key': rowId,
                },
                where_clause={'inbox_id': data['inbox_id']})
            # if the msg is sent from master
            # update primary key right away
            if (data['master_status'] == 1):
                # queue a local PRI so the PK swap happens in a later cycle
                inbox = {
                    'row_id': rowId,
                    'table_name': data['table_name'],
                    'msg_type': 'PRI',
                    'msg_id': 0,
                    'query': data['row_id'],
                    'client_unique_id': data['client_unique_id'],
                    'master_status': 0,
                    'priority': 1
                }
                if (not self.inbox.insert(inbox)):
                    print(self.syncDB.getLastCommitError())
            elif (env.MASTER_MODE):
                # master sends the insert's resulting PK back to the slave
                # that originated the INS message
                msg = {
                    'row_id': data['row_id'],  # local row id
                    'table_name': data['table_name'],
                    'msg_type': 'PRI',
                    'query': rowId,  # receiver outbox_id
                    'client_unique_id': data['client_unique_id'],
                    'msg_id': 0,
                    'priority': 1
                }
                tes = self.outbox.insert(msg)
                # print(tes)
                if (not tes):
                    print(self.syncDB.getLastCommitError())
            self.setAsProcessed(data['inbox_id'])
        else:
            # downgrade priority to 3 so the message retries later
            self.setPriority(data['inbox_id'], 'tb_sync_inbox', 3)
            print('error, downgrade priority')
        return True

    def getZeroPKHistory(self):
        """Load `updateToZeroHistory` from its persistence file (`pk`)."""
        file = open(self.PKFileName, 'r')
        file_value = file.read()
        if (file_value):
            self.updateToZeroHistory = set(literal_eval(file_value))
        file.close()

    def getPriToProcess(self):
        """Load `nextPriToProcess` from its persistence file (`pri`)."""
        file = open(self.PRIFileName, 'r')
        file_value = file.read()
        if (file_value):
            self.nextPriToProcess = literal_eval(file_value)
        file.close()

    def updateZeroPKHistory(self):
        """Persist `updateToZeroHistory` to its file (`pk`)."""
        file = open(self.PKFileName, 'w')
        file.write(str(list(self.updateToZeroHistory)))
        file.close()

    def updatePriToProcess(self):
        """Persist `nextPriToProcess` to its file (`pri`)."""
        file = open(self.PRIFileName, 'w')
        file.write(str(self.nextPriToProcess))
        file.close()

    def processPrimaryKey(self, data):
        """Handle a PRI message: rewrite a local row's PK to the master's PK.

        Skips no-op renames (same PK), respects the per-table ordering stored
        in `nextPriToProcess`, and uses the update-to-zero history to resume
        renames that were parked at PK 0 by doUpdatePK().
        """
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        if (data['row_id'] == int(data['query'])):
            # local and remote PK already agree — nothing to do
            self.setAsProcessed(data['inbox_id'])
            print("Status: OK Same PK")
            return True
        self.getPriToProcess()
        print(self.nextPriToProcess)
        if (data['table_name'] in self.nextPriToProcess):
            if (int(data['query']) != self.nextPriToProcess[data['table_name']]):
                # not this table's next expected PRI — defer
                print(
                    f"Status: {data['query']}/{self.nextPriToProcess[data['table_name']]}"
                )
                return True
        self.getZeroPKHistory()
        # check whether this PRI is in the update-to-zero history
        row_id = data['row_id']  # NOTE(review): unused local
        if (len(self.updateToZeroHistory) > 0):
            # look for an existing history entry
            code = f"{data['table_name']}{data['row_id']}"
            zeroExecMode = False  # NOTE(review): unused local
            if (code in self.updateToZeroHistory):
                # resume a rename previously parked at PK 0
                print("Mode: 0 Exec")
                data['row_id'] = 0
                res = self.doUpdatePK(data)
                if (res):
                    self.updateToZeroHistory.remove(code)
            else:
                # no matching history entry — run normally
                res = self.doUpdatePK(data)
        else:
            # no history at all — execute the update directly
            res = self.doUpdatePK(data)
        print("Status: ", end="")
        print("OK") if res else print("ERROR")
        self.updateZeroPKHistory()

    # find the primary key column name
    # print(db_name)
    def doUpdatePK(self, data):
        """Perform the actual PK rename for a PRI message.

        Returns True on success. On failure (typically a PK collision) the
        row is parked: priority is downgraded, the rename is recorded in
        `updateToZeroHistory`, and the row's PK is set to 0 so the rename
        can be retried later; returns False.
        """
        db_name = env.DB_NAME
        sql = """ select COLUMN_NAME from information_schema.COLUMNS where TABLE_SCHEMA='{}' and TABLE_NAME='{}' and COLUMN_KEY='PRI' """.format(db_name, data['table_name'])
        res = self.syncDB.executeFetchOne(sql)
        if (res['execute_status']):
            primary_key = res['data']['COLUMN_NAME']
            update_from = data['row_id']  # current local PK
            update_to = data['query']  # PK assigned by the master
            print(f"From: {update_from} To: {update_to}")
            sql = "update {} set {}={} where {}={}"
            update = self.syncDB.executeCommit(
                sql.format(data['table_name'], primary_key, update_to,
                           primary_key, update_from))
            if (update):
                # record which PK this table may process next
                if (update_from == 0):
                    self.nextPriToProcess.pop(data['table_name'])
                else:
                    self.nextPriToProcess[data['table_name']] = update_from
                self.updatePriToProcess()
                if (data['msg_id'] == 0):
                    # PRI generated locally by the slave:
                    # fetch the originating INS message and report DONE
                    insMsg = self.syncDB.executeFetchOne(
                        f"select * from tb_sync_inbox where row_id={data['query']} and msg_type='INS' and table_name='{data['table_name']}'"
                    )
                    self.sendStatusUpdate(insMsg['data'], 'DONE')
                else:
                    # PRI received from the master: close the matching
                    # outbox INS row
                    updateQ = f"update tb_sync_outbox set status='done' where table_name='{data['table_name']}' and msg_type='INS' and row_id = {data['row_id']}"
                    self.syncDB.executeCommit(updateQ)
                # PK update succeeded —
                # check other outbox messages still using the old PK
                # and rewrite them to the new PK
                if (not env.MASTER_MODE):
                    check = "select * from tb_sync_outbox where (status = 'waiting' or status='canceled') and (msg_type = 'DEL' or msg_type='UPD') and row_id = {}"
                    res = self.syncDB.executeFetchAll(
                        check.format(data['row_id']))
                    if (res['execute_status']):
                        # rewrite each pending message to the correct PK
                        for msg in res['data']:
                            query = "update tb_sync_outbox set row_id={}, status='waiting' where outbox_id={}"
                            updated = self.syncDB.executeCommit(
                                query.format(data['query'], msg['outbox_id']))
                            if (not updated):
                                print(self.syncDB.getLastCommitError()['msg'])
                    else:
                        print("CHECK PESAN LAIN ERROR: {}".format(
                            res['error_data']['msg']))
                self.setAsProcessed(data['inbox_id'])
                return True
            else:
                # rename failed — park this rename in the
                # update-to-zero history
                self.setPriority(data['inbox_id'], 'tb_sync_inbox', 3)
                allowToAdd = True
                for item in self.updateToZeroHistory:
                    if (data['table_name'] in item):
                        # only one parked rename per table at a time
                        allowToAdd = False
                        break
                if (allowToAdd):
                    self.nextPriToProcess[data['table_name']] = update_from
                    self.updatePriToProcess()
                    code = f"{data['table_name']}{data['row_id']}"
                    self.updateToZeroHistory.add(code)
                    # move the row's primary key to the sentinel value 0
                    update = self.syncDB.executeCommit(
                        sql.format(data['table_name'], primary_key, 0,
                                   primary_key, update_from))
        return False

    def processUpdate(self, data):
        """Handle an UPD message, applying it only if it is newer than the row.

        Compares the row's `last_action_at` against the message's
        `first_time_occur_at`; stale updates are acknowledged without
        being executed.
        """
        # self.sendStatusUpdate(data, "PROC")
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        # check whether this message is newer than the current row
        primary_key = self._getPrimaryKeyColumn(data['table_name'])
        row_data = self.syncDB.executeFetchOne(
            f"select * from {data['table_name']} where {primary_key}={data['row_id']}"
        )
        print(
            f"{row_data['data']['last_action_at']} : {data['first_time_occur_at']}"
        )
        if (row_data['data']['last_action_at'] < data['first_time_occur_at']):
            # incoming data is newer — apply it
            execute = self.syncDB.executeCommit(data['query'])
            if (not execute):
                print("Status: ERROR")
            else:
                self.setAsProcessed(data['inbox_id'])
                self.sendStatusUpdate(data, "DONE")
                print("Status: OK")
        else:
            # incoming data is older — acknowledge without applying
            self.setAsProcessed(data['inbox_id'])
            self.sendStatusUpdate(data, "DONE")
            print("Status: OLD DATA")

    def processDelete(self, data):
        """Handle a DEL message.

        If a waiting PRI still references the same row the delete is skipped
        (the rename must land first); otherwise the row is deleted by its
        primary key.
        """
        # self.sendStatusUpdate(data, "PROC")
        # check whether there is a waiting PRI inbox row for this
        # primary key; if so, skip for now, otherwise delete normally
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        checkQuery = """ select count(inbox_id) as total from tb_sync_inbox where msg_type = 'PRI' and status = 'waiting' and table_name = '{}' and query = '{}' """
        result = self.syncDB.executeFetchOne(
            checkQuery.format(data['table_name'], data['query']))
        if (result['execute_status']):
            if (result['data']['total'] > 0):
                print('Skip, total PRI: {}'.format(result['data']['total']))
            else:
                dltQuery = "delete from {} where {}={}"
                pkColumnName = self._getPrimaryKeyColumn(data['table_name'])
                delete = self.syncDB.executeCommit(
                    dltQuery.format(data['table_name'], pkColumnName,
                                    data['row_id']))
                if (delete):
                    self.sendStatusUpdate(data, "DONE")
                    self.setAsProcessed(data['inbox_id'])
                    print("Status: OK")
                else:
                    self.setPriority(data['inbox_id'], 'tb_sync_inbox', 3)
                    print("Status: ERROR")

    def processAck(self, data):
        """Handle an ACK message: advance the referenced outbox row's status.

        INS messages move to 'need_pk_update' (they still await a PK
        exchange); everything else moves to 'arrived'.
        """
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        obox = self.syncDB.executeFetchOne(
            f"select * from tb_sync_outbox where outbox_id = {data['query']}")
        ack = True
        if (obox['data']):
            if (obox['data']['msg_type'] == 'INS'):
                status = 'need_pk_update'
            else:
                status = 'arrived'
            ack = self.outbox.update(data={'status': status},
                                     where_clause={'outbox_id': data['query']})
            # ack = self.syncDB.executeCommit(
            #     f"update tb_sync_outbox set status='{status}' where outbox_id={data['query']}")
            # ackQuery = "update tb_sync_outbox set is_arrived=1, status='arrived' where outbox_id = {}".format(
            #     data['query'])
            # ack = self.syncDB.executeCommit(ackQuery)
        if (not ack):
            self.outbox.update(data={'status': 'error'},
                               where_clause={'outbox_id': data['msg_id']})
            print("Status: ERROR")
            # errorQuery = 'update tb_sync_outbox set is_error=1 where outbox_id = {}'.format(
            #     data['msg_id'])
            # self.syncDB.executeCommit(errorQuery)
            # self.systemlog.insert("processACK", "Gagal update ACK ID#{} ERROR: {}".format(
            #     data['inbox_id'], self.statusDB.getLastCommitError()['msg']))
        else:
            self.setAsProcessed(data['inbox_id'])
            print("Status: OK")

    def processReg(self, data):
        """Handle a REG (client registration) message — master only.

        The message query is a '#'-separated list of 'key:value' pairs
        (ip_address, port, secret_key, iv_key). The master rejects duplicate
        IPs, otherwise assigns the next client_unique_id and replies via the
        outbox. Non-master hosts reply with an error.
        """
        print(f"Inbox ID: {data['inbox_id']}")
        print(f"Type: {data['msg_type']}")
        if (env.MASTER_MODE):
            time.sleep(0.2)
            # parse "key:value#key:value#..." into a dict
            regData = data['query'].split('#')
            reg = {}
            for item in regData:
                attributes = item.split(':')
                reg[attributes[0]] = attributes[1]
            # check whether the IP address is already registered
            checkQuery = f"select count(*) as total from tb_sync_client where client_ip = '{reg['ip_address']}'"
            check = self.syncDB.executeFetchOne(checkQuery)
            if (check['data']['total'] > 0):
                # duplicate IP — reply with an error message
                outbox = {
                    'row_id': 0,
                    'table_name': '',
                    'msg_type': 'REG',
                    'msg_id': 0,
                    'query':
                    f"status:ERROR#reason:IP Address sudah digunakan#for:{data['msg_id']}",
                    'client_unique_id': 0,
                    'client_ip': reg['ip_address'],
                    'client_port': reg['port'],
                    'client_key': reg['secret_key'],
                    'client_iv': reg['iv_key']
                }
                self.outbox.insert(outbox)
                self.setAsProcessed(data['inbox_id'])
            else:
                # allocate the next client_unique_id
                client_id_check_q = "select ifnull(max(client_unique_id), 0) as id from tb_sync_client"
                client_id = self.syncDB.executeFetchOne(client_id_check_q)
                if (client_id['data']['id'] == 0):
                    client_id = self.clientIdStartFrom
                else:
                    client_id = client_id['data']['id'] + 1
                sql = f"insert into tb_sync_client(client_unique_id, client_key, client_iv, client_port, client_ip) values({client_id}, '{reg['secret_key']}', '{reg['iv_key']}', {reg['port']}, '{reg['ip_address']}')"
                inserted = self.syncDB.executeCommit(sql)
                if (not inserted):
                    self.setPriority(data['inbox_id'], 'tb_sync_inbox', 3)
                else:
                    # registration succeeded — reply with the assigned id
                    outbox = {
                        'row_id': 0,
                        'table_name': '',
                        'msg_type': 'REG',
                        'msg_id': 0,
                        'query': f"status:OK#id:{client_id}#for:{data['msg_id']}",
                        'client_unique_id': client_id
                    }
                    self.outbox.insert(outbox)
                    self.setAsProcessed(data['inbox_id'])
                    print("Status: OK")
        else:
            # NOTE(review): `reg` is never built in this branch (the query is
            # not parsed here), so the references below raise NameError.
            outbox = {
                'row_id': 0,
                'table_name': '',
                'msg_type': 'REG',
                'msg_id': 0,
                'query':
                f"status:ERROR#reason:Host bukan master#for:{data['msg_id']}",
                'client_unique_id': 0,
                'client_ip': reg['ip_address'],
                'client_port': reg['port'],
                'client_key': reg['secret_key'],
                'client_iv': reg['iv_key']
            }
            self.outbox.insert(outbox)
            self.setAsProcessed(data['inbox_id'])
            print(f'Status: ERROR')

    def getData(self):
        """Fetch the next batch of inbox messages to process.

        Returns all waiting non-PRI messages (optionally limited) unioned
        with, per table, the single PRI message that is allowed to run next
        according to `nextPriToProcess`.
        """
        self.syncDB.connect()
        sql = "(select * from tb_sync_inbox where status = 'waiting' and msg_type <> 'PRI' order by priority asc, inbox_id asc, occur_at asc)"
        if (self.limitRow > 0):
            sql += f' limit {self.limitRow}'
        self.getPriToProcess()
        additionalQuery = ""
        excludeTables = ""
        # add a sub-query per table for the specific PRI that must run next
        if (len(self.nextPriToProcess) > 0):
            for item in self.nextPriToProcess:
                additionalQuery += f" union (select * from tb_sync_inbox where status = 'waiting' and msg_type = 'PRI' and table_name = '{item}' and query = '{self.nextPriToProcess[item]}' order by first_time_occur_at asc, priority asc)"
                if (excludeTables != ''):
                    excludeTables += f" or table_name <> '{item}'"
                else:
                    excludeTables += f"table_name <> '{item}'"
        # build the query fetching one PRI per remaining (non-excluded) table
        if (excludeTables == ''):
            # take one waiting PRI message per table
            # additionalQuery = ''
            additionalQuery += f" union (SELECT * FROM tb_sync_inbox WHERE msg_type = 'PRI' AND STATUS='waiting' GROUP BY table_name ORDER BY first_time_occur_at ASC, priority ASC)"
        else:
            additionalQuery += f" union (select * from tb_sync_inbox where status = 'waiting' and msg_type = 'PRI' and ({excludeTables}) group by table_name order by first_time_occur_at asc, priority asc)"
        # print(sql + additionalQuery)
        data = self.syncDB.executeFetchAll(sql + additionalQuery, False)
        self.syncDB.close()
        return data

    def getStatusInbox(self):
        """Fetch waiting ACK/DONE status messages.

        NOTE(review): the row-limit append is missing the SQL `limit`
        keyword, producing invalid SQL when limitRow > 0 (compare getData).
        """
        sql = "select * from tb_sync_inbox where status = 'waiting' and (msg_type = 'ACK' or msg_type = 'DONE') order by priority asc, inbox_id asc, occur_at asc"
        if (self.limitRow > 0):
            sql += f' {self.limitRow}'
        data = self.syncDB.executeFetchAll(sql)
        return data

    def getSyncInbox(self):
        """Fetch waiting data messages (INS/UPD/DEL/REG/PRI).

        NOTE(review): same missing `limit` keyword as getStatusInbox.
        """
        sql = "select * from tb_sync_inbox where status = 'waiting' and (msg_type = 'INS' or msg_type = 'UPD' or msg_type = 'DEL' or msg_type = 'REG' or msg_type = 'PRI') order by priority asc, inbox_id asc, occur_at asc"
        if (self.limitRow > 0):
            sql += f' {self.limitRow}'
        data = self.syncDB.executeFetchAll(sql)
        return data

    def setAsProcessed(self, id, status='done'):
        """Mark inbox row `id` with `status` (default 'done')."""
        # NOTE(review): local name `set` shadows the builtin
        set = self.inbox.update(data={'status': status},
                                where_clause={'inbox_id': id})
        # query = 'update tb_sync_inbox set is_process=1 where inbox_id = {}'.format(
        #     id)
        # print(set)

    def sendStatusUpdate(self, data, status):
        """Queue an outbox message reporting `status` for message `data`."""
        return self.outbox.insert({
            'row_id': data['row_id'],  # local row id
            'table_name': data['table_name'],
            'msg_type': status,
            'query': data['msg_id'],  # receiver outbox_id
            'client_unique_id': data['client_unique_id'],
            'msg_id': 0,
            'priority': 1
        })

    def updateOutboxStatus(self, id, status, inbox_id):
        """Set outbox row `id` to `status`; mark `inbox_id` processed on
        success, otherwise downgrade its priority for retry."""
        upd = self.syncDB.executeCommit(
            f"update tb_sync_outbox set status='{status}' where outbox_id={id}"
        )
        if (upd):
            self.setAsProcessed(inbox_id)
        else:
            self.setPriority(inbox_id, 'tb_sync_inbox', 3)

    def canProcessMsg(self, data):
        """Return whether a message may be processed now.

        Non-data messages always may. For INS/UPD/DEL on a slave, all of the
        slave's own data outbox rows must be 'done' first.
        """
        watchedMsgType = ['INS', 'UPD', 'DEL']
        if (data['msg_type'] not in watchedMsgType):
            return True
        # check for unfinished watched-type messages before this inbox_id;
        # a slave must ensure all its outbox rows are done on the master
        # before executing its inbox
        if (not env.MASTER_MODE):
            previousMsgs = self.syncDB.executeFetchOne(
                "select count(*) as total from tb_sync_outbox where (msg_type = 'INS' or msg_type='UPD' or msg_type='DEL') and status <> 'done'"
            )
            if (previousMsgs['data']['total'] > 0):
                return False
            else:
                return True
            print(previousMsgs)  # NOTE(review): unreachable

    def process(self, inbox):
        """Dispatch each inbox message to its type-specific handler.

        Appends a timestamp to 'proctime.text' after each batch; sleeps
        briefly when the inbox is empty.
        """
        if (inbox):
            for item in inbox:
                # process non-INS/UPD/DEL messages first; do not run data
                # messages while earlier INS/UPD/DEL are still unfinished
                delayMsgInboxQ = "select count(*) from tb_sync_inbox where status "  # NOTE(review): unused, incomplete query
                print("[{}] -> #{}".format(
                    datetime.datetime.now().strftime("%d-%m-%Y %H:%M:%S"),
                    item['msg_id']),
                      end=" ")
                msgType = item['msg_type']
                if (msgType == 'INS'):
                    self.processInsert(item)
                elif (msgType == 'UPD'):
                    self.processUpdate(item)
                elif (msgType == 'DEL'):
                    self.processDelete(item)
                elif (msgType == 'ACK'):
                    self.processAck(item)
                elif (msgType == "PRI"):
                    self.processPrimaryKey(item)
                elif (msgType == 'REG'):
                    self.processReg(item)
                elif (msgType == 'PROC'):
                    print(
                        self.updateOutboxStatus(item['query'], "processing",
                                                item['inbox_id']))
                elif (msgType == 'NEEDPK'):
                    print(
                        self.updateOutboxStatus(item['query'], "need_pk_update",
                                                item['inbox_id']))
                elif (msgType == 'DONE'):
                    # NOTE(review): self.statusDB is commented out in
                    # __init__ — this branch raises AttributeError.
                    done = self.statusDB.executeCommit(
                        f"update tb_sync_outbox set status = 'done' where outbox_id = {item['query']}"
                    )
                    if (done):
                        print(
                            self.statusDB.executeCommit(
                                f"update tb_sync_inbox set status='done' where inbox_id={item['inbox_id']}"
                            ))
                    else:
                        print("False")
                    # print(self.updateOutboxStatus(
                    #     item['query'], "done", item['inbox_id']))
                else:
                    self.syncDB.insError("Msg type not found for id=" +
                                         str(item['inbox_id']))
            # print(f"finish at: {time.time()}")
            file = open("proctime.text", 'a')
            file.write(f"{time.time()}\n")
            file.close()
        else:
            time.sleep(0.3)
dbUser = input("[?] Masukkan Username DB: ") dbPass = input("[?] Masukkan Password DB: ") print("[/] Testing koneksi DB", end="...") db = DatabaseConnection(dbHost, dbUser, dbPass, dbName) try: db.connect() except Exception as e: print("ERROR:", e.args[1]) continue print("OK") break # cek table if (isMaster): cekTableQuery = "show tables" tables = db.executeFetchAll(cekTableQuery) if (len(tables['data']) <= 0): print( '[!] Tidak ada tabel, tidak dapat melanjutkan proses. Isi tabel lalu ulangi lagi.' ) sys.exit() # ipaddr = input( "[?] Masukkan IP Address host yang dapat hubungi oleh host lain: ") ip_candidates = re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", ipaddr) if (len(ip_candidates) <= 0): print('[!] IP Address tidak valid') sys.exit() ipaddr = ip_candidates[0] print(f"[/] Menggunakan `{ipaddr}` sebagai IP Address")
import sys import bcrypt from DatabaseConnection import DatabaseConnection host = '157.230.37.25' username = '******' password = '******' database = 'db_stahnsimak' sso = DatabaseConnection(host, 'stahnsso', 'st4hnSS0', 'db_stahnidentity') sql = """ select * from tb_users where pass is not null """ res = sso.executeFetchAll(sql) count = 1 sso.connect() for item in res: hashedPassword = bcrypt.hashpw(item['pass'].encode('utf-8'), bcrypt.gensalt()) insertSql = """ update tb_users set password='******' where user_id = {} """ # print(insertSql.format(item['mahasiswa_id'], item['nim'], hashedPassword.decode(), # item['nama'], item['nim'], 1, item['email'], item['prodi_id'], item['fakultas_id'], 165, 3, password)) # sys.exit() isInserted = sso.executeCommit(
class Ventilator:
    """ZeroMQ PUSH producer that validates outbox messages and fans them
    out to worker processes, reporting to a sink socket."""

    # class-level defaults, replaced per-instance in __init__
    context = None
    sender = None
    processId = None
    system = None

    def __init__(self):
        # self.maxRowPerWorker = env.VEN_MAX_ROW_PER_WORKER
        self.workerAddress = env.VEN_WORKER_ADDR  # PUSH bind address for workers
        self.sinkAddr = env.SINK_ADDR  # address of the result sink
        self.context = zmq.Context()
        self.sender = self.context.socket(zmq.PUSH)
        self.sender.bind(self.workerAddress)
        self.processId = os.getpid()
        self.system = platform.system()
        self.db = DatabaseConnection(env.DB_HOST, env.DB_UNAME,
                                     env.DB_PASSWORD, env.DB_NAME)
        self.syslog = SystemLog()
        self.outbox = Outbox(self.db)

    # ///////////////////////////////////////////
    def setWorker(self, numberOfWorker):
        """Spawn `numberOfWorker` worker processes.

        NOTE(review): `self.processId` is an int — concatenating it to a str
        raises TypeError; presumably str(self.processId) was intended.
        """
        for i in range(numberOfWorker):
            os.system('py worker.py ' + self.processId)

    # ///////////////////////////////////////////
    def killWorker(self):
        """Read the worker-pid file for this process and print it.

        NOTE(review): same int-concatenation issue as setWorker, and the
        file handle is never closed.
        """
        file = open("process/" + self.processId)
        workers = file.read()
        print(workers)

    # ///////////////////////////////////////////
    def send(self, data):
        """Validate each outbox row in `data` and push valid ones to workers.

        A data message (INS/UPD/DEL) is valid only if no matching inbox row
        shows it originated from the same client (prevents messages looping
        back to their sender). Valid rows are marked 'sent'/'arrived' with a
        30 s retry window and pushed as JSON; invalid rows are canceled.
        """
        sink = self.context.socket(zmq.PUSH)
        sink.connect(self.sinkAddr)
        i = 1  # NOTE(review): unused local
        for item in data:
            print("---------------------")
            print(f"Outbox ID: {item['outbox_id']}")
            print(f"Type: {item['msg_type']}")
            # validity check: a message must not be routed back to its
            # original sender (would cause an endless data loop)
            isValid = False
            invalidReason = ''
            if (item['msg_type'] == 'INS' or item['msg_type'] == 'DEL'
                    or item['msg_type'] == 'UPD'):
                if (item['msg_type'] == 'INS' or item['msg_type'] == 'DEL'):
                    # match INS/DEL by sync token, client id and table —
                    # never send the same data back to its originator
                    isInsideInboxQuery = "select * from tb_sync_inbox where client_unique_id={} and sync_token = '{}' and table_name = '{}' and msg_type='{}'".format(
                        item['client_unique_id'], item['sync_token'],
                        item['table_name'], item['msg_type'])
                elif (item['msg_type'] == 'UPD'):
                    # NOTE(review): identical query to the INS/DEL branch
                    isInsideInboxQuery = "select * from tb_sync_inbox where client_unique_id={} and sync_token = '{}' and table_name = '{}' and msg_type='{}'".format(
                        item['client_unique_id'], item['sync_token'],
                        item['table_name'], item['msg_type'])
                inbox = self.db.executeFetchAll(isInsideInboxQuery)
                if (inbox['execute_status']):
                    clients = [
                        client['client_unique_id'] for client in inbox['data']
                    ]
                    print(clients)
                    if (item['client_unique_id'] not in clients):
                        isValid = True
                else:
                    self.syslog.insert("ventilator-valid-msg",
                                       "Error get data from outbox")
            else:
                # non-data messages are always valid
                isValid = True
            # print(isValid)
            # sys.exit()
            print("Status: ", end="")
            if (isValid):
                # filter DEL messages: do not send a DEL while the row's
                # PRI (PK rename) has not been processed yet
                if (not env.MASTER_MODE and item['msg_type'] == 'DEL'):
                    checkPRIQuery = """ select * from tb_sync_inbox where msg_type = 'PRI' and status='waiting' and table_name = '{}' and row_id = '{}' """
                    checkPRIRes = self.db.executeFetchAll(
                        checkPRIQuery.format(item['table_name'],
                                             item['query']))
                    if (checkPRIRes['execute_status']):
                        if (len(checkPRIRes['data']) > 0):
                            print("PRI not yet process")
                            continue
                    else:
                        print("check PRI fails")
                        continue
                print('Valid')
                # wire packet handed to a worker
                packet = {
                    'client_id': item['client_unique_id'],
                    'client_key': item['client_key'],
                    'client_iv': item['client_iv'],
                    'client_port': item['client_port'],
                    'client_ip': item['client_ip'],
                    'msg_type': item['msg_type'],
                    'row_id': item['row_id'],
                    'table_name': item['table_name'],
                    'msg_id': item['outbox_id'],
                    'occur_at': item['occur_at'],
                    'sync_token': item['sync_token'],
                    'first_time_occur_at': item['first_time_occur_at'],
                    'query': item['query'],
                    'priority': item['priority'],
                    'timestamp':
                    item['created_at'].strftime("%Y-%m-%d %H:%M:%S")
                }
                # allow a retry 30 seconds from now if no ACK arrives
                nextRetryAt = datetime.datetime.now() + datetime.timedelta(
                    seconds=30)
                nextRetryAt = nextRetryAt.strftime('%Y-%m-%d %H:%M:%S')
                if (item['msg_type'] == 'ACK'):
                    status = 'arrived'
                else:
                    status = 'sent'
                self.outbox.update(
                    data={
                        'priority': 3,
                        'status': status,
                        'retry_again_at': nextRetryAt
                    },
                    where_clause={'outbox_id': item['outbox_id']})
                self.sender.send_json(packet)
                # append send timestamp for offline latency analysis
                file = open("sendtime.txt", 'a')
                file.write(f"{time.time()}\n")
                file.close()
            else:
                invalidReason = 'Loop'
                print('Invalid, Reason: {}'.format(invalidReason))
                # self.outbox.update(data={'status': 'canceled'}, where_clause={
                #     'outbox_id': item['outbox_id']})
                query = "update tb_sync_outbox set status='canceled' where outbox_id = {}".format(
                    item['outbox_id'])
                self.db.executeCommit(query)
# One-off inspection script: for each active study program, list the 2019/1
# course offerings of prodi 3 as "matakuliah#prodi#program#kelas" lines.
# Exits after the first program (sys.exit inside the outer loop).
host = '172.104.45.98'
username = '******'
password = '******'
database = 'db_simak'
rowLimit = '200'  # NOTE(review): unused in this script
db = DatabaseConnection(host, username, password, database)
tahun = 2019  # academic year
semester = 1
tahunajaran = f"{tahun}{semester}"  # NOTE(review): unused below
kelasStartFrom = 65  # ord('A') — NOTE(review): unused below
prodiQuery = "select prodi_id, nama_prodi from tb_prodi where flag = 1"
prodi = db.executeFetchAll(prodiQuery)
for p in prodi['data']:
    # fetch offerings — NOTE(review): prodi_id is hard-coded to 3, the loop
    # variable `p` is never used in the query
    mktawarQuery = f""" select mktawar_id, matakuliah_id, kelas, program_id, prodi_id from tb_mktawar where tahunajaran={tahun} and semester={semester} and prodi_id = 3 and tb_mktawar.flag=1 order by matakuliah_id, program_id, prodi_id """
    mktawar = db.executeFetchAll(mktawarQuery)
    for mk in mktawar['data']:
        print(
            f"{mk['matakuliah_id']}#{mk['prodi_id']}#{mk['program_id']}#{mk['kelas']}"
        )
    sys.exit()
class Instalation:
    """Installer for the DB-sync layer.

    Creates the tb_sync_* bookkeeping tables, adds the sync columns
    (`last_action_at`, `sync_token`) to every application table, and
    generates the MySQL triggers that feed tb_sync_changelog / tb_sync_outbox.
    All SQL is executed through the project `DatabaseConnection` wrapper,
    whose fetch methods return a dict with 'execute_status' and 'data' keys.
    """

    def __init__(self, dbhost, dbname, dbusername, dbpass):
        # Target schema name; used for information_schema lookups below.
        self.dbName = dbname
        # self.dbName = env.DB_NAME
        self.db = DatabaseConnection(dbhost, dbusername, dbpass, dbname)

    def setUniqueId(self, id):
        """Set this node's unique id (prefixed into generated sync tokens).

        Must be called before generating triggers: it also initialises
        `ignoreColums`, which `_createAfterUpdateTrigger` depends on.
        NOTE(review): parameter `id` shadows the builtin.
        """
        self.uniqueId = id
        # Columns maintained by the sync layer itself; excluded from
        # change detection in the after-update trigger.
        self.ignoreColums = ['sync_token', 'last_action_at']

    def getTables(self):
        """Return all tables of the target schema except the tb_sync_* ones."""
        query = """
        select * from information_schema.TABLES
        where TABLE_SCHEMA = '{}' and TABLE_NAME not like "tb_sync_%"
        """.format(self.dbName)
        return self.db.executeFetchAll(query)

    def getColums(self, tableName):
        """Return name/type/key info for every column of `tableName`."""
        query = """
        select COLUMN_NAME, COLUMN_TYPE, COLUMN_KEY
        from information_schema.COLUMNS
        where TABLE_SCHEMA = '{}' and TABLE_NAME = '{}'
        """
        return self.db.executeFetchAll(query.format(self.dbName, tableName))

    def randomString(self, stringLength=16):
        """Return a random ASCII-letter string of length `stringLength`."""
        letters = string.ascii_letters
        return ''.join(random.choice(letters) for i in range(stringLength))

    def _createAfterInsertTrigger(self, tablename, columns=[]):
        """Create the AFTER INSERT trigger that logs a replayable INSERT
        statement for the new row into tb_sync_changelog.

        NOTE(review): `columns=[]` is a mutable default argument — harmless
        here (never mutated) but worth replacing with None.
        """
        triggername = f"after_insert_{tablename}"
        print(f"Creating `{triggername}`", end="...")
        header = f"""CREATE TRIGGER `{triggername}` AFTER INSERT ON `{tablename}`
        FOR EACH ROW BEGIN
        """
        declaration = """
        DECLARE qry TEXT;
        DECLARE tb VARCHAR(100);
        """
        # All columns except the primary key: the replica assigns its own PK.
        colWoPk = [
            col['COLUMN_NAME'] for col in columns
            if col['COLUMN_KEY'] != "PRI"
        ]
        # creating fields string (comma-separated column list)
        fields = ""
        lencol = len(colWoPk)
        i = 1
        for item in colWoPk:
            fields += item
            if (i < lencol):
                fields += ','
            i += 1
        # creating value string: builds the CONCAT(...) argument list so the
        # generated trigger quotes each new.<col> value, e.g.
        #   CONCAT("insert into t(a,b) values(","'",new.a,"','",new.b,"')")
        # NOTE(review): firstColDivider/secondColDivider/middle are identical,
        # and for tables with only 1 or 2 non-PK columns a divider is appended
        # after the LAST column too, producing ",," before the sufix — the
        # generated SQL would be malformed for such tables. Verify.
        values = ""
        prefix = "\",\"\'\","
        firstColDivider = ",\"\',\'\","
        secondColDivider = ",\"\',\'\","
        middle = ",\"\',\'\","
        sufix = ",\"\')\""
        values += prefix
        i = 1
        for col in colWoPk:
            values += f"new.{col}"
            if (i == 1):
                values += firstColDivider
            elif (i == 2):
                values += secondColDivider
            elif (i < lencol):
                values += middle
            i += 1
        values += sufix
        # Assumes the PRI column is listed first by information_schema —
        # TODO confirm column ordering is guaranteed.
        pk = columns[0]['COLUMN_NAME']
        #
        body = f"""
        SET qry := CONCAT("insert into {tablename}({fields}) values({values});
        SET tb := "{tablename}";
        INSERT INTO `tb_sync_changelog`(`query`, `table`, `type`, row_id, occur_at,
        first_time_occur_at, sync_token)
        VALUES(qry, tb, 'INS', new.{pk}, UNIX_TIMESTAMP(now(3)),
        new.last_action_at, new.sync_token);
        """
        footer = "END;"
        # print(header + declaration + body + footer)
        inserted = self.db.executeCommit(header + declaration + body + footer)
        return inserted

    def _createBeforeInsertTrigger(self, tablename):
        # before insert is used to create the sync token and last_action_at
        # on every synchronised table
        triggername = f"before_insert_{tablename}"
        print(f"Creating `{triggername}`", end="...")
        header = f"""CREATE TRIGGER `{triggername}` BEFORE INSERT ON `{tablename}`
        FOR EACH ROW BEGIN
        """
        declaration = """
        DECLARE auto_id BIGINT DEFAULT 0;
        #DECLARE sync_id_temp BIGINT DEFAULT 0;
        """
        # sync_token = <node uniqueId> concatenated with the next changelog id;
        # only generated when the row arrives without one (i.e. a local insert,
        # not a replicated one).
        body = f"""
        SELECT IFNULL(MAX(log_id), 0)+1 INTO auto_id FROM tb_sync_changelog;
        #SELECT IFNULL(MAX(sync_id), 0)+1 INTO sync_id_temp
        #FROM {tablename};
        #SET new.sync_id = sync_id_temp;
        IF new.sync_token IS NULL THEN
        SET new.sync_token = CAST(CONCAT('{self.uniqueId}', auto_id) AS UNSIGNED);
        SET new.last_action_at = UNIX_TIMESTAMP(now(6));
        END IF;
        """
        footer = "END;"
        created = self.db.executeCommit(header + declaration + body + footer)
        return created

    def _createAfterDeleteTrigger(self, tablename, pk):
        # after delete
        # NOTE(review): despite the name, this registers a BEFORE DELETE
        # trigger — confirm whether that is intentional.
        triggername = f"after_delete_{tablename}"
        print(f"Creating `{triggername}`", end="...")
        header = f"""CREATE TRIGGER `{triggername}` BEFORE DELETE ON `{tablename}`
        FOR EACH ROW BEGIN
        """
        declaration = """
        DECLARE qry TEXT;
        DECLARE tb VARCHAR(100);
        DECLARE time_at DOUBLE DEFAULT 0;
        """
        # For DEL entries the `query` column carries only the deleted row's PK.
        body = f"""
        SET qry := old.{pk};
        SET tb := "{tablename}";
        SET time_at := UNIX_TIMESTAMP(NOW(3));
        INSERT INTO `tb_sync_changelog`(`query`, `table`, `type`, row_id, occur_at,
        first_time_occur_at, sync_token)
        VALUES(qry, tb, 'DEL', old.{pk}, time_at, time_at, old.sync_token);
        """
        footer = "END;"
        created = self.db.executeCommit(header + declaration + body + footer)
        return created

    def _createAfterUpdateTrigger(self, tablename, columns):
        """Create the AFTER UPDATE trigger.

        If a newer pending UPD for this row sits in tb_sync_inbox, re-queue
        that inbox entry instead of logging; otherwise build an UPDATE
        statement containing only the columns whose values actually changed
        and append it to tb_sync_changelog.
        """
        triggername = f"after_update_{tablename}"
        print(f"Creating `{triggername}`", end="...")
        header = f"""CREATE TRIGGER `{triggername}` AFTER UPDATE ON `{tablename}`
        FOR EACH ROW BEGIN
        """
        declaration = """
        DECLARE update_query TEXT;
        DECLARE tb VARCHAR(100);
        DECLARE front_update TINYINT DEFAULT 0;
        DECLARE update_count TINYINT DEFAULT 0;
        DECLARE latest_update_id BIGINT DEFAULT 0;
        """
        # Non-PK columns, minus the sync bookkeeping columns.
        col = [
            col['COLUMN_NAME'] for col in columns
            if col['COLUMN_KEY'] != "PRI"
            and col['COLUMN_NAME'] not in self.ignoreColums
        ]
        # Assumes the PRI column is first — TODO confirm (see insert trigger).
        pk = columns[0]['COLUMN_NAME']
        body = f"""
        SELECT inbox_id INTO latest_update_id FROM tb_sync_inbox
        WHERE msg_type = 'UPD' AND table_name = '{tablename}'
        AND row_id = new.{pk} AND first_time_occur_at > new.last_action_at
        ORDER BY first_time_occur_at DESC LIMIT 1;
        IF(latest_update_id != 0) THEN
        UPDATE tb_sync_inbox SET STATUS = 'waiting' WHERE inbox_id = latest_update_id;
        ELSE
        SET update_query := "update {tablename} set ";"""
        # One per-column changed-value check (NULL-safe via <=>) is emitted
        # into the trigger body for every syncable column.
        for c in col:
            body += f"""
            IF !(new.{c} <=> old.{c}) THEN
            SET front_update = front_update + 1;
            SET update_count = update_count + 1;
            IF(front_update > 1) THEN
            SET update_query = CONCAT(update_query, ",");
            END IF;
            SET update_query = CONCAT(update_query, '{c}=', "'", new.{c}, "'");
            END IF;
            """
        body += f"""
        SET tb := '{tablename}';
        IF update_count > 0 THEN
        SET update_query := CONCAT(update_query, ",last_action_at='", new.last_action_at, "',", "sync_token='", new.sync_token, "'");
        SET update_query := CONCAT(update_query, " where {pk}=", new.{pk});
        INSERT INTO `tb_sync_changelog`(`query`, `table`, `type`, row_id, occur_at,
        first_time_occur_at, sync_token)
        VALUES(update_query, tb, 'UPD', new.{pk}, UNIX_TIMESTAMP(NOW(3)),
        new.last_action_at, new.sync_token);
        END IF;
        END IF;
        """
        footer = "END;"
        # print(header + declaration + body + footer)
        created = self.db.executeCommit(header + declaration + body + footer)
        if not created:
            print(self.db.getLastCommitError())
        return created

    def _createBeforeUpdateTrigger(self, tablename, pk):
        """Create the BEFORE UPDATE trigger that refreshes sync_token /
        last_action_at on genuine local updates, but leaves them untouched
        when the update is a replicated one (caller already set the token)
        or the primary key itself changed.
        """
        triggername = f"before_update_{tablename}"
        print(f"Creating `{triggername}`", end="...")
        header = f"""CREATE TRIGGER `{triggername}` BEFORE UPDATE ON `{tablename}`
        FOR EACH ROW BEGIN
        """
        declaration = """
        DECLARE auto_id BIGINT DEFAULT 0;
        DECLARE pri_change TINYINT DEFAULT 0;
        """
        body = f"""
        SELECT IFNULL(MAX(log_id), 0)+1 INTO auto_id FROM tb_sync_changelog;
        IF old.{pk} != new.{pk} THEN
        SET pri_change := 1;
        END IF;
        IF pri_change = 1 OR (pri_change = 0 AND not (new.sync_token <=> old.sync_token)) THEN
        SET new.sync_token = new.sync_token;
        SET new.last_action_at = new.last_action_at;
        ELSE
        SET new.sync_token = CAST(CONCAT('{self.uniqueId}', auto_id) AS UNSIGNED);
        SET new.last_action_at = UNIX_TIMESTAMP(NOW(6));
        END IF;
        """
        footer = "END;"
        created = self.db.executeCommit(header + declaration + body + footer)
        return created

    def generateSyncTrigger(self):
        """Generate the full trigger set for every application table."""
        print('--------------')
        print("Generate sync trigger")
        # procedure:
        # 1. fetch all tables from information_schema
        # 2. fetch each table's columns from information_schema to embed in
        #    the triggers; triggers created: after insert, before insert,
        #    after delete, after update, before update
        tables = self.getTables()
        if (tables['execute_status']):
            for tb in tables['data']:
                columns = self.getColums(tb['TABLE_NAME'])
                # columns['data'][0] is assumed to be the PRI column —
                # TODO confirm ordering guarantee.
                print('OK') if self._createAfterInsertTrigger(
                    tb['TABLE_NAME'], columns['data']) else print("ERROR")
                print('OK') if self._createBeforeInsertTrigger(
                    tb['TABLE_NAME']) else print("ERROR")
                print('OK') if self._createAfterDeleteTrigger(
                    tb['TABLE_NAME'],
                    columns['data'][0]['COLUMN_NAME']) else print("ERROR")
                print('OK') if self._createAfterUpdateTrigger(
                    tb['TABLE_NAME'], columns['data']) else print("ERROR")
                print('OK') if self._createBeforeUpdateTrigger(
                    tb['TABLE_NAME'],
                    columns['data'][0]['COLUMN_NAME']) else print("ERROR")

    def dropAllTrigger(self):
        """Drop every trigger in the current schema (uninstall step)."""
        print('--------------')
        print("Cleaning all trigger...")
        sql = "show triggers"
        triggers = self.db.executeFetchAll(sql)
        if (triggers['execute_status']):
            for trigger in triggers['data']:
                print('Deleting trigger `{}`'.format(trigger['Trigger']),
                      end="...")
                delete = self._dropTriggerIfExist(trigger['Trigger'])
                print("OK") if delete else print("ERROR")

    def _dropTriggerIfExist(self, trigger_name):
        """Drop a single trigger, tolerating its absence."""
        sql = 'drop trigger if exists {}'.format(trigger_name)
        return self.db.executeCommit(sql)

    def generateDefaultTrigger(self):
        """Create the two fixed triggers of the sync layer itself:
        after_insert_changelog (fan out every changelog row to an outbox entry
        per mode-2 client) and before_insert_outbox (stamp the client's
        ip/port/key/iv onto each outbox row).
        """
        # generating after insert changelog
        print('--------------')
        # creating trigger
        print('Creating default trigger `{}`...'.format(
            'after_insert_changelog'), end=" ")
        header = """
        CREATE TRIGGER `{}` AFTER INSERT ON `tb_sync_changelog`
        FOR EACH ROW BEGIN
        """.format('after_insert_changelog')
        declaration = """
        DECLARE finished INTEGER DEFAULT 0;
        DECLARE id INTEGER(11);
        DECLARE curClient CURSOR FOR
        SELECT client_unique_id FROM tb_sync_client where client_mode = 2;
        DECLARE CONTINUE HANDLER FOR NOT FOUND SET finished = 1;
        """
        # Cursor loop: one outbox row per registered mode-2 client.
        body = """
        OPEN curClient;
        getClient: LOOP
        FETCH curClient INTO id;
        IF finished = 1 THEN
        LEAVE getClient;
        END IF;
        INSERT INTO tb_sync_outbox(row_id, table_name, `query`, msg_type,
        `client_unique_id`, created_at, occur_at, first_time_occur_at, sync_token)
        VALUES(new.row_id, new.table, new.query, new.type, id, new.created_at,
        new.occur_at, new.first_time_occur_at, new.sync_token);
        END LOOP getClient;
        CLOSE curClient;
        """
        footer = """
        END;
        """
        create = self.db.executeCommit(header + ' ' + declaration + ' ' + body
                                       + ' ' + footer)
        if (not create):
            print("ERROR")
        else:
            print('OK')
        print(
            'Creating default trigger `{}`...'.format('before_insert_outbox'),
            end=" ")
        header = """
        CREATE TRIGGER `before_insert_outbox` BEFORE INSERT ON `tb_sync_outbox`
        FOR EACH ROW BEGIN
        """
        declaration = """
        DECLARE ip VARCHAR(100);
        DECLARE ports INT(11);
        DECLARE skey VARCHAR(16);
        DECLARE iv VARCHAR(16);
        """
        # Resolve the destination client's connection data once, at enqueue
        # time, so the sender does not need a join per message.
        body = """
        IF (new.client_unique_id <> 0) THEN
        SELECT client_ip, client_port, client_key, client_iv
        INTO ip, ports, skey, iv
        FROM tb_sync_client WHERE client_unique_id = new.client_unique_id;
        SET new.client_ip = ip;
        SET new.client_port = ports;
        SET new.client_key = skey;
        SET new.client_iv = iv;
        END IF;
        """
        footer = """
        END;
        """
        create = self.db.executeCommit(header + ' ' + declaration + ' ' + body
                                       + ' ' + footer)
        if (not create):
            print("ERROR")
        else:
            print('OK')

    def __createChanglogTable(self):
        """Create tb_sync_changelog (the local change journal)."""
        sql = """
        CREATE TABLE `tb_sync_changelog` (
        `log_id` bigint(20) NOT NULL AUTO_INCREMENT,
        `row_id` int(1) DEFAULT NULL COMMENT 'primary key of the table',
        `table` varchar(100) DEFAULT NULL,
        `query` text,
        `type` varchar(5) DEFAULT NULL,
        `is_proceed` tinyint(4) DEFAULT '0',
        `first_time_occur_at` double DEFAULT NULL,
        `occur_at` double DEFAULT NULL,
        `created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
        `sync_token` varchar(100) DEFAULT NULL,
        PRIMARY KEY (`log_id`)
        )
        """
        return self.db.executeCommit(sql)

    def __createClientTable(self):
        """Create tb_sync_client (registry of peer nodes)."""
        sql = """
        CREATE TABLE `tb_sync_client` (
        `client_id` int(11) NOT NULL AUTO_INCREMENT,
        `client_unique_id` int(11) DEFAULT NULL,
        `client_key` varchar(255) DEFAULT NULL,
        `client_iv` varchar(25) DEFAULT NULL,
        `client_ip` varchar(20) DEFAULT NULL,
        `client_port` int(11) DEFAULT NULL,
        `client_mode` tinyint(4) DEFAULT '2',
        PRIMARY KEY (`client_id`)
        )
        """
        return self.db.executeCommit(sql)

    def __createInboxTable(self):
        """Create tb_sync_inbox (messages received from peers)."""
        sql = """
        CREATE TABLE `tb_sync_inbox` (
        `inbox_id` bigint(20) NOT NULL AUTO_INCREMENT,
        `row_id` int(11) DEFAULT NULL,
        `table_name` varchar(255) DEFAULT NULL,
        `msg_type` enum('INS','UPD','DEL','ACK','PRI','REG','PROC','NEEDPK','DONE') DEFAULT NULL,
        `msg_id` int(11) DEFAULT NULL,
        `query` text,
        `client_unique_id` int(11) DEFAULT NULL,
        `master_status` tinyint(4) DEFAULT '0',
        `result_primary_key` int(11) DEFAULT '0' COMMENT 'primary key after process the query, due to differential PK between host',
        `status` enum('waiting','need_pk_update','done','error','processing') DEFAULT 'waiting',
        `priority` tinyint(4) DEFAULT '2',
        `sync_token` varchar(100) DEFAULT NULL,
        `first_time_occur_at` double DEFAULT NULL,
        `occur_at` double DEFAULT NULL,
        `created_at` datetime DEFAULT NULL,
        `updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY (`inbox_id`)
        )
        """
        return self.db.executeCommit(sql)

    def __createOutboxTable(self):
        """Create tb_sync_outbox (messages queued for peers)."""
        sql = """
        CREATE TABLE `tb_sync_outbox` (
        `outbox_id` bigint(20) NOT NULL AUTO_INCREMENT,
        `row_id` int(11) DEFAULT NULL COMMENT 'primary key of table in local',
        `table_name` varchar(255) DEFAULT NULL,
        `msg_type` enum('INS','UPD','DEL','ACK','PRI','REG','PROC','NEEDPK','DONE') DEFAULT NULL,
        `msg_id` int(11) DEFAULT NULL COMMENT 'outbox_id from local',
        `query` text,
        `client_unique_id` int(11) DEFAULT NULL COMMENT 'client_unique_id',
        `status` enum('waiting','sent','arrived','canceled','retry','need_pk_update','done','processing') DEFAULT 'waiting',
        `priority` tinyint(4) DEFAULT '2',
        `sync_token` varchar(100) DEFAULT NULL,
        `retry_again_at` datetime DEFAULT NULL,
        `first_time_occur_at` double DEFAULT NULL,
        `occur_at` double DEFAULT NULL,
        `created_at` datetime DEFAULT NULL,
        `updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
        `client_ip` varchar(100) DEFAULT NULL,
        `client_port` int(11) DEFAULT NULL,
        `client_key` varchar(16) DEFAULT NULL,
        `client_iv` varchar(16) DEFAULT NULL,
        PRIMARY KEY (`outbox_id`)
        )
        """
        return self.db.executeCommit(sql)

    def createSyncTable(self):
        """Create all four tb_sync_* tables, reporting OK/ERROR per table."""
        print('--------------')
        print("Creating sync table")
        print('Creating changelog table', end="...")
        print('OK') if self.__createChanglogTable() else print('ERROR')
        print('Creating client table', end="...")
        print('OK') if self.__createClientTable() else print('ERROR')
        print('Creating inbox table', end="...")
        print('OK') if self.__createInboxTable() else print('ERROR')
        print('Creating outbox table', end="...")
        print('OK') if self.__createOutboxTable() else print('ERROR')

    def addUnixTimestampColumnToEveryTable(self):
        """Append `last_action_at` (double) and `sync_token` (varchar) columns
        to every application table; both are maintained by the triggers."""
        table = self.getTables()
        print('--------------')
        print('Adding sync column')
        for tb in table['data']:
            print(f"Add `last_action_at` column to `{tb['TABLE_NAME']}`",
                  end="...")
            columns = self.getColums(tb['TABLE_NAME'])
            # Place last_action_at after the current last column.
            lastColumn = columns['data'][len(columns['data']) - 1]
            alterTableQuery = """
            alter table {} add last_action_at double after {}
            """.format(tb['TABLE_NAME'], lastColumn['COLUMN_NAME'])
            if (self.db.executeCommit(alterTableQuery)):
                print("OK")
            else:
                print('ERROR')
            print(f"Add `sync_token` column to `{tb['TABLE_NAME']}`",
                  end="...")
            addSyncTokenQuery = f"alter table {tb['TABLE_NAME']} add sync_token varchar(100) after last_action_at"
            print('OK') if self.db.executeCommit(addSyncTokenQuery) else print(
                "ERROR")
for i in range(delay): print(delay) time.sleep(1) delay -= 1 table_name = 'tb_mahasiswa' if (option == 'ins'): print(f"Testing INSERT {count} data") for i in range(count): print(f"Insert data-{i+1}...", end="...") insert = db.executeCommit( f'insert into {table_name}(nama) values("data-{i+1} from {env.UNIQUE_ID}")' ) print("OK") if insert else print("ERROR") elif (option == 'upd'): data = db.executeFetchAll(f"select id from {table_name} limit {count}") # print(data) for item in data['data']: print(f"Updata data id ke {item['id']}", end="...") update = db.executeCommit( f"update {table_name} set nama='{time.time()}' where id={item['id']}" ) print("OK") if update else print("ERROR") if (not update): print(db.getLastCommitError()) break elif (option == 'del'): data = db.executeFetchAll(f"select id from {table_name} limit {count}") for item in data['data']: print(f"delete data id ke {item['id']}", end="...")