def Run_Customer(self, Branch_address, output_file, THREAD_CONCURRENCY):
    """Start a client (customer) in a subprocess."""
    # DEBUG
    #MyLog(logger, f'Processing Customer #{self.id} with Events:')
    #for e in self.events:
    #    MyLog(logger, f'  #{e["id"]} = {e["interface"]}, {e["money"]}')
    MyLog(
        logger,
        f'Running client customer #{self.id} connecting to server {Branch_address}...'
    )
    Customer.createStub(self, Branch_address, THREAD_CONCURRENCY)
    Customer.executeEvents(self, output_file)
    # Wait one day until keypress
    #try:
    #    while True:
    #        time.sleep(ONE_DAY.total_seconds())
    #except KeyboardInterrupt:
    #    server.stop(None)
    MyLog(
        logger,
        f'Client customer #{self.id} connecting to server {Branch_address} exiting successfully.'
    )
def MsgDelivery(self, request, context):
    self.recvMsg.append(request)
    balance_result = None
    response_result = None
    if request.OP == banking_pb2.QUERY:
        time.sleep(SLEEP_SECONDS)
        balance_result = self.Query()
    if request.OP == banking_pb2.DEPOSIT:
        balance_result = self.Deposit(request.Amount)
    if request.OP == banking_pb2.WITHDRAW:
        response_result, balance_result = self.Withdraw(request.Amount)
    MyLog(
        logger,
        f'Branch {self.id} response to Customer request {request.S_ID} '
        f'interface {get_operation_name(request.OP)} result {get_result_name(response_result)} '
        f'balance {balance_result}')
    response = banking_pb2.MsgDeliveryResponse(
        ID=request.S_ID,
        RC=response_result,
        Amount=balance_result,
    )
    # If D_ID is DO_NOT_PROPAGATE, the request has come from another branch and must not be
    # spread further. Also, no need to propagate query operations.
    if request.D_ID != DO_NOT_PROPAGATE and request.OP == banking_pb2.DEPOSIT:
        self.Propagate_Deposit(request.D_ID, request.Amount)
    if request.D_ID != DO_NOT_PROPAGATE and request.OP == banking_pb2.WITHDRAW:
        if response_result == banking_pb2.SUCCESS:
            # Only propagate if the change has been successful
            self.Propagate_Withdraw(request.D_ID, request.Amount)
    return response
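# The helpers get_operation(), get_operation_name() and get_result_name() are used
# throughout but not shown in this excerpt. A minimal sketch of what they are assumed
# to do, given that banking_pb2 defines the QUERY/DEPOSIT/WITHDRAW operation codes and
# a SUCCESS result code (the concrete strings below are illustrative assumptions):

def get_operation(name):
    """Map an interface string from the input file to its banking_pb2 operation code."""
    return {
        'query': banking_pb2.QUERY,
        'deposit': banking_pb2.DEPOSIT,
        'withdraw': banking_pb2.WITHDRAW,
    }[name.lower()]

def get_operation_name(op):
    """Map a banking_pb2 operation code back to a readable name."""
    return {
        banking_pb2.QUERY: 'query',
        banking_pb2.DEPOSIT: 'deposit',
        banking_pb2.WITHDRAW: 'withdraw',
    }.get(op, 'unknown')

def get_result_name(rc):
    """Map a banking_pb2 result code to a readable name; None means no result was set."""
    if rc is None:
        return 'none'
    return 'success' if rc == banking_pb2.SUCCESS else 'failure'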
def Load_Input_File(filename, branches_list, customers_list):
    """Load branches, customers and events from the input file."""
    try:
        file = open(filename)
    except OSError:
        raise RuntimeError(f'Failed to open {filename}.')
    items = json.load(file)
    # Retrieves:
    #   the branches' list, populating IDs and balances
    #   the customers' operations, populating events
    for item in items:
        if item['type'] == 'branch':
            branch = Branch(item['id'], item['balance'], list())
            branches_list.append(branch)
        if item['type'] == 'customer':
            events = list()
            for event in item['events']:
                events.append(event)
            customer = Customer(item['id'], events)
            customers_list.append(customer)
    # Append the list of all other branches to every branch (excluding itself)
    for b in branches_list:
        for b1 in branches_list:
            if b.id != b1.id:
                b.branches.append(b1.id)
        MyLog(
            logger,
            f'[Main] Branch {b.id} initialised with Balance={b.balance} and Clock={b.local_clock}; '
            f'Other branches identified={b.branches}'
        )
    # Log the events loaded for each customer
    for c in customers_list:
        for e in c.events:
            try:
                MyLog(
                    logger,
                    f'[Main] Customer {c.id} identified event #{e["id"]} = {e["interface"]}, {e["money"]}'
                )
            except KeyError:
                MyLog(
                    logger,
                    f'[Main] Customer {c.id} identified event #{e["id"]} = {e["interface"]}'
                )
    file.close()
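# For reference, a minimal input file that Load_Input_File() accepts could be generated
# as follows. This is a sketch: the keys mirror the ones read above ('type', 'id',
# 'balance', 'events', 'interface', 'money'), while the concrete values are only an
# example.
import json

sample_items = [
    {'type': 'branch', 'id': 1, 'balance': 400},
    {'type': 'branch', 'id': 2, 'balance': 400},
    {'type': 'customer', 'id': 1, 'events': [
        {'id': 1, 'interface': 'query'},
        {'id': 2, 'interface': 'deposit', 'money': 170},
    ]},
]

with open('input.json', 'w') as sample_file:
    json.dump(sample_items, sample_file, indent=2)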
def Run_Customer(self, Branch_address, output_file, balance_file,
                 want_windows=False, THREAD_CONCURRENCY=1):
    """Start a client (customer) in a subprocess."""
    MyLog(logger, f'[Customer {self.id}] Booting...')
    Customer.createStub(self, Branch_address, want_windows, THREAD_CONCURRENCY)
    if (sg != NotImplemented) and want_windows and (self.window is not None):
        # Start events with "Run"
        while True:
            wevent, values = self.window.read()
            # End the program if the user closes the window or presses the Close button
            if wevent == "Close" or wevent == sg.WIN_CLOSED:
                MyLog(logger, f'[Customer {self.id}] Closing windows.')
                break
            if wevent == "Run":
                MyLog(logger, f'[Customer {self.id}] Executing events...')
                # with client_lock:
                Customer.executeEvents(self, output_file, balance_file, want_windows)
                MyLog(
                    logger,
                    f'[Customer {self.id}] Events executed. Exiting successfully.'
                )
                # Uncommenting this break makes the Customer's window close after Run
                #break
        self.window.close()
    else:
        # with client_lock:
        Customer.executeEvents(self, output_file, balance_file, want_windows)
        MyLog(
            logger,
            f'[Customer {self.id}] Events executed. Exiting successfully.')
def Run_Branch(Branch, binding_address, THREAD_CONCURRENCY):
    """Start a server (branch) in a subprocess."""
    MyLog(logger, f'Initialising branch at {binding_address}...')
    options = (('grpc.so_reuseport', 1),)
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=THREAD_CONCURRENCY),
        options=options)
    Branch.bind_address = binding_address
    banking_pb2_grpc.add_BankingServicer_to_server(Branch, server)
    server.add_insecure_port(binding_address)
    server.start()
    MyLog(logger, '*** Press CTRL+C to exit the process when finished ***')
    Wait_Loop()
    server.stop(None)
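# Wait_Loop() is called above but not shown in this excerpt. A minimal sketch of what it
# is assumed to do in this command-line version: block the branch process until the user
# presses CTRL+C, swallowing the KeyboardInterrupt so the caller can stop the server
# cleanly afterwards.
import time

def Wait_Loop():
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        pass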
def Propagate_Withdraw(self, request_id, amount):
    MyLog(
        logger,
        f'Propagate {get_operation_name(banking_pb2.WITHDRAW)} id {request_id} '
        f'amount {amount} to other branches'
    )
    if not self.stubList:
        self.Populate_Stub_List()
    for stub in self.stubList:
        response = stub.MsgDelivery(
            banking_pb2.MsgDeliveryRequest(
                S_ID=request_id,
                OP=banking_pb2.WITHDRAW,
                Amount=amount,
                D_ID=DO_NOT_PROPAGATE,  # Sets DO_NOT_PROPAGATE for receiving branches
            ))
        MyLog(
            logger,
            f'Branch {self.id} sent request {request_id} to other Branches - '
            f'operation {get_operation_name(banking_pb2.WITHDRAW)} result {get_result_name(response.RC)} '
            f'money {response.Amount}')
def Load_Input_File(filename, branches, customers):
    """Load from input files."""
    try:
        file = open(filename)
    except OSError:
        raise RuntimeError(f'Failed to open {filename}.')
    items = json.load(file)
    # Retrieves:
    #   the branches' list, populating IDs and balances
    #   the customers' operations, populating events
    for item in items:
        if item['type'] == 'branch':
            branch = Branch(item['id'], item['balance'], list())
            branches.append(branch)
        if item['type'] == 'customer':
            events = list()
            for event in item['events']:
                events.append(event)
            customer = Customer(item['id'], events)
            customers.append(customer)
    # Append the list of all branches to every branch
    for b in branches:
        for b1 in branches:
            b.branches.append(b1.id)
        MyLog(
            logger,
            f'Branch #{b.id} initialised with Balance={b.balance}; Branches identified={b.branches}'
        )
    # Log the events loaded for each customer
    for c in customers:
        for e in c.events:
            MyLog(
                logger,
                f'Customer #{c.id} has Event #{e["id"]} = {e["interface"]}, {e["money"]}'
            )
    file.close()
def Populate_Stub_List(self):
    if len(self.stubList) == len(self.branches):
        # Stub list already initialised
        return
    for b in self.branches:
        if b != self.id:
            branch_address = self.bind_address
            MyLog(
                logger,
                f'Initializing branch to branch stub at {branch_address}')
            self.stubList.append(
                banking_pb2_grpc.BankingStub(
                    grpc.insecure_channel(branch_address)))
def createStub(self, Branch_address, THREAD_CONCURRENCY):
    """Start a client (customer) stub."""
    MyLog(
        logger,
        f'Initializing customer stub to branch stub at {Branch_address}')
    self.stub = banking_pb2_grpc.BankingStub(
        grpc.insecure_channel(Branch_address))
    client = grpc.server(
        futures.ThreadPoolExecutor(max_workers=THREAD_CONCURRENCY),
    )
    #banking_pb2_grpc.add_BankingServicer_to_server(Customer, client)
    client.start()
def createStub(self, Branch_address, want_windows=False, THREAD_CONCURRENCY=1):
    """
    Boots a client (customer) stub in a subprocess. If PySimpleGUI/TK are installed
    and mode 2 is requested, launches a window in the window manager.

    Args:
        self: Customer class
        Branch_address: TCP/IP address/port where to find the Branch to connect to
        want_windows: Boolean, True if graphical windows are desired as UX
        THREAD_CONCURRENCY: Integer, number of concurrent threads

    Returns:
        None
    """
    MyLog(
        logger,
        f'[Customer {self.id}] Initializing customer stub to branch stub at {Branch_address}'
    )
    self.stub = banking_pb2_grpc.BankingStub(
        grpc.insecure_channel(Branch_address))
    if (sg != NotImplemented) and want_windows:
        layout = [
            [sg.Text("Operations Loaded:", size=(60, 1), justification="left")],
            [sg.Listbox(values=self.events, size=(60, 3))],
            [sg.Output(size=(80, 12))],
            [sg.Button("Run", tooltip='Start Customer\'s Operations')],
            [sg.Button("Close", tooltip='Terminate Customer')],
        ]
        # Create the window
        sg.theme('Dark Green 5')
        w, h = sg.Window.get_screen_size()
        self.window = sg.Window(
            f"Customer #{self.id} -> To Branch @ {Branch_address}",
            layout,
            location=(50 * self.id + 10, h / 5 * (self.id - 1) + 50))
    client = grpc.server(
        futures.ThreadPoolExecutor(max_workers=THREAD_CONCURRENCY),
    )
    client.start()
def register_event(self, passed_id, passed_name):
    """
    Adds an event to the list of events processed by the branch process.

    Args:
        self: Branch class
        passed_id: Event ID to record
        passed_name: Event name

    Returns:
        None
    """
    if self.clock_events is not None:
        self.clock_events.append({
            'id': passed_id,
            'name': passed_name,
            'clock': self.local_clock
        })
    if self.window:
        MyLog(logger, (
            f"[Branch {self.id}] Registered event \"{passed_name}\" - ID {passed_id}, clock {self.local_clock}"
        ))
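# The clock helpers called elsewhere in the Branch class (eventReceive, propagateReceive,
# eventExecute, propagateSend, eventResponse) are not shown in this excerpt. A minimal
# sketch of what they are assumed to do, following Lamport's logical clock rules
# (receive: clock = max(local, remote) + 1; any local step: clock += 1):

def eventReceive(self, remote_clock):
    """Request received from a customer: adopt max(local, remote) + 1."""
    self.local_clock = max(self.local_clock, remote_clock) + 1

def propagateReceive(self, remote_clock):
    """Propagation received from another branch: same Lamport receive rule."""
    self.local_clock = max(self.local_clock, remote_clock) + 1

def eventExecute(self):
    """Local execution of the operation: tick the clock."""
    self.local_clock += 1

def propagateSend(self):
    """About to propagate to the other branches: tick the clock."""
    self.local_clock += 1

def eventResponse(self):
    """About to reply to the requester: tick the clock."""
    self.local_clock += 1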
def executeEvents(self, output_file):
    """Execute customer events."""
    # DEBUG
    #MyLog(logger, f'Executing events for Customer #{self.id}')
    record = {'id': self.id, 'recv': []}
    for event in self.events:
        request_id = event['id']
        request_operation = get_operation(event['interface'])
        request_amount = event['money']
        response = self.stub.MsgDelivery(
            banking_pb2.MsgDeliveryRequest(
                S_ID=request_id,
                OP=request_operation,
                Amount=request_amount,
                D_ID=self.id,
            ))
        MyLog(
            logger,
            f'Customer {self.id} sent request {request_id} to Branch {response.ID} '
            f'interface {get_operation_name(request_operation)} result {get_result_name(response.RC)} '
            f'money {response.Amount}')
        values = {
            'interface': get_operation_name(request_operation),
            'result': get_result_name(response.RC),
        }
        if request_operation == banking_pb2.QUERY:
            values['money'] = response.Amount
        record['recv'].append(values)
    if record['recv']:
        # DEBUG
        #MyLog(logger, f'Writing JSON file on #{output_file}')
        with open(f'{output_file}', 'a') as outfile:
            json.dump(record, outfile)
            outfile.write('\n')
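# Each customer appends one JSON line to output_file with the shape built above.
# An illustrative example (the result strings depend on get_result_name(), so the
# exact values here are an assumption):
#
#   {"id": 1, "recv": [{"interface": "query", "result": "success", "money": 400},
#                      {"interface": "deposit", "result": "success"}]}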
def MsgDelivery(self, request, context):
    """
    Manages RPC calls coming into a branch from a customer or another branch.

    Args:
        self: Branch class
        request: gRPC class (the message)
        context: gRPC context

    Returns:
        MsgDeliveryResponse class (gRPC response object)
    """
    # with self.branch_lock:
    # Keep a copy of the requests
    self.recvMsg.append(request)
    balance_result = None
    response_result = None
    if request.D_ID == DO_NOT_PROPAGATE:
        CustomerText = 'another Branch'
    else:
        CustomerText = f'Customer {request.D_ID}'
    LogMessage = (
        f'[Branch {self.id}] Received request: ID {request.REQ_ID} from {CustomerText} - '
        f'Operation: {get_operation_name(request.OP)} - '
        f'Amount: {request.Amount}')
    if self.clock_events is not None:
        LogMessage += f' - Clock: {request.Clock}'
    MyLog(logger, LogMessage, self)

    if request.OP == banking_pb2.QUERY:
        if self.clock_events is not None:
            self.eventReceive(request.Clock)  # Event received from a customer
            self.eventExecute()               # Event is executed
        response_result, balance_result = self.Query()

    # If D_ID is DO_NOT_PROPAGATE, the request has arrived from another branch and must not
    # be spread further. Also, there is no need to propagate query operations in general.
    if request.OP == banking_pb2.DEPOSIT:
        if request.D_ID == DO_NOT_PROPAGATE:
            if self.clock_events is not None:
                self.propagateReceive(request.Clock)  # Event received from another branch
                self.register_event(request.REQ_ID, "deposit_broadcast_request")
                self.eventExecute()                   # Event is executed
                self.register_event(request.REQ_ID, "deposit_broadcast_execute")
        else:
            if self.clock_events is not None:
                self.eventReceive(request.Clock)      # Event received from a customer
                self.register_event(request.REQ_ID, "deposit_request")
                self.eventExecute()                   # Event is executed
                self.register_event(request.REQ_ID, "deposit_execute")
        response_result, balance_result = self.Deposit(request.Amount)

    if request.OP == banking_pb2.WITHDRAW:
        if request.D_ID == DO_NOT_PROPAGATE:
            if self.clock_events is not None:
                self.propagateReceive(request.Clock)  # Event received from another branch
                self.register_event(request.REQ_ID, "withdraw_broadcast_request")
                self.eventExecute()                   # Event is executed
                self.register_event(request.REQ_ID, "withdraw_broadcast_execute")
        else:
            if self.clock_events is not None:
                self.eventReceive(request.Clock)      # Event received from a customer
                self.register_event(request.REQ_ID, "withdraw_request")
                self.eventExecute()                   # Event is executed
                self.register_event(request.REQ_ID, "withdraw_execute")
        response_result, balance_result = self.Withdraw(request.Amount)

    LogMessage = (
        f'[Branch {self.id}] Operation executed: {get_operation_name(request.OP)} request ID {request.REQ_ID} - '
        f'Result: {get_result_name(response_result)} - '
        f'New balance: {balance_result}')
    if self.clock_events is not None:
        LogMessage += f' - Clock: {self.local_clock}'
    MyLog(logger, LogMessage, self)

    # If D_ID is DO_NOT_PROPAGATE, the request has arrived from another branch and must not
    # be spread further. Also, there is no need to propagate query operations in general.
    # Finally, only propagate if the operation has been successful.
    # if response_result == banking_pb2.SUCCESS:
    if request.D_ID == DO_NOT_PROPAGATE:
        if self.clock_events is not None:
            self.propagateReceive(request.Clock)  # Sets clock for propagation received
    else:
        if request.OP == banking_pb2.DEPOSIT:
            if self.clock_events is not None:
                self.propagateSend()  # Sets clock for further propagation
                #self.register_event(request.REQ_ID, "deposit_broadcast_execute")
            self.Propagate_Deposit(request.REQ_ID, request.Amount)
        if request.OP == banking_pb2.WITHDRAW:
            if self.clock_events is not None:
                self.propagateSend()  # Sets clock for further propagation
                #self.register_event(request.REQ_ID, "withdraw_broadcast_execute")
            self.Propagate_Withdraw(request.REQ_ID, request.Amount)

    self.eventResponse()  # Call eventResponse before replying
    response = banking_pb2.MsgDeliveryResponse(
        ID=request.REQ_ID,
        RC=response_result,
        Amount=balance_result,
        Clock=self.local_clock)

    if self.clock_events is not None:
        if request.OP == banking_pb2.DEPOSIT:
            if request.D_ID == DO_NOT_PROPAGATE:
                self.register_event(request.REQ_ID, "deposit_broadcast_response")
            else:
                self.register_event(request.REQ_ID, "deposit_response")
        if request.OP == banking_pb2.WITHDRAW:
            if request.D_ID == DO_NOT_PROPAGATE:
                self.register_event(request.REQ_ID, "withdraw_broadcast_response")
            else:
                self.register_event(request.REQ_ID, "withdraw_response")

    LogMessage = (
        f'[Branch {self.id}] Sent response to request ID {request.REQ_ID} back to {CustomerText} - '
        f'Result: {get_result_name(response_result)} - '
        f'New balance: {balance_result}')
    if self.clock_events is not None:
        LogMessage += f' - Clock: {self.local_clock}'
    MyLog(logger, LogMessage, self)

    LogMessage = (
        f'[Branch {self.id}] Event processing completed for request ID {request.REQ_ID} '
        f'replied back to {CustomerText} - '
        f'Result: {get_result_name(response_result)} - '
        f'New balance: {balance_result}')
    if self.clock_events is not None:
        LogMessage += f' - Clock: {self.local_clock}'
    MyLog(logger, LogMessage, self)

    if (sg == NotImplemented or not self.window) and SLEEP_SECONDS:
        # In command-line mode, wait a few seconds to allow propagations to finish
        MyLog(
            logger,
            f'[Main] *** Waiting for {SLEEP_SECONDS} seconds to allow finish propagations ***'
        )
        MyLog(
            logger,
            f'[Main] (Otherwise it will sometimes fail when the computer is slow)')
        time.sleep(SLEEP_SECONDS)

    return response
def Run_Branch(Branch, clock_file=None, want_windows=False, THREAD_CONCURRENCY=1):
    """
    Boots a server (branch) in a subprocess. If PySimpleGUI/TK are installed and
    mode 2 is requested, launches a window in the window manager.

    Args:
        Branch: Branch class
        clock_file: String, output file where to write the logical clock events.
                    If set to None, works as in Exercise 1 (gRPC) and does not use clocks.
        want_windows: Boolean, True if graphical windows are desired as UX
        THREAD_CONCURRENCY: Integer, number of concurrent threads

    Returns:
        None
    """
    if clock_file is None:
        MyLog(logger, f'[Branch {Branch.id}] Initialising @{Branch.bind_address}...')
    else:
        MyLog(
            logger,
            f'[Branch {Branch.id}] Initialising @{Branch.bind_address} with local clock {Branch.local_clock}...'
        )
    options = (('grpc.so_reuseport', 1),)
    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=THREAD_CONCURRENCY),
        options=options)
    banking_pb2_grpc.add_BankingServicer_to_server(Branch, server)

    if (sg != NotImplemented) and want_windows:
        if clock_file is None:
            layout = [
                [sg.Text(f"Balance: {Branch.balance}",
                         size=(40, 1), justification="left", key='-WINDOWTEXT-')],
                [sg.Output(size=(90, 15))],
                [sg.Button("Close", tooltip='Terminates Branch')],
            ]
        else:
            layout = [
                [sg.Text(f"Balance: {Branch.balance} - Local Clock: {Branch.local_clock}",
                         size=(40, 1), justification="left", key='-WINDOWTEXT-')],
                [sg.Output(size=(90, 15))],
                [sg.Button("Close", tooltip='Terminates Branch')],
            ]
        # Create the window
        sg.theme('Dark Blue 3')
        w, h = sg.Window.get_screen_size()
        Branch.window = sg.Window(
            f"Branch #{Branch.id} @Address {Branch.bind_address}",
            layout,
            location=(w / 2 + 50 * Branch.id, h / 5 * (Branch.id - 1) + 50))
        Branch.window.refresh()

    server.add_insecure_port(Branch.bind_address)
    server.start()

    if (sg == NotImplemented) or want_windows:
        MyLog(
            logger,
            f'[Branch {Branch.id}] *** Press CTRL+C to exit the process when finished ***'
        )
    Wait_Loop(Branch, want_windows)

    if (sg != NotImplemented) and want_windows:
        Branch.window.close()

    if Branch.clock_output:
        with open(f'{Branch.clock_output.name}', 'a') as outfile:
            if Branch.clock_events is not None:
                Branch.clock_events.sort(key=lambda item: item['clock'])
                record = {'pid': Branch.id, 'data': Branch.clock_events}
            else:
                record = {'pid': Branch.id, 'data': []}
            json.dump(record, outfile)
            outfile.write('\n')

    server.stop(None)
    MyLog(logger, f'[Branch {Branch.id}] Exiting Successfully.')
def Propagate_Withdraw(self, request_id, amount):
    """
    Implements the propagation of a withdraw to the other branches.

    Args:
        self: Branch class
        request_id: the request ID of the event
        amount: the amount to be withdrawn from the balance

    Returns:
        None
    """
    # with self.branch_lock:
    for stub in self.branchList:
        if self.id != stub[0]:
            # Do not propagate to itself (should not happen; added as a safety check)
            LogMessage = (
                f'[Branch {self.id}] Propagate {get_operation_name(banking_pb2.WITHDRAW)} request ID {request_id} '
                f'amount {amount} to Branch {stub[0]} @{stub[1]}')
            if self.clock_events is not None:  # Logical clock case
                LogMessage += f' with clock {self.local_clock}'
            MyLog(logger, LogMessage, self)
            try:
                msgStub = banking_pb2_grpc.BankingStub(
                    grpc.insecure_channel(stub[1]))
                if self.clock_events is not None:  # Logical clock case
                    response = msgStub.MsgDelivery(
                        banking_pb2.MsgDeliveryRequest(
                            REQ_ID=request_id,
                            OP=banking_pb2.WITHDRAW,
                            Amount=amount,
                            D_ID=DO_NOT_PROPAGATE,  # Sets DO_NOT_PROPAGATE for receiving branches
                            Clock=self.local_clock))
                else:
                    response = msgStub.MsgDelivery(
                        banking_pb2.MsgDeliveryRequest(
                            REQ_ID=request_id,
                            OP=banking_pb2.WITHDRAW,
                            Amount=amount,
                            D_ID=DO_NOT_PROPAGATE,  # Sets DO_NOT_PROPAGATE for receiving branches
                        ))
                LogMessage = (
                    f'[Branch {self.id}] received response to request ID {request_id} to Branch @{stub[1]} - '
                    f'Operation: {get_operation_name(banking_pb2.WITHDRAW)} - Result: {get_result_name(response.RC)} - '
                    f'New balance: {response.Amount}')
                if self.clock_events is not None:  # Logical clock case
                    LogMessage += f' - Clock: {response.Clock}'
                self.eventResponse()  # Call eventResponse
            except grpc.RpcError as rpc_error_call:
                code = rpc_error_call.code()
                details = rpc_error_call.details()
                if code.name == "UNAVAILABLE":
                    LogMessage = (
                        f'[Branch {self.id}] Error on request ID {request_id}: Branch {stub[0]} @{stub[1]} '
                        f'likely unavailable - Code: {code} - Details: {details}')
                else:
                    LogMessage = (
                        f'[Branch {self.id}] Error on request ID {request_id}: Code: {code} - Details: {details}')
            MyLog(logger, LogMessage, self)
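# Propagate_Deposit() is called from MsgDelivery() but is not shown in this excerpt.
# It is assumed to mirror Propagate_Withdraw() with the DEPOSIT operation; a condensed
# sketch under that assumption:

def Propagate_Deposit(self, request_id, amount):
    """Sketch: propagate a deposit to every other branch (mirrors Propagate_Withdraw)."""
    for stub in self.branchList:
        if self.id == stub[0]:
            continue  # never propagate to itself
        try:
            msgStub = banking_pb2_grpc.BankingStub(grpc.insecure_channel(stub[1]))
            msgStub.MsgDelivery(
                banking_pb2.MsgDeliveryRequest(
                    REQ_ID=request_id,
                    OP=banking_pb2.DEPOSIT,
                    Amount=amount,
                    D_ID=DO_NOT_PROPAGATE,  # receiving branches must not propagate further
                    Clock=self.local_clock if self.clock_events is not None else 0))
            self.eventResponse()
        except grpc.RpcError as rpc_error_call:
            MyLog(
                logger,
                f'[Branch {self.id}] Error propagating deposit {request_id} to Branch {stub[0]}: '
                f'{rpc_error_call.code()} - {rpc_error_call.details()}', self)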
def executeEvents(self, output_file, want_windows=False):
    """
    Executes the customer's events against its branch. If PySimpleGUI/TK are installed
    and mode 2 is requested, output also goes to the customer's window.

    Args:
        self: Customer class
        output_file: Logging file where to save the customer's events
        want_windows: Boolean, True if graphical windows are desired as UX

    Returns:
        None
    """
    record = {'id': self.id, 'recv': []}
    for event in self.events:
        request_id = event['id']
        request_operation = get_operation(event['interface'])
        request_amount = event['money']
        try:
            LogMessage = (
                f'[Customer {self.id}] Executing request: ID {request_id} against Branch - '
                f'Operation: {get_operation_name(request_operation)} - '
                f'Amount: {request_amount}')
            MyLog(logger, LogMessage, self)
            # The customer's clock is not used in the gRPC and Lamport's algorithm exercises,
            # but will likely be used in the client-consistency exercise.
            # In the logical clock assignment (Lamport's algorithm) it is checked, but the
            # configuration file for customers does not allow setting it anyway.
            response = self.stub.MsgDelivery(
                banking_pb2.MsgDeliveryRequest(
                    REQ_ID=request_id,
                    OP=request_operation,
                    Amount=request_amount,
                    D_ID=self.id,
                    Clock=0))
            LogMessage = (
                f'[Customer {self.id}] Received response to request ID {request_id} from Branch - '
                f'Operation: {get_operation_name(request_operation)} - Result: {get_result_name(response.RC)} - '
                f'New balance: {response.Amount}')
            values = {
                'interface': get_operation_name(request_operation),
                'result': get_result_name(response.RC),
            }
            MyLog(logger, LogMessage, self)
            if request_operation == banking_pb2.QUERY:
                values['money'] = response.Amount
            record['recv'].append(values)
            if record['recv']:
                # DEBUG
                #MyLog(logger, f'Writing JSON file on #{output_file}')
                with open(f'{output_file}', 'a') as outfile:
                    json.dump(record, outfile)
                    outfile.write('\n')
        except grpc.RpcError as rpc_error_call:
            code = rpc_error_call.code()
            details = rpc_error_call.details()
            if code.name == "UNAVAILABLE":
                LogMessage = (
                    f'[Customer {self.id}] Error on Request #{request_id}: Branch {self.id} '
                    f'likely unavailable - Code: {code} - Details: {details}')
            else:
                LogMessage = (
                    f'[Customer {self.id}] Error on Request #{request_id}: Code: {code} - Details: {details}')
            MyLog(logger, LogMessage, self)
def executeEvents(self, output_file, balance_file, want_windows=False):
    """
    Executes the customer's events against the branches. If PySimpleGUI/TK are installed
    and mode 2 is requested, output also goes to the customer's window.

    Args:
        self: Customer class
        output_file: Logging file where to save the customer's events
        balance_file: Output file where to save the customer's final balance
        want_windows: Boolean, True if graphical windows are desired as UX

    Returns:
        None
    """
    record = {'id': self.id, 'recv': []}
    for event in self.events:
        try:
            request_id = event['id']          # In case of client-consistency
        except KeyError:
            request_id = self.id
        request_operation = get_operation(event['interface'])
        try:
            request_amount = event['money']
        except KeyError:
            request_amount = 0                # In case of a query
        try:
            request_dest = event['dest']      # In case of client-consistency
        except KeyError:
            request_dest = self.id
        try:
            if request_operation == banking_pb2.QUERY:
                request_amount_string = ""
            else:
                request_amount_string = str(event['money'])
            LogMessage = (
                f'[Customer {self.id}] ID {request_id}: '
                f'{get_operation_name(request_operation)} {request_amount_string}'
                f' -> Branch {request_dest}')
            MyLog(logger, LogMessage, self)

            # Find the right Branch to send the request to
            msgStubClient = None
            for curr_branch in self.branchList:
                if request_dest == curr_branch[0]:
                    msgStubClient = banking_pb2_grpc.BankingStub(
                        grpc.insecure_channel(curr_branch[1]))
                    break
            if msgStubClient is None:
                LogMessage = (
                    f'[Customer {self.id}] Error on ID {request_id}: '
                    f'Branch {request_dest} not found in the list. Not executed.')
                MyLog(logger, LogMessage, self)
            else:
                progr_request_id = request_id
                if request_operation != banking_pb2.QUERY:
                    # First of all, request a WriteID from the Branch - if not a query.
                    # This is used to enforce "Monotonic Writes" and "Read Your Writes"
                    # client-centric consistency.
                    wid_response = msgStubClient.RequestWriteSet(
                        banking_pb2.WriteSetRequest(
                            S_ID=self.id,
                            LAST_ID=request_id,
                            Clock=0))
                    self.writeSets.append(
                        WriteSet(self.id, wid_response.ProgrID, False))
                    progr_request_id = wid_response.ProgrID

                # Execute the client's request.
                # The customer's clock is not used, but could be a future expansion.
                # In the logical clock assignment (Lamport's algorithm) it is checked, but
                # the configuration file for customers does not allow setting it anyway.
                response = msgStubClient.MsgDelivery(
                    banking_pb2.MsgDeliveryRequest(
                        REQ_ID=request_id,
                        OP=request_operation,
                        Amount=request_amount,
                        S_TYPE=banking_pb2.CUSTOMER,  # Source Type = Customer
                        S_ID=self.id,                 # Source ID
                        D_ID=request_dest,
                        Clock=0,
                        ProgrID=progr_request_id))
                if request_operation != banking_pb2.QUERY:
                    set_found = Set_WriteSet_Executed(self, self.id, progr_request_id)
                    response_amount_string = f"New balance: {response.Amount}"
                else:
                    response_amount_string = f"Balance: {response.Amount}"
                    # Output the final balance for the customer from the last query
                    with open(f'{balance_file}', 'a') as outfileb:
                        brecord = {'id': self.id, 'balance': response.Amount}
                        json.dump(brecord, outfileb)
                        #outfileb.write('\n')
                LogMessage = (
                    f'[Customer {self.id}] ID {request_id}: {get_result_name(response.RC)} <- Branch {request_dest} - '
                    f'{response_amount_string}')
                MyLog(logger, LogMessage, self)
                values = {
                    'interface': get_operation_name(request_operation),
                    'result': get_result_name(response.RC),
                }
                if request_operation == banking_pb2.QUERY:
                    values['money'] = response.Amount
                record['recv'].append(values)
                if record['recv']:
                    # DEBUG
                    #MyLog(logger, f'Writing JSON file on #{output_file}')
                    with open(f'{output_file}', 'a') as outfile:
                        json.dump(record, outfile)
                        outfile.write('\n')
        except grpc.RpcError as rpc_error_call:
            code = rpc_error_call.code()
            details = rpc_error_call.details()
            if code.name == "UNAVAILABLE":
                LogMessage = (
                    f'[Customer {self.id}] Error on Request #{request_id}: Branch {self.id} '
                    f'likely unavailable - Code: {code} - Details: {details}')
            else:
                LogMessage = (
                    f'[Customer {self.id}] Error on Request #{request_id}: Code: {code} - Details: {details}')
            MyLog(logger, LogMessage, self)
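# WriteSet and Set_WriteSet_Executed() are referenced above but not defined in this
# excerpt. A minimal sketch of what they are assumed to look like, given how they are
# used (a per-customer record of issued progressive write IDs, marked executed once the
# branch confirms the operation):

class WriteSet:
    """One write issued by a customer: (customer id, progressive write id, executed?)."""
    def __init__(self, customer_id, progr_id, executed=False):
        self.customer_id = customer_id
        self.progr_id = progr_id
        self.executed = executed

def Set_WriteSet_Executed(customer, customer_id, progr_id):
    """Mark the matching write-set entry as executed; return True if it was found."""
    for ws in customer.writeSets:
        if ws.customer_id == customer_id and ws.progr_id == progr_id:
            ws.executed = True
            return True
    return False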
def main():
    """Main function."""
    MyLog(logger, f'[Main] *** Processing Arguments ***')
    input_file, output_file, clock_file, balance_file, _sg_windows, _pretty_json = Process_Args()
    if not input_file:
        input_file = 'input.json'
    if not output_file:
        output_file = 'output.json'
    if not balance_file:
        balance_file = 'balance.json'
    branches_list = list()
    customers_list = list()
    PRETTY_JSON = _pretty_json

    MyLog(logger, f'[Main] *** Processing Input File ***')
    Load_Input_File(input_file, branches_list, customers_list)

    branches_addresses_ids = []
    workers = list()

    # Spawn processes for branches
    #
    # NOTE: It is imperative that the worker subprocesses be forked before
    # any gRPC servers start up. See
    # https://github.com/grpc/grpc/issues/16001 for more details.
    MyLog(logger, f'[Main] *** Starting Processes for Servers/Branches ***')

    # Reserve the addresses for Branches
    for curr_branch in branches_list:
        curr_port = Reserve_Port()  # Assign a port to each branch
        curr_branch.bind_address = '[::]:{}'.format(curr_port)
        # Set the clock events' list and local clock
        if not clock_file:
            curr_branch.clock_events = None
            curr_branch.clock_output = None
        else:
            # Create temporary files to store the branches' events. To be improved in
            # future versions using multiprocessing.Manager.
            curr_branch.clock_events = list()
            curr_branch.clock_output = tempfile.NamedTemporaryFile(mode='w+', delete=False)
            curr_branch.clock_output.write('\n')
            curr_branch.clock_output.close()
            MyLog(
                logger,
                f'[Main] Temporary file \"{curr_branch.clock_output.name}\" for branch {curr_branch.id}'
            )
        # Save the branch bind address so that customers and other branches can find it
        branches_addresses_ids.append([curr_branch.id, curr_branch.bind_address])

    # Copy the list of all the branches and addresses to every Branch
    for curr_branch in branches_list:
        curr_branch.branchList = branches_addresses_ids[:]

    for curr_branch in branches_list:
        worker = multiprocessing.Process(
            name=f'Branch-{curr_branch.id}',
            target=Run_Branch,
            args=(curr_branch, clock_file, _sg_windows, THREAD_CONCURRENCY))
        workers.append(worker)
        worker.start()
        MyLog(
            logger,
            f'[Main] Booting branch \"{worker.name}\" on initial balance {curr_branch.balance}, '
            f'with PID {worker.pid} at address {curr_branch.bind_address} successfully')

    # Spawn processes for customers
    #
    # We spawn a process for each customer, which in turn executes its events via its stub
    # and communicates with the respective server's process.
    # We need to pass the bound address of the matching server to the Customer class
    # or it won't be able to determine it.
    MyLog(logger, f'[Main] *** Starting Processes for Clients/Customers ***')
    for curr_customer in customers_list:
        # DEBUG
        #LOGGER.info(f'Processing Customer #{curr_customer.id} with Events:')
        #for e in curr_customer.events:
        #    LOGGER.info(f'  #{e["id"]} = {e["interface"]}, {e["money"]}')
        #sys.stdout.flush()

        # Find the bind_address of the Branch for the current Customer and pass it to the
        # Customer class. This is used in the first two assignments. In the client-centric
        # consistency assignment, the Customers need to be able to address all of the Branches.
        for i in range(len(branches_addresses_ids)):
            if branches_addresses_ids[i][0] == curr_customer.id:
                Branch_address = branches_addresses_ids[i][1]
                break
        # Copy the list of all the branches and addresses to the Customer
        curr_customer.branchList = branches_addresses_ids[:]

        worker = multiprocessing.Process(
            name=f'Customer-{curr_customer.id}',
            target=Customer.Run_Customer,
            args=(curr_customer, Branch_address, output_file, balance_file,
                  _sg_windows, THREAD_CONCURRENCY))
        if (sg == NotImplemented or not _sg_windows) and SLEEP_SECONDS:
            # Wait some seconds before initialising the clients, to give the servers time to start
            MyLog(
                logger,
                f'[Main] *** Waiting for {SLEEP_SECONDS} seconds before starting the clients ***')
            MyLog(
                logger,
                f'[Main] (Otherwise it will sometimes fail when the computer is slow)')
            time.sleep(SLEEP_SECONDS)
        workers.append(worker)
        worker.start()
        MyLog(
            logger,
            f'[Main] Started Customer \"{worker.name}\" with PID {worker.pid} successfully.')

    try:
        for worker in workers:
            worker.join()
    except KeyboardInterrupt:
        MyLog(logger, "[Main] CTRL+C requested")

    # Write the output file in the case of the Logical Clock exercise
    if clock_file:
        records = []
        total_records = []
        # Iterate through all branches' temporary files and load their events
        for curr_branch in branches_list:
            if curr_branch.clock_output:
                with open(f'{curr_branch.clock_output.name}', 'r') as infile:
                    records = json.load(infile)
                total_records.append(records)
                os.remove(curr_branch.clock_output.name)
        with open(f'{clock_file}', 'w+') as outfile:
            # Write the events ordered by branch/clock
            if PRETTY_JSON:
                json.dump(total_records, outfile, indent=2)
            else:
                json.dump(total_records, outfile)
            outfile.write('\n')

            # Write the events ordered by event ID/clock
            events = []
            for curr_record in total_records:
                for event in curr_record['data']:
                    events.append(event)
            events.sort(key=lambda x: x['clock'])
            events.sort(key=lambda x: x['id'])

            # Probably not very Pythonic, but I have given my best :-P (and it works)
            curr_event_id = -1
            curr_new_event = -1
            new_events = []
            for curr_record in events:
                if curr_record['id'] != curr_event_id:
                    curr_event_id = curr_record['id']
                    curr_new_event += 1
                    new_events.append("eventid:")
                    new_events.append(curr_event_id)
                    new_events.append("data")
                new_events.append({
                    'clock': curr_record['clock'],
                    'name': curr_record['name'],
                })

            # Dump the events into the JSON file, one Event ID at a time
            curr_event_id = -1
            curr_new_event = -1
            event_dict = {"eventid": int}
            for curr_record in events:
                if curr_record['id'] != curr_event_id:
                    if PRETTY_JSON:
                        if curr_event_id >= 0:
                            json.dump(event_dict, outfile, indent=2)
                    else:
                        if curr_event_id >= 0:
                            json.dump(event_dict, outfile)
                    curr_event_id = curr_record['id']
                    curr_new_event += 1
                    event_dict["eventid"] = curr_event_id
                    event_dict["data"] = []
                event_dict["data"].append({
                    'clock': curr_record['clock'],
                    'name': curr_record['name'],
                })
            try:
                if PRETTY_JSON:
                    if any(event_dict.get('data')):
                        json.dump(event_dict, outfile, indent=2)
                else:
                    if any(event_dict.get('data')):
                        json.dump(event_dict, outfile)
            except TypeError:
                pass

    # Just in case
    for worker in workers:
        worker.terminate()
    MyLog(logger, f'[Main] Program Ended successfully.')
event_dict["eventid"] = curr_event_id event_dict["data"] = [] event_dict["data"].append({ 'clock': curr_record['clock'], 'name': curr_record['name'], }) try: if (PRETTY_JSON): if any((event_dict.get('data'))): json.dump(event_dict, outfile, indent=2) else: if any((event_dict.get('data'))): json.dump(event_dict, outfile) except TypeError: pass outfile.close() # Just in case for worker in workers: worker.terminate() MyLog(logger, f'[Main] Program Ended successfully.') logger = setup_logger("Main") if __name__ == '__main__': MyLog(logger, "[Main] Logger initialised") main()
def main():
    """Main function."""
    MyLog(logger, f'*** Processing Arguments ***')
    input_file, output_file = Process_Args()
    if not input_file:
        input_file = 'input.json'
    if not output_file:
        output_file = 'output.json'
    branches = list()
    customers = list()

    MyLog(logger, f'*** Processing Input File ***')
    Load_Input_File(input_file, branches, customers)

    branches_addresses_ids = []
    workers = list()

    # Spawn processes for branches
    #
    # NOTE: It is imperative that the worker subprocesses be forked before
    # any gRPC servers start up. See
    # https://github.com/grpc/grpc/issues/16001 for more details.
    MyLog(logger, f'*** Starting Processes for Servers/Branches ***')
    for curr_branch in branches:
        # with Reserve_Port() as curr_port:
        curr_port = Reserve_Port()
        bind_address = '[::]:{}'.format(curr_port)
        # DEBUG
        #MyLog(logger, f'Reserved {bind_address} for Branch #{curr_branch.id}...')
        #sys.stdout.flush()
        worker = multiprocessing.Process(
            name=f'Branch-{curr_branch.id}',
            target=Run_Branch,
            args=(curr_branch, bind_address, THREAD_CONCURRENCY))
        worker.start()
        workers.append(worker)
        # Save the branch bind address so that customers and other branches can find it
        branches_addresses_ids.append([curr_branch.id, bind_address])
        MyLog(
            logger,
            f'Started branch \"{worker.name}\" on initial balance {curr_branch.balance}, '
            f'with PID {worker.pid} at address {bind_address} successfully')

    # Wait some seconds before initialising the clients, to give the servers time to start
    MyLog(
        logger,
        f'*** Waiting for {SLEEP_SECONDS} seconds before starting the clients ***')
    MyLog(logger, f'  (Otherwise it will sometimes fail when the computer is slow)')
    time.sleep(SLEEP_SECONDS)

    # Spawn processes for customers
    #
    # We spawn a process for each customer, which in turn executes its events via its stub
    # and communicates with the respective server's process.
    # We need to pass the bound address of the matching server to the Customer class
    # or it won't be able to determine it.
    MyLog(logger, f'*** Starting Processes for Clients/Customers ***')
    for curr_customer in customers:
        # DEBUG
        #LOGGER.info(f'Processing Customer #{curr_customer.id} with Events:')
        #for e in curr_customer.events:
        #    LOGGER.info(f'  #{e["id"]} = {e["interface"]}, {e["money"]}')
        #sys.stdout.flush()

        # Find the bind_address of the Branch for the current Customer and pass it to the Customer class
        for i in range(len(branches_addresses_ids)):
            if branches_addresses_ids[i][0] == curr_customer.id:
                Branch_address = branches_addresses_ids[i][1]
                break
        worker = multiprocessing.Process(
            name=f'Customer-{curr_customer.id}',
            target=Customer.Run_Customer,
            args=(curr_customer, Branch_address, output_file, THREAD_CONCURRENCY))
        worker.start()
        workers.append(worker)
        MyLog(
            logger,
            f'Started customer \"{worker.name}\" with PID {worker.pid} successfully')

    for worker in workers:
        worker.join()
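# Reserve_Port() is used above but not defined in this excerpt. A minimal sketch of what
# it is assumed to do: bind an IPv6 socket with SO_REUSEPORT to port 0 so the OS picks a
# free port, and return that port number. The socket is intentionally left open so the
# port stays reserved until the branch's gRPC server, started with the matching
# ('grpc.so_reuseport', 1) option, binds the same port in its own process.
import socket

def Reserve_Port():
    sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    sock.bind(('', 0))
    return sock.getsockname()[1]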
logger = setup_logger("Main")

if __name__ == '__main__':
    MyLog(logger, "Logger initialised")
    main()