def client_scheduler(queues: 'QueueDict', gateway: 'Gateway', url_token_private_key: 'X448PrivateKey', unit_test: bool = False) -> None:
    """Manage `client` processes.

    Spawns/terminates per-contact `client` processes based on commands
    read from CONTACT_MGMT_QUEUE. Before serving commands, blocks until
    the `onion_service` process has delivered the Tor port and the
    user's own Onion address via TOR_DATA_QUEUE.
    """
    # Maps each contact's Onion Service public key to its client process.
    proc_dict = dict()  # type: Dict[bytes, Process]

    # Wait for Tor port from `onion_service` process.
    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while queues[TOR_DATA_QUEUE].qsize() == 0:
                time.sleep(0.1)
            tor_port, onion_addr_user = queues[TOR_DATA_QUEUE].get()
            break

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            # Poll for the next contact-management command.
            while queues[CONTACT_MGMT_QUEUE].qsize() == 0:
                time.sleep(0.1)

            command, ser_public_keys, is_existing_contact = queues[CONTACT_MGMT_QUEUE].get()

            # The serialized field packs fixed-length public keys back to back.
            onion_pub_keys = split_byte_string(ser_public_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)

            if command == RP_ADD_CONTACT_HEADER:
                add_new_client_process(gateway, is_existing_contact, onion_addr_user, onion_pub_keys,
                                       proc_dict, queues, tor_port, url_token_private_key)

            elif command == RP_REMOVE_CONTACT_HEADER:
                remove_client_process(onion_pub_keys, proc_dict)

            # Unit tests signal exit via UNIT_TEST_QUEUE.
            if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0:
                break
def client_scheduler(queues: 'QueueDict', gateway: 'Gateway', url_token_private_key: X448PrivateKey, unittest: bool = False) -> None:
    """Manage `client` processes.

    Starts one `client` process per added contact and terminates the
    process when the contact is removed. Blocks at startup until the
    `onion_service` process delivers the Tor port and the user's own
    Onion address.
    """
    # Maps each contact's Onion Service public key to its client process.
    proc_dict = dict()  # type: Dict[bytes, Process]

    # Wait for Tor port from `onion_service` process.
    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while queues[TOR_DATA_QUEUE].qsize() == 0:
                time.sleep(0.1)
            tor_port, onion_addr_user = queues[TOR_DATA_QUEUE].get()
            break

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while queues[CONTACT_KEY_QUEUE].qsize() == 0:
                time.sleep(0.1)
            command, ser_public_keys, is_existing_contact = queues[CONTACT_KEY_QUEUE].get()

            # The serialized field packs fixed-length public keys back to back.
            onion_pub_keys = split_byte_string(ser_public_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)

            if command == RP_ADD_CONTACT_HEADER:
                for onion_pub_key in onion_pub_keys:
                    if onion_pub_key not in proc_dict:
                        # For existing contacts the user's own address is blanked:
                        # `client` only sends a contact request when it is non-empty.
                        onion_addr_user = '' if is_existing_contact else onion_addr_user
                        proc_dict[onion_pub_key] = Process(target=client,
                                                           args=(onion_pub_key, queues, url_token_private_key,
                                                                 tor_port, gateway, onion_addr_user))
                        proc_dict[onion_pub_key].start()

            elif command == RP_REMOVE_CONTACT_HEADER:
                for onion_pub_key in onion_pub_keys:
                    if onion_pub_key in proc_dict:
                        process = proc_dict[onion_pub_key]  # type: Process
                        process.terminate()
                        proc_dict.pop(onion_pub_key)
                        rp_print(f"Removed {pub_key_to_short_address(onion_pub_key)}", bold=True)

            # Unit tests signal exit via UNITTEST_QUEUE.
            if unittest and queues[UNITTEST_QUEUE].qsize() != 0:
                break
def pub_key_checker(queues: 'QueueDict', local_test: bool, unit_test: bool = False) -> None:
    """\
    Display diffs between received public keys and public keys
    manually imported to Source Computer.
    """
    check_queue = queues[PUB_KEY_CHECK_QUEUE]
    send_queue = queues[PUB_KEY_SEND_QUEUE]
    known_keys = dict()  # Account -> Base58-encoded true public key

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            # Drain sent keys first so the dictionary is up to date
            # before any purported key is compared against it.
            if send_queue.qsize() != 0:
                account, pub_key = send_queue.get()
                known_keys[account] = b58encode(pub_key, public_key=True)
                continue

            if check_queue.qsize() != 0:
                purp_account, purp_pub_key = check_queue.get()  # type: bytes, bytes
                if purp_account in known_keys:
                    show_value_diffs("public key",
                                     known_keys[purp_account],
                                     purp_pub_key.decode(),
                                     local_test)

            time.sleep(0.01)

            if unit_test:
                break
def receiver_loop(queues: Dict[bytes, 'Queue'], gateway: 'Gateway', unittest: bool = False) -> None:
    """Decode received packets and forward them to packet queues."""
    source_queue = queues[GATEWAY_QUEUE]

    # Headers whose payloads are forwarded to a matching packet queue.
    forwarded_headers = (MESSAGE_DATAGRAM_HEADER,
                         FILE_DATAGRAM_HEADER,
                         COMMAND_DATAGRAM_HEADER,
                         LOCAL_KEY_DATAGRAM_HEADER)

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            if source_queue.qsize() == 0:
                time.sleep(0.01)
            _, packet = source_queue.get()

            # Drop packets whose transmission-error check fails.
            try:
                packet = gateway.detect_errors(packet)
            except FunctionReturn:
                continue

            header, ts_bytes, payload = separate_headers(packet, [DATAGRAM_HEADER_LENGTH,
                                                                  DATAGRAM_TIMESTAMP_LENGTH])

            try:
                ts = datetime.strptime(str(bytes_to_int(ts_bytes)), "%Y%m%d%H%M%S%f")
            except (ValueError, struct.error):
                m_print("Error: Failed to decode timestamp in the received packet.", head=1, tail=1)
                continue

            if header in forwarded_headers:
                queues[header].put((ts, payload))

            if unittest:
                break
def g_msg_manager(queues: 'QueueDict', unit_test: bool = False) -> None:
    """Show group management messages according to contact list state.

    Tracks the set of contacts that currently have a `client` process.
    When a group management message arrives from a contact, its members
    are displayed split into "known contacts" (existing) and "unknown
    contacts" (non-existing).
    """
    known_contacts = []  # type: List[bytes]
    management_queue = queues[GROUP_MGMT_QUEUE]
    message_queue = queues[GROUP_MSG_QUEUE]

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while message_queue.qsize() == 0:
                time.sleep(0.01)

            header, payload, trunc_addr = message_queue.get()
            group_id, data = separate_header(payload, GROUP_ID_LENGTH)

            # Drop messages too short to contain a full group ID.
            if len(group_id) != GROUP_ID_LENGTH:
                continue

            known_contacts = update_list_of_existing_contacts(management_queue, known_contacts)
            process_group_management_message(data, known_contacts, b58encode(group_id), header, trunc_addr)

            if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0:
                break
def check_kernel_entropy() -> None:
    """Wait until the kernel CSPRNG is sufficiently seeded.

    Blocks until the kernel's `entropy_avail` file reports at least
    ENTROPY_THRESHOLD (512) bits of entropy. This ensures the ChaCha20
    CSPRNG is fully seeded (i.e., it has its maximum of 384 bits of
    entropy) when it generates keys. The same threshold is used by the
    GETRANDOM syscall in random.c:

        #define CRNG_INIT_CNT_THRESH (2*CHACHA20_KEY_SIZE)

    For more information on the kernel CSPRNG threshold, see
        https://security.stackexchange.com/a/175771/123524
        https://crypto.stackexchange.com/a/56377
    """
    message = "Waiting for kernel CSPRNG entropy pool to fill up"
    phase(message, head=1)

    available = 0
    while available < ENTROPY_THRESHOLD:
        with ignored(EOFError, KeyboardInterrupt):
            with open('/proc/sys/kernel/random/entropy_avail') as f:
                available = int(f.read().strip())

            # Show a live counter that overwrites itself each round.
            m_print(f"{available}/{ENTROPY_THRESHOLD}")
            print_on_previous_line(delay=0.1)

    print_on_previous_line()
    phase(message)
    phase(DONE)
def noise_loop(queues: 'QueueDict', contact_list: Optional['ContactList'] = None, unittest: bool = False) -> None:
    """Generate noise packets for traffic masking.

    Keeps the noise packet / noise command queue topped up with noise
    assembly packets so the sender loop never runs dry.
    """
    # This flag is ignored: settings.log_file_masking controls logging of noise packets.
    log_messages = True
    log_as_ph = True

    header = C_N_HEADER if contact_list is None else P_N_HEADER
    payload = header + bytes(PADDING_LENGTH)

    if contact_list is None:
        # Noise command
        queue = queues[TM_NOISE_COMMAND_QUEUE]
        content = payload  # type: Union[bytes, Tuple[bytes, bool, bool]]
    else:
        # Noise packet
        queue = queues[TM_NOISE_PACKET_QUEUE]
        content = (payload, log_messages, log_as_ph)

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            # Refill up to the buffer size, then idle briefly.
            while queue.qsize() < NOISE_PACKET_BUFFER:
                queue.put(content)
            time.sleep(0.1)

            if unittest:
                break
def tx_loop(io_queue: 'Queue[Any]', output_socket: int, argv: str, unit_test: bool = False) -> None:
    """Send queued datagrams to a receiving program.

    io_queue      -- queue through which to push datagrams through
    output_socket -- socket number for the Relay/Receiver Program
    argv          -- arguments for the simulator position/orientation
    unit_test     -- break out from the loop during unit testing
    """
    draw_frame(argv, IDLE)

    # Retry until the receiving program starts listening on the socket.
    interface = None
    while interface is None:
        try:
            interface = multiprocessing.connection.Client((LOCALHOST, output_socket))
        except socket.error:
            time.sleep(0.01)

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while io_queue.qsize() == 0:
                time.sleep(0.01)
            animate(argv)
            interface.send(io_queue.get())

            if unit_test:
                break
def group_rm_group(group_name: str, group_list: 'GroupList', settings: 'Settings', queues: Dict[bytes, 'Queue'], master_key: 'MasterKey'):
    """Remove group with its members.

    Always raises FunctionReturn to report the outcome to the caller.
    NOTE(review): removal commands are queued to RxM *before* checking
    whether TxM itself has the group — presumably so a group existing
    only on RxM can still be removed; confirm against upstream intent.
    """
    if not yes(f"Remove group '{group_name}'?", head=1):
        raise FunctionReturn("Group removal aborted.")

    rm_logs = yes("Also remove logs for the group?", head=1)

    # Tell RxM to delete the group.
    command = GROUP_DELETE_HEADER + group_name.encode()
    queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])

    if rm_logs:
        # Tell RxM to delete logs, then remove local logs (best-effort).
        command = LOG_REMOVE_HEADER + group_name.encode()
        queue_command(command, settings, queues[COMMAND_PACKET_QUEUE])
        with ignored(FunctionReturn):
            remove_logs(group_name, settings, master_key)

    if group_name not in group_list.get_list_of_group_names():
        raise FunctionReturn(f"TxM has no group '{group_name}' to remove.")

    group = group_list.get_group(group_name)
    if group.has_members() and yes("Notify members about leaving the group?"):
        # Send the exit-group notification to each member individually.
        for member in group:
            queue_message(user_input=UserInput(group_name, MESSAGE),
                          window=MockWindow(member.rx_account, [member]),
                          settings=settings,
                          m_queue=queues[MESSAGE_PACKET_QUEUE],
                          header=GROUP_MSG_EXIT_GROUP_HEADER,
                          log_as_ph=True)

    group_list.remove_group(group_name)
    raise FunctionReturn(f"Removed group '{group_name}'")
def src_incoming(queues: 'QueueDict', gateway: 'Gateway', unittest: bool = False) -> None:
    """\
    Redirect messages received from Source Computer to appropriate queues.

    Dispatches each datagram by its one-byte header: relay commands stay
    local, commands/local keys go to the Destination Computer, messages
    and public keys go to both Flask (for contacts) and the Destination
    Computer, and file/group-management packets fan out to contacts.
    """
    packets_from_sc = queues[GATEWAY_QUEUE]
    packets_to_dc = queues[DST_MESSAGE_QUEUE]
    commands_to_dc = queues[DST_COMMAND_QUEUE]
    messages_to_flask = queues[M_TO_FLASK_QUEUE]
    files_to_flask = queues[F_TO_FLASK_QUEUE]
    commands_to_relay = queues[SRC_TO_RELAY_QUEUE]

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while packets_from_sc.qsize() == 0:
                time.sleep(0.01)
            ts, packet = packets_from_sc.get()  # type: datetime, bytes
            # Re-encode arrival time (truncated to centiseconds) for forwarding.
            ts_bytes = int_to_bytes(int(ts.strftime('%Y%m%d%H%M%S%f')[:-4]))

            # Drop packets whose transmission-error check fails.
            try:
                packet = gateway.detect_errors(packet)
            except FunctionReturn:
                continue

            header, packet = separate_header(packet, DATAGRAM_HEADER_LENGTH)

            if header == UNENCRYPTED_DATAGRAM_HEADER:
                commands_to_relay.put(packet)

            elif header in [COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]:
                commands_to_dc.put(header + ts_bytes + packet)
                p_type = 'Command ' if header == COMMAND_DATAGRAM_HEADER else 'Local key'
                rp_print(f"{p_type} to local Receiver", ts)

            elif header in [MESSAGE_DATAGRAM_HEADER, PUBLIC_KEY_DATAGRAM_HEADER]:
                onion_pub_key, payload = separate_header(packet, ONION_SERVICE_PUBLIC_KEY_LENGTH)
                packet_str = header.decode() + b85encode(payload)
                queue_to_flask(packet_str, onion_pub_key, messages_to_flask, ts, header)
                # Copy of user's own message is also delivered to the local Receiver.
                if header == MESSAGE_DATAGRAM_HEADER:
                    packets_to_dc.put(header + ts_bytes + onion_pub_key + ORIGIN_USER_HEADER + payload)

            elif header == FILE_DATAGRAM_HEADER:
                # Layout: recipient count || serialized recipient keys || file ciphertext.
                no_contacts_b, payload = separate_header(packet, ENCODED_INTEGER_LENGTH)
                no_contacts = bytes_to_int(no_contacts_b)
                ser_accounts, file_ct = separate_header(payload, no_contacts * ONION_SERVICE_PUBLIC_KEY_LENGTH)
                pub_keys = split_byte_string(ser_accounts, item_len=ONION_SERVICE_PUBLIC_KEY_LENGTH)
                for onion_pub_key in pub_keys:
                    queue_to_flask(file_ct, onion_pub_key, files_to_flask, ts, header)

            elif header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
                            GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
                            GROUP_MSG_EXIT_GROUP_HEADER]:
                process_group_management_message(ts, packet, header, messages_to_flask)

            if unittest:
                break
def test_ignored_contextmanager(self):
    interrupted = False
    try:
        with ignored(KeyboardInterrupt):
            TestIgnored.func()
    except KeyboardInterrupt:
        interrupted = True
    # `ignored` must have suppressed the KeyboardInterrupt raised by func().
    self.assertFalse(interrupted)
def remove_contact(user_input: 'UserInput', window: 'TxWindow', contact_list: 'ContactList', group_list: 'GroupList', settings: 'Settings', queues: Dict[bytes, 'Queue'], master_key: 'MasterKey') -> None:
    """Remove contact on TxM/RxM.

    Queues the removal command (and optional log-removal command) to
    RxM, purges the contact's keys and database entry locally, removes
    the contact from any groups, and deselects the active window when
    it referred to the removed contact.
    """
    if settings.session_traffic_masking:
        raise FunctionReturn("Error: Command is disabled during traffic masking.")

    try:
        selection = user_input.plaintext.split()[1]
    except IndexError:
        raise FunctionReturn("Error: No account specified.")

    if not yes(f"Remove {selection} completely?", head=1):
        raise FunctionReturn("Removal of contact aborted.")

    rm_logs = yes(f"Also remove logs for {selection}?", head=1)

    # Load account if selector was nick
    if selection in contact_list.get_list_of_nicks():
        selection = contact_list.get_contact(selection).rx_account

    packet = CONTACT_REMOVE_HEADER + selection.encode()
    queue_command(packet, settings, queues[COMMAND_PACKET_QUEUE])

    if rm_logs:
        packet = LOG_REMOVE_HEADER + selection.encode()
        queue_command(packet, settings, queues[COMMAND_PACKET_QUEUE])
        # Best-effort: missing local logs are not an error here.
        with ignored(FunctionReturn):
            remove_logs(selection, settings, master_key)

    # Remove the contact's entry from the key database.
    queues[KEY_MANAGEMENT_QUEUE].put((KDB_REMOVE_ENTRY_HEADER, selection))

    if selection in contact_list.get_list_of_accounts():
        contact_list.remove_contact(selection)
        box_print(f"Removed {selection} from contacts.", head=1, tail=1)
    else:
        box_print(f"TxM has no {selection} to remove.", head=1, tail=1)

    if any([g.remove_members([selection]) for g in group_list]):
        box_print(f"Removed {selection} from group(s).", tail=1)

    if window.type == WIN_TYPE_CONTACT:
        if selection == window.uid:
            window.deselect_window()

    if window.type == WIN_TYPE_GROUP:
        for c in window:
            if selection == c.rx_account:
                window.update_group_win_members(group_list)

                # If last member from group is removed, deselect group.
                # Deselection is not done in update_group_win_members
                # because it would prevent selecting the empty group
                # for group related commands such as notifications.
                if not window.window_contacts:
                    window.deselect_window()
def ch_master_key(ts: 'datetime', window_list: 'WindowList', contact_list: 'ContactList', group_list: 'GroupList', key_list: 'KeyList', settings: 'Settings', master_key: 'MasterKey') -> None:
    """Prompt the user for a new master password and derive a new master key from that.

    Re-encrypts every database under the new key using a write-to-temp,
    then atomically-replace pattern so a crash mid-way never leaves a
    mix of old- and new-key databases.

    Raises SoftError if the current password fails authentication.
    """
    if not master_key.authenticate_action():
        raise SoftError("Error: Invalid password.", tail_clear=True, delay=1, head=2)

    # Cache old master key to allow log file re-encryption.
    old_master_key = master_key.master_key[:]

    # Create new master key but do not store new master key data into any database.
    new_master_key = master_key.master_key = master_key.new_master_key(replace=False)
    phase("Re-encrypting databases")

    # Update encryption keys for databases
    contact_list.database.database_key = new_master_key
    key_list.database.database_key = new_master_key
    group_list.database.database_key = new_master_key
    settings.database.database_key = new_master_key

    # Create temp databases for each database, do not replace original.
    with ignored(SoftError):
        change_log_db_key(old_master_key, new_master_key, settings)
    contact_list.store_contacts(replace=False)
    key_list.store_keys(replace=False)
    group_list.store_groups(replace=False)
    settings.store_settings(replace=False)

    # At this point all temp files exist and they have been checked to be valid by the respective
    # temp file writing function. It's now time to create a temp file for the new master key
    # database. Once the temp master key database is created, the `replace_database_data()` method
    # will also run the atomic `os.replace()` command for the master key database.
    master_key.replace_database_data()

    # Next we do the atomic `os.replace()` for all other files too.
    replace_log_db(settings)
    contact_list.database.replace_database()
    key_list.database.replace_database()
    group_list.database.replace_database()
    settings.database.replace_database()

    phase(DONE)
    m_print("Master password successfully changed.", bold=True, tail_clear=True, delay=1, head=1)

    # Record the event in the command window's history.
    cmd_win = window_list.get_command_window()
    cmd_win.add_new(ts, "Changed Receiver master password.")
def gateway_loop(queues: Dict[bytes, 'Queue'], gateway: 'Gateway', unittest: bool = False) -> None:
    """Loop that loads data from TxM side gateway to NH."""
    incoming_queue = queues[TXM_INCOMING_QUEUE]

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            data = gateway.read()
            incoming_queue.put(data)

            if unittest:
                break
def c_req_manager(queues: 'QueueDict', unit_test: bool = False) -> None:
    """Manage incoming contact requests.

    Displays each new, valid contact request at most once, suppressing
    requests from accounts that are already contacts or that have been
    shown before. The display can be toggled via C_REQ_STATE_QUEUE.
    """
    existing_contacts = []  # type: List[bytes]
    contact_requests = []  # type: List[bytes]

    request_queue = queues[CONTACT_REQ_QUEUE]
    contact_queue = queues[C_REQ_MGMT_QUEUE]
    setting_queue = queues[C_REQ_STATE_QUEUE]
    show_requests = True

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while request_queue.qsize() == 0:
                time.sleep(0.1)
            purp_onion_address = request_queue.get()

            # Apply the most recent show/hide setting.
            while setting_queue.qsize() != 0:
                show_requests = setting_queue.get()

            # Update list of existing contacts
            while contact_queue.qsize() > 0:
                command, ser_onion_pub_keys = contact_queue.get()
                onion_pub_key_list = split_byte_string(ser_onion_pub_keys, ONION_SERVICE_PUBLIC_KEY_LENGTH)

                if command == RP_ADD_CONTACT_HEADER:
                    existing_contacts = list(set(existing_contacts) | set(onion_pub_key_list))
                elif command == RP_REMOVE_CONTACT_HEADER:
                    existing_contacts = list(set(existing_contacts) - set(onion_pub_key_list))

            # An empty string from validate_onion_addr means the address is valid.
            if validate_onion_addr(purp_onion_address) == '':
                onion_pub_key = onion_address_to_pub_key(purp_onion_address)
                if onion_pub_key in existing_contacts:
                    continue
                if onion_pub_key in contact_requests:
                    continue

                if show_requests:
                    ts_fmt = datetime.now().strftime('%b %d - %H:%M:%S.%f')[:-4]
                    m_print([f"{ts_fmt} - New contact request from an unknown TFC account:",
                             purp_onion_address], box=True)
                # Remember the request so it is not shown twice.
                contact_requests.append(onion_pub_key)

            if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0:
                break
def account_checker(queues: 'QueueDict', stdin_fd: int, unit_test: bool = False) -> None:
    """\
    Display diffs between received TFC accounts and accounts
    manually imported to Source Computer."""
    if not unit_test:  # pragma: no cover
        sys.stdin = os.fdopen(stdin_fd)

    account_list = []  # type: List[str]
    account_check_queue = queues[ACCOUNT_CHECK_QUEUE]
    account_send_queue = queues[ACCOUNT_SEND_QUEUE]
    account_input_queue = queues[GUI_INPUT_QUEUE]

    # Block until the user's own Onion address is available.
    while queues[USER_ACCOUNT_QUEUE].qsize() == 0:
        time.sleep(0.01)
    onion_address_user = queues[USER_ACCOUNT_QUEUE].get()

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            # Drain received accounts first so comparisons see fresh data.
            if account_send_queue.qsize() != 0:
                account = account_send_queue.get()  # type: Optional[str]
                if account is not None and account not in account_list:
                    account_list.append(account)
                continue

            if account_check_queue.qsize() != 0:
                purp_account = account_check_queue.get()  # type: str

                # Determine correct account
                for account in account_list:
                    # Check if accounts are similar enough:
                    ratio = difflib.SequenceMatcher(a=account, b=purp_account).ratio()
                    if ratio >= ACCOUNT_RATIO_LIMIT:
                        break
                else:
                    # for/else: no stored account was close enough, so ask the user.
                    account = get_account_from_user(account_list, onion_address_user, account_input_queue)

                if account is not None:
                    show_value_diffs("account", account, purp_account, local_test=True)
                continue

            time.sleep(0.01)

            if unit_test:
                break
def im_incoming(queues: Dict[bytes, 'Queue']) -> None:
    """Loop that maintains signal receiver process.

    Registers a D-Bus signal receiver for Pidgin's ReceivedImMsg signal
    and re-registers it whenever the D-Bus connection drops.
    """

    def pidgin_to_rxm(account: str, sender: str, message: str, *_: Any) -> None:
        """Signal receiver process that receives packets from Pidgin."""
        # Strip the XMPP resource suffix from the sender's JID.
        sender = sender.split('/')[0]
        ts = datetime.now().strftime("%m-%d / %H:%M:%S")

        d_bus = dbus.SessionBus(private=True)
        obj = d_bus.get_object("im.pidgin.purple.PurpleService", "/im/pidgin/purple/PurpleObject")
        purple = dbus.Interface(obj, "im.pidgin.purple.PurpleInterface")

        # Resolve the local username of the receiving Pidgin account.
        # NOTE(review): [:-1] presumably drops a trailing separator char
        # from the username Pidgin returns — confirm against Pidgin API.
        user = ''
        for a in purple.PurpleAccountsGetAllActive():
            if a == account:
                user = purple.PurpleAccountGetUsername(a)[:-1]

        # Ignore messages that are not TFC packets.
        if not message.startswith(TFC):
            return None

        try:
            __, header, payload = message.split('|')  # type: Tuple[str, str, str]
        except ValueError:
            return None

        if header.encode() == PUBLIC_KEY_PACKET_HEADER:
            print("{} - pub key {} > {} > RxM".format(ts, sender, user))
        elif header.encode() == MESSAGE_PACKET_HEADER:
            print("{} - message {} > {} > RxM".format(ts, sender, user))
        else:
            print("Received invalid packet from {}".format(sender))
            return None

        # Forward the decoded packet, tagged with its contact origin, to RxM.
        decoded = base64.b64decode(payload)
        packet = header.encode() + decoded + ORIGIN_CONTACT_HEADER + sender.encode()
        queues[RXM_OUTGOING_QUEUE].put(packet)

    while True:
        # Re-establish the D-Bus listener if the connection fails.
        with ignored(dbus.exceptions.DBusException, EOFError, KeyboardInterrupt):
            bus = dbus.SessionBus(private=True, mainloop=DBusGMainLoop())
            bus.add_signal_receiver(pidgin_to_rxm,
                                    dbus_interface="im.pidgin.purple.PurpleInterface",
                                    signal_name="ReceivedImMsg")
            GObject.MainLoop().run()
def remove_contact(user_input: 'UserInput', window: 'TxWindow', contact_list: 'ContactList', group_list: 'GroupList', settings: 'Settings', queues: 'QueueDict', master_key: 'MasterKey') -> None:
    """Remove contact from TFC.

    Queues the removal command to the Receiver, purges local logs and
    key database entries, notifies the Relay Program, removes the
    contact from groups, and deselects the window if needed.
    """
    if settings.traffic_masking:
        raise SoftError("Error: Command is disabled during traffic masking.", head_clear=True)

    try:
        selection = user_input.plaintext.split()[1]
    except IndexError:
        raise SoftError("Error: No account specified.", head_clear=True)

    if not yes(f"Remove contact '{selection}'?", abort=False, head=1):
        raise SoftError("Removal of contact aborted.", head=0, delay=1, tail_clear=True)

    # Resolve the selector (nick or address) to the Onion Service public key.
    if selection in contact_list.contact_selectors():
        onion_pub_key = contact_list.get_contact_by_address_or_nick(selection).onion_pub_key
    else:
        if validate_onion_addr(selection):
            raise SoftError("Error: Invalid selection.", head=0, delay=1, tail_clear=True)
        onion_pub_key = onion_address_to_pub_key(selection)

    # Tell the Receiver Program to remove the contact.
    receiver_command = CONTACT_REM + onion_pub_key
    queue_command(receiver_command, settings, queues)

    # Best-effort: missing local logs are not an error here.
    with ignored(SoftError):
        remove_logs(contact_list, group_list, settings, master_key, onion_pub_key)

    queues[KEY_MANAGEMENT_QUEUE].put((KDB_REMOVE_ENTRY_HEADER, onion_pub_key))

    # Tell the Relay Program to drop the contact's client process.
    relay_command = UNENCRYPTED_DATAGRAM_HEADER + UNENCRYPTED_REM_CONTACT + onion_pub_key
    queue_to_nc(relay_command, queues[RELAY_PACKET_QUEUE])

    target = determine_target(selection, onion_pub_key, contact_list)

    if any([g.remove_members([onion_pub_key]) for g in group_list]):
        m_print(f"Removed {target} from group(s).", tail=1)

    check_for_window_deselection(onion_pub_key, window, group_list)
def group_rm_group(group_name: str, contact_list: 'ContactList', group_list: 'GroupList', settings: 'Settings', queues: 'QueueDict', master_key: 'MasterKey', _: Optional[bytes] = None) -> None:
    """Remove the group with its members.

    Accepts either a local group name or a Base58 group ID, so groups
    known only to the Receiver can also be removed. Always raises
    FunctionReturn to report the outcome.
    """
    if not yes(f"Remove group '{group_name}'?", abort=False):
        raise FunctionReturn("Group removal aborted.", head=0, delay=1, tail_clear=True)

    # Resolve local name first; otherwise interpret the input as a group ID.
    if group_name in group_list.get_list_of_group_names():
        group_id = group_list.get_group(group_name).group_id
    else:
        try:
            group_id = b58decode(group_name)
        except ValueError:
            raise FunctionReturn("Error: Invalid group name/ID.", head_clear=True)

    # Tell the Receiver to delete logs and the group itself.
    # NOTE(review): commands are queued before the local existence check,
    # presumably so Receiver-only groups can be removed — confirm intent.
    command = LOG_REMOVE + group_id
    queue_command(command, settings, queues)

    command = GROUP_DELETE + group_id
    queue_command(command, settings, queues)

    if group_list.has_group(group_name):
        # Best-effort local log removal.
        with ignored(FunctionReturn):
            remove_logs(contact_list, group_list, settings, master_key, group_id)
    else:
        raise FunctionReturn(f"Transmitter has no group '{group_name}' to remove.")

    group = group_list.get_group(group_name)
    if not group.empty() and yes("Notify members about leaving the group?", abort=False):
        exit_packet = (GROUP_MSG_EXIT_GROUP_HEADER
                       + group.group_id
                       + b''.join(group.get_list_of_member_pub_keys()))
        queue_to_nc(exit_packet, queues[RELAY_PACKET_QUEUE])

    group_list.remove_group_by_name(group_name)
    raise FunctionReturn(f"Removed group '{group_name}'.", head=0, delay=1, tail_clear=True, bold=True)
def input_loop(queues: Dict[bytes, 'Queue[bytes]'], settings: 'Settings', gateway: 'Gateway', contact_list: 'ContactList', group_list: 'GroupList', master_key: 'MasterKey', onion_service: 'OnionService', stdin_fd: int) -> NoReturn:
    """Get input from user and process it accordingly.

    Running this loop as a process allows handling different functions
    including inputs, key exchanges, file loading and assembly packet
    generation, separate from assembly packet output.

    The bootstrap steps below run in a fixed order: Onion Service data
    export, local key, first contact, then window selection.
    """
    # Re-attach stdin: it is not inherited by child processes by default.
    sys.stdin = os.fdopen(stdin_fd)
    window = TxWindow(contact_list, group_list)

    while True:
        with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
            readline.set_completer(get_tab_completer(contact_list, group_list, settings, gateway))
            readline.parse_and_bind('tab: complete')

            window.update_window(group_list)

            while not onion_service.is_delivered:
                export_onion_service_data(contact_list, settings, onion_service, gateway)

            while not contact_list.has_local_contact():
                new_local_key(contact_list, settings, queues)

            while not contact_list.has_contacts():
                add_new_contact(contact_list, group_list, settings, queues, onion_service)

            while not window.is_selected():
                window.select_tx_window(settings, queues, onion_service, gateway)

            user_input = get_input(window, settings)

            # Dispatch by input type.
            if user_input.type == MESSAGE:
                queue_message(user_input, window, settings, queues)
            elif user_input.type == FILE:
                queue_file(window, settings, queues)
            elif user_input.type == COMMAND:
                process_command(user_input, window, contact_list, group_list, settings,
                                queues, master_key, onion_service, gateway)
def client(onion_pub_key: bytes, queues: 'QueueDict', url_token_private_key: X448PrivateKey, tor_port: str, gateway: 'Gateway', onion_addr_user: str, unit_test: bool = False) -> None:
    """Load packets from contact's Onion Service.

    Polls the contact's Onion Service over the Tor SOCKS proxy: first
    for their URL token public key (which doubles as an online check),
    then — once the contact is online — for actual data.
    """
    cached_pk = ''  # Last URL token public key seen; used to detect rotation.
    short_addr = pub_key_to_short_address(onion_pub_key)
    onion_addr = pub_key_to_onion_address(onion_pub_key)
    check_delay = RELAY_CLIENT_MIN_DELAY
    is_online = False

    # Route all requests through the local Tor SOCKS proxy.
    session = requests.session()
    session.proxies = {'http': f'socks5h://127.0.0.1:{tor_port}',
                       'https': f'socks5h://127.0.0.1:{tor_port}'}

    rp_print(f"Connecting to {short_addr}...", bold=True)

    # When Transmitter Program sends contact under UNENCRYPTED_ADD_EXISTING_CONTACT, this function
    # receives user's own Onion address: That way it knows to request the contact to add them:
    if onion_addr_user:
        send_contact_request(onion_addr, onion_addr_user, session)

    while True:
        with ignored(EOFError, KeyboardInterrupt, SoftError):
            time.sleep(check_delay)

            url_token_public_key_hex = load_url_token(onion_addr, session)

            # Adjusts check_delay by online status (backoff handled inside).
            is_online, check_delay = manage_contact_status(url_token_public_key_hex,
                                                          check_delay, is_online, short_addr)

            if not is_online:
                continue

            url_token, cached_pk = update_url_token(url_token_private_key,
                                                    url_token_public_key_hex,
                                                    cached_pk, onion_pub_key, queues)

            get_data_loop(onion_addr, url_token, short_addr, onion_pub_key,
                          queues, session, gateway)

            if unit_test:
                break
def relay_command(queues: 'QueueDict', gateway: 'Gateway', unit_test: bool = False) -> None:
    """Process Relay Program commands."""
    command_queue = queues[SRC_TO_RELAY_QUEUE]

    while True:
        with ignored(EOFError, KeyboardInterrupt, SoftError):
            # Poll until a command from the Source Computer is available.
            while command_queue.qsize() == 0:
                time.sleep(0.01)

            process_command(command_queue.get(), gateway, queues)

            if unit_test:
                break
def src_incoming(queues: 'QueueDict', gateway: 'Gateway', unit_test: bool = False) -> None:
    """\
    Redirect datagrams received from Source Computer to appropriate queues.

    Message/file/group datagrams require the buffer key (delivered once
    via TX_BUF_KEY_QUEUE) and are dropped until it has arrived.
    """
    commands_to_relay = queues[SRC_TO_RELAY_QUEUE]
    buf_key_queue = queues[TX_BUF_KEY_QUEUE]
    buf_key = None

    while True:
        with ignored(EOFError, KeyboardInterrupt, SoftError):
            # Fetch the buffer key once it becomes available.
            if buf_key is None and buf_key_queue.qsize() > 0:
                buf_key = buf_key_queue.get()

            ts, packet = load_packet_from_queue(queues, gateway)
            header, packet = separate_header(packet, DATAGRAM_HEADER_LENGTH)

            if header == UNENCRYPTED_DATAGRAM_HEADER:
                commands_to_relay.put(packet)

            elif header in [COMMAND_DATAGRAM_HEADER, LOCAL_KEY_DATAGRAM_HEADER]:
                process_command_datagram(ts, packet, header, queues)

            elif header in [MESSAGE_DATAGRAM_HEADER, PUBLIC_KEY_DATAGRAM_HEADER] and buf_key is not None:
                process_message_datagram(ts, packet, header, buf_key, queues)

            elif header == FILE_DATAGRAM_HEADER and buf_key is not None:
                process_file_datagram(ts, packet, header, buf_key)

            elif header in [GROUP_MSG_INVITE_HEADER, GROUP_MSG_JOIN_HEADER,
                            GROUP_MSG_MEMBER_ADD_HEADER, GROUP_MSG_MEMBER_REM_HEADER,
                            GROUP_MSG_EXIT_GROUP_HEADER] and buf_key is not None:
                process_group_management_message(ts, packet, header, buf_key)

            if unit_test:
                break
def nh_command(queues: Dict[bytes, 'Queue'], settings: 'Settings', stdin_fd: int, unittest: bool = False) -> None:
    """Loop that processes NH side commands."""
    # Re-attach stdin: it is not inherited by child processes by default.
    sys.stdin = os.fdopen(stdin_fd)
    command_queue = queues[TXM_TO_NH_QUEUE]

    while True:
        with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
            # Poll until a command from TxM is available.
            while command_queue.qsize() == 0:
                time.sleep(0.01)

            process_command(settings, command_queue.get(), queues)

            if unittest:
                break
def relay_command(queues: 'QueueDict', gateway: 'Gateway', stdin_fd: int, unit_test: bool = False) -> None:
    """Process Relay Program commands."""
    # Re-attach stdin: it is not inherited by child processes by default.
    sys.stdin = os.fdopen(stdin_fd)
    command_queue = queues[SRC_TO_RELAY_QUEUE]

    while True:
        with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
            # Poll until a command from the Source Computer is available.
            while command_queue.qsize() == 0:
                time.sleep(0.01)

            process_command(command_queue.get(), gateway, queues)

            if unit_test:
                break
def tx_loop(io_queue: 'Queue', output_socket: int, argv: str) -> None:
    """Loop that sends packets to receiving program."""
    draw_frame(argv, 'Idle', high=False)

    # Retry until the receiving program starts listening on the socket.
    interface = None
    while interface is None:
        try:
            interface = multiprocessing.connection.Client(('localhost', output_socket))
        except socket.error:
            time.sleep(0.01)

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while io_queue.empty():
                time.sleep(0.01)
            animate(argv)
            interface.send(io_queue.get())
def gateway_loop(queues: Dict[bytes, 'Queue[Tuple[datetime, bytes]]'], gateway: 'Gateway', unit_test: bool = False) -> None:
    """Load data from serial interface or socket into a queue.

    Each packet is queued together with its arrival timestamp, which is
    delivered to the Receiver Program. The timestamp both notifies when
    the sent message was received by Relay Program, and serves as part
    of a commitment scheme: for more information, see the section on
    "Covert channel based on user interaction" under TFC's Security
    Design wiki article.
    """
    packet_queue = queues[GATEWAY_QUEUE]

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            packet_queue.put((datetime.now(), gateway.read()))

            if unit_test:
                break
def input_loop(queues: Dict[bytes, 'Queue'], settings: 'Settings', gateway: 'Gateway', contact_list: 'ContactList', group_list: 'GroupList', master_key: 'MasterKey', stdin_fd: int) -> None:
    """Get input from user and process it accordingly.

    Tx side of TFC runs two processes -- input and sender loop -- separate
    from one another. This allows prioritized output of queued assembly
    packets. input_loop handles Tx-side functions excluding assembly packet
    encryption, output and logging, and hash ratchet key/counter updates in
    key_list database.

    The bootstrap steps below run in a fixed order: local key, first
    contact, then window selection.
    """
    # Re-attach stdin: it is not inherited by child processes by default.
    sys.stdin = os.fdopen(stdin_fd)
    window = TxWindow(contact_list, group_list)

    while True:
        with ignored(EOFError, FunctionReturn, KeyboardInterrupt):
            readline.set_completer(get_tab_completer(contact_list, group_list, settings))
            readline.parse_and_bind('tab: complete')

            window.update_group_win_members(group_list)

            while not contact_list.has_local_contact():
                new_local_key(contact_list, settings, queues)

            while not contact_list.has_contacts():
                add_new_contact(contact_list, group_list, settings, queues)

            while not window.is_selected():
                window.select_tx_window(settings, queues)

            user_input = get_input(window, settings)

            # Dispatch by input type.
            if user_input.type == MESSAGE:
                queue_message(user_input, window, settings, queues[MESSAGE_PACKET_QUEUE])
            elif user_input.type == FILE:
                queue_file(window, settings, queues[FILE_PACKET_QUEUE], gateway)
            elif user_input.type == COMMAND:
                process_command(user_input, window, settings, queues, contact_list, group_list, master_key)
def c_req_manager(queues: 'QueueDict', unit_test: bool = False) -> None:
    """Manage displayed contact requests.

    Displays each new, valid contact request at most once, suppressing
    requests from accounts that are already contacts or have been shown
    before. Valid requests are also forwarded to the account checker via
    ACCOUNT_SEND_QUEUE. Display can be toggled via C_REQ_STATE_QUEUE.
    """
    existing_contacts = []  # type: List[bytes]
    displayed_requests = []  # type: List[bytes]

    request_queue = queues[CONTACT_REQ_QUEUE]
    contact_queue = queues[C_REQ_MGMT_QUEUE]
    setting_queue = queues[C_REQ_STATE_QUEUE]
    account_queue = queues[ACCOUNT_SEND_QUEUE]
    show_requests = True

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while request_queue.qsize() == 0:
                time.sleep(0.1)
            purp_onion_address = request_queue.get()

            # Apply the most recent show/hide setting.
            while setting_queue.qsize() != 0:
                show_requests = setting_queue.get()

            existing_contacts = update_list_of_existing_contacts(contact_queue, existing_contacts)

            # An empty string from validate_onion_addr means the address is valid.
            if validate_onion_addr(purp_onion_address) == '':
                onion_pub_key = onion_address_to_pub_key(purp_onion_address)
                if onion_pub_key in existing_contacts:
                    continue
                if onion_pub_key in displayed_requests:
                    continue

                if show_requests:
                    ts = datetime.now().strftime('%b %d - %H:%M:%S.%f')[:-4]
                    m_print([f"{ts} - New contact request from an unknown TFC account:",
                             purp_onion_address], box=True)
                    account_queue.put(purp_onion_address)
                # Remember the request so it is not shown twice.
                displayed_requests.append(onion_pub_key)

            if unit_test and queues[UNIT_TEST_QUEUE].qsize() != 0:
                break
def tx_loop(io_queue: 'Queue', output_socket: int, argv: str) -> None:
    """Send queued datagrams to a receiving program.

    io_queue      -- queue through which to push datagrams through
    output_socket -- socket number for Relay/Receiver Program
    argv          -- arguments for simulator position/orientation
    """
    draw_frame(argv, 'Idle')

    # Retry until the receiving program starts listening on the socket.
    interface = None
    while interface is None:
        try:
            interface = multiprocessing.connection.Client((LOCALHOST, output_socket))
        except socket.error:
            time.sleep(0.01)

    while True:
        with ignored(EOFError, KeyboardInterrupt):
            while io_queue.empty():
                time.sleep(0.01)
            animate(argv)
            interface.send(io_queue.get())