def init(party_name=None, device=None):
    """
    Initialize CrypTen. It will initialize communicator, setup party
    name for file save / load, and setup seeds for Random Number Generation.
    By default the function will initialize a set of RNG generators on CPU.
    If torch.cuda.is_available() returns True, it will initialize an
    additional set of RNG generators on GPU. Users can specify the GPU
    device the generators are initialized with device.

    Args:
        party_name (str): party_name for file save and load, default is None
        device (int, str, torch.device): Specify device for RNG generators
        on GPU. Must be a GPU device.
    """
    # Return and raise warning if initialized
    if comm.is_initialized():
        warnings.warn("CrypTen is already initialized.", RuntimeWarning)
        return

    # Initialize communicator
    comm._init(use_threads=False, init_ttp=crypten.mpc.ttp_required())

    # Setup party name for file save / load
    if party_name is not None:
        comm.get().set_name(party_name)

    # Setup seeds for Random Number Generation.
    # NOTE: the previous guard `get_rank() < get_world_size()` was a
    # tautology (a valid rank is always strictly less than world_size),
    # so PRZS setup is performed unconditionally.
    _setup_przs(device=device)

    # Start the TTP client when the protocol needs a trusted third party
    if crypten.mpc.ttp_required():
        crypten.mpc.provider.ttp_provider.TTPClient._init()
def test_is_initialized(self):
    """Tests that the is_initialized flag is set properly"""
    communicator = crypten.communicator

    # Freshly initialized: both the top-level and communicator flags are set.
    self.assertTrue(crypten.is_initialized())
    self.assertTrue(communicator.is_initialized())

    # Tearing down clears both flags.
    crypten.uninit()
    self.assertFalse(crypten.is_initialized())
    self.assertFalse(communicator.is_initialized())

    # note that uninit() kills the TTP process, so we need to restart it:
    if self.rank == self.MAIN_PROCESS_RANK and crypten.mpc.ttp_required():
        self.processes.append(self._spawn_ttp())

    # Re-initializing restores both flags.
    crypten.init()
    self.assertTrue(crypten.is_initialized())
    self.assertTrue(communicator.is_initialized())
def is_initialized():
    """Return whether the CrypTen communicator has been initialized."""
    initialized = comm.is_initialized()
    return initialized