def test_gather_embeddings_word_size_2(self):
    """Exercise gathering embeddings across 2 processes / 2 CUDA devices.

    Spawns ``WORLD_SIZE`` worker processes running ``self.worker_fn``,
    which is expected to perform the distributed gather and assert on
    the result.  NOTE(review): method name says "word_size" but means
    "world_size" — kept as-is since renaming would break test selection.
    """
    # Skip explicitly instead of silently passing when the hardware
    # requirement isn't met, so test reports show the test was not run.
    if torch.cuda.device_count() < 2:
        self.skipTest("requires at least 2 CUDA devices")
    WORLD_SIZE = 2
    BATCH_SIZE = 2
    # Fresh free port per run so concurrent/repeated runs don't collide
    # on the rendezvous address.
    port = find_free_tcp_port()
    mp.spawn(
        self.worker_fn,
        args=(WORLD_SIZE, BATCH_SIZE, port),
        nprocs=WORLD_SIZE,
    )
def test_backward_world_size_1(self):
    """Exercise the backward pass in a single-process (world size 1) setup.

    Spawns one worker process running ``self.worker_fn``, which is
    expected to run the backward computation and assert on the result.
    """
    # Skip explicitly instead of silently passing when no CUDA device
    # is available, so test reports show the test was not run.
    if torch.cuda.device_count() < 1:
        self.skipTest("requires at least 1 CUDA device")
    WORLD_SIZE = 1
    BATCH_SIZE = 2
    # Fresh free port per run so concurrent/repeated runs don't collide
    # on the rendezvous address.
    port = find_free_tcp_port()
    mp.spawn(
        self.worker_fn,
        args=(WORLD_SIZE, BATCH_SIZE, port),
        nprocs=WORLD_SIZE,
    )