def __getitem__(self, batch):
    """Hack to do multi-GPU training: indexing the model runs a forward pass.

    Scatters ``batch`` across the available GPUs, replicates the model onto
    each device, and applies every replica to its shard in parallel. With a
    single GPU this degenerates to a plain forward call. In training mode the
    per-GPU outputs are merged via ``gather_res``; otherwise the raw list of
    per-GPU outputs is returned as-is.
    """
    batch.scatter()
    num_gpus = self.num_gpus
    if num_gpus == 1:
        return self(*batch[0])
    device_ids = list(range(num_gpus))
    model_copies = nn.parallel.replicate(self, devices=device_ids)
    shard_outputs = nn.parallel.parallel_apply(
        model_copies, [batch[gpu] for gpu in device_ids])
    return gather_res(shard_outputs, 0, dim=0) if self.training else shard_outputs