Example #1
def sync_params(module):
    """ Sync all parameters of module across all MPI processes. """
    # Nothing to do when running single-process.
    if num_procs() == 1:
        return
    for p in module.parameters():
        # .numpy() on a CPU tensor shares memory with the tensor, so
        # the in-place MPI broadcast below updates the parameter itself.
        p_numpy = p.data.numpy()
        broadcast(p_numpy)
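The example assumes MPI helpers num_procs() and broadcast() from the surrounding project (in OpenAI Spinning Up, for instance, such helpers live in spinup.utils.mpi_tools). Below is a minimal sketch of what they might look like on top of mpi4py, plus a usage call; the mpi4py implementation is an assumption, not the original code:

# Hypothetical mpi4py versions of the helpers the example assumes.
import torch.nn as nn
from mpi4py import MPI

def num_procs():
    # Number of processes in the world communicator.
    return MPI.COMM_WORLD.Get_size()

def broadcast(x, root=0):
    # In-place broadcast of a NumPy array from the root rank.
    MPI.COMM_WORLD.Bcast(x, root=root)

# Run under e.g. `mpirun -np 4 python script.py`; afterwards every
# rank holds the root rank's copy of the parameters.
model = nn.Linear(4, 2)
sync_params(model)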
Example #2
import torch

def _broadcast(x):
    # Broadcast x in place and return it for convenience.
    broadcast(x)
    return x

def sync_all_params(param, root=0):
    # param must be a re-iterable sequence (e.g. a list), since it is
    # traversed twice: once to flatten, once to write the result back.
    data = torch.nn.utils.parameters_to_vector(param).detach().numpy()
    broadcast(data, root)
    torch.nn.utils.vector_to_parameters(torch.from_numpy(data), param)

def _get_sync_params(params):
    # flat_concat and _get_params_from_flat are flatten/unflatten
    # helpers from the surrounding codebase, not standard library calls.
    flat_params = flat_concat(params).numpy()
    broadcast(flat_params, root=0)
    return _get_params_from_flat(flat_params, params)
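For context, sync_all_params is typically called once right after model construction so every rank starts from identical weights. A short hypothetical usage sketch, passing a list because the function iterates the parameters twice:

import torch.nn as nn

# Sync once after construction; afterwards all ranks share the
# root rank's initial weights.
net = nn.Sequential(nn.Linear(8, 8), nn.Tanh(), nn.Linear(8, 1))
sync_all_params(list(net.parameters()))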