Example #1
 def _worker(i, module, input, kwargs, device=None):
     torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             output = module(input, **kwargs)
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
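All of the _worker variants in this listing are closures: grad_enabled, results, lock, and get_a_var come from an enclosing parallel_apply function that runs one thread per replica. For context, here is a minimal sketch of that enclosing scope, modeled on torch.nn.parallel.parallel_apply; it illustrates the shape these snippets assume rather than reproducing the exact upstream source.

import threading
import torch

def get_a_var(obj):
    # Find some tensor inside obj (tensor, list/tuple, or dict) so the
    # worker can read its device.
    if isinstance(obj, torch.Tensor):
        return obj
    if isinstance(obj, (list, tuple)):
        for o in obj:
            result = get_a_var(o)
            if result is not None:
                return result
    if isinstance(obj, dict):
        for o in obj.values():
            result = get_a_var(o)
            if result is not None:
                return result
    return None

def parallel_apply(modules, inputs, kwargs_tup=None, devices=None):
    if kwargs_tup is None:
        kwargs_tup = ({},) * len(modules)
    if devices is None:
        devices = [None] * len(modules)
    lock = threading.Lock()                  # guards the shared results dict
    results = {}
    grad_enabled = torch.is_grad_enabled()   # propagated into each worker thread

    def _worker(i, module, input, kwargs, device=None):
        # Body of Example #1 above; any of the variants in this listing fits here.
        torch.set_grad_enabled(grad_enabled)
        if device is None:
            device = get_a_var(input).get_device()
        try:
            with torch.cuda.device(device):
                output = module(input, **kwargs)
            with lock:
                results[i] = output
        except Exception as e:
            with lock:
                results[i] = e

    threads = [threading.Thread(target=_worker, args=(i, m, x, k, d))
               for i, (m, x, k, d) in
               enumerate(zip(modules, inputs, kwargs_tup, devices))]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    outputs = []
    for i in range(len(inputs)):
        output = results[i]
        if isinstance(output, Exception):
            raise output                     # surface worker failures on the main thread
        outputs.append(output)
    return outputs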
Example #2
 def _worker(i, module, input, target, kwargs, device=None):
     torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             if not isinstance(input, (list, tuple)):
                 input = (input, )
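             # note: target is assumed to already be a tuple here (scatter yields tuples)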
             output = module(*(input + target), **kwargs)
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
Example #3
 def _worker(i, module, input, target, kwargs, device=None):
     if torch_ver != "0.3":
         torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             output = module(input, target)
             # output = module(*(input + target), **kwargs)  # modified at Issue 54
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
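The torch_ver guard in Example #3 (and in several variants below) exists because torch.set_grad_enabled only appeared in PyTorch 0.4. In the repositories these snippets come from, it is typically derived from the version string, along the lines of:

import torch

torch_ver = torch.__version__[:3]   # e.g. "0.4"; 0.3 has no torch.set_grad_enabled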
Example #4
 def _worker(i, module, input, kwargs, device=None):
     torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             # this also avoids accidental slicing of `input` if it is a Tensor
             if not isinstance(input, (list, tuple)):
                 input = (input, )
             output = getattr(module, func_name)(*input, **kwargs)
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
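Example #4 generalizes the pattern: instead of hard-coding forward, it resolves the method to run with getattr(module, func_name). That implies the enclosing function receives the method name as an argument; a hypothetical signature (an assumption, not shown in the snippet) would be:

# Hypothetical enclosing signature for the getattr-based worker above;
# func_name selects which method of each replica runs in parallel.
def parallel_apply(modules, func_name, inputs, kwargs_tup=None, devices=None):
    ...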
Example #5
 def _worker(i, module, input, target, kwargs, device=None):
     torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             if not isinstance(input, (list, tuple)):
                 input = (input, )
             if not isinstance(target, (list, tuple)):
                 target = (target, )
             output = module(*input, *target, **kwargs)
         with lock:
             results[i] = output
     except Exception:
         with lock:
             results[i] = ExceptionWrapper(
                 where="in replica {} on device {}".format(i, device))
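Example #5 mirrors current upstream PyTorch: rather than storing the bare exception, it stores an ExceptionWrapper (from torch._utils), which captures the traceback inside the worker thread so the failure can be re-raised intelligibly on the main thread. A minimal sketch of what such a wrapper does (the real class handles more edge cases):

import sys
import traceback

class ExceptionWrapper:
    # Captures the exception currently being handled, together with its
    # formatted traceback, so it can be re-raised later on another thread.
    def __init__(self, exc_info=None, where="in background"):
        if exc_info is None:
            exc_info = sys.exc_info()
        self.exc_type = exc_info[0]
        self.exc_msg = "".join(traceback.format_exception(*exc_info))
        self.where = where

    def reraise(self):
        msg = "Caught {} {}.\nOriginal {}".format(
            self.exc_type.__name__, self.where, self.exc_msg)
        raise self.exc_type(msg)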
Example #6
 def _worker(i, module, input, target, kwargs, device=None):
     if torch_ver != "0.3":
         torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             # output = module(*(input + target), **kwargs)
             if isinstance(input, tuple):
                 input = input[0]
             # print('module: {}\ninput: {}\ntarget:{}'.format(module, input, target))
             output = module(input, target)
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
Example #7
 def _worker(i, module, input, target, kwargs, device=None):
     # import pdb; pdb.set_trace()
     if torch_ver != "0.3":
         torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         if not isinstance(input, tuple):
             input = (input, )
         if not isinstance(target, tuple):
             target = (target, )
         with torch.cuda.device(device):
             output = module(*(input + target), **kwargs)
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
Example #8
 def _worker(i, module, input, target, kwargs, device=None):
     if torch_ver != "0.3":
         torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             # this also avoids accidental slicing of `input` if it is a Tensor
             if not isinstance(input, (list, tuple)):
                 input = (input,)
             if not isinstance(target, (list, tuple)):
                 target = (target,)
             output = module(*(input + target))  # **kwargs removed by Turing_Lee
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
Example #9
 def _worker(i, module, input, kwargs, device=None):
     if torch_ver != "0.3":
         torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             # this also avoids accidental slicing of `input` if it is a Tensor
             if not isinstance(input, (list, tuple)):
                 input = (input,)
             # FTWS: note that in the Decoder and the parallel loss the input is
             # already a tuple, so the call is changed here.
             output = module(input, **kwargs)
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
Example #10
 def _worker(i, module, input, kwargs, device=None):
     torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             # this also avoids accidental slicing of `input` if it is a Tensor
             if not isinstance(input, (list, tuple)):
                 input = (input,)
             if forward:
                 output = module(*input, **kwargs)
             else:
                 output = module.sample(*input, **kwargs)
         with lock:
             results[i] = output
     except Exception:
         with lock:
             results[i] = ExceptionWrapper(
                 where="in replica {} on device {}".format(i, device))
Example #11
 def _worker(i, module, input, target, kwargs, device=None):
     torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             # PyTorch's native implementation converts input to a tuple to avoid
             # further slicing; here we extract the tensor from the tuple to compute the loss.
             if isinstance(input, (list, tuple)):
                 input = input[0]
             if isinstance(target, (list, tuple)):
                 target = target[0]
             assert target.device == input.device
             if module.device != input.device:
                 module = module.to(input.device)
             output = module(input, target, **kwargs)
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
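Example #11 is a criterion worker: it unwraps the scattered one-element tuples back to tensors, asserts that input and target share a device, and moves the loss module there if needed. Note that module.device is not a standard nn.Module attribute, so this variant assumes the criterion defines one; a more generic check could read the device off a parameter instead:

# Generic alternative to the module.device check above (assumes the
# criterion has at least one parameter to inspect).
module_device = next(module.parameters()).device
if module_device != input.device:
    module = module.to(input.device)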
Example #12
 def _worker(i, module, input, target, kwargs, device=None):
     if torch_ver != "0.3":
         torch.set_grad_enabled(grad_enabled)
     if device is None:
         device = get_a_var(input).get_device()
     try:
         with torch.cuda.device(device):
             # this also avoids accidental slicing of `input` if it is a Tensor
             if not isinstance(input, (list, tuple)):
                 input = (input, )
             if type(input) != type(target):
                 if isinstance(target, tuple):
                     input = tuple(input)
                 elif isinstance(target, list):
                     input = list(input)
                 else:
                     raise TypeError("input and target must both be tuples or both be lists")

             output = module(*(input + target), **kwargs)
         with lock:
             results[i] = output
     except Exception as e:
         with lock:
             results[i] = e
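The type-matching block in Example #12 exists because the input + target concatenation requires both operands to be the same sequence type. A quick illustration:

inp = (1,)
tgt = [2, 3]
# inp + tgt                      # TypeError: can only concatenate tuple (not "list") to tuple
print(tuple(inp) + tuple(tgt))   # (1, 2, 3)  -- works once the types agree
print(list(inp) + tgt)           # [1, 2, 3]  -- Example #12 converts input to match target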