Example #1
def convert_cuda(self, model, input):
    # assumes: import torch; from torch.autograd import Variable, function
    cuda_model = model.cuda()
    # input might be nested - we want to move everything to GPU
    cuda_input = function._nested_map(
        lambda o: isinstance(o, Variable) or torch.is_tensor(o),
        lambda o: o.cuda())(input)
    return cuda_model, cuda_input
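Every example in this list leans on function._nested_map from torch.autograd.function. It builds a mapper that applies fn to each leaf satisfying condition while rebuilding the surrounding lists, tuples, and dicts. A simplified sketch of that behavior (not the exact upstream implementation; namedtuple handling and error text are abridged):

def _nested_map(condition, fn, condition_msg=None):
    # Return a function that maps `fn` over every matching leaf of a
    # nested structure, preserving the list/tuple/dict shape.
    def _map(obj):
        if condition(obj):
            return fn(obj)
        elif obj is None:
            return None
        elif isinstance(obj, (list, tuple)):
            return type(obj)(_map(x) for x in obj)
        elif isinstance(obj, dict):
            return {k: _map(v) for k, v in obj.items()}
        else:
            raise ValueError(
                "Auto nesting doesn't know how to process an input object of type "
                + type(obj).__name__
                + (". Accepted types: " + condition_msg + ", or lists/tuples of them"
                   if condition_msg else ""))
    return _map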
Example #2
def _clone_inputs(args):
    def clone_input(a):
        if a is None:
            return None
        elif isinstance(a, Variable):
            v = Variable(a.data.clone(), requires_grad=a.requires_grad)
            if a.grad is not None:
                v.grad = clone_input(a.grad)  # clone the source's grad; v.grad is still None here
            return v
        else:
            return a.clone()
    return function._nested_map(lambda o: isinstance(o, Variable) or torch.is_tensor(o),
                                clone_input, condition_msg="Variables")(args)
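A hedged usage sketch (hypothetical tensors; requires import torch and from torch.autograd import Variable, function): the nesting of the argument survives the clone.

# Hypothetical usage: the tuple structure is preserved, each leaf is cloned.
x = torch.randn(2, 2, requires_grad=True)
y = torch.randn(3)
cx, (cy,) = _clone_inputs((x, (y,)))
assert cx is not x and torch.equal(cx, x)
assert cx.requires_grad and not cy.requires_grad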
Example #3
def _clone_inputs(args):
    def clone_input(a):
        if a is None:
            return None
        elif isinstance(a, torch.Tensor):
            # TODO: figure out one liner to .clone() and set requires_grad
            v = Variable(a.data.clone(), requires_grad=a.requires_grad)
            if a.grad is not None:
            v.grad = clone_input(a.grad)  # clone the source's grad; v.grad is still None here
            return v
        else:
            return a.clone()
    return function._nested_map(lambda x: isinstance(x, torch.Tensor),
                                clone_input, condition_msg="tensors")(args)
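The only substantive change from the previous revision is the condition: isinstance(o, Variable) or torch.is_tensor(o) becomes isinstance(x, torch.Tensor), reflecting the PyTorch 0.4 merge of Variable and Tensor into a single torch.Tensor type.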
Example #4
def _clone_inputs(args):
    def clone_input(a):
        if a is None:
            return None
        elif isinstance(a, torch.Tensor):
            # TODO: figure out one liner to .clone() and set requires_grad
            v = (a.detach().clone(
                memory_format=torch.preserve_format).requires_grad_(
                    a.requires_grad))
            if a.grad is not None:
            v.grad = clone_input(a.grad)  # clone the source's grad; v.grad is still None here
            return v
        else:
            return a.clone(memory_format=torch.preserve_format)

    return function._nested_map(lambda x: isinstance(x, torch.Tensor),
                                clone_input,
                                condition_msg="tensors")(args)
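This revision drops the Variable(a.data.clone(), ...) construction in favor of a.detach().clone(memory_format=torch.preserve_format).requires_grad_(a.requires_grad), which avoids the deprecated Variable wrapper and direct .data access while preserving the source tensor's memory layout.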
Example #5
def do_input_map(fn, input):
    return _nested_map(lambda t: isinstance(t, torch.Tensor), fn)(input)
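For instance (hypothetical input; assumes import torch and that _nested_map was imported from torch.autograd.function; note that non-tensor leaves other than None would raise inside _nested_map):

# Hypothetical usage: scale every tensor leaf in a nested input.
inp = {"a": torch.ones(2), "b": [torch.zeros(3), None]}
out = do_input_map(lambda t: t * 0.5, inp)
# out == {"a": tensor([0.5, 0.5]), "b": [tensor([0., 0., 0.]), None]}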
Example #6
def _flatten(obj, params=tuple()):
    obj_vars = tuple(itertools.chain(function._iter_variables(obj), params))
    obj_struct = function._nested_map(lambda o: isinstance(o, Variable),
                                      lambda x: HOLE)(obj)
    return obj_vars, obj_struct
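Sketch of the contract, assuming HOLE is a sentinel object and function._iter_variables yields the Variable leaves of obj in traversal order:

# Hypothetical illustration, for obj = (x, [y]) with Variables x, y
# and params = (p,):
#   obj_vars   == (x, y, p)       # flat tuple: leaves first, then params
#   obj_struct == (HOLE, [HOLE])  # same nesting, each Variable -> HOLE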