def backward(self, flag, dy):
    '''Backward propagate gradients through this layer.

    Args:
        flag (int): for future use.
        dy (Tensor or list<Tensor>): the gradient tensor(s) y w.r.t the
            objective loss

    Return:
        <dx, <dp1, dp2..>>, dx is a (set of) tensor(s) for the gradient of x
        , dpi is the gradient of the i-th parameter
    '''
    # Accept a plain bool and translate it onto the protobuf phase enum.
    if type(flag) is bool:
        flag = model_pb2.kTrain if flag else model_pb2.kEval
    if type(dy) is list:
        # Multi-input layer: unwrap every python Tensor to its swig handle.
        raw_dys = [t.singa_tensor for t in dy]
        raw = self.layer.BackwardWithMultInputs(flag, raw_dys)
    else:
        assert isinstance(dy, tensor.Tensor), \
            'input of %s (type:%s) must be a Tensor or Tensor list'\
            % (self.name, type(dy).__name__)
        raw = self.layer.Backward(flag, dy.singa_tensor)
    # raw[0] holds grad(s) of the input(s); raw[1] the parameter grads.
    if type(raw[0]) is tuple:
        dx = tensor.from_raw_tensors(raw[0])
    else:
        dx = tensor.from_raw_tensor(raw[0])
    return dx, tensor.from_raw_tensors(raw[1])
def backward(self, flag, dy):
    '''Backward propagate gradients through this layer.

    Args:
        flag (int): for future use.
        dy (Tensor or list<Tensor>): the gradient tensor(s) y w.r.t the
            objective loss

    Return:
        <dx, <dp1, dp2..>>, dx is a (set of) tensor(s) for the gradient of x
        , dpi is the gradient of the i-th parameter
    '''
    # A bool flag is shorthand for the train/eval phase constants.
    if type(flag) is bool:
        if flag:
            flag = model_pb2.kTrain
        else:
            flag = model_pb2.kEval
    if type(dy) is not list:
        assert isinstance(dy, tensor.Tensor), \
            'the input must be a Tensor or a set of Tensor'
        result = self.layer.Backward(flag, dy.singa_tensor)
    else:
        unwrapped = [t.singa_tensor for t in dy]
        result = self.layer.BackwardWithMultInputs(flag, unwrapped)
    # First slot: input gradient(s); second slot: parameter gradients.
    input_grads = result[0]
    if type(input_grads) is tuple:
        dxs = tensor.from_raw_tensors(input_grads)
    else:
        dxs = tensor.from_raw_tensor(input_grads)
    return dxs, tensor.from_raw_tensors(result[1])
def forward(self, flag, x):
    '''Forward propagate through this layer.

    Args:
        flag: True (kTrain) for training (kEval); False for evaluating;
            other values for furture use.
        x (Tensor or list<Tensor>): an input tensor if the layer is
            connected from a single layer; a list of tensors if the layer
            is connected from multiple layers.

    Return:
        a tensor if the layer is connected to a single layer; a list of
        tensors if the layer is connected to multiple layers;
    '''
    assert self.has_setup, 'Must call setup() before forward()'
    # Normalize a bool flag into the protobuf phase constant.
    if type(flag) is bool:
        flag = model_pb2.kTrain if flag else model_pb2.kEval
    if type(x) is list:
        # One swig tensor per connected upstream layer.
        raw_inputs = [t.singa_tensor for t in x]
        out = self.layer.ForwardWithMultInputs(flag, raw_inputs)
    else:
        assert isinstance(x, tensor.Tensor), \
            'input of %s (type:%s) must be a Tensor or Tensor list'\
            % (self.name, type(x).__name__)
        out = self.layer.Forward(flag, x.singa_tensor)
    # Multi-output layers hand back a tuple of raw tensors.
    if type(out) is tuple:
        return tensor.from_raw_tensors(out)
    return tensor.from_raw_tensor(out)
def forward(self, flag, x):
    '''Forward propagate through this layer.

    Args:
        flag (int): kTrain or kEval
        x (Tensor or list<Tensor>): an input tensor if the layer is
            connected from a single layer; a list of tensors if the layer
            is connected from multiple layers.

    Return:
        a tensor if the layer is connected to a single layer; a list of
        tensors if the layer is connected to multiple layers;
    '''
    assert self.has_setup, 'Must call setup() before forward()'
    if type(x) is list:
        # Unwrap each python Tensor into its underlying swig tensor
        # (comprehension replaces the manual append loop).
        xs = [t.singa_tensor for t in x]
    else:
        assert isinstance(x, tensor.Tensor), \
            'input must be a Tensor or a list of Tensor'
        xs = x.singa_tensor
    y = self.layer.Forward(flag, xs)
    # A multi-output layer returns a list of raw tensors.
    if type(y) is list:
        return tensor.from_raw_tensors(y)
    else:
        return tensor.from_raw_tensor(y)
def forward(self, flag, x):
    '''Forward propagate through this layer.

    Args:
        flag: True (kTrain) for training (kEval); False for evaluating;
            other values for furture use.
        x (Tensor or list<Tensor>): an input tensor if the layer is
            connected from a single layer; a list of tensors if the layer
            is connected from multiple layers.

    Return:
        a tensor if the layer is connected to a single layer; a list of
        tensors if the layer is connected to multiple layers;
    '''
    assert self.has_setup, 'Must call setup() before forward()'
    # Booleans map onto the train/eval phase enum.
    if type(flag) is bool:
        if flag:
            flag = model_pb2.kTrain
        else:
            flag = model_pb2.kEval
    if type(x) is not list:
        assert isinstance(x, tensor.Tensor), \
            'input must be a Tensor or a list of Tensor'
        result = self.layer.Forward(flag, x.singa_tensor)
    else:
        result = self.layer.ForwardWithMultInputs(
            flag, [t.singa_tensor for t in x])
    # A tuple result means the layer produced multiple outputs.
    if type(result) is tuple:
        return tensor.from_raw_tensors(result)
    return tensor.from_raw_tensor(result)
def backward(self, flag, dy):
    '''Backward propagate gradients through this layer.

    Args:
        flag (int): for future use.
        dy (Tensor or list<Tensor>): the gradient tensor(s) y w.r.t the
            objective loss

    Return:
        <dx, <dp1, dp2..>>, dx is a (set of) tensor(s) for the gradient of x
        , dpi is the gradient of the i-th parameter
    '''
    if type(dy) is not list:
        assert isinstance(dy, tensor.Tensor), \
            'the input must be a Tensor or a set of Tensor'
        raw_dy = dy.singa_tensor
    else:
        raw_dy = [t.singa_tensor for t in dy]
    result = self.layer.Backward(flag, raw_dy)
    # result[0]: gradient(s) w.r.t. the input; result[1]: parameter grads.
    # NOTE(review): this checks for a list here while sibling versions of
    # backward() check for a tuple — confirm which type the swig layer returns.
    if type(result[0]) is list:
        dxs = tensor.from_raw_tensors(result[0])
    else:
        dxs = tensor.from_raw_tensor(result[0])
    return dxs, tensor.from_raw_tensors(result[1])
def forward(self, flag, x):
    '''Forward propagate through this layer.

    Args:
        flag (int): kTrain or kEval
        x (Tensor or list<Tensor>): an input tensor if the layer is
            connected from a single layer; a list of tensors if the layer
            is connected from multiple layers.

    Return:
        a tensor if the layer is connected to a single layer; a list of
        tensors if the layer is connected to multiple layers;
    '''
    assert self.has_setup, 'Must call setup() before forward()'
    if type(x) is list:
        # BUG FIX: the loop previously appended to `x` (the list being
        # iterated) instead of `xs`, which never terminates and corrupts
        # the caller's input list while leaving `xs` empty.
        xs = [t.singa_tensor for t in x]
    else:
        assert isinstance(x, tensor.Tensor), \
            'input must be a Tensor or a list of Tensor'
        xs = x.singa_tensor
    y = self.layer.Forward(flag, xs)
    # A multi-output layer returns a list of raw tensors.
    if type(y) is list:
        return tensor.from_raw_tensors(y)
    else:
        return tensor.from_raw_tensor(y)
def read(self):
    '''Call read method to load all (param_name, param_val)

    Returns:
        a dict of (parameter name, parameter Tensor)
    '''
    params = {}
    # Snapshot.Read() yields (name, raw tensor) pairs.
    for (param_name, param_val) in self.snapshot.Read():
        # Parenthesized single-argument form prints the same text under
        # both python2 and python3 (the old `print x` statement is a
        # syntax error on python3); stray trailing semicolon dropped.
        print(param_name)
        params[param_name] = tensor.from_raw_tensor(param_val)
    return params
def forward(self, x, y):
    '''Compute the metric for each sample.

    Args:
        x (Tensor): predictions, one row per sample
        y (Tensor): ground truth values, one row per sample

    Returns:
        a tensor of floats, one per sample
    '''
    # Delegate to the swig metric and wrap the raw result back into a
    # python Tensor.
    raw = self.swig_metric.Forward(x.singa_tensor, y.singa_tensor)
    return tensor.from_raw_tensor(raw)
def forward(self, flag, x, y):
    '''Compute the loss values.

    Args:
        flag (int): kTrain or kEval. If it is kTrain, then the backward
            function must be called before calling forward again.
        x (Tensor): the prediction Tensor
        y (Tensor): the ground truch Tensor, x.shape[0] must = y.shape[0]

    Returns:
        a tensor of floats for the loss values, one per sample
    '''
    # Delegate to the swig loss object and rewrap its raw output.
    raw = self.swig_loss.Forward(flag, x.singa_tensor, y.singa_tensor)
    return tensor.from_raw_tensor(raw)
def forward(self, flag, x, y):
    '''Compute the loss values.

    Args:
        flag: kTrain/kEval or bool. If it is kTrain/True, then the backward
            function must be called before calling forward again.
        x (Tensor): the prediction Tensor
        y (Tensor): the ground truch Tensor, x.shape[0] must = y.shape[0]

    Returns:
        a tensor of floats for the loss values, one per sample
    '''
    # A bool flag is shorthand for the train/eval phase constants.
    if type(flag) is bool:
        flag = model_pb2.kTrain if flag else model_pb2.kEval
    raw = self.swig_loss.Forward(flag, x.singa_tensor, y.singa_tensor)
    return tensor.from_raw_tensor(raw)
def backward(self):
    '''
    Returns:
        the grad of x w.r.t. the loss
    '''
    # The swig loss caches state from forward(); Backward() consumes it.
    grad = self.swig_loss.Backward()
    return tensor.from_raw_tensor(grad)