Example #1
    def backward(self, flag, dy):
        '''Backward propagate gradients through this layer.

        Args:
            flag (int or bool): True (kTrain) for training, False (kEval) for
                evaluation; other values are reserved for future use.
            dy (Tensor or list<Tensor>): the gradient(s) of the objective loss
                w.r.t. this layer's output y
        Return:
            <dx, <dp1, dp2, ...>>, where dx is the gradient (tensor or list of
            tensors) of the loss w.r.t. the layer input x, and dpi is the
            gradient w.r.t. the i-th parameter
        '''
        if type(flag) is bool:
            if flag:
                flag = model_pb2.kTrain
            else:
                flag = model_pb2.kEval

        if type(dy) == list:
            dys = [t.singa_tensor for t in dy]
            ret = self.layer.BackwardWithMultInputs(flag, dys)
        else:
            assert isinstance(dy, tensor.Tensor), \
                    'input of %s (type:%s) must be a Tensor or Tensor list'\
                    % (self.name, type(dy).__name__)
            dys = dy.singa_tensor
            ret = self.layer.Backward(flag, dys)
        if type(ret[0]) is tuple:
            dxs = tensor.from_raw_tensors(ret[0])
        else:
            dxs = tensor.from_raw_tensor(ret[0])
        return dxs, tensor.from_raw_tensors(ret[1])
Example #2
    def backward(self, flag, dy):
        '''Backward propagate gradients through this layer.

        Args:
            flag (int or bool): True (kTrain) for training, False (kEval) for
                evaluation; other values are reserved for future use.
            dy (Tensor or list<Tensor>): the gradient(s) of the objective loss
                w.r.t. this layer's output y
        Return:
            <dx, <dp1, dp2, ...>>, where dx is the gradient (tensor or list of
            tensors) of the loss w.r.t. the layer input x, and dpi is the
            gradient w.r.t. the i-th parameter
        '''
        if type(flag) is bool:
            if flag:
                flag = model_pb2.kTrain
            else:
                flag = model_pb2.kEval

        if type(dy) == list:
            dys = [t.singa_tensor for t in dy]
            ret = self.layer.BackwardWithMultInputs(flag, dys)
        else:
            assert isinstance(dy, tensor.Tensor), \
                'the input must be a Tensor or a list of Tensors'
            dys = dy.singa_tensor
            ret = self.layer.Backward(flag, dys)
        if type(ret[0]) is tuple:
            dxs = tensor.from_raw_tensors(ret[0])
        else:
            dxs = tensor.from_raw_tensor(ret[0])
        return dxs, tensor.from_raw_tensors(ret[1])
Example #3
    def forward(self, flag, x):
        '''Forward propagate through this layer.

        Args:
            flag: True (kTrain) for training; False (kEval) for evaluation;
                other values are reserved for future use.
            x (Tensor or list<Tensor>): an input tensor if the layer is
                connected from a single layer; a list of tensors if the layer
                is connected from multiple layers.

        Return:
            a tensor if the layer is connected to a single layer; a list of
            tensors if the layer is connected to multiple layers;
        '''
        assert self.has_setup, 'Must call setup() before forward()'
        if type(flag) is bool:
            if flag:
                flag = model_pb2.kTrain
            else:
                flag = model_pb2.kEval
        if type(x) is list:
            xs = [t.singa_tensor for t in x]
            y = self.layer.ForwardWithMultInputs(flag, xs)
        else:
            assert isinstance(x, tensor.Tensor), \
                    'input of %s (type:%s) must be a Tensor or Tensor list'\
                    % (self.name, type(x).__name__)
            y = self.layer.Forward(flag, x.singa_tensor)
        if type(y) is tuple:
            return tensor.from_raw_tensors(y)
        else:
            return tensor.from_raw_tensor(y)
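The wrapper above is easiest to follow next to a usage sketch. The following is a minimal, hedged example assuming the SINGA 1.x Python API (singa.layer.Dense, singa.tensor, model_pb2 flags); the layer name, shapes and data are illustrative and not taken from the snippet.

import numpy as np
from singa import layer, tensor
from singa.proto import model_pb2

# A small dense layer; passing input_sample_shape should trigger setup()
# in SINGA 1.x, otherwise call dense.setup((4,)) explicitly.
dense = layer.Dense('dense', 3, input_sample_shape=(4,))

x = tensor.from_numpy(np.random.rand(2, 4).astype(np.float32))

# forward() accepts either a bool or a model_pb2 flag (see the wrapper above).
y = dense.forward(model_pb2.kTrain, x)

# A dummy upstream gradient with the same shape as y, only for illustration.
dy = tensor.from_numpy(np.ones((2, 3), dtype=np.float32))
dx, dparams = dense.backward(model_pb2.kTrain, dy)
print(dx.shape, [g.shape for g in dparams])  # gradients w.r.t. x, W and b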
Example #4
    def forward(self, flag, x):
        '''Forward propagate through this layer.

        Args:
            flag (int): kTrain or kEval
            x (Tensor or list<Tensor>): an input tensor if the layer is
                connected from a single layer; a list of tensors if the layer
                is connected from multiple layers.

        Return:
            a tensor if the layer is connected to a single layer; a list of
            tensors if the layer is connected to multiple layers;
        '''
        assert self.has_setup, 'Must call setup() before forward()'
        if type(x) == list:
            xs = []
            for t in x:
                xs.append(t.singa_tensor)
        else:
            assert isinstance(x, tensor.Tensor), \
                'input must be a Tensor or a list of Tensor'
            xs = x.singa_tensor
        y = self.layer.Forward(flag, xs)
        if type(y) == list:
            return tensor.from_raw_tensors(y)
        else:
            return tensor.from_raw_tensor(y)
Example #5
    def forward(self, flag, x):
        '''Forward propagate through this layer.

        Args:
            flag: True (kTrain) for training; False (kEval) for evaluation;
                other values are reserved for future use.
            x (Tensor or list<Tensor>): an input tensor if the layer is
                connected from a single layer; a list of tensors if the layer
                is connected from multiple layers.

        Return:
            a tensor if the layer is connected to a single layer; a list of
            tensors if the layer is connected to multiple layers;
        '''
        assert self.has_setup, 'Must call setup() before forward()'
        if type(flag) is bool:
            if flag:
                flag = model_pb2.kTrain
            else:
                flag = model_pb2.kEval
        if type(x) is list:
            xs = [t.singa_tensor for t in x]
            y = self.layer.ForwardWithMultInputs(flag, xs)
        else:
            assert isinstance(x, tensor.Tensor), \
                'input must be a Tensor or a list of Tensor'
            y = self.layer.Forward(flag, x.singa_tensor)
        if type(y) is tuple:
            return tensor.from_raw_tensors(y)
        else:
            return tensor.from_raw_tensor(y)
Example #6
    def backward(self, flag, dy):
        '''Backward propagate gradients through this layer.

        Args:
            flag (int): for future use.
            dy (Tensor or list<Tensor>): the gradient(s) of the objective loss
                w.r.t. this layer's output y
        Return:
            <dx, <dp1, dp2, ...>>, where dx is the gradient (tensor or list of
            tensors) of the loss w.r.t. the layer input x, and dpi is the
            gradient w.r.t. the i-th parameter
        '''
        if type(dy) == list:
            dys = []
            for t in dy:
                dys.append(t.singa_tensor)
        else:
            assert isinstance(dy, tensor.Tensor), \
                'the input must be a Tensor or a list of Tensors'
            dys = dy.singa_tensor
        ret = self.layer.Backward(flag, dys)
        if type(ret[0]) == list:
            dxs = tensor.from_raw_tensors(ret[0])
        else:
            dxs = tensor.from_raw_tensor(ret[0])
        return dxs, tensor.from_raw_tensors(ret[1])
Example #7
    def forward(self, flag, x):
        '''Forward propagate through this layer.

        Args:
            flag (int): kTrain or kEval
            x (Tensor or list<Tensor>): an input tensor if the layer is
                connected from a single layer; a list of tensors if the layer
                is connected from multiple layers.

        Return:
            a tensor if the layer is connected to a single layer; a list of
            tensors if the layer is connected to multiple layers;
        '''
        assert self.has_setup, 'Must call setup() before forward()'
        if type(x) == list:
            xs = []
            for t in x:
                xs.append(t.singa_tensor)
        else:
            assert isinstance(x, tensor.Tensor), \
                'input must be a Tensor or a list of Tensor'
            xs = x.singa_tensor
        y = self.layer.Forward(flag, xs)
        if type(y) == list:
            return tensor.from_raw_tensors(y)
        else:
            return tensor.from_raw_tensor(y)
Example #8
    def read(self):
        '''Read all (param_name, param_val) pairs from the snapshot file.

        Returns:
            a dict of (parameter name, parameter Tensor)
        '''
        params = {}
        p = self.snapshot.Read()
        for (param_name, param_val) in p:
            print(param_name)
            params[param_name] = tensor.from_raw_tensor(param_val)
        return params
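For orientation, here is a hedged sketch of how such a snapshot wrapper is typically used. The constructor arguments (a file prefix plus a boolean write/read mode) and the companion write() method are assumptions about the SINGA 1.x singa.snapshot.Snapshot API, not something shown in the snippet itself.

from singa import snapshot, tensor

# Save one parameter (assumed signature: Snapshot(prefix, mode), mode=True to write).
w = tensor.Tensor((4, 3))
w.set_value(0.1)
sn_out = snapshot.Snapshot('mymodel', True)
sn_out.write('dense/weight', w)

# Read it back; read() returns a dict of {param_name: Tensor} as above.
sn_in = snapshot.Snapshot('mymodel', False)
params = sn_in.read()
for name, val in params.items():
    print(name, val.shape)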
Example #9
    def forward(self, x, y):
        '''Compute the metric for each sample.

        Args:
            x (Tensor): predictions, one row per sample
            y (Tensor): ground truth values, one row per sample

        Returns:
            a tensor of floats, one per sample
        '''
        return tensor.from_raw_tensor(
            self.swig_metric.Forward(x.singa_tensor, y.singa_tensor))
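A hedged usage sketch for the metric wrapper above, assuming singa.metric.Accuracy and the SINGA 1.x convention of integer ground-truth labels; the data is made up.

import numpy as np
from singa import metric, tensor

acc = metric.Accuracy()

# Two samples, three classes: per-class scores in x, one integer label per row in y.
x = tensor.from_numpy(np.array([[0.1, 0.7, 0.2],
                                [0.8, 0.1, 0.1]], dtype=np.float32))
y = tensor.from_numpy(np.array([1, 2], dtype=np.int32))

per_sample = acc.forward(x, y)       # tensor of 0/1 values, one per sample
print(tensor.to_numpy(per_sample))   # e.g. [1. 0.]
print(acc.evaluate(x, y))            # averaged accuracy over the batch, e.g. 0.5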
Example #10
    def forward(self, flag, x, y):
        '''Compute the loss values.

        Args:
            flag (int): kTrain or kEval. If it is kTrain, then the backward
                function must be called before calling forward again.
            x (Tensor): the prediction Tensor
            y (Tensor): the ground truth Tensor; x.shape[0] must equal y.shape[0]

        Returns:
            a tensor of floats for the loss values, one per sample
        '''
        return tensor.from_raw_tensor(
            self.swig_loss.Forward(flag, x.singa_tensor, y.singa_tensor))
Example #11
    def forward(self, flag, x, y):
        '''Compute the loss values.

        Args:
            flag: kTrain/kEval or bool. If it is kTrain/True, then the backward
                function must be called before calling forward again.
            x (Tensor): the prediction Tensor
            y (Tensor): the ground truth Tensor; x.shape[0] must equal y.shape[0]

        Returns:
            a tensor of floats for the loss values, one per sample
        '''
        if type(flag) is bool:
            if flag:
                flag = model_pb2.kTrain
            else:
                flag = model_pb2.kEval
        return tensor.from_raw_tensor(
            self.swig_loss.Forward(flag, x.singa_tensor, y.singa_tensor))
Example #12
    def backward(self):
        '''Compute the gradient of the loss w.r.t. the prediction tensor x.

        Returns:
            the gradient of the loss w.r.t. x
        '''
        return tensor.from_raw_tensor(self.swig_loss.Backward())
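Putting the two loss wrappers (forward and backward) together, a hedged usage sketch assuming singa.loss.SoftmaxCrossEntropy and integer ground-truth labels, as in SINGA 1.x; the values are illustrative.

import numpy as np
from singa import loss, tensor
from singa.proto import model_pb2

sce = loss.SoftmaxCrossEntropy()

x = tensor.from_numpy(np.array([[2.0, 1.0, 0.1],
                                [0.5, 2.5, 0.2]], dtype=np.float32))
y = tensor.from_numpy(np.array([0, 1], dtype=np.int32))

# forward() with kTrain caches the state that backward() needs.
lvals = sce.forward(model_pb2.kTrain, x, y)  # per-sample loss values
print(tensor.to_numpy(lvals))

dx = sce.backward()                          # gradient of the loss w.r.t. x
print(tensor.to_numpy(dx))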