Example #1
0
def test_grad_refactor_13():
    """Take gradients w.r.t. the trainable parameter of x * z * y."""
    class Net(nn.Cell):
        """Multiplies both inputs by a learnable vector ``z``."""
        def __init__(self):
            super(Net, self).__init__()
            self.z = Parameter(Tensor(np.ones([2]).astype(np.float32)), name='z')
        def construct(self, x, y):
            return x * self.z * y
    net = Net()
    weights = ParameterTuple(net.trainable_params())
    # Name the inputs before the call instead of inlining them.
    first_input = Tensor(np.ones([2]).astype(np.float32))
    second_input = Tensor(np.zeros([2]).astype(np.float32))
    C.grad_by_list(net, weights)(first_input, second_input)
Example #2
0
def test_switch_layer_with_single_prim():
    """Index-select between two ReLU layers, then run forward and both grad flavors."""
    class SwitchLayerCell(nn.Cell):
        """Picks one ReLU layer by index and scales its output by ``z3``."""
        def __init__(self):
            super(SwitchLayerCell, self).__init__()
            self.layers = (nn.ReLU(), nn.ReLU())
            self.z3 = Parameter(
                Tensor(np.full([128, 96], 0.6, dtype=np.float32)), name='z3')

        def construct(self, index, x):
            selected = self.layers[index]
            return selected(x) * self.z3

    net = SwitchLayerCell()
    idx = Tensor(0, dtype=mstype.int32)
    data = Tensor(np.full([128, 96], 0.6, dtype=np.float32))
    # Forward pass, gradients w.r.t. weights, then gradients w.r.t. inputs.
    net(idx, data)
    params = ParameterTuple(net.trainable_params())
    C.grad_by_list(net, params)(idx, Tensor(np.full([128, 96], 0.6, dtype=np.float32)))
    C.grad_all(net)(idx, Tensor(np.full([128, 96], 0.6, dtype=np.float32)))
Example #3
0
def test_index_to_switch_layer():
    """Index-select between two parameterized layers; check forward and gradients."""
    class Layer1(nn.Cell):
        """Scales the input by its own parameter ``z1``."""
        def __init__(self):
            super(Layer1, self).__init__()
            self.z1 = Parameter(
                Tensor(np.full([128, 96], 0.6, dtype=np.float32)), name='z1')

        def construct(self, x):
            return x * self.z1

    class Layer2(nn.Cell):
        """Scales the input by its own parameter ``z2``."""
        def __init__(self):
            super(Layer2, self).__init__()
            self.z2 = Parameter(
                Tensor(np.full([128, 96], 0.6, dtype=np.float32)), name='z2')

        def construct(self, x):
            return x * self.z2

    class SwitchLayerCell(nn.Cell):
        """Dispatches to Layer1/Layer2 by index and scales the result by ``z3``."""
        def __init__(self):
            super(SwitchLayerCell, self).__init__()
            self.layers = (Layer1(), Layer2())
            self.z3 = Parameter(
                Tensor(np.full([128, 96], 0.6, dtype=np.float32)), name='z3')

        def construct(self, index, x):
            chosen = self.layers[index]
            return chosen(x) * self.z3

    net = SwitchLayerCell()
    idx = Tensor(0, dtype=mstype.int32)
    # Forward pass, gradients w.r.t. weights, then gradients w.r.t. inputs.
    net(idx, Tensor(np.full([128, 96], 0.6, dtype=np.float32)))
    params = ParameterTuple(net.trainable_params())
    C.grad_by_list(net, params)(
        idx, Tensor(np.full([128, 96], 0.6, dtype=np.float32)))
    C.grad_all(net)(idx, Tensor(np.full([128, 96], 0.6, dtype=np.float32)))
Example #4
0
 def construct(self, x, label):
     """Return gradients of the wrapped network w.r.t. its weights."""
     # Inline the weights lookup into the grad call.
     return C.grad_by_list(self.network, self.weights)(x, label)
 def construct(self, data, label):
     """Return gradients of the wrapped network w.r.t. its weights."""
     grad_fn = C.grad_by_list(self.network, self.weights)
     return grad_fn(data, label)
Example #6
0
 def construct(self, *inputs):
     """Return gradients of the wrapped net w.r.t. its weights for any arity."""
     grad_fn = C.grad_by_list(self.net, self.weights)
     return grad_fn(*inputs)
 def construct(self, data, label, *args):
     """Compute weight gradients, optionally step the LR schedule, then optimize.

     Statement order is preserved: the LR schedule update happens after the
     gradient computation and before the optimizer step.
     """
     params = self.weights
     param_grads = grad_by_list(self.network, params)(data, label)
     if self.lr_schedule:
         self.schedule.update_lr(*args)
     return self.optimizer(param_grads)
Example #8
0
 def construct(self, a, b, c):
     """Return gradients of the wrapped net w.r.t. its weights for three inputs."""
     grad_fn = C.grad_by_list(self.net, self.weights)
     return grad_fn(a, b, c)
 def construct(self, x, y):
     """Return gradients of the wrapped net w.r.t. its weights for two inputs."""
     grad_fn = C.grad_by_list(self.net, self.weights)
     return grad_fn(x, y)
Example #10
0
 def construct(self):
     """Return gradients of the wrapped zero-argument network w.r.t. its weights."""
     grad_fn = C.grad_by_list(self.network, self.weights)
     return grad_fn()
Example #11
0
 def construct(self, x, label):
     """Compute weight gradients and apply them via the optimizer."""
     # Inline the weights lookup into the grad call.
     grads = C.grad_by_list(self.network, self.weights)(x, label)
     return self.optimizer(grads)