def __init__(self):
    """Build the EqualCount test net with two fixed int32 input parameters."""
    super(NetEqualCount, self).__init__()
    self.equalcount = P.EqualCount()
    first = Tensor(np.array([1, 20, 5]).astype(np.int32))
    second = Tensor(np.array([2, 20, 5]).astype(np.int32))
    self.x = Parameter(initializer(first, first.shape()), name='x')
    self.y = Parameter(initializer(second, second.shape()), name='y')
def __init__(self):
    """Build the sparse softmax-cross-entropy test net with fixed logits and labels."""
    super(NetSoftmaxWithCrossEntropy, self).__init__()
    logit_values = Tensor(
        np.array([[1, 1, 10], [1, 10, 1], [10, 1, 1]]).astype(np.float32))
    self.logits = Parameter(initializer(logit_values, logit_values.shape()),
                            name='logits')
    label_values = Tensor(np.array([2, 1, 0]).astype(np.int32))
    self.labels = Parameter(initializer(label_values, label_values.shape()),
                            name='labels')
    # True -> is_grad mode of the sparse cross-entropy op (per original call).
    self.SoftmaxWithCrossEntropy = P.SparseSoftmaxCrossEntropyWithLogits(True)
def __init__(self):
    """Build the Argmax test net with a fixed 3x3 float32 input parameter."""
    super(NetArgmax, self).__init__()
    self.argmax = P.Argmax(output_type=mstype.int32)
    values = Tensor(np.array([[1., 20., 5.],
                              [67., 8., 9.],
                              [130., 24., 15.]]).astype(np.float32))
    self.x = Parameter(initializer(values, values.shape()), name='x')
def __init__(self):
    """Build the Softmax test net with a fixed 3x3 float32 input parameter."""
    super(NetSoftmax, self).__init__()
    self.softmax = P.Softmax()
    values = Tensor(np.array([[0.1, 0.3, 0.6],
                              [0.2, -0.6, 0.8],
                              [0.6, 1, 0.4]]).astype(np.float32))
    self.x = Parameter(initializer(values, values.shape()), name='x')
def test(name, file_path, batch_size):
    """Run one forward pass of the named model on a batch from the dataset.

    Args:
        name: model identifier forwarded to get_model().
        file_path: currently unused; kept for interface compatibility.
            NOTE(review): presumably a checkpoint/data path — confirm with callers.
        batch_size: batch size forwarded to get_dataset().
    """
    network = get_model(name)
    batch = get_dataset(batch_size=batch_size)
    # Collect all batches as numpy arrays, then stack and convert NHWC -> NCHW.
    data_list = [data.asnumpy() for data in batch]
    batch_data = np.concatenate(data_list, axis=0).transpose((0, 3, 1, 2))
    input_tensor = Tensor(batch_data)
    print(input_tensor.shape())
    network(input_tensor)
def test_check_layer_norm_2():
    """LayerNorm over the trailing axes with begin_params_axis=1 must raise NotImplementedError."""
    inputs = Tensor(np.ones([20, 5, 10, 10]), mstype.float32)
    norm_shape = inputs.shape()[1:]
    layer = nn.LayerNorm(norm_shape, begin_params_axis=1)
    with pytest.raises(NotImplementedError):
        layer(inputs)