Code Example #1
File: loss.py  Project: lvleysuper/mindspore
    def __init__(self,
                 is_grad=True,
                 sparse=False,
                 reduction=None,
                 smooth_factor=0,
                 num_classes=2):
        super(SoftmaxCrossEntropyWithLogits, self).__init__(reduction)
        self.is_grad = is_grad
        self.sparse = sparse
        validator.check_integer("num_classes", num_classes, 1, Rel.GT,
                                self.cls_name)
        validator.check_number_range("smooth_factor", smooth_factor, 0, 1,
                                     Rel.INC_BOTH, self.cls_name)
        self.smooth_factor = smooth_factor
        self.num_classes = num_classes
        self.softmax_cross_entropy = P.SoftmaxCrossEntropyWithLogits()
        self.one_hot = P.OneHot()
        # Label smoothing: the true class gets 1 - smooth_factor and the
        # remaining num_classes - 1 classes share smooth_factor evenly.
        self.on_value = Tensor(1.0 - self.smooth_factor, mstype.float32)
        self.off_value = Tensor(
            1.0 * self.smooth_factor / (self.num_classes - 1), mstype.float32)
        self.is_cpugpu = context.get_context('device_target') in ["CPU", "GPU"]

        # Only CPU/GPU targets build the dedicated sparse op here; other
        # targets handle sparse labels via one_hot plus the dense op.
        if self.is_cpugpu:
            self.sparse_softmax_cross_entropy = P.SparseSoftmaxCrossEntropyWithLogits(
                is_grad=self.is_grad)
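
A minimal usage sketch of the wrapper defined above (not taken from the project): it builds the loss cell and applies it to toy logits and sparse labels. The shapes, values, and the reduction='mean' setting are illustrative assumptions; is_grad and smooth_factor exist only in the older release this snippet targets, so they are omitted here.

    import numpy as np
    import mindspore.nn as nn
    from mindspore import Tensor

    # Hypothetical driver code: toy 3-class logits with integer (sparse) labels.
    loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
    logits = Tensor(np.array([[1, 1, 10], [1, 10, 1], [10, 1, 1]], np.float32))
    labels = Tensor(np.array([2, 1, 0], np.int32))
    loss = loss_fn(logits, labels)  # scalar, since reduction='mean'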
Code Example #2
    def __init__(self):
        super(NetSoftmaxWithCrossEntropy, self).__init__()
        logits = Tensor(np.array([[1, 1, 10],
                                  [1, 10, 1],
                                  [10, 1, 1]]).astype(np.float32))
        self.logits = Parameter(initializer(logits, logits.shape()), name='logits')
        labels = Tensor(np.array([2, 1, 0]).astype(np.int32))
        self.labels = Parameter(initializer(labels, labels.shape()), name='labels')
        # is_grad=True makes the op return dloss/dlogits rather than the loss value.
        self.SoftmaxWithCrossEntropy = P.SparseSoftmaxCrossEntropyWithLogits(is_grad=True)
Code Example #3
File: loss.py  Project: peixinhou/mindspore
    def __init__(self, sparse=False, reduction='none'):
        super(SoftmaxCrossEntropyWithLogits, self).__init__(reduction)
        self.sparse = validator.check_bool(sparse, "sparse")
        self.reduction = reduction
        self.softmax_cross_entropy = P.SoftmaxCrossEntropyWithLogits()
        self.one_hot = P.OneHot()
        self.on_value = Tensor(1.0, mstype.float32)
        self.off_value = Tensor(0., mstype.float32)
        self.is_cpugpu = context.get_context('device_target') in ["CPU", "GPU"]
        self.sparse_softmax_cross_entropy = P.SparseSoftmaxCrossEntropyWithLogits()
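
The __init__ snippets above only build the operators; a construct method along the following lines (a sketch, not the project's actual code; F here stands for mindspore.ops.functional and get_loss for the reduction helper of the loss base class, both assumptions) would dispatch between the sparse and dense paths:

    def construct(self, logits, labels):
        if self.sparse:
            # CPU/GPU: hand the integer labels straight to the sparse op.
            if self.is_cpugpu:
                return self.sparse_softmax_cross_entropy(logits, labels)
            # Other targets: expand labels to one-hot and use the dense op.
            labels = self.one_hot(labels, F.shape(logits)[-1],
                                  self.on_value, self.off_value)
        loss = self.softmax_cross_entropy(logits, labels)[0]  # op returns (loss, dlogits)
        return self.get_loss(loss)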
Code Example #4
    def __init__(self, is_grad=True, sparse=False, reduction=None):
        super(SoftmaxCrossEntropyWithLogits, self).__init__(reduction)
        self.is_grad = is_grad
        self.sparse = sparse
        self.softmax_cross_entropy = P.SoftmaxCrossEntropyWithLogits()
        self.one_hot = P.OneHot()
        self.on_value = Tensor(1.0, mstype.float32)
        self.off_value = Tensor(0.0, mstype.float32)
        self.is_cpugpu = context.get_context('device_target') in ["CPU", "GPU"]

        if self.is_cpugpu:
            self.sparse_softmax_cross_entropy = P.SparseSoftmaxCrossEntropyWithLogits(
                is_grad=self.is_grad)
Code Example #5
    def __init__(self, is_grad=False):
        super(Net, self).__init__()
        self.SparseSoftmaxCrossEntropyWithLogits = P.SparseSoftmaxCrossEntropyWithLogits(is_grad=is_grad)
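
A hypothetical way to exercise the primitive wrapped by this Net (a sketch, not project code; the CPU target, PyNative mode, and the toy inputs are assumptions):

    import numpy as np
    from mindspore import Tensor, context
    from mindspore.ops import operations as P

    context.set_context(mode=context.PYNATIVE_MODE, device_target="CPU")
    op = P.SparseSoftmaxCrossEntropyWithLogits(is_grad=False)
    logits = Tensor(np.array([[1, 1, 10], [1, 10, 1], [10, 1, 1]], np.float32))
    labels = Tensor(np.array([2, 1, 0], np.int32))
    loss = op(logits, labels)  # with is_grad=True the op returns dloss/dlogits instead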
Code Example #6
    # shapes of logits and labels mismatch
    ('SoftmaxCrossEntropyWithLogits3', {
        'block': (P.SoftmaxCrossEntropyWithLogits(), {
            'exception': ValueError,
            'error_keywords': ['SoftmaxCrossEntropyWithLogits']
        }),
        'desc_inputs': [
            Tensor(np.ones([5]).astype(np.float32)),
            Tensor(np.ones([3]).astype(np.float32))
        ],
        'skip': ['backward']
    }),

    # input is scalar
    ('SparseSoftmaxCrossEntropyWithLogits0', {
        'block': (P.SparseSoftmaxCrossEntropyWithLogits(), {
            'exception': TypeError,
            'error_keywords': ['SparseSoftmaxCrossEntropyWithLogits']
        }),
        'desc_inputs': [5.0, 5.0],
        'skip': ['backward']
    }),
    # logits is Tensor(bool)
    ('SparseSoftmaxCrossEntropyWithLogits1', {
        'block': (P.SparseSoftmaxCrossEntropyWithLogits(), {
            'exception': TypeError,
            'error_keywords': ['SparseSoftmaxCrossEntropyWithLogits']
        }),
        'desc_inputs': [
            Tensor(np.ones([5]).astype(np.bool_)),
            Tensor(np.ones([5]).astype(np.bool_))