Esempio n. 1
0
def predict(network, x):
    """Run a forward pass through a 3-layer network and return softmax probabilities.

    `network` is a dict holding weight matrices 'W1'..'W3' and bias vectors
    'b1'..'b3'; `x` is the input batch.
    """
    activations = x
    # Hidden layers 1 and 2: affine transform followed by a sigmoid activation.
    for layer in ('1', '2'):
        weighted = np.dot(activations, network['W' + layer]) + network['b' + layer]
        activations = sigmoid(weighted)
    # Output layer: affine transform followed by softmax to get probabilities.
    scores = np.dot(activations, network['W3']) + network['b3']
    return softmax(scores)
Esempio n. 2
0
    def loss(self, x, t):
        """Return the cross-entropy loss of softmax(predict(x)) against labels t."""
        scores = self.predict(x)
        probabilities = softmax(scores)
        return cross_entropy_error(probabilities, t)
Esempio n. 3
0
	def loss(self, x, t):
		"""Return the cross-entropy error between softmax(predict(x)) and labels t."""
		probabilities = softmax(self.predict(x))
		return cross_entropy_error(probabilities, t)
Esempio n. 4
0
 def forward(self, x):
     """Apply softmax to x; the result is cached on self.out and returned."""
     out = softmax(x)
     self.out = out
     return out
Esempio n. 5
0
 def loss(self, x, t):
     """Cross-entropy loss of the model's prediction for x.

     t is the one-hot encoded answer label.
     """
     probabilities = softmax(self.predict(x))
     return cross_entropy_error(probabilities, t)
Esempio n. 6
0
    def loss(self, x, t):
        """Compute the loss for input x against target labels t."""
        scores = self.predict(x)              # raw network output
        probabilities = softmax(scores)       # softmax turns scores into probabilities
        # Cross-entropy between predicted probabilities and the targets.
        return cross_entropy_error(probabilities, t)
Esempio n. 7
0
    def forward(self, x, t):
        """Softmax-with-loss forward pass: score softmax(x) against labels t.

        Inputs and outputs are cached on the instance for later use.
        """
        y = softmax(x)
        self.t = t
        self.y = y
        self.loss = cross_entropy_error(y, t)
        return self.loss
Esempio n. 8
0
    def loss(self, x, t):
        """Compute the value of the loss function; t is the ground-truth label."""
        predictions = softmax(self.predict(x))
        return cross_entropy_error(predictions, t)
Esempio n. 9
0
    def forward(self, x, d):
        """Forward pass: softmax activation of x scored against targets d."""
        y = functions.softmax(x)
        # Cache targets and outputs for the backward pass.
        self.d = d
        self.y = y
        # NOTE(review): the arguments here are (d, y), while other snippets in
        # this file call cross_entropy_error(y, t) — confirm this matches the
        # actual signature of functions.cross_entropy_error in this project.
        self.loss = functions.cross_entropy_error(self.d, self.y)
        return self.loss
Esempio n. 10
0
    # Resize the silhouette image to the model's expected input size.
    input_silhouette = util.resizeImage(input_silhouette, data_size, rect,
                                        True)

    if flip:
        # flipCode=1 mirrors the image horizontally (around the y-axis).
        input_silhouette = cv2.flip(input_silhouette, 1)

    # Save the preprocessed silhouette image to disk, versioned by the
    # extractor version so runs with different extractors do not collide.
    input_silhouette_path = os.path.join(data_path, 'input_silhouette')
    input_silhouette_path = os.path.join(
        input_silhouette_path, file_name + '_v' + extractor.version + '.png')
    cv2.imwrite(input_silhouette_path, input_silhouette)

    # Binarize (any positive pixel -> 1) and flatten into a 1-D feature vector.
    input_silhouette = np.where(input_silhouette > 0, 1, 0)
    input_silhouette = input_silhouette.flatten()

    # Run the classifier and convert its raw scores into probabilities.
    result = estimator.predict(input_silhouette)
    result = functions.softmax(result[0])
    index = np.argmax(result)

    _label = label.copy()
    final_result = []
    # Partial selection sort (in place, descending): after pass i,
    # result[i] / _label[i] hold the i-th largest probability and its label.
    # Only the top 3 positions are ordered.
    for i in range(3):
        for j in range(i + 1, len(result)):
            if result[i] < result[j]:
                temp = result[i]
                temp_label = _label[i]
                result[i] = result[j]
                result[j] = temp
                _label[i] = _label[j]
                _label[j] = temp_label

        # NOTE(review): the fragment is cut off here — `res` is presumably
        # appended to final_result further down; confirm against the full file.
        res = {'keyword': _label[i], 'probability': round(result[i], 2)}
Esempio n. 11
0
    def forward(self, x, t):
        """Forward pass of softmax-with-loss; caches t, y, and the loss value."""
        y = functions.softmax(x)
        loss = loss_functions.cross_entropy_error(y, t)
        # Keep everything needed by the backward pass on the instance.
        self.t = t
        self.y = y
        self.loss = loss
        return loss
Esempio n. 12
0
 def predict(self, x):
     """Single-layer prediction: softmax of the affine score x @ W."""
     # Each layer's computation is a matrix product: a = xW.
     scores = np.dot(x, self.W)
     # Softmax is used as the output-layer activation function.
     return functions.softmax(scores)