class Perceptron:
    """Single-layer network: one Affine layer followed by sigmoid + loss.

    Parameters
    ----------
    input_size : number of input features
    output_size : number of output units
    """

    def __init__(self, input_size, output_size):
        # Small random weights break symmetry; biases start at zero.
        W1 = 0.01 * np.random.randn(input_size, output_size)
        b1 = np.zeros(output_size)

        self.loss_layer = SigmoidWithLoss()
        self.layers = [Affine(W1, b1)]

        # Collect every layer's parameters and gradients into flat lists
        # so an optimizer can update all of them uniformly.
        self.params, self.grads = [], []
        for layer in self.layers:
            self.params += layer.params
            self.grads += layer.grads

    def predict(self, X):
        """Run a forward pass through all layers and return the raw scores."""
        for layer in self.layers:
            X = layer.forward(X)
        return X

    def forward(self, X, y):
        """Return the scalar loss for inputs X against targets y."""
        score = self.predict(X)
        loss = self.loss_layer.forward(score, y)
        return loss

    def backward(self, dout=1):
        """Backpropagate through the loss layer, then all layers in reverse."""
        dout = self.loss_layer.backward(dout)
        for layer in reversed(self.layers):
            dout = layer.backward(dout)
        return dout
def __init__(self, input_size, output_size):
    """Build one Affine layer plus a sigmoid-with-loss output layer.

    Parameters
    ----------
    input_size : number of input features
    output_size : number of output units
    """
    # Small random weights break symmetry; biases start at zero.
    W1 = 0.01 * np.random.randn(input_size, output_size)
    b1 = np.zeros(output_size)

    self.loss_layer = SigmoidWithLoss()
    self.layers = [Affine(W1, b1)]

    # Aggregate parameters/gradients into flat lists so an optimizer
    # can update them uniformly.
    self.params, self.grads = [], []
    for layer in self.layers:
        self.params += layer.params
        self.grads += layer.grads
def forward(self, xs, ts):
    """Average the sigmoid-with-loss over every time step.

    Parameters
    ----------
    xs : scores of shape (batch, T) — one column per time step
    ts : targets of the same shape

    Returns the mean per-step loss.
    """
    self.xs_shape = xs.shape  # remembered for the backward pass
    T = xs.shape[1]
    self.layers = []

    total = 0
    for step in range(T):
        loss_layer = SigmoidWithLoss()
        total += loss_layer.forward(xs[:, step], ts[:, step])
        self.layers.append(loss_layer)

    return total / T
def __init__(self, W, corpus, power=0.75, sample_size=5):
    """Negative-sampling loss.

    Parameters
    ----------
    W : output-side weight matrix
    corpus : list of word IDs
    power : exponent applied to the unigram distribution
    sample_size : number of negative samples per target
    """
    self.sample_size = sample_size
    # Negatives are drawn from the (powered) unigram distribution.
    self.sampler = UnigramSampler(corpus, power, sample_size)

    # One layer for the positive example plus sample_size layers for the
    # negatives; by convention index 0 handles the positive example.
    self.loss_layers = [SigmoidWithLoss() for _ in range(sample_size + 1)]
    self.embed_dot_layers = [EmbeddingDot(W) for _ in range(sample_size + 1)]

    # Expose all parameters/gradients as flat lists for the optimizer.
    self.params, self.grads = [], []
    for ed_layer in self.embed_dot_layers:
        self.params.extend(ed_layer.params)
        self.grads.extend(ed_layer.grads)
def __init__(self, W, corpus, power=0.75, sample_size=5):
    """Negative-sampling loss.

    Parameters
    ----------
    W : output-side weight matrix
    corpus : list of word IDs
    power : exponent applied to the unigram distribution
    sample_size : number of negative samples per target
    """
    self.sample_size = sample_size
    self.sampler = UnigramSampler(corpus, power, sample_size)

    # sample_size layers for negatives plus one for the positive example.
    self.loss_layers = [SigmoidWithLoss() for _ in range(sample_size + 1)]
    self.embed_dot_layers = [EmbeddingDot(W) for _ in range(sample_size + 1)]

    # Flatten every layer's parameters/gradients into single lists
    # (e.g. [np.array(w), np.array(w), ...]) for the optimizer.
    self.params, self.grads = [], []
    for layer in self.embed_dot_layers:
        self.params += layer.params
        self.grads += layer.grads
def __init__(self, W, corpus, power=0.75, sample_size=5):
    """Negative-sampling loss.

    Parameters
    ----------
    W : output-side weight matrix
    corpus : list of word IDs
    power : exponent applied to the unigram distribution
    sample_size : number of negative samples per target
    """
    self.sample_size = sample_size
    self.sampler = UnigramSampler(corpus, power, sample_size)

    n_layers = sample_size + 1  # negatives plus one positive example
    self.loss_layers = [SigmoidWithLoss() for _ in range(n_layers)]
    self.embed_dot_layers = [EmbeddingDot(W) for _ in range(n_layers)]

    # Flatten every layer's parameters/gradients for the optimizer.
    self.params = []
    self.grads = []
    for layer in self.embed_dot_layers:
        self.params.extend(layer.params)
        self.grads.extend(layer.grads)