Example #1
 def forward(self, x):
     x = x.data
     # inverted dropout: keep each unit with probability self.p and
     # rescale by 1/self.p so the expected activation is unchanged
     mask = (np.random.rand(*x.shape) < self.p) / self.p
     out = x * mask
     out = out.astype(x.dtype, copy=False)
     self.mask = mask  # cached for the backward pass
     return uvar.BaseVar(data=out)
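A minimal standalone sketch of the same inverted-dropout idea in plain numpy (the `p` below plays the role of `self.p`, the keep probability; the mean check is an illustration, not part of the original class):

 import numpy as np

 p = 0.5                                    # keep probability
 x = np.ones((1000, 100), dtype=np.float32)
 mask = (np.random.rand(*x.shape) < p) / p  # each entry is 0 or 1/p
 out = x * mask
 # rescaling by 1/p keeps the expected activation close to the input mean
 print(out.mean())  # ~1.0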
Example #2
 def train(self,
           xs,
           ys,
           epochs,
           batchs=100,
           learning_rate=1e-2,
           display_per_epoch=10):
     """
     - xs: numpy array, shape (N, W, H)
     - ys: numpy array, shape (N,)
       where N is the number of samples,
       W is the width of each image,
       H is the height of each image
     - epochs: number of training epochs
     - batchs: batch size; N must be divisible by batchs
     """
     N, = ys.shape
     assert N % batchs == 0, "N (%d) is not divisible by batchs (%d)" % (N, batchs)
     C = N // batchs  # number of batches per epoch
     # flatten each image and split the data into C batches of size batchs
     xs = np.reshape(xs, (C, batchs, -1))
     ys = np.reshape(ys, (C, batchs))
     epo = 0
     xCache = [(uvar.BaseVar(data=xs[i]), ys[i]) for i in range(C)]
     loss_hist = []
     accu_hist = []
     while epo < epochs:
         epo += 1
         loss = 0
         acc = 0
         for x, y in xCache:
             out = self.forward(x)
             l, dout = svm_loss(out.data, y)
             self.backward(uvar.BaseVar(grad=dout))
             self.optim(learning_rate)
             loss += l
             acc += np.sum(np.argmax(out.data, axis=1) == y)
         if epo % display_per_epoch == 0:
             print("epochs", epo, "loss", loss, "accuracy", acc / N)
             learning_rate *= 0.8  # decay the learning rate at each display interval
         loss_hist.append(loss)
         accu_hist.append(acc / N)
     return loss_hist, accu_hist
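A self-contained sketch of the batching layout train() relies on (the dataset size and image shape below are hypothetical; the point is that each of the C batches holds batchs flattened W*H images):

 import numpy as np

 N, W, H = 600, 28, 28   # hypothetical dataset size and image shape
 batchs = 100            # batch size, as in train()
 C = N // batchs         # number of batches
 xs = np.zeros((N, W, H))
 batches = np.reshape(xs, (C, batchs, -1))
 print(batches.shape)    # (6, 100, 784): C batches of flattened images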
Example #3
 def runModel(self, xs):
     """
     - xs: a numpy array, shape (N, W, H)
       where N is the number of samples,
       W is the width of each image,
       H is the height of each image
     return the per-class scores for each input (raw scores, not
     normalized probabilities, since the model is trained with an SVM loss)
     """
     N, _, _ = xs.shape
     x = np.reshape(xs, (N, -1))  # flatten each image before the forward pass
     out = self.forward(uvar.BaseVar(data=x))
     return out.data
Example #4
 def __init__(self, W, O, alpha=1, beta=0):
     """
     - W: input width
     - O: output size
     - alpha, beta: w is initialized as alpha * randn(W, O) + beta
     - self:
         - w: (W, O) weight matrix
         - b: (O,) bias vector
     """
     w = uvar.RandnVar((W, O))
     b = uvar.RandnVar((O, ))
     # scale and shift the raw randn draw as described in the docstring
     # (assumes RandnVar exposes a .data array like BaseVar)
     w.data = alpha * w.data + beta
     # per-parameter optimizer state: first/second moment estimates and
     # timestep, as used by Adam-style updates
     w.m, w.v, w.t = (None, None, 1)
     b.m, b.v, b.t = (None, None, 1)
     self.w, self.b = w, b
     self.out = uvar.BaseVar()
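A quick numpy illustration of the alpha/beta initialization described in the docstring (the shapes and values here are arbitrary):

 import numpy as np

 W, O = 784, 10
 alpha, beta = 0.01, 0.0
 w = alpha * np.random.randn(W, O) + beta
 # alpha sets the standard deviation, beta the mean of the initial weights
 print(round(w.std(), 4), round(w.mean(), 4))  # ~0.01, ~0.0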
Example #5
 def backward(self, dout):
     # route gradients only through the units kept in the forward pass;
     # the 1/p scaling is already baked into self.mask
     dx = self.mask * dout.data
     return uvar.BaseVar(grad=dx)
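A standalone check that this backward rule matches the forward pass (plain numpy, mirroring the mask logic from Example #1):

 import numpy as np

 p = 0.8
 x = np.random.randn(4, 5)
 mask = (np.random.rand(*x.shape) < p) / p
 out = x * mask            # forward
 dout = np.ones_like(out)  # upstream gradient
 dx = mask * dout          # backward: the same mask scales the gradient
 # since out = x * mask elementwise, d(out)/d(x) is exactly the mask
 assert np.allclose(dx, mask)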
Example #6
 def __init__(self):
     # placeholder output variable, populated during the forward pass
     self.out = uvar.BaseVar()