Example #1
0
File: mdn.py  Project: kaeufl/NNPy
 def __init__(self, H = 3, d = 1, ny = 1, M = 3, debug_output = False):
   """
   Initialise a mixture-density network on top of a two-layer perceptron.

   H: number of hidden units
   d: number of inputs
   ny: number of target outputs (kept in self.c)
   M: number of mixture components
   debug_output: forwarded to TLP.__init__
   """
   # Remember the target dimensionality before deriving the raw
   # network output count below.
   self.c = ny
   self.M = M
   self.count_fwd = 0
   # The network emits M mixing coefficients, M variances and
   # M*ny component means.
   n_outputs = 2 * M + M * ny
   TLP.__init__(self, H, d, n_outputs, linear_output = True,
                error_function = 'mdn', debug_output = debug_output)
Example #2
0
File: rnn.py  Project: kaeufl/NNPy
 def __init__(self, H = 3, d = 1, ny = 1, T = 100):
     """
     Create a fully-connected neural network with one hidden recurrent layer.

     @param H: number of hidden units
     @param d: number of inputs
     @param ny: number of output units
     @param T: number of time-steps
     """
     TLP.__init__(self, H, d, ny)
     self.T = T

     # Hidden-unit activations plus one extra bias unit per time step,
     # stored behind a leading singleton (pattern) axis.
     self.z = np.zeros([T, H + 1])[None, :]
     self.z[:, :, 0] = 1  # the extra bias unit is clamped to one

     # Back-propagated hidden-layer deltas, same layout as self.z.
     self.dj = np.zeros([T, H + 1])[None, :]

     # Recurrent hidden-to-hidden weights, scaled by 1/sqrt(H).
     # TODO: check?
     self.wh = np.random.normal(loc=0.0, scale = 1, size=[H, H]) / np.sqrt(H)
     self.Nwh = H ** 2
Example #3
0
File: BayesTLP.py  Project: kaeufl/NNPy
 def __init__(self, H = 3, d = 1, ny = 1):
   """
   Set up a two-layer perceptron with a linear output layer and the
   'bayes' error function.

   H: number of hidden units
   d: number of inputs
   ny: number of outputs
   """
   TLP.__init__(self, H, d, ny,
                linear_output = True, error_function = 'bayes')