Code Example #1
File: convlayers.py  Project: evander-dacosta/nuronet
def normalize_padding(value):
    """Validate a `padding` argument and return it in lowercase form."""
    # N is nuronet's backend abstraction module (provides backend(), shape(), dot(), ...)
    padding = value.lower()
    allowed = {'valid', 'same', 'causal'}
    # the Theano backend additionally supports 'full' padding
    if N.backend() == 'theano':
        allowed.add('full')
    if padding not in allowed:
        raise ValueError(
            'The `padding` argument must be one of "valid", "same" (or "causal" for Conv1D). '
            'Received: ' + str(padding))
    return padding
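
As a quick illustration (not part of the original file), the function normalizes case and rejects anything outside the allowed set; the sketch below assumes normalize_padding as defined above and a non-Theano backend, so 'full' is rejected:

# hypothetical usage sketch, assuming a non-Theano backend is active
print(normalize_padding('SAME'))    # -> 'same' (case is normalized)
print(normalize_padding('causal'))  # -> 'causal' (accepted for Conv1D)
try:
    normalize_padding('full')       # only allowed when N.backend() == 'theano'
except ValueError as err:
    print(err)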
Code Example #2
def time_dist_dense(x, w, b, dropout=None,
                    input_dim=None, output_dim=None, 
                    timesteps=None, training=False):
    """
    Apply 'y.W + b' for every temporal slice y of x
    
    Inputs
    ------
        @param x : tensor holding time series data
        @param w : weight matrix
        @param b : bias vector
        @param dropout : dropout fraction (0 < dropout < 1) to apply to each slice
        @param training : whether the caller is in the training phase
        @param input_dim: (optional) dimensionality of the input
        @param output_dim: (optional) dimensionality of the output
        @param timesteps: (optional) number of timesteps
        
    Returns
    -------
        Output tensor
    """
    if not input_dim:
        input_dim = N.shape(x)[2]
    if not timesteps:
        timesteps = N.shape(x)[1]
    if not output_dim:
        output_dim = N.shape(w)[1]
    
    if dropout is not None and 0. < dropout < 1.:
        if training:
            # build a single dropout mask from the first timestep and repeat it
            # across all timesteps, so every temporal slice is masked identically
            ones = N.ones_like(N.reshape(x[:, 0, :], (-1, input_dim)))
            dropout_matrix = N.dropout(ones, dropout)
            expanded_dropout_matrix = N.repeat(dropout_matrix, timesteps)
            x = x * expanded_dropout_matrix
            
    # collapse the time dimension and batch dimension together
    x = N.reshape(x, (-1, input_dim))
    x = N.dot(x, w)
    x += b
    
    # reshape back to 3D: (batch, timesteps, output_dim)
    if N.backend() == 'tensorflow':
        x = N.reshape(x, N.stack([-1, timesteps, output_dim]))
        # the dynamic reshape loses static shape information in TensorFlow,
        # so restore the known output dimension explicitly
        x.set_shape([None, None, output_dim])
    else:
        x = N.reshape(x, (-1, timesteps, output_dim))
    return x
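
The core trick in time_dist_dense is to fold the batch and time axes together so a single 2D matrix multiply covers every timestep, then unfold the result. A minimal NumPy sketch of that reshaping pattern (independent of the `N` backend, which is assumed here, and omitting the dropout branch) looks like this:

import numpy as np

batch, timesteps, input_dim, output_dim = 4, 10, 8, 16
x = np.random.randn(batch, timesteps, input_dim)
w = np.random.randn(input_dim, output_dim)
b = np.zeros(output_dim)

# collapse (batch, timesteps, input_dim) -> (batch * timesteps, input_dim)
flat = x.reshape(-1, input_dim)
# one dense matmul covers every temporal slice at once
out = flat.dot(w) + b
# unfold back to (batch, timesteps, output_dim)
out = out.reshape(-1, timesteps, output_dim)
assert out.shape == (batch, timesteps, output_dim)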