Example #1
import cntk as C

def CreatRNN(cell_dim,
             activation,
             initial_state,
             direction,
             num_layers,
             init=C.default_override_or(C.glorot_uniform()),
             init_bias=C.default_override_or(0)):
    # Bidirectional: per layer, run a forward and a backward recurrence and
    # splice their outputs along the feature axis.
    if direction == 'bidirectional':
        return C.layers.Sequential([
            C.layers.For(range(num_layers), lambda i: [
                (C.layers.Recurrence(C.layers.RNNStep(cell_dim,
                                                      activation=activation,
                                                      init=init,
                                                      init_bias=init_bias),
                                     initial_state=initial_state,
                                     return_full_state=False,
                                     go_backwards=False),
                 C.layers.Recurrence(C.layers.RNNStep(cell_dim,
                                                      activation=activation,
                                                      init=init,
                                                      init_bias=init_bias),
                                     initial_state=initial_state,
                                     return_full_state=False,
                                     go_backwards=True)),
                C.splice])])
    # Unidirectional: a single recurrence per layer, forward or backward.
    else:
        go_backward = False if direction == 'forward' else True
        return C.layers.Sequential([
            C.layers.For(range(num_layers), lambda i: [
                C.layers.Recurrence(C.layers.RNNStep(cell_dim,
                                                     activation=activation,
                                                     init=init,
                                                     init_bias=init_bias),
                                    initial_state=initial_state,
                                    return_full_state=False,
                                    go_backwards=go_backward)])])
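A minimal usage sketch (the input dimension, cell size, activation, initial state, and layer count below are illustrative assumptions, not part of the original snippet): the returned Sequential model is applied to a CNTK sequence input like any other layer.

import cntk as C

# Hypothetical dimensions, chosen only for illustration.
features = C.sequence.input_variable(20)
birnn = CreatRNN(cell_dim=64,
                 activation=C.tanh,
                 initial_state=0.1,
                 direction='bidirectional',
                 num_layers=2)
outputs = birnn(features)  # one vector per step; forward and backward outputs
                           # are spliced, giving 2 * cell_dim features per step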
Example #2
import numpy as np
import cntk as C

def InstanceNormalization(
        num_channel, initial_scale=1, initial_bias=0, epsilon=C.default_override_or(0.00001), name=''):
    """ Instance Normalization (2016) """
    epsilon = C.get_default_override(InstanceNormalization, epsilon=epsilon)

    dtype = C.get_default_override(None, dtype=C.default_override_or(np.float32))

    # Learnable per-channel affine parameters.
    scale = C.Parameter(num_channel, init=initial_scale, name='scale')
    bias = C.Parameter(num_channel, init=initial_bias, name='bias')
    epsilon = np.asarray(epsilon, dtype=dtype)

    @C.BlockFunction('InstanceNormalization', name)
    def instance_normalization(x):
        # Per-channel statistics over the spatial axes (1, 2) of a (C, H, W) tensor.
        mean = C.reduce_mean(x, axis=(1, 2))
        x0 = x - mean
        std = C.sqrt(C.reduce_mean(x0 * x0, axis=(1, 2)))
        if epsilon != 0:
            std += epsilon
        x_hat = x0 / std
        # Rescale and shift each channel with the learned parameters.
        return x_hat * C.reshape(scale, (-1, 1, 1)) + C.reshape(bias, (-1, 1, 1))

    return instance_normalization
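A hedged usage sketch for the factory above; the (channels, height, width) input shape is an assumption chosen for illustration.

import cntk as C

# Hypothetical image-shaped input (channels, height, width).
x = C.input_variable((3, 64, 64))
inorm = InstanceNormalization(num_channel=3)
y = inorm(x)  # same shape as x: each channel of each sample is normalized to
              # zero mean and unit variance, then scaled and shifted per channel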
Example #3
from cntk import default_override_or, get_default_override

def Test(some_param=default_override_or(13)):
    # An explicit argument or an active override wins; otherwise the default (13) is used.
    some_param = get_default_override(Test, some_param=some_param)
    return some_param
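A sketch of how such an overridable default is typically exercised (hedged: the value 42 is an illustrative assumption, and whether the generic default_options scope accepts an arbitrary option name like some_param can depend on the CNTK version; default_options_for(Test, some_param=42) is the function-scoped alternative).

from cntk.default_options import default_options

print(Test())          # 13 -- the declared default is used
with default_options(some_param=42):
    print(Test())      # 42 -- the override is picked up by get_default_override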