Пример #1
0
def get_nn_model(D_IN,H,D_OUT, num_layers, width_class='identity', weight_variance=None, bias_variance=None):
    """Build a ReLU MLP with Gaussian-standardized linear layers.

    Parameters
    ----------
    D_IN : int
        Input dimensionality.
    H : int
        Base hidden width.
    D_OUT : int
        Output dimensionality.
    num_layers : int
        Number of hidden layers.
    width_class : str
        One of 'identity', 'largest_last', 'largest_first'; controls how
        hidden widths change with depth (constant, growing, shrinking).
    weight_variance, bias_variance : float or None
        Raw variances for the Gaussian layers; fall back to the
        module-level ``defaults`` when None.

    Returns
    -------
    torch.nn.Sequential
        input layer -> ScaledRelu -> intermediate stack -> output layer.

    Raises
    ------
    AssertionError
        If ``width_class`` is not in ``valid_width_classes``.
    NotImplementedError
        If ``width_class`` passes the assert but has no size rule here.
    """
    if weight_variance is None:
        weight_variance = defaults.weight_variance
    if bias_variance is None:
        bias_variance = defaults.bias_variance
    # NOTE(review): assert is stripped under `python -O`; kept (rather than
    # converted to an explicit raise) so callers catching AssertionError
    # keep working.
    assert(width_class in valid_width_classes )
    intermediate_layers = get_intermediate_layers(H, num_layers, True, width_class, weight_variance, bias_variance)
    # First/last hidden sizes must match the widths produced by
    # get_intermediate_layers for the chosen width_class.
    if width_class == 'identity':
        first_hidden_size = H
        last_hidden_size = H
    elif width_class == 'largest_last':
        first_hidden_size = H
        last_hidden_size = num_layers * H
    elif width_class == 'largest_first':
        first_hidden_size = num_layers * H
        last_hidden_size = H
    else:
        raise NotImplementedError
    model = torch.nn.Sequential(
        GaussLinearStandardized(D_IN, first_hidden_size, bias=True, raw_weight_variance = weight_variance, raw_bias_variance = bias_variance),
        ScaledRelu(),
        *intermediate_layers,
        GaussLinearStandardized(last_hidden_size, D_OUT, bias=True, raw_weight_variance = weight_variance, raw_bias_variance = bias_variance)
    )
    return model
Пример #2
0
def get_nn_model(D_IN,H,D_OUT, num_layers):
    """Assemble a constant-width ReLU network using the module defaults.

    The model maps D_IN -> H, applies a ScaledRelu, runs the intermediate
    hidden stack, and reads out H -> D_OUT; all linear layers use the
    module-level default weight/bias variances.
    """
    input_layer = GaussLinearStandardized(D_IN, H, bias=True, raw_weight_variance = defaults.weight_variance, raw_bias_variance = defaults.bias_variance)
    hidden_stack = get_intermediate_layers(H, num_layers, True)
    output_layer = GaussLinearStandardized(H, D_OUT, bias=True, raw_weight_variance = defaults.weight_variance, raw_bias_variance = defaults.bias_variance)
    layers = [input_layer, ScaledRelu()] + hidden_stack + [output_layer]
    return torch.nn.Sequential(*layers)
Пример #3
0
def get_middel_nn_model(H, D_OUT, num_layers, weight_variance=None, bias_variance=None):
    """Build the "middle" portion of a network for the HSIC experiments.

    The width class is fixed to 'identity' and the incoming activation
    dimension is assumed to equal H, so no initial linear layer is
    included: the model starts with a ScaledRelu, then the hidden stack,
    then a final H -> D_OUT Gaussian-standardized linear layer.

    weight_variance / bias_variance default to the module-level
    ``defaults`` when None.
    """
    weight_variance = defaults.weight_variance if weight_variance is None else weight_variance
    bias_variance = defaults.bias_variance if bias_variance is None else bias_variance
    hidden_stack = get_intermediate_layers(H, num_layers, True, 'identity', weight_variance, bias_variance)
    readout = GaussLinearStandardized(H, D_OUT, bias=True, raw_weight_variance = weight_variance, raw_bias_variance = bias_variance)
    return torch.nn.Sequential(ScaledRelu(), *hidden_stack, readout)
Пример #4
0
def get_intermediate_layers(H, num_layers, bias, width_class, weight_variance, bias_variance):
    """Return num_layers-1 (GaussLinearStandardized, ScaledRelu) pairs.

    Layer widths depend on ``width_class``:
      - 'identity':      every layer is H -> H
      - 'largest_last':  widths grow  (H -> 2H, 2H -> 3H, ...)
      - 'largest_first': widths shrink (num_layers*H -> ..., 2H -> H)

    Raises NotImplementedError for any other width_class (on the first
    layer, matching the original per-iteration dispatch).
    """
    def _widths(i):
        # (incoming, outgoing) width of the i-th intermediate layer.
        if width_class == 'identity':
            return H, H
        if width_class == 'largest_last':
            return (i + 1) * H, (i + 2) * H
        if width_class == 'largest_first':
            return (num_layers - i) * H, (num_layers - i - 1) * H
        raise NotImplementedError

    layers = []
    for i in range(num_layers - 1):
        n_in, n_out = _widths(i)
        layers.append(GaussLinearStandardized(n_in, n_out, bias=bias, raw_weight_variance = weight_variance, raw_bias_variance = bias_variance))
        layers.append(ScaledRelu())
    return layers
Пример #5
0
def get_intermediate_layers(H, num_layers, bias):
    """Constant-width hidden stack: num_layers-1 (linear, activation)
    pairs, each H -> H, using the module-default variances."""
    pairs = [
        [GaussLinearStandardized(H, H, bias=bias, raw_weight_variance = defaults.weight_variance, raw_bias_variance = defaults.bias_variance),
         ScaledRelu()]
        for _ in range(num_layers - 1)
    ]
    # Flatten the list of pairs into a single flat layer list.
    return [layer for pair in pairs for layer in pair]