def __init__(self, hidden_dims, input_dim, layer_type="concat", nonlinearity="softplus"):
    super(AmortizedBiasODEnet, self).__init__()
    # map the layer_type string to the corresponding time-conditioned linear layer
    base_layer = {
        "ignore": diffeq_layers.IgnoreLinear,
        "hyper": diffeq_layers.HyperLinear,
        "squash": diffeq_layers.SquashLinear,
        "concat": diffeq_layers.ConcatLinear,
        "concat_v2": diffeq_layers.ConcatLinear_v2,
        "concatsquash": diffeq_layers.ConcatSquashLinear,
        "blend": diffeq_layers.BlendLinear,
        "concatcoord": diffeq_layers.ConcatLinear,
    }[layer_type]
    self.input_dim = input_dim

    # build layers and add them, pairing each hidden layer with a nonlinearity
    layers = []
    activation_fns = []
    hidden_shape = input_dim
    for dim_out in hidden_dims:
        layer = base_layer(hidden_shape, dim_out)
        layers.append(layer)
        activation_fns.append(NONLINEARITIES[nonlinearity])
        hidden_shape = dim_out

    self.layers = nn.ModuleList(layers)
    # no activation after the final layer, so drop the last entry
    self.activation_fns = nn.ModuleList(activation_fns[:-1])
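
# A minimal sketch (not from the original source) of how the layer and activation
# lists built above are typically consumed; the exact forward signature of
# AmortizedBiasODEnet is an assumption here.
#
#     def forward(self, t, y):
#         dx = y
#         for i, layer in enumerate(self.layers):
#             dx = layer(t, dx)
#             # activation_fns has one fewer entry than layers, so the final
#             # layer's output is returned without a nonlinearity
#             if i < len(self.activation_fns):
#                 dx = self.activation_fns[i](dx)
#         return dx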
def _make_layer(self, block, hidden, planes, num_blocks, stride):
    # only the first block may downsample with the given stride; the rest use stride 1
    strides = [stride] + [1] * (num_blocks - 1)
    layers = []
    for stride in strides:
        layers.append(block(self.in_planes, hidden, planes, stride))
        self.in_planes = planes * block.expansion
    return nn.Sequential(*layers)
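
# Hedged usage sketch (not from the original source): a ResNet-style model would
# typically call _make_layer once per stage, e.g.
#
#     self.layer1 = self._make_layer(BasicBlock, hidden=64, planes=64, num_blocks=2, stride=1)
#     self.layer2 = self._make_layer(BasicBlock, hidden=128, planes=128, num_blocks=2, stride=2)
#
# where BasicBlock is assumed to be a block class taking (in_planes, hidden, planes, stride)
# and exposing an `expansion` attribute, as required by the loop above.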
def create_net_with_time(n_inputs, n_outputs, n_layers=1, n_units=100, layer_type="concat", nonlinear="tanh"):
    # time-conditioned input layer, n_layers hidden blocks, then a plain linear output layer
    base_layer = LAYER[layer_type]
    base_activate = NONLINEARITIES[nonlinear]
    layers = [base_layer(n_inputs, n_units)]
    for i in range(n_layers):
        layers.append(base_activate())
        layers.append(base_layer(n_units, n_units))
    layers.append(base_activate())
    layers.append(nn.Linear(n_units, n_outputs))
    return nn.ModuleList(layers)
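
# A minimal consumption sketch (an assumption, not from the original source): since the
# function returns an nn.ModuleList rather than nn.Sequential, the caller is expected to
# iterate the layers itself and pass t only to the time-conditioned ones, e.g.
#
#     def apply_net_with_time(net, t, x):
#         h = x
#         for layer in net:
#             # time-conditioned layers take (t, h); activations and the final
#             # nn.Linear take h alone
#             h = layer(t, h) if isinstance(layer, tuple(LAYER.values())) else layer(h)
#         return h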