def __init__(self, nb, nu, na, ny, nx, n_nodes_per_layer=64, n_hidden_layers=2, activation=nn.Tanh):
    super(default_encoder_net, self).__init__()
    from deepSI.utils import simple_res_net
    # normalize nu/ny to shape tuples (None -> (), int -> (n,))
    self.nu = tuple() if nu is None else ((nu,) if isinstance(nu, int) else nu)
    self.ny = tuple() if ny is None else ((ny,) if isinstance(ny, int) else ny)
    # encoder: maps the flattened window of nb past inputs and na past outputs to a state of size nx
    self.net = simple_res_net(n_in=nb*np.prod(self.nu, dtype=int) + na*np.prod(self.ny, dtype=int),
                              n_out=nx, n_nodes_per_layer=n_nodes_per_layer,
                              n_hidden_layers=n_hidden_layers, activation=activation)
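# A minimal usage sketch (not part of the library): how the encoder net above could be
# applied to a window of past data. The function name, tensor names, and shapes are
# assumptions for illustration; the class's actual forward method is not reproduced here.
import torch

def encoder_forward_sketch(encoder, upast, ypast):
    # upast: (batch, nb, *nu) past inputs, ypast: (batch, na, *ny) past outputs (assumed shapes)
    net_in = torch.cat([upast.view(upast.shape[0], -1),   # flatten to (batch, nb*prod(nu))
                        ypast.view(ypast.shape[0], -1)],  # flatten to (batch, na*prod(ny))
                       dim=1)
    return encoder.net(net_in)  # initial state estimate of shape (batch, nx)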
def __init__(self, nx, nu, ny, n_nodes_per_layer=64, n_hidden_layers=2, activation=nn.Tanh): #ny here?
    super(default_ino_state_net, self).__init__()
    from deepSI.utils import simple_res_net
    # normalize nu/ny to shape tuples (None -> (), int -> (n,))
    self.nu = tuple() if nu is None else ((nu,) if isinstance(nu, int) else nu)
    self.ny = tuple() if ny is None else ((ny,) if isinstance(ny, int) else ny)
    self.nx = nx
    # state transition: (x, u) -> next state of size nx
    self.net = simple_res_net(n_in=nx + np.prod(self.nu, dtype=int), n_out=nx,
                              n_nodes_per_layer=n_nodes_per_layer,
                              n_hidden_layers=n_hidden_layers, activation=activation)
    # linear innovation gain applied to the (flattened) output error
    self.K = nn.Linear(np.prod(self.ny, dtype=int), nx, bias=False)
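# A minimal usage sketch (an assumption, not the library's forward method): the state net
# above suggests an innovation-form update, i.e. a state transition through self.net plus
# the linear gain K acting on the output error eps = y - yhat. Names and shapes are assumed.
import torch

def ino_state_forward_sketch(state_net, x, u, eps):
    # x: (batch, nx) state, u: (batch, *nu) input, eps: (batch, *ny) output error (assumed shapes)
    net_in = torch.cat([x, u.view(u.shape[0], -1)], dim=1)  # (batch, nx + prod(nu))
    return state_net.net(net_in) + state_net.K(eps.view(eps.shape[0], -1))  # next state, (batch, nx)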
def __init__(self, nx, ny, nu=-1, n_nodes_per_layer=64, n_hidden_layers=2, activation=nn.Tanh):
    super(default_output_net, self).__init__()
    from deepSI.utils import simple_res_net
    # normalize ny to a shape tuple (None -> (), int -> (n,))
    self.ny = tuple() if ny is None else ((ny,) if isinstance(ny, int) else ny)
    # nu=-1 signals that there is no direct feedthrough of the input
    self.feedthrough = nu != -1
    if self.feedthrough:
        self.nu = tuple() if nu is None else ((nu,) if isinstance(nu, int) else nu)
        net_in = nx + np.prod(self.nu, dtype=int)
    else:
        net_in = nx
    # output map: state (and input if feedthrough) -> flattened output of size prod(ny)
    self.net = simple_res_net(n_in=net_in, n_out=np.prod(self.ny, dtype=int),
                              n_nodes_per_layer=n_nodes_per_layer,
                              n_hidden_layers=n_hidden_layers, activation=activation)
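# A minimal usage sketch (assumed, not the class's actual forward method): map the state,
# and the input when feedthrough is enabled, to an output reshaped to ny.
import torch

def output_forward_sketch(out_net, x, u=None):
    if out_net.feedthrough:
        net_in = torch.cat([x, u.view(u.shape[0], -1)], dim=1)  # include direct feedthrough of u
    else:
        net_in = x
    y_flat = out_net.net(net_in)                 # (batch, prod(ny))
    return y_flat.view(x.shape[0], *out_net.ny)  # reshape to (batch, *ny)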