def __init__(self):
    """Assemble the analog network: dropout, single-layer LSTM, linear decoder."""
    super().__init__()
    # Explicit dropout module applied by the forward pass.
    # NOTE(review): the LSTM's own dropout= is presumably inert with
    # num_layers=1 (PyTorch applies it only between stacked layers) — confirm.
    self.dropout = nn.Dropout(DROPOUT_RATIO)
    self.lstm = AnalogLSTM(
        INPUT_SIZE,
        HIDDEN_SIZE,
        num_layers=1,
        dropout=DROPOUT_RATIO,
        bias=True,
        rpu_config=rpu_config,
    )
    self.decoder = AnalogLinear(
        HIDDEN_SIZE,
        OUTPUT_SIZE,
        bias=True,
        rpu_config=rpu_config,
    )
def get_layer(self, input_size=2, hidden_size=3, **kwargs):
    """Build an ``AnalogLSTM`` under this test's default configuration.

    Values already present in ``kwargs`` (``rpu_config``, ``bias``) take
    precedence over the defaults injected here.
    """
    defaults = {
        "rpu_config": self.get_rpu_config(),
        "bias": self.bias,
    }
    for key, value in defaults.items():
        kwargs.setdefault(key, value)
    return AnalogLSTM(input_size, hidden_size, **kwargs)
def get_layer(self, input_size=2, hidden_size=3, **kwargs):
    """Build an ``AnalogLSTM`` whose config uses this test's bias mode.

    Defaults (``rpu_config``, ``bias``) are only applied when the caller
    did not supply them.

    NOTE(review): ``digital_bias`` is written onto the rpu_config's mapping
    in place, so a caller-provided rpu_config is mutated as a side effect —
    confirm that is intended.
    """
    kwargs.setdefault("bias", self.bias)
    config = kwargs.setdefault("rpu_config", self.get_rpu_config())
    config.mapping.digital_bias = self.digital_bias
    return AnalogLSTM(input_size, hidden_size, **kwargs)