Example #1
0
    def __init__(self, model="super"):
        """Spiking policy network: constant-current encoder -> recurrent
        adaptive (LSNN) hidden layer -> dropout -> leaky-integrator readout.

        Args:
            model: surrogate-gradient method name forwarded to
                ``LSNNParameters`` (default ``"super"``).
        """
        super(LSNNPolicy, self).__init__()
        # Fixed problem dimensions.
        self.state_dim = 4
        self.input_features = 16
        self.hidden_features = 128
        self.output_features = 2
        # Encode each observation as a constant input current over 40 steps.
        self.constant_current_encoder = ConstantCurrentLIFEncoder(40)
        # NOTE(review): the hidden layer consumes 2 * state_dim features —
        # presumably the encoded state is doubled upstream; confirm in forward().
        hidden_params = LSNNParameters(method=model, alpha=100.0)
        self.lif_layer = LSNNCell(
            2 * self.state_dim,
            self.hidden_features,
            p=hidden_params,
        )
        self.dropout = torch.nn.Dropout(p=0.5)
        self.readout = LICell(self.hidden_features, self.output_features)
        # Rollout bookkeeping, filled in during episodes.
        self.saved_log_probs = []
        self.rewards = []
Example #2
0
 def __init__(
     self,
     input_features,
     output_features,
     seq_length,
     is_lsnn,
     dt=0.01,
     model="super",
 ):
     """Memory-task network: one recurrent spiking layer, dropout, and a
     leaky-integrator linear readout.

     Args:
         input_features: width of the input (and recurrent hidden) layer.
         output_features: number of readout units.
         seq_length: input sequence length, stored for use elsewhere.
         is_lsnn: if True use an adaptive LSNN recurrent cell, otherwise a
             plain LIF recurrent cell.
         dt: integration time step passed to the recurrent cell.
         model: surrogate-gradient method name for the neuron parameters.
     """
     super(MemoryNet, self).__init__()
     self.input_features = input_features
     self.output_features = output_features
     self.seq_length = seq_length
     self.is_lsnn = is_lsnn
     if is_lsnn:
         p = LSNNParameters(method=model)
         self.layer = LSNNRecurrentCell(input_features, input_features, p, dt=dt)
     else:
         p = LIFParameters(method=model)
         # Fix: the parameters were built but never passed, so `model` had no
         # effect in the LIF branch. Pass them, matching the LSNN branch.
         self.layer = LIFRecurrentCell(input_features, input_features, p=p, dt=dt)
     self.dropout = torch.nn.Dropout(p=0.2)
     self.readout = LILinearCell(input_features, output_features)
Example #3
0
 def __init__(self, input_features, output_features, args):
     """Memory-task network configured from a CLI/config namespace.

     Builds one recurrent spiking layer (plain LIF, adaptive LSNN, or a
     combined LSNN+LIF net, chosen by ``args.neuron_model``) followed by a
     leaky-integrator linear readout, and stores the optimizer settings.

     Args:
         input_features: width of the input (and recurrent hidden) layer.
         output_features: number of readout units.
         args: namespace providing seq_length, optimizer, learning_rate,
             regularization_factor, regularization_target, seq_repetitions,
             neuron_model, model, and dt.
     """
     super(MemoryNet, self).__init__()
     self.input_features = input_features
     self.output_features = output_features
     self.seq_length = args.seq_length
     self.optimizer = args.optimizer
     self.learning_rate = args.learning_rate
     self.regularization_factor = args.regularization_factor
     # Spread the regularization target over one full repeated sequence.
     self.regularization_target = args.regularization_target / (
         self.seq_length * args.seq_repetitions
     )
     # Record the chosen neuron model (self.log is provided elsewhere).
     self.log("Neuron model", args.neuron_model)
     # Parameter sets shared by the branches below.
     lsnn_params = LSNNParameters(
         method=args.model,
         v_th=torch.as_tensor(0.5),
         tau_adapt_inv=torch.as_tensor(1 / 1200.0),
         beta=torch.as_tensor(1.8),
     )
     lif_params = LIFParameters(method=args.model, v_th=torch.as_tensor(0.5))
     chosen = args.neuron_model
     # NOTE(review): capture_b is only enabled for the combined net —
     # presumably "b" is the LSNN adaptation variable; confirm at the call site.
     self.capture_b = chosen == "lsnnlif"
     if chosen == "lsnn":
         self.layer = LSNNRecurrentCell(input_features, input_features, p=lsnn_params)
     elif chosen == "lsnnlif":
         self.layer = LSNNLIFNet(
             input_features, p_lsnn=lsnn_params, p_lif=lif_params, dt=args.dt
         )
     else:
         self.layer = LIFRecurrentCell(
             input_features, input_features, p=lif_params, dt=args.dt
         )
     self.readout = LILinearCell(input_features, output_features, p=LIParameters())
     self.scheduler = None