Example 1
 def __init__(self,
              conv_features,
              conv_width,
              lr_features,
              conv_out=32,
              conv_layers=1,
              conv_arg_map=None,
              mlp_layers=3,
              mlp_arg_map=None,
              resid=True):
     super(LrConv, self).__init__()
     # guard against the shared-mutable-default pitfall (same idiom as Example 2)
     conv_arg_map = {} if conv_arg_map is None else conv_arg_map
     mlp_arg_map = {} if mlp_arg_map is None else mlp_arg_map
     self.conv_width = conv_width
     self.window_config = WindowConfig(input_sequence_length=conv_width, output_sequence_length=1)
     self.resid = resid
     self.conv, conv_seq_reduction = simple.n_conv_1d(features=conv_features,
                                                      conv_width=conv_width,
                                                      out_features=conv_out,
                                                      conv_layers=conv_layers,
                                                      return_seq_reducer=True,
                                                      **conv_arg_map)
     self.min_input_dim = conv_seq_reduction.minimal_input_dim
     self.mlp = simple.mlp(features=lr_features + conv_out,
                           num_layers=mlp_layers,
                           hidden=lr_features + conv_out,
                           out_features=lr_features,
                           **mlp_arg_map)
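Example 1 shows only the constructor; a minimal, speculative sketch of the matching forward pass follows. The signature, tensor shapes, and the exact residual wiring are assumptions, not part of the original code.

 def forward(self, ts, lr):
     # hypothetical forward for LrConv (not in the original snippet)
     # ts: (batch, conv_width, conv_features) -> (batch, conv_out)
     conv_out = self.conv(ts)
     # fuse the conv summary with the raw lr vector and map back to lr space
     out = self.mlp(torch.cat([lr, conv_out], dim=-1))
     # optional residual connection back onto the lr features
     return lr + out if self.resid else out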
Example 2
    def __init__(self, ts_features, lr_features,
                 conv_layers=3,
                 conv_width=8,
                 conv_pool_width=3,
                 conv_pool_stride=3,
                 conv_out_feature_div=2,
                 conv_out_features=1,
                 conv_arg_map=None,
                 mlp_layers=1,
                 mlp_arg_map=None):
        super(CombinedLrNConvWithRawLr, self).__init__()
        conv_arg_map = {} if conv_arg_map is None else conv_arg_map
        mlp_arg_map = {} if mlp_arg_map is None else mlp_arg_map

        self.conv_width = conv_width
        self.window_config = WindowConfig(input_sequence_length=conv_width, output_sequence_length=1)

        self.conv = simple.n_conv_1d(features=ts_features + lr_features,
                                     conv_layers=conv_layers,
                                     pool_width=conv_pool_width,
                                     pool_stride=conv_pool_stride,
                                     fc_layers=1,
                                     conv_width=conv_width,
                                     out_features=conv_out_features,
                                     conv_out_feature_div=conv_out_feature_div,
                                     **conv_arg_map)
        self.mlp = simple.mlp(features=conv_out_features + lr_features,
                              hidden=conv_out_features + lr_features,
                              num_layers=mlp_layers,
                              out_features=lr_features,
                              **mlp_arg_map)
Example 3
    def lstm_all_to_lr(w, num_layers, hidden):
        # HIST+LR -> LR
        inp = nn.Sequential(
            agg_m.FlatCat(),
            simple.mlp(features=411, num_layers=1, out_features=hidden)
        )
        out = simple.mlp(features=hidden, num_layers=2, out_features=11)

        lstm = agg_m.AutoregLstm(input=inp, output=out, in_len=10, out_len=5, hidden=hidden, num_layers=num_layers)
        # -------
        lstm.name = 'autoreg-lstm'
        train.train_window_models([lstm], w, patience=10, validate=True, weight_decay=0, max_epochs=1,
                                  lrs=[0.0001, 0.00001],
                                  source='all', target='lr', log=False)
        # -------
        _, axs = plt.subplots(ncols=3, nrows=1, sharey='row', figsize=(15, 3))
        w.plot_lr(axs=axs)
        w.plot_model(lstm, axs=axs, other={'source': 'all', 'target': 'lr'})
        plt.show()
Example 4
 def __init__(self, ts_features, num_layers=1, hidden=64, output_sequence_length=1):
     super(AutoregLstm, self).__init__()
     self.window_config = WindowConfig(output_sequence_length=output_sequence_length)
     hidden_shape = ts_features if hidden is None else hidden
     self.enc = nn.LSTM(input_size=ts_features,
                        hidden_size=hidden_shape,
                        num_layers=num_layers, batch_first=True)
     self.dec = nn.LSTM(input_size=hidden_shape,
                        hidden_size=hidden_shape,
                        num_layers=num_layers, batch_first=True)
     self.to_result = simple.mlp(features=hidden_shape, out_features=ts_features)
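The encoder/decoder pair in Example 4 implies an autoregressive decode loop. The sketch below is one plausible reading: seed the decoder with the last encoder output and feed each decoder step back in. The seeding choice and the window_config attribute access are assumptions.

 def forward(self, x):
     # hypothetical decode loop; not part of the original snippet
     # x: (batch, in_len, ts_features)
     enc_out, state = self.enc(x)
     step = enc_out[:, -1:, :]  # seed the decoder with the last encoder output
     outputs = []
     for _ in range(self.window_config.output_sequence_length):
         step, state = self.dec(step, state)
         outputs.append(self.to_result(step))
     return torch.cat(outputs, dim=1)  # (batch, out_len, ts_features)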
Example 5
 def __init__(self, ts_features, lr_features, num_layers=1, hidden=64, output_sequence_length=1,
              return_deltas=False, single_output=False, ret_type='all'):
     super(AutoregLstmLrAux, self).__init__()
     self.window_config = WindowConfig(output_sequence_length=output_sequence_length)
     self.return_deltas = return_deltas
     hidden_shape = (ts_features + lr_features) if hidden is None else hidden
     self.enc = nn.LSTM(input_size=ts_features + lr_features,
                        hidden_size=hidden_shape,
                        num_layers=num_layers, batch_first=True)
     self.dec = nn.LSTM(input_size=hidden_shape,
                        hidden_size=hidden_shape,
                        num_layers=num_layers, batch_first=True)
     self.single_output = single_output
     if single_output:
         self.to_delta_aux = simple.mlp(features=hidden_shape, out_features=lr_features + ts_features)
     else:
         self.to_aux = simple.mlp(features=hidden_shape, out_features=ts_features)
         self.to_delta = simple.mlp(features=hidden_shape, out_features=lr_features)
     self.lr_features = lr_features
     self.ret_type = ret_type
Example 6
    def __init__(self, hidden=64, in_len=10, cat_s=True):
        super(AttentionLayer, self).__init__()
        self.hidden = hidden
        self.in_len = in_len
        self.cat_s = cat_s

        self.w = simple.mlp(hidden,
                            num_layers=1,
                            out_features=1,
                            input_dropout=0.2,
                            dropout=0.5)
        self.u = simple.mlp(hidden,
                            num_layers=1,
                            out_features=1,
                            input_dropout=0.2,
                            dropout=0.5)
        self.v = simple.mlp(in_len,
                            num_layers=8,
                            out_features=in_len,
                            input_dropout=0.2,
                            dropout=0.5)
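Examples 6, 7, and 9 all build three scoring MLPs (w, u, v) in the additive-attention style. A speculative sketch of how they might combine is below; the query/key shapes, the softmax placement, and the role of cat_s are all assumptions.

    def forward(self, h, s):
        # hypothetical additive-attention combination; shapes are assumptions
        # h: (batch, in_len, hidden) encoder states, s: (batch, hidden) query
        scores = self.w(h).squeeze(-1) + self.u(s)       # (batch, in_len)
        weights = torch.softmax(self.v(scores), dim=-1)  # (batch, in_len)
        context = (weights.unsqueeze(-1) * h).sum(dim=1) # (batch, hidden)
        return torch.cat([context, s], dim=-1) if self.cat_s else context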
Example 7
    def __init__(self, hidden=64, cat_s=True):
        super(SlimAttentionLayer, self).__init__()
        self.hidden = hidden
        self.cat_s = cat_s
        inner_hidden = hidden  # // 4

        self.w = simple.mlp(hidden,
                            num_layers=4,
                            out_features=inner_hidden,
                            input_dropout=0.2,
                            dropout=0.5)
        self.u = simple.mlp(hidden,
                            num_layers=4,
                            out_features=inner_hidden,
                            input_dropout=0.2,
                            dropout=0.5)
        self.v = simple.mlp(inner_hidden,
                            num_layers=8,
                            out_features=1,
                            input_dropout=0.2,
                            dropout=0.5)
Example 8
 def __init__(self, conv_features, conv_width, lr_features, conv_hidden=32, mlp_layers=3):
     super(LrConvOld, self).__init__()
     self.conv_width = conv_width
     self.window_config = WindowConfig(input_sequence_length=conv_width, output_sequence_length=1)
     self.conv = simple.conv_1d(conv_width=conv_width,
                                features=conv_features,
                                hidden=conv_hidden,
                                out_features=conv_hidden)
     self.mlp = simple.mlp(features=lr_features + conv_hidden,
                           num_layers=mlp_layers,
                           hidden=lr_features + conv_hidden,
                           out_features=lr_features)
Example 9
 def __init__(self, hidden=64, in_len=10, cat_s=True):
     super(FourierAttentionLayer, self).__init__()
     self.hidden = hidden
     self.in_len = in_len
     self.cat_s = cat_s
     inner_hidden = hidden
     self.w = simple.mlp(hidden,
                         num_layers=4,
                         out_features=inner_hidden,
                         input_dropout=0.2,
                         dropout=0.5)
     self.u = simple.mlp(hidden,
                         num_layers=4,
                         out_features=inner_hidden,
                         input_dropout=0.2,
                         dropout=0.5)
     self.v = simple.mlp(inner_hidden,
                         num_layers=8,
                         out_features=1,
                         input_dropout=0.2,
                         dropout=0.5)
     self.f = fourier.HistogramLerner(extra_dims=0, t_in=in_len)
Example 10
 def __init__(self,
              name,
              model,
              in_size,
              out_size,
              model_size,
              criterion=nn.functional.mse_loss,
              lr=0.001):
     self.model = copy.deepcopy(model)
     self.wrapped = nn.Sequential(
         FLT(), simple.mlp(in_size, out_features=model_size), self.model,
         simple.mlp(model_size, out_features=out_size), Rev())
     self.optimizer = Adam(lr=lr, params=self.wrapped.parameters())
     self.criterion = criterion
     self.train_losses = []
     self.reg_losses = []
     self.grad_losses = []
     self.epochs_per_train = 2
     self.max_parts = 20
     self.name = name
     self.wrapped.window_config = WindowConfig(1, 1, 0)
     self.wrapped.name = name
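Example 10 sets up an optimizer, a criterion, and several loss ledgers but not the loop that uses them. A minimal sketch of the optimization step they imply follows; the method name train_step and the (x, y) batch format are hypothetical.

 def train_step(self, x, y):
     # hypothetical single optimization step over one (x, y) batch
     self.optimizer.zero_grad()
     loss = self.criterion(self.wrapped(x), y)
     loss.backward()
     self.optimizer.step()
     self.train_losses.append(loss.item())
     return loss.item()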
Example 11
 def __init__(self, ts_features, lr_features, num_layers=1, hidden=64, output_sequence_length=1,
              return_deltas=False):
     super(AutoregLstmLr, self).__init__()
     self.window_config = WindowConfig(output_sequence_length=output_sequence_length)
     self.return_deltas = return_deltas
     hidden_shape = (ts_features + lr_features) if hidden is None else hidden
     self.enc = nn.LSTM(input_size=ts_features + lr_features,
                        hidden_size=hidden_shape,
                        num_layers=num_layers, batch_first=True)
     self.dec = nn.LSTM(input_size=hidden_shape,
                        hidden_size=hidden_shape,
                        num_layers=num_layers, batch_first=True)
     self.to_lr = simple.mlp(features=hidden_shape, out_features=lr_features)
Example 12
    def __init__(self, ts_features, lr_features,
                 conv_layers=3,
                 conv_width=8,
                 ts_conv_pool_width=3,
                 ts_conv_pool_stride=3,
                 lr_conv_pool_width=3,
                 lr_conv_pool_stride=3,
                 ts_conv_fc_layers=1,
                 lr_conv_fc_layers=1,
                 ts_conv_out_feature_div=2,
                 lr_conv_out_feature_div=2,
                 mlp_width=10,
                 mlp_layers=3,
                 out_features=1):
        super(LrNConv, self).__init__()
        self.conv_width = conv_width
        self.window_config = WindowConfig(input_sequence_length=conv_width, output_sequence_length=1)

        self.conv_ts = simple.n_conv_1d(features=ts_features,
                                        conv_layers=conv_layers,
                                        pool_width=ts_conv_pool_width,
                                        pool_stride=ts_conv_pool_stride,
                                        fc_layers=ts_conv_fc_layers,
                                        conv_width=conv_width,
                                        conv_out_feature_div=ts_conv_out_feature_div,
                                        out_features=mlp_width)
        self.conv_lr = simple.n_conv_1d(features=lr_features,
                                        conv_layers=conv_layers,
                                        pool_width=lr_conv_pool_width,
                                        pool_stride=lr_conv_pool_stride,
                                        fc_layers=lr_conv_fc_layers,
                                        conv_width=conv_width,
                                        conv_out_feature_div=lr_conv_out_feature_div,
                                        out_features=mlp_width)
        self.mlp = simple.mlp(features=mlp_width * 2,
                              num_layers=mlp_layers,
                              hidden=mlp_width * 2,
                              out_features=out_features)
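Example 12 runs the ts and lr streams through separate conv stacks whose outputs (mlp_width features each) are concatenated for the final MLP (features=mlp_width * 2). A hedged sketch of that fusion, with the argument order assumed:

    def forward(self, ts, lr):
        # hypothetical two-branch fusion; not part of the original snippet
        c_ts = self.conv_ts(ts)  # (batch, mlp_width)
        c_lr = self.conv_lr(lr)  # (batch, mlp_width)
        return self.mlp(torch.cat([c_ts, c_lr], dim=-1))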
Example 13
 def __init__(self, in_size, out_size):
     super(EncodeSingleOutput, self).__init__()
     self.fc = simple.mlp(features=in_size,
                          num_layers=1,
                          out_features=out_size)
Example 14
 def __init__(self, lstm_props=None, mlp_props=None):
     super().__init__()
     # copy before mutating so the caller's dict (and the shared default) stay intact
     lstm_props = {} if lstm_props is None else dict(lstm_props)
     lstm_props['batch_first'] = True
     mlp_props = {} if mlp_props is None else mlp_props
     self.rnn = nn.LSTM(**lstm_props)
     self.mlp = simple.mlp(**mlp_props)
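Example 14 wires an LSTM into an MLP head; a plausible forward pass is sketched below. Feeding only the last LSTM step to the MLP is an assumption.

 def forward(self, x):
     # hypothetical forward; "last step only" is an assumption
     out, _ = self.rnn(x)         # (batch, seq_len, hidden)
     return self.mlp(out[:, -1])  # (batch, mlp out_features)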
Example 15
 def __init__(self, dist):
     super(DummyNetTS, self).__init__()
     self.mlp = simple.mlp(1)
     self.window_config = WindowConfig()
     self.dist = dist
Example 16
        'agr_g': make_window(binary.agr_g(), 'agr_g', window_size=500),
        'sea_a': make_window(binary.sea_a(), 'sea_a', window_size=500),
        'sea_g': make_window(binary.sea_g(), 'sea_g', window_size=500),
        'hyper_f': make_window(binary.hyper_f(), 'hyper_f', window_size=500),
        'hyper_f2': make_window(binary.hyper_f2(), 'hyper_f2',
                                window_size=500),

        # 'weather': make_window(binary.weather(), 'weather'),
        # 'electric': make_window(binary.electric(), 'electric')
    }

    tasks = []
    model_size = 256
    model = simple.mlp(features=model_size,
                       num_layers=8,
                       out_features=model_size,
                       dropout=0.5,
                       input_dropout=0.2)
    model.name = 'mlp'

    for data_type in data_types:
        data, dims, hist_bins, hist_dim = data_types[data_type]
        print(dims, hist_bins, hist_dim)
        tasks.append(
            Task(
                name=data_type,
                window=data,
                lr=0.001,
                input_size=dims * hist_dim * hist_bins * 2,
                output_size=dims + 1,  # THIS IS WRONG FIX
                model_size=model_size,
Example 17
 def __init__(self):
     super(DummyNet, self).__init__()
     self.mlp = simple.mlp(1)
     self.window_config = WindowConfig()