def exp_g(name):
    """Build a single-LSTM-layer network named *name*.

    Reuses the module-level ``source`` if one already exists (so repeated
    experiments share one data source), otherwise constructs it from
    ``source_dict``.  Always forces ``source.lag = 5``.

    Fix: the original bound the probe to an unused local (``a = source``);
    a bare name reference performs the same NameError probe without the
    dead variable.
    """
    global source
    try:
        source  # probe: raises NameError if the global does not exist yet
    except NameError:
        source = RealApplianceSource(**source_dict)
    source.lag = 5
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'] = [
        {
            'type': LSTMLayer,
            'num_units': 200,
            'gradient_steps': GRADIENT_STEPS,
            'peepholes': False,
            'W_in_to_cell': Normal(std=1.)
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': None,
            'W': Normal(std=(1/sqrt(200)))
        }
    ]
    net = Net(**net_dict_copy)
    return net
def exp_a(name):
    """Experiment on 3 appliances: BiRNN stack with a max-pool over time.

    After iteration 5001 the last layer is replaced by a 2-component
    MixtureDensityLayer (see ``layer_changes``).
    """
    global source
    src_params = deepcopy(source_dict)
    src_params['reshape_target_to_2D'] = False
    source = RealApplianceSource(**src_params)
    source.reshape_target_to_2D = False
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    N = 50
    config['layers_config'] = [
        {'type': BidirectionalRecurrentLayer, 'num_units': N,
         'gradient_steps': GRADIENT_STEPS, 'W_in_to_hid': Normal(std=1.),
         'nonlinearity': tanh},
        # pool 4 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 4, 'axis': 1, 'pool_function': T.max},
        {'type': BidirectionalRecurrentLayer, 'num_units': N,
         'gradient_steps': GRADIENT_STEPS,
         'W_in_to_hid': Normal(std=1/sqrt(N)), 'nonlinearity': tanh},
        {'type': DenseLayer, 'W': Normal(std=1/sqrt(N)),
         'num_units': source.n_outputs, 'nonlinearity': None}
    ]
    config['layer_changes'] = {
        5001: {
            'remove_from': -2,
            'callback': callback,
            'new_layers': [
                {'type': MixtureDensityLayer, 'num_units': source.n_outputs,
                 'num_components': 2}
            ]
        }
    }
    return Net(**config)
def exp_a(name):
    """Conv front-end over the time axis, then a funnel of dense ReLU layers
    ending in a sigmoid layer sized to the flattened target shape."""
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    N = 512
    out_shape = source.output_shape_after_processing()

    def dense(units, nl):
        # one fresh layer-config dict per call
        return {'type': DenseLayer, 'num_units': units, 'nonlinearity': nl}

    config['layers_config'] = [
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},  # (batch, features, time)
        {'type': Conv1DLayer,  # convolve over the time axis
         'num_filters': 32, 'filter_length': 4, 'stride': 1,
         'nonlinearity': rectify, 'border_mode': 'same'},
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},  # back to (batch, time, features)
        dense(N * 2, rectify),
        dense(N, rectify),
        dense(N // 2, rectify),
        dense(out_shape[1] * out_shape[2], sigmoid),
    ]
    return Net(**config)
def exp_a(name):
    """BLSTM(40, ReLU cell/out) -> stride-4 conv over time -> BLSTM(80) ->
    softplus dense sized to ``source.n_outputs``.
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    # NOTE(review): the original also computed
    # source.output_shape_after_processing() here but never used the result;
    # assuming that call is side-effect free, it has been removed.
    net_dict_copy['layers_config'] = [
        {
            'type': BLSTMLayer,
            'num_units': 40,
            'gradient_steps': GRADIENT_STEPS,
            'peepholes': False,
            'nonlinearity_cell': rectify,
            'nonlinearity_out': rectify
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time) for the conv
        },
        {
            'type': Conv1DLayer,
            'num_filters': 20,
            'filter_length': 4,
            'stride': 4,  # downsamples the time axis 4x
            'nonlinearity': rectify
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        },
        {
            'type': BLSTMLayer,
            'num_units': 80,
            'gradient_steps': GRADIENT_STEPS,
            'peepholes': False,
            'nonlinearity_cell': rectify,
            'nonlinearity_out': rectify
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': T.nnet.softplus
        }
    ]
    net = Net(**net_dict_copy)
    return net
def exp_a(name):
    """Wide conv front-end (128 filters of length 64, 'same' padding) over
    the time axis, followed by two dense layers sized to the flattened
    output shape (ReLU, then softplus).

    Fix: the final layer recomputed ``source.output_shape()`` twice even
    though it was already cached in ``output_shape``; the cached value is
    reused (same result, fewer calls).
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    N = 1024  # only used for the dense layers' weight-init scaling below
    NUM_FILTERS = 128
    FILTER_LENGTH = 64
    output_shape = source.output_shape()
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            'type': Conv1DLayer, # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_length': FILTER_LENGTH,
            'stride': 1,
            'nonlinearity': rectify,
            'W': Normal(std=1/sqrt(FILTER_LENGTH)),
            'border_mode': 'same'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1) # back to (batch, time, features)
        },
        {
            'type': DenseLayer,
            'num_units': output_shape[1] * output_shape[2],
            'W': Normal(std=1/sqrt(N)),
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            # reuse the cached output_shape instead of calling
            # source.output_shape() twice more
            'num_units': output_shape[1] * output_shape[2],
            'W': Normal(std=1/sqrt(N)),
            'nonlinearity': T.nnet.softplus
        }
    ]
    net = Net(**net_dict_copy)
    return net
def exp_b(name):
    """Append a linear dense output layer to the base layer stack.

    Reuses the module-level ``source`` if it already exists; always forces
    ``source.lag = 5``.

    Fix: the original bound the NameError probe to an unused local
    (``a = source``); the bare name reference is equivalent.
    """
    global source
    try:
        source  # probe: raises NameError if the global does not exist yet
    except NameError:
        source = RealApplianceSource(**source_dict)
    source.lag = 5
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'].append(
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': None,
            'W': Normal(std=(1/sqrt(100)))
        }
    )
    net = Net(**net_dict_copy)
    return net
def exp_c(name):
    """Dense funnel on a 256-sample random window, sigmoid output sized to
    the flattened target; resumes training from the iteration-30000
    checkpoint at learning rate 1e-5."""
    global source
    src_params = deepcopy(source_dict)
    src_params['random_window'] = 256
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source, learning_rate=1e-5)
    N = 512 * 8
    out_shape = source.output_shape_after_processing()
    hidden_widths = [N * 2, N, N // 2, N // 4]
    layers = [{'type': DenseLayer, 'num_units': width, 'nonlinearity': rectify}
              for width in hidden_widths]
    layers.append({'type': DenseLayer,
                   'num_units': out_shape[1] * out_shape[2],
                   'nonlinearity': sigmoid})
    config['layers_config'] = layers
    net = Net(**config)
    net.load_params(30000)  # resume from a saved checkpoint
    return net
def exp_a(name):
    """Two identical valid-mode convolutions over the time axis, then three
    512-unit dense ReLU layers and a linear output sized to the target
    sequence length."""
    global source
    src_params = deepcopy(source_dict)
    src_params.update(dict(logger=logging.getLogger(name)))
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    NUM_FILTERS = 16
    target_seq_length = source.output_shape_after_processing()[1]
    conv = {
        "type": Conv1DLayer,  # convolve over the time axis
        "num_filters": NUM_FILTERS,
        "filter_size": 4,
        "stride": 1,
        "nonlinearity": None,
        "border_mode": "valid",
    }
    config["layers_config"] = [
        {"type": DimshuffleLayer, "pattern": (0, 2, 1)},  # (batch, features, time)
        dict(conv),  # fresh copy per layer
        dict(conv),
        {"type": DimshuffleLayer, "pattern": (0, 2, 1)},  # back to (batch, time, features)
        {"type": DenseLayer, "num_units": 512, "nonlinearity": rectify},
        {"type": DenseLayer, "num_units": 512, "nonlinearity": rectify},
        {"type": DenseLayer, "num_units": 512, "nonlinearity": rectify},
        {"type": DenseLayer, "num_units": target_seq_length, "nonlinearity": None},
    ]
    return Net(**config)
# Example #9 (score: 0)
def exp_e(name):
    """BLSTM -> stride-3 conv over time (matching subsample_target=3) ->
    BLSTM -> linear dense output.

    Original note: this experiment failed.
    """
    src_params = deepcopy(source_dict)
    src_params['input_padding'] = 0
    src_params['subsample_target'] = 3
    source = RealApplianceSource(**src_params)

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    config['layers_config'] = [
        {'type': BLSTMLayer, 'num_units': 50,
         'gradient_steps': GRADIENT_STEPS, 'peepholes': False,
         'W_in_to_cell': Normal(std=1.)},
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},
        {'type': Conv1DLayer, 'num_filters': 80, 'filter_length': 3,
         'stride': 3, 'nonlinearity': tanh,
         'W': Normal(std=(1 / sqrt(50)))},
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},
        {'type': BLSTMLayer, 'num_units': 50,
         'gradient_steps': GRADIENT_STEPS, 'peepholes': False,
         'W_in_to_cell': Normal(std=(1 / sqrt(50)))},
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None, 'W': Normal(std=(1 / sqrt(50)))},
    ]
    return Net(**config)
# Example #10 (score: 0)
def exp_b(name):
    """One pool layer: BiRNN x2 -> max pool over time (ds=4) -> BiRNN ->
    2-component MixtureDensityLayer output."""
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    N = 50

    def birnn(std):
        # fresh dict (and fresh Normal initializer) per layer
        return {'type': BidirectionalRecurrentLayer, 'num_units': N,
                'gradient_steps': GRADIENT_STEPS,
                'W_in_to_hid': Normal(std=std), 'nonlinearity': tanh}

    config['layers_config'] = [
        birnn(1.),
        birnn(1/sqrt(N)),
        # pool 4 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 4, 'axis': 1, 'pool_function': T.max},
        birnn(1/sqrt(N)),
        {'type': MixtureDensityLayer, 'num_units': source.n_outputs,
         'num_components': 2}
    ]
    return Net(**config)
# Example #11 (score: 0)
def exp_c(name):
    """BLSTM variant: BLSTM x2 -> max pool over time (ds=4) -> BLSTM ->
    2-component MixtureDensityLayer output."""
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    N = 50

    def blstm(std):
        # fresh dict (and fresh Normal initializer) per layer
        return {'type': BLSTMLayer, 'num_units': 50,
                'gradient_steps': GRADIENT_STEPS, 'peepholes': False,
                'W_in_to_cell': Normal(std=std)}

    config['layers_config'] = [
        blstm(1.),
        blstm(1/sqrt(N)),
        # pool 4 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 4, 'axis': 1, 'pool_function': T.max},
        blstm(1/sqrt(N)),
        {'type': MixtureDensityLayer, 'num_units': source.n_outputs,
         'num_components': 2}
    ]
    return Net(**config)
# Example #12 (score: 0)
def exp_a(name):
    """Hand-configured 3-appliance experiment: LSTM(50) -> sigmoid dense.

    Uses cross-entropy loss and Nesterov momentum at learning rate 1.0.
    """
    source = RealApplianceSource(
        filename='/data/dk3810/ukdale.h5',
        appliances=[
            ['fridge freezer', 'fridge', 'freezer'],
            'hair straighteners',
            'television'
            # 'dish washer' and ['washer dryer', 'washing machine'] excluded
        ],
        max_appliance_powers=[300, 500, 200],  # (excluded: 2500, 2400)
        on_power_thresholds=[20, 20, 20],      # (excluded: 20, 20)
        max_input_power=1000,
        min_on_durations=[60, 60, 60],         # (excluded: 1800, 1800)
        window=("2013-06-01", "2014-07-01"),
        seq_length=1000,
        output_one_appliance=False,
        boolean_targets=False,
        min_off_duration=60,
        train_buildings=[1],
        validation_buildings=[1],
        skip_probability=0,
        n_seq_per_batch=50
    )
    layers = [
        {'type': LSTMLayer, 'num_units': 50, 'W_in_to_cell': Uniform(5),
         'gradient_steps': GRADIENT_STEPS, 'peepholes': False},
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': sigmoid}
    ]
    return Net(
        experiment_name=name,
        source=source,
        save_plot_interval=SAVE_PLOT_INTERVAL,
        loss_function=crossentropy,
        updates=partial(nesterov_momentum, learning_rate=1.0),
        layers_config=layers
    )
# Example #13 (score: 0)
def exp_j(name):
    """Three BLSTM layers with a mean pool (ds=5) before the last one,
    then a linear dense output.

    Original note: avg valid cost = 1.4343644381 (virtually identical
    to BiRNN!).
    """
    src_params = deepcopy(source_dict)
    src_params['subsample_target'] = 5
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    N = 50

    def blstm():
        # fresh dict (and fresh Normal initializer) per layer
        return {'type': BLSTMLayer, 'num_units': N,
                'gradient_steps': GRADIENT_STEPS,
                'W_in_to_cell': Normal(std=1.), 'peepholes': False}

    config['layers_config'] = [
        blstm(),
        blstm(),
        # mean-pool 5 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 5, 'axis': 1, 'pool_function': T.mean},
        blstm(),
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None, 'W': Normal(std=(1 / sqrt(N)))}
    ]
    return Net(**config)
# Example #14 (score: 0)
def exp_e(name):
    """BiRNN x2 -> T.max pool (ds=5) -> BiRNN -> linear dense output.

    Original note: best yet? avg top-25 valid cost = 1.0413346291.
    """
    src_params = deepcopy(source_dict)
    src_params['subsample_target'] = 5
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    N = 50

    def birnn(std):
        # fresh dict (and fresh Normal initializer) per layer
        return {'type': BidirectionalRecurrentLayer, 'num_units': N,
                'gradient_steps': GRADIENT_STEPS,
                'W_in_to_hid': Normal(std=std), 'nonlinearity': tanh}

    config['layers_config'] = [
        birnn(1.),
        birnn(1 / sqrt(N)),
        # max-pool 5 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 5, 'axis': 1, 'pool_function': T.max},
        birnn(1 / sqrt(N)),
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None, 'W': Normal(std=(1 / sqrt(N)))}
    ]
    return Net(**config)
# Example #15 (score: 0)
def exp_x(name):
    """Append a sigmoid dense output layer to the base net config.

    Reuses the module-level ``source`` if present (resetting its lag and
    target_is_diff), otherwise builds one from ``source_dict``.

    Fix: the original declared ``global source`` inside the ``except``
    branch, *after* ``source`` was already read at the top of the
    function — a SyntaxError in Python 3 ("name is used prior to global
    declaration").  The declaration now comes first.
    """
    global source
    try:
        source.lag = 1
        source.target_is_diff = False
    except NameError:
        source = RealApplianceSource(**source_dict)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'].append({
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': sigmoid,
        'W': Normal(std=(1 / sqrt(50)))
    })
    net = Net(**net_dict_copy)
    return net
# Example #16 (score: 0)
def exp_a(name):
    """Plain MLP (50 -> 50 -> linear) on a hand-configured 3-appliance
    source with diff inputs.

    Original note: trains but doesn't do well at discriminating.
    """
    source = RealApplianceSource(
        filename='/data/dk3810/ukdale.h5',
        appliances=
        [['fridge freezer', 'fridge', 'freezer'], 'hair straighteners',
         'television'
         # 'dish washer' and ['washer dryer', 'washing machine'] excluded
         ],
        max_appliance_powers=[300, 500, 200, 2500, 2400],
        on_power_thresholds=[5, 5, 5, 5, 5],
        max_input_power=5900,
        min_on_durations=[60, 60, 60, 1800, 1800],
        min_off_durations=[12, 12, 12, 1800, 600],
        window=("2013-06-01", "2014-07-01"),
        seq_length=1500,
        output_one_appliance=False,
        boolean_targets=False,
        train_buildings=[1],
        validation_buildings=[1],
        skip_probability=0.0,
        n_seq_per_batch=25,
        include_diff=True)

    hidden = {'type': DenseLayer, 'num_units': 50, 'nonlinearity': rectify}
    layers = [
        dict(hidden),  # fresh copy per layer
        dict(hidden),
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None}
    ]
    return Net(experiment_name=name,
               source=source,
               save_plot_interval=250,
               loss_function=mse,
               updates=partial(nesterov_momentum,
                               learning_rate=.1,
                               clip_range=(-1, 1)),
               layers_config=layers)
# Example #17 (score: 0)
def exp_b(name):
    """3-appliance variant: override the source's appliance list and append
    a 2-component MixtureDensityLayer output to the base layer stack.

    Fix: removed the unused local ``N = 50``.
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy['appliances'] = [['fridge freezer', 'fridge', 'freezer'],
                                      'hair straighteners', 'television']
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'].extend([{
        'type': MixtureDensityLayer,
        'num_units': source.n_outputs,
        'num_components': 2
    }])
    net = Net(**net_dict_copy)
    return net
# Example #18 (score: 0)
def exp_x(name):
    """Build a fresh source and append a linear dense output layer sized to
    ``source.n_outputs`` to the base layer stack."""
    source = RealApplianceSource(**source_dict)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    output_layer = {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': None,
        'W': Normal(std=(1/sqrt(50)))
    }
    config['layers_config'].append(output_layer)
    return Net(**config)
# Example #19 (score: 0)
def exp_o(name):
    """Larger BiRNN net: BiRNN -> mean pool (ds=5) -> BiRNN x2 -> linear dense.

    Original note: best performance yet — valid cost 0.129 at 470 iterations!
    """
    global source
    src_params = deepcopy(source_dict)
    src_params['subsample_target'] = 5
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    def birnn(std):
        # fresh dict (and fresh Normal initializer) per layer
        return {'type': BidirectionalRecurrentLayer, 'num_units': 50,
                'gradient_steps': GRADIENT_STEPS,
                'W_in_to_hid': Normal(std=std), 'nonlinearity': tanh}

    config['layers_config'] = [
        birnn(1.),
        # mean-pool 5 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 5, 'axis': 1, 'pool_function': T.mean},
        birnn(1 / sqrt(50)),
        birnn(1 / sqrt(50)),
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None, 'W': Normal(std=(1 / sqrt(50)))}
    ]
    return Net(**config)
# Example #20 (score: 0)
def exp_q(name):
    """BiRNN(25) x2 -> mean pool with ds=3 (matching subsample_target=3) ->
    BiRNN(25) -> linear dense output.

    Original note: best valid cost = 0.3535898030 at iteration 450.
    """
    global source
    src_params = deepcopy(source_dict)
    src_params['subsample_target'] = 3
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    def birnn(std):
        # fresh dict (and fresh Normal initializer) per layer
        return {'type': BidirectionalRecurrentLayer, 'num_units': 25,
                'gradient_steps': GRADIENT_STEPS,
                'W_in_to_hid': Normal(std=std), 'nonlinearity': tanh}

    config['layers_config'] = [
        birnn(1.),
        birnn(1 / sqrt(25)),
        # mean-pool 3 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 3, 'axis': 1, 'pool_function': T.mean},
        birnn(1 / sqrt(25)),
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None, 'W': Normal(std=(1 / sqrt(25)))}
    ]
    return Net(**config)
# Example #21 (score: 0)
def exp_a(name):
    """BLSTM(40) -> stride-4 conv over the time axis -> BLSTM(80) ->
    softplus dense output sized to ``source.n_outputs``."""
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    swap_time_and_features = {'type': DimshuffleLayer, 'pattern': (0, 2, 1)}
    config['layers_config'] = [
        {'type': BLSTMLayer, 'num_units': 40,
         'gradient_steps': GRADIENT_STEPS, 'peepholes': False},
        dict(swap_time_and_features),  # (batch, features, time) for the conv
        {'type': Conv1DLayer, 'num_filters': 20, 'filter_length': 4,
         'stride': 4, 'nonlinearity': rectify},
        dict(swap_time_and_features),  # back to (batch, time, features)
        {'type': BLSTMLayer, 'num_units': 80,
         'gradient_steps': GRADIENT_STEPS, 'peepholes': False},
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': T.nnet.softplus}
    ]
    return Net(**config)
# Example #22 (score: 0)
def exp_f(name):
    """Two ReLU recurrent layers with identity recurrence (scale 0.9),
    then a linear dense output.

    Original notes: output one appliance; 0% skip prob for the first
    appliance and high skip prob for the others; input is diff; lag = 15.
    """
    global source
    src_params = deepcopy(source_dict)
    src_params['skip_probability'] = 0.75
    src_params['lag'] = 15
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    def relu_rnn(std):
        # fresh dict (and fresh initializers) per layer
        return {'type': RecurrentLayer, 'num_units': 50,
                'W_in_to_hid': Normal(std=std),
                'W_hid_to_hid': Identity(scale=0.9),
                'nonlinearity': rectify,
                'learn_init': False,
                'precompute_input': True}

    config['layers_config'] = [
        relu_rnn(1),
        relu_rnn(1/sqrt(50)),
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None, 'W': Normal(std=1/sqrt(50))}
    ]
    return Net(**config)
# Example #23 (score: 0)
def exp_b(name):
    """Dense autoencoder: SEQ_LENGTH -> /4 -> /8 -> 32 (bottleneck) ->
    /8 -> /4 -> SEQ_LENGTH, ReLU hidden layers and a linear output."""
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    hidden_widths = [
        SEQ_LENGTH,
        SEQ_LENGTH // 4,
        SEQ_LENGTH // 8,
        32,  # MIDDLE (bottleneck) LAYER
        SEQ_LENGTH // 8,
        SEQ_LENGTH // 4,
    ]
    layers = [{'type': DenseLayer, 'num_units': width, 'nonlinearity': rectify}
              for width in hidden_widths]
    layers.append({'type': DenseLayer, 'num_units': SEQ_LENGTH,
                   'nonlinearity': None})  # linear reconstruction
    config['layers_config'] = layers
    return Net(**config)
# Example #24 (score: 0)
def exp_a(name):
    """Append a softplus dense output layer to the base layer stack."""
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    config['layers_config'].extend([
        {'type': DenseLayer,
         'num_units': source.n_outputs,
         'nonlinearity': T.nnet.softplus}
    ])
    return Net(**config)
# Example #25 (score: 0)
def exp_c(name):
    """Like the base net but trained with scaled_cost3 (ignore_inactive),
    with a softplus dense output layer appended."""
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(
        experiment_name=name,
        source=source,
        loss_function=partial(scaled_cost3, ignore_inactive=True),
    )
    config['layers_config'].extend([{
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': T.nnet.softplus
    }])
    return Net(**config)
# Example #26 (score: 0)
def exp_c(name):
    """BiRNN(25) -> max pool (ds=5) -> BiRNN(5) bottleneck -> BiRNN(25) ->
    linear dense output.

    Original note: pretty good, not perfect.
    """
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    def birnn(units, std):
        # fresh dict (and fresh Normal initializer) per layer
        return {'type': BidirectionalRecurrentLayer, 'num_units': units,
                'gradient_steps': GRADIENT_STEPS,
                'W_in_to_hid': Normal(std=std), 'nonlinearity': tanh}

    config['layers_config'] = [
        birnn(25, 1.),
        # max-pool 5 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 5, 'axis': 1, 'pool_function': T.max},
        birnn(5, 1 / sqrt(25)),
        birnn(25, 1 / sqrt(5)),
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None, 'W': Normal(std=(1 / sqrt(25)))}
    ]
    return Net(**config)
# Example #27 (score: 0)
def exp_i(name):
    """BLSTM(25) -> mean pool (ds=5) -> BLSTM(5) bottleneck -> BLSTM(25) ->
    linear dense output.

    Original note: not great.
    """
    global source
    src_params = deepcopy(source_dict)
    source = RealApplianceSource(**src_params)
    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    def blstm(units, std):
        # fresh dict (and fresh Normal initializer) per layer
        return {'type': BLSTMLayer, 'num_units': units,
                'gradient_steps': GRADIENT_STEPS,
                'W_in_to_cell': Normal(std=std), 'peepholes': False}

    config['layers_config'] = [
        blstm(25, 1.),
        # mean-pool 5 adjacent steps along axis 1 (the time axis)
        {'type': FeaturePoolLayer, 'ds': 5, 'axis': 1, 'pool_function': T.mean},
        blstm(5, 1 / sqrt(5)),
        blstm(25, 1 / sqrt(25)),
        {'type': DenseLayer, 'num_units': source.n_outputs,
         'nonlinearity': None, 'W': Normal(std=(1 / sqrt(25)))}
    ]
    return Net(**config)
# Example #28 (score: 0)
def exp_x(name, learning_rate):
    """Append a sigmoid dense output layer to the base net and train it
    with Nesterov momentum at the given learning rate.

    Reuses the module-level ``source`` if it already exists; otherwise
    creates (and globally binds) a fresh one from ``source_dict``.
    """
    global source
    try:
        source  # noqa: B018 -- probe whether the global is bound yet
    except NameError:
        source = RealApplianceSource(**source_dict)

    config = deepcopy(net_dict)
    config.update(
        experiment_name=name,
        source=source,
        updates=partial(nesterov_momentum, learning_rate=learning_rate))

    output_layer = {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': sigmoid,
        'W': Normal(std=(1 / sqrt(50)))
    }
    config['layers_config'].append(output_layer)
    return Net(**config)
# Example #29
def exp_h(name):
    """Conv/deconv pair over the time axis with tied weights (the deconv
    reuses conv0's filters, transposed across channels and time-flipped),
    followed by a linear dense readout of length SEQ_LENGTH.

    Sets the module-level ``source`` as a side effect.
    """
    global source
    source = RealApplianceSource(**deepcopy(source_dict))

    config = deepcopy(net_dict)
    config['experiment_name'] = name
    config['source'] = source

    to_channels_first = {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)  # (batch, features, time)
    }
    encoder = {
        'label': 'conv0',
        'type': Conv1DLayer,  # convolve over the time axis
        'num_filters': 16,
        'filter_length': 4,
        'stride': 1,
        'nonlinearity': rectify,
        'border_mode': 'valid'
    }
    decoder = {
        'type': DeConv1DLayer,
        'num_output_channels': 1,
        'filter_length': 4,
        'stride': 1,
        'nonlinearity': rectify,
        'border_mode': 'full',
        # tie to conv0: swap in/out channels and flip along time
        'W': 'ref:conv0.W.dimshuffle(1, 0, 2)[:, :, ::-1]',
        'b': None,
        'shared_weights': True
    }
    to_channels_last = {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)  # back to (batch, time, features)
    }
    readout = {
        'type': DenseLayer,
        'num_units': SEQ_LENGTH,
        'nonlinearity': None
    }

    config['layers_config'] = [
        to_channels_first, encoder, decoder, to_channels_last, readout]
    return Net(**config)
# Example #30
def exp_a(name):
    # 5 appliances
    """Extend the base layers_config with a 2-component mixture-density
    output layer. Sets the module-level ``source`` as a side effect."""
    global source
    source = RealApplianceSource(**deepcopy(source_dict))

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    config['layers_config'].append({
        'type': MixtureDensityLayer,
        'num_units': source.n_outputs,
        'num_components': 2
    })
    return Net(**config)
# Example #31
def exp_k(name):
    # no downsampling
    # avg valid cost =  0.6215093136
    """Three stacked 50-unit bidirectional RNNs (target kept at full
    resolution) with a linear dense readout.

    NOTE(review): unlike sibling experiments, ``source`` is local here
    (no ``global source`` declaration) -- confirm that is intentional.
    """
    src_config = deepcopy(source_dict)
    src_config['subsample_target'] = 1  # keep target at full resolution
    source = RealApplianceSource(**src_config)

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    N = 50

    def brnn(std):
        # Bidirectional RNN layer; truncated BPTT via GRADIENT_STEPS.
        return {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=std),
            'nonlinearity': tanh
        }

    config['layers_config'] = [
        brnn(1.),
        brnn(1 / sqrt(N)),
        brnn(1 / sqrt(N)),
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': None,
            'W': Normal(std=(1 / sqrt(N)))
        }
    ]
    return Net(**config)
# Example #32
def exp_i(name):
    """Linear dense autoencoder with tied weights: 'dense0' followed by a
    SharedWeightsDenseLayer that references dense0's transposed W.

    Sets the module-level ``source`` as a side effect.
    """
    global source
    source = RealApplianceSource(**deepcopy(source_dict))

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    encoder = {
        'label': 'dense0',
        'type': DenseLayer,
        'num_units': SEQ_LENGTH,
        'nonlinearity': None
    }
    decoder = {
        'type': SharedWeightsDenseLayer,
        'num_units': SEQ_LENGTH,
        'nonlinearity': None,
        'W': 'ref:dense0.W.T'  # tie to the encoder's transposed weights
    }
    config['layers_config'] = [encoder, decoder]
    return Net(**config)
# Example #33
def exp_a(name):
    """Single 100-unit bidirectional RNN plus linear dense readout, with
    identical layer-replacement events scheduled (via ``layer_changes``)
    at iterations 1501, 3001 and 4501.

    Sets the module-level ``source`` as a side effect.
    """
    global source
    source = RealApplianceSource(**deepcopy(source_dict))
    N = 100

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    def brnn(std):
        # Bidirectional RNN layer; truncated BPTT via GRADIENT_STEPS.
        return {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=std),
            'nonlinearity': tanh
        }

    def readout():
        # Linear dense output, one unit per appliance channel.
        return {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': None,
            'W': Normal(std=(1 / sqrt(N)))
        }

    config['layers_config'] = [brnn(1.), readout()]

    # Replace the last layers (from index -3) with a fresh BRNN + readout.
    replacement = {
        'remove_from': -3,
        'new_layers': [brnn(1 / sqrt(N)), readout()]
    }
    config['layer_changes'] = {
        iteration: deepcopy(replacement)
        for iteration in range(1501, 4502, 1500)
    }

    return Net(**config)
# Example #34
def exp_a(name):
    # 5 appliances
    """Five-appliance source (skip_probability=0.7) with a 2-component
    mixture-density output; warm-starts from the parameters saved at
    iteration 4000. Sets the module-level ``source`` as a side effect."""
    global source
    src_config = deepcopy(source_dict)
    src_config.update(dict(
        appliances=[
            ['fridge freezer', 'fridge', 'freezer'],
            'hair straighteners',
            'television',
            'dish washer',
            ['washer dryer', 'washing machine']
        ],
        skip_probability=0.7))
    source = RealApplianceSource(**src_config)

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)
    config['layers_config'].append({
        'type': MixtureDensityLayer,
        'num_units': source.n_outputs,
        'num_components': 2
    })

    net = Net(**config)
    net.load_params(iteration=4000)  # resume from the saved checkpoint
    return net
# Example #35
def exp_d(name):
    """Two sigmoid dense layers (50 hidden units, then one unit per
    appliance channel). Reuses the module-level ``source`` if it already
    exists; otherwise creates and globally binds a fresh one."""
    global source
    try:
        source  # noqa: B018 -- probe whether the global is bound yet
    except NameError:
        source = RealApplianceSource(**source_dict)

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    hidden = {
        'type': DenseLayer,
        'num_units': 50,
        'nonlinearity': sigmoid
    }
    output = {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': sigmoid,
        'W': Normal(std=1 / sqrt(50))
    }
    config['layers_config'] = [hidden, output]
    return Net(**config)
# Example #36
def exp_b(name):
    # 5 appliances and normal cost func
    """Append a 2-component mixture-density output and train with the
    mean MDN negative log-likelihood. Sets the module-level ``source``
    as a side effect."""
    global source
    source = RealApplianceSource(**deepcopy(source_dict))

    config = deepcopy(net_dict)
    config.update(
        experiment_name=name,
        source=source,
        loss_function=lambda x, t: mdn_nll(x, t).mean(),
    )
    config['layers_config'].append({
        'type': MixtureDensityLayer,
        'num_units': source.n_outputs,
        'num_components': 2
    })
    return Net(**config)
# Example #37
def exp_a(name):
    # solid performer, not perfect though.
    """Single 50-unit BLSTM (no peepholes) followed by a linear dense
    readout. ``source`` is local here; the module-level global is not set."""
    source = RealApplianceSource(**deepcopy(source_dict))

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    recurrent = {
        'type': BLSTMLayer,
        'num_units': 50,
        'gradient_steps': GRADIENT_STEPS,
        'peepholes': False,
        'W_in_to_cell': Normal(std=1.)
    }
    readout = {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': None,
        'W': Normal(std=(1 / sqrt(50)))
    }
    config['layers_config'] = [recurrent, readout]
    return Net(**config)
# Example #38
def exp_a(name):
    """Single 100-unit bidirectional RNN followed by a linear dense
    readout. Sets the module-level ``source`` as a side effect."""
    global source
    source = RealApplianceSource(**deepcopy(source_dict))

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    n_hidden = 100
    recurrent = {
        'type': BidirectionalRecurrentLayer,
        'num_units': n_hidden,
        'gradient_steps': GRADIENT_STEPS,
        'W_in_to_hid': Normal(std=1.),
        'nonlinearity': tanh
    }
    readout = {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': None,
        'W': Normal(std=(1 / sqrt(n_hidden)))
    }
    config['layers_config'] = [recurrent, readout]
    return Net(**config)
def exp_a(name):
    """Hourglass of bidirectional RNNs (25 -> 10 -> 5 -> 10 -> 25 units)
    with two mean-pool downsampling stages on the time axis and a linear
    dense readout; the target is subsampled by 4 to match.

    Sets the module-level ``source`` as a side effect.
    """
    global source
    source = RealApplianceSource(**deepcopy(source_dict))
    source.subsample_target = 4  # two pool stages of ds=2 each

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    def brnn(n_units, std):
        # Bidirectional RNN layer; truncated BPTT via GRADIENT_STEPS.
        return {
            'type': BidirectionalRecurrentLayer,
            'num_units': n_units,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=std),
            'nonlinearity': tanh
        }

    def mean_pool():
        return {
            'type': FeaturePoolLayer,
            'ds': 2,  # number of feature maps to be pooled together
            'axis': 1,  # pool over the time axis
            'pool_function': T.mean
        }

    config['layers_config'] = [
        brnn(25, 1.),
        mean_pool(),
        brnn(10, 1 / sqrt(25)),
        mean_pool(),
        brnn(5, 1 / sqrt(10)),
        brnn(10, 1 / sqrt(5)),
        brnn(25, 1 / sqrt(10)),
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': None,
            'W': Normal(std=(1 / sqrt(25)))
        }
    ]
    return Net(**config)
def exp_a(name):
    # conv, conv
    """Two 'valid' 1-D convolutions over the time axis, then three 512-unit
    rectified dense layers and a linear readout sized to the processed
    target sequence length. Sets the module-level ``source``."""
    global source
    src_config = deepcopy(source_dict)
    src_config['logger'] = logging.getLogger(name)
    source = RealApplianceSource(**src_config)

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    NUM_FILTERS = 16
    target_seq_length = source.output_shape_after_processing()[1]

    def conv_layer():
        return {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        }

    def dense_layer(n_units, nonlin):
        return {
            'type': DenseLayer,
            'num_units': n_units,
            'nonlinearity': nonlin
        }

    config['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        conv_layer(),
        conv_layer(),
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        },
        dense_layer(512, rectify),
        dense_layer(512, rectify),
        dense_layer(512, rectify),
        dense_layer(target_seq_length, None)
    ]
    return Net(**config)
# create new source, based on net's source,
# but with 5 outputs (so each seq includes entire appliance activation,
# and to make it easier to plot every appliance),
# and long seq length,
# then make one long mains by concatenating each seq
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
    logger=logger,
    seq_length=2048,
    border=100,
    output_one_appliance=False,
    input_stats=input_stats,
    target_is_start_and_end_and_mean=False,
    window=("2014-12-10", None)
))
mains_source = RealApplianceSource(**source_dict_copy)
mains_source.start()

N_BATCHES = 1
logger.info("Preparing synthetic mains data for {} batches.".format(N_BATCHES))
mains = None
targets = None
TARGET_I = 2  # index of the appliance channel kept as the target
for batch_i in range(N_BATCHES):
    batch = mains_source.queue.get(timeout=30)
    mains_batch, targets_batch = batch.data
    if mains is None:
        mains = mains_batch
        targets = targets_batch[:, :, TARGET_I]
    else:
        mains = np.concatenate((mains, mains_batch))
        # BUG FIX: targets was previously not concatenated here, so with
        # N_BATCHES > 1 mains and targets would fall out of sync.
        targets = np.concatenate((targets, targets_batch[:, :, TARGET_I]))
def exp_a(name):
    # conv, conv
    """Conv->conv front end over the time axis, three labelled 512-unit
    rectified dense layers, a skip connection concatenating 'dense0' with
    'dense2', then two more dense layers ending in a linear readout of the
    processed target length. Sets the module-level ``source``."""
    global source
    src_config = deepcopy(source_dict)
    src_config['logger'] = logging.getLogger(name)
    source = RealApplianceSource(**src_config)

    config = deepcopy(net_dict)
    config.update(experiment_name=name, source=source)

    NUM_FILTERS = 16
    target_seq_length = source.output_shape_after_processing()[1]

    def conv_layer():
        return {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        }

    def dense_layer(n_units, nonlin, label=None):
        layer = {
            'type': DenseLayer,
            'num_units': n_units,
            'nonlinearity': nonlin
        }
        if label is not None:
            layer['label'] = label
        return layer

    config['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        conv_layer(),
        # NOTE: feeding the *first* conv's output into the skip connection
        # would need an ugly dimshuffle/reshape/reshape/dimshuffle
        # round-trip before the ConcatLayer, so only dense0/dense2 are
        # concatenated below.
        conv_layer(),
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1),  # back to (batch, time, features)
            'label': 'dimshuffle3'
        },
        dense_layer(512, rectify, label='dense0'),
        dense_layer(512, rectify, label='dense1'),
        dense_layer(512, rectify, label='dense2'),
        {
            'type': ConcatLayer,
            'axis': 1,
            'incomings': ['dense0', 'dense2']  # skip connection
        },
        dense_layer(512, rectify),
        dense_layer(target_seq_length, None)
    ]
    return Net(**config)
# Generate mains data
# create new source, based on net's source,
# but with 5 outputs (so each seq includes entire appliance activation,
# and to make it easier to plot every appliance),
# and long seq length,
# then make one long mains by concatenating each seq
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
    logger=logger,
    seq_length=2000,
    output_one_appliance=False,
    # reuse the trained net's input normalisation statistics
    input_stats=net.source.input_stats,
    target_is_start_and_end_and_mean=False,
    window=("2013-03-18", "2013-05-18")
))
mains_source = RealApplianceSource(**source_dict_copy)

N_BATCHES = 1
logger.info("Preparing synthetic mains data for {} batches.".format(N_BATCHES))
# Accumulators filled by the batch loop that follows.
mains = None
targets = None
for batch_i in range(N_BATCHES):
    mains_batch, targets_batch = mains_source._gen_data()
    mains_batch, targets_batch = mains_source._process_data(
        mains_batch, targets_batch)
    if mains is None:
        mains = mains_batch
        targets = targets_batch[:, :, 0]
    else:
        mains = np.concatenate((mains, mains_batch))
        targets = np.concatenate((targets, targets_batch[:, :, 0]))