Example No. 1
def exp_a(name):
    # 5 appliances
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(
        appliances=[
            ['fridge freezer', 'fridge', 'freezer'], 
            'hair straighteners', 
            'television',
            'dish washer',
            ['washer dryer', 'washing machine']
        ],
        skip_probability=0.7
    ))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    net_dict_copy['layers_config'].extend([
        {
            'type': MixtureDensityLayer,
            'num_units': source.n_outputs,
            'num_components': 2
        }
    ])
    net = Net(**net_dict_copy)
    net.load_params(iteration=4000)
    return net
Example No. 2
def exp_a(name):
    # ReLU hidden layers
    # linear output
    # output one appliance
    # 0% skip prob for first appliance
    # 100% skip prob for other appliances
    # input is diff
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'] = [{
        'type': BidirectionalRecurrentLayer,
        'num_units': 50,
        'W_in_to_hid': Normal(std=1),
        'W_hid_to_hid': Identity(scale=0.9),
        'nonlinearity': rectify,
        'learn_init': False,
        'precompute_input': True
    }, {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': None,
        'W': Normal(std=1 / sqrt(50))
    }]
    net = Net(**net_dict_copy)
    net.load_params(5000)
    return net
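As a point of reference, the skip-probability comments in exp_a above roughly correspond to the source configuration below. This is only a hedged sketch: the real source_dict is defined elsewhere in the script and may set different values; the parameter names are the ones RealApplianceSource accepts in the later examples, and source_dict_sketch is a made-up name for illustration.
# Hypothetical sketch of the skip-probability settings implied by the comments in exp_a.
source_dict_sketch = deepcopy(source_dict)
source_dict_sketch.update(dict(
    skip_probability=1.0,                     # 100% skip probability for the non-target appliances
    skip_probability_for_first_appliance=0.0  # 0% skip probability for the first (target) appliance
))
source = RealApplianceSource(**source_dict_sketch)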
Example No. 3
def exp_c(name):
    global source
    MAX_TARGET_POWER = 200
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(
        logger=logging.getLogger(name),
        appliances=[
            'HTPC',
            'dish washer',
            ['fridge freezer', 'fridge', 'freezer'],
            ['washer dryer', 'washing machine'],
            'kettle'
        ],
        max_appliance_powers=[MAX_TARGET_POWER, 2500, 300, 2400, 2600],
        on_power_thresholds=[5] * 5,
        min_on_durations=[60, 1800, 60, 1800, 30],
        min_off_durations=[12, 1800, 12, 600, 1],
        seq_length=2048
    ))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source,
        plotter=StartEndMeanPlotter(
            n_seq_to_plot=32, max_target_power=MAX_TARGET_POWER),
        learning_rate_changes_by_iteration={
            150000: 1e-4,
            275000: 1e-5
        }
    ))
    net = Net(**net_dict_copy)
    net.load_params(146758)
    return net
Example No. 4
def exp_a(name):
    # ReLU hidden layers
    # linear output
    # output one appliance
    # 0% skip prob for first appliance
    # 100% skip prob for other appliances
    # input is diff
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    net_dict_copy['layers_config'] = [
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 50,
            'W_in_to_hid': Normal(std=1),
            'W_hid_to_hid': Identity(scale=0.9),
            'nonlinearity': rectify,
            'learn_init': False, 
            'precompute_input': True
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': None,
            'W': Normal(std=1/sqrt(50))
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(5000)
    return net
Example No. 5
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    N = 512 * 8
    output_shape = source.output_shape_after_processing()
    net_dict_copy['layers_config'] = [{
        'type': DenseLayer,
        'num_units': N * 2,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': N,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': N // 2,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': N // 4,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': output_shape[1] * output_shape[2],
        'nonlinearity': sigmoid
    }]
    net = Net(**net_dict_copy)
    net.load_params(120000)
    return net
Example No. 6
def exp_a(name):
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=multi_source,
        plotter=StartEndMeanPlotter(
            n_seq_to_plot=32, max_target_power=MAX_TARGET_POWER)
    ))
    net = Net(**net_dict_copy)
    net.load_params(350000)
    return net
Example No. 7
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'].extend([{
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': T.nnet.softplus
    }])
    net = Net(**net_dict_copy)
    net.load_params(1000)
    return net
Example No. 8
def get_net(appliance, architecture):
    """
    Parameters
    ----------
    appliance : string
    architecture : {'rnn', 'ae', 'rectangles'}
    """
    NET_DICTS = {
        'rnn': net_dict_rnn,
        'ae': net_dict_ae,
        'rectangles': net_dict_rectangles
    }
    net_dict_func = NET_DICTS[architecture]
    source = get_source(
        appliance,
        logger,
        target_is_start_and_end_and_mean=(architecture == 'rectangles'),
        is_rnn=(architecture == 'rnn'),
        window_per_building={  # just load a tiny bit of data. Won't be used.
            1: ("2013-04-12", "2013-05-12"),
            2: ("2013-05-22", "2013-06-22"),
            3: ("2013-02-27", "2013-03-27"),
            4: ("2013-03-09", "2013-04-09"),
            5: ("2014-06-29", "2014-07-29")
        },
        source_type='real_appliance_source',
        filename=UKDALE_FILENAME
    )
    seq_length = source.seq_length
    net_dict = net_dict_func(seq_length)
    if appliance == 'dish washer' and architecture == 'rectangles':
        epochs = 200000
        net_dict.pop('epochs')
    else:
        epochs = net_dict.pop('epochs')
    net_dict_copy = deepcopy(net_dict)
    experiment_name = EXPERIMENT + "_" + appliance + "_" + architecture
    net_dict_copy.update(dict(
        source=source,
        logger=logger,
        experiment_name=experiment_name
    ))
    net = Net(**net_dict_copy)
    net.plotter.max_target_power = source.max_appliance_powers.values()[0]
    net.load_params(iteration=epochs,
                    path=join(NET_BASE_PATH, experiment_name))
    net.print_net()
    net.compile()
    return net
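For orientation, a hedged usage sketch of get_net follows. It assumes the module-level names referenced inside the function (net_dict_rnn, net_dict_ae, net_dict_rectangles, get_source, logger, EXPERIMENT, NET_BASE_PATH, UKDALE_FILENAME) are defined by the surrounding script, and that 'kettle' is an appliance name get_source accepts.
# Hypothetical usage: load and compile the pre-trained autoencoder net for the kettle.
kettle_ae = get_net('kettle', architecture='ae')
# get_net has already called load_params(), print_net() and compile(),
# so the returned Net is ready to use.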
Example No. 9
def get_net(appliance, architecture):
    """
    Parameters
    ----------
    appliance : string
    architecture : {'rnn', 'ae', 'rectangles'}
    """
    NET_DICTS = {
        'rnn': net_dict_rnn,
        'ae': net_dict_ae,
        'rectangles': net_dict_rectangles
    }
    net_dict_func = NET_DICTS[architecture]
    source = get_source(
        appliance,
        logger,
        target_is_start_and_end_and_mean=(architecture == 'rectangles'),
        is_rnn=(architecture == 'rnn'),
        window_per_building={  # just load a tiny bit of data. Won't be used.
            1: ("2013-04-12", "2013-05-12"),
            2: ("2013-05-22", "2013-06-22"),
            3: ("2013-02-27", "2013-03-27"),
            4: ("2013-03-09", "2013-04-09"),
            5: ("2014-06-29", "2014-07-29")
        },
        source_type='real_appliance_source',
        filename=UKDALE_FILENAME
    )
    seq_length = source.seq_length
    net_dict = net_dict_func(seq_length)
    epochs = net_dict.pop('epochs')
    net_dict_copy = deepcopy(net_dict)
    experiment_name = EXPERIMENT + "_" + appliance + "_" + architecture
    net_dict_copy.update(dict(
        source=source,
        logger=logger,
        experiment_name=experiment_name
    ))
    net = Net(**net_dict_copy)
    net.plotter.max_target_power = source.max_appliance_powers.values()[0]
    net.load_params(iteration=epochs,
                    path=join(NET_BASE_PATH, experiment_name))
    net.print_net()
    net.compile()
    return net
Example No. 10
def exp_c(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy['random_window'] = 256
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source,
        learning_rate=1e-5
    ))
    N = 512 * 8
    output_shape = source.output_shape_after_processing()
    net_dict_copy['layers_config'] = [
        {
            'type': DenseLayer,
            'num_units': N * 2,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': N,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': N // 2,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': N // 4,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': output_shape[1] * output_shape[2],
            'nonlinearity': sigmoid
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(30000)
    return net
Example No. 11
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    net_dict_copy['layers_config'].extend([
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': T.nnet.softplus
        }
    ])
    net = Net(**net_dict_copy)
    net.load_params(1000)
    return net
Example No. 12
def exp_a(name):
    # 5 appliances
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    net_dict_copy['layers_config'].extend([
        {
            'type': MixtureDensityLayer,
            'num_units': source.n_outputs,
            'num_components': 2
        }
    ])
    net = Net(**net_dict_copy)
    net.load_params(397)
    return net
Example No. 13
def exp_a(name):
    # 5 appliances
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(
        dict(appliances=[['fridge freezer', 'fridge', 'freezer'],
                         'hair straighteners', 'television', 'dish washer',
                         ['washer dryer', 'washing machine']],
             skip_probability=0.7))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'].extend([{
        'type': MixtureDensityLayer,
        'num_units': source.n_outputs,
        'num_components': 2
    }])
    net = Net(**net_dict_copy)
    net.load_params(iteration=4000)
    return net
Example No. 14
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    N = 50
    net_dict_copy['layers_config'] = [
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=1.),
            'nonlinearity': tanh
        },
        {
            'type': FeaturePoolLayer,
            'ds': 4,  # pool size: number of consecutive time steps pooled together
            'axis': 1,  # pool over the time axis
            'pool_function': T.max
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=1/sqrt(N)),
            'nonlinearity': tanh
        },
        {
            'type': MixtureDensityLayer,
            'num_units': source.n_outputs,
            'num_components': 2
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(iteration=5000)
    return net
Example No. 15
def exp_a(name):
    global source
    # `source` is reused from module scope; its construction is left commented out here:
    # source_dict_copy = deepcopy(source_dict)
    # source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    N = 512
    output_shape = source.output_shape_after_processing()
    net_dict_copy['layers_config'] = [
        {
            'type': DenseLayer,
            'num_units': N,
            'W': Normal(std=1/sqrt(N)),
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': N // 2,
            'W': Normal(std=1/sqrt(N)),
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': N // 4,
            'W': Normal(std=1/sqrt(N // 2)),
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': output_shape[1] * output_shape[2],
            'W': Normal(std=1/sqrt(N // 4)),
            'nonlinearity': T.nnet.softplus
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(25000)
    return net
Example No. 16
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(logger=logging.getLogger(name)))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy["layers_config"] = [
        {"type": DimshuffleLayer, "pattern": (0, 2, 1)},  # (batch, features, time)
        {"type": PadLayer, "width": 4},
        {
            "type": Conv1DLayer,  # convolve over the time axis
            "num_filters": 16,
            "filter_size": 4,
            "stride": 1,
            "nonlinearity": None,
            "border_mode": "valid",
        },
        {
            "type": Conv1DLayer,  # convolve over the time axis
            "num_filters": 16,
            "filter_size": 4,
            "stride": 1,
            "nonlinearity": None,
            "border_mode": "valid",
        },
        {"type": DimshuffleLayer, "pattern": (0, 2, 1), "label": "dimshuffle3"},  # back to (batch, time, features)
        {"type": DenseLayer, "num_units": 512 * 16, "nonlinearity": rectify, "label": "dense0"},
        {"type": DenseLayer, "num_units": 512 * 8, "nonlinearity": rectify, "label": "dense1"},
        {"type": DenseLayer, "num_units": 512 * 4, "nonlinearity": rectify, "label": "dense2"},
        {"type": DenseLayer, "num_units": 512, "nonlinearity": rectify},
        {"type": DenseLayer, "num_units": 3, "nonlinearity": None},
    ]
    net = Net(**net_dict_copy)
    net.load_params(300000)
    return net
Example No. 17
def exp_a(name):
    global source
    # `source` is reused from module scope; its construction is left commented out here:
    # source_dict_copy = deepcopy(source_dict)
    # source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    N = 512
    output_shape = source.output_shape_after_processing()
    net_dict_copy['layers_config'] = [{
        'type': DenseLayer,
        'num_units': N,
        'W': Normal(std=1 / sqrt(N)),
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': N // 2,
        'W': Normal(std=1 / sqrt(N)),
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': N // 4,
        'W': Normal(std=1 / sqrt(N // 2)),
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': output_shape[1] * output_shape[2],
        'W': Normal(std=1 / sqrt(N // 4)),
        'nonlinearity': T.nnet.softplus
    }]
    net = Net(**net_dict_copy)
    net.load_params(25000)
    return net
Example No. 18
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source = SameLocation(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    NUM_FILTERS = 4
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            'label': 'conv0',
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_length': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        },
        {
            'label': 'dense0',
            'type': DenseLayer,
            'num_units': (SEQ_LENGTH - 3) * NUM_FILTERS,
            'nonlinearity': rectify
        },
        {
            'label': 'dense1',
            'type': DenseLayer,
            'num_units': SEQ_LENGTH - 3,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': (SEQ_LENGTH - 3) * NUM_FILTERS,
            'nonlinearity': rectify
        },
        {
            'type': ReshapeLayer,
            'shape': (N_SEQ_PER_BATCH, SEQ_LENGTH - 3, NUM_FILTERS)
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            'type': DeConv1DLayer,
            'num_output_channels': 1,
            'filter_length': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'full'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(15000)
    return net
Example No. 19
def exp_i(name):
    # like exp_a but with max_input_power = 1000 W and 5 appliances
    # tanh and softplus output
    # sane inits for other layers
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(
        standardise_targets=True,
        unit_variance_targets=True,
        max_input_power=1000,
        skip_probability=0.9
    ))
    source_dict_copy['appliances'] = [
        ['fridge freezer', 'fridge', 'freezer'],
        'hair straighteners',
        'television',
        'dish washer',
        ['washer dryer', 'washing machine']
    ]
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source,
        loss_function=lambda x, t: mse(x, t).mean(),
        learning_rate=1e-4,
        learning_rate_changes_by_iteration={}
    ))
    net_dict_copy['layers_config'] = [
        {
            'type': DenseLayer,
            'num_units': 200,
            'nonlinearity': tanh,
            'W': Uniform(25),
            'b': Uniform(25)
        },
        {
            'type': DenseLayer,
            'num_units': 50,
            'nonlinearity': tanh,
            'W': Normal(std=1/sqrt(50)),
            'b': Normal(std=1/sqrt(50))
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 40,
            'W_in_to_hid': Normal(std=1/sqrt(50)),
            'gradient_steps': GRADIENT_STEPS,
            'nonlinearity': tanh,
            'learn_init': False, 
            'precompute_input': False
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': Conv1DLayer,
            'num_filters': 20,
            'filter_length': 4,
            'stride': 4,
            'nonlinearity': tanh,
            'W': Normal(std=1/sqrt(50))
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 80,
            'W_in_to_hid': Normal(std=1/sqrt(50)),
            'gradient_steps': GRADIENT_STEPS,
            'nonlinearity': tanh,
            'learn_init': False, 
            'precompute_input': False
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': T.nnet.softplus
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(18500)
    return net
Example No. 20
def exp_a(name):
    # tanh and softplus output
    # sane inits for other layers
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(
        standardise_targets=True,
        unit_variance_targets=True
    ))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source,
        loss_function=lambda x, t: mse(x, t).mean(),
        learning_rate=1e-3,
        learning_rate_changes_by_iteration={
            1000: 1e-4,
            2000: 1e-5
        }
    ))
    net_dict_copy['layers_config'] = [
        {
            'type': DenseLayer,
            'num_units': 50,
            'nonlinearity': tanh,
            'W': Uniform(25),
            'b': Uniform(25)
        },
        {
            'type': DenseLayer,
            'num_units': 50,
            'nonlinearity': tanh,
            'W': Normal(std=1/sqrt(50)),
            'b': Normal(std=1/sqrt(50))
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 40,
            'W_in_to_hid': Normal(std=1/sqrt(50)),
            'gradient_steps': GRADIENT_STEPS,
            'nonlinearity': tanh,
            'learn_init': False, 
            'precompute_input': False
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': Conv1DLayer,
            'num_filters': 20,
            'filter_length': 4,
            'stride': 4,
            'nonlinearity': tanh,
            'W': Normal(std=1/sqrt(50))
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 80,
            'W_in_to_hid': Normal(std=1/sqrt(50)),
            'gradient_steps': GRADIENT_STEPS,
            'nonlinearity': tanh,
            'learn_init': False, 
            'precompute_input': False
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': T.nnet.softplus
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(2000)
    return net
Example No. 21
def exp_f(name):
    # like exp_a but with max_input_power = 5900 W and 5 appliances
    # tanh and softplus output
    # sane inits for other layers
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(
        dict(standardise_targets=True,
             unit_variance_targets=True,
             max_input_power=5900))
    source_dict_copy['appliances'] = [
        ['fridge freezer', 'fridge', 'freezer'],
        'hair straighteners',
        'television',
        'dish washer',
        ['washer dryer', 'washing machine']
    ]

    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(
        dict(experiment_name=name,
             source=source,
             loss_function=lambda x, t: mse(x, t).mean(),
             learning_rate=1e-3,
             learning_rate_changes_by_iteration={
                 1000: 1e-4,
                 2000: 1e-5
             }))
    net_dict_copy['layers_config'] = [{
        'type': DenseLayer,
        'num_units': 50,
        'nonlinearity': tanh,
        'W': Uniform(25),
        'b': Uniform(25)
    }, {
        'type': DenseLayer,
        'num_units': 50,
        'nonlinearity': tanh,
        'W': Normal(std=1 / sqrt(50)),
        'b': Normal(std=1 / sqrt(50))
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
        'W_in_to_hid': Normal(std=1 / sqrt(50)),
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': tanh,
        'learn_init': False,
        'precompute_input': False
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': Conv1DLayer,
        'num_filters': 20,
        'filter_length': 4,
        'stride': 4,
        'nonlinearity': tanh,
        'W': Normal(std=1 / sqrt(50))
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 80,
        'W_in_to_hid': Normal(std=1 / sqrt(50)),
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': tanh,
        'learn_init': False,
        'precompute_input': False
    }, {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': T.nnet.softplus
    }]
    net = Net(**net_dict_copy)
    net.load_params(1000)
    return net
Example No. 22
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(logger=logging.getLogger(name)))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            'type': PadLayer,
            'width': 4
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': 16,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': 16,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1),  # back to (batch, time, features)
            'label': 'dimshuffle3'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 16,
            'nonlinearity': rectify,
            'label': 'dense0'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 8,
            'nonlinearity': rectify,
            'label': 'dense1'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 4,
            'nonlinearity': rectify,
            'label': 'dense2'
        },
        {
            'type': DenseLayer,
            'num_units': 512,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': 3,
            'nonlinearity': None
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(300000)
    return net
Example No. 23
def exp_z(name):
    # N = 50, 5 layers (!), 2x then 2x subsampling (4x total)
    # avg valid cost = 0.4871760607
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy['subsample_target'] = 4
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source,
        updates=partial(nesterov_momentum, learning_rate=0.001),
        epoch_callbacks={},
        do_save_activations=False
    ))
    N = 50
    net_dict_copy['layers_config'] = [
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=1.),
            'nonlinearity': tanh
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=1/sqrt(N)),
            'nonlinearity': tanh
        },
        {
            'type': FeaturePoolLayer,
            'ds': 2,  # pool size: number of consecutive time steps pooled together
            'axis': 1,  # pool over the time axis
            'pool_function': T.max
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=1/sqrt(N)),
            'nonlinearity': tanh
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=1/sqrt(N)),
            'nonlinearity': tanh
        },
        {
            'type': FeaturePoolLayer,
            'ds': 2,  # pool size: number of consecutive time steps pooled together
            'axis': 1,  # pool over the time axis
            'pool_function': T.max
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': N,
            'gradient_steps': GRADIENT_STEPS,
            'W_in_to_hid': Normal(std=1/sqrt(N)),
            'nonlinearity': tanh
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': None,
            'W': Normal(std=(1/sqrt(N)))
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params('e277z.hdf5', 1500)
    return net
Example No. 24
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(
        logger=logging.getLogger(name)
    ))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            'type': PadLayer,
            'width': 4
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': 16,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': 16,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1),  # back to (batch, time, features)
            'label': 'dimshuffle3'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 16,
            'nonlinearity': rectify,
            'label': 'dense0'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 8,
            'nonlinearity': rectify,
            'label': 'dense1'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 4,
            'nonlinearity': rectify,
            'label': 'dense2'
        },
        {
            'type': DenseLayer,
            'num_units': 512,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': 3,
            'nonlinearity': None
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(300000)
    return net
Example No. 25
def exp_a(name):
    logger = logging.getLogger(name)
    real_appliance_source1 = RealApplianceSource(
        logger=logger,
        filename=UKDALE_FILENAME,
        appliances=[
            TARGET_APPLIANCE, ['fridge freezer', 'fridge', 'freezer'],
            'dish washer', 'kettle', ['washer dryer', 'washing machine']
        ],
        max_appliance_powers=[MAX_TARGET_POWER, 300, 2500, 2600, 2400],
        on_power_thresholds=[5] * 5,
        min_on_durations=[12, 60, 1800, 12, 1800],
        min_off_durations=[12, 12, 1800, 12, 600],
        divide_input_by_max_input_power=False,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        output_one_appliance=True,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=0.75,
        skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
        target_is_start_and_end_and_mean=True,
        standardise_input=True,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS)

    same_location_source1 = SameLocation(
        logger=logger,
        filename=UKDALE_FILENAME,
        target_appliance=TARGET_APPLIANCE,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=SKIP_PROBABILITY_FOR_TARGET,
        target_is_start_and_end_and_mean=True,
        standardise_input=True,
        offset_probability=1,
        divide_target_by=MAX_TARGET_POWER,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS)

    multi_source = MultiSource(
        sources=[
            {
                'source': real_appliance_source1,
                'train_probability': 0.5,
                'validation_probability': 0
            },
            {
                'source': same_location_source1,
                'train_probability': 0.5,
                'validation_probability': 1
            }
        ],
        standardisation_source=same_location_source1
    )

    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(
        dict(experiment_name=name,
             source=multi_source,
             plotter=StartEndMeanPlotter(n_seq_to_plot=32,
                                         n_training_examples_to_plot=16,
                                         max_target_power=MAX_TARGET_POWER)))
    net = Net(**net_dict_copy)
    net.load_params(730532)
    return net
Example No. 26
def exp_b(name):
    # conv at beginning
    logger = logging.getLogger(name)
    global multi_source

    SEQ_LENGTH = 256
    N_SEQ_PER_BATCH = 64

    real_appliance_source1 = RealApplianceSource(
        logger=logger,
        filename=UKDALE_FILENAME,
        appliances=[
            TARGET_APPLIANCE,
            ['fridge freezer', 'fridge', 'freezer'],
            'dish washer',
            'kettle',
            ['washer dryer', 'washing machine']
        ],
        max_appliance_powers=[MAX_TARGET_POWER, 300, 2500, 2600, 2400],
        on_power_thresholds=[ON_POWER_THRESHOLD] + [10] * 4,
        min_on_durations=[MIN_ON_DURATION, 60, 1800, 12, 1800],
        min_off_durations=[MIN_OFF_DURATION, 12, 1800, 12, 600],
        divide_input_by_max_input_power=False,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        output_one_appliance=True,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=0.75,
        skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
        standardise_input=True,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        subsample_target=SUBSAMPLE_TARGET,
        input_padding=INPUT_PADDING
    )

    same_location_source1 = SameLocation(
        logger=logger,
        filename=UKDALE_FILENAME,
        target_appliance=TARGET_APPLIANCE,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=SKIP_PROBABILITY_FOR_TARGET,
        standardise_input=True,
        offset_probability=1,
        divide_target_by=MAX_TARGET_POWER,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        on_power_threshold=ON_POWER_THRESHOLD,
        min_on_duration=MIN_ON_DURATION,
        min_off_duration=MIN_OFF_DURATION,
        include_all=True,
        allow_incomplete=True,
        subsample_target=SUBSAMPLE_TARGET,
        input_padding=INPUT_PADDING
    )

    multi_source = MultiSource(
        sources=[
            {
                'source': real_appliance_source1,
                'train_probability': 0.5,
                'validation_probability': 0
            },
            {
                'source': same_location_source1,
                'train_probability': 0.5,
                'validation_probability': 1
            }
        ],
        standardisation_source=same_location_source1
    )

    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        auto_reshape=True,
        experiment_name=name,
        source=multi_source,
        plotter=Plotter(
            n_seq_to_plot=32,
            n_training_examples_to_plot=16
        ),
        layers_config=[
            {
                'type': DimshuffleLayer,
                'pattern': (0, 2, 1)  # (batch, features, time)
            },
            {
                'type': Conv1DLayer,  # convolve over the time axis
                'num_filters': 16,
                'filter_size': 4,
                'stride': 1,
                'nonlinearity': None,
                'border_mode': 'same'
            },
            {
                'type': DimshuffleLayer,
                'pattern': (0, 2, 1),  # back to (batch, time, features)
                'label': 'dimshuffle3'
            },
            {
                'type': BLSTMLayer,
                'num_units': 128,
                'merge_mode': 'concatenate'
            },
            {
                'type': BLSTMLayer,
                'num_units': 256,
                'merge_mode': 'concatenate'
            },
            {
                'type': DenseLayer,
                'num_units': 128,
                'nonlinearity': tanh
            },
            {
                'type': DenseLayer,
                'num_units': 1,
                'nonlinearity': None
            }
        ]
    ))
    net = Net(**net_dict_copy)
    net.load_params(1500)
    return net
Example No. 27
def exp_b(name):
    # conv at beginning
    logger = logging.getLogger(name)
    global multi_source

    SEQ_LENGTH = 256
    N_SEQ_PER_BATCH = 64

    real_appliance_source1 = RealApplianceSource(
        logger=logger,
        filename=UKDALE_FILENAME,
        appliances=[
            TARGET_APPLIANCE, ['fridge freezer', 'fridge', 'freezer'],
            'dish washer', 'kettle', ['washer dryer', 'washing machine']
        ],
        max_appliance_powers=[MAX_TARGET_POWER, 300, 2500, 2600, 2400],
        on_power_thresholds=[ON_POWER_THRESHOLD] + [10] * 4,
        min_on_durations=[MIN_ON_DURATION, 60, 1800, 12, 1800],
        min_off_durations=[MIN_OFF_DURATION, 12, 1800, 12, 600],
        divide_input_by_max_input_power=False,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        output_one_appliance=True,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=0.75,
        skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
        standardise_input=True,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        subsample_target=SUBSAMPLE_TARGET,
        input_padding=INPUT_PADDING)

    same_location_source1 = SameLocation(
        logger=logger,
        filename=UKDALE_FILENAME,
        target_appliance=TARGET_APPLIANCE,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=SKIP_PROBABILITY_FOR_TARGET,
        standardise_input=True,
        offset_probability=1,
        divide_target_by=MAX_TARGET_POWER,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        on_power_threshold=ON_POWER_THRESHOLD,
        min_on_duration=MIN_ON_DURATION,
        min_off_duration=MIN_OFF_DURATION,
        include_all=True,
        allow_incomplete=True,
        subsample_target=SUBSAMPLE_TARGET,
        input_padding=INPUT_PADDING)

    multi_source = MultiSource(
        sources=[
            {
                'source': real_appliance_source1,
                'train_probability': 0.5,
                'validation_probability': 0
            },
            {
                'source': same_location_source1,
                'train_probability': 0.5,
                'validation_probability': 1
            }
        ],
        standardisation_source=same_location_source1
    )

    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(
        dict(
            auto_reshape=True,
            experiment_name=name,
            source=multi_source,
            plotter=Plotter(n_seq_to_plot=32, n_training_examples_to_plot=16),
            layers_config=[
                {
                    'type': DimshuffleLayer,
                    'pattern': (0, 2, 1)  # (batch, features, time)
                },
                {
                    'type': Conv1DLayer,  # convolve over the time axis
                    'num_filters': 16,
                    'filter_size': 4,
                    'stride': 1,
                    'nonlinearity': None,
                    'border_mode': 'same'
                },
                {
                    'type': DimshuffleLayer,
                    'pattern': (0, 2, 1),  # back to (batch, time, features)
                    'label': 'dimshuffle3'
                },
                {
                    'type': BLSTMLayer,
                    'num_units': 128,
                    'merge_mode': 'concatenate'
                },
                {
                    'type': BLSTMLayer,
                    'num_units': 256,
                    'merge_mode': 'concatenate'
                },
                {
                    'type': DenseLayer,
                    'num_units': 128,
                    'nonlinearity': tanh
                },
                {
                    'type': DenseLayer,
                    'num_units': 1,
                    'nonlinearity': None
                }
            ]))
    net = Net(**net_dict_copy)
    net.load_params(1500)
    return net
Example No. 28
def exp_a(name):
    logger = logging.getLogger(name)
    real_appliance_source1 = RealApplianceSource(
        logger=logger,
        filename=UKDALE_FILENAME,
        appliances=[
            TARGET_APPLIANCE,
            ['fridge freezer', 'fridge', 'freezer'],
            'dish washer',
            'kettle',
            ['washer dryer', 'washing machine']
        ],
        max_appliance_powers=[MAX_TARGET_POWER, 300, 2500, 2600, 2400],
        on_power_thresholds=[ON_POWER_THRESHOLD] + [10] * 4,
        min_on_durations=[MIN_ON_DURATION, 60, 1800, 12, 1800],
        min_off_durations=[MIN_OFF_DURATION, 12, 1800, 12, 600],
        divide_input_by_max_input_power=False,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        output_one_appliance=True,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=0.75,
        skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
        target_is_start_and_end_and_mean=True,
        standardise_input=True,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS
    )

    same_location_source1 = SameLocation(
        logger=logger,
        filename=UKDALE_FILENAME,
        target_appliance=TARGET_APPLIANCE,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=SKIP_PROBABILITY_FOR_TARGET,
        target_is_start_and_end_and_mean=True,
        standardise_input=True,
        offset_probability=1,
        divide_target_by=MAX_TARGET_POWER,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        on_power_threshold=ON_POWER_THRESHOLD,
        min_on_duration=MIN_ON_DURATION,
        min_off_duration=MIN_OFF_DURATION
    )

    multi_source = MultiSource(
        sources=[
            {
                'source': real_appliance_source1,
                'train_probability': 0.5,
                'validation_probability': 0
            },
            {
                'source': same_location_source1,
                'train_probability': 0.5,
                'validation_probability': 1
            }
        ],
        standardisation_source=same_location_source1
    )

    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=multi_source,
        plotter=StartEndMeanPlotter(
            n_seq_to_plot=32,
            n_training_examples_to_plot=16,
            max_target_power=MAX_TARGET_POWER)
    ))
    net = Net(**net_dict_copy)
    net.load_params(11589)
    return net
Example No. 29
def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source = SameLocation(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    NUM_FILTERS = 4
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            'label': 'conv0',
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_length': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        },
        {
            'label': 'dense0',
            'type': DenseLayer,
            'num_units': (SEQ_LENGTH - 3) * NUM_FILTERS,
            'nonlinearity': rectify
        },
        {
            'label': 'dense1',
            'type': DenseLayer,
            'num_units': SEQ_LENGTH - 3,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': (SEQ_LENGTH - 3) * NUM_FILTERS,
            'nonlinearity': rectify
        },
        {
            'type': ReshapeLayer,
            'shape': (N_SEQ_PER_BATCH, SEQ_LENGTH - 3, NUM_FILTERS)
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            'type': DeConv1DLayer,
            'num_output_channels': 1,
            'filter_length': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'full'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        }
    ]
    net = Net(**net_dict_copy)
    net.load_params(15000)
    return net