Esempio n. 1
0
def exp_a(name, target_appliance, seq_length):
    """Two stacked valid-mode conv layers over time, then a 512-256-128 dense stack.

    Rebinds the module-level `source` global to a fresh SameLocation source
    configured for `target_appliance`, and returns a Net built from the
    module-level `net_dict` template.
    """
    global source
    src_config = deepcopy(source_dict)
    src_config.update(dict(
        target_appliance=target_appliance,
        logger=logging.getLogger(name),
        seq_length=seq_length,
    ))
    source = SameLocation(**src_config)
    net_config = deepcopy(net_dict)
    net_config.update(dict(experiment_name=name, source=source))
    n_filters = 16
    target_seq_length = source.output_shape_after_processing()[1]
    # Both conv layers share the same configuration; build it once and copy.
    conv_layer = {
        'type': Conv1DLayer,  # convolve over the time axis
        'num_filters': n_filters,
        'filter_size': 4,
        'stride': 1,
        'nonlinearity': None,
        'border_mode': 'valid'
    }
    net_config['layers_config'] = [
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},  # (batch, features, time)
        dict(conv_layer),
        dict(conv_layer),
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},  # back to (batch, time, features)
        {'type': DenseLayer, 'num_units': 512, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': 256, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': 128, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': target_seq_length, 'nonlinearity': None},
    ]
    return Net(**net_config)
Esempio n. 2
0
def exp_e(name):
    """'Same'-mode conv over time, max feature-pooling, then a dense stack.

    Rebinds the module-level `source` global and returns a Net built from
    the module-level `net_dict` template.
    """
    # conv then pool
    global source
    src_cfg = deepcopy(source_dict)
    src_cfg.update(dict(logger=logging.getLogger(name)))
    source = SameLocation(**src_cfg)
    net_cfg = deepcopy(net_dict)
    net_cfg.update(dict(experiment_name=name, source=source))
    n_filters = 16
    target_seq_length = source.output_shape_after_processing()[1]
    layers = []
    layers.append({"type": DimshuffleLayer, "pattern": (0, 2, 1)})  # (batch, features, time)
    layers.append({
        "type": Conv1DLayer,  # convolve over the time axis
        "num_filters": n_filters,
        "filter_size": 4,
        "stride": 1,
        "nonlinearity": None,
        "border_mode": "same",
    })
    layers.append({
        "type": FeaturePoolLayer,
        "pool_size": 2,  # number of feature maps to be pooled together
        "axis": 2,  # pool over the time axis
        "pool_function": T.max,
    })
    layers.append({"type": DimshuffleLayer, "pattern": (0, 2, 1)})  # back to (batch, time, features)
    for num_units, nonlinearity in ((512, rectify), (256, rectify),
                                    (128, rectify), (target_seq_length, None)):
        layers.append({"type": DenseLayer, "num_units": num_units,
                       "nonlinearity": nonlinearity})
    net_cfg["layers_config"] = layers
    return Net(**net_cfg)
Esempio n. 3
0
def exp_a(name):
    """Build a purely dense (no convolution) network for experiment `name`.

    Rebinds the module-level `source` global to a fresh SameLocation source,
    then returns a Net whose layers are four DenseLayers tapering from 512
    units down to the target sequence length (linear output layer).
    """
    # no conv
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(logger=logging.getLogger(name)))
    source = SameLocation(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    # Index 1 of the processed output shape is the per-sequence output length.
    target_seq_length = source.output_shape_after_processing()[1]
    # NOTE: removed the unused NUM_FILTERS constant — this experiment has no
    # conv layers, so the value was never read.
    net_dict_copy['layers_config'] = [{
        'type': DenseLayer,
        'num_units': 512,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': 256,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': 128,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': target_seq_length,
        'nonlinearity': None
    }]
    net = Net(**net_dict_copy)
    return net
Esempio n. 4
0
def exp_a(name, target_appliance, seq_length):
    """One valid-mode conv over time, dense stack, sigmoid output layer.

    Rebinds the module-level `source` global to a SameLocation source for
    `target_appliance` and returns a Net built from `net_dict`.
    """
    global source
    src_params = deepcopy(source_dict)
    src_params.update(
        dict(target_appliance=target_appliance,
             logger=logging.getLogger(name),
             seq_length=seq_length))
    source = SameLocation(**src_params)
    net_params = deepcopy(net_dict)
    net_params.update(dict(experiment_name=name, source=source))
    num_filters = 16
    target_seq_length = source.output_shape_after_processing()[1]
    layers = [
        # to (batch, features, time) so the conv runs over the time axis
        {"type": DimshuffleLayer, "pattern": (0, 2, 1)},
        {
            "type": Conv1DLayer,  # convolve over the time axis
            "num_filters": num_filters,
            "filter_size": 4,
            "stride": 1,
            "nonlinearity": None,
            "border_mode": "valid",
        },
        # back to (batch, time, features)
        {"type": DimshuffleLayer, "pattern": (0, 2, 1)},
    ]
    for num_units, nonlinearity in [(512, rectify), (256, rectify),
                                    (128, rectify), (target_seq_length, sigmoid)]:
        layers.append({"type": DenseLayer, "num_units": num_units,
                       "nonlinearity": nonlinearity})
    net_params["layers_config"] = layers
    return Net(**net_params)
Esempio n. 5
0
def exp_e(name):
    """'Same'-mode conv then max feature-pooling over time, then dense layers.

    Rebinds the module-level `source` global and returns a Net built from
    the module-level `net_dict` template.
    """
    # conv then pool
    global source
    source_cfg = deepcopy(source_dict)
    source_cfg.update(dict(logger=logging.getLogger(name)))
    source = SameLocation(**source_cfg)
    net_cfg = deepcopy(net_dict)
    net_cfg.update(dict(experiment_name=name, source=source))
    NUM_FILTERS = 16
    target_seq_length = source.output_shape_after_processing()[1]
    net_cfg['layers_config'] = [
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},  # (batch, features, time)
        {'type': Conv1DLayer,          # convolve over the time axis
         'num_filters': NUM_FILTERS,
         'filter_size': 4,
         'stride': 1,
         'nonlinearity': None,
         'border_mode': 'same'},
        {'type': FeaturePoolLayer,
         'pool_size': 2,               # number of feature maps to be pooled together
         'axis': 2,                    # pool over the time axis
         'pool_function': T.max},
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},  # back to (batch, time, features)
        {'type': DenseLayer, 'num_units': 512, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': 256, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': 128, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': target_seq_length, 'nonlinearity': None},
    ]
    return Net(**net_cfg)
Esempio n. 6
0
def exp_a(name, target_appliance, seq_length):
    """Single valid-mode conv over time followed by a 512-256-128 dense stack.

    Rebinds the module-level `source` global to a SameLocation source for
    `target_appliance` and returns a Net built from `net_dict`.
    """
    global source
    src_settings = deepcopy(source_dict)
    src_settings.update(dict(
        target_appliance=target_appliance,
        logger=logging.getLogger(name),
        seq_length=seq_length,
    ))
    source = SameLocation(**src_settings)
    net_settings = deepcopy(net_dict)
    net_settings.update(dict(experiment_name=name, source=source))
    NUM_FILTERS = 16
    target_seq_length = source.output_shape_after_processing()[1]
    net_settings['layers_config'] = [
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},  # (batch, features, time)
        {'type': Conv1DLayer,          # convolve over the time axis
         'num_filters': NUM_FILTERS,
         'filter_size': 4,
         'stride': 1,
         'nonlinearity': None,
         'border_mode': 'valid'},
        {'type': DimshuffleLayer, 'pattern': (0, 2, 1)},  # back to (batch, time, features)
        {'type': DenseLayer, 'num_units': 512, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': 256, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': 128, 'nonlinearity': rectify},
        {'type': DenseLayer, 'num_units': target_seq_length, 'nonlinearity': None},
    ]
    return Net(**net_settings)
Esempio n. 7
0
def exp_f(name):
    """All-dense net: SEQ_LENGTH -> 4x -> 4x -> SEQ_LENGTH -> SEQ_LENGTH (linear out).

    Rebinds the module-level `source` global to a SameLocation source built
    from the unmodified `source_dict` template and returns a Net.
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source = SameLocation(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    # BUG FIX: the first two layers were both labelled 'dense0' (and the third
    # 'dense1'), so one label shadowed the other wherever layers are addressed
    # by label. Labels are now unique and sequential. If the duplicate was
    # deliberate (e.g. for param loading by label), revert this.
    net_dict_copy['layers_config'] = [{
        'label': 'dense0',
        'type': DenseLayer,
        'num_units': SEQ_LENGTH,
        'nonlinearity': rectify
    }, {
        'label': 'dense1',
        'type': DenseLayer,
        'num_units': SEQ_LENGTH * 4,
        'nonlinearity': rectify
    }, {
        'label': 'dense2',
        'type': DenseLayer,
        'num_units': SEQ_LENGTH * 4,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': SEQ_LENGTH,
        'nonlinearity': rectify
    }, {
        'type': DenseLayer,
        'num_units': SEQ_LENGTH,
        'nonlinearity': None
    }]
    net = Net(**net_dict_copy)
    return net
Esempio n. 8
0
def exp_a(name):
    """Dense-only baseline (no convolution layers) for experiment `name`.

    Rebinds the module-level `source` global and returns a Net whose layers
    taper 512 -> 256 -> 128 -> target sequence length (linear output).
    """
    # no conv
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(logger=logging.getLogger(name)))
    source = SameLocation(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    # NOTE: removed the unused NUM_FILTERS constant — no conv layers here.
    target_seq_length = source.output_shape_after_processing()[1]
    net_dict_copy["layers_config"] = [
        {"type": DenseLayer, "num_units": 512, "nonlinearity": rectify},
        {"type": DenseLayer, "num_units": 256, "nonlinearity": rectify},
        {"type": DenseLayer, "num_units": 128, "nonlinearity": rectify},
        {"type": DenseLayer, "num_units": target_seq_length, "nonlinearity": None},
    ]
    net = Net(**net_dict_copy)
    return net
Esempio n. 9
0
def exp_a(name, target_appliance, seq_length):
    """Dense-only network (512-256-128) with a sigmoid output layer.

    Rebinds the module-level `source` global to a SameLocation source for
    `target_appliance` and returns a Net built from `net_dict`.
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(
        target_appliance=target_appliance,
        logger=logging.getLogger(name),
        seq_length=seq_length
    ))
    source = SameLocation(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    # NOTE: removed the unused NUM_FILTERS constant — this experiment
    # contains no conv layers, so the value was never read.
    target_seq_length = source.output_shape_after_processing()[1]
    net_dict_copy['layers_config'] = [
        {
            'type': DenseLayer,
            'num_units': 512,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': 256,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': 128,
            'nonlinearity': rectify
        },
        {
            'type': DenseLayer,
            'num_units': target_seq_length,
            'nonlinearity': sigmoid
        }
    ]
    net = Net(**net_dict_copy)
    return net
Esempio n. 10
0
def exp_a(name, target_appliance, seq_length):
    """Conv/deconv autoencoder-style net for `target_appliance`.

    Pipeline: valid-mode conv over time -> dense layers narrowing to a
    128-unit bottleneck -> dense expansion -> reshape back to
    (batch, time, filters) -> full-mode deconv to recover the original
    sequence length. Rebinds the module-level `source` global.

    Parameters
    ----------
    name : str
        Experiment name; also used as the logger name.
    target_appliance
        Forwarded to the SameLocation source.
    seq_length : int
        Input sequence length; dense-layer sizes are derived from it.
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(
        dict(target_appliance=target_appliance,
             logger=logging.getLogger(name),
             seq_length=seq_length))
    source = SameLocation(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    NUM_FILTERS = 4
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            # NOTE(review): this experiment uses 'filter_length' while other
            # experiments in this file use 'filter_size' — confirm which key
            # this Conv1DLayer wrapper actually expects.
            'label': 'conv0',
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': NUM_FILTERS,
            'filter_length': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        },
        {
            # seq_length - 3: a valid-mode conv with filter length 4 and
            # stride 1 shortens the sequence by 3 samples.
            'label': 'dense0',
            'type': DenseLayer,
            'num_units': (seq_length - 3) * NUM_FILTERS,
            'nonlinearity': rectify
        },
        {
            'label': 'dense1',
            'type': DenseLayer,
            'num_units': seq_length,
            'nonlinearity': rectify
        },
        {
            # 128-unit bottleneck in the middle of the encoder/decoder stack.
            'label': 'dense2',
            'type': DenseLayer,
            'num_units': 128,
            'nonlinearity': rectify
        },
        {
            'label': 'dense3',
            'type': DenseLayer,
            'num_units': seq_length,
            'nonlinearity': rectify
        },
        {
            # Expand back to the flattened conv-output size before reshaping.
            'type': DenseLayer,
            'num_units': (seq_length - 3) * NUM_FILTERS,
            'nonlinearity': rectify
        },
        {
            'type': ReshapeLayer,
            'shape': (N_SEQ_PER_BATCH, seq_length - 3, NUM_FILTERS)
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            # 'full' border mode lengthens the sequence by filter_length - 1,
            # undoing the earlier valid-mode shortening.
            'type': DeConv1DLayer,
            'num_output_channels': 1,
            'filter_length': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'full'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # back to (batch, time, features)
        }
    ]
    net = Net(**net_dict_copy)
    return net
Esempio n. 11
0
def get_source(appliance,
               logger,
               target_is_start_and_end_and_mean=False,
               is_rnn=False,
               window_per_building=WINDOW_PER_BUILDING,
               source_type='multisource',
               filename=UKDALE_FILENAME):
    """
    Build a data source for disaggregating `appliance`.

    Selects per-appliance constants (sequence length, buildings, power
    thresholds, durations), builds a RealApplianceSource (synthetic mixes)
    and — unless `source_type` says otherwise — a SameLocation source of
    real data, combined in a MultiSource that trains on a 50/50 mix and
    validates only on real data.

    Parameters
    ----------
    appliance : {'microwave', 'washing machine', 'fridge', 'kettle',
                 'dish washer'}
    logger : logging.Logger
    target_is_start_and_end_and_mean : bool
        Forwarded to both sources.
    is_rnn : bool
        If True, some appliances drop the batch size from 64 to 16.
    source_type : {'multisource', 'real_appliance_source'}
    filename : str
        Path to the UK-DALE data file.

    Returns
    -------
    Source

    Raises
    ------
    ValueError
        If `appliance` is not one of the recognised appliance names.
    """
    N_SEQ_PER_BATCH = 64
    TRAIN_BUILDINGS_REAL = None

    if appliance == 'microwave':
        SEQ_LENGTH = 288
        TRAIN_BUILDINGS = [1, 2]
        VALIDATION_BUILDINGS = [5]
        APPLIANCES = [
            'microwave', ['fridge freezer', 'fridge', 'freezer'],
            'dish washer', 'kettle', ['washer dryer', 'washing machine']
        ]
        MAX_APPLIANCE_POWERS = [3000, 300, 2500, 3100, 2500]
        ON_POWER_THRESHOLDS = [200, 50, 10, 2000, 20]
        MIN_ON_DURATIONS = [12, 60, 1800, 12, 1800]
        MIN_OFF_DURATIONS = [30, 12, 1800, 0, 160]

    elif appliance == 'washing machine':
        SEQ_LENGTH = 1024
        TRAIN_BUILDINGS = [1, 5]
        VALIDATION_BUILDINGS = [2]
        APPLIANCES = [['washer dryer', 'washing machine'],
                      ['fridge freezer', 'fridge', 'freezer'], 'dish washer',
                      'kettle', 'microwave']
        MAX_APPLIANCE_POWERS = [2500, 300, 2500, 3100, 3000]
        ON_POWER_THRESHOLDS = [20, 50, 10, 2000, 200]
        MIN_ON_DURATIONS = [1800, 60, 1800, 12, 12]
        MIN_OFF_DURATIONS = [160, 12, 1800, 0, 30]
        if is_rnn:
            N_SEQ_PER_BATCH = 16

    elif appliance == 'fridge':
        SEQ_LENGTH = 512
        TRAIN_BUILDINGS = [1, 2, 4]
        VALIDATION_BUILDINGS = [5]
        APPLIANCES = [['fridge freezer', 'fridge', 'freezer'],
                      ['washer dryer', 'washing machine'], 'dish washer',
                      'kettle', 'microwave']
        MAX_APPLIANCE_POWERS = [300, 2500, 2500, 3100, 3000]
        ON_POWER_THRESHOLDS = [50, 20, 10, 2000, 200]
        MIN_ON_DURATIONS = [60, 1800, 1800, 12, 12]
        MIN_OFF_DURATIONS = [12, 160, 1800, 0, 30]
        if is_rnn:
            N_SEQ_PER_BATCH = 16

    elif appliance == 'kettle':
        SEQ_LENGTH = 128
        TRAIN_BUILDINGS = [1, 2, 3, 4]
        # House 3's mains often doesn't include kettle!
        TRAIN_BUILDINGS_REAL = [1, 2, 4]
        VALIDATION_BUILDINGS = [5]
        APPLIANCES = [
            'kettle', ['fridge freezer', 'fridge', 'freezer'],
            ['washer dryer', 'washing machine'], 'dish washer', 'microwave'
        ]
        MAX_APPLIANCE_POWERS = [3100, 300, 2500, 2500, 3000]
        ON_POWER_THRESHOLDS = [2000, 50, 20, 10, 200]
        MIN_ON_DURATIONS = [12, 60, 1800, 1800, 12]
        MIN_OFF_DURATIONS = [0, 12, 160, 1800, 30]

    elif appliance == 'dish washer':
        SEQ_LENGTH = 1024 + 512
        TRAIN_BUILDINGS = [1, 2]
        VALIDATION_BUILDINGS = [5]
        APPLIANCES = [
            'dish washer', ['fridge freezer', 'fridge', 'freezer'],
            ['washer dryer', 'washing machine'], 'kettle', 'microwave'
        ]
        MAX_APPLIANCE_POWERS = [2500, 300, 2500, 3100, 3000]
        ON_POWER_THRESHOLDS = [10, 50, 20, 2000, 200]
        MIN_ON_DURATIONS = [1800, 60, 1800, 12, 12]
        MIN_OFF_DURATIONS = [1800, 12, 160, 0, 30]

        if is_rnn:
            N_SEQ_PER_BATCH = 16

    else:
        # Previously an unknown appliance fell through the if/elif chain and
        # raised a confusing NameError on APPLIANCES below; fail fast instead.
        raise ValueError("Unknown appliance: {!r}".format(appliance))

    # By convention the target appliance is first in every per-appliance list.
    TARGET_APPLIANCE = APPLIANCES[0]
    MAX_TARGET_POWER = MAX_APPLIANCE_POWERS[0]
    ON_POWER_THRESHOLD = ON_POWER_THRESHOLDS[0]
    MIN_ON_DURATION = MIN_ON_DURATIONS[0]
    MIN_OFF_DURATION = MIN_OFF_DURATIONS[0]
    if TRAIN_BUILDINGS_REAL is None:
        TRAIN_BUILDINGS_REAL = TRAIN_BUILDINGS

    real_appliance_source1 = RealApplianceSource(
        logger=logger,
        filename=filename,
        appliances=APPLIANCES,
        max_appliance_powers=MAX_APPLIANCE_POWERS,
        on_power_thresholds=ON_POWER_THRESHOLDS,
        min_on_durations=MIN_ON_DURATIONS,
        min_off_durations=MIN_OFF_DURATIONS,
        divide_input_by_max_input_power=False,
        window_per_building=window_per_building,
        seq_length=SEQ_LENGTH,
        output_one_appliance=True,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=0.75,
        skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
        standardise_input=True,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        target_is_start_and_end_and_mean=target_is_start_and_end_and_mean)

    if source_type != 'multisource':
        return real_appliance_source1

    same_location_source1 = SameLocation(
        logger=logger,
        filename=filename,
        target_appliance=TARGET_APPLIANCE,
        window_per_building=window_per_building,
        seq_length=SEQ_LENGTH,
        train_buildings=TRAIN_BUILDINGS_REAL,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=SKIP_PROBABILITY_FOR_TARGET,
        standardise_input=True,
        offset_probability=1,
        divide_target_by=MAX_TARGET_POWER,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        on_power_threshold=ON_POWER_THRESHOLD,
        min_on_duration=MIN_ON_DURATION,
        min_off_duration=MIN_OFF_DURATION,
        include_all=False,
        allow_incomplete=False,
        target_is_start_and_end_and_mean=target_is_start_and_end_and_mean)

    # Train on a 50/50 mix of synthetic and real data; validate on real only.
    multi_source = MultiSource(sources=[{
        'source': real_appliance_source1,
        'train_probability': 0.5,
        'validation_probability': 0
    }, {
        'source': same_location_source1,
        'train_probability': 0.5,
        'validation_probability': 1
    }],
                               standardisation_source=same_location_source1)

    return multi_source
Esempio n. 12
0
def exp_a(name):
    """Build a multi-source start/end/mean net and resume from saved params.

    Combines a RealApplianceSource (synthetic mixes; training only) with a
    SameLocation source (real data; validation only) in a MultiSource,
    builds the Net from the module-level `net_dict` template with a
    StartEndMeanPlotter, then loads previously saved parameters.
    """
    logger = logging.getLogger(name)
    real_appliance_source1 = RealApplianceSource(
        logger=logger,
        filename=UKDALE_FILENAME,
        appliances=[
            TARGET_APPLIANCE, ['fridge freezer', 'fridge', 'freezer'],
            'dish washer', 'kettle', ['washer dryer', 'washing machine']
        ],
        max_appliance_powers=[MAX_TARGET_POWER, 300, 2500, 2600, 2400],
        on_power_thresholds=[5] * 5,
        min_on_durations=[12, 60, 1800, 12, 1800],
        min_off_durations=[12, 12, 1800, 12, 600],
        divide_input_by_max_input_power=False,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        output_one_appliance=True,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=0.75,
        skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
        target_is_start_and_end_and_mean=True,
        standardise_input=True,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS)

    same_location_source1 = SameLocation(
        logger=logger,
        filename=UKDALE_FILENAME,
        target_appliance=TARGET_APPLIANCE,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=SKIP_PROBABILITY_FOR_TARGET,
        target_is_start_and_end_and_mean=True,
        standardise_input=True,
        offset_probability=1,
        divide_target_by=MAX_TARGET_POWER,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS)

    # 50/50 synthetic/real mix for training; validation uses real data only.
    multi_source = MultiSource(sources=[{
        'source': real_appliance_source1,
        'train_probability': 0.5,
        'validation_probability': 0
    }, {
        'source': same_location_source1,
        'train_probability': 0.5,
        'validation_probability': 1
    }],
                               standardisation_source=same_location_source1)

    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(
        dict(experiment_name=name,
             source=multi_source,
             plotter=StartEndMeanPlotter(n_seq_to_plot=32,
                                         n_training_examples_to_plot=16,
                                         max_target_power=MAX_TARGET_POWER)))
    net = Net(**net_dict_copy)
    # Resume from a saved checkpoint — presumably iteration 730532 of an
    # earlier run of this experiment; verify the params file exists.
    net.load_params(730532)
    return net
Esempio n. 13
0
    seq_length=512,
    output_one_appliance=True,
    train_buildings=[1],
    validation_buildings=[1],
    n_seq_per_batch=N_SEQ_PER_BATCH,
    skip_probability=0.75,
    skip_probability_for_first_appliance=0.5,
    target_is_start_and_end_and_mean=True,
    standardise_input=True)

same_location_source1 = SameLocation(filename=UKDALE_FILENAME,
                                     target_appliance='fridge freezer',
                                     window=("2013-04-18", None),
                                     seq_length=512,
                                     train_buildings=[1],
                                     validation_buildings=[1],
                                     n_seq_per_batch=N_SEQ_PER_BATCH,
                                     skip_probability=0.5,
                                     target_is_start_and_end_and_mean=True,
                                     standardise_input=True,
                                     offset_probability=1,
                                     divide_target_by=MAX_TARGET_POWER)

multi_source = MultiSource(sources=[{
    'source': real_appliance_source1,
    'train_probability': 0.5,
    'validation_probability': 0
}, {
    'source': same_location_source1,
    'train_probability': 0.5,
    'validation_probability': 1
}],
Esempio n. 14
0
    n_seq_per_batch=N_SEQ_PER_BATCH,
    skip_probability=0.75,
    skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
    target_is_start_and_end_and_mean=True,
    standardise_input=True,
    input_stats=INPUT_STATS,
    independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS)

same_location_source1 = SameLocation(
    filename=UKDALE_FILENAME,
    target_appliance=TARGET_APPLIANCE,
    window_per_building=WINDOW_PER_BUILDING,
    seq_length=SEQ_LENGTH,
    train_buildings=TRAIN_BUILDINGS,
    validation_buildings=VALIDATION_BUILDINGS,
    n_seq_per_batch=N_SEQ_PER_BATCH,
    skip_probability=SKIP_PROBABILITY_FOR_TARGET,
    target_is_start_and_end_and_mean=True,
    standardise_input=True,
    offset_probability=1,
    divide_target_by=MAX_TARGET_POWER,
    input_stats=INPUT_STATS,
    independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS)

multi_source = MultiSource(sources=[{
    'source': real_appliance_source1,
    'train_probability': 0.5,
    'validation_probability': 0
}, {
    'source': same_location_source1,
    'train_probability': 0.5,
Esempio n. 15
0
def exp_g(name):
    """Conv front-end followed by a dropout/BLSTM stack (variant 'b' + dropout).

    Builds a RealApplianceSource (synthetic mixes) and a SameLocation source
    (real data), combines them in a MultiSource — training draws 50/50 from
    both, validation only from real data — and returns a Net whose layers
    are: dimshuffle -> 'same'-mode conv over time -> dimshuffle -> dropout ->
    BLSTM(128) -> dropout -> BLSTM(256) -> dropout -> dense(128, tanh) ->
    dense(1, linear).

    Side effect: rebinds the module-level `multi_source` global.
    """
    # conv at beginning
    # b but with dropout
    logger = logging.getLogger(name)
    global multi_source

    # Local overrides of the module-level sequence/batch settings.
    SEQ_LENGTH = 256
    N_SEQ_PER_BATCH = 64

    real_appliance_source1 = RealApplianceSource(
        logger=logger,
        filename=UKDALE_FILENAME,
        appliances=[
            TARGET_APPLIANCE, ['fridge freezer', 'fridge', 'freezer'],
            'dish washer', 'kettle', ['washer dryer', 'washing machine']
        ],
        max_appliance_powers=[MAX_TARGET_POWER, 300, 2500, 2600, 2400],
        on_power_thresholds=[ON_POWER_THRESHOLD] + [10] * 4,
        min_on_durations=[MIN_ON_DURATION, 60, 1800, 12, 1800],
        min_off_durations=[MIN_OFF_DURATION, 12, 1800, 12, 600],
        divide_input_by_max_input_power=False,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        output_one_appliance=True,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=0.75,
        skip_probability_for_first_appliance=SKIP_PROBABILITY_FOR_TARGET,
        standardise_input=True,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        subsample_target=SUBSAMPLE_TARGET,
        input_padding=INPUT_PADDING)

    same_location_source1 = SameLocation(
        logger=logger,
        filename=UKDALE_FILENAME,
        target_appliance=TARGET_APPLIANCE,
        window_per_building=WINDOW_PER_BUILDING,
        seq_length=SEQ_LENGTH,
        train_buildings=TRAIN_BUILDINGS,
        validation_buildings=VALIDATION_BUILDINGS,
        n_seq_per_batch=N_SEQ_PER_BATCH,
        skip_probability=SKIP_PROBABILITY_FOR_TARGET,
        standardise_input=True,
        offset_probability=1,
        divide_target_by=MAX_TARGET_POWER,
        input_stats=INPUT_STATS,
        independently_center_inputs=INDEPENDENTLY_CENTER_INPUTS,
        on_power_threshold=ON_POWER_THRESHOLD,
        min_on_duration=MIN_ON_DURATION,
        min_off_duration=MIN_OFF_DURATION,
        include_all=True,
        allow_incomplete=True,
        subsample_target=SUBSAMPLE_TARGET,
        input_padding=INPUT_PADDING)

    # 50/50 synthetic/real mix for training; validation uses real data only.
    multi_source = MultiSource(sources=[{
        'source': real_appliance_source1,
        'train_probability': 0.5,
        'validation_probability': 0
    }, {
        'source': same_location_source1,
        'train_probability': 0.5,
        'validation_probability': 1
    }],
                               standardisation_source=same_location_source1)

    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(
        dict(
            auto_reshape=True,
            experiment_name=name,
            source=multi_source,
            plotter=Plotter(n_seq_to_plot=32, n_training_examples_to_plot=16),
            layers_config=[
                {
                    'type': DimshuffleLayer,
                    'pattern': (0, 2, 1)  # (batch, features, time)
                },
                {
                    'type': Conv1DLayer,  # convolve over the time axis
                    'num_filters': 16,
                    'filter_size': 4,
                    'stride': 1,
                    'nonlinearity': None,
                    'border_mode': 'same'
                },
                {
                    'type': DimshuffleLayer,
                    'pattern': (0, 2, 1),  # back to (batch, time, features)
                    'label': 'dimshuffle3'
                },
                {
                    'type': DropoutLayer
                },
                {
                    'type': BLSTMLayer,
                    'num_units': 128,
                    'merge_mode': 'concatenate'
                },
                {
                    'type': DropoutLayer
                },
                {
                    'type': BLSTMLayer,
                    'num_units': 256,
                    'merge_mode': 'concatenate'
                },
                {
                    'type': DropoutLayer
                },
                {
                    'type': DenseLayer,
                    'num_units': 128,
                    'nonlinearity': tanh
                },
                {
                    # Single linear output unit per time step.
                    'type': DenseLayer,
                    'num_units': 1,
                    'nonlinearity': None
                }
            ]))
    net = Net(**net_dict_copy)
    return net