Example #1
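The snippets on this page are excerpted from neuralnilm experiment scripts and rely on names defined in the surrounding module. The import block below is a best-guess reconstruction of that context, not part of the original code; in particular Net, RealApplianceSource, BidirectionalRecurrentLayer, the scaled-identity Identity initializer, GRADIENT_STEPS, net_dict and source_dict are assumed to come from the experiment script itself.

from copy import deepcopy
from numpy import sqrt

import theano.tensor as T
from lasagne.nonlinearities import rectify, tanh, sigmoid, identity
from lasagne.init import Normal, Uniform
from lasagne.layers import (DenseLayer, Conv1DLayer, DimshuffleLayer,
                            BatchNormLayer)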
def exp_b(name):
    # ReLU hidden layers
    # linear output
    # output one appliance
    # 0% skip prob for first appliance
    # 100% skip prob for other appliances
    # input is diff
    global source  # reuse the existing source; rebuilding it is left disabled:
    # source_dict_copy = deepcopy(source_dict)
    # source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'] = [{
        'type': BidirectionalRecurrentLayer,
        'num_units': 50,
        'W_in_to_hid': Normal(std=1),
        'W_hid_to_hid': Identity(scale=0.5),
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': rectify,
        'learn_init': False,
        'precompute_input': False
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': Conv1DLayer,
        'num_filters': 50,
        'filter_length': 4,
        'stride': 4,
        'nonlinearity': rectify,
        'W': Normal(std=1 / sqrt(50))
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 100,
        'W_in_to_hid': Normal(std=1 / sqrt(50)),
        'W_hid_to_hid': Identity(scale=0.5),
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': rectify,
        'learn_init': False,
        'precompute_input': False
    }, {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': None,
        'W': Normal(std=1 / sqrt(100))
    }]
    net = Net(**net_dict_copy)
    return net
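Lasagne's recurrent layers consume and emit tensors shaped (batch, time, features), while Conv1DLayer convolves over the last axis of a (batch, channels, time) tensor. The DimshuffleLayer pairs around each Conv1DLayer exist purely to swap between those two layouts. A minimal numpy sketch of the two transposes (shapes are illustrative):

import numpy as np

batch, time, features = 16, 512, 50
rnn_out = np.zeros((batch, time, features))

# DimshuffleLayer pattern (0, 2, 1): swap the time and feature axes
conv_in = rnn_out.transpose(0, 2, 1)
assert conv_in.shape == (batch, features, time)

# the second (0, 2, 1) dimshuffle undoes the swap so the next recurrent
# layer again sees (batch, time, features)
rnn_in = conv_in.transpose(0, 2, 1)
assert rnn_in.shape == (batch, time, features)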
Example #2
def exp_b(name):
    """Stride-1 'same' conv front end, then two rectified bidirectional
    RNNs with a 4x temporal downsampling conv between them; softplus output.
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(experiment_name=name, source=source))
    net_dict_copy['layers_config'] = [{
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': Conv1DLayer,
        'num_filters': 10,
        'filter_length': 2,
        'stride': 1,
        'nonlinearity': rectify,
        'border_mode': 'same'
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': rectify,
        'W_hid_to_hid': Identity(scale=0.5)
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': Conv1DLayer,
        'num_filters': 40,
        'filter_length': 4,
        'stride': 4,
        'nonlinearity': rectify
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': rectify,
        'W_hid_to_hid': Identity(scale=0.5)
    }, {
        'type': DenseLayer,
        'num_units': source.n_outputs,
        'nonlinearity': T.nnet.softplus
    }]
    net = Net(**net_dict_copy)
    return net
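The output layer here uses T.nnet.softplus instead of a linear output: predicted appliance power should never be negative, and softplus(x) = log(1 + exp(x)) is a smooth, strictly positive function that behaves almost linearly for large x. A quick numpy illustration:

import numpy as np

def softplus(x):
    # numerically stable log(1 + exp(x))
    return np.logaddexp(0.0, x)

print(softplus(np.array([-5.0, 0.0, 5.0])))
# -> [ 0.00671535  0.69314718  5.00671535]: always positive, ~x for large x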
Example #3
def exp_a(name):
    # ReLU hidden layers
    # linear output
    # output one appliance
    # 0% skip prob for first appliance
    # 100% skip prob for other appliances
    # input is diff
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    net_dict_copy['layers_config'] = [
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 50,
            'W_in_to_hid': Normal(std=1),
            'W_hid_to_hid': Identity(scale=0.9),
            'nonlinearity': rectify,
            'learn_init': False,
            'precompute_input': True
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': None,
            'W': Normal(std=1/sqrt(50))
        }
    ]
    net = Net(**net_dict_copy)
    return net
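The recurrent weights are initialised with Identity(scale=0.9), which is the IRNN recipe (Le et al., 2015): start a ReLU RNN with its hidden-to-hidden matrix close to the identity so the state is approximately carried forward unchanged early in training. Assuming the Identity initializer simply returns a scaled identity matrix, it amounts to:

import numpy as np

def identity_init(num_units, scale=0.9):
    # assumed behaviour of the Identity initializer used above
    return scale * np.eye(num_units)

W_hid_to_hid = identity_init(50, scale=0.9)
# at initialisation h_t ~= relu(W_in.dot(x_t) + 0.9 * h_{t-1}), so each
# unit mostly feeds back onto itself and the state decays gently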
Example #4
        # 3000: 1e-05
        # 7000: 5e-06,
        # 10000: 1e-06,
        # 15000: 5e-07,
        # 50000: 1e-07
    },
    do_save_activations=True,
    #    auto_reshape=False,
    #    plotter=CentralOutputPlotter
    #    plotter=MDNPlotter
    layers_config=[{
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': rectify,
        'W_hid_to_hid': Identity(scale=0.1),
        'batch_norm': True
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': rectify,
        'W_hid_to_hid': Identity(scale=0.1),
        'batch_norm': True
    }])


def exp_a(name):
    global source
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
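Every recurrent layer in these configs passes gradient_steps=GRADIENT_STEPS. In Lasagne this is the truncated backpropagation-through-time setting (it is handed to theano.scan as truncate_gradient): gradients flow back at most that many timesteps, which bounds compute per update and tames exploding gradients at the cost of longer-range credit assignment. A toy numpy illustration of the effect on the backpropagated Jacobian product:

import numpy as np

T_steps, K = 500, 100          # full sequence length vs. GRADIENT_STEPS
J = 1.1 * np.eye(2)            # toy, mildly expanding per-step Jacobian

full = np.linalg.matrix_power(J, T_steps)   # ~5e20: full BPTT explodes
trunc = np.linalg.matrix_power(J, K)        # ~1.4e4: truncation bounds it
print(np.abs(full).max(), np.abs(trunc).max())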
Example #5
def exp_c(name):
    """
    tanh all the way through.  Identity init of RNNs
    """
    
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': Conv1DLayer,
            'num_filters': 40,
            'filter_length': 2,
            'stride': 1,
            'nonlinearity': identity,
            'b': None,
            'border_mode': 'same'
        },
        {
            'type': BatchNormLayer,
            'nonlinearity': tanh
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 40,
            'gradient_steps': GRADIENT_STEPS,
            'nonlinearity': identity,
            'b': None,
            'W_hid_to_hid': Identity(scale=0.5),
            'learn_init': True,
            'precompute_input': False
        },
        {
            'type': BatchNormLayer,
            'axes': (0, 1),
            'nonlinearity': tanh
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': Conv1DLayer,
            'num_filters': 40,
            'filter_length': 4,
            'stride': 4,
            'nonlinearity': identity,
            'b': None
        },
        {
            'type': BatchNormLayer,
            'nonlinearity': tanh
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 40,
            'gradient_steps': GRADIENT_STEPS,
            'nonlinearity': identity,
            'b': None,
            'W_hid_to_hid': Identity(scale=0.5),
            'learn_init': True,
            'precompute_input': False
        },
        {
            'type': BatchNormLayer,
            'axes': (0, 1),
            'nonlinearity': tanh
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': T.nnet.softplus
        }
    ]
    net = Net(**net_dict_copy)
    return net
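Note the recurring pattern in exp_c: every conv and recurrent layer is purely linear ('nonlinearity': identity, 'b': None) and the following BatchNormLayer applies the tanh. For the recurrent layers the BN uses axes=(0, 1), so statistics are pooled over batch and time together, one mean and variance per feature channel. A numpy sketch of that normalisation (1e-4 is Lasagne's default epsilon):

import numpy as np

x = np.random.randn(16, 512, 40)             # (batch, time, features)
mean = x.mean(axis=(0, 1), keepdims=True)    # one statistic per channel
var = x.var(axis=(0, 1), keepdims=True)
out = np.tanh((x - mean) / np.sqrt(var + 1e-4))   # normalise, then tanh
assert out.shape == x.shape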
Example #6
        # 7000: 5e-06,
        # 10000: 1e-06,
        # 15000: 5e-07,
        # 50000: 1e-07
    },
    do_save_activations=True,
    #    auto_reshape=False,
    #    plotter=CentralOutputPlotter
    #    plotter=MDNPlotter
    layers_config=[{
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': identity,
        'b': None,
        'W_hid_to_hid': Identity(scale=0.5)
    }, {
        'type': BatchNormLayer,
        'axes': (0, 1),
        'nonlinearity': rectify
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': identity,
        'b': None,
        'W_hid_to_hid': Identity(scale=0.5)
    }, {
        'type': BatchNormLayer,
        'axes': (0, 1),
        'nonlinearity': rectify
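As in the previous example, the layers feeding batch norm set 'b': None. A bias is redundant there: BN subtracts the activation mean, which cancels any constant offset, and then adds its own learned beta. A quick numerical check:

import numpy as np

x = np.random.randn(1000)
ref = (x - x.mean()) / x.std()
for b in (0.0, 3.0, -7.5):
    y = x + b                                # any constant bias
    print(np.allclose((y - y.mean()) / y.std(), ref))   # True every time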
Example #7
    }, {
        'type': Conv1DLayer,
        'num_filters': 20,
        'filter_length': 2,
        'stride': 1,
        'nonlinearity': rectify,
        'border_mode': 'same'
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
        'gradient_steps': GRADIENT_STEPS,
        'nonlinearity': rectify,
        'W_hid_to_hid': Identity()
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': Conv1DLayer,
        'num_filters': 40,
        'filter_length': 4,
        'stride': 4,
        'nonlinearity': rectify
    }, {
        'type': DimshuffleLayer,
        'pattern': (0, 2, 1)
    }, {
        'type': BidirectionalRecurrentLayer,
        'num_units': 40,
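Two convolution patterns appear in this fragment: a filter_length=2, stride=1, 'same' conv that preserves sequence length, and a filter_length=4, stride=4 conv whose non-overlapping windows act as a learned 4x downsampler. The length arithmetic, sketched with the old Lasagne/Theano border_mode convention used above:

def conv1d_out_len(n, filter_length, stride, border_mode='valid'):
    if border_mode == 'same':
        n = n + filter_length - 1    # padded so stride-1 output length == n
    return (n - filter_length) // stride + 1

print(conv1d_out_len(512, 2, 1, 'same'))   # 512: length preserved
print(conv1d_out_len(512, 4, 4))           # 128: 4x temporal downsampling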
Example #8
def exp_b(name):
    # same architecture as experiment e59, but with ReLU nonlinearities
    source_dict_copy = deepcopy(source_dict)
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source,
        learning_rate=1e-3,
        learning_rate_changes_by_iteration={
            1000: 1e-4,
            2000: 1e-5
        }
    ))
    net_dict_copy['layers_config'] = [
        {
            'type': DenseLayer,
            'num_units': 50,
            'nonlinearity': rectify,
            'W': Uniform(25),
            'b': Uniform(25)
        },
        {
            'type': DenseLayer,
            'num_units': 50,
            'nonlinearity': rectify,
            'W': Uniform(10),
            'b': Uniform(10)
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 40,
            'W_in_to_hid': Uniform(5),
            'gradient_steps': GRADIENT_STEPS,
            'nonlinearity': rectify,
            'learn_init': False,
            'precompute_input': False,
            'W_hid_to_hid': Identity(0.5)
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': Conv1DLayer,
            'num_filters': 20,
            'filter_length': 4,
            'stride': 4,
            'nonlinearity': rectify
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)
        },
        {
            'type': BidirectionalRecurrentLayer,
            'num_units': 80,
            'W_in_to_hid': Uniform(5),
            'W_hid_to_hid': Identity(0.5),
            'gradient_steps': GRADIENT_STEPS,
            'nonlinearity': rectify,
            'learn_init': False,
            'precompute_input': False
        },
        {
            'type': DenseLayer,
            'num_units': source.n_outputs,
            'nonlinearity': sigmoid
        }
    ]
    net = Net(**net_dict_copy)
    return net
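exp_b also overrides the optimiser settings: learning_rate_changes_by_iteration is a step schedule, 1e-3 until iteration 1000, then 1e-4, then 1e-5 from iteration 2000 onwards. Assuming the trainer simply swaps in the new rate once each boundary is reached, the schedule is equivalent to:

LR_CHANGES = {1000: 1e-4, 2000: 1e-5}

def lr_at(iteration, base_lr=1e-3, changes=LR_CHANGES):
    # step schedule: use the rate of the last boundary already passed
    lr = base_lr
    for boundary in sorted(changes):
        if iteration >= boundary:
            lr = changes[boundary]
    return lr

assert lr_at(0) == 1e-3 and lr_at(1500) == 1e-4 and lr_at(5000) == 1e-5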