Example #1
                saveto='weights/lstm_exeqrep.npz',
                arch_output_fn='logistic')
    # RUN HPM
    else:
        import hpm_0102
        for i in range(1, n_datasets + 1):
            filename = '../data/exeqrep/exeqrep_per_student_' + str(i)
            te[i - 1], tll[i - 1], tauc[i - 1] = hpm_0102.train_model(
                encoder='hpm',
                show_weights=False,
                arch_hpm_gated=True,
                arch_remap_input=True,
                arch_hpm_recurrent=True,
                arch_hpm_prior_exp=False,
                #arch_hpm_gamma_scaled_mu=True,
                timescales=2.0**numpy.arange(-11, 7),
                valid_portion=3.0 / (32.0 - 4.0),
                valid_freq=5,
                maxlen=50000,
                patience=150,
                n_hid=60,
                saveto='weights/hpm_0102_exeqrep.npz',
                data_file=filename,
                arch_output_fn='logistic')
elif (0):
    import hpm_030517
    for i in range(1, n_datasets + 1):
        filename = '../data/exeqrep/exeqrep_per_student_' + str(i)
        te[i - 1], tll[i - 1], tauc[i - 1] = hpm_030517.train_model(
            encoder='hpm',
            show_weights=False,
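
The per-student loops above store one result triple per dataset, which suggests te, tll, and tauc are pre-allocated arrays of length n_datasets (presumably test error, test log-likelihood, and test AUC). A minimal sketch of that setup and of summarizing the results afterwards, assuming numpy arrays and a hypothetical dataset count, neither of which is shown in the excerpt:

import numpy

n_datasets = 10                   # hypothetical; the real value is defined elsewhere
te = numpy.zeros(n_datasets)      # per-dataset test error
tll = numpy.zeros(n_datasets)     # per-dataset test metric (presumably log-likelihood)
tauc = numpy.zeros(n_datasets)    # per-dataset test metric (presumably AUC)
# ... run one of the training loops shown above to fill the arrays ...
print('mean te:', te.mean(), 'mean tll:', tll.mean(), 'mean tauc:', tauc.mean())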
Example #2
import numpy

filename = '../data/synthetic_hp_extrapolation/hp_10streams'

# RUN LSTM WITH ADDITIONAL INPUTS
if 1:
    import hpm_0102
    tr, va, te = hpm_0102.train_model(encoder='lstm',
                                      arch_lstm_include_delta_t=True,
                                      valid_portion=.15,
                                      valid_freq=1,
                                      maxlen=1000,
                                      patience=25,
                                      n_hid=20,
                                      data_file=filename,
                                      saveto='weights/lstm_hp_10streams.npz',
                                      arch_output_fn='softmax')
    print(1 - te)

# RUN HPM
if 0:
    import hpm_0102
    tr, va, te = hpm_0102.train_model(
        encoder='hpm',
        show_weights=False,
        arch_hpm_gated=True,  # DEBUG ***********
        arch_input_map_constraint='none',
        arch_hpm_recurrent=True,
        arch_hpm_prior_exp=False,
        arch_hpm_alpha_constraint='strong',  # CHEAT!
        timescales=2.0**numpy.arange(0, 13),
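
For reference, numpy.arange excludes its upper endpoint, so the timescales grid above expands to the 13 powers of two from 1 to 4096. A quick standalone check, not part of the original script:

import numpy

ts = 2.0 ** numpy.arange(0, 13)   # exponents 0..12, endpoint excluded
print(len(ts), ts[0], ts[-1])     # 13 1.0 4096.0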
Example #3
                                 valid_freq=1,
                                 maxlen=1000,
                                 patience=20,
                                 n_hid=20,
                                 data_file=filename,
                                 saveto='weights/hpm_synthetic_cluster.npz',
                                 arch_output_fn='logistic')
if 0:
    import hpm_0102
    print "RUNNING LSTM"
    tr, va, te = hpm_0102.train_model(
        encoder='lstm',
        arch_lstm_include_delta_t=True,
        valid_portion=.15,
        valid_freq=1,
        maxlen=1000,
        patience=20,
        n_hid=20,
        data_file=filename,
        saveto='weights/lstm_synthetic_cluster.npz',
        arch_output_fn='logistic')
if 0:
    import gru_5_0 as gru
    print "RUNNING GRU WITH DELTA T"
    tr, va, te = gru.train_model(encoder='gru',
                                 arch_gru_include_delta_t=True,
                                 valid_portion=.15,
                                 valid_freq=1,
                                 maxlen=1000,
                                 patience=20,
                                 n_hid=20,
Example #4
                        arch_output_fn='softmax')
    print(1 - te)

# RUN HPM
if 0:
    import hpm_0102
    tr, va, te = hpm_0102.train_model(encoder='hpm',
                        show_weights=False,
                        arch_hpm_gated=True, 
                        arch_input_map_constraint='none', # NOTE
                        arch_hpm_alpha_constraint='none', # NOTE
                        arch_hpm_recurrent=True, 
                        arch_hpm_prior_exp=False,
                        timescales=2.0**numpy.arange(-7,7),
                        # NOTE: arange(-7,7) = -7:6
                        #timescales=[1./60./60./24., 1./60./24., 1./24., 1., 30.],
                        #timescales=10.0**numpy.arange(-4,3),
                        valid_portion=.15,
                        valid_freq=1,
                        maxlen=1000,
                        patience=25,
                        n_hid=50, 
                        data_file=filename,
                        saveto='weights/hpm_0102_reddit.npz',
                        arch_output_fn='softmax')
    print(1 - te)

# RUN HPM
if 0:
    import hpm_030517
    tr, va, te = hpm_030517.train_model(encoder='hpm',
Example #5
import numpy
import hpm_0102

# RUN LSTM
if 1:
    te, tll, tauc = hpm_0102.train_model(
        encoder='lstm',
        arch_lstm_include_delta_t=True,
        arch_lstm_include_input_gate=True,
        arch_lstm_include_forget_gate=True,
        arch_lstm_include_output_gate=True,
        valid_portion=.15,
        data_file='../data/synthetic_music/5streams',
        valid_freq=5,
        patience=100,
        n_hid=5,
        saveto='weights/lstm_music.npz',
        arch_output_fn='softmax')

# RUN HPM
if 0:
    te, tll, tauc = hpm_0102.train_model(
        encoder='hpm',
        valid_portion=.15,
        show_weights=False,
        arch_hpm_gated=True,
        arch_remap_input=True,
        arch_hpm_recurrent=True,
        #arch_hpm_gamma_scaled_mu=True,
        #arch_hpm_gamma_scaled_alpha=True,
        arch_hpm_prior_exp=False,
Example #6
                                         valid_freq=5,
                                         patience=100,
                                         n_hid=25,
                                         saveto='weights/lstm_msnbc.npz',
                                         arch_output_fn='softmax')

# RUN HPM 0102
if 0:
    import hpm_0102
    te, tll, tauc = hpm_0102.train_model(encoder='hpm',
                                         valid_portion=.15,
                                         show_weights=False,
                                         arch_hpm_gated=True,
                                         arch_remap_input=True,
                                         arch_hpm_recurrent=True,
                                         arch_hpm_prior_exp=False,
                                         timescales=2.0**numpy.arange(0, 7),
                                         data_file='../data/msnbc/msnbc',
                                         valid_freq=5,
                                         patience=100,
                                         n_hid=25,
                                         saveto='weights/hpm_msnbc.npz',
                                         arch_output_fn='softmax')

# RUN HPM 031117
if 0:
    import hpm_031117  # version with mixture of time scales
    te, tll, tauc = hpm_031117.train_model(
        encoder='hpm',
        show_weights=False,
        arch_hpm_gated=True,
        arch_input_map_constraint='none',
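
Every configuration above saves its learned parameters to an .npz file through the saveto argument. A minimal sketch of inspecting such a file with plain numpy (the stored key names depend on how train_model serializes its parameters, which is not shown in these excerpts):

import numpy

weights = numpy.load('weights/lstm_msnbc.npz')  # one of the saveto paths used above
for name in weights.files:                      # .files lists every array in the archive
    print(name, weights[name].shape)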