# Assumed imports for the Keras 0.x-era API used throughout this module; the
# project-specific pieces (RLSTM, ReducedLSTMA, model_savedir) are expected to
# be defined or imported elsewhere in the project.
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Dropout, TimeDistributedDense
from keras.layers.noise import GaussianNoise
from keras.layers.recurrent import LSTM
from keras.optimizers import RMSprop, SGD
from keras.regularizers import l2
from keras.callbacks import EarlyStopping, ModelCheckpoint


def build_rlstm2(input_dim, h0_dim, h1_dim, output_dim=1, lstm_init='zero',
                 lr=.001, base_name='rlstm', add_input_noise=True,
                 add_target_noise=False):
    model = Sequential()
    if add_input_noise:
        model.add(GaussianNoise(.1, input_shape=(None, input_dim)))
    model.add(RLSTM(input_dim, h0_dim, h1_dim, output_dim,
                    init=lstm_init,
                    W_h0_regularizer=l2(0.0005),
                    W_h1_regularizer=l2(0.0005),
                    return_sequences=True))
    # if add_target_noise:
    #     model.add(GaussianNoise(5.))
    model.compile(loss="mse", optimizer=RMSprop(lr=lr))
    model.base_name = base_name
    yaml_string = model.to_yaml()
    # print(yaml_string)
    with open(model_savedir + model.base_name + '.yaml', 'w') as f:
        f.write(yaml_string)
    return model

def build_rlstm(input_dim, h0_dim=40, h1_dim=None, output_dim=1,
                rec_layer_type=ReducedLSTMA, rec_layer_init='zero',
                fix_b_f=False, layer_type=TimeDistributedDense, lr=.001,
                base_name='rlstm', add_input_noise=True, add_target_noise=True):
    model = Sequential()
    if add_input_noise:
        model.add(GaussianNoise(.1, input_shape=(None, input_dim)))
    model.add(layer_type(h0_dim,
                         input_dim=input_dim,
                         init='uniform_small',
                         W_regularizer=l2(0.0005),
                         activation='tanh'))
    if h1_dim is not None:
        model.add(layer_type(h1_dim,
                             init='uniform_small',
                             W_regularizer=l2(0.0005),
                             activation='tanh'))
    model.add(rec_layer_type(output_dim,
                             init=rec_layer_init,
                             fix_b_f=fix_b_f,
                             return_sequences=True))
    if add_target_noise:
        model.add(GaussianNoise(5.))
    model.compile(loss="mse", optimizer=RMSprop(lr=lr))
    model.base_name = base_name
    yaml_string = model.to_yaml()
    # print(yaml_string)
    with open(model_savedir + model.base_name + '.yaml', 'w') as f:
        f.write(yaml_string)
    return model

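# A minimal usage sketch for build_rlstm, assuming the project-specific
# ReducedLSTMA layer and model_savedir are available in this module; the
# array shapes and layer sizes below are purely illustrative.
import numpy as np

X_seq = np.random.randn(32, 50, 8).astype('float32')   # (samples, timesteps, features)
y_seq = np.random.randn(32, 50, 1).astype('float32')   # one target per timestep

rlstm = build_rlstm(input_dim=8, h0_dim=40, h1_dim=20,
                    add_input_noise=True, add_target_noise=False)
rlstm.fit(X_seq, y_seq, batch_size=8, nb_epoch=2, verbose=2)
pred_seq = rlstm.predict(X_seq)   # shape (32, 50, 1), since return_sequences=True
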
def build_rnn(in_dim, out_dim, h0_dim, h1_dim=None, layer_type=LSTM,
              return_sequences=False):
    model = Sequential()
    model.add(layer_type(h0_dim,
                         input_shape=(None, in_dim),
                         return_sequences=(h1_dim is not None or return_sequences)))
    if h1_dim is not None:
        model.add(layer_type(h1_dim, return_sequences=return_sequences))
    if return_sequences:
        model.add(TimeDistributedDense(out_dim))
    else:
        model.add(Dense(out_dim))
    model.add(Activation("linear"))
    model.compile(loss="mse", optimizer="rmsprop")
    return model

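# A small sketch of how build_rnn behaves with and without return_sequences,
# using only standard Keras 0.x layers; the dimensions are illustrative.
import numpy as np

X_seq = np.random.randn(16, 30, 5).astype('float32')   # (samples, timesteps, features)

# Sequence-to-one: a Dense head on the final recurrent state.
rnn_last = build_rnn(in_dim=5, out_dim=1, h0_dim=32, return_sequences=False)
print(rnn_last.predict(X_seq).shape)   # expected (16, 1)

# Sequence-to-sequence: TimeDistributedDense head, one output per timestep.
rnn_seq = build_rnn(in_dim=5, out_dim=1, h0_dim=32, h1_dim=16,
                    return_sequences=True)
print(rnn_seq.predict(X_seq).shape)    # expected (16, 30, 1)
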
def build_reduced_lstm(input_dim, h0_dim=40, h1_dim=None, output_dim=1,
                       rec_layer_type=ReducedLSTMA, rec_layer_init='uniform',
                       layer_type=TimeDistributedDense, lr=.001,
                       base_name='rlstm'):
    model = Sequential()
    model.add(layer_type(h0_dim,
                         input_dim=input_dim,
                         init='uniform',
                         W_regularizer=l2(0.0005),
                         activation='relu'))
    if h1_dim is not None:
        model.add(layer_type(h1_dim,
                             init='uniform',
                             W_regularizer=l2(0.0005),
                             activation='relu'))
    # model.add(LSTM(h0_dim,
    #                input_dim=input_dim,
    #                init='uniform',
    #                inner_activation='sigmoid',
    #                return_sequences=True))
    # model.add(Dropout(0.4))
    # if h1_dim is not None:
    #     model.add(LSTM(h1_dim,
    #                    init='uniform',
    #                    inner_activation='sigmoid',
    #                    return_sequences=True))
    #     model.add(Dropout(0.4))
    model.add(rec_layer_type(output_dim,
                             init=rec_layer_init,
                             return_sequences=True))
    model.compile(loss="mse", optimizer=RMSprop(lr=lr))
    model.base_name = base_name
    yaml_string = model.to_yaml()
    # print(yaml_string)
    with open(model_savedir + model.base_name + '.yaml', 'w') as f:
        f.write(yaml_string)
    return model

def train_model(dataset, h0_dim, h1_dim, out_dim):
    # Sequence-to-sequence regression with a single recurrent layer.
    # RNN is assumed to be bound elsewhere to a Keras recurrent layer class
    # (e.g. SimpleRNN or LSTM); note that h1_dim is accepted but unused here.
    X_train, y_train, X_test, y_test = dataset
    batch_size = 128
    nb_epoch = 100
    model = Sequential()
    model.add(RNN(h0_dim,
                  input_shape=(None, X_train.shape[-1]),
                  return_sequences=True))
    model.add(TimeDistributedDense(out_dim))
    model.add(Activation("linear"))
    model.compile(loss="mse", optimizer="rmsprop")
    #model.get_config(verbose=1)
    #yaml_string = model.to_yaml()
    #with open('ifshort_mlp.yaml', 'w') as f:
    #    f.write(yaml_string)
    early_stopping = EarlyStopping(monitor='val_loss', patience=10)
    checkpointer = ModelCheckpoint(filepath="/tmp/ifshort_rnn_weights.hdf5",
                                   verbose=1, save_best_only=True)
    model.fit(X_train, y_train,
              batch_size=batch_size, nb_epoch=nb_epoch,
              show_accuracy=False, verbose=2,
              validation_data=(X_test, y_test),
              callbacks=[early_stopping, checkpointer])

def train_model(dataset, h0_dim, h1_dim, y_dim):
    # MLP variant: flat feature vectors in, a regression target out.
    # Note that the model is compiled with sgd; rms is created but not used.
    X_train, y_train, X_test, y_test = dataset
    batch_size = 512
    nb_epoch = 100
    model = Sequential()
    model.add(Dense(h0_dim, input_shape=(X_train.shape[1],), init='uniform',
                    W_regularizer=l2(0.0005), activation='relu'))
    model.add(Dense(h1_dim, init='uniform', W_regularizer=l2(0.0005),
                    activation='relu'))
    model.add(Dense(y_dim, init='uniform', W_regularizer=l2(0.0005)))
    rms = RMSprop()
    sgd = SGD(lr=0.01, decay=1e-4, momentum=0.6, nesterov=False)
    model.compile(loss='mse', optimizer=sgd)
    #model.get_config(verbose=1)
    #yaml_string = model.to_yaml()
    #with open('ifshort_mlp.yaml', 'w') as f:
    #    f.write(yaml_string)
    early_stopping = EarlyStopping(monitor='val_loss', patience=10)
    checkpointer = ModelCheckpoint(filepath="/tmp/ifshort_mlp_weights.hdf5",
                                   verbose=1, save_best_only=True)
    model.fit(X_train, y_train,
              batch_size=batch_size, nb_epoch=nb_epoch,
              show_accuracy=False, verbose=2,
              validation_data=(X_test, y_test),
              callbacks=[early_stopping, checkpointer])

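# A usage sketch for the MLP variant of train_model, assuming a dataset tuple
# of flat feature matrices; the synthetic arrays below stand in for a real
# train/test split.
import numpy as np

X_train = np.random.randn(2000, 79).astype('float32')
y_train = np.random.randn(2000, 1).astype('float32')
X_test = np.random.randn(500, 79).astype('float32')
y_test = np.random.randn(500, 1).astype('float32')

# Trains an h0_dim-h1_dim-1 MLP with early stopping and checkpointing to /tmp.
train_model((X_train, y_train, X_test, y_test), h0_dim=80, h1_dim=40, y_dim=1)
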
def build_mlp(in_dim, out_dim, h0_dim, h1_dim, learning_rate=.01):
    model = Sequential()
    model.add(Dense(h0_dim, input_shape=(in_dim,), init='uniform',
                    W_regularizer=l2(0.0005), activation='relu'))
    model.add(Dense(h1_dim, init='uniform', W_regularizer=l2(0.0005),
                    activation='relu'))
    model.add(Dense(out_dim, init='uniform', W_regularizer=l2(0.0005)))
    # sgd = SGD(lr=0.01, decay=1e-4, momentum=0.6, nesterov=False)
    sgd = SGD(lr=learning_rate, decay=1e-14, momentum=0.6, nesterov=False)
    model.compile(loss='mse', optimizer=sgd)
    return model

def build_mlp(in_dim, out_dim, h0_dim, h1_dim, optimizer='rmsprop'):
    model = Sequential()
    model.add(Dense(h0_dim, input_shape=(in_dim,), init='uniform',
                    W_regularizer=l2(0.0005), activation='relu'))
    model.add(Dense(h1_dim, init='uniform', W_regularizer=l2(0.0005),
                    activation='relu'))
    model.add(Dense(out_dim, init='uniform', W_regularizer=l2(0.0005)))
    # sgd = SGD(lr=0.01, decay=1e-4, momentum=0.6, nesterov=False)
    # sgd = SGD(lr=learning_rate, decay=1e-24, momentum=0.6, nesterov=False)
    model.compile(loss='mse', optimizer=optimizer)
    # model.get_config(verbose=1)
    yaml_string = model.to_yaml()
    with open(model_savedir + 'mlp.yaml', 'w') as f:
        f.write(yaml_string)
    return model

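# A sketch of the two ways this build_mlp can be configured: with the default
# 'rmsprop' string, or with an explicit optimizer instance. Assumes
# model_savedir is defined, since build_mlp writes the model YAML there;
# the dimensions are illustrative.
mlp_default = build_mlp(in_dim=79, out_dim=1, h0_dim=80, h1_dim=40)
mlp_custom = build_mlp(in_dim=79, out_dim=1, h0_dim=80, h1_dim=40,
                       optimizer=RMSprop(lr=1e-4))
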
## convert class vectors to binary class matrices
#Y_train = np_utils.to_categorical(y_train, nb_classes)
#Y_test = np_utils.to_categorical(y_test, nb_classes)

from pylearn2.datasets.if_monthly import IFMonthlyLong

train = IFMonthlyLong(target_type='ASV', gain_range=[0, 10], which_set='train')
X_train = train.X
y_train = train.y
test = IFMonthlyLong(target_type='ASV', gain_range=[0, 10], which_set='test')
X_test = test.X
y_test = test.y

model = Sequential()
model.add(Dense(80, input_shape=(79,),
                init='uniform',
                # init=lambda shape: uniform(shape, scale=0.05),
                W_regularizer=l2(0.0005),
                activation='relu'))
#model.add(Dropout(0.2))
model.add(Dense(40,
                init='uniform',
                # init=lambda shape: uniform(shape, scale=0.05),
                W_regularizer=l2(0.0005),
                activation='relu'))
#model.add(Dropout(0.2))
model.add(Dense(1,
                init='uniform',
                # init=lambda shape: uniform(shape, scale=0.005),
                W_regularizer=l2(0.0005)))
rms = RMSprop()