Example #1
import pickle

from keras.models import Sequential
from keras.layers import Dense


def test_pickling_right_after_compilation():
    # Compile a small model (without training it) and make sure it survives a pickle round trip.
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    model.add(Dense(3))
    model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
    model._make_train_function()

    model = pickle.loads(pickle.dumps(model))
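A minimal follow-up sketch (not part of the original example) showing how one might check that the unpickled copy behaves identically; it assumes NumPy is installed and that the model built above is still in scope:

import numpy as np

x = np.random.random((4, 3))  # dummy batch matching input_shape=(3,)
restored = pickle.loads(pickle.dumps(model))
np.testing.assert_allclose(model.predict(x), restored.predict(x))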
Example #2
import os
import tempfile

from keras.models import Sequential, save_model, load_model
from keras.layers import Dense


def test_saving_right_after_compilation():
    # Compile a small model (without training it) and make sure it survives a save/load
    # round trip through an HDF5 file.
    model = Sequential()
    model.add(Dense(2, input_shape=(3,)))
    model.add(Dense(3))
    model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
    model._make_train_function()

    _, fname = tempfile.mkstemp('.h5')
    save_model(model, fname)
    model = load_model(fname)
    os.remove(fname)
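A minimal self-contained sketch (not part of the original example) confirming that the weights come back unchanged from the HDF5 round trip; it assumes NumPy and h5py are installed:

import os
import tempfile

import numpy as np
from keras.models import Sequential, save_model, load_model
from keras.layers import Dense

original = Sequential()
original.add(Dense(2, input_shape=(3,)))
original.add(Dense(3))
original.compile(loss='mse', optimizer='sgd', metrics=['acc'])

_, fname = tempfile.mkstemp('.h5')
save_model(original, fname)
reloaded = load_model(fname)
os.remove(fname)

# Every weight tensor should survive the save/load round trip unchanged.
for w_old, w_new in zip(original.get_weights(), reloaded.get_weights()):
    np.testing.assert_allclose(w_old, w_new)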
Example #3
# Classifier head: zero-initialized output layer with a custom masked_softmax activation
model.add(Flatten())
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.5))
# model.add(Dense(nb_classes))
model.add(Dense(nb_classes, kernel_initializer='zero', activation=masked_softmax))


# Define our training protocol
protocol_name, protocol = protocols.PATH_INT_PROTOCOL(omega_decay='sum', xi=1e-3)
opt = Adam(lr=learning_rate, beta_1=0.9, beta_2=0.999)
# opt = RMSprop(lr=1e-3)
# opt = SGD(1e-3)
# Wrap the base optimizer with the chosen protocol
oopt = KOOptimizer(opt, model=model, **protocol)
model.compile(loss='categorical_crossentropy', optimizer=oopt, metrics=['accuracy'])
model._make_train_function()

history = LossHistory()
callbacks = [history]
datafile_name = "split_cifar10_data_%s_lr%.2e_ep%i.pkl.gz" % (protocol_name, learning_rate, epochs_per_task)


def run_fits(cvals, training_data, valid_data, nstats=1):
    # For each value in cvals, train nstats runs from freshly initialized variables
    # and collect accuracy statistics.
    acc_mean = dict()
    acc_std = dict()
    for cidx, cval_ in enumerate(cvals):
        runs = []
        for runid in range(nstats):
            evals = []
            sess.run(tf.global_variables_initializer())