#### Example 1
# Fine-tuning script: stack a DPCL (Deep Clustering) separator on top of a
# pre-trained Adapt front/back model, then restore weights and initialize
# only the new variables.  (Python 2 script: uses print statements.)
#
# NOTE(review): `config_model`, `chunk_size`, `config`, `full_id` and the
# classes `Adapt` / `DPCL` are assumed to be defined earlier in the
# surrounding script or notebook -- TODO confirm.
learning_rate = 0.001 
batch_size = 2
config_model["chunk_size"] = chunk_size
# "alpha" appears to be this project's key for the learning rate.
config_model["alpha"] = learning_rate
config_model["batch_size"] = batch_size
# NOTE(review): `folder` is not used anywhere in this snippet -- possibly
# consumed by code outside this view; verify before removing.
folder = 'DPCL_finetuning'



# pretraining=False: reuse saved weights instead of pre-training the front end.
model = Adapt(config_model=config_model, pretraining=False)
model.create_saver()

# Restore the previously trained front-end checkpoint identified by full_id.
path = os.path.join(config.model_root, 'log', 'DPCL_train_front')
model.restore_model(path, full_id)

# Insert the DPCL separator between the restored front and back ends.
model.connect_front_back_to_separator(DPCL)

with model.graph.as_default():
    # Saver creation and restore are repeated here because connecting the
    # separator modified the graph -- presumably the variable mapping must
    # be rebuilt inside the graph's context; confirm against Adapt's API.
    model.create_saver()
    model.restore_model(path, full_id)
    # model.freeze_front()
    # NOTE(review): bare attribute access -- presumably `optimize` is a
    # property whose getter builds the training op as a side effect; if it
    # is a plain method, this line is a no-op and should be
    # `model.optimize()`.  Confirm in the Adapt class.
    model.optimize
    model.tensorboard_init()

# Initialize only the variables the checkpoint did not cover (e.g. the
# freshly added separator and any optimizer slot variables).
init = model.non_initialized_variables()

model.sess.run(init)

print 'Total name :' 
print model.runID
#### Example 2

# Fine-tuning script: same pipeline as the DPCL example above, but with an
# L41 separator model and batch size 1.  (Python 2 script: uses print
# statements.)
#
# NOTE(review): `config_model`, `chunk_size`, `config`, `full_id` and the
# classes `Adapt` / `L41Model` are assumed to be defined earlier in the
# surrounding script or notebook -- TODO confirm.
config_model["type"] = "L41_finetuning"
learning_rate = 0.001 
batch_size = 1
config_model["chunk_size"] = chunk_size
# "alpha" appears to be this project's key for the learning rate.
config_model["alpha"] = learning_rate
config_model["batch_size"] = batch_size

# pretraining=False: reuse saved weights instead of pre-training the front end.
model = Adapt(config_model=config_model, pretraining=False)
model.create_saver()

# Restore the previously trained front-end checkpoint identified by full_id.
path = os.path.join(config.model_root, 'log', 'L41_train_front')
model.restore_model(path, full_id)

# Insert the L41 separator between the restored front and back ends.
model.connect_front_back_to_separator(L41Model)

with model.graph.as_default():
    # Saver creation and restore are repeated here because connecting the
    # separator modified the graph -- presumably the variable mapping must
    # be rebuilt inside the graph's context; confirm against Adapt's API.
    model.create_saver()
    model.restore_model(path, full_id)
    # model.freeze_front()
    # NOTE(review): bare attribute access -- presumably `optimize` is a
    # property whose getter builds the training op as a side effect; if it
    # is a plain method, this line is a no-op and should be
    # `model.optimize()`.  Confirm in the Adapt class.
    model.optimize
    model.tensorboard_init()

# Initialize only the variables the checkpoint did not cover (e.g. the
# freshly added separator and any optimizer slot variables).
init = model.non_initialized_variables()

model.sess.run(init)

print 'Total name :' 
print model.runID