Example #1
#### NEW MODEL ####

# Hyperparameters for training the front-end together with the L41 separator.
# config_model is assumed to be a dict built earlier in the script.
config_model["type"] = "L41_train_front"
learning_rate = 0.01
batch_size = 8
config_model["chunk_size"] = 512 * 40
config_model["batch_size"] = batch_size
config_model["alpha"] = learning_rate

# Build the Adapt model (no pretraining) and create its checkpoint saver.
model = Adapt(config_model=config_model, pretraining=False)
model.create_saver()

# path and full_id are assumed to point at a previously pretrained checkpoint.
model.restore_model(path, full_id)

# Plug the L41 separator between the restored front-end and the decoder.
model.connect_only_front_to_separator(L41Model)

# Initialize only the variables that were not restored from the checkpoint.
init = model.non_initialized_variables()
model.sess.run(init)

print('Total name:')
print(model.runID)

# nb_iterations = 500
# mixed_data is assumed to be the dataset object that provides the batches.
mixed_data.adjust_split_size_to_batchsize(batch_size)
nb_batches = mixed_data.nb_batches(batch_size)
nb_epochs = 40

time_spent = [0 for _ in range(5)]
print('NB BATCHES =', nb_batches)
print('NB ITERATIONS =', nb_batches * nb_epochs)
print('NB SAVE =', (nb_batches * nb_epochs) // 20)
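
# --- Sketch (not part of the original example): the epoch/batch training loop
# --- that this setup leads into, modeled on the loop shown in Example #2.
# --- The mixed_data.get_batch() and model.train() calls are taken from
# --- Example #2; the epoch/step bookkeeping here is an assumption.
for epoch in range(nb_epochs):
    for b in range(nb_batches):
        step = epoch * nb_batches + b
        X_in, X_mix, Ind = mixed_data.get_batch(batch_size)
        c = model.train(X_mix, X_in, learning_rate, step, ind_train=Ind)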
Example #2
import os

from models.adapt import Adapt
from models.das import DAS
import config
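
# --- Sketch (not part of the original example): a minimal config_model,
# --- with keys taken from Example #1. All values are placeholder assumptions;
# --- batch_size and learning_rate are also defined here because the rest of
# --- the snippet uses them.
batch_size = 8
learning_rate = 0.01
config_model = {}
config_model["type"] = "DAS_train_front"  # assumed from the 'folder' value below
config_model["chunk_size"] = 512 * 40
config_model["batch_size"] = batch_size
config_model["alpha"] = learning_rate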

# idd is assumed to be a run-identifier suffix defined earlier in the script.
full_id = 'soft-base-9900' + idd

folder = 'DAS_train_front'

# Build the Adapt model (no pretraining) and create its checkpoint saver.
model = Adapt(config_model=config_model, pretraining=False)
model.create_saver()

# Restore the pretrained front-end weights from the checkpoint directory.
path = os.path.join(config.workdir, 'floydhub_model', 'pretraining')
# path = os.path.join(config.log_dir, "pretraining")
model.restore_model(path, full_id)

# Plug the DAS separator between the restored front-end and the decoder,
# then collect the variables that still need to be initialized.
model.connect_only_front_to_separator(DAS)

init = model.non_initialized_variables()

# Training the connected model (the front-end was pretrained and restored above).
nb_iterations = 1000

# Initialize only the variables that were not restored from the checkpoint.
model.sess.run(init)

# mixed_data is assumed to be the dataset object that provides the batches.
for i in range(nb_iterations):
    X_in, X_mix, Ind = mixed_data.get_batch(batch_size)
    c = model.train(X_mix, X_in, learning_rate, i, ind_train=Ind)
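    # --- Sketch (not part of the original example): log the cost returned by
    # --- model.train(), which the loop above computes but never uses.
    # --- Assumes c is a printable scalar.
    if i % 100 == 0:
        print('Iteration', i, '- training cost:', c)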