Example #1
class Adapt_Pretrainer(Trainer):
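	"""Trainer subclass that builds and pretrains the Adapt model."""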

	def __init__(self, **kwargs):
		super(Adapt_Pretrainer, self).__init__(trainer_type='pretraining', **kwargs)

	def build(self):
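		# Instantiate the Adapt model, set up TensorBoard logging, and initialize all variables.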
		self.model = Adapt(**self.args)
		self.model.tensorboard_init()
		self.model.init_all()
Example #2

import os

# Build the Adapt model in separation (non-pretraining) mode and create a saver
# so that previously trained weights can be restored.
model = Adapt(config_model=config_model, pretraining=False)
model.create_saver()

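# Restore the weights of a previous 'DPCL_train_front' run identified by full_id.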
path = os.path.join(config.model_root, 'log', 'DPCL_train_front')
model.restore_model(path, full_id)

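# Insert the DPCL separator between the model's front end and back end.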
model.connect_front_back_to_separator(DPCL)

with model.graph.as_default():
    model.create_saver()
    model.restore_model(path, full_id)
    # model.freeze_front()
    model.optimize  # presumably a property; accessing it builds the training op in the graph
    model.tensorboard_init()

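# Initialize only the variables that were not restored from the checkpoint.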
init = model.non_initialized_variables()

model.sess.run(init)

print('Total name:')
print(model.runID)

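# Align the dataset split with the batch size and compute the number of batches per epoch.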
# nb_iterations = 500
mixed_data.adjust_split_size_to_batchsize(batch_size)
nb_batches = mixed_data.nb_batches(batch_size)
nb_epochs = 1

time_spent = [0 for _ in range(5)]
Example #3
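# Model hyperparameters: alpha sets the learning rate; reg, beta, and rho weight the regularization terms.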
config_model["alpha"] = learning_rate
config_model["reg"] = 1e-3
config_model["beta"] = 0.1
config_model["rho"] = 0.01

config_model["same_filter"] = True
config_model["optimizer"] = 'Adam'

####
####

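# Build the Adapt model in pretraining mode, logging under the 'pretraining' folder.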
adapt_model = Adapt(config_model=config_model,
                    pretraining=True,
                    folder='pretraining')
adapt_model.tensorboard_init()
adapt_model.init()

print('Total name:')
print(adapt_model.runID)

# nb_iterations = 500
mixed_data.adjust_split_size_to_batchsize(batch_size)
nb_batches = mixed_data.nb_batches(batch_size)
nb_epochs = 2

time_spent = [0 for _ in range(5)]

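# Pretraining loop: draw a batch of non-mixed source signals at each step.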
for epoch in range(nb_epochs):
    for b in range(nb_batches):
        X_non_mix, _, _ = mixed_data.get_batch(batch_size)