# Example 1 (score: 0)
def runCustomAutpEncoderWithParams(model_params,data):
	"""Train a forward model chained with a frozen pre-trained inverse model.

	The forward model's predictions are fed back through the inverse model so
	that reconstruction of the six input columns becomes part of the loss
	(autoencoder-style consistency training).

	Args:
		model_params: dict of hyper-parameters. Keys read include
			'num_epochs', 'DirectionForward', 'scaling', 'model_name',
			'batchSize', 'email', 'InputNames', 'OutputNames'.
			Mutated in place: Input/OutputNames are swapped when the
			direction is reversed, and the model-location keys
			('InverseModelLocation', 'ForwardModelLocation') are written.
		data: sequence with (AP_train, TRP_train) at index 0 and
			(AP_dev, TRP_dev) at index 1.

	Returns:
		Tuple (train, dev, test), each the per-output slice of the MSE
		vector returned by run_eval_base.
	"""
	np.random.seed(RANDOM_SEED)
	NUM_EPOCHS = model_params['num_epochs']

	# Unpack the train/dev datasets.
	AP_train, TRP_train = data[0]
	AP_dev, TRP_dev = data[1]

	# Select the modelling direction; when reversed, also swap the I/O name
	# lists so downstream reporting stays consistent with the data.
	if model_params["DirectionForward"]:
		X_train, Y_train, X_dev, Y_dev = TRP_train, AP_train, TRP_dev, AP_dev
	else:
		X_train, Y_train, X_dev, Y_dev = AP_train, TRP_train, AP_dev, TRP_dev
		model_params["OutputNames"], model_params["InputNames"] = model_params["InputNames"], model_params["OutputNames"]

	if model_params["scaling"]:
		X_train, X_dev, _ = prep.scaleData(X_train, X_dev)

	# Fixed checkpoint of the pre-trained inverse model (MSE-based run).
	model_params["InverseModelLocation"] = "models/Forward_BaseLine_MSE_run_0_time_2018_11_07_00_01_09.hdf5" # mse based
	model_params["ForwardModelLocation"] = os.path.join('models', model_params["model_name"] + "forward" + '.hdf5')

	model_location = os.path.join('models', model_params["model_name"] + '.hdf5')

	model_forward = customModels.DistNNMultiOutputModelCustom(model_params)

	# Load the inverse model from disk and freeze it so only the forward
	# model's weights are updated during training.
	model_reverse = run_eval.loadBestModel(model_params["InverseModelLocation"])
	model_reverse.trainable = False

	inputs = model_forward.input
	# FIX: feed the forward model's existing output tensors into the inverse
	# model instead of re-invoking the forward model on its own inputs
	# (model_reverse(model_forward(model_forward.inputs))), which duplicated
	# the forward call graph for no benefit.
	outputs = model_forward.outputs + model_reverse(model_forward.outputs)

	combined_model = keras.models.Model(inputs = inputs, outputs = outputs)

	keras.utils.plot_model(combined_model, to_file=os.path.join('images', model_params["model_name"] + '.png'), show_shapes=True)

	model_forward.save(model_params["ForwardModelLocation"])

	combined_model.compile(
	 #optimizer = keras.optimizers.Adadelta(lr = 1.0),
	 optimizer = keras.optimizers.RMSprop(lr = 0.001),
	 # Head 0 (main output) uses the custom loss; the remaining six heads
	 # reconstruct the six input columns with plain MSE.
	 loss = [custom_loss,"mse","mse","mse","mse","mse","mse"],
	 loss_weights = [1,0.1,0.1,0.1,0.3,0.3,0.3],
	 metrics = {"out_TRP":[custom_metroc_TRP,custom_loss_TRP]}
	)
	combined_model.save(model_location)

	# Callbacks setup / local param logging.
	print("Local Training Params:")
	customUtils.print_dic_params(model_params,False	,delimitor = "_",kvdelimitor = "_" )
	print("="*50)

	callback_list = customCallbacks.addCallBacks(model_params)

	# Targets: the main output plus a per-column reconstruction of the input.
	# NOTE(review): assumes X has exactly 6 feature columns — confirm against
	# the dataset produced upstream.
	input_train = [X_train]
	output_train = [Y_train, X_train[:,0], X_train[:,1], X_train[:,2], X_train[:,3], X_train[:,4], X_train[:,5]]
	input_dev = [X_dev]
	output_dev = [Y_dev, X_dev[:,0], X_dev[:,1], X_dev[:,2], X_dev[:,3], X_dev[:,4], X_dev[:,5]]

	results = combined_model.fit(
	 input_train, output_train,
	 epochs = NUM_EPOCHS,
	 batch_size = model_params["batchSize"],
	 validation_data = (input_dev, output_dev),
	 verbose = 2,
	 callbacks = callback_list,
	 shuffle = True
	)

	print("*"*50)
	print("Finished Training - Saving Models")
	print("*"*50)

	# Persist the hyper-parameters next to the model for reproducibility.
	with open(os.path.join('model_params', model_params["model_name"] + '.json'), 'w') as fp:
		json.dump(model_params, fp, sort_keys=True)

	mse_total_dev = run_eval_base(model_location, dataset = "dev", email = model_params["email"])
	mse_total_train = run_eval_base(model_location, dataset = "train", email = model_params["email"])
	mse_total_test = run_eval_base(model_location, dataset = "test", email = model_params["email"])

	# FIX: removed stray pdb.set_trace() that halted execution here before
	# the function could return its results.

	return (
		mse_total_train[0:len(model_params["OutputNames"])],
		mse_total_dev[0:len(model_params["OutputNames"])],
		mse_total_test[0:len(model_params["OutputNames"])]
		)
# Example 2 (score: 0)
def runSimpleModelWithParams(model_params,data):
	"""Train a SimpleNNMultiOutputModel with per-output losses and metrics.

	Args:
		model_params: dict of hyper-parameters. Keys read include
			'num_epochs', 'batchSize', 'DirectionForward', 'scaling',
			'phase_loss', 'use_weights', 'model_name', 'email',
			'InputNames', 'OutputNames'. Mutated in place when the
			direction is reversed (Input/OutputNames are swapped).
		data: sequence with (AP_train, TRP_train) at index 0 and
			(AP_dev, TRP_dev) at index 1.

	Returns:
		Tuple (train, dev, test), each the per-output slice of the MSE
		vector returned by run_eval_base.

	Raises:
		NotImplementedError: if model_params['phase_loss'] is not one of
			'mse', 'phase_mse', 'atan_mse'.
	"""
	np.random.seed(RANDOM_SEED)
	NUM_EPOCHS = model_params['num_epochs']
	# NOTE: the original assigned an unused BATCH_SIZE local here; the batch
	# size is read directly from model_params at fit() time instead.

	# Unpack the train/dev datasets.
	AP_train, TRP_train = data[0]
	AP_dev, TRP_dev = data[1]

	# Select the modelling direction; when reversed, also swap the I/O name
	# lists so downstream reporting stays consistent with the data.
	if model_params["DirectionForward"]:
		X_train, Y_train, X_dev, Y_dev = TRP_train, AP_train, TRP_dev, AP_dev
	else:
		X_train, Y_train, X_dev, Y_dev = AP_train, TRP_train, AP_dev, TRP_dev
		model_params["OutputNames"], model_params["InputNames"] = model_params["InputNames"], model_params["OutputNames"]

	model = customModels.SimpleNNMultiOutputModel(model_params)

	if model_params["scaling"]:
		X_train, X_dev, _ = prep.scaleData(X_train, X_dev)

	# Per-output metrics: phase outputs (name contains "ph") get the
	# phase-aware metrics; everything else gets plain regression metrics.
	metrics_vect = {}
	for k in model_params["OutputNames"]:
		if "ph" in k:
			metrics_vect[k] = [atan_mse, "mse", modulo_2pi_error, phase_mse, smape]
		else:
			metrics_vect[k] = ["mse", smape, custom_loss]

	# Per-output losses: phase outputs use the configured phase loss; all
	# other outputs use MSE.
	# FIX: the original had `elif k in model_params["OutputNames"]` here,
	# which is always True inside `for k in model_params["OutputNames"]`,
	# leaving the final else branch unreachable — collapsed to a single else.
	losses_vect = {}
	for k in model_params["OutputNames"]:
		if "ph" in k:
			if model_params["phase_loss"] == "mse":
				losses_vect[k] = "mse"
			elif model_params["phase_loss"] == "phase_mse":
				losses_vect[k] = phase_mse
			elif model_params["phase_loss"] == "atan_mse":
				losses_vect[k] = atan_mse
			else:
				raise NotImplementedError("not valid phase_loss")
		else:
			losses_vect[k] = "mse"

	# Optional inverse-variance-style weighting of the per-output losses,
	# based on the mean magnitude of each target column.
	if model_params["use_weights"]:
		wights = (Y_train.mean(axis = 0)).tolist()
	else:
		wights = [1]*len(model_params["OutputNames"])

	# FIX: the original also read model_params["weights_factor"] into an
	# unused local each iteration; that dead read has been dropped.
	losses_weights = {}
	for i, k in enumerate(model_params["OutputNames"]):
		losses_weights[k] = 1 / wights[i] ** 2

	model.compile(
	 optimizer = "adam",
	 loss = losses_vect,
	 loss_weights = losses_weights,
	 metrics = metrics_vect
	)

	# Callbacks setup / local param logging.
	print("Local Training Params:")
	customUtils.print_dic_params(model_params,True,delimitor = "_",kvdelimitor = "_" )
	print("="*50)

	callback_list = customCallbacks.addCallBacks(model_params)

	# Targets are supplied as a dict of 1-D columns keyed by output name.
	results = model.fit(
	 X_train,
	 dict(zip(model_params["OutputNames"], [row for row in Y_train.T])),
	 epochs = NUM_EPOCHS,
	 batch_size = model_params["batchSize"],
	 validation_data = (
	 X_dev,
	 dict(zip(model_params["OutputNames"], [row for row in Y_dev.T]))
	 ),
	 verbose = 0,
	 callbacks = callback_list,
	 shuffle = True
	)

	# Persist the hyper-parameters next to the model for reproducibility,
	# then evaluate on all three splits.
	model_location = os.path.join('models', model_params["model_name"] + '.hdf5')
	with open(os.path.join('model_params', model_params["model_name"] + '.json'), 'w') as fp:
		json.dump(model_params, fp, sort_keys=True)

	mse_total_train = run_eval_base(model_location, dataset = "train", email = model_params["email"])
	mse_total_dev = run_eval_base(model_location, dataset = "dev", email = model_params["email"])
	mse_total_test = run_eval_base(model_location, dataset = "test", email = model_params["email"])

	return (
		mse_total_train[0:len(model_params["OutputNames"])],
		mse_total_dev[0:len(model_params["OutputNames"])],
		mse_total_test[0:len(model_params["OutputNames"])]
		)
# Example 3 (score: 0)
def runDistModelWithParams(model_params,data):
	"""Train a DistNNMultiOutputModel (distributional heads for T/R/P).

	The "T", "R", "P" outputs are trained with a negative-log-likelihood
	style loss (log_likelihood_normal_cost) and their targets are duplicated
	into two columns (value stacked twice) to match the two-parameter
	(mu, sigma) head; other outputs use MSE.

	Args:
		model_params: dict of hyper-parameters. Keys read include
			'num_epochs', 'batchSize', 'DirectionForward', 'scaling',
			'use_weights', 'model_name', 'email', 'InputNames',
			'OutputNames'. Mutated in place when the direction is reversed
			(Input/OutputNames are swapped).
		data: sequence with (AP_train, TRP_train) at index 0 and
			(AP_dev, TRP_dev) at index 1.

	Returns:
		Tuple (train, dev, test), each the per-output slice of the MSE
		vector returned by run_eval_base.
	"""
	np.random.seed(RANDOM_SEED)
	NUM_EPOCHS = model_params['num_epochs']

	# Unpack the train/dev datasets.
	AP_train, TRP_train = data[0]
	AP_dev, TRP_dev = data[1]

	# Select the modelling direction; when reversed, also swap the I/O name
	# lists so downstream reporting stays consistent with the data.
	if model_params["DirectionForward"]:
		X_train, Y_train, X_dev, Y_dev = TRP_train, AP_train, TRP_dev, AP_dev
	else:
		X_train, Y_train, X_dev, Y_dev = AP_train, TRP_train, AP_dev, TRP_dev
		model_params["OutputNames"], model_params["InputNames"] = model_params["InputNames"], model_params["OutputNames"]

	if model_params["scaling"]:
		X_train, X_dev, _ = prep.scaleData(X_train, X_dev)

	model = customModels.DistNNMultiOutputModel(model_params)

	# Per-output metrics: phase outputs get phase-aware metrics, the
	# distributional T/R/P heads get likelihood metrics, the rest get plain
	# regression metrics.
	metrics_vect = {}
	for k in model_params["OutputNames"]:
		if "ph" in k:
			metrics_vect[k] = [atan_mse, "mse", modulo_2pi_error, mean_squared_error_mu, smape]
		elif k in ["T", "R", "P"]:
			metrics_vect[k] = [nll, log_likelihood_normal_cost, mean_squared_error_mu, acc_dist, cv_test, "mse", smape]
		else:
			metrics_vect[k] = ["mse", mean_squared_error_mu, smape]

	# Per-output losses: NLL-style loss for the distributional heads,
	# MSE everywhere else.
	losses_vect = {}
	for k in model_params["OutputNames"]:
		if k in ["T", "R", "P"]:
			losses_vect[k] = log_likelihood_normal_cost
		else:
			losses_vect[k] = "mse"

	# Optional loss weighting by the mean magnitude of each target column.
	if model_params["use_weights"]:
		wights = (Y_train.mean(axis = 0)).tolist()
	else:
		wights = [1]*len(model_params["OutputNames"])

	# FIX: the original read model_params["weights_factor"] into an unused
	# local each iteration; that dead read has been dropped.
	losses_weights = {}
	for i, k in enumerate(model_params["OutputNames"]):
		losses_weights[k] = wights[i]  #0.4/0.12/0.4

	model.compile(
	 optimizer = "adam",
	 loss = losses_vect,
	 loss_weights = losses_weights,
	 metrics = metrics_vect
	)

	# Callbacks setup / local param logging.
	print("Local Training Params:")
	customUtils.print_dic_params(model_params,True,delimitor = "_",kvdelimitor = "_" )
	print("="*50)

	callback_list = customCallbacks.addCallBacks(model_params)

	# Each target column is stacked with itself -> shape (samples, 2) so the
	# loss can compare against the head's (mu, sigma) pair.
	results = model.fit(
	 X_train,
	 dict(zip(model_params["OutputNames"], [np.stack((row, row), axis = 0).T for row in Y_train.T])),
	 epochs = NUM_EPOCHS,
	 batch_size = model_params["batchSize"],
	 validation_data = (
	 X_dev,
	 dict(zip(model_params["OutputNames"], [np.stack((row, row), axis = 0).T for row in Y_dev.T])),
	 ),
	 verbose = 0,
	 callbacks = callback_list,
	 shuffle = True
	)

	# Persist the hyper-parameters next to the model for reproducibility,
	# then evaluate on all three splits.
	model_location = os.path.join('models', model_params["model_name"] + '.hdf5')
	with open(os.path.join('model_params', model_params["model_name"] + '.json'), 'w') as fp:
		json.dump(model_params, fp, sort_keys=True)

	mse_total_train = run_eval_base(model_location, dataset = "train", email = model_params["email"])
	mse_total_dev = run_eval_base(model_location, dataset = "dev", email = model_params["email"])
	mse_total_test = run_eval_base(model_location, dataset = "test", email = model_params["email"])

	return (
		mse_total_train[0:len(model_params["OutputNames"])],
		mse_total_dev[0:len(model_params["OutputNames"])],
		mse_total_test[0:len(model_params["OutputNames"])]
		)