def run_training(db_file, name_out_path, n_components, data_type):
    """Fit a PCA model on syllable TF features and persist the artifacts.

    Loads the syllable database from `db_file`, stacks each syllable's
    `['TF'][data_type]['data']` vector into a matrix Y, reduces it to
    `n_components` dimensions via GPy_Interface.pca, and saves the model,
    the reduced output, the syllable ids and the raw training matrix
    under `name_out_path`.
    """
    db = Utility.load_obj(db_file)

    # Gather feature rows and their syllable ids in lockstep.
    Y = []
    names = []
    for syl in db:
        Y.append(syl['TF'][data_type]['data'])
        names.append(syl['id'])

    Y = np.array(Y)
    print(Y.shape)

    config = {'n_components': n_components, 'data': Y}
    print(config)

    m, Y_r = GPy_Interface.pca(config)

    Utility.save_obj(m, '{}/model.pkl'.format(name_out_path))
    Utility.save_obj(Y_r, '{}/pca_reduction_output.pkl'.format(name_out_path))
    Utility.save_obj(names, '{}/names.pkl'.format(name_out_path))
    Utility.save_obj(Y, '{}/training_data.pkl'.format(name_out_path))
def run_training(db_file, name_out_path, data_type, input_dim):
    """Train a Bayesian GP-LVM on syllable TF features and save the artifacts.

    Loads the syllable database, stacks each `['TF'][data_type]['data']`
    vector into Y, trains a Bayesian GP-LVM (SCG optimizer, 500 iterations,
    missing-data mode) with `input_dim` latent dimensions, then saves the
    model, latent means, input sensitivities, syllable ids and training
    data under `name_out_path`.
    """
    db = Utility.load_obj(db_file)

    Y = []
    names = []
    for syl in db:
        Y.append(syl['TF'][data_type]['data'])
        names.append(syl['id'])

    Y = np.array(Y)
    print(Y.shape)

    # ~1% of the samples as inducing points, clamped to the range [10, 100].
    num_inducing = max(10, min(100, int(len(Y) * 0.01)))

    config = {
        'input_dim': input_dim,
        'data': Y,
        'num_inducing': num_inducing,
        'max_iters': 500,
        'missing_data': True,
        'optimize_algo': 'scg'
    }
    print(config)

    m = GPy_Interface.Bayesian_GPLVM_Training(config)

    print(m)
    print('---------------------------')
    print(m.X)
    print('---------------------------')
    print(m.input_sensitivity())
    print('---------------------------')

    Utility.save_obj(m, '{}/model.pkl'.format(name_out_path))
    Utility.save_obj(np.array(m.X.mean), '{}/x.pkl'.format(name_out_path))
    Utility.save_obj(m.input_sensitivity(), '{}/input_sensitivity.pkl'.format(name_out_path))
    Utility.save_obj(names, '{}/names.pkl'.format(name_out_path))
    Utility.save_obj(Y, '{}/training_data.pkl'.format(name_out_path))
def run_training(db_file, name_out_path):
    """Train a Bayesian GP-LVM on the 'missing151' TF features and save artifacts.

    Bug fix: `names` was saved at the end but never defined anywhere in the
    function, so the final Utility.save_obj call raised a NameError. The
    syllable ids are now collected alongside the feature vectors, consistent
    with the sibling run_training variants in this file.

    Loads the syllable database, trains a Bayesian GP-LVM (BFGS optimizer,
    1000 iterations, missing-data mode, 10 latent dims, 10% of samples as
    inducing points), then saves the model, latent means, input
    sensitivities, syllable ids and training data under `name_out_path`.
    """
    db = Utility.load_obj(db_file)

    Y = []
    names = []  # syllable ids, saved alongside the feature matrix
    for syl in db:
        feat = syl['TF']['missing151']['data']
        Y.append(feat)
        names.append(syl['id'])

    Y = np.array(Y)
    print(Y.shape)

    config = {
        'input_dim': 10,
        'data': Y,
        'num_inducing': int(len(Y) * 0.1),  # 10% of samples as inducing points
        'max_iters': 1000,
        'missing_data': True,
        'optimize_algo': 'bfgs'
    }
    print(config)

    m = GPy_Interface.Bayesian_GPLVM_Training(config)

    print(m)
    print('---------------------------')
    print(m.X)
    print('---------------------------')
    print(np.array(m.X.mean))
    print('---------------------------')
    print(m.input_sensitivity())
    print('---------------------------')

    Utility.save_obj(m, '{}/model.pkl'.format(name_out_path))
    Utility.save_obj(np.array(m.X.mean), '{}/x.pkl'.format(name_out_path))
    Utility.save_obj(m.input_sensitivity(), '{}/input_sensitivity.pkl'.format(name_out_path))
    Utility.save_obj(names, '{}/names.pkl'.format(name_out_path))
    Utility.save_obj(Y, '{}/training_data.pkl'.format(name_out_path))
def run_training(db_file, name_out_path, data_type='intepolate151'):
    """Train a Bayesian GP-LVM on syllable TF features and save the artifacts.

    Generalized: the feature key was hard-coded to 'intepolate151'; it is now
    the `data_type` parameter with that value as the default, so existing
    callers are unaffected while other feature sets become usable, matching
    the parameterized sibling run_training variants in this file.

    Loads the syllable database, stacks each `['TF'][data_type]['data']`
    vector into Y, trains a Bayesian GP-LVM (SCG optimizer, 300 iterations,
    missing-data mode, 10 latent dims, 10% of samples as inducing points),
    then saves the model, syllable ids and training data under
    `name_out_path`.
    """
    db = Utility.load_obj(db_file)

    Y = []
    names = []  # syllable ids, saved alongside the feature matrix
    for syl in db:
        feat = syl['TF'][data_type]['data']
        Y.append(feat)
        names.append(syl['id'])

    Y = np.array(Y)
    print(Y.shape)

    config = {
        'input_dim': 10,
        'data': Y,
        'num_inducing': int(len(Y) * 0.1),  # 10% of samples as inducing points
        'max_iters': 300,
        'missing_data': True,
        'optimize_algo': 'scg'
    }
    print(config)

    m = GPy_Interface.Bayesian_GPLVM_Training(config)

    Utility.save_obj(m, '{}/model.pkl'.format(name_out_path))
    Utility.save_obj(names, '{}/names.pkl'.format(name_out_path))
    Utility.save_obj(Y, '{}/training_data.pkl'.format(name_out_path))