# Fetch the raw dataset, then load the benchmark: X (configurations),
# Y (per-task observations), and C (semantics not visible in this chunk --
# presumably costs; verify against load_data).
download_data(args.input_path)
X, Y, C = load_data(args.input_path, fname)

# Ensure X is 2-D: (n_configs, n_features), even for a single feature.
if len(X.shape) == 1:
    X = X[:, None]

n_tasks = Y.shape[0]    # one row of Y per task
n_configs = X.shape[0]  # one row of X per configuration

# Task id for every entry of the row-major-flattened Y, so normalize_Y can
# normalize each task's observations separately (assumes Y is
# (n_tasks, n_configs) -- consistent with the reshape below).
index_task = np.repeat(np.arange(n_tasks), n_configs)
# deepcopy guards the original Y from in-place modification by normalize_Y.
Y_norm, _, _ = normalize_Y(deepcopy(Y.flatten()), index_task)

# train the probabilistic encoder: a Bayesian GP-LVM that embeds each task's
# normalized observation vector into a Q_h-dimensional latent space.
kern = GPy.kern.Matern52(Q_h, ARD=True)
m_lvm = BayesianGPLVM(
    Y_norm.reshape(n_tasks, n_configs),
    Q_h,
    kernel=kern,
    num_inducing=n_inducing_lvm,
)
m_lvm.optimize(max_iters=10000, messages=1)

# Per-latent-dimension ARD lengthscales of the fitted kernel, as a plain array.
ls = np.array([
    m_lvm.kern.lengthscale[i]
    for i in range(m_lvm.kern.lengthscale.shape[0])
])

# generate data to train the multi-task model: task features are the mean and
# std of the variational posterior over the latents, rescaled by the ARD
# lengthscales (whitening so each latent dimension is on a comparable scale).
task_features_mean = np.array(m_lvm.X.mean / ls)
task_features_std = np.array(np.sqrt(m_lvm.X.variance) / ls)

# Accumulators for the multi-task training set, filled below this chunk.
X_train = []
Y_train = []
C_train = []