def execute_experiment(features_hypers1, features_hypers2):
    # Compute the one-directional GFRE between the two feature sets and
    # record the experiment id for later retrieval.
    experiment_id, gfre_vec = gfr_pairwise_experiment(
        dataset_name,
        nb_samples,
        features_hypers1,
        features_hypers2,
        two_split=two_split,
        train_ratio=train_ratio,
        seed=seed,
        noise_removal=False,
        regularizer=regularizer,
        compute_distortion=False,
        one_direction=True,
    )
    experiment_ids.append(experiment_id)
    print(gfre_vec)
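
# Usage sketch (illustrative, not from the original script): the helper above
# is meant to be called with two lists of feature-hyperparameter dictionaries,
# built the same way as the SOAP hypers blocks further down in this file. The
# loop and the `candidate_hypers` name below are hypothetical placeholders.
#
#   for features_hypers2 in candidate_hypers:
#       execute_experiment(features_hypers1, features_hypers2)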
    }
}]

# Compute the LFRE for every number of local environments under study.
for nb_local_envs in nbs_local_envs:
    hash_value = lfre_pairwise_experiment(
        dataset_name,
        nb_samples,
        features_hypers1,
        features_hypers2,
        nb_local_envs,
        two_split,
        seed,
        train_ratio,
        regularizer,
        inner_epsilon,
        outer_epsilon,
        one_direction=False,
    )
    experiment_ids.append(hash_value)
    print(f"nb_local_envs={nb_local_envs} hash_value={hash_value}")
print(experiment_ids)

# Compute the corresponding (bidirectional) GFRE for the same feature pair.
hash_value, _ = gfr_pairwise_experiment(
    dataset_name,
    nb_samples,
    features_hypers1,
    features_hypers2,
    two_split=two_split,
    train_ratio=train_ratio,
    seed=seed,
    noise_removal=False,
    regularizer=regularizer,
    compute_distortion=False,
)
print(f"GFRE={hash_value}")
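
# Note on free variables (illustrative, not from the original script): the
# calls above rely on module-level settings defined earlier in the script.
# A minimal sketch of what they look like, with placeholder values only:
#
#   dataset_name = "..."     # dataset identifier passed to the experiment runners
#   nb_samples = ...         # number of samples drawn from the dataset
#   two_split = True         # whether a train/test split is used
#   train_ratio = ...        # fraction of samples used for training
#   seed = ...               # RNG seed controlling the split
#   regularizer = ...        # regularization strength for the reconstruction fit
#   inner_epsilon = ...      # tolerances forwarded to lfre_pairwise_experiment
#   outer_epsilon = ...
#   nbs_local_envs = [...]   # neighborhood sizes scanned by the LFRE loop
#   experiment_ids = []      # collects the returned experiment ids/hashes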
"interaction_cutoff": cutoff, "max_radial": max_radial, "max_angular": max_angular, "gaussian_sigma_constant": sigma, "gaussian_sigma_type": "Constant", "cutoff_smooth_width": cutoff_smooth_width, "normalize": normalize } } for max_radial, max_angular in max_radials_angulars] hash_value, _ = gfr_pairwise_experiment( dataset_name, nb_samples, features_hypers1, features_hypers2, two_split=two_split, train_ratio=train_ratio, seed=seed, noise_removal=False, regularizer=regularizer, set_methane_dataset_to_same_species=False) hash_values.append(hash_value) print('"' + ' '.join(hash_values).replace(' ', '" "') + '" ') print(f"soap_{dataset_name}_hash_value = " + '[' + '"' + ' '.join(hash_values).replace(' ', '", "') + '"]') hash_values = [] for feature_count in feature_counts: features_hypers1 = [{