def grid_search_subhalo_centres_as_array_from_grid_search_result(
    grid_search_result,
) -> [(float, float)]:
    """
    Return the subhalo mass-profile centres of every cell in a 2D grid search.

    Parameters
    ----------
    grid_search_result
        A 2D grid search result whose per-cell results each contain a
        `galaxies.subhalo.mass` model component with a `centre`.

    Returns
    -------
    A flat list of (y, x) centre tuples, ordered row by row over the grid.

    Raises
    ------
    exc.AggregatorException
        If the grid search result is not 2-dimensional.
    """
    if grid_search_result.no_dimensions != 2:
        raise exc.AggregatorException(
            "The GridSearchResult is not dimensions 2, meaning a 2D array cannot be made."
        )

    centres = []

    # Walk the reshaped (row-major) grid of per-cell results and pull the
    # median-PDF subhalo centre out of each cell's samples.
    for row in grid_search_result.results_reshaped:
        for cell_result in row:
            instance = cell_result.samples.median_pdf_instance
            centres.append(instance.galaxies.subhalo.mass.centre)

    return centres
def grid_search_subhalo_masses_as_array(aggregator: af.Aggregator) -> al.Array:
    """
    Extract the single grid search result from an aggregator and convert its
    per-cell subhalo masses into a 2D array.

    Parameters
    ----------
    aggregator
        An aggregator whose "grid_search_result" values contain exactly one
        non-None grid search result.

    Returns
    -------
    The 2D array of subhalo masses built by
    `grid_search_subhalo_masses_as_array_from_grid_search_result`.

    Raises
    ------
    exc.AggregatorException
        If the aggregator contains zero or more than one grid search result.
    """
    grid_search_result_gen = aggregator.values("grid_search_result")

    # Drop None entries (fits that were not grid searches).
    grid_search_results = list(filter(None, grid_search_result_gen))

    # Distinguish the empty case from the ambiguous case so the error message
    # is accurate (previously both raised the "more than one" message, which
    # was also missing a space between "the" and "aggregator").
    if len(grid_search_results) == 0:
        raise exc.AggregatorException(
            "There is no grid search result in the aggregator."
        )
    elif len(grid_search_results) > 1:
        raise exc.AggregatorException(
            "There is more than one grid search result in the aggregator - please filter "
            "the aggregator."
        )

    return grid_search_subhalo_masses_as_array_from_grid_search_result(
        grid_search_result=grid_search_results[0]
    )
def grid_search_log_evidences_as_array_from_grid_search_result(
    grid_search_result,
    use_log_evidences=True,
    use_stochastic_log_evidences: bool = False,
) -> al.Array2D:
    """
    Convert the figure-of-merit of every cell of a 2D grid search into a 2D array.

    Parameters
    ----------
    grid_search_result
        A 2D grid search result.
    use_log_evidences
        If True (and stochastic evidences are not requested), use each cell's
        log evidence; otherwise the maximum log likelihood is used.
    use_stochastic_log_evidences
        If True, load each cell's stochastic log evidences from its
        "stochastic_log_evidences.json" output file and use their median.
        Takes precedence over `use_log_evidences`.

    Returns
    -------
    al.Array2D
        The chosen figure-of-merit of every cell, laid out on the grid search's
        physical (y, x) grid.

    Raises
    ------
    exc.AggregatorException
        If the grid search result is not 2-dimensional.
    FileNotFoundError
        If a cell's stochastic log evidence file is missing.
    """
    if grid_search_result.no_dimensions != 2:
        raise exc.AggregatorException(
            "The GridSearchResult is not dimensions 2, meaning a 2D array cannot be made."
        )

    if use_log_evidences and not use_stochastic_log_evidences:
        # Flatten the per-row log evidence lists into a single list.
        values = [
            value for row in grid_search_result.log_evidence_values for value in row
        ]
    elif use_stochastic_log_evidences:
        values = []
        for result in grid_search_result.results:
            json_file = path.join(
                result.search.paths.output_path, "stochastic_log_evidences.json"
            )
            try:
                with open(json_file, "r") as f:
                    stochastic_values = np.asarray(json.load(f))
            except FileNotFoundError:
                raise FileNotFoundError(
                    f"File not found at {result.search.paths.output_path}"
                )
            # Summarize the stochastic distribution of each cell by its median.
            values.append(np.median(stochastic_values))
    else:
        values = [
            value
            for row in grid_search_result.max_log_likelihood_values
            for value in row
        ]

    centres = grid_search_result.physical_centres_lists

    return al.Array2D.manual_yx_and_values(
        y=[centre[0] for centre in centres],
        x=[centre[1] for centre in centres],
        values=values,
        pixel_scales=grid_search_result.physical_step_sizes,
        shape_native=grid_search_result.shape,
    )
def grid_search_result_as_array(
    aggregator: af.Aggregator,
    use_log_evidences: bool = True,
    use_stochastic_log_evidences: bool = False,
) -> al.Array2D:
    """
    Extract the single grid search result from an aggregator and convert its
    per-cell figures-of-merit into a 2D array.

    Parameters
    ----------
    aggregator
        An aggregator whose "grid_search_result" values contain exactly one
        non-None grid search result.
    use_log_evidences
        Passed through: use log evidences rather than max log likelihoods.
    use_stochastic_log_evidences
        Passed through: use the median of each cell's stochastic log evidences.

    Returns
    -------
    The 2D array built by
    `grid_search_log_evidences_as_array_from_grid_search_result`.

    Raises
    ------
    exc.AggregatorException
        If the aggregator contains zero or more than one grid search result.
    """
    grid_search_result_gen = aggregator.values("grid_search_result")

    # Drop None entries (fits that were not grid searches).
    grid_search_results = list(filter(None, grid_search_result_gen))

    # Fixed error-message typos: "resultin" -> "result in", and the implicit
    # string concatenation previously produced "theaggregator.".
    if len(grid_search_results) == 0:
        raise exc.AggregatorException(
            "There is no grid search result in the aggregator."
        )
    elif len(grid_search_results) > 1:
        raise exc.AggregatorException(
            "There is more than one grid search result in the aggregator - please filter "
            "the aggregator."
        )

    return grid_search_log_evidences_as_array_from_grid_search_result(
        grid_search_result=grid_search_results[0],
        use_log_evidences=use_log_evidences,
        use_stochastic_log_evidences=use_stochastic_log_evidences,
    )
def grid_search_subhalo_masses_as_array_from_grid_search_result(
    grid_search_result,
) -> al.Array2D:
    """
    Convert the subhalo mass (M200) of every cell of a 2D grid search into a
    2D array.

    Parameters
    ----------
    grid_search_result
        A 2D grid search result whose per-cell results each contain a
        `galaxies.subhalo.mass` model component with a `mass_at_200`.

    Returns
    -------
    al.Array2D
        The median-PDF subhalo mass of every cell, laid out on the grid
        search's physical (y, x) grid. (The return annotation previously read
        `[float]`, but an `al.Array2D` is what is actually returned, matching
        the other `*_as_array_from_grid_search_result` functions.)

    Raises
    ------
    exc.AggregatorException
        If the grid search result is not 2-dimensional.
    """
    if grid_search_result.no_dimensions != 2:
        raise exc.AggregatorException(
            "The GridSearchResult is not dimensions 2, meaning a 2D array cannot be made."
        )

    # Row-major walk over the grid, taking the median-PDF M200 of each cell.
    masses = [
        res.samples.median_pdf_instance.galaxies.subhalo.mass.mass_at_200
        for results in grid_search_result.results_reshaped
        for res in results
    ]

    return al.Array2D.manual_yx_and_values(
        y=[centre[0] for centre in grid_search_result.physical_centres_lists],
        x=[centre[1] for centre in grid_search_result.physical_centres_lists],
        values=masses,
        pixel_scales=grid_search_result.physical_step_sizes,
        shape_native=grid_search_result.shape,
    )