def output_positions_info(self, output_path: str, tracer: Tracer):
    """
    Write a `positions.info` file summarizing the positions penalty term of a
    model fit.

    The file records:

    - The arc-second coordinates of the lensed source multiple images used
      for the model-fit.
    - The radial distance of each coordinate from (0.0, 0.0).
    - The threshold value used by the likelihood penalty.
    - The maximum source-plane separation of the maximum likelihood tracer.

    Parameters
    ----------
    output_path
        The folder where the `positions.info` file is written.
    tracer
        The tracer whose maximum source-plane separation is summarized.
    """
    fit = FitPositionsSourceMaxSeparation(
        positions=self.positions, noise_map=None, tracer=tracer
    )
    radial_distances = fit.positions.distances_to_coordinate_from(
        coordinate=(0.0, 0.0)
    )

    # NOTE(review): the "Seperation" spelling below is part of the emitted
    # file format and is kept as-is so downstream readers are unaffected.
    info_lines = [
        f"Positions: \n {self.positions} \n\n",
        f"Radial Distance from (0.0, 0.0): \n {radial_distances} \n\n",
        f"Threshold = {self.threshold} \n",
        f"Max Source Plane Seperation of Maximum Likelihood Model = {fit.max_separation_of_source_plane_positions}",
    ]

    with open_(path.join(output_path, "positions.info"), "w+") as f:
        f.writelines(info_lines)
def _save_model_info(self, model):
    """
    Write the `model.info` file, which summarizes every parameter and prior
    of the model.
    """
    model_info_path = path.join(self.output_path, "model.info")
    sections = (
        f"Total Free Parameters = {model.prior_count} \n\n",
        f"{model.parameterization} \n\n",
        model.info,
    )
    with open_(model_info_path, "w+") as f:
        for section in sections:
            f.write(section)
def _save_metadata(self, search_name):
    """
    Save metadata associated with the phase, such as the name of the
    pipeline, the name of the phase and the name of the dataset being fit.
    """
    # Opened in append mode so previously written metadata is preserved.
    metadata = f"name={self.name}\nnon_linear_search={search_name}\n"
    with open_(path.join(self.output_path, "metadata"), "a") as f:
        f.write(metadata)
def save_object(self, name: str, obj: object):
    """
    Serialise an object using dill and save it to the pickles directory
    of the search.

    Parameters
    ----------
    name
        The name of the object, which determines the pickle's filename.
    obj
        A serialisable object
    """
    pickle_path = self._path_for_pickle(name)
    with open_(pickle_path, "wb") as f:
        dill.dump(obj, f)
def load_object(self, name: str):
    """
    Load a serialised object with the given name.

    e.g. if the name is 'model' then pickles/model.pickle is loaded.

    Parameters
    ----------
    name
        The name of a serialised object

    Returns
    -------
    The deserialised object
    """
    pickle_path = self._path_for_pickle(name)
    with open_(pickle_path, "rb") as f:
        loaded = dill.load(f)
    return loaded
def test__output_positions_info():
    """Check `output_positions_info` writes a positions.info file whose first
    line contains the positions summary, then clean the file up."""
    files_dir = path.join(os.path.dirname(os.path.realpath(__file__)), "files")

    positions = al.Grid2DIrregular([(1.0, 2.0), (3.0, 4.0)])
    positions_likelihood = al.PositionsLHResample(positions=positions, threshold=0.1)

    tracer = al.m.MockTracer(
        traced_grid_2d_list_from=al.Grid2DIrregular(grid=[[(0.5, 1.5), (2.5, 3.5)]])
    )

    positions_likelihood.output_positions_info(output_path=files_dir, tracer=tracer)

    positions_file = path.join(files_dir, "positions.info")
    with open_(positions_file, "r") as f:
        lines = f.readlines()

    assert "Positions" in lines[0]

    os.remove(positions_file)
def output_list_of_strings_to_file(file, list_of_strings):
    """Write every string in `list_of_strings` to `file`, back to back with
    no separator (overwriting any existing contents)."""
    with open_(file, "w") as f:
        f.writelines(list_of_strings)
def completed(self):
    """
    Mark the search as complete by saving an (empty) marker file at
    `self._has_completed_path`.
    """
    # Use a context manager rather than open_(...).close() so the handle is
    # always released, even if an exception is raised after opening.
    with open_(self._has_completed_path, "w+"):
        pass
def save_unique_tag(self, is_grid_search=False):
    """
    For a grid search, save the unique tag to the grid-search marker file.

    The marker file is created whenever `is_grid_search` is True (it is left
    empty when no unique tag is set).
    """
    if not is_grid_search:
        return
    with open_(self._grid_search_path, "w+") as f:
        if self.unique_tag is not None:
            f.write(self.unique_tag)
def save_parent_identifier(self):
    """
    If this search has a parent, write the parent's identifier to a file and
    have the parent save its unique tag.
    """
    if self.parent is None:
        return
    with open_(self._parent_identifier_path, "w+") as f:
        f.write(self.parent.identifier)
    self.parent.save_unique_tag()
def load_samples_info(self):
    """Read the search's info file and return its parsed JSON contents."""
    with open_(self._info_file) as f:
        samples_info = json.load(f)
    return samples_info
def _save_search(self, config_dict):
    """
    Serialize the search's configuration dictionary to `search.json` in the
    output directory.
    """
    search_json_path = path.join(self.output_path, "search.json")
    with open_(search_json_path, "w+") as f:
        json.dump(config_dict, f, indent=4)
def save_identifier(self):
    """
    Write the search identifier's description to a `.identifier` file in the
    symlinked output directory.
    """
    # Build the path with path.join for consistency with every other save
    # method in this module, rather than f-string concatenation with "/".
    with open_(path.join(self._sym_path, ".identifier"), "w+") as f:
        f.write(self._identifier.description)