# -----------------------------------------------------------------

modeling_path = verify_modeling_cwd()
runs = FittingRuns(modeling_path)

# -----------------------------------------------------------------

# Build the definition of the command-line options
definition = ConfigurationDefinition()

# -----------------------------------------------------------------

# FITTING RUN: there must be at least one
if runs.empty:
    raise RuntimeError("No fitting runs are present (yet)")

# A single run needs no user input; otherwise the user has to pick one
if runs.has_single:
    definition.add_fixed("fitting_run", "name of the fitting run", runs.single_name)
else:
    definition.add_required("fitting_run", "string", "name of the fitting run", choices=runs.names)

# Parse the command-line arguments into the configuration
config = parse_arguments("check_populations", definition)

# -----------------------------------------------------------------

# Load the populations table and select the entry for this fitting run
populations = get_populations(modeling_path)
populations_run = populations[config.fitting_run]

# Load the generations table for this fitting run
generations = get_generations_table(modeling_path, config.fitting_run)

# -----------------------------------------------------------------
"fitting method", default_fitting_method, choices=fitting_methods) definition.add_optional("rerun_preparation_step", "string", "rerun a certain preparation step for all images", choices=steps) if cache_host_id is not None: definition.add_flag("cache", "cache unimportant data to the remote host storage", False) else: definition.add_fixed( "cache", "caching not possible since cache host ID is not set in the modeling configuration", False) # Number of dust grids (= number of model representations) and the number of wavelength grids definition.add_optional("nwavelength_grids", "positive_integer", "number of wavelength grids to use for the fitting", 10) definition.add_optional( "ndust_grids", "positive_integer", "number of dust grids, or the number of model representations", 10) # NEW: RERUN FOR THE MODELING STEPS definition.add_optional("rerun", "string", "rerun from a certain modeling step", choices=single_commands)
# Supported dust grid types and the one used by default
dust_grid_types = ["cartesian", "bintree", "octtree"]
default_dust_grid_type = "bintree"

# -----------------------------------------------------------------

# Build the configuration definition
definition = ConfigurationDefinition(log_path="log", config_path="config")

# Representation name: must not clash with an existing representation
if suite.has_representations:
    definition.add_required("name", "string", "name for the representation", forbidden=suite.representation_names)
else:
    definition.add_optional("name", "string", "name for the representation", default="highres")

# Model for which the representation is created
if suite.no_models:
    raise RuntimeError("No models found: first run build_model to create a new model")

if suite.has_single_model:
    definition.add_fixed("model_name", "name of the model", suite.single_model_name)
else:
    definition.add_required("model_name", "string", "name of the model", choices=suite.model_names)

# Dust grid settings live in their own section; bind it once for brevity
definition.add_section("dg", "settings for the dust grid")
dg = definition.sections["dg"]
dg.add_optional("grid_type", "string", "type of dust grid", default_dust_grid_type, choices=dust_grid_types)
dg.add_optional("scale", "real", "number of image pixels to take as the minimum scale in the model (can also be a certain fraction of a pixel)", 0.5)
dg.add_optional("bintree_min_level", "integer", "minimum depth level for binary trees", 9)
dg.add_optional("octtree_min_level", "integer", "minimum depth level for octrees", 3)
dg.add_optional("max_mass_fraction", "real", "maximum mass fraction in each cell", 0.5e-6)
dg.add_optional("scale_heights", "real", "number of times to take the dust scale height as the vertical radius of the dust grid", 10.)

# Whether the quality of the dust grid should be assessed
definition.add_flag("check_dust_grid_quality", "check the quality of the dust grid in various ways", True)

# -----------------------------------------------------------------
# -----------------------------------------------------------------

modeling_path = verify_modeling_cwd()
runs = FittingRuns(modeling_path)

# -----------------------------------------------------------------

# Build the configuration definition
definition = ConfigurationDefinition()

# -----------------------------------------------------------------

# FITTING RUN: there must be at least one
if runs.empty:
    raise RuntimeError("No fitting runs are present (yet)")

if runs.has_single:
    definition.add_fixed("fitting_run", "name of the fitting run", runs.single_name)
else:
    definition.add_required("fitting_run", "string", "name of the fitting run", choices=runs.names)

# Names of the generations for which to check the database
definition.add_positional_optional("generations", "string_list", "name of the generations for which to check the database")

# Parse the command-line arguments into the configuration
config = parse_arguments("check_database", definition)

# -----------------------------------------------------------------
# -----------------------------------------------------------------

# Default number of representations to generate
default_nrepresentations = 2

# -----------------------------------------------------------------

# Build the configuration definition
definition = ConfigurationDefinition(log_path="log", config_path="config")

# Model for which to create the representations
model_names = suite.model_names
# FIX: use Pythonic truthiness instead of an explicit length comparison
if not model_names:
    raise RuntimeError("No models found: first run build_model to create a new model")
elif len(model_names) == 1:
    definition.add_fixed("model_name", "name of the model", model_names[0])
else:
    definition.add_required("model_name", "string", "name of the model", choices=model_names)

# Number of representations to generate; at least two are required (min_value=2)
definition.add_optional("nrepresentations", "positive_integer", "number of representations to generate", default_nrepresentations, min_value=2)

# Settings for the dust grid generation
definition.add_section("dg", "settings for the dust grids")
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# **       PTS -- Python Toolkit for working with SKIRT          **
# **       © Astronomical Observatory, Ghent University          **
# *****************************************************************

# Import the relevant PTS classes and modules
from pts.core.basics.configuration import ConfigurationDefinition
from pts.modeling.analysis.run import AnalysisRuns
from pts.modeling.core.environment import verify_modeling_cwd

# -----------------------------------------------------------------

modeling_path = verify_modeling_cwd()
runs = AnalysisRuns(modeling_path)

# -----------------------------------------------------------------

# Build the configuration definition
definition = ConfigurationDefinition(log_path="log", config_path="config")

# Analysis run: fail fast when none exist yet
if runs.empty:
    raise ValueError("No analysis runs present (yet)")

# A single run needs no user input; otherwise default to the most recent one
if runs.has_single:
    definition.add_fixed("run", "name of the analysis run", runs.single_name)
else:
    definition.add_positional_optional("run", "string", "name of the analysis run for which to launch the heating simulations", runs.last_name, runs.names)

# -----------------------------------------------------------------
"cluster_name", "string", "cluster of the remote host to use for the simulation") definition.add_optional("images_remote", "string", "remote host on which to make the observed images", default_other_host_id, choices=other_host_ids) definition.add_flag("attached", "launch remote executions in attached mode", True) definition.add_flag("debug_output", "show all simulation output when in debugging mode", False) # ANALYSIS RUN if runs.empty: raise RuntimeError("No analysis runs are present (yet)") elif runs.has_single: definition.add_fixed("run", "name of the analysis run", runs.single_name) else: definition.add_positional_optional("run", "string", "name of the analysis run", runs.last_name, runs.names) # Parallelization options #definition.add_optional("nnodes", "integer", "number of nodes to use for the simulations (for scheduler)", 4) #definition.add_optional("cores_per_process", "integer", "number of cores per process (for non-scheduler)", 10) #definition.add_flag("data_parallel", "data parallelization mode", False) # ----------------------------------------------------------------- # Simulation options definition.add_optional("npackages", "real", "the number of photon packages per wavelength", 1e7)
from pts.modeling.misc.examination import ModelExamination

# -----------------------------------------------------------------

# Determine the modeling path and grab the static model suite
environment = load_modeling_environment_cwd()
suite = environment.static_model_suite

# -----------------------------------------------------------------

# Build the configuration definition
definition = ConfigurationDefinition()

# Model name: fail fast when no models have been built yet
if suite.no_models:
    raise RuntimeError("No models found: first run build_model to create a new model")

if suite.has_single_model:
    definition.add_fixed("model_name", "name of the model", suite.single_model_name)
else:
    definition.add_required("model_name", "string", "name of the model", choices=suite.model_names)

# Parse the command-line arguments
config = parse_arguments("show_model", definition)

# -----------------------------------------------------------------

# Load the requested model
model = suite.get_model(config.model_name)

# -----------------------------------------------------------------

# Create the examination object
examination = ModelExamination()
# Determine the modeling path and grab the static model suite
environment = load_modeling_environment_cwd()
suite = environment.static_model_suite

# -----------------------------------------------------------------

# Build the configuration definition
definition = ConfigurationDefinition()

# Model name: fail fast when no models have been built yet
if suite.no_models:
    raise RuntimeError("No models found: first run build_model to create a new model")

if suite.has_single_model:
    definition.add_fixed("model_name", "name of the model", suite.single_model_name)
else:
    definition.add_required("model_name", "string", "name of the model", choices=suite.model_names)

# Parse the command-line arguments
config = parse_arguments("show_model", definition)

# -----------------------------------------------------------------

# Load the definition of the requested model
model = suite.get_model_definition(config.model_name)

# -----------------------------------------------------------------
"string", "name for the fitting run", default=default_run_name) else: definition.add_required("name", "string", "name for the fitting run", forbidden=runs.names) # ----------------------------------------------------------------- # MODEL if suite.no_models: raise ValueError("No models are present (yet)") elif suite.has_single_model: definition.add_fixed("model_name", "name of the model to use for the fitting", suite.single_model_name) else: definition.add_optional("model_name", "string", "name of the model to use for the fitting", choices=suite.model_names) # ----------------------------------------------------------------- # NEW: FITTING METHOD definition.add_optional("fitting_method", "string", "fitting method", default_fitting_method, fitting_methods) # Add optional definition.add_optional(
# -----------------------------------------------------------------

# Look up the configured remote hosts
all_host_ids = find_host_ids()
all_hosts = find_hosts()
nhosts = len(all_hosts)
has_hosts = nhosts > 0

# -----------------------------------------------------------------

# Build the configuration definition
definition = ConfigurationDefinition()

# The remote hosts to deploy to
# NOTE(review): the default is the host objects while the choices are the host
# IDs — presumably the 'host_list' parsing type accepts both; confirm against
# that type's implementation
if has_hosts:
    definition.add_positional_optional("hosts", "host_list", "remote hosts", choices=all_host_ids, default=all_hosts)
else:
    definition.add_fixed("hosts", "remote hosts", [])

# Which git remotes to deploy from
definition.add_optional("pts_repo_name", "string", "PTS repository name to deploy remotely", "origin", choices=introspection.pts_git_remotes())
definition.add_optional("skirt_repo_name", "string", "SKIRT repository name to deploy remotely", "origin", choices=introspection.skirt_git_remotes())

# What to deploy and how
definition.add_flag("local", "also deploy locally", True)
definition.add_flag("skirt", "deploy SKIRT", True)
definition.add_flag("pts", "deploy PTS", True)
definition.add_flag("check", "check versions after deployment", True)
definition.add_flag("one_attempt", "only perform one attempt at connecting to a remote")

# Also update the dependencies
definition.add_flag("update_dependencies", "update the dependencies if possible", False)
# Star formation rate and dust mass estimates
definition.add_optional("sfr", "real", "average star formation rate", default_sfr)
definition.add_optional("dust_mass", "quantity", "estimated mass of the dust disk", default_dust_mass)

# Whether to prompt for additional components
definition.add_flag("additional", "ask for additional components", True)

# -----------------------------------------------------------------

# ADVANCED: base the new model on a previously built one (only when any exist)
if suite.has_models:
    definition.add_optional("from_previous", "string", "create from previous model", choices=suite.model_names)
else:
    definition.add_fixed("from_previous", "create from previous model", None)

# -----------------------------------------------------------------

# Overwriting an existing model, and showing the result
definition.add_flag("overwrite", "overwrite possibly existing model with this name", False)
definition.add_flag("show", "show the components after the model is built", True)

# -----------------------------------------------------------------
# generation_methods = ["grid"] #else: raise ValueError("Fitting method has an invalid value: " + fitting_method + " (must be 'genetic' or 'grid'") default_generation_method = "genetic" generation_methods = ["genetic", "grid"] # ----------------------------------------------------------------- # Create the configuration definition = ConfigurationDefinition(log_path="log", config_path="config") # The fitting run for which to explore the parameter space # FITTING RUN if runs.empty: raise RuntimeError("No fitting runs are present") elif runs.has_single: definition.add_fixed("name", "name of the fitting run", runs.single_name) else: definition.add_required("name", "string", "name of the fitting run", choices=runs.names) # Positional optional parameter definition.add_positional_optional("generation_method", "string", "model generation method", default_generation_method, choices=generation_methods) # Optional parameters if len(find_host_ids()) > 0:
# Import the relevant PTS classes and modules
from pts.core.basics.configuration import ConfigurationDefinition
from pts.core.remote.host import find_host_ids

# -----------------------------------------------------------------

# Build the definition (no configuration file is written)
definition = ConfigurationDefinition(write_config=False)

# Frame layout: how many frames and their pixel size
definition.add_optional("nframes", "positive_integer", "number of frames", 2)
definition.add_optional("npixels", "positive_integer", "number of pixels of the frames", 500)

# Number of random point sources to place
definition.add_optional("nrandom_sources", "positive_integer", "number of point sources", 100)

# Whether to vary the FWHM
definition.add_flag("vary_fwhm", "vary the FWHM", False)

# PSF model (fixed to gaussian) and the noise level
definition.add_fixed("psf_model", "psf model", "gaussian")
definition.add_optional("noise_stddev", "real", "stddev of noise", 5.)

# -----------------------------------------------------------------
# Build the configuration definition
definition = ConfigurationDefinition(log_path="log", config_path="config")

# The image for which to run the initialization
definition.add_positional_optional("image", "string", "the name of the image for which to run the initialization")

# Visualisation flag
definition.add_flag("visualise", "make visualisations")

# Remote source detection
definition.add_optional("remote", "string", "remote host on which to run the source finder", choices=find_host_ids())
definition.add_flag("attached", "run remotely in attached mode")

# Options for the source finder, imported as a section
definition.import_section("sources", "options for the source finder", sources_definition)

# Catalog options
default_catalogs = ["II/246"]
definition.add_optional("catalogs", "string_list", "catalogs for point sources", default_catalogs)
definition.add_flag("catalog_overlapping", "only fetch catalog data in the area where all images are overlapping", True)
definition.add_flag("manual", "don't find sources, but mark them from the catalog, and let the selection be done manually", False)

# Caching is only offered when a cache host is configured
cache_host_id = get_cache_host_id(modeling_path)
if cache_host_id is None:
    definition.add_fixed("cache", "caching not possible since cache host id not defined", False)
else:
    definition.add_flag("cache", "cache image data for which the initialized image has been created", False)

# -----------------------------------------------------------------
# -----------------------------------------------------------------

# Build the configuration definition
definition = ConfigurationDefinition()

# The remote host IDs: all configured hosts by default
host_ids = find_host_ids()
if host_ids:
    definition.add_positional_optional("host_ids", "string_list", "remote host ids", choices=host_ids, default=host_ids)
else:
    definition.add_fixed("host_ids", "remote host_ids", [])

# Which git remotes to deploy from
definition.add_optional("pts_repo_name", "string", "PTS repository name to deploy remotely", "origin", choices=introspection.pts_git_remotes())
definition.add_optional("skirt_repo_name", "string", "SKIRT repository name to deploy remotely", "origin", choices=introspection.skirt_git_remotes())

# Also deploy locally
definition.add_flag("local", "also deploy locally", True)
# **       © Astronomical Observatory, Ghent University          **
# *****************************************************************

# Import the relevant PTS classes and modules
from pts.core.basics.configuration import ConfigurationDefinition
from pts.core.remote.host import find_host_ids

# -----------------------------------------------------------------

# Build the definition (no configuration file is written)
definition = ConfigurationDefinition(write_config=False)

# Frame layout: how many frames and their pixel size
definition.add_optional("nframes", "positive_integer", "number of frames", 2)
definition.add_optional("npixels", "positive_integer", "number of pixels of the frames", 500)

# Number of random point sources to place
definition.add_optional("nrandom_sources", "positive_integer", "number of point sources", 100)

# Whether to vary the FWHM
definition.add_flag("vary_fwhm", "vary the FWHM", False)

# PSF model (fixed to gaussian) and the noise level
definition.add_fixed("psf_model", "psf model", "gaussian")
definition.add_optional("noise_stddev", "real", "stddev of noise", 5.)

# -----------------------------------------------------------------