Example #1
    def uninstall_skirt_local(self):
        """
        This function ...
        :return:
        """

        # Inform the user
        log.info("Uninstalling SKIRT locally ...")

        # Check installation
        skirt_root_path = introspection.skirt_root_dir
        if not fs.is_directory(skirt_root_path):
            log.warning("SKIRT was not found locally")
            return

        # Debugging
        log.debug("Removing the SKIRT directory ...")

        # Remove the entire directory
        fs.remove_directory(skirt_root_path)

        # Debugging
        log.debug("Removing lines from shell configuration ...")

        # Remove lines from shell configuration file
        comment = "For SKIRT and FitSKIRT, added by PTS (Python Toolkit for SKIRT)"
        terminal.remove_aliases_and_variables_with_comment(comment)
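
This uninstall helper (and the similar ones below) follows one pattern: guard with fs.is_directory so that a missing installation only triggers a warning, then remove the whole tree and clean up the shell configuration. A rough standard-library sketch of the guard-then-remove step, assuming fs.is_directory and fs.remove_directory behave like os.path.isdir and shutil.rmtree (the helper name is hypothetical):

import os
import shutil

def remove_if_present(path):
    # Bail out quietly when there is nothing to remove
    if not os.path.isdir(path):
        return False
    # Delete the directory tree, as fs.remove_directory presumably does
    shutil.rmtree(path)
    return True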
Example #2
    def uninstall_conda_local(self):
        """
        This function ...
        :return:
        """

        # Inform the user
        log.info("Uninstalling the Conda python distribution locally ...")

        # Check installation
        installation_path = fs.join(fs.home(), "miniconda")
        if not fs.is_directory(installation_path):
            log.warning("Conda was not found locally")
            return

        # Debugging
        log.debug("Removing the Conda directory ...")

        # Remove the directory
        fs.remove_directory(installation_path)

        # Debugging
        log.debug("Removing lines from shell configuration ...")

        # Remove lines from shell configuration file
        comment = "For Conda, added by PTS (Python Toolkit for SKIRT)"
        terminal.remove_aliases_and_variables_with_comment(comment)
        terminal.remove_from_path_variable_containing("miniconda/bin")
Example #3
    def uninstall_conda_local(self):

        """
        This function ...
        :return:
        """

        # Inform the user
        log.info("Uninstalling the Conda python distribution locally ...")

        # Check installation
        installation_path = fs.join(fs.home, "miniconda")
        if not fs.is_directory(installation_path):
            log.warning("Conda was not found locally")
            return

        # Debugging
        log.debug("Removing the Conda directory ...")

        # Remove the directory
        fs.remove_directory(installation_path)

        # Debugging
        log.debug("Removing lines from shell configuration ...")

        # Remove lines from shell configuration file
        comment = "For Conda, added by PTS (Python Toolkit for SKIRT)"
        terminal.remove_aliases_and_variables_with_comment(comment)
        terminal.remove_from_path_variable_containing("miniconda/bin")
Example #4
File: test.py Project: rag9704/PTS
    def check_reference_data(self):
        """
        This function ...
        :return:
        """

        # Inform the user
        log.info("Checking the reference data ...")

        # Determine simulation directory
        if self.config.reference_path is not None:
            data_path = self.config.reference_path
        elif self.config.reference_test is not None:
            data_path = fs.join(introspection.pts_tests_dir,
                                self.config.reference_test, "data")
        else:
            raise ValueError(
                "Reference path and reference test settings are None")

        # Check whether the directory exists and is not empty
        if not fs.is_directory(data_path):
            raise ValueError("Directory does not exist: " + data_path)
        if fs.is_empty(data_path):
            raise ValueError("Empty directory: " + data_path)

        # Remove data directory for this test
        fs.remove_directory(self.data_path)

        # Set data path
        self.data_path = data_path
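
Before the reference data is adopted, the directory is required to both exist and be non-empty. A minimal sketch of that validation with the standard library, assuming fs.is_empty simply tests whether the directory has no entries (the function name is illustrative):

import os

def check_data_directory(data_path):
    # Refuse to continue when the directory is missing or empty
    if not os.path.isdir(data_path):
        raise ValueError("Directory does not exist: " + data_path)
    if not os.listdir(data_path):
        raise ValueError("Empty directory: " + data_path)
    return data_path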
Example #5
    def uninstall_skirt_local(self):

        """
        This function ...
        :return:
        """

        # Inform the user
        log.info("Uninstalling SKIRT locally ...")

        # Check installation
        skirt_root_path = introspection.skirt_root_dir
        if not fs.is_directory(skirt_root_path):
            log.warning("SKIRT was not found locally")
            return

        # Debugging
        log.debug("Removing the SKIRT directory ...")

        # Remove the entire directory
        fs.remove_directory(skirt_root_path)

        # Debugging
        log.debug("Removing lines from shell configuration ...")

        # Remove lines from shell configuration file
        comment = "For SKIRT, added by PTS (Python Toolkit for SKIRT)"
        terminal.remove_aliases_and_variables_with_comment(comment)
Example #6
    def uninstall_pts_local(self):

        """
        This function ...
        :return:
        """

        # Inform the user
        log.info("Uninstalling PTS locally ...")

        # Check installation
        pts_root_path = introspection.pts_root_dir
        if not fs.is_directory(pts_root_path):
            log.warning("PTS could not be found locally (which is certainly weird) ...")
            return

        # Debugging
        log.debug("Removing the PTS directory ...")

        # Remove the entire directory
        fs.remove_directory(pts_root_path)

        # Debugging
        log.debug("Removing lines from shell configuration ...")

        # Remove lines from shell configuration file
        comment = "For PTS, added by PTS (Python Toolkit for SKIRT)"
        terminal.remove_aliases_and_variables_with_comment(comment)
Example #7
    def uninstall_pts_local(self):
        """
        This function ...
        :return:
        """

        # Inform the user
        log.info("Uninstalling PTS locally ...")

        # Check installation
        pts_root_path = introspection.pts_root_dir
        if not fs.is_directory(pts_root_path):
            log.warning(
                "PTS could not be found locally (which is certainly weird) ..."
            )
            return

        # Debugging
        log.debug("Removing the PTS directory ...")

        # Remove the entire directory
        fs.remove_directory(pts_root_path)

        # Debugging
        log.debug("Removing lines from shell configuration ...")

        # Remove lines from shell configuration file
        comment = "For PTS, added by PTS (Python Toolkit for SKIRT)"
        terminal.remove_aliases_and_variables_with_comment(comment)
Example #8
    def load_reference(self):
        """
        This function ...
        :return: 
        """

        # Inform the user
        log.info("Loading the reference simulation ...")

        # Determine simulation directory
        if self.config.reference_path is not None:
            simulation_path = self.config.reference_path
        elif self.config.reference_test is not None:
            simulation_path = fs.join(introspection.pts_tests_dir,
                                      self.config.reference_test, "ref")
        else:
            raise ValueError(
                "Reference path and reference test settings are None")

        # Check whether present
        if not fs.is_directory(simulation_path):
            raise ValueError(
                "The reference simulation path could not be found")

        # Look for simulation
        prefix, ski_path, in_path, out_path = find_one_simulation_in_path(
            simulation_path)

        # Load the ski file
        self.ski = LabeledSkiFile(ski_path)

        # Other paths
        extr_path = fs.join(simulation_path, "extr")
        plot_path = fs.join(simulation_path, "plot")
        misc_path = fs.join(simulation_path, "misc")

        # Check existence
        if not fs.is_directory(extr_path):
            raise IOError("Extraction directory not found")
        if not fs.is_directory(plot_path):
            raise IOError("Plotting directory not found")
        if not fs.is_directory(misc_path):
            raise IOError("Misc directory not found")

        # Copy
        self.copy_reference(ski_path, in_path, out_path, extr_path, plot_path,
                            misc_path)
Example #9
File: restore.py Project: SKIRT/PTS
    def has_best(self):

        """
        This function ...
        :return:
        """

        return fs.is_directory(self.restore_best_path)
Example #10
File: restore.py Project: SKIRT/PTS
    def has_generation(self, generation_name):

        """
        This function ...
        :param generation_name:
        :return:
        """

        return fs.is_directory(self.get_generation_restore_path(generation_name))
Example #11
File: restore.py Project: SKIRT/PTS
    def restore_path(self):

        """
        This function ...
        :return:
        """

        path = fs.join(self.fitting_run.refitting_path, self.config.name)
        if not fs.is_directory(path): raise ValueError("'" + self.config.name + "' is not a backup of a fit")
        return path
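
Examples #9 to #11 wrap fs.is_directory in small accessors, so callers can ask whether a backup or generation exists without building paths themselves. A stripped-down sketch of the same idea using os.path instead of the fs module; the class, attribute names and the "best" subdirectory are hypothetical stand-ins for the real fitting-run objects:

import os

class RestorerSketch(object):

    def __init__(self, refitting_path, name):
        self.refitting_path = refitting_path
        self.name = name

    @property
    def restore_path(self):
        # Fail early when the requested backup does not exist
        path = os.path.join(self.refitting_path, self.name)
        if not os.path.isdir(path):
            raise ValueError("'" + self.name + "' is not a backup of a fit")
        return path

    @property
    def has_best(self):
        # True when the 'best' subdirectory of the backup is present
        return os.path.isdir(os.path.join(self.restore_path, "best"))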
Example #12
File: test.py Project: SKIRT/PTS
    def load_reference(self):

        """
        This function ...
        :return: 
        """

        # Inform the user
        log.info("Loading the reference simulation ...")

        # Determine simulation directory
        if self.config.reference_path is not None: simulation_path = self.config.reference_path
        elif self.config.reference_test is not None: simulation_path = fs.join(introspection.pts_tests_dir, self.config.reference_test, "ref")
        else: raise ValueError("Reference path and reference test settings are None")

        # Check whether present
        if not fs.is_directory(simulation_path): raise ValueError("The reference simulation path could not be found")

        # Look for simulation
        prefix, ski_path, in_path, out_path = find_one_simulation_in_path(simulation_path)

        # Load the ski file
        self.ski = SkiFile(ski_path)

        # Other paths
        extr_path = fs.join(simulation_path, "extr")
        plot_path = fs.join(simulation_path, "plot")
        misc_path = fs.join(simulation_path, "misc")
        
        # Check existence
        if not fs.is_directory(extr_path): raise IOError("Extraction directory not found")
        if not fs.is_directory(plot_path): raise IOError("Plotting directory not found")
        if not fs.is_directory(misc_path): raise IOError("Misc directory not found")

        # Copy
        self.copy_reference(ski_path, in_path, out_path, extr_path, plot_path, misc_path)
Example #13
File: test.py Project: SKIRT/PTS
    def check_reference_data(self):

        """
        This function ...
        :return:
        """

        # Inform the user
        log.info("Checking the reference data ...")

        # Determine simulation directory
        if self.config.reference_path is not None: data_path = self.config.reference_path
        elif self.config.reference_test is not None: data_path = fs.join(introspection.pts_tests_dir, self.config.reference_test, "data")
        else: raise ValueError("Reference path and reference test settings are None")

        # Check whether the directory exists and is not empty
        if not fs.is_directory(data_path): raise ValueError("Directory does not exist: " + data_path)
        if fs.is_empty(data_path): raise ValueError("Empty directory: " + data_path)

        # Remove data directory for this test
        fs.remove_directory(self.data_path)

        # Set data path
        self.data_path = data_path
Example #14
File: test.py Project: rag9704/PTS
    def setup(self, **kwargs):
        """
        This function ...
        :param kwargs:
        :return:
        """

        # Call the setup function of the base class
        super(NGC4013Test, self).setup(**kwargs)

        # Check the data
        if fs.is_directory(norm_path): self.data_path = norm_path
        else:

            # Create the data directory and get the data
            self.data_path = fs.create_directory_in(self.path, "data")
            self.get_data()

        # Create the reference directory and subdirectories
        self.reference_path = fs.create_directory_in(self.path, "ref")
        self.reference_output_path = fs.create_directory_in(
            self.reference_path, "out")
        self.reference_ski_path = fs.join(self.reference_path, "NGC4013.ski")
        self.reference_fski_path = fs.join(self.reference_path, "NGC4013.fski")
Example #15
# Loop over the remote hosts
for host_id in config.remotes:

    # Check whether the remote is available
    if config.full:
        remote = Remote()
        if not remote.setup(host_id):
            log.warning("The remote host '" + host_id + "' is not available: skipping ...")
            continue
    else: remote = None

    # Determine the path to the run directory for the specified remote host
    host_run_path = fs.join(introspection.skirt_run_dir, host_id)

    # Check if there are simulations
    if not fs.is_directory(host_run_path):
        log.debug("No run directory for host '" + host_id + "'")
        continue
    if fs.is_empty(host_run_path): log.debug("No simulations for host '" + host_id + "'")

    # Loop over the simulation files in the run directory
    for path, name in fs.files_in_path(host_run_path, extension="sim", returns=["path", "name"], sort=int):

        # Skip
        if config.ids is not None and int(name) not in config.ids: continue

        # Inform the user
        log.info("Removing simulation " + name + " ...")

        # Fully clear
        if config.full:
Example #16
# Interpolation method
parser.add_argument("--method", type=str, help="the interpolation method to use", default="biharmonic")

# Parse the command line arguments
arguments = parser.parse_args()

# -----------------------------------------------------------------

# If an input directory is given
if arguments.input is not None:

    # Determine the full path to the input directory
    input_path = fs.absolute(arguments.input)

    # Give an error if the input directory does not exist
    if not fs.is_directory(input_path): raise argparse.ArgumentError(input_path, "The input directory does not exist")

# If no input directory is given, assume the input is placed in the current working directory
else: input_path = fs.cwd()

# -----------------------------------------------------------------

# If an output directory is given
if arguments.output is not None:
    
    # Determine the full path to the output directory
    output_path = fs.absolute(arguments.output)
    
    # Create the directory if it does not yet exist
    if not fs.is_directory(output_path): fs.create_directory(output_path)
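
The input/output handling above recurs in several of these scripts: resolve the argument to an absolute path, require the input directory to exist, and create the output directory on demand. A compact standard-library sketch of the same checks; the function name and the fallback to the working directory for the output are illustrative:

import os

def resolve_directories(input_arg, output_arg):
    # Fall back to the current working directory when no input is given
    input_path = os.path.abspath(input_arg) if input_arg is not None else os.getcwd()
    if not os.path.isdir(input_path):
        raise ValueError("The input directory does not exist: " + input_path)

    # Create the output directory if it does not yet exist
    output_path = os.path.abspath(output_arg) if output_arg is not None else os.getcwd()
    if not os.path.isdir(output_path):
        os.makedirs(output_path)

    return input_path, output_path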
Example #17
        galaxy_name = galactic_catalog["Name"][i]
        break

# If the galaxy name is still None, something is wrong with the galaxy catalog (principal not defined)
if galaxy_name is None:
    raise RuntimeError(
        "The galactic catalog is invalid: principal galaxy not defined")

# Determine the path to the user catalogs directory
catalogs_user_path = fs.join(introspection.pts_user_dir, "magic", "catalogs")

# Determine the path to the directory to contain the catalogs for this galaxy
galaxy_user_path = fs.join(catalogs_user_path, galaxy_name)

# Cache the galaxy and stellar catalog
if fs.is_directory(galaxy_user_path):

    old_galactic_catalog_path = fs.join(galaxy_user_path, "galaxies.dat")
    old_stellar_catalog_path = fs.join(galaxy_user_path, "stars.dat")

    if fs.is_file(old_galactic_catalog_path):

        # Open the 'old' galaxy catalog
        old_galaxy_catalog = tables.from_file(old_galactic_catalog_path)

        # Create merged galaxy catalog
        galaxy_catalog = catalogs.merge_galactic_catalogs(
            galactic_catalog, old_galaxy_catalog)

        # Save the merged catalog
        path = fs.join(galaxy_user_path, "galaxies.dat")
Example #18
# Show the fitting filters
print("")
print("FITTING FILTERS:")
print("")
for fltr in fitting_run.fitting_filters: print(" - " + tostr(fltr))
print("")

# -----------------------------------------------------------------

# Determine the path to the wavelength grids directory
grids_path = fitting_run.wavelength_grids_path

# -----------------------------------------------------------------

# Check whether there are already wavelength grids
if fs.is_directory(grids_path) and not fs.is_empty(grids_path):
    if config.backup: fs.backup_directory(grids_path)
    fs.clear_directory(grids_path)

# -----------------------------------------------------------------

# Create a wavelength grids table
table = WavelengthGridsTable()

# -----------------------------------------------------------------

# Get fixed wavelengths
fixed_wavelengths = fitting_run.normalization_wavelengths

# -----------------------------------------------------------------
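
Example #18 guards the wavelength grids directory: when it already exists and is not empty, it is optionally backed up and then cleared before new grids are written. A hedged sketch of that backup-and-clear step with the standard library, assuming fs.backup_directory copies the tree aside and fs.clear_directory removes the contents while keeping the directory itself (the "_backup" suffix is an assumption):

import os
import shutil

def backup_and_clear(path, backup=True):
    # Nothing to do when the directory is absent or already empty
    if not os.path.isdir(path) or not os.listdir(path):
        return
    # Keep a copy of the existing contents next to the original
    if backup:
        shutil.copytree(path, path + "_backup")
    # Remove every entry but keep the directory itself
    for name in os.listdir(path):
        entry = os.path.join(path, name)
        if os.path.isdir(entry):
            shutil.rmtree(entry)
        else:
            os.remove(entry)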
Example #19
    # Loop over the remotes, look for match
    for host_id in simulation_paths:
        if simulation_name in simulation_paths[host_id]:
            the_host_id = host_id
            break

    # Simulation file not found
    if the_host_id is None:
        if config.ignore_missing: continue
        else: raise ValueError("Cannot find simulation file for simulation '" + simulation_name + "'")

    # Determine the output path
    if config.per_host:
        new_path = fs.join(output_path, the_host_id)
        if not fs.is_directory(new_path): fs.create_directory(new_path)
    else: new_path = output_path

    # Get the original simulation file path
    filepath = simulation_paths[the_host_id][simulation_name]

    # Debugging
    log.debug("Moving the '" + simulation_name + "' simulation ...")

    # Move the file
    if config.rename: new_name = simulation_name + ".sim"
    else: new_name = None
    fs.move_file(filepath, new_path, new_name=new_name)

# -----------------------------------------------------------------
Example #20
        # Determine the preparation name
        if frame.filter is not None: prep_name = str(frame.filter)
        else: prep_name = image_name

        # Set the row entries
        names_column.append(image_name)
        paths_column.append(image_path)
        prep_names_column.append(prep_name)

# Create the table
data = [names_column, paths_column, prep_names_column]
table = tables.new(data, names)

# Check whether the preparation directory exists
prep_path = fs.join(config.path, "prep")
if not fs.is_directory(prep_path): fs.create_directory(prep_path)

# Save the table
prep_info_table_path = fs.join(prep_path, "prep_info.dat")
tables.write(table, prep_info_table_path, format="ascii.ecsv")

# -----------------------------------------------------------------

# Create a PreparationInitializer instance
initializer = PreparationInitializer(config)

# Run the data initializer
initializer.run()

# -----------------------------------------------------------------
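
Example #20 above assembles a preparation-info table and writes it to prep/prep_info.dat in ECSV format, creating the prep directory first if needed. The tables module looks like a thin wrapper around astropy tables; a hedged sketch with astropy.table directly, where the column names and the helper function are illustrative:

import os
from astropy.table import Table

def write_prep_info(base_path, names_column, paths_column, prep_names_column):
    # Make sure the preparation directory exists, as in the example
    prep_path = os.path.join(base_path, "prep")
    if not os.path.isdir(prep_path):
        os.makedirs(prep_path)

    # Build the table and save it in ECSV format
    table = Table([names_column, paths_column, prep_names_column],
                  names=("Image name", "Image path", "Preparation name"))
    table.write(os.path.join(prep_path, "prep_info.dat"), format="ascii.ecsv")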
Example #21
# -----------------------------------------------------------------

# Set figsize
if config.small:
    figsize = "8,6"
    figsize_timelines = "8,8"
else:
    figsize = "12,9"
    figsize_timelines = "12,12"

# -----------------------------------------------------------------

# Locate the scaling test suite directory
suite_path = fs.join(fs.cwd(), config.suite_name)
if not fs.is_directory(suite_path):
    raise ValueError("The directory '" + suite_path + "' does not exist")

# -----------------------------------------------------------------

# Make directory for output
output_path = fs.create_directory_in(fs.cwd(),
                                     time.unique_name("scaling_plots"))

# Make subdirectories
single_node_path = fs.create_directory_in(output_path,
                                          "Single-node comparison")
multi_node_path = fs.create_directory_in(
    output_path, "Load balancing and multi-node scaling")
communication_path = fs.create_directory_in(output_path, "Communication")
hybridization_path = fs.create_directory_in(output_path, "Hybridization")
Example #22
parser.add_argument("--special", type=str, help="the name of the file specifying regions with objects needing special attention (in sky coordinates!)")
parser.add_argument("--bad", type=str, help="the name of the file specifying regions that have to be added to the mask of bad pixels")

# Parse the command line arguments
arguments = parser.parse_args()

# -----------------------------------------------------------------

# If an input directory is given
if arguments.input is not None:

    # Determine the full path to the input directory
    input_path = fs.absolute(arguments.input)

    # Give an error if the input directory does not exist
    if not fs.is_directory(input_path):
        raise argparse.ArgumentError(input_path, "The input directory does not exist")

# If no input directory is given, assume the input is placed in the current working directory
else: input_path = fs.cwd()

# -----------------------------------------------------------------

# If an output directory is given
if arguments.output is not None:
    
    # Determine the full path to the output directory
    output_path = fs.absolute(arguments.output)
    
    # Create the directory if it does not yet exist
    if not fs.is_directory(output_path): fs.create_directory(output_path)
Example #23
        # Determine the preparation name
        if frame.filter is not None: prep_name = str(frame.filter)
        else: prep_name = image_name

        # Set the row entries
        names_column.append(image_name)
        paths_column.append(image_path)
        prep_names_column.append(prep_name)

# Create the table
data = [names_column, paths_column, prep_names_column]
table = tables.new(data, names)

# Check whether the preparation directory exists
prep_path = fs.join(config.path, "prep")
if not fs.is_directory(prep_path): fs.create_directory(prep_path)

# Save the table
prep_info_table_path = fs.join(prep_path, "prep_info.dat")
tables.write(table, prep_info_table_path, format="ascii.ecsv")

# -----------------------------------------------------------------

# Create a PreparationInitializer instance
initializer = PreparationInitializer(config)

# Run the data initializer
initializer.run()

# -----------------------------------------------------------------
Example #24
File: run.py Project: SKIRT/PTS
def run_configurable(table_matches, args, tables):

    """
    This function ...
    :param table_matches:
    :param args:
    :param tables:
    :return:
    """

    # Determine the configuration method
    configuration_method = None
    if args.interactive: configuration_method = "interactive"
    elif args.arguments: configuration_method = "arguments"
    elif args.configfile is not None: configuration_method = "file:" + args.configfile
    elif args.rerun: configuration_method = "last"

    # Regenerate the configuration method option
    if args.interactive: configuration_method_argument = "--interactive"
    elif args.arguments: configuration_method_argument = "--arguments"
    elif args.configfile is not None: configuration_method_argument = "--configfile '" + args.configfile + "'"
    elif args.rerun: configuration_method_argument = "--rerun"
    else: configuration_method_argument = ""

    # Resolve
    subproject, index = table_matches[0]
    resolved = introspection.resolve_from_match(subproject, tables[subproject], index)

    # Get properties
    title = resolved.title
    command_name = resolved.command_name
    hidden = resolved.hidden
    description = resolved.description
    module_path = resolved.module_path
    class_name = resolved.class_name
    configuration_method_table = resolved.configuration_method
    configuration_module_path = resolved.configuration_module_path
    subproject_path = introspection.pts_subproject_dir(subproject)

    # Set
    sys.argv[0] = fs.join(introspection.pts_root_dir, module_path.replace(".", "/") + ".py") # this is actually not necessary (and not really correct, it's not like we are calling the module where the class is..)
    del sys.argv[1] # but this is important

    # Get a list of the leftover arguments
    leftover_arguments = sys.argv[1:]

    # Welcome message
    if subproject == "modeling": welcome_modeling()
    elif subproject == "magic": welcome_magic()
    elif subproject == "dustpedia": welcome_dustpedia()
    elif subproject == "evolve": welcome_evolve()

    # Special
    if subproject == "modeling": check_modeling_cwd(command_name, fs.cwd())

    # Get the configuration definition
    definition = introspection.get_configuration_definition_pts_not_yet_in_pythonpath(configuration_module_path)

    # If not specified on the command line (before the command name), then use the default specified in the commands.dat file
    if configuration_method is None: configuration_method = configuration_method_table

    # Check whether arguments are passed and the configuration method is interactive
    if configuration_method == "interactive" and len(leftover_arguments) > 0: raise ValueError("Arguments on the command-line are not supported by default for this command. Run with pts --arguments to change this behaviour.")

    # Create the configuration
    config = create_configuration(definition, command_name, description, configuration_method)

    ## SAVE THE CONFIG if requested
    if config.write_config:
        config_filepath = config.config_file_path(command_name)
        config.saveto(config_filepath)
    else: config_filepath = None

    # If this is not a re-run
    if not args.rerun:
        if not fs.is_directory(introspection.pts_user_config_dir): fs.create_directory(introspection.pts_user_config_dir)
        # CACHE THE CONFIG
        config_cache_path = fs.join(introspection.pts_user_config_dir, command_name + ".cfg")
        config.saveto(config_cache_path)

    # Setup function
    if subproject == "modeling": setup_modeling(command_name, fs.cwd(), configuration_method_argument)
    elif subproject == "magic": setup_magic(command_name, fs.cwd())
    elif subproject == "dustpedia": setup_dustpedia(command_name, fs.cwd())
    elif subproject == "evolve": setup_evolve(command_name, fs.cwd())

    # Initialize the logger
    log = initialize_pts(config, remote=args.remote, command_name=command_name)

    # Exact command name
    exact_command_name = subproject + "/" + command_name

    # If the PTS command has to be executed remotely
    if args.remote is not None: run_remotely(exact_command_name, config, args.keep, args.remote, log)

    # The PTS command has to be executed locally
    else: run_locally(exact_command_name, module_path, class_name, config, args.input_files, args.output_files, args.output, log)

    # Finish function
    if subproject == "modeling": finish_modeling(command_name, fs.cwd(), config_path=config_filepath)
    elif subproject == "magic": finish_magic(command_name, fs.cwd())
    elif subproject == "dustpedia": finish_dustpedia(command_name, fs.cwd())
    elif subproject == "evolve": finish_evolve(command_name, fs.cwd())
Example #25
# Visualization
parser.add_argument("--visualise", action="store_true", help="make visualisations")

# Parse the command line arguments
arguments = parser.parse_args()

# -----------------------------------------------------------------

# If an input directory is given
if arguments.input is not None:

    # Determine the full path to the input directory
    input_path = fs.absolute(arguments.input)

    # Give an error if the input directory does not exist
    if not fs.is_directory(input_path): raise argparse.ArgumentError(input_path, "The input directory does not exist")

# If no input directory is given, assume the input is placed in the current working directory
else: input_path = fs.cwd()

# -----------------------------------------------------------------

# If an output directory is given
if arguments.output is not None:
    
    # Determine the full path to the output directory
    output_path = fs.absolute(arguments.output)
    
    # Create the directory if it does not yet exist
    if not fs.is_directory(output_path): fs.create_directory(output_path)
Example #26
# No input?
if len(sys.argv) == 1: no_input(parser, scripts, tables)

# -----------------------------------------------------------------

# Parse the command-line arguments
args = parser.parse_args()

# -----------------------------------------------------------------

if args.version: show_version()

# -----------------------------------------------------------------

# Check input and output options, should be directories
if args.input is not None and not fs.is_directory(args.input): raise ValueError("Input path should be an existing directory")
if args.output is not None and not fs.is_directory(args.output): raise ValueError("Output path should be an existing directory")

# -----------------------------------------------------------------

# Get the name of the do script
script_name = args.do_command

# Construct clean arguments list
sys.argv = ["pts", args.do_command] + args.options

# -----------------------------------------------------------------

# Find matches
matches = introspection.find_matches_scripts(script_name, scripts)
table_matches = introspection.find_matches_tables(script_name, tables)
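
Example #26 (repeated in #28 below) validates --input and --output only after parsing, raising a ValueError by hand. An alternative is to let argparse perform the check through a type= callable; a small sketch in which the validator function is hypothetical but the argparse calls are standard:

import argparse
import os

def existing_directory(value):
    # Reject anything that is not an existing directory at parse time
    if not os.path.isdir(value):
        raise argparse.ArgumentTypeError("not an existing directory: " + value)
    return value

parser = argparse.ArgumentParser()
parser.add_argument("--input", type=existing_directory, help="input directory")
parser.add_argument("--output", type=existing_directory, help="output directory")
args = parser.parse_args()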
Example #27
    # Check whether the remote is available
    if config.full:
        remote = Remote()
        if not remote.setup(host_id):
            log.warning("The remote host '" + host_id +
                        "' is not available: skipping ...")
            continue
    else:
        remote = None

    # Determine the path to the run directory for the specified remote host
    host_run_path = fs.join(introspection.skirt_run_dir, host_id)

    # Check if there are simulations
    if not fs.is_directory(host_run_path):
        log.debug("No run directory for host '" + host_id + "'")
        continue
    if fs.is_empty(host_run_path):
        log.debug("No simulations for host '" + host_id + "'")

    # Loop over the simulation files in the run directory
    for path, name in fs.files_in_path(host_run_path,
                                       extension="sim",
                                       returns=["path", "name"],
                                       sort=int):

        # Skip
        if config.ids is not None and int(name) not in config.ids: continue

        # Inform the user
Example #28
# No input?
if len(sys.argv) == 1: no_input(parser, scripts, tables)

# -----------------------------------------------------------------

# Parse the command-line arguments
args = parser.parse_args()

# -----------------------------------------------------------------

if args.version: show_version()

# -----------------------------------------------------------------

# Check input and output options, should be directories
if args.input is not None and not fs.is_directory(args.input):
    raise ValueError("Input path should be an existing directory")
if args.output is not None and not fs.is_directory(args.output):
    raise ValueError("Output path should be an existing directory")

# -----------------------------------------------------------------

# Get the name of the do script
script_name = args.do_command

# Construct clean arguments list
sys.argv = ["pts", args.do_command] + args.options

# -----------------------------------------------------------------

# Find matches
Example #29
    if galactic_catalog["Principal"][i]:

        galaxy_name = galactic_catalog["Name"][i]
        break

# If the galaxy name is still None, something is wrong with the galaxy catalog (principal not defined)
if galaxy_name is None: raise RuntimeError("The galactic catalog is invalid: principal galaxy not defined")

# Determine the path to the user catalogs directory
catalogs_user_path = fs.join(introspection.pts_user_dir, "magic", "catalogs")

# Determine the path to the directory to contain the catalogs for this galaxy
galaxy_user_path = fs.join(catalogs_user_path, galaxy_name)

# Cache the galaxy and stellar catalog
if fs.is_directory(galaxy_user_path):

    old_galactic_catalog_path = fs.join(galaxy_user_path, "galaxies.dat")
    old_stellar_catalog_path = fs.join(galaxy_user_path, "stars.dat")

    if fs.is_file(old_galactic_catalog_path):

        # Open the 'old' galaxy catalog
        old_galaxy_catalog = tables.from_file(old_galactic_catalog_path)

        # Create merged galaxy catalog
        galaxy_catalog = catalogs.merge_galactic_catalogs(galactic_catalog, old_galaxy_catalog)

        # Save the merged catalog
        path = fs.join(galaxy_user_path, "galaxies.dat")
        tables.write(galaxy_catalog, path)
Example #30
def run_configurable(table_matches, args, tables):
    """
    This function ...
    :param table_matches:
    :param args:
    :param tables:
    :return:
    """

    # Determine the configuration method
    configuration_method = None
    if args.interactive: configuration_method = "interactive"
    elif args.arguments: configuration_method = "arguments"
    elif args.configfile is not None:
        configuration_method = "file:" + args.configfile
    elif args.rerun:
        configuration_method = "last"

    # Resolve
    subproject, index = table_matches[0]
    resolved = introspection.resolve_from_match(subproject, tables[subproject],
                                                index)

    # Get properties
    title = resolved.title
    command_name = resolved.command_name
    hidden = resolved.hidden
    description = resolved.description
    module_path = resolved.module_path
    class_name = resolved.class_name
    configuration_method_table = resolved.configuration_method
    configuration_module_path = resolved.configuration_module_path
    subproject_path = introspection.pts_subproject_dir(subproject)

    # Set
    sys.argv[0] = fs.join(
        introspection.pts_root_dir,
        module_path.replace(".", "/") + ".py"
    )  # this is actually not necessary (and not really correct, it's not like we are calling the module where the class is..)
    del sys.argv[1]  # but this is important

    # Get a list of the leftover arguments
    leftover_arguments = sys.argv[1:]

    # Welcome message
    if subproject == "modeling": welcome_modeling()
    elif subproject == "magic": welcome_magic()
    elif subproject == "dustpedia": welcome_dustpedia()
    elif subproject == "evolve": welcome_evolve()

    # Get the configuration definition
    definition = introspection.get_configuration_definition_pts_not_yet_in_pythonpath(
        configuration_module_path)

    # If not specified on the command line (before the command name), then use the default specified in the commands.dat file
    if configuration_method is None:
        configuration_method = configuration_method_table

    # Check whether arguments are passed and the configuration method is interactive
    if configuration_method == "interactive" and len(leftover_arguments) > 0:
        raise ValueError(
            "Arguments on the command-line are not supported by default for this command. Run with pts --arguments to change this behaviour."
        )

    # Create the configuration
    config = create_configuration(definition, command_name, description,
                                  configuration_method)

    ## SAVE THE CONFIG if requested
    if config.write_config:
        config.saveto(config.config_file_path(command_name))

    # If this is not a re-run
    if not args.rerun:
        if not fs.is_directory(introspection.pts_user_config_dir):
            fs.create_directory(introspection.pts_user_config_dir)
        # CACHE THE CONFIG
        config_cache_path = fs.join(introspection.pts_user_config_dir,
                                    command_name + ".cfg")
        config.saveto(config_cache_path)

    # Setup function
    if subproject == "modeling": setup_modeling(command_name, fs.cwd())
    elif subproject == "magic": setup_magic(command_name, fs.cwd())
    elif subproject == "dustpedia": setup_dustpedia(command_name, fs.cwd())
    elif subproject == "evolve": setup_evolve(command_name, fs.cwd())

    # Initialize the logger
    log = initialize_pts(config, remote=args.remote, command_name=command_name)

    # Exact command name
    exact_command_name = subproject + "/" + command_name

    # If the PTS command has to be executed remotely
    if args.remote is not None:
        run_remotely(exact_command_name, config, args.keep, args.remote, log)

    # The PTS command has to be executed locally
    else:
        run_locally(exact_command_name, module_path, class_name, config,
                    args.input_files, args.output_files, args.output, log)

    # Finish function
    if subproject == "modeling": finish_modeling(command_name, fs.cwd())
    elif subproject == "magic": finish_magic(command_name, fs.cwd())
    elif subproject == "dustpedia": finish_dustpedia(command_name, fs.cwd())
    elif subproject == "evolve": finish_evolve(command_name, fs.cwd())
Example #31
# -----------------------------------------------------------------

# Set figsize
if config.small:
    figsize = "8,6"
    figsize_timelines = "8,8"
else:
    figsize = "12,9"
    figsize_timelines = "12,12"

# -----------------------------------------------------------------

# Locate the scaling test suite directory
suite_path = fs.join(fs.cwd(), config.suite_name)
if not fs.is_directory(suite_path): raise ValueError("The directory '" + suite_path + "' does not exist")

# -----------------------------------------------------------------

# Make directory for output
output_path = fs.create_directory_in(fs.cwd(), time.unique_name("scaling_plots"))

# Make subdirectories
single_node_path = fs.create_directory_in(output_path, "Single-node comparison")
multi_node_path = fs.create_directory_in(output_path, "Load balancing and multi-node scaling")
communication_path = fs.create_directory_in(output_path, "Communication")
hybridization_path = fs.create_directory_in(output_path, "Hybridization")
photon_packages_path = fs.create_directory_in(output_path, "Increased number of photon packages")
memory_path = fs.create_directory_in(output_path, "Memory scaling")

# -----------------------------------------------------------------
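
Examples #21 and #31 build the whole output tree with fs.create_directory_in, which from its usage appears to create a named subdirectory and return its path. A minimal sketch of such a helper, as an assumption about its behaviour rather than the PTS implementation:

import os

def create_directory_in(parent, name):
    # Create the subdirectory if needed and hand back its full path
    path = os.path.join(parent, name)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path

# Usage mirroring the example above
output_path = create_directory_in(os.getcwd(), "scaling_plots")
single_node_path = create_directory_in(output_path, "Single-node comparison")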