def get_parameters(folder):
    """Collect scalar (length-1) initial-data parameters from every XML
    configuration file in *folder*.

    Files whose name contains ``'report'`` are skipped. Files that fail
    to parse are reported and skipped (best-effort scan); previously the
    error was printed without the filename, which made failures
    impossible to attribute — the filename is now included.

    :param folder: directory to scan for ``*.xml`` configuration files
    :return: dict mapping parameter name -> float value
    """
    parameters = {}
    for xml_file in glob.glob(os.path.join(folder, "*.xml")):
        # Report files live in the same folder but are not configurations.
        if 'report' in xml_file:
            continue
        try:
            config = read_config(xml_file)
            initial_data_parameters = get_xml_node(
                config, "config.fvm.initialData.parameters")
            for initial_data_parameter in \
                    initial_data_parameters.getElementsByTagName("parameter"):
                name = get_in_xml(initial_data_parameter, "name")
                length = int(get_in_xml(initial_data_parameter, "length"))
                # Only scalar parameters are collected; vector-valued
                # parameters (length > 1) are deliberately ignored.
                if length == 1:
                    value = float(get_in_xml(initial_data_parameter, "value"))
                    parameters[name] = value
            print(parameters)
        except Exception as e:
            # Best effort: a malformed file must not abort the whole
            # scan, but report which file failed so it can be diagnosed.
            print(f"Failed to read parameters from {xml_file}: {e}")
    return parameters
# Split a UQ run into several smaller runs: parse the requested chunk
# size, validate it against the total sample count in the configuration
# file, and create one output folder per chunk of samples.
parser.add_argument('--number_of_samples_per_run',
                    type=int,
                    required=True,
                    help='Number of samples per run/individual file')
parser.add_argument('--config',
                    type=str,
                    required=True,
                    help="Path to configuration file")
args = parser.parse_args()

config = read_config(args.config)
samples = get_in_xml(config, "config.uq.samples")

# The check is `samples % number_of_samples_per_run == 0`, i.e. the
# total sample count must divide evenly into per-run chunks. The
# original error message stated the relation backwards and was missing
# a newline after "Given:"; both are fixed here.
if int(samples) % args.number_of_samples_per_run:
    raise Exception(
        "The total number of samples must be a multiple of "
        "number_of_samples_per_run. Given:\n"
        f"\tnumber_of_samples_per_run: {args.number_of_samples_per_run}\n"
        f"\ttotal number of samples : {samples}")

number_of_config_files = int(samples) // args.number_of_samples_per_run

# One folder per chunk, named after the half-open sample range it covers.
for n in range(number_of_config_files):
    sample_start = n * args.number_of_samples_per_run
    sample_end = (n + 1) * args.number_of_samples_per_run
    outfolder = f"sample_{sample_start}_{sample_end}"
    os.mkdir(outfolder)
# NOTE(review): this chunk begins mid-way through a parser.add_argument
# call and ends mid-way through assembling the bsub command line; the
# surrounding code is outside this view.
type=str,
                    help='Moves the results to another folder after simulation is done')
parser.add_argument('--base_path',
                    type=str,
                    default=repo.working_dir,
                    help='Base path of this repository')
args = parser.parse_args()

configuration_file = args.config
configuration_path = os.path.abspath(os.path.dirname(configuration_file))
config = read_config(configuration_file)

# First entry of the dimension string (e.g. "256 256 256") is the
# per-direction grid resolution.
resolution = int(
    get_in_xml(config, 'config.fvm.grid.dimension').split(" ")[0])

# One node per 256 cells per direction (at least one); total node count
# scales cubically, multiplied by the number of parallel samples.
# NOTE(review): the 256-cells-per-node heuristic looks cluster-specific
# — confirm against the target machine.
number_of_nodes_per_direction = max(1, resolution // 256)
total_number_of_nodes = number_of_nodes_per_direction**3 * args.multi_sample

# LSF resource request: 8 exclusive GPUs and 8000 MB per host, with all
# 8 slots placed on a single host (span[ptile=8]).
r_command_append = "rusage[ngpus_excl_p=8,mem=8000] span[ptile=8]"

# Derive a job identifier from the configuration path, keeping only
# characters that are safe in a job name.
job_chain = os.path.abspath(configuration_file)
job_chain = ''.join(ch for ch in job_chain if ch.isalnum() or ch == '_')

# bsub submission command; the argument list continues below this chunk.
command_to_run = [
    'bsub',
    '-R',
    r_command_append,
    '-W',
# Stage one working directory per (perturbation, resolution) pair:
# set the sample count and grid dimension in the config, point the
# initial-data 'epsilon' parameter at the current perturbation, and
# copy the initial-data python script next to the generated config.
# NOTE(review): `n` is not used in the visible span; the loop body may
# continue past this chunk, so it is left in place.
for n, perturbation in enumerate(args.perturbations):
    # e.g. perturbation "0.01" -> folder name "p0_01"
    perturbation_folder = "p{}".format(perturbation.replace(".", "_"))
    os.mkdir(perturbation_folder)
    for m, resolution in enumerate(args.resolutions):
        resolution_folder = f"{perturbation_folder}/N{resolution}"
        os.mkdir(resolution_folder)
        # Sample counts are given per resolution, in matching order
        # (presumably len(args.samples) == len(args.resolutions) —
        # TODO confirm; a mismatch raises IndexError here).
        samples = args.samples[m]
        set_in_xml(config, "config.uq.samples", samples)
        set_in_xml(config, "config.fvm.grid.dimension",
                   f"{resolution} {resolution} {resolution}")
        python_file = get_in_xml(config, "config.initialData.python")
        # Overwrite the 'epsilon' initial-data parameter with the
        # current perturbation amplitude.
        initial_data_parameters = get_xml_node(
            config, "config.initialData.parameters")
        for initial_data_parameter in \
                initial_data_parameters.getElementsByTagName("parameter"):
            name = get_in_xml(initial_data_parameter, "name")
            if name == "epsilon":
                set_in_xml(initial_data_parameter, "value", perturbation)
        # The initial-data script is resolved relative to the original
        # config and must live next to the generated one.
        shutil.copyfile(
            os.path.join(os.path.dirname(args.config), python_file),
            os.path.join(resolution_folder, python_file))
# Build a SLURM batch script for the simulation described by the
# configuration file. NOTE(review): the submit_file f-string is not
# terminated within this chunk — it continues below this view.
parser.add_argument(
    '--multi_sample',
    type=int,
    default=1,
    help='Number of processes to use in the sample direction')
parser.add_argument('--dry_run',
                    action='store_true',
                    help='Only do a dry run, no actual submission done')
args = parser.parse_args()

configuration_file = args.config
configuration_path = os.path.abspath(os.path.dirname(configuration_file))
config = read_config(configuration_file)

name = get_in_xml(config, 'config.fvm.name').strip()
# First entry of the dimension string (e.g. "256 256 256") is the
# per-direction grid resolution.
resolution = int(
    get_in_xml(config, 'config.fvm.grid.dimension').split(" ")[0])

# One node per 256 cells per direction (at least one); total node count
# scales cubically, multiplied by the number of parallel samples.
# NOTE(review): the 256-cells-per-node heuristic looks cluster-specific
# — confirm against the target machine.
number_of_nodes_per_direction = max(1, resolution // 256)
total_number_of_nodes = number_of_nodes_per_direction**3 * args.multi_sample

# SLURM batch script header (script body continues below this chunk).
submit_file = f"""#!/bin/bash -l
#SBATCH --job-name="{name}_{resolution}"
#SBATCH --time=24:00:00
#SBATCH --nodes={total_number_of_nodes}
#SBATCH --ntasks-per-core=1
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=1
#SBATCH --partition=normal