assert parameters["Unique name"][i] == chi_squared["Unique name"][i]

# Get the scores
scores = chi_squared["Chi-squared"]
check = parameters

# Set the scores
ga.set_scores(scores, check)

# -----------------------------------------------------------------

new_generation = last_generation + 1 if last_generation is not None else 0

# Path to the new generation
new_generation_path = fs.join(fs.cwd(), "Generation " + str(new_generation))
fs.create_directory(new_generation_path)

# Path to the new GA instance
new_path = fs.join(new_generation_path, "ga.pickle")

# Path to the new parameters table
new_parameters_path = fs.join(new_generation_path, "parameters.dat")

# -----------------------------------------------------------------

# Generate the new population
ga.generate_new_population()

# -----------------------------------------------------------------

# Initialize the table columns
names_column = []
paths_column = []
prep_names_column = []
# Determine the preparation name
if frame.filter is not None: prep_name = str(frame.filter)
else: prep_name = image_name

# Set the row entries
names_column.append(image_name)
paths_column.append(image_path)
prep_names_column.append(prep_name)

# Create the table
data = [names_column, paths_column, prep_names_column]
table = tables.new(data, names)

# Check whether the preparation directory exists
prep_path = fs.join(config.path, "prep")
if not fs.is_directory(prep_path): fs.create_directory(prep_path)

# Save the table
prep_info_table_path = fs.join(prep_path, "prep_info.dat")
tables.write(table, prep_info_table_path, format="ascii.ecsv")

# -----------------------------------------------------------------

# Create a PreparationInitializer instance
initializer = PreparationInitializer(config)

# Run the data initializer
initializer.run()

# -----------------------------------------------------------------
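# NOTE: a minimal, self-contained sketch of what writing the preparation info
# table in ECSV format amounts to, using astropy directly. The column names
# ("Image name", "Image path", "Preparation name") and the row values are
# assumptions for illustration; the actual 'names' list is defined elsewhere.
from astropy.table import Table

example = Table([["NGC3031_GALEX_FUV"], ["/data/NGC3031_GALEX_FUV.fits"], ["GALEX FUV"]],
                names=["Image name", "Image path", "Preparation name"])
example.write("prep_info_example.dat", format="ascii.ecsv", overwrite=True)

# -----------------------------------------------------------------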
    # Give an error if the input directory does not exist
    if not fs.is_directory(input_path): raise argparse.ArgumentError(input_path, "The input directory does not exist")

# If no input directory is given, assume the input is placed in the current working directory
else: input_path = fs.cwd()

# -----------------------------------------------------------------

# If an output directory is given
if arguments.output is not None:

    # Determine the full path to the output directory
    output_path = fs.absolute(arguments.output)

    # Create the directory if it does not yet exist
    if not fs.is_directory(output_path): fs.create_directory(output_path)

# If no output directory is given, place the output in the current working directory
else: output_path = fs.cwd()

# -----------------------------------------------------------------

# Determine the log file path
logfile_path = fs.join(output_path, time.unique_name("log") + ".txt") if arguments.report else None

# Determine the log level
level = "DEBUG" if arguments.debug else "INFO"

# Initialize the logger
log = logging.setup_log(level=level, path=logfile_path)
log.start("Starting find_sources ...")
    # Check whether there is a stellar catalog file in the galaxy's directory
    if fs.is_file(old_stellar_catalog_path):

        # Open the new stellar catalog
        stellar_catalog = tables.from_file(stellar_catalog_path)

        # Open the 'old' stellar catalog
        old_stellar_catalog = tables.from_file(old_stellar_catalog_path)

        # Create merged stellar catalog
        stellar_catalog = catalogs.merge_stellar_catalogs(stellar_catalog, old_stellar_catalog)

        # Save the merged catalog
        path = fs.join(galaxy_user_path, "stars.dat")
        tables.write(stellar_catalog, path)

    # If a stellar catalog file does not exist yet
    else: fs.copy_file(stellar_catalog_path, galaxy_user_path, "stars.dat")

else:

    # Create the directory to contain the catalogs for this galaxy
    fs.create_directory(galaxy_user_path)

    # Copy the galaxy and stellar catalog files into the new directory
    fs.copy_file(galactic_catalog_path, galaxy_user_path, "galaxies.dat")
    fs.copy_file(stellar_catalog_path, galaxy_user_path, "stars.dat")

# -----------------------------------------------------------------
ga.setMinimax(Consts.minimaxType["minimize"])
ga.setGenerations(5)
ga.setCrossoverRate(0.5)
ga.setPopulationSize(100)
ga.setMutationRate(0.5)

# Evolve
ga.evolve(freq_stats=1)

print("Final generation:", ga.currentGeneration)

# -----------------------------------------------------------------

# Determine the path to the reference directory
ref_path = fs.join(fs.cwd(), "original")
fs.create_directory(ref_path)

# -----------------------------------------------------------------

# Get the best individual and its parameter values
best = ga.bestIndividual()
best_parameter_a = best.genomeList[0]
best_parameter_b = best.genomeList[1]

# Write the best parameter values
best_path = fs.join(ref_path, "best.dat")
with open(best_path, "w") as best_file:
    best_file.write("Parameter a: " + str(best_parameter_a) + "\n")
    best_file.write("Parameter b: " + str(best_parameter_b) + "\n")

# Fit the test data directly for reference
popt, pcov = curve_fit(fit_function, test_data_x, test_data_y)
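# -----------------------------------------------------------------

# NOTE: 'fit_function', 'test_data_x' and 'test_data_y' are defined elsewhere
# in this script. As a minimal, self-contained sketch (the functional form and
# the data values are assumptions for illustration), the call above could look like:
import numpy as np
from scipy.optimize import curve_fit

def fit_function(x, a, b):
    # Hypothetical two-parameter model, matching the two genome entries
    return a * x ** 2 + b * x

test_data_x = np.linspace(0.0, 10.0, 50)
test_data_y = fit_function(test_data_x, 2.0, 3.0) + np.random.normal(scale=0.5, size=50)

popt, pcov = curve_fit(fit_function, test_data_x, test_data_y)

# -----------------------------------------------------------------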
    # Give an error if the input directory does not exist
    if not fs.is_directory(input_path): raise argparse.ArgumentError(input_path, "The input directory does not exist")

# If no input directory is given, assume the input is placed in the current working directory
else: input_path = fs.cwd()

# -----------------------------------------------------------------

# If an output directory is given
if arguments.output is not None:

    # Determine the full path to the output directory
    output_path = fs.absolute(arguments.output)

    # Create the directory if it does not yet exist
    if not fs.is_directory(output_path): fs.create_directory(output_path)

# If no output directory is given, place the output in the current working directory
else: output_path = fs.cwd()

# -----------------------------------------------------------------

# Determine the log file path
logfile_path = fs.join(output_path, time.unique_name("log") + ".txt") if arguments.report else None

# Determine the log level
level = "DEBUG" if arguments.debug else "INFO"

# Initialize the logger
log = logging.setup_log(level=level, path=logfile_path)
log.start("Starting interpolate ...")
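# -----------------------------------------------------------------

# NOTE: a minimal sketch of the argument parser this boilerplate assumes;
# the exact option names in the real script may differ.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("input", nargs="?", help="the input directory")
parser.add_argument("--output", help="the output directory")
parser.add_argument("--report", action="store_true", help="write a log file")
parser.add_argument("--debug", action="store_true", help="enable debug-level logging")
arguments = parser.parse_args()

# -----------------------------------------------------------------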
# Find the host that has this simulation
the_host_id = None

# Loop over the remotes, look for match
for host_id in simulation_paths:
    if simulation_name in simulation_paths[host_id]:
        the_host_id = host_id
        break

# Simulation file not found
if the_host_id is None:
    if config.ignore_missing: continue
    else: raise ValueError("Cannot find simulation file for simulation '" + simulation_name + "'")

# Determine the output path
if config.per_host:
    new_path = fs.join(output_path, the_host_id)
    if not fs.is_directory(new_path): fs.create_directory(new_path)
else: new_path = output_path

# Get the original simulation file path
filepath = simulation_paths[the_host_id][simulation_name]

# Debugging
log.debug("Moving the '" + simulation_name + "' simulation ...")

# Move the file
if config.rename: new_name = simulation_name + ".sim"
else: new_name = None
fs.move_file(filepath, new_path, new_name=new_name)

# -----------------------------------------------------------------
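# NOTE: the lookups above imply that simulation_paths is a nested mapping of
# host ID -> simulation name -> simulation file path. Illustrative values only:
simulation_paths = {
    "cluster": {"simulation_a": "/scratch/user/simulation_a.sim"},
    "workstation": {"simulation_b": "/home/user/sims/simulation_b.sim"},
}

# -----------------------------------------------------------------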
# Reset?
if config.reset:

    # Loop over all FITS files in the remote data directory
    for name, path in remote.files_in_path(remote_data_path, recursive=True, extension="fits", returns=["name", "path"]):

        # Determine origin
        origin = instrument_to_origin(name.split("_")[1])

        # Determine local directory for this image
        origin_path = fs.join(environment.data_images_path, origin)
        if not fs.is_directory(origin_path): fs.create_directory(origin_path)

        # Determine local path
        local_path = fs.join(origin_path, name)

        # Check whether the image is not present
        if fs.is_file(local_path):
            log.warning("The '" + name + "' remotely cached image is still present locally. Keeping this file and throwing the remote file away.")
            continue
        else:
            # Inform
# Load wavelength grid
wavelength_grid_path = fs.join(modeling_path, "fit", "in", "wavelengths_lowres.txt")
wavelength_grid = WavelengthGrid.from_skirt_input(wavelength_grid_path)
wavelengths = wavelength_grid.wavelengths(asarray=True)  # list of wavelengths

# Load simulated datacube
datacube_path = fs.join(modeling_path, "fit", "best", "images", "M81_earth_total.fits")
datacube = DataCube.from_file(datacube_path, wavelength_grid)

x = []
y = []

new_path = fs.join(modeling_path, "fit", "best", "images", "new")
fs.create_directory(new_path)

### NEW

# Convert datacube to flux (wavelength) density
datacube.convert_to_fluxdensity("W / (m2 * arcsec2 * micron)")

# Pack datacube into a 3D array
fluxdensities = datacube.asarray()

###

# Loop over filters
for filter_name in sorted_filter_names:

    # Filter wavelength
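# -----------------------------------------------------------------

# NOTE: a minimal sketch (not the PTS implementation) of what convolving the
# datacube with a broadband filter could look like, assuming the wavelength
# axis is the first axis of 'fluxdensities' and 'transmission' is a
# hypothetical transmission curve sampled on the same wavelength grid.
import numpy as np

def bandpass_image(fluxdensities, wavelengths, transmission):
    # Normalize the transmission curve to unit integral over wavelength
    weights = transmission / np.trapz(transmission, wavelengths)
    # Weighted integral along the wavelength axis -> one 2D image per filter
    return np.trapz(fluxdensities * weights[:, None, None], wavelengths, axis=0)

# -----------------------------------------------------------------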
    # Send the appropriate command
    remote.launch_pts_command("get_poisson_errors", arguments_no_remote)

    # Retrieve the remote directory
    remote.download(remote_path, fs.cwd(), show_output=True)
    local_path = fs.join(fs.cwd(), fs.name(remote_path))

    # Remove the temporary remote directory
    remote.remove_directory(remote_path)

# Locally
else:

    # Make a local directory
    local_path = fs.join(fs.cwd(), temp_name)
    fs.create_directory(local_path)

    # Create the DustPedia data processing instance
    dpdp = DustPediaDataProcessing()

    # GALEX
    if "GALEX" in config.band: dpdp.make_galex_mosaic_and_poisson_frame(config.galaxy_name, local_path)

    # SDSS
    elif "SDSS" in config.band:

        band = config.band.split(" ")[1]
def run_configurable(table_matches, args, tables):

    """
    This function ...
    :param table_matches:
    :param args:
    :param tables:
    :return:
    """

    # Determine the configuration method
    configuration_method = None
    if args.interactive: configuration_method = "interactive"
    elif args.arguments: configuration_method = "arguments"
    elif args.configfile is not None: configuration_method = "file:" + args.configfile
    elif args.rerun: configuration_method = "last"

    # Resolve
    subproject, index = table_matches[0]
    resolved = introspection.resolve_from_match(subproject, tables[subproject], index)

    # Get properties
    title = resolved.title
    command_name = resolved.command_name
    hidden = resolved.hidden
    description = resolved.description
    module_path = resolved.module_path
    class_name = resolved.class_name
    configuration_method_table = resolved.configuration_method
    configuration_module_path = resolved.configuration_module_path
    subproject_path = introspection.pts_subproject_dir(subproject)

    # Set sys.argv[0] to the module path; this is not strictly necessary (and not really
    # correct, since we are not actually calling the module where the class is defined)
    sys.argv[0] = fs.join(introspection.pts_root_dir, module_path.replace(".", "/") + ".py")

    # Remove the command name, so that the leftover arguments start at sys.argv[1]
    del sys.argv[1]

    # Get a list of the leftover arguments
    leftover_arguments = sys.argv[1:]

    # Welcome message
    if subproject == "modeling": welcome_modeling()
    elif subproject == "magic": welcome_magic()
    elif subproject == "dustpedia": welcome_dustpedia()
    elif subproject == "evolve": welcome_evolve()

    # Get the configuration definition
    definition = introspection.get_configuration_definition_pts_not_yet_in_pythonpath(configuration_module_path)

    # If not specified on the command line (before the command name), use the default specified in the commands.dat file
    if configuration_method is None: configuration_method = configuration_method_table

    # Check whether arguments are passed while the configuration method is interactive
    if configuration_method == "interactive" and len(leftover_arguments) > 0:
        raise ValueError("Arguments on the command-line are not supported by default for this command. Run with pts --arguments to change this behaviour.")

    # Create the configuration
    config = create_configuration(definition, command_name, description, configuration_method)

    ## SAVE THE CONFIG if requested
    if config.write_config: config.saveto(config.config_file_path(command_name))

    # If this is not a re-run
    if not args.rerun:

        if not fs.is_directory(introspection.pts_user_config_dir): fs.create_directory(introspection.pts_user_config_dir)

        # CACHE THE CONFIG
        config_cache_path = fs.join(introspection.pts_user_config_dir, command_name + ".cfg")
        config.saveto(config_cache_path)

    # Setup function
    if subproject == "modeling": setup_modeling(command_name, fs.cwd())
    elif subproject == "magic": setup_magic(command_name, fs.cwd())
    elif subproject == "dustpedia": setup_dustpedia(command_name, fs.cwd())
    elif subproject == "evolve": setup_evolve(command_name, fs.cwd())

    # Initialize the logger
    log = initialize_pts(config, remote=args.remote, command_name=command_name)

    # Exact command name
    exact_command_name = subproject + "/" + command_name

    # If the PTS command has to be executed remotely
    if args.remote is not None: run_remotely(exact_command_name, config, args.keep, args.remote, log)

    # The PTS command has to be executed locally
    else: run_locally(exact_command_name, module_path, class_name, config, args.input_files, args.output_files, args.output, log)

    # Finish function
    if subproject == "modeling": finish_modeling(command_name, fs.cwd())
    elif subproject == "magic": finish_magic(command_name, fs.cwd())
    elif subproject == "dustpedia": finish_dustpedia(command_name, fs.cwd())
    elif subproject == "evolve": finish_evolve(command_name, fs.cwd())
# Basic wavelength grids
if config.basic:

    # Get the list of the different npoints
    basic_npoints_list = config.npoints_range_basic.linear(config.ngrids_basic)

    # Set paths
    basic_grid_paths = OrderedDict()
    for npoints in basic_npoints_list:

        # Determine the path
        dirname = "basic_" + str(npoints)
        path = fs.join(grids_path, dirname)
        if fs.is_directory(path): fs.clear_directory(path)
        else: fs.create_directory(path)

        # Set the path
        basic_grid_paths[npoints] = path

    # Generate the grids
    basic_grids = create_basic_wavelength_grids(config.ngrids_basic, config.npoints_range_basic, config.range,
                                                filters=fitting_run.fitting_filters, fixed=fixed_wavelengths,
                                                plot_seds=seds, table=table, out_paths=basic_grid_paths,
                                                plot_paths=basic_grid_paths)

# -----------------------------------------------------------------

# Refined wavelength grids
if config.refined:

    # Get the list of the different npoints
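# -----------------------------------------------------------------

# NOTE: assuming npoints_range_basic.linear(n) returns n linearly spaced point
# counts across the configured range, a sketch of the resulting directory names
# (the range 25-100 and ngrids 4 are illustrative values only):
import numpy as np

basic_npoints_list = [int(round(v)) for v in np.linspace(25, 100, 4)]  # -> [25, 50, 75, 100]
dirnames = ["basic_" + str(npoints) for npoints in basic_npoints_list]  # -> basic_25 ... basic_100

# -----------------------------------------------------------------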
def run_configurable(table_matches, args, tables):

    """
    This function ...
    :param table_matches:
    :param args:
    :param tables:
    :return:
    """

    # Determine the configuration method
    configuration_method = None
    if args.interactive: configuration_method = "interactive"
    elif args.arguments: configuration_method = "arguments"
    elif args.configfile is not None: configuration_method = "file:" + args.configfile
    elif args.rerun: configuration_method = "last"

    # Regenerate the configuration method option
    if args.interactive: configuration_method_argument = "--interactive"
    elif args.arguments: configuration_method_argument = "--arguments"
    elif args.configfile is not None: configuration_method_argument = "--configfile '" + args.configfile + "'"
    elif args.rerun: configuration_method_argument = "--rerun"
    else: configuration_method_argument = ""

    # Resolve
    subproject, index = table_matches[0]
    resolved = introspection.resolve_from_match(subproject, tables[subproject], index)

    # Get properties
    title = resolved.title
    command_name = resolved.command_name
    hidden = resolved.hidden
    description = resolved.description
    module_path = resolved.module_path
    class_name = resolved.class_name
    configuration_method_table = resolved.configuration_method
    configuration_module_path = resolved.configuration_module_path
    subproject_path = introspection.pts_subproject_dir(subproject)

    # Set sys.argv[0] to the module path; this is not strictly necessary (and not really
    # correct, since we are not actually calling the module where the class is defined)
    sys.argv[0] = fs.join(introspection.pts_root_dir, module_path.replace(".", "/") + ".py")

    # Remove the command name, so that the leftover arguments start at sys.argv[1]
    del sys.argv[1]

    # Get a list of the leftover arguments
    leftover_arguments = sys.argv[1:]

    # Welcome message
    if subproject == "modeling": welcome_modeling()
    elif subproject == "magic": welcome_magic()
    elif subproject == "dustpedia": welcome_dustpedia()
    elif subproject == "evolve": welcome_evolve()

    # Special
    if subproject == "modeling": check_modeling_cwd(command_name, fs.cwd())

    # Get the configuration definition
    definition = introspection.get_configuration_definition_pts_not_yet_in_pythonpath(configuration_module_path)

    # If not specified on the command line (before the command name), use the default specified in the commands.dat file
    if configuration_method is None: configuration_method = configuration_method_table

    # Check whether arguments are passed while the configuration method is interactive
    if configuration_method == "interactive" and len(leftover_arguments) > 0:
        raise ValueError("Arguments on the command-line are not supported by default for this command. Run with pts --arguments to change this behaviour.")

    # Create the configuration
    config = create_configuration(definition, command_name, description, configuration_method)

    ## SAVE THE CONFIG if requested
    if config.write_config:
        config_filepath = config.config_file_path(command_name)
        config.saveto(config_filepath)
    else: config_filepath = None

    # If this is not a re-run
    if not args.rerun:

        if not fs.is_directory(introspection.pts_user_config_dir): fs.create_directory(introspection.pts_user_config_dir)

        # CACHE THE CONFIG
        config_cache_path = fs.join(introspection.pts_user_config_dir, command_name + ".cfg")
        config.saveto(config_cache_path)

    # Setup function
    if subproject == "modeling": setup_modeling(command_name, fs.cwd(), configuration_method_argument)
    elif subproject == "magic": setup_magic(command_name, fs.cwd())
    elif subproject == "dustpedia": setup_dustpedia(command_name, fs.cwd())
    elif subproject == "evolve": setup_evolve(command_name, fs.cwd())

    # Initialize the logger
    log = initialize_pts(config, remote=args.remote, command_name=command_name)

    # Exact command name
    exact_command_name = subproject + "/" + command_name

    # If the PTS command has to be executed remotely
    if args.remote is not None: run_remotely(exact_command_name, config, args.keep, args.remote, log)

    # The PTS command has to be executed locally
    else: run_locally(exact_command_name, module_path, class_name, config, args.input_files, args.output_files, args.output, log)

    # Finish function
    if subproject == "modeling": finish_modeling(command_name, fs.cwd(), config_path=config_filepath)
    elif subproject == "magic": finish_magic(command_name, fs.cwd())
    elif subproject == "dustpedia": finish_dustpedia(command_name, fs.cwd())
    elif subproject == "evolve": finish_evolve(command_name, fs.cwd())
level = "DEBUG" if config.debug else "INFO"

# Initialize the logger
log = logging.setup_log(level=level)
log.start("Starting setup ...")

# -----------------------------------------------------------------

# Inform the user
log.info("Resolving the galaxy name ...")

# Get the NGC name of the galaxy
ngc_name = catalogs.get_ngc_name(config.name)

# Inform the user
log.info("Galaxy NGC ID is '" + ngc_name + "'")

# Determine the path to the new directory
path = fs.join(fs.cwd(), ngc_name)

# Create the directory
fs.create_directory(path)

# Determine the path to the data directory
data_path = fs.join(path, "data")

# Create the data directory
fs.create_directory(data_path)

# -----------------------------------------------------------------
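# NOTE: as an illustrative example (the exact formatting of the resolved name
# is an assumption), running this setup with config.name = "M81" would resolve
# the NGC name to NGC 3031 and create the directories ./NGC3031 and
# ./NGC3031/data under the current working directory.

# -----------------------------------------------------------------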