def show_luminosities(sed):

    """
    This function ...
    :param sed:
    :return:
    """

    # Get spectral luminosity density
    lum = sed.photometry_at(fltr_wavelength, unit="W/micron")
    lum2 = sed.photometry_at(fltr_wavelength, unit="W/micron", interpolate=False)

    #
    log.info("Luminosity: " + tostr(lum))
    log.info("No interpolation: " + tostr(lum2))

    # Convert to solar SPECTRAL luminosity DENSITY at wavelength
    lum_spectral_solar = lum.to("W/micron").value / solar_wavelength_density.to("W/micron").value

    # Convert to neutral
    lum_neutral = lum.to("W", density=True, wavelength=fltr_wavelength)
    lum_solar = lum.to("Lsun", density=True, wavelength=fltr_wavelength)

    # Neutral and solar
    log.info("Luminosity in spectral solar units: " + tostr(lum_spectral_solar) + " Lsun_" + fltr.band)
    log.info("Luminosity in neutral units: " + tostr(lum_neutral))
    log.info("Luminosity in solar units: " + tostr(lum_solar))
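# Illustration (not part of the original function): the "neutral" luminosity above is the
# lambda * L_lambda product, so a manual conversion for a quantity 'lum' expressed in W/micron
# would look like the sketch below; this is only meant to mirror what the density=True
# conversion does, assuming 'lum' and 'fltr_wavelength' behave like Astropy-style quantities.
#
#   lum_neutral_manual = lum.to("W/micron").value * fltr_wavelength.to("micron").value  # numeric value in W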
def show_heating_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    print(" - " + fmt.bold + "heating simulations:" + fmt.reset)
    print("")

    # BASIC
    print(" - " + fmt.bold + "basic:" + fmt.reset)
    print(" - " + fmt.bold + "old scale heights: " + fmt.reset + tostr(run.heating_config.old_scale_heights))
    print("")

    # Loop over the contributions
    npackages = None
    selfabsorption = None
    transient_heating = None
    for contribution in contributions:

        # Get the ski path
        #ski_path = run.heating_ski_path_for_contribution(contribution)
        #ski = SkiFile(ski_path)
        ski = run.get_ski_for_contribution(contribution)

        if npackages is None: npackages = ski.packages()
        elif ski.packages() != npackages: raise RuntimeError("The number of photon packages differs between the contributions")

        if selfabsorption is None: selfabsorption = ski.dustselfabsorption()
        elif ski.dustselfabsorption() != selfabsorption: raise RuntimeError("The dust self-absorption setting differs between the contributions")

        if transient_heating is None: transient_heating = ski.transient_dust_emissivity
        elif ski.transient_dust_emissivity != transient_heating: raise RuntimeError("The transient heating setting differs between the contributions")

        # Get the output path
        #output_path = run.heating_output_path_for_contribution(contribution)

    # LAUNCH INFO
    print(" - " + fmt.bold + "launch info:" + fmt.reset)
    print(" - " + fmt.bold + "number of photon packages: " + fmt.reset + tostr(npackages))
    print(" - " + fmt.bold + "dust self-absorption: " + fmt.reset + tostr(selfabsorption))
    print(" - " + fmt.bold + "transient heating: " + fmt.reset + tostr(transient_heating))
    print("")

    # Loop over the contributions
    print(" - " + fmt.bold + "finished:" + fmt.reset)
    for contribution in contributions:
        output_path = run.output_path_for_contribution(contribution)
        print(" - " + fmt.bold + contribution + ": " + fmt.reset + tostr(output_path))
def show_wavelength_grid_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    print(" - " + fmt.bold + "wavelength grid:" + fmt.reset)
    print(" - " + fmt.bold + "number of points: " + fmt.reset + tostr(run.nwavelengths))
    print(" - " + fmt.bold + "emission lines: " + fmt.reset + tostr(run.config.wg.add_emission_lines))
def show_directory_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    # Show the directory info
    print(" - " + fmt.bold + "directory info:" + fmt.reset)
    print(" - " + fmt.bold + "number of files: " + fmt.reset + tostr(run.nfiles))
    print(" - " + fmt.bold + "directory size: " + fmt.reset + tostr(run.disk_space))
def show_memory_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    # Show the memory info
    print(" - " + fmt.bold + "memory:" + fmt.reset)
    print(" - " + fmt.bold + "peak: " + fmt.reset + tostr(run.memory.peak))
    print(" - " + fmt.bold + "peak per process: " + fmt.reset + tostr(run.memory.peak_per_process))
def show_model_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    print(" - " + fmt.bold + "model:" + fmt.reset)
    print("")

    print(" - old stars:")
    print(" - map name: " + run.model_old_map_name)
    print(" - methods: " + tostr(run.old_map_methods))
    print(" - origins: " + tostr(run.old_map_origins))

    print(" - young stars:")
    print(" - map name: " + run.model_young_map_name)
    print(" - methods: " + tostr(run.young_map_methods))
    print(" - origins: " + tostr(run.young_map_origins))

    print(" - ionizing stars:")
    print(" - map name: " + run.model_ionizing_map_name)
    print(" - methods: " + tostr(run.ionizing_map_methods))
    print(" - origins: " + tostr(run.ionizing_map_origins))

    print(" - dust:")
    print(" - map name: " + run.model_dust_map_name)
    print(" - methods: " + tostr(run.dust_map_methods))
    print(" - origins: " + tostr(run.dust_map_origins))
def show_parallelization_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    # Get the log file
    logfile = run.logfile

    # Show the parallelization info
    print(" - " + fmt.bold + "parallelization options:" + fmt.reset)
    print(" - " + fmt.bold + "number of processes: " + fmt.reset + tostr(logfile.nprocesses))
    print(" - " + fmt.bold + "number of threads: " + fmt.reset + tostr(logfile.nthreads))
    print(" - " + fmt.bold + "data parallelization: " + fmt.reset + tostr(logfile.data_parallel))
def show_table(tab):

    """
    This function ...
    :param tab:
    :return:
    """

    # Only unique values per column
    if config.unique:

        # Loop over the columns
        for colname in tab.column_names:

            # Get the values
            values = tab.get_column_values(colname, add_unit=False)
            unique_values = sequences.unique_values(values, ignore_none=True)
            if config.sorted_unique: unique_values = list(sorted(unique_values))
            nunique = len(unique_values)

            # Show the values
            print(" - " + fmt.bold + colname + fmt.reset_bold + ": " + tostr(unique_values, decimal_places=config.ndecimal_places, round=config.round) + " (" + str(nunique) + ")")

    # Interactive view
    elif config.interactive: tab.more()

    # Regular view
    else: fmt.print_table(tab, ndecimal_places=config.ndecimal_places, round=config.round)
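# Rough pure-Python sketch of the unique-values branch above (illustration only; the PTS
# helper 'sequences.unique_values' may handle more edge cases such as unhashable values):
#
#   unique_values = []
#   for v in tab.get_column_values(colname, add_unit=False):
#       if v is None: continue                      # corresponds to ignore_none=True
#       if v not in unique_values: unique_values.append(v)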
def show_colours_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    print(" - " + fmt.bold + "colours:" + fmt.reset)
    print("")
    print(" - colours: " + tostr(run.colour_names, delimiter=", "))
def show_residuals_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    print(" - " + fmt.bold + "residuals:" + fmt.reset)
    print("")
    print(" - images: " + tostr(run.residual_image_names, delimiter=", "))
def show_basic_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    # Show the contents of the info file
    print(run.info.to_string(line_prefix=" ", ignore_none=True, ignore=ignore_properties))

    # From config
    print(" - " + fmt.bold + "old scale heights: " + fmt.reset + tostr(run.config.old_scale_heights))
def show_timing_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    # Show the timing info
    print(" - " + fmt.bold + "timing:" + fmt.reset)
    print(" - " + fmt.bold + "total: " + fmt.reset + tostr(run.timeline.total))
    print(" - " + fmt.bold + "setup: " + fmt.reset + tostr(run.timeline.setup))
    print(" - " + fmt.bold + "stellar: " + fmt.reset + tostr(run.timeline.stellar))
    print(" - " + fmt.bold + "spectra: " + fmt.reset + tostr(run.timeline.spectra))
    print(" - " + fmt.bold + "dust: " + fmt.reset + tostr(run.timeline.dust))
    print(" - " + fmt.bold + "writing: " + fmt.reset + tostr(run.timeline.writing))
    print(" - " + fmt.bold + "communication: " + fmt.reset + tostr(run.timeline.communication))
    print(" - " + fmt.bold + "waiting: " + fmt.reset + tostr(run.timeline.waiting))
def from_data(cls, data, genome_type, crossover_method):

    """
    This function ...
    :param data:
    :param genome_type:
    :param crossover_method:
    :return:
    """

    # Create a new table
    table = cls()

    # Setup
    table._setup()

    # Loop over the entries
    for entry in data:

        # If crossover applied
        if entry[5]: crossover = crossover_method
        else: crossover = None

        # Add the last entry (the crossover details)
        #if crossover_method == "single_point":
        # Not implemented
        #else: raise NotImplementedError("Not implemented")

        # Convert crossover details into string
        details = tostr(entry[-1])

        # Construct row: cut generation index from the entry and cut before the 'crossover' flag
        row = entry[1:5] + [crossover, details]
        #print("row:", row)
        #rows.append(row)
        table.add_row(row)

    # Add meta
    table.meta["genome_type"] = genome_type

    # Return the table
    return table
def from_data(cls, data, genome_type, crossover_method):

    """
    This function ...
    :param data:
    :param genome_type:
    :param crossover_method:
    :return:
    """

    # Create a new table
    table = cls()

    # Setup
    table.setup()

    # Loop over the entries
    for entry in data:

        # If crossover applied
        if entry[5]: crossover = crossover_method
        else: crossover = None

        # Add the last entry (the crossover details)
        #if crossover_method == "single_point":
        # Not implemented
        #else: raise NotImplementedError("Not implemented")

        # Convert crossover details into string
        details = tostr(entry[-1])

        # Construct row: cut generation index from the entry and cut before the 'crossover' flag
        row = entry[1:5] + [crossover, details]
        #print("row:", row)
        #rows.append(row)
        table.add_row(row)

    # Add meta
    table.meta["genome_type"] = genome_type

    # Return the table
    return table
def generate_initial_parameter_values(self):

    """
    This function ...
    :return:
    """

    # Inform the user
    log.info("Generating random initial parameter values ...")

    # Get the low and high value
    low_factor = self.config.relative_range_initial.min
    high_factor = self.config.relative_range_initial.max

    # Determine the exponents, to generate random points
    log_low = np.log10(low_factor)
    log_high = np.log10(high_factor)

    # Loop over the real parameter values
    for parameter_name in self.real_parameter_values:

        # Get the parameter value
        value = self.real_parameter_values[parameter_name]

        # Multiply the value with a random number between 1/3 and 3.
        random = np.random.uniform(log_low, log_high)
        random_factor = 10**random
        value = value * random_factor  # DON'T DO VALUE *= RANDOM_FACTOR HERE: CHANGES THE UNDERLYING QUANTITY OBJECT AS WELL IN SELF.REAL_PARAMETER_VALUES !!

        # Set the value as the initial parameter value
        self.initial_parameter_values[parameter_name] = value

    # Debugging
    log.debug("The initial parameter values are:")
    log.debug("")
    for parameter_name in self.real_parameter_values: log.debug(" - " + parameter_name + ": " + tostr(self.initial_parameter_values[parameter_name], scientific=True, fancy=True, ndigits=parameter_ndigits[parameter_name]))
    log.debug("")
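# Worked illustration (assumed configuration, not part of the original code): with
# relative_range_initial = (1/3, 3), log_low ~ -0.477 and log_high ~ +0.477, so
# random_factor = 10**uniform(log_low, log_high) is distributed log-uniformly between
# 1/3 and 3; a draw of 0.2 for the exponent, for example, gives a factor of about 1.58.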
def show_launch_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    # Get the ski file
    ski = run.ski_file

    # Show the launch options
    print(" - " + fmt.bold + "launch info:" + fmt.reset)
    print(" - " + fmt.bold + "number of photon packages: " + fmt.reset + tostr(ski.packages()))
    print(" - " + fmt.bold + "dust self-absorption: " + fmt.reset + tostr(ski.dustselfabsorption()))
    print(" - " + fmt.bold + "transient heating: " + fmt.reset + tostr(ski.transient_dust_emissivity))
    print(" - " + fmt.bold + "has output: " + fmt.reset + tostr(run.has_output))
    print(" - " + fmt.bold + "has extracted data: " + fmt.reset + tostr(run.has_extracted))
    print(" - " + fmt.bold + "has plots: " + fmt.reset + tostr(run.has_plots))
    print(" - " + fmt.bold + "has misc output: " + fmt.reset + tostr(run.has_misc))
def check_best(self):

    """
    This function ...
    :return:
    """

    # Inform the user
    log.info("Checking the best individual ...")

    # Get the values from the tables
    values_last_generation = self.get_best_parameter_values_last_generation()
    values_all_generations = self.get_best_parameter_values_all_generations()

    print("")
    for label in self.real_parameter_values:

        # Get the values from the tables
        value_last_generation = values_last_generation[label]
        value_all_generations = values_all_generations[label]

        # Get the values and calculate the difference
        value = self.best_parameter_values[label]
        real_value = self.real_parameter_values[label]
        absolute_difference = abs(value - real_value)
        relative_difference = absolute_difference / real_value

        # Show the comparison
        print(label + ":")
        print(" - Best value in last generation: " + tostr(value_last_generation))
        print(" - Best value across all generations: " + tostr(value_all_generations))
        print(" - Real value: " + tostr(real_value))
        print(" - Best fitted value: " + tostr(value))
        print(" - Absolute difference: " + tostr(absolute_difference))
        print(" - Relative difference: " + tostr(relative_difference) + " (" + tostr(relative_difference * 100) + "%)")
        print("")
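# Worked example (hypothetical numbers, for illustration only): if the real value of a
# parameter is 2.0e7 and the best fitted value is 2.3e7, then absolute_difference = 3.0e6
# and relative_difference = 0.15, which the loop above would report as "0.15 (15.0%)".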
    # Show the info
    print("")
    show_run_info(run)
    #print("")

# Show the parameters
elif config.parameters is not None:

    # Load the run
    run = context.get_cached_run(name)

    print("")

    # Show the parameter values
    for name in config.parameters: print(" - " + fmt.bold + name + ": " + fmt.reset + tostr(run.info.parameter_values[name]))
    print("")

# -----------------------------------------------------------------

# Empty line to separate
if not (config.info or config.parameters is not None): print("")

# Show the local analysis runs
print(fmt.yellow + "LOCAL:" + fmt.reset)
print("")

# Loop over the names
for name in context.analysis_run_names:

    # Check in runs
    config.pts = True

elif config.only_sampled:
    config.skirt = config.pts = False
    config.sampled = True

# -----------------------------------------------------------------

fltr = config.filter
fltr_wavelength = fltr.wavelength

sun = Sun()

print("")
solar_neutral_density = sun.luminosity_for_wavelength(fltr_wavelength, unit="W", density=True)
solar_wavelength_density = sun.luminosity_for_wavelength(fltr_wavelength, unit="W/micron")
log.info("solar in neutral density: " + tostr(solar_neutral_density))
log.info("solar in wavelength density: " + tostr(solar_wavelength_density))
log.info("bolometric solar luminosity: " + tostr(sun.total_luminosity()))
print("")

# -----------------------------------------------------------------

sfr_scalar = config.sfr.to("Msun/yr").value

# -----------------------------------------------------------------

def show_luminosities(sed):

    """
    This function ...
    :param sed:
    else: raise RuntimeError("Something went wrong")

    # Parse
    unit = parse_unit(config.unit, density=density, brightness=brightness, density_strict=True, brightness_strict=True)

# Not a photometric unit
else: unit = parse_unit(config.unit)

# -----------------------------------------------------------------

# Set conversion info
conversion_info = dict()
if config.distance is not None: conversion_info["distance"] = config.distance
if config.wavelength is not None: conversion_info["wavelength"] = config.wavelength
if config.frequency is not None: conversion_info["frequency"] = config.frequency
if config.pixelscale is not None: conversion_info["pixelscale"] = config.pixelscale
if config.solid_angle is not None: conversion_info["solid_angle"] = config.solid_angle
if config.filter is not None: conversion_info["fltr"] = config.filter

# -----------------------------------------------------------------

# Convert
converted = quantity.to(unit, **conversion_info)

# -----------------------------------------------------------------

# Show
print(tostr(converted))

# -----------------------------------------------------------------
# Create the DustPedia sample
sample = DustPediaSample()
galaxy_name = sample.get_name(config.galaxy_name)

# Create the database
database = DustPediaDatabase()

# Login
username, password = get_account()
database.login(username, password)

# Get the parameters
parameters = database.get_dust_black_body_parameters(galaxy_name)
dust_mass = parameters[0]
dust_mass_error = parameters[1]
temperature = parameters[2]
temperature_error = parameters[3]
luminosity = parameters[4]

print("")
print(galaxy_name)
print(" - Mass: " + tostr(dust_mass, scientific=True, ndigits=3))
print(" - Temperature: " + tostr(temperature, scientific=True, ndigits=3))
print(" - Luminosity: " + tostr(luminosity, scientific=True, ndigits=3))
print("")

# -----------------------------------------------------------------
def show_dust_grid_info(run):

    """
    This function ...
    :param run:
    :return:
    """

    # Title
    print(" - " + fmt.bold + "dust grid:" + fmt.reset)

    # From log file or tree file
    print(" - " + fmt.bold + "number of cells: " + fmt.reset + tostr(run.ncells))

    # From config
    print(" - " + fmt.bold + "type: " + fmt.reset + run.config.dg.grid_type)
    print(" - " + fmt.bold + "relative scale: " + fmt.reset + tostr(run.config.dg.scale))
    print(" - " + fmt.bold + "scale heights: " + fmt.reset + tostr(run.config.dg.scale_heights))
    if run.config.dg.grid_type == "bintree": print(" - " + fmt.bold + "min level: " + fmt.reset + tostr(run.config.dg.bintree_min_level))
    elif run.config.dg.grid_type == "octtree": print(" - " + fmt.bold + "min level: " + fmt.reset + tostr(run.config.dg.octtree_min_level))
    else: raise ValueError("Invalid grid type: " + run.config.dg.grid_type)
    print(" - " + fmt.bold + "maximum mass fraction: " + fmt.reset + tostr(run.config.dg.max_mass_fraction))

    # From dust grid object
    print(" - " + fmt.bold + "sample count: " + fmt.reset + tostr(run.dust_grid.sample_count))
    print(" - " + fmt.bold + "min x: " + fmt.reset + tostr(run.dust_grid.min_x))
    print(" - " + fmt.bold + "max x: " + fmt.reset + tostr(run.dust_grid.max_x))
    print(" - " + fmt.bold + "min y: " + fmt.reset + tostr(run.dust_grid.min_y))
    print(" - " + fmt.bold + "max y: " + fmt.reset + tostr(run.dust_grid.max_y))
    print(" - " + fmt.bold + "min z: " + fmt.reset + tostr(run.dust_grid.min_z))
    print(" - " + fmt.bold + "max z: " + fmt.reset + tostr(run.dust_grid.max_z))
    print(" - " + fmt.bold + "direction method: " + fmt.reset + tostr(run.dust_grid.direction_method))
    print(" - " + fmt.bold + "maximum optical depth: " + fmt.reset + tostr(run.dust_grid.max_optical_depth))
    print(" - " + fmt.bold + "maximum density dispersion fraction: " + fmt.reset + tostr(run.dust_grid.max_dens_disp_fraction))
    print(" - " + fmt.bold + "search method: " + fmt.reset + tostr(run.dust_grid.search_method))
# Not recognized
else: raise ValueError("Unrecognized filetype")

# -----------------------------------------------------------------

# Load
structure, tab = load_structure(config.filename, config.filetype)

# -----------------------------------------------------------------

# Modify table?
if tab is not None:

    # Remove columns?
    if config.columns is not None:
        if sequences.contains_more(config.columns, tab.column_names): raise ValueError("There are invalid column names: '" + tostr(sequences.get_other(config.columns, tab.column_names)) + "'")
        tab.remove_other_columns(config.columns)

    # Sort?
    if config.sort is not None: tab.sort(config.sort)

# -----------------------------------------------------------------

# Latex representation
if config.latex:
    if tab is None: raise ValueError("Not supported: not a table")
    tab.print_latex(round=config.round, ndecimal_places=config.ndecimal_places)

# Table
elif config.table:
    if tab is None: raise ValueError("Not supported: not a table")
def check_grid_convolution():

    """
    This function ...
    :return:
    """

    # Wavelengths used for each filter
    wavelengths_for_filters = OrderedDict()

    print("")
    print(fmt.underlined + fmt.blue + "Filters for convolution:" + fmt.reset)
    print("")

    # Loop over the fitting filters
    for fltr in fitting_run.fitting_filters:

        # Get the wavelength indices in the ranges
        indices_in_minmax = [i for i in range(len(wavelengths)) if wavelengths[i] in fltr.range.to("micron").value]
        indices_in_fwhm = [i for i in range(len(wavelengths)) if wavelengths[i] in fltr.fwhm_range.to("micron").value]

        # Get the number of wavelengths in the ranges
        nwavelengths_in_minmax = len(indices_in_minmax)
        nwavelengths_in_fwhm = len(indices_in_fwhm)

        # Show checks
        if nwavelengths_in_minmax < config.min_npoints:
            #raise ValueError("Too few wavelengths within the filter wavelength range (" + str(fltr.min.to("micron").value) + " to " + str(fltr.max.to("micron").value) + " micron) for convolution (" + str(nwavelengths_in_minmax) + ")")
            print(fmt.red + " - " + str(fltr) + ": too few wavelengths within the filter wavelength range (" + tostr(fltr.min) + " to " + tostr(fltr.max) + ") for convolution (" + str(nwavelengths_in_minmax) + " instead of " + str(config.min_npoints) + ")" + fmt.reset)
        elif nwavelengths_in_fwhm < config.min_npoints_fwhm:
            #raise ValueError("Too few wavelengths within the filter FWHM wavelength range (" + str(fltr.fwhm_min.to("micron").value) + " to " + str(fltr.fwhm_max.to("micron").value) + " micron) for convolution (" + str(nwavelengths_in_fwhm) + ")")
            print(fmt.red + " - " + str(fltr) + ": too few wavelengths within the filter FWHM wavelength range (" + tostr(fltr.fwhm_min) + " to " + tostr(fltr.fwhm_max) + ") for convolution (" + str(nwavelengths_in_fwhm) + " instead of " + str(config.min_npoints_fwhm) + ")" + fmt.reset)
        else: print(fmt.green + " - " + str(fltr) + ": filter range is sampled well by the wavelengths" + fmt.reset)

        # Set the wavelengths in the range
        wavelengths_in_minmax = [wavelengths[index] for index in indices_in_minmax]

        # Set the used wavelengths for this filter
        wavelengths_for_filters[fltr] = wavelengths_in_minmax

    # Show which wavelengths are used to create filter frames
    if len(wavelengths_for_filters) > 0:

        print("")
        print(fmt.underlined + fmt.blue + "Wavelengths used for filters:" + fmt.reset)
        print("")

        for fltr in wavelengths_for_filters:
            filter_name = str(fltr)
            wavelength_strings = [str(wavelength) for wavelength in wavelengths_for_filters[fltr]]
            print(" - " + fmt.bold + filter_name + fmt.reset + ": " + ", ".join(wavelength_strings))
        print("")
print(fmt.bold + "HEADER:" + fmt.reset_bold)
print("")
header = fs.get_header_lines(config.filename)
nheaderlines = len(header)
for line in header: print(line)

# -----------------------------------------------------------------

print("")

# -----------------------------------------------------------------

colnames = fs.get_column_names(config.filename)
print(fmt.bold + "COLUMN NAMES: " + fmt.reset_bold + tostr(colnames))

# -----------------------------------------------------------------

print("")

# -----------------------------------------------------------------

nrows = fs.get_nlines(config.filename) - nheaderlines
print(fmt.bold + "NUMBER OF ROWS: " + fmt.reset_bold + str(nrows))

# -----------------------------------------------------------------

print("")

# -----------------------------------------------------------------
def get_real_parameter_values(self):

    """
    This function ...
    :return:
    """

    # Inform the user
    log.info("Getting the real parameter values ...")

    # Store the different values encountered in the ski file
    values = defaultdict(list)

    # Add the labels for the free parameters
    # Loop over the free parameters
    for parameter_name in self.config.free_parameters:

        parameter_type = free_parameter_types[parameter_name]
        parsing_type = parsing_types_for_parameter_types[parameter_type]

        # Get the parsing function for this parameter
        parser = getattr(parsing, parsing_type)

        # Search in the absolute parameters
        if parameter_name in free_parameters_absolute_paths:

            # Determine the path to the property
            path = free_parameters_absolute_paths[parameter_name]

            # Get the current value
            value = parser(self.ski.get_value_for_path(path))

            # Set the value
            values[parameter_name].append(value)

        # Search in the stellar components
        if parameter_name in free_parameters_relative_stellar_component_paths:

            # Determine the relative path to the property and the stellar component name
            path, component_name = free_parameters_relative_stellar_component_paths[parameter_name]

            if component_name is not None:

                # Get the stellar component
                stellar_component = self.ski.get_stellar_component(component_name)

                # Get the current value
                value = parser(self.ski.get_value_for_path(path, stellar_component))

                # Set the value
                values[parameter_name].append(value)

            else:

                # Loop over the stellar components
                for component_id in self.ski.get_stellar_component_ids():

                    # Get the stellar component
                    stellar_component = self.ski.get_stellar_component(component_id)

                    # Get the current value
                    value = parser(self.ski.get_value_for_path(path, stellar_component))

                    # Set the value
                    values[parameter_name].append(value)

        # Search in the dust components
        if parameter_name in free_parameters_relative_dust_component_paths:

            # Determine the relative path to the property and the dust component name
            path, component_name = free_parameters_relative_dust_component_paths[parameter_name]

            if component_name is not None:

                # Get the dust component
                dust_component = self.ski.get_dust_component(component_name)

                # Get the current value
                value = parser(self.ski.get_value_for_path(path, dust_component))

                # Set the value
                values[parameter_name].append(value)

            else:

                # Loop over the dust components
                for component_id in self.ski.get_dust_component_ids():

                    # Get the dust component
                    dust_component = self.ski.get_dust_component(component_id)

                    # Get the current value
                    value = parser(self.ski.get_value_for_path(path, dust_component))

                    # Set the value
                    values[parameter_name].append(value)

        # Search in instruments
        if parameter_name in free_parameters_relative_instruments_paths:

            # Determine the relative path to the property and the instrument name
            path, instrument_name = free_parameters_relative_instruments_paths[parameter_name]

            if instrument_name is not None:

                # Get the instrument
                instrument = self.ski.get_instrument(instrument_name)

                # Get the current value
                value = parser(self.ski.get_value_for_path(path, instrument))

                # Set the value
                values[parameter_name].append(value)

            else:

                # Loop over the instruments
                for instrument_name in self.ski.get_instrument_names():

                    # Get the instrument
                    instrument = self.ski.get_instrument(instrument_name)

                    # Get the current value
                    value = parser(self.ski.get_value_for_path(path, instrument))

                    # Set the value
                    values[parameter_name].append(value)

    # Check whether we have only one value for each parameter
    for parameter_name in self.config.free_parameters:

        # Check if any
        if len(values[parameter_name]) == 0: raise ValueError("No parameter values for '" + parameter_name + "' were found in the ski file")

        # Check if all equal
        if not sequences.all_equal(values[parameter_name]): raise ValueError("Parameter values for '" + parameter_name + "' are not equal throughout the ski file")

        # Set the unique real parameter value
        self.real_parameter_values[parameter_name] = values[parameter_name][0]

    # Debugging
    log.debug("The real parameter values are: ")
    log.debug("")
    for parameter_name in self.real_parameter_values: log.debug(" - " + parameter_name + ": " + tostr(self.real_parameter_values[parameter_name], scientific=True, fancy=True, ndigits=parameter_ndigits[parameter_name]))
    log.debug("")
def get_best_parameter_values(self):

    """
    This function ...
    :return:
    """

    # Inform the user
    log.info("Getting the best parameter values ...")

    # Get the best parameter values
    self.best_parameter_values, self.best_chi_squared = self.modeler.modeler.fitter.fitting_run.best_parameter_values_and_chi_squared

    # Debugging
    log.debug("The best parameter values are:")
    log.debug("")
    for parameter_name in self.best_parameter_values: log.debug(" - " + parameter_name + ": " + tostr(self.best_parameter_values[parameter_name], scientific=True, fancy=True, ndigits=parameter_ndigits[parameter_name]))
    log.debug("")

    # Debugging
    log.debug("The best chi squared value is " + str(self.best_chi_squared))
": attenuation is zero but preparation attenuation value was " + str(statistics.attenuation)) fix[prep_name] = (att, statistics.attenuation) # Attenuation nonzero but not corrected if att != 0.0 and statistics.attenuation == 0.0: # Give warning and add log.warning(prep_name + ": attenuation is nonzero but not corrected") fix[prep_name] = (att, statistics.attenuation) # Attenuations not equal elif not numbers.is_close(att, statistics.attenuation): # Give warning and add log.warning(prep_name + ": attenuation of " + tostr(att) + " does not correspond to value of " + tostr(statistics.attenuation) + " used for correction") fix[prep_name] = (att, statistics.attenuation) # ----------------------------------------------------------------- # Remember the correction factors correction_factors = dict() # ----------------------------------------------------------------- # Loop over the prep names that need to be fixed for prep_name in fix: # Get the image paths after and including the extinction correction step
# Get the list of regimes
if config.wavelength_range is not None: regimes = physical_regimes_in_range(config.wavelength_range)
else: regimes = physical_regimes

print("")
for name in regimes:

    print(fmt.bold + fmt.blue + name + fmt.reset)
    print("")

    # Get wavelength range
    wavelength_range = physical_ranges[name]

    # Show minimum wavelength
    if config.wavelength_range is not None and wavelength_range.min < config.wavelength_range.min: print(" - minimum wavelength: " + fmt.red + tostr(wavelength_range.min) + fmt.reset)
    else: print(" - minimum wavelength: " + tostr(wavelength_range.min))

    # Show maximum wavelength
    if config.wavelength_range is not None and wavelength_range.max > config.wavelength_range.max: print(" - maximum wavelength: " + fmt.red + tostr(wavelength_range.max) + fmt.reset)
    else: print(" - maximum wavelength: " + tostr(wavelength_range.max))

    if config.filters:

        filters = get_broad_band_filters(wavelength_range.min, wavelength_range.max)
        if config.wavelength_range is not None:
            colour = "red"
            indices_outside_range = [index for index in range(len(filters)) if filters[index].wavelength not in config.wavelength_range]
        else: colour = indices_outside_range = None
#print("wavelength range of fitting filters: " + tostr(fitting_run.fitting_wavelength_range))
#print("wavelength range of fitting filters (min and max): " + tostr(fitting_run.absolute_fitting_wavelength_range))
#print("filters: " + tostr(fitting_run.normalization_filters))
#print("wavelengths: " + tostr(fitting_run.normalization_wavelengths))
#print("center wavelengths: " + tostr(fitting_run.normalization_center_wavelengths))
#print("effective wavelengths: " + tostr(fitting_run.normalization_effective_wavelengths))
#print("pivot wavelengths: " + tostr(fitting_run.normalization_pivot_wavelengths))
#print("mean wavelengths: " + tostr(fitting_run.normalization_mean_wavelengths))
#print("peak wavelengths: " + tostr(fitting_run.normalization_peak_wavelengths))

# Show the fitting filters
print("")
print("FITTING FILTERS:")
print("")
for fltr in fitting_run.fitting_filters: print(" - " + tostr(fltr))
print("")

# -----------------------------------------------------------------

# Determine the path to the wavelength grids directory
grids_path = fitting_run.wavelength_grids_path

# -----------------------------------------------------------------

# Check whether there are already wavelength grids
if fs.is_directory(grids_path) and not fs.is_empty(grids_path):
    if config.backup: fs.backup_directory(grids_path)
    fs.clear_directory(grids_path)

# -----------------------------------------------------------------
def check_grid_no_convolution():

    """
    This function ...
    :return:
    """

    # Keep track of the wavelengths that have already been used
    used_wavelengths = defaultdict(list)

    print("")
    print(fmt.underlined + fmt.blue + "Filters without convolution:" + fmt.reset)
    print("")

    # Loop over the fitting filters
    for fltr in fitting_run.fitting_filters:

        # Determine the filter wavelength
        filter_wavelength = fltr.wavelength.to(wavelength_unit).value

        # Get the index of the wavelength closest to that of the filter
        index = sequences.find_closest_index(wavelengths, filter_wavelength)

        # Get the actual wavelength
        wavelength = wavelengths[index] * wavelength_unit

        # Get the difference
        #difference = abs(filter_wavelength - wavelengths[index])
        #reldifference = difference / filter_wavelength

        # Check grid wavelength in FWHM
        in_fwhm = wavelength in fltr.fwhm_range

        # Check whether the relative difference is smaller than 1e-6
        #close = reldifference < max_reldifference

        # Check grid wavelength in inner range
        in_inner = wavelength in fltr.inner_range

        # Show
        if not in_fwhm: print(fmt.red + " - " + str(fltr) + ": wavelength (" + tostr(wavelength) + ") not in the FWHM range (" + tostr(fltr.fwhm_range) + ") of the filter" + fmt.reset)
        #elif not close: print(fmt.yellow + " - " + str(fltr) + ": wavelength closest to the filter (" + tostr(wavelength) + ") differs more than " + tostr(max_reldifference*100) + "%" + fmt.reset)
        elif not in_inner: print(fmt.yellow + " - " + str(fltr) + ": wavelength (" + tostr(wavelength) + ") not in the inner range (" + tostr(fltr.inner_range) + ") of the filter" + fmt.reset)
        else: print(fmt.green + " - " + str(fltr) + ": wavelength found close to the filter (" + tostr(wavelength) + ")" + fmt.reset)

        wavelength_micron = wavelength.to("micron").value
        if wavelength_micron in used_wavelengths:
            filters = used_wavelengths[wavelength_micron]
            filter_names = [str(f) for f in filters]
            #log.warning("The simulated flux for the wavelength '" + str(wavelength) + "' has already been used to create SED point(s) for the " + ", ".join(filter_names) + " filter(s)")

        # Add the filter for the wavelength
        used_wavelengths[wavelength_micron].append(fltr)

    # Show which wavelengths are used to create filter frames
    if len(used_wavelengths) > 0:

        print("")
        print(fmt.underlined + fmt.blue + "Used wavelengths and corresponding filter(s):" + fmt.reset)
        print("")

        for wavelength_micron in used_wavelengths:

            filters = used_wavelengths[wavelength_micron]
            filter_names = [str(f) for f in filters]
            nfilters = len(filter_names)
            if nfilters == 1: print(fmt.green + " - " + str(wavelength_micron) + " micron: " + filter_names[0] + fmt.reset)
            else: print(fmt.yellow + " - " + str(wavelength_micron) + " micron: " + fmt.bold + ", ".join(filter_names) + fmt.reset)

        print("")
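# Minimal sketch of a "closest index" lookup, added only as an illustration of what the
# PTS helper 'sequences.find_closest_index' is used for above; the actual helper may be
# implemented differently. The helper name below is hypothetical.
def find_closest_index_sketch(values, target):
    # Return the index of the value with the smallest absolute difference to 'target'
    return min(range(len(values)), key=lambda i: abs(values[i] - target))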
def generate_initial_parameter_values(self):

    """
    This function ...
    :return:
    """

    # Inform the user
    log.info("Generating random initial parameter values ...")

    # Get the low and high value
    low_factor = self.config.relative_range_initial.min
    high_factor = self.config.relative_range_initial.max

    # Determine the exponents, to generate random points
    log_low = np.log10(low_factor)
    log_high = np.log10(high_factor)

    # Loop over the real parameter values
    for parameter_name in self.real_parameter_values:

        # Get the parameter value
        value = self.real_parameter_values[parameter_name]

        # Multiply the value with a random number between 1/3 and 3.
        random = np.random.uniform(log_low, log_high)
        random_factor = 10 ** random
        value = value * random_factor  # DON'T DO VALUE *= RANDOM_FACTOR HERE: CHANGES THE UNDERLYING QUANTITY OBJECT AS WELL IN SELF.REAL_PARAMETER_VALUES !!

        # Set the value as the initial parameter value
        self.initial_parameter_values[parameter_name] = value

    # Debugging
    log.debug("The initial parameter values are:")
    log.debug("")
    for parameter_name in self.real_parameter_values: log.debug(" - " + parameter_name + ": " + tostr(self.initial_parameter_values[parameter_name], scientific=True, fancy=True, ndigits=parameter_ndigits[parameter_name]))
    log.debug("")
print(" - " + fmt.bold + "prefix: " + fmt.reset + simulation.prefix())
print(" - " + fmt.bold + "ski path: " + fmt.reset + simulation.ski_path)
if simulation.has_input:
    print(" - " + fmt.bold + "input files: " + fmt.reset)
    input = simulation.input
    print("")
    for name in input.names: print(" * " + fmt.bold + name + ": " + fmt.reset + input.get_filepath(name))
    print("")
print(" - " + fmt.bold + "output path: " + fmt.reset + simulation.output_path)
print(" - " + fmt.bold + "submitted at: " + fmt.reset + simulation.submitted_at)
print("")

# More info
print(" - " + fmt.bold + "remote simulation path: " + fmt.reset + simulation.remote_simulation_path)
print(" - " + fmt.bold + "remote input path(s): " + fmt.reset + tostr(simulation.remote_input_path))
print(" - " + fmt.bold + "remote output path: " + fmt.reset + simulation.remote_output_path)
print("")

# More info
print(fmt.green + " RETRIEVAL" + fmt.reset)
print("")
print(" - " + fmt.bold + "remove_remote_input: " + fmt.reset + str(simulation.remove_remote_input))
print(" - " + fmt.bold + "remove_remote_output: " + fmt.reset + str(simulation.remove_remote_output))
print(" - " + fmt.bold + "remove_remote_simulation_directory: " + fmt.reset + str(simulation.remove_remote_simulation_directory))
print(" - " + fmt.bold + "remove_local_output: " + fmt.reset + str(simulation.remove_local_output))
if simulation.retrieve_types is not None: print(" - " + fmt.bold + "retrieve_types: " + fmt.reset + tostr(simulation.retrieve_types, delimiter=", "))
print("")

if simulation.handle is not None:

    print(fmt.green + " EXECUTION" + fmt.reset)