def _custom_init_(self, model_name, other_name=None, log_interp=True): """ Custom initialization for this model :param model_name: the name of the model, corresponding to the root of the .h5 file in the data directory :param other_name: (optional) the name to be used as name of the model when used in astromodels. If None (default), use the same name as model_name :return: none """ # Get the data directory data_dir_path = get_user_data_path() # Sanitize the data file filename_sanitized = os.path.abspath( os.path.join(data_dir_path, "%s.h5" % model_name)) if not os.path.exists(filename_sanitized): raise MissingDataFile( "The data file %s does not exists. Did you use the " "TemplateFactory?" % (filename_sanitized)) # Open the template definition and read from it self._data_file = filename_sanitized with HDFStore(filename_sanitized) as store: self._data_frame = store["data_frame"] self._parameters_grids = collections.OrderedDict() processed_parameters = 0 for key in list(store.keys()): match = re.search("p_([0-9]+)_(.+)", key) if match is None: continue else: tokens = match.groups() this_parameter_number = int(tokens[0]) this_parameter_name = str(tokens[1]) assert (this_parameter_number == processed_parameters ), "Parameters out of order!" self._parameters_grids[this_parameter_name] = store[key] processed_parameters += 1 self._energies = np.array(store["energies"]) # Now get the metadata metadata = store.get_storer("data_frame").attrs.metadata description = metadata["description"] name = metadata["name"] self._interpolation_degree = metadata["interpolation_degree"] self._spline_smoothing_factor = metadata["spline_smoothing_factor"] # Make the dictionary of parameters function_definition = collections.OrderedDict() function_definition["description"] = description function_definition["latex"] = "n.a." 
# Now build the parameters according to the content of the parameter grid parameters = collections.OrderedDict() parameters["K"] = Parameter("K", 1.0) parameters["scale"] = Parameter("scale", 1.0) for parameter_name in list(self._parameters_grids.keys()): grid = self._parameters_grids[parameter_name] parameters[parameter_name] = Parameter( parameter_name, grid.median(), min_value=grid.min(), max_value=grid.max(), ) if other_name is None: super(TemplateModel, self).__init__(name, function_definition, parameters) else: super(TemplateModel, self).__init__(other_name, function_definition, parameters) # Finally prepare the interpolators self._prepare_interpolators(log_interp)
def xspec_model_factory(model_name, xspec_function, model_type, definition):
    """
    Build (or reuse) an auto-generated wrapper class for an Xspec model.

    The generated Python source is cached as XS_<model_name>.py in the user
    data directory; if that file already exists it is reused, otherwise it is
    created from the class_definition_code template.

    :param model_name: name of the Xspec model
    :param xspec_function: name of the Xspec function implementing the model
    :param model_type: 'add' (additive), 'mul' (multiplicative); 'con'
        (convolution) is not supported
    :param definition: dict describing the model (parameters, etc.), dumped
        into the generated class docstring
    :return: tuple (class name, class object)
    """

    class_name = 'XS_%s' % model_name

    # Path to the user data directory, where the generated module lives
    user_data_path = str(get_user_data_path())

    code_file_name = os.path.join(user_data_path, '%s.py' % class_name)

    # Generate the module only if it is not cached already
    if not os.path.exists(code_file_name):

        print("Generating code for Xspec model %s..." % model_name)

        # Additive models need an extra normalization parameter
        if model_type == 'add':

            definition['parameters']['norm'] = {
                'initial value': 1.0,
                'desc': '(see https://heasarc.gsfc.nasa.gov/xanadu/xspec/manual/'
                'XspecModels.html)',
                'min': 0,
                'max': None,
                'delta': 0.1,
                'unit': 'keV / (cm2 s)',
                'free': True
            }

        assert model_type != 'con', "Convolution models are not yet supported"

        # Comma-separated parameter names, substituted into the template
        joined_parameter_names = ", ".join(list(definition['parameters'].keys()))

        # The model definition, serialized as YAML, becomes the docstring
        yaml_docstring = my_yaml.dump(definition, default_flow_style=False)

        # Fill in the placeholders of the class template, in order
        substitutions = (
            ('$MODEL_NAME$', model_name),
            ('$DOCSTRING$', yaml_docstring),
            ('$PARAMETERS_NAMES$', joined_parameter_names),
            ('$XSPEC_FUNCTION$', xspec_function),
            ('$MODEL_TYPE$', model_type),
        )

        generated_code = class_definition_code

        for placeholder, replacement in substitutions:

            generated_code = generated_code.replace(placeholder, replacement)

        # Write the generated module to the cache file
        with open(code_file_name, 'w+') as f:

            f.write(
                "# This code has been automatically generated. Do not edit.\n")
            f.write("\n\n%s\n" % generated_code)

        # Give the filesystem a moment before importing the new module
        time.sleep(1)

    # Make sure the user data directory is importable
    if user_data_path not in sys.path:

        sys.path.append(user_data_path)

    # Import the class in the current namespace (locals), promoting any
    # warning raised during the import to an error
    with warnings.catch_warnings():

        warnings.simplefilter("error")

        exec('from %s import %s' % (class_name, class_name))

    # Return the class we just created (or re-imported from cache)
    return class_name, locals()[class_name]
def save_data(self, overwrite=False):
    """
    Save the template data to the <name>.h5 file in the user data directory.

    :param overwrite: if True, silently remove an existing file with the
        same name before writing; if False (default), refuse to overwrite
    :raises IOError: if the target file exists and overwrite is False, or
        if it exists and cannot be removed
    :return: none
    """

    # First make sure that the whole data matrix has been filled
    assert not self._data_frame.isnull().values.any(), (
        "You have NaNs in the data matrix. Usually this means "
        "that you didn't fill it up completely, or that some of "
        "your data contains nans. Cannot save the file.")

    # Get the data directory
    data_dir_path = get_user_data_path()

    # Sanitize the data file
    filename_sanitized = os.path.abspath(
        os.path.join(data_dir_path, "%s.h5" % self._name))

    # Check that it does not exists
    if os.path.exists(filename_sanitized):

        if overwrite:

            try:

                os.remove(filename_sanitized)

            # Narrowed from a bare `except:`, which would also have swallowed
            # KeyboardInterrupt/SystemExit; os.remove raises OSError on failure.
            # Chain the original exception so the real cause is not masked.
            except OSError as exc:

                raise IOError(
                    "The file %s already exists and cannot be removed (maybe you do not have "
                    "permissions to do so?). " % filename_sanitized) from exc

        else:

            raise IOError(
                "The file %s already exists! You cannot call two different "
                "template models with the same name" % filename_sanitized)

    # Open the HDF5 file and write objects
    with HDFStore(filename_sanitized) as store:

        # The _clean_cols_for_hdf is needed because for some reasons the format of some columns
        # is not accepted by .to_hdf otherwise
        self._clean_cols_for_hdf(self._data_frame).to_hdf(
            store, "data_frame")

        # Metadata needed to reconstruct the model later (read back by
        # the template model initializer)
        store.get_storer("data_frame").attrs.metadata = {
            "description": self._description,
            "name": self._name,
            "interpolation_degree": int(self._interpolation_degree),
            "spline_smoothing_factor": self._spline_smoothing_factor,
        }

        # Store each parameter grid under "p_<i>_<name>"; the index i
        # preserves the parameter order for the reader
        for i, parameter_name in enumerate(self._parameters_grids.keys()):

            store["p_%i_%s" % (i, parameter_name)] = pd.Series(
                self._parameters_grids[parameter_name])

        store["energies"] = pd.Series(self._energies)
def xspec_model_factory(model_name, xspec_function, model_type, definition):
    """
    Build (or reuse) an auto-generated wrapper class for an Xspec model.

    The generated Python source is cached as XS_<model_name>.py in the user
    data directory; if that file already exists it is reused, otherwise it is
    created from the class_definition_code template.

    :param model_name: name of the Xspec model
    :param xspec_function: name of the Xspec function implementing the model
    :param model_type: 'add' (additive), 'mul' (multiplicative); 'con'
        (convolution) is not supported
    :param definition: dict describing the model (parameters, etc.), dumped
        into the generated class docstring
    :return: tuple (class name, class object)
    """

    class_name = 'XS_%s' % model_name

    # Get the path to the user data directory.
    # FIX: coerce to str — if get_user_data_path() returns a pathlib.Path,
    # the `user_data_path not in sys.path` membership test below would never
    # match and a Path object would be appended to sys.path.
    user_data_path = str(get_user_data_path())

    # Check if the code for this function already exists

    code_file_name = os.path.join(user_data_path, '%s.py' % class_name)

    if os.path.exists(code_file_name):

        # Code already exists
        pass

    else:

        print("Generating code for Xspec model %s..." % model_name)

        # If this is an additive model (model_type == 'add') we need to add
        # one more parameter (normalization)

        if model_type == 'add':

            definition['parameters']['norm'] = {'initial value': 1.0,
                                                'desc': '(see https://heasarc.gsfc.nasa.gov/xanadu/xspec/manual/'
                                                        'XspecModels.html)',
                                                'min': 0,
                                                'max': None,
                                                'delta': 0.1,
                                                'unit': 'keV / (cm2 s)',
                                                'free': True}

        assert model_type != 'con', "Convolution models are not yet supported"

        # Get a list of the parameter names
        parameters_names = ", ".join(list(definition['parameters'].keys()))

        # Create the docstring (block style, for a readable generated file)
        docstring = my_yaml.dump(definition, default_flow_style=False)

        # Create the class by substituting in the class_definition_code the
        # relevant things for this model

        code = class_definition_code.replace('$MODEL_NAME$', model_name)
        code = code.replace('$DOCSTRING$', docstring)
        code = code.replace('$PARAMETERS_NAMES$', parameters_names)
        code = code.replace('$XSPEC_FUNCTION$', xspec_function)
        code = code.replace('$MODEL_TYPE$', model_type)

        # Write to the file

        with open(code_file_name, 'w+') as f:

            f.write("# This code has been automatically generated. Do not edit.\n")
            f.write("\n\n%s\n" % code)

    # Add the path to sys.path if it doesn't

    if user_data_path not in sys.path:

        sys.path.append(user_data_path)

    # Import the class in the current namespace (locals), promoting any
    # warning raised during the import to an error

    with warnings.catch_warnings():

        warnings.simplefilter("error")

        exec('from %s import %s' % (class_name, class_name))

    # Return the class we just created

    return class_name, locals()[class_name]