def finalize_data(
    self,
    dataset_model: DatasetModel,
    dataset: xr.Dataset,
    is_full_model: bool = False,
    as_global: bool = False,
):
    if not is_full_model:
        global_dimension = dataset_model.get_global_dimension()
        model_dimension = dataset_model.get_model_dimension()
        dataset.coords["coherent_artifact_order"] = np.arange(1, self.order + 1)
        response_dimensions = (model_dimension, "coherent_artifact_order")
        if dataset_model.is_index_dependent():
            response_dimensions = (global_dimension, *response_dimensions)
        dataset["coherent_artifact_response"] = (
            response_dimensions,
            dataset.matrix.sel(clp_label=self.compartments()).values,
        )
        dataset["coherent_artifact_associated_spectra"] = (
            (global_dimension, "coherent_artifact_order"),
            dataset.clp.sel(clp_label=self.compartments()).values,
        )
        retrieve_irf(dataset_model, dataset, global_dimension)
def simulate_global_model(
    dataset_model: DatasetModel,
    parameters: ParameterGroup,
    clp: xr.DataArray = None,
):
    """Simulates a global model."""
    # TODO: implement full model clp
    if clp is not None:
        raise NotImplementedError("Simulation of full models with clp is not supported yet.")

    if any(m.index_dependent(dataset_model) for m in dataset_model.global_megacomplex):
        raise ValueError("Index dependent models for global dimension are not supported.")

    global_matrix = calculate_matrix(dataset_model, {}, as_global_model=True)
    global_clp_labels = global_matrix.clp_labels
    global_matrix = xr.DataArray(
        global_matrix.matrix.T,
        coords=[
            ("clp_label", global_clp_labels),
            (dataset_model.get_global_dimension(), dataset_model.get_global_axis()),
        ],
    )

    return simulate_clp(
        dataset_model,
        parameters,
        global_matrix,
    )
def calculate_matrix(
    self,
    dataset_model: DatasetModel,
    indices: dict[str, int],
    **kwargs,
):
    if not 1 <= self.order <= 3:
        raise ModelError("Coherent artifact order must be between 1 and 3.")

    if dataset_model.irf is None:
        raise ModelError(f'No irf in dataset "{dataset_model.label}"')

    if not isinstance(dataset_model.irf, IrfMultiGaussian):
        raise ModelError(f'Irf in dataset "{dataset_model.label}" is not a gaussian irf.')

    global_dimension = dataset_model.get_global_dimension()
    global_index = indices.get(global_dimension)
    global_axis = dataset_model.get_global_axis()
    model_axis = dataset_model.get_model_axis()

    irf = dataset_model.irf

    center, width, _, shift, _, _ = irf.parameter(global_index, global_axis)
    center = center[0] - shift
    width = self.width.value if self.width is not None else width[0]

    matrix = _calculate_coherent_artifact_matrix(center, width, model_axis, self.order)
    return self.compartments(), matrix
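# Illustrative sketch only, not the library's _calculate_coherent_artifact_matrix:
# the coherent artifact is commonly modeled as the Gaussian IRF plus its first and
# second time derivatives, one column per artifact order. The helper name, argument
# types (scalars plus a numpy model axis) and scaling here are assumptions.
def _sketch_coherent_artifact_matrix(center: float, width: float, model_axis, order: int):
    import numpy as np

    matrix = np.zeros((model_axis.size, order), dtype=np.float64)
    scaled = (model_axis - center) / width
    matrix[:, 0] = np.exp(-0.5 * scaled**2)  # the Gaussian IRF itself
    if order > 1:
        matrix[:, 1] = matrix[:, 0] * -scaled / width  # first time derivative
    if order > 2:
        matrix[:, 2] = matrix[:, 0] * (scaled**2 - 1) / width**2  # second time derivative
    return matrix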
def finalize_data(
    self,
    dataset_model: DatasetModel,
    dataset: xr.Dataset,
    is_full_model: bool = False,
    as_global: bool = False,
):
    species_dimension = "spectral_species" if as_global else "species"
    if species_dimension in dataset.coords:
        return

    species = []
    megacomplexes = (
        dataset_model.global_megacomplex if as_global else dataset_model.megacomplex
    )
    for m in megacomplexes:
        if isinstance(m, SpectralMegacomplex):
            species += [
                compartment for compartment in m.shape if compartment not in species
            ]

    dataset.coords[species_dimension] = species
    matrix = dataset.global_matrix if as_global else dataset.matrix
    clp_dim = "global_clp_label" if as_global else "clp_label"
    dataset["species_spectra"] = (
        (
            dataset_model.get_model_dimension()
            if not as_global
            else dataset_model.get_global_dimension(),
            species_dimension,
        ),
        matrix.sel({clp_dim: species}).values,
    )

    if not is_full_model:
        dataset["species_associated_concentrations"] = (
            (
                dataset_model.get_global_dimension(),
                species_dimension,
            ),
            dataset.clp.sel(clp_label=species).data,
        )
def _calculate_index_dependent_matrix(self, label: str, dataset_model: DatasetModel):
    self._group._matrices[label] = []
    self._group._reduced_matrices[label] = []
    for i, index in enumerate(dataset_model.get_global_axis()):
        matrix = calculate_matrix(
            dataset_model,
            {dataset_model.get_global_dimension(): i},
        )
        self._group._matrices[label].append(matrix)
        if not dataset_model.has_global_model():
            reduced_matrix = reduce_matrix(
                matrix, self._group.model, self._group.parameters, index
            )
            self._group._reduced_matrices[label].append(reduced_matrix)
def calculate_matrix(
    self,
    dataset_model: DatasetModel,
    indices: dict[str, int],
    **kwargs,
):
    clp_label = [f"{label}_cos" for label in self.labels] + [
        f"{label}_sin" for label in self.labels
    ]

    model_axis = dataset_model.get_model_axis()
    delta = np.abs(model_axis[1:] - model_axis[:-1])
    delta_min = delta[np.argmin(delta)]
    # multiply by 0.03 to convert wavenumber (cm-1) to frequency (THz), where 0.03 is
    # the product of the speed of light (3 * 10**10 cm/s) and the time unit ps (10**-12 s)
    frequency_max = 1 / (2 * 0.03 * delta_min)
    frequencies = np.array(self.frequencies) * 0.03 * 2 * np.pi
    frequencies[frequencies >= frequency_max] = np.mod(
        frequencies[frequencies >= frequency_max], frequency_max
    )
    rates = np.array(self.rates)

    matrix = np.ones((model_axis.size, len(clp_label)), dtype=np.float64)

    if dataset_model.irf is None:
        calculate_damped_oscillation_matrix_no_irf(matrix, frequencies, rates, model_axis)
    elif isinstance(dataset_model.irf, IrfMultiGaussian):
        global_dimension = dataset_model.get_global_dimension()
        global_axis = dataset_model.get_global_axis()
        global_index = indices.get(global_dimension)
        centers, widths, scales, shift, _, _ = dataset_model.irf.parameter(
            global_index, global_axis
        )
        for center, width, scale in zip(centers, widths, scales):
            matrix += calculate_damped_oscillation_matrix_gaussian_irf(
                frequencies,
                rates,
                model_axis,
                center,
                width,
                shift,
                scale,
            )
        matrix /= np.sum(scales)

    return clp_label, matrix
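# Illustrative sketch only, not the library's calculate_damped_oscillation_matrix_no_irf:
# without an IRF each oscillation contributes a damped cosine and a damped sine column,
# exp(-rate * t) * cos(freq * t) and exp(-rate * t) * sin(freq * t), restricted to t >= 0.
# The column ordering (all *_cos, then all *_sin) follows clp_label above; the exact
# in-place convention of the real helper is an assumption.
def _sketch_damped_oscillation_matrix_no_irf(matrix, frequencies, rates, model_axis):
    import numpy as np

    t = model_axis[:, np.newaxis]
    damping = np.exp(-rates[np.newaxis, :] * t) * (t >= 0)
    phase = frequencies[np.newaxis, :] * t
    matrix[:, : len(frequencies)] = damping * np.cos(phase)  # *_cos columns
    matrix[:, len(frequencies) :] = damping * np.sin(phase)  # *_sin columns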
def finalize_data(
    dataset_model: DatasetModel,
    dataset: xr.Dataset,
    is_full_model: bool = False,
    as_global: bool = False,
):
    decay_megacomplexes = collect_megacomplexes(dataset_model)
    global_dimension = dataset_model.get_global_dimension()
    name = "images" if global_dimension == "pixel" else "spectra"

    species_dimension = "decay_species" if as_global else "species"
    if species_dimension not in dataset.coords:
        # We are the first decay megacomplex called, so we add the species
        # associated data for all decay megacomplexes.
        all_species = []
        for megacomplex in decay_megacomplexes:
            for species in megacomplex.get_compartments(dataset_model):
                if species not in all_species:
                    all_species.append(species)
        retrieve_species_associated_data(
            dataset_model,
            dataset,
            all_species,
            species_dimension,
            global_dimension,
            name,
            is_full_model,
            as_global,
        )
        retrieve_irf(dataset_model, dataset, global_dimension)

    if not is_full_model:
        multiple_complexes = len(decay_megacomplexes) > 1
        for megacomplex in decay_megacomplexes:
            retrieve_decay_associated_data(
                megacomplex,
                dataset_model,
                dataset,
                global_dimension,
                name,
                multiple_complexes,
            )
def calculate_matrix(
    megacomplex: Megacomplex,
    dataset_model: DatasetModel,
    indices: dict[str, int],
    **kwargs,
):
    compartments = megacomplex.get_compartments(dataset_model)
    initial_concentration = megacomplex.get_initial_concentration(dataset_model)
    k_matrix = megacomplex.get_k_matrix()

    # the rates are the eigenvalues of the k matrix
    rates = k_matrix.rates(compartments, initial_concentration)

    global_dimension = dataset_model.get_global_dimension()
    global_index = indices.get(global_dimension)
    global_axis = dataset_model.get_global_axis()
    model_axis = dataset_model.get_model_axis()

    # initialize the matrix
    size = (model_axis.size, rates.size)
    matrix = np.zeros(size, dtype=np.float64)

    decay_matrix_implementation(
        matrix, rates, global_index, global_axis, model_axis, dataset_model
    )

    if not np.all(np.isfinite(matrix)):
        raise ValueError(
            f"Non-finite concentrations for K-Matrix '{k_matrix.label}':\n"
            f"{k_matrix.matrix_as_markdown(fill_parameters=True)}"
        )

    # apply the A matrix
    matrix = matrix @ megacomplex.get_a_matrix(dataset_model)

    # done
    return compartments, matrix
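# Illustrative sketch only, not the library's decay_matrix_implementation: in the
# simplest case, without an IRF, each column of the concentration matrix is a plain
# exponential decay exp(-rate * t), clipped to t >= 0. A Gaussian IRF would replace
# this with the analytic convolution of the exponential with the Gaussian; that case
# is omitted here.
def _sketch_decay_matrix_no_irf(matrix, rates, model_axis):
    import numpy as np

    t = model_axis[:, np.newaxis]
    matrix[:, :] = np.exp(-rates[np.newaxis, :] * t) * (t >= 0)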
def finalize_data(
    self,
    dataset_model: DatasetModel,
    dataset: xr.Dataset,
    is_full_model: bool = False,
    as_global: bool = False,
):
    if is_full_model:
        return

    megacomplexes = (
        dataset_model.global_megacomplex if is_full_model else dataset_model.megacomplex
    )
    unique = len([m for m in megacomplexes if isinstance(m, DampedOscillationMegacomplex)]) < 2
    prefix = "damped_oscillation" if unique else f"{self.label}_damped_oscillation"

    dataset.coords[f"{prefix}"] = self.labels
    dataset.coords[f"{prefix}_frequency"] = (prefix, self.frequencies)
    dataset.coords[f"{prefix}_rate"] = (prefix, self.rates)

    dim1 = dataset_model.get_global_axis().size
    dim2 = len(self.labels)
    doas = np.zeros((dim1, dim2), dtype=np.float64)
    phase = np.zeros((dim1, dim2), dtype=np.float64)
    for i, label in enumerate(self.labels):
        sin = dataset.clp.sel(clp_label=f"{label}_sin")
        cos = dataset.clp.sel(clp_label=f"{label}_cos")
        doas[:, i] = np.sqrt(sin * sin + cos * cos)
        phase[:, i] = np.unwrap(np.arctan2(sin, cos))

    dataset[f"{prefix}_associated_spectra"] = (
        (dataset_model.get_global_dimension(), prefix),
        doas,
    )

    dataset[f"{prefix}_phase"] = (
        (dataset_model.get_global_dimension(), prefix),
        phase,
    )

    if self.index_dependent(dataset_model):
        dataset[f"{prefix}_sin"] = (
            (
                dataset_model.get_global_dimension(),
                dataset_model.get_model_dimension(),
                prefix,
            ),
            dataset.matrix.sel(clp_label=[f"{label}_sin" for label in self.labels]).values,
        )

        dataset[f"{prefix}_cos"] = (
            (
                dataset_model.get_global_dimension(),
                dataset_model.get_model_dimension(),
                prefix,
            ),
            dataset.matrix.sel(clp_label=[f"{label}_cos" for label in self.labels]).values,
        )
    else:
        dataset[f"{prefix}_sin"] = (
            (dataset_model.get_model_dimension(), prefix),
            dataset.matrix.sel(clp_label=[f"{label}_sin" for label in self.labels]).values,
        )

        dataset[f"{prefix}_cos"] = (
            (dataset_model.get_model_dimension(), prefix),
            dataset.matrix.sel(clp_label=[f"{label}_cos" for label in self.labels]).values,
        )
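# Hypothetical usage sketch: once finalize_data has run, the damped-oscillation
# results are ordinary variables on the result dataset. The function and variable
# names below are assumptions, except for the variable names derived from the
# prefix logic above (plain "damped_oscillation" when only one such megacomplex
# is present in the dataset model).
def _sketch_read_damped_oscillation_results(result_dataset: xr.Dataset):
    doas = result_dataset["damped_oscillation_associated_spectra"]  # amplitudes per oscillation
    phases = result_dataset["damped_oscillation_phase"]  # unwrapped phases per oscillation
    return doas, phases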