def calculate_matrix(
        self,
        dataset_model: DatasetModel,
        indices: dict[str, int],
        **kwargs,
    ):
        """Calculate the coherent artifact matrix for ``dataset_model``.

        Returns a tuple ``(clp_labels, matrix)`` where ``matrix`` has one
        column per artifact order, evaluated on the model axis.

        Raises
        ------
        ModelError
            If the artifact order is outside [1, 3], the dataset has no IRF,
            or the IRF is not a multi-Gaussian IRF.
        """
        # Only artifact derivatives up to the third order are implemented.
        if not 1 <= self.order <= 3:
            raise ModelError(
                "Coherent artifact order must be between 1 and 3")

        if dataset_model.irf is None:
            raise ModelError(f'No irf in dataset "{dataset_model.label}"')

        if not isinstance(dataset_model.irf, IrfMultiGaussian):
            # Fixed: the original message was missing the closing quote
            # after the dataset label.
            raise ModelError(
                f'Irf in dataset "{dataset_model.label}" is not a gaussian irf.'
            )

        global_dimension = dataset_model.get_global_dimension()
        global_index = indices.get(global_dimension)
        global_axis = dataset_model.get_global_axis()
        model_axis = dataset_model.get_model_axis()

        irf = dataset_model.irf

        # Only the first Gaussian's center and width are used for the artifact.
        center, width, _, shift, _, _ = irf.parameter(global_index,
                                                      global_axis)
        center = center[0] - shift
        # An explicitly configured width overrides the IRF width.
        width = self.width.value if self.width is not None else width[0]

        matrix = _calculate_coherent_artifact_matrix(center, width, model_axis,
                                                     self.order)
        return self.compartments(), matrix
Example #2
0
def simulate_global_model(
    dataset_model: DatasetModel,
    parameters: ParameterGroup,
    clp: xr.DataArray | None = None,
):
    """Simulate a dataset with a global (full) model.

    The global matrix is calculated from the dataset's global megacomplexes
    and handed on to ``simulate_clp``.

    Parameters
    ----------
    dataset_model : DatasetModel
        The dataset model to simulate.
    parameters : ParameterGroup
        The parameters used for the simulation.
    clp : xr.DataArray | None
        Not supported for full models yet; must be ``None``.

    Raises
    ------
    NotImplementedError
        If ``clp`` is given.
    ValueError
        If any global megacomplex is index dependent.
    """

    # TODO: implement full model clp
    if clp is not None:
        raise NotImplementedError(
            "Simulation of full models with clp is not supported yet.")

    if any(
            m.index_dependent(dataset_model)
            for m in dataset_model.global_megacomplex):
        raise ValueError(
            "Index dependent models for global dimension are not supported.")

    # The global matrix is calculated once (no per-index dependence allowed,
    # see the guard above) and transposed into (clp_label, global axis) form.
    global_matrix = calculate_matrix(dataset_model, {}, as_global_model=True)
    global_clp_labels = global_matrix.clp_labels
    global_matrix = xr.DataArray(
        global_matrix.matrix.T,
        coords=[
            ("clp_label", global_clp_labels),
            (dataset_model.get_global_dimension(),
             dataset_model.get_global_axis()),
        ],
    )

    return simulate_clp(
        dataset_model,
        parameters,
        global_matrix,
    )
Example #3
0
    def _calculate_residual(self, label: str, dataset_model: DatasetModel):
        """Calculate clps and residuals for every global-axis index of the
        dataset labelled ``label`` and store them on the owning group.

        Also appends the dataset's additional clp penalty to the group when
        it is non-empty.
        """
        # Reset the per-dataset result containers on the group.
        self._group._reduced_clps[label] = []
        self._group._clps[label] = []
        self._group._weighted_residuals[label] = []
        self._group._residuals[label] = []

        data = dataset_model.get_data()
        global_axis = dataset_model.get_global_axis()

        for i, index in enumerate(global_axis):
            # Index dependent models have one matrix per global index;
            # otherwise a single shared matrix is used for all indices.
            reduced_clp_labels, reduced_matrix = (
                self._group.reduced_matrices[label][i]
                if dataset_model.is_index_dependent() else
                self._group.reduced_matrices[label])
            if not dataset_model.is_index_dependent():
                # Copy the shared matrix so the in-place scaling/weighting
                # below does not corrupt it for later iterations.
                reduced_matrix = reduced_matrix.copy()

            if dataset_model.scale is not None:
                reduced_matrix *= dataset_model.scale

            weight = dataset_model.get_weight()
            if weight is not None:
                apply_weight(reduced_matrix, weight[:, i])

            reduced_clps, residual = self._group._residual_function(
                reduced_matrix, data[:, i])

            self._group._reduced_clps[label].append(reduced_clps)

            # Expand the reduced clps back onto the full clp label set.
            clp_labels = self._get_clp_labels(label, i)
            self._group._clps[label].append(
                retrieve_clps(
                    self._group.model,
                    self._group.parameters,
                    clp_labels,
                    reduced_clp_labels,
                    reduced_clps,
                    index,
                ))
            self._group._weighted_residuals[label].append(residual)
            if weight is not None:
                # Undo the weighting so the plain residual is stored as well.
                self._group._residuals[label].append(residual / weight[:, i])
            else:
                self._group._residuals[label].append(residual)

        clp_labels = self._get_clp_labels(label)
        additional_penalty = calculate_clp_penalties(
            self._group.model,
            self._group.parameters,
            clp_labels,
            self._group._clps[label],
            global_axis,
            self._group.dataset_models,
        )
        if additional_penalty.size != 0:
            self._group._additional_penalty.append(additional_penalty)
Example #4
0
 def _calculate_index_dependent_matrix(self, label: str,
                                       dataset_model: DatasetModel):
     """Calculate one matrix per global-axis index for the dataset labelled
     ``label`` and store the matrices (and, when applicable, the reduced
     matrices) on the owning group."""
     self._group._matrices[label] = []
     self._group._reduced_matrices[label] = []
     for i, index in enumerate(dataset_model.get_global_axis()):
         matrix = calculate_matrix(
             dataset_model,
             {dataset_model.get_global_dimension(): i},
         )
         self._group._matrices[label].append(matrix)
         # Datasets with a global model keep their full matrices; only
         # the others are reduced here.
         if not dataset_model.has_global_model():
             reduced_matrix = reduce_matrix(matrix, self._group.model,
                                            self._group.parameters, index)
             self._group._reduced_matrices[label].append(reduced_matrix)
    def calculate_matrix(
        self,
        dataset_model: DatasetModel,
        indices: dict[str, int],
        **kwargs,
    ):
        """Build the damped-oscillation matrix for the dataset.

        Returns ``(clp_labels, matrix)`` with one cosine and one sine column
        per oscillation label, evaluated on the model axis.
        """
        cos_labels = [f"{label}_cos" for label in self.labels]
        sin_labels = [f"{label}_sin" for label in self.labels]
        clp_label = cos_labels + sin_labels

        model_axis = dataset_model.get_model_axis()
        # Smallest spacing of the model axis bounds the resolvable frequency.
        delta_min = np.abs(np.diff(model_axis)).min()
        # The factor 0.03 converts wavenumber (cm-1) to frequency (THz):
        # it is the product of the speed of light (3*10**10 cm/s) and the
        # time unit ps (10^-12).
        frequency_max = 1 / (2 * 0.03 * delta_min)
        frequencies = np.array(self.frequencies) * 0.03 * 2 * np.pi
        # Fold frequencies above the maximum back into the resolvable range.
        too_fast = frequencies >= frequency_max
        frequencies[too_fast] = np.mod(frequencies[too_fast], frequency_max)
        rates = np.array(self.rates)

        matrix = np.ones((model_axis.size, len(clp_label)), dtype=np.float64)

        irf = dataset_model.irf
        if irf is None:
            calculate_damped_oscillation_matrix_no_irf(matrix, frequencies, rates, model_axis)
        elif isinstance(irf, IrfMultiGaussian):
            global_dimension = dataset_model.get_global_dimension()
            global_axis = dataset_model.get_global_axis()
            global_index = indices.get(global_dimension)
            centers, widths, scales, shift, _, _ = irf.parameter(
                global_index, global_axis
            )
            # Accumulate one contribution per IRF Gaussian, then normalize
            # by the total scale.
            for center, width, scale in zip(centers, widths, scales):
                matrix += calculate_damped_oscillation_matrix_gaussian_irf(
                    frequencies,
                    rates,
                    model_axis,
                    center,
                    width,
                    shift,
                    scale,
                )
            matrix /= np.sum(scales)

        return clp_label, matrix
Example #6
0
def calculate_matrix(
    megacomplex: Megacomplex,
    dataset_model: DatasetModel,
    indices: dict[str, int],
    **kwargs,
):
    """Calculate the decay concentration matrix of a decay megacomplex.

    Returns ``(compartments, matrix)`` where the matrix columns hold the
    eigen-decays of the megacomplex's K-matrix, transformed by its A matrix.

    Raises
    ------
    ValueError
        If the calculated concentrations contain non-finite values.
    """
    compartments = megacomplex.get_compartments(dataset_model)
    initial_concentration = megacomplex.get_initial_concentration(dataset_model)
    k_matrix = megacomplex.get_k_matrix()

    # The decay rates are the eigenvalues of the K-matrix.
    rates = k_matrix.rates(compartments, initial_concentration)

    global_index = indices.get(dataset_model.get_global_dimension())
    global_axis = dataset_model.get_global_axis()
    model_axis = dataset_model.get_model_axis()

    # One column per rate, evaluated along the model axis.
    matrix = np.zeros((model_axis.size, rates.size), dtype=np.float64)
    decay_matrix_implementation(
        matrix, rates, global_index, global_axis, model_axis, dataset_model
    )

    if not np.all(np.isfinite(matrix)):
        raise ValueError(
            f"Non-finite concentrations for K-Matrix '{k_matrix.label}':\n"
            f"{k_matrix.matrix_as_markdown(fill_parameters=True)}")

    # Apply the A matrix and return.
    return compartments, matrix @ megacomplex.get_a_matrix(dataset_model)
    def finalize_data(
        self,
        dataset_model: DatasetModel,
        dataset: xr.Dataset,
        is_full_model: bool = False,
        as_global: bool = False,
    ):
        """Add damped-oscillation amplitude, phase, sin and cos data to ``dataset``.

        Does nothing for full models (``is_full_model=True``).
        """
        if is_full_model:
            return

        # After the guard above ``is_full_model`` is always False, so the
        # original conditional selecting ``global_megacomplex`` was
        # unreachable; only the dataset's own megacomplexes matter here.
        megacomplexes = dataset_model.megacomplex
        unique = len([m for m in megacomplexes if isinstance(m, DampedOscillationMegacomplex)]) < 2

        # Prefix coordinates with the label when several damped-oscillation
        # megacomplexes exist, to keep their coordinates distinct.
        prefix = "damped_oscillation" if unique else f"{self.label}_damped_oscillation"

        dataset.coords[f"{prefix}"] = self.labels
        dataset.coords[f"{prefix}_frequency"] = (prefix, self.frequencies)
        dataset.coords[f"{prefix}_rate"] = (prefix, self.rates)

        # Amplitude and phase per (global index, oscillation) from the
        # sin/cos clp pair of each oscillation.
        dim1 = dataset_model.get_global_axis().size
        dim2 = len(self.labels)
        doas = np.zeros((dim1, dim2), dtype=np.float64)
        phase = np.zeros((dim1, dim2), dtype=np.float64)
        for i, label in enumerate(self.labels):
            sin = dataset.clp.sel(clp_label=f"{label}_sin")
            cos = dataset.clp.sel(clp_label=f"{label}_cos")
            doas[:, i] = np.sqrt(sin * sin + cos * cos)
            phase[:, i] = np.unwrap(np.arctan2(sin, cos))

        dataset[f"{prefix}_associated_spectra"] = (
            (dataset_model.get_global_dimension(), prefix),
            doas,
        )

        dataset[f"{prefix}_phase"] = (
            (dataset_model.get_global_dimension(), prefix),
            phase,
        )

        # Index dependent matrices carry an extra global dimension.
        if self.index_dependent(dataset_model):
            dataset[f"{prefix}_sin"] = (
                (
                    dataset_model.get_global_dimension(),
                    dataset_model.get_model_dimension(),
                    prefix,
                ),
                dataset.matrix.sel(clp_label=[f"{label}_sin" for label in self.labels]).values,
            )

            dataset[f"{prefix}_cos"] = (
                (
                    dataset_model.get_global_dimension(),
                    dataset_model.get_model_dimension(),
                    prefix,
                ),
                dataset.matrix.sel(clp_label=[f"{label}_cos" for label in self.labels]).values,
            )
        else:
            dataset[f"{prefix}_sin"] = (
                (dataset_model.get_model_dimension(), prefix),
                dataset.matrix.sel(clp_label=[f"{label}_sin" for label in self.labels]).values,
            )

            dataset[f"{prefix}_cos"] = (
                (dataset_model.get_model_dimension(), prefix),
                dataset.matrix.sel(clp_label=[f"{label}_cos" for label in self.labels]).values,
            )