Example #1
    def add_restraints_to_target_xl_unit_cell(self, experiment_id, values,
                                              sigma):
        # On input we will have one id value, 6 target values and 6 sigmas.

        # select the right parameterisation, if one exists
        try:
            param_i = self._exp_to_xluc_param[experiment_id]
        except KeyError:
            return

        # fail now if this is already restrained.
        if param_i.parameterisation in self._param_to_restraint:
            raise DialsRefineConfigError(
                "Parameterisation already restrained. Cannot create "
                "additional restraint with experiment {}".format(
                    experiment_id))

        # create new restraint
        tie = SingleUnitCellTie(
            model_parameterisation=param_i.parameterisation,
            target=values,
            sigma=sigma)

        # add to the restraint list along with the global parameter index
        self._single_model_restraints.append(
            RestraintIndex(tie, param_i.istart))

        # also add the parameterisation to the set for uniqueness testing
        self._param_to_restraint.add(param_i.parameterisation)
Example #2
    def build_constraint(self, constraint_scope, parameterisation, model_type):
        """Create a constraint for a single parameter specified by
        constraint_scope"""

        if constraint_scope.id is None:
            # get one experiment id for each parameterisation to apply to all
            constraint_scope.id = [e.get_experiment_ids()[0] for e in parameterisation]

        # find which parameterisations are involved, and if any are scan-varying
        # how many sample points there are
        prefixes = []
        n_samples = 0
        for i, p in enumerate(parameterisation):
            if hasattr(p, "num_samples"):
                ns = p.num_samples()
                if n_samples == 0:
                    n_samples = ns
                if ns != n_samples:
                    raise DialsRefineConfigError(
                        "Constraints cannot be created between scan-varying "
                        "parameterisations when these have a different number of "
                        "sample points."
                    )
            for j in p.get_experiment_ids():
                if j in constraint_scope.id:
                    prefixes.append(model_type + "{}".format(j + 1))
                    break

        # ignore model name prefixes
        patt1 = re.compile("^" + model_type + "[0-9]+")
        pname = patt1.sub("", constraint_scope.parameter)

        # Use a regex to find the parameters to constrain from a list of all the
        # parameter names. There are multiple parts to this. The first part
        # identifies the relevant model type and parameterisation ordinal index,
        # accepting those that were chosen according to the supplied experiment
        # ids. The next part allows for additional text, like 'Group1' that may
        # be used by a multi-panel detector parameterisation. Then the parameter
        # name itself, like 'Dist'. Finally, to accommodate scan-varying
        # parameterisations, suffixes like '_sample0' and '_sample1' are
        # distinguished so that these are constrained separately.
        for i in range(max(n_samples, 1)):
            patt2 = re.compile(
                "^("
                + "|".join(prefixes)
                + r"){1}(?![0-9])(\w*"
                + pname
                + ")(_sample{})?$".format(i)
            )
            indices = [j for j, s in enumerate(self._all_names) if patt2.match(s)]
            if len(indices) == 1:
                continue
            logger.debug(
                "\nThe following parameters will be constrained "
                "to enforce equal shifts at each step of refinement:"
            )
            for k in indices:
                logger.debug(self._all_names[k])
        return EqualShiftConstraint(indices, self._all_vals)
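The comment block above explains how the parameter-selection regex is assembled. As a quick aside, here is a minimal, self-contained sketch (not DIALS code; the parameter names, prefixes and sample index are made up) showing how a pattern built the same way picks out the names to constrain:

import re

# Hypothetical parameter names of the kind produced by a scan-varying,
# hierarchical detector parameterisation
all_names = [
    "Detector1Group1Dist_sample0",
    "Detector1Group1Dist_sample1",
    "Detector2Group1Dist_sample0",
    "Detector1Group1Shift1_sample0",
]
prefixes = ["Detector1", "Detector2"]
pname = "Dist"

# Same structure as patt2 above, fixed at sample index 0
patt = re.compile(
    "^(" + "|".join(prefixes) + r"){1}(?![0-9])(\w*" + pname + ")(_sample0)?$"
)
matched = [name for name in all_names if patt.match(name)]
# matched == ["Detector1Group1Dist_sample0", "Detector2Group1Dist_sample0"]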
Example #3
    def _check_scan_range(exp_phi, scan):
        """Check that the observed reflections fill the scan-range"""

        # Allow up to 5 degrees between the observed phi extrema and the
        # scan edges
        start, stop = scan.get_oscillation_range(deg=False)
        if min(exp_phi) - start > 0.087266 or stop - max(exp_phi) > 0.087266:
            raise DialsRefineConfigError(
                "The reflections do not fill the scan range. "
                "A common reason for this is that the crystal has died at the end of the scan."
            )
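The hard-coded value 0.087266 is simply five degrees expressed in radians, matching the comment above. A one-line check, for illustration only:

import math

print(math.radians(5.0))  # ~0.0872665 rad, the cutoff used above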
Example #4
    def add_restraints_to_group_xl_unit_cell(self, target, experiment_ids,
                                             sigma):

        # select the right parameterisations, if they exist
        if experiment_ids == "all":
            param_indices = list(self._exp_to_xluc_param.values())
        else:
            param_indices = []
            for exp_id in experiment_ids:
                try:
                    param_indices.append(self._exp_to_xluc_param[exp_id])
                except KeyError:
                    # ignore experiment without a parameterisation
                    pass
        params = [e.parameterisation for e in param_indices]
        istarts = [e.istart for e in param_indices]

        # fail if any of the parameterisations has already been restrained.
        for param in params:
            if param in self._param_to_restraint:
                raise DialsRefineConfigError(
                    "Parameterisation already restrained. Cannot create "
                    "additional group restraint for experiment(s) {}".format(
                        str(experiment_ids)))

        # create new group of restraints
        if target == "mean":
            tie = MeanUnitCellTie(model_parameterisations=params, sigma=sigma)
        elif target == "low_memory_mean":
            tie = LowMemoryMeanUnitCellTie(model_parameterisations=params,
                                           sigma=sigma)
        elif target == "median":
            tie = MedianUnitCellTie(model_parameterisations=params,
                                    sigma=sigma)
        else:
            raise DialsRefineConfigError(
                "target type {} not available".format(target))

        # add to the restraint list along with the global parameter indices
        self._group_model_restraints.append(RestraintIndex(tie, istarts))
Example #5
    def calculate_weights(self, reflections):
        """Statistical weights for X, Y. Weights for DeltaPsi must be already
        provided in the reflection table"""

        # call parent class method to set X and Y weights
        reflections = super().calculate_weights(reflections)

        if "delpsical.weights" not in reflections:

            raise DialsRefineConfigError(
                'The key "delpsical.weights" is expected within the input reflections'
            )

        return reflections
Example #6
    def detector_reduce(self):
        """Reduce detector parameters.

        Special case intended for metrology refinement of multi-panel detectors."""
        reduce_list = self._options.detector_reduce_list
        for i, dp in enumerate(self.det_params):
            to_fix = flex.bool(dp.get_fixed())
            try:  # test for hierarchical detector parameterisation
                pnl_groups = dp.get_panel_ids_by_group()
                for igp, gp in enumerate(pnl_groups):
                    surplus = self._panel_gp_surplus_reflections(dp, gp, igp)
                    if surplus < 0:
                        msg = (
                            "Require {0} more reflections to parameterise Detector{1} "
                            "panel group {2}")
                        logger.warning(
                            msg.format(-1 * surplus, i + 1, igp + 1) +
                            "\nAttempting reduction of non-essential parameters"
                        )
                        names = self._filter_parameter_names(dp)
                        prefix = "Group{}".format(igp + 1)
                        reduce_this_group = [prefix + e for e in reduce_list]
                        to_fix |= flex.bool(
                            string_sel(reduce_this_group, names))
                        # try again, and fail if still unsuccessful
                        surplus = self._panel_gp_surplus_reflections(
                            dp, gp, igp)
                        if surplus < 0:
                            msg = msg.format(-1 * surplus, i + 1, igp + 1)
                            raise DialsRefineConfigError(msg + "\nFailing.")
            except AttributeError:
                if self._surplus_reflections(dp) < 0:
                    mdl = "Detector{}".format(i + 1)
                    msg = self._failmsg.format(mdl)
                    raise DialsRefineConfigError(msg)
            dp.set_fixed(to_fix)
Example #7
    def _update(self):
        """Update information derived from the parameterisations"""

        # Check there are free parameters to refine
        self._length = self._len()
        if self._length == 0:
            raise DialsRefineConfigError(
                "There are no free parameters for refinement")

        # Calculate Experiment to parameterisation mapping
        e2bp = {
            ids: i
            for i, p in enumerate(self._beam_parameterisations)
            for ids in p.get_experiment_ids()
        }
        e2xop = {
            ids: i
            for i, p in enumerate(self._xl_orientation_parameterisations)
            for ids in p.get_experiment_ids()
        }
        e2xucp = {
            ids: i
            for i, p in enumerate(self._xl_unit_cell_parameterisations)
            for ids in p.get_experiment_ids()
        }
        e2dp = {
            ids: i
            for i, p in enumerate(self._detector_parameterisations)
            for ids in p.get_experiment_ids()
        }
        e2gp = {
            ids: i
            for i, p in enumerate(self._goniometer_parameterisations)
            for ids in p.get_experiment_ids()
        }

        ParamSet = namedtuple(
            "ParamSet",
            [
                "beam_param", "xl_ori_param", "xl_uc_param", "det_param",
                "gonio_param"
            ],
        )
        self._exp_to_param = {
            i: ParamSet(e2bp.get(i), e2xop.get(i), e2xucp.get(i), e2dp.get(i),
                        e2gp.get(i))
            for i, _ in enumerate(self._experiments)
        }
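The experiment-to-parameterisation maps in _update all follow the same dictionary-comprehension pattern. A small stand-alone sketch with a stub class (the class and data below are invented for illustration, not DIALS objects) makes the mapping explicit:

from collections import namedtuple

class _StubParameterisation:
    """Stand-in exposing only get_experiment_ids(), as used in the mapping above."""

    def __init__(self, exp_ids):
        self._exp_ids = exp_ids

    def get_experiment_ids(self):
        return self._exp_ids

beam_parameterisations = [_StubParameterisation([0, 1]), _StubParameterisation([2])]

# experiment id -> index of the parameterisation that covers that experiment
e2bp = {
    ids: i
    for i, p in enumerate(beam_parameterisations)
    for ids in p.get_experiment_ids()
}
# e2bp == {0: 0, 1: 0, 2: 1}

ParamSet = namedtuple("ParamSet", ["beam_param"])
exp_to_param = {i: ParamSet(e2bp.get(i)) for i in range(3)}
# exp_to_param[2] == ParamSet(beam_param=1)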
Example #8
    def calculate_weights(reflections):
        """set 'statistical weights', that is w(x) = 1/var(x)"""

        weights = (reflections["xyzobs.mm.variance"]).deep_copy()
        parts = weights.parts()
        for w in parts:
            sel = w > 0.0
            w.set_selected(sel, 1.0 / w.select(sel))
        reflections["xyzobs.mm.weights"] = flex.vec3_double(*parts)
        indexed = reflections.select(reflections.get_flags(reflections.flags.indexed))
        if any(indexed["xyzobs.mm.weights"].norms() == 0.0):
            raise DialsRefineConfigError(
                "Cannot set statistical weights as some indexed reflections have observed variances equal to zero"
            )

        return reflections
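The rule above is the standard statistical weighting w(x) = 1/var(x), with zero-variance observations left at zero weight and an error raised if any indexed reflection ends up weightless. A toy version with plain Python lists, just to show the arithmetic (not the flex-based code above):

def statistical_weights(variances):
    """Illustrative only: w = 1/var where var > 0, else 0."""
    return [1.0 / v if v > 0.0 else 0.0 for v in variances]

print(statistical_weights([0.04, 0.0, 0.25]))  # [25.0, 0.0, 4.0]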
Example #9
    def check_and_fail(self):
        """Check for too few reflections to support the model parameterisation.

        Test each parameterisation of each type against the reflections it affects.

        Returns:
            None

        Raises:
            DialsRefineConfigError: If there are too few reflections to support
            a parameterisation.
        """

        for i, bp in enumerate(self.beam_params):
            if self._surplus_reflections(bp) < 0:
                mdl = "Beam{}".format(i + 1)
                msg = self._failmsg.format(mdl)
                raise DialsRefineConfigError(msg)

        for i, xlo in enumerate(self.xl_ori_params):
            if self._surplus_reflections(xlo) < 0:
                mdl = "Crystal{} orientation".format(i + 1)
                msg = self._failmsg.format(mdl)
                raise DialsRefineConfigError(msg)

        for i, xluc in enumerate(self.xl_uc_params):
            if self._unit_cell_surplus_reflections(xluc) < 0:
                mdl = "Crystal{} unit cell".format(i + 1)
                msg = self._failmsg.format(mdl)
                raise DialsRefineConfigError(msg)

        for i, dp in enumerate(self.det_params):
            try:  # test for hierarchical detector parameterisation
                pnl_groups = dp.get_panel_ids_by_group()
                for igp, gp in enumerate(pnl_groups):
                    if self._panel_gp_surplus_reflections(dp, gp, igp) < 0:
                        msg = "Too few reflections to parameterise Detector{0} panel group {1}"
                        msg = msg.format(i + 1, igp + 1)
                        msg += "\nTry modifying refinement.parameterisation.auto_reduction options"
                        raise DialsRefineConfigError(msg)
            except AttributeError:
                if self._surplus_reflections(dp) < 0:
                    mdl = "Detector{}".format(i + 1)
                    msg = self._failmsg.format(mdl)
                    raise DialsRefineConfigError(msg)

        for i, gonp in enumerate(self.gon_params):
            if self._surplus_reflections(gonp) < 0:
                mdl = "Goniometer{}".format(i + 1)
                msg = self._failmsg.format(mdl)
                raise DialsRefineConfigError(msg)
Example #10
    def check_and_fail(self):
        """Check for too few reflections to support the model parameterisation.

        Test each parameterisation of each type against the reflections it affects.

        Returns:
            None

        Raises:
            DialsRefineConfigError: If there are too few reflections to support
            a parameterisation.
        """

        sel = (self._nref_per_param() <
               self._options.min_nref_per_parameter).iselection()
        if len(sel) > 0:
            names = ", ".join([self.param_names[i] for i in sel])
            msg = "Too few reflections to parameterise {0}.\n".format(names)
            msg += "Try modifying refinement.parameterisation.auto_reduction options"
            raise DialsRefineConfigError(msg)
Example #11
    def check_and_fail(self):
        """Check for too few reflections to support the model parameterisation.

        Test each parameterisation of each type against the reflections it affects.

        Returns:
            None

        Raises:
            DialsRefineConfigError: If there are too few reflections to support
            a parameterisation.
        """

        sel = (self._nref_per_param() <
               self._options.min_nref_per_parameter).iselection()
        if len(sel) > 0:
            names = ", ".join([self.param_names[i] for i in sel])
            msg = f"Too few reflections to parameterise {names}.\n"
            msg += (
                "Try setting "
                "refinement.parameterisation.auto_reduction.action "
                "to fix these parameters (=fix) or additionally remove the "
                "associated reflections (=remove).")
            raise DialsRefineConfigError(msg)
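The test above boils down to comparing a per-parameter reflection count against min_nref_per_parameter and reporting the offenders by name. A plain-Python sketch of that selection (the names and counts below are invented):

min_nref_per_parameter = 5
param_names = ["Beam1Mu1", "Beam1Mu2", "Beam1nu"]
nref_per_param = [12, 3, 0]

too_few = [i for i, n in enumerate(nref_per_param) if n < min_nref_per_parameter]
if too_few:
    names = ", ".join(param_names[i] for i in too_few)
    print(f"Too few reflections to parameterise {names}.")  # reports Beam1Mu2, Beam1nu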
Example #12
    def _id_refs_to_keep(self, obs_data):
        """Create a selection of observations that pass certain conditions.

        This step includes rejection of reflections too close to the spindle,
        reflections measured outside the scan range, rejection of the (0,0,0)
        Miller index and rejection of reflections with the overload flag set.
        Outlier rejection is done later."""

        # first exclude reflections with miller index set to 0,0,0
        sel1 = obs_data["miller_index"] != (0, 0, 0)

        # exclude reflections with overloads, as these have worse centroids
        sel2 = ~obs_data.get_flags(obs_data.flags.overloaded)

        # combine selections
        sel = sel1 & sel2
        inc = flex.size_t_range(len(obs_data)).select(sel)
        obs_data = obs_data.select(sel)

        # Default to True to pass the following test if there is no rotation axis
        # for a particular experiment
        to_keep = flex.bool(len(inc), True)

        for iexp, exp in enumerate(self._experiments):
            axis = self._axes[iexp]
            if not axis or exp.scan is None:
                continue
            if exp.scan.is_still():
                continue
            sel = obs_data["id"] == iexp
            s0 = self._s0vecs[iexp]
            s1 = obs_data["s1"].select(sel)
            phi = obs_data["xyzobs.mm.value"].parts()[2].select(sel)

            # first test: reject reflections for which the parallelepiped formed
            # between the gonio axis, s0 and s1 has a volume of less than the cutoff.
            # Those reflections are by definition closer to the spindle-beam
            # plane and for low values of the cutoff are troublesome to
            # integrate anyway.
            p_vol = flex.abs(s1.cross(flex.vec3_double(s1.size(), s0)).dot(axis))
            passed1 = p_vol > self._close_to_spindle_cutoff

            # second test: reject reflections that lie outside the scan range
            passed2 = exp.scan.is_angle_valid(phi, deg=False)

            # sanity check to catch a mutilated scan that does not make sense
            if passed2.count(True) == 0:
                raise DialsRefineConfigError(
                    "Experiment id {} contains no reflections with valid "
                    "scan angles".format(iexp)
                )

            # combine tests so far
            to_update = passed1 & passed2

            # third test: reject reflections close to the centres of the first and
            # last images in the scan
            if self._scan_margin > 0.0:
                edge1, edge2 = [e + 0.5 for e in exp.scan.get_image_range()]
                edge1 = exp.scan.get_angle_from_image_index(edge1, deg=False)
                edge1 += self._scan_margin
                edge2 = exp.scan.get_angle_from_image_index(edge2, deg=False)
                edge2 -= self._scan_margin
                passed3 = (edge1 <= phi) & (phi <= edge2)

                # combine the last test only if there would be a reasonable number of
                # reflections left for refinement
                tmp = to_update
                to_update = to_update & passed3
                if to_update.count(True) < 40:
                    logger.warning(
                        "Too few reflections to trim centroids from the scan "
                        "edges. Resetting scan_margin=0.0"
                    )
                    to_update = tmp

            # make selection
            to_keep.set_selected(sel, to_update)

        inc = inc.select(to_keep)

        return inc
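The first test in the loop measures the volume of the parallelepiped spanned by s1, s0 and the rotation axis, which shrinks towards zero as a reflection approaches the spindle-beam plane. A self-contained sketch of the same geometry with plain tuples (illustrative vectors only):

def parallelepiped_volume(s1, s0, axis):
    """|(s1 x s0) . axis|, the quantity compared against the cutoff above."""
    cx = s1[1] * s0[2] - s1[2] * s0[1]
    cy = s1[2] * s0[0] - s1[0] * s0[2]
    cz = s1[0] * s0[1] - s1[1] * s0[0]
    return abs(cx * axis[0] + cy * axis[1] + cz * axis[2])

# s1 nearly parallel to s0 gives a tiny volume, so such a reflection would be rejected
print(parallelepiped_volume((0.0, 0.01, 1.0), (0.0, 0.0, 1.0), (1.0, 0.0, 0.0)))  # 0.01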
Example #13
    def finalise(self, analysis=None):
        """Complete initialisation by performing outlier rejection and any
        requested subsetting. If a list of results from a CentroidAnalysis
        object is provided, these may be used to determine outlier rejection
        block widths"""

        logger.debug("Finalising the Reflection Manager")

        # Initially, assume all reflections with predictions can be used
        mask = self._reflections.get_flags(self._reflections.flags.predicted)
        self._reflections.set_flags(mask, self._reflections.flags.used_in_refinement)

        # print summary before outlier rejection
        self.print_stats_on_matches()

        # reset centroid_outlier flags in both the working reflections and the
        # original indexed reflections
        mask = self._reflections.get_flags(self._reflections.flags.centroid_outlier)
        self._reflections.unset_flags(mask, self._reflections.flags.centroid_outlier)
        mask = self._indexed.get_flags(self._indexed.flags.centroid_outlier)
        self._indexed.unset_flags(mask, self._indexed.flags.centroid_outlier)

        # outlier rejection if requested
        if self._outlier_detector is None:
            rejection_occurred = False
        else:
            if self._outlier_detector.get_block_width() is libtbx.Auto:
                if analysis is None:
                    # without analysis available, set 18.0 degrees universally
                    self._outlier_detector.set_block_width(18.0)
                else:
                    # with analysis, choose the maximum of 18 degrees or the block size
                    # for each experiment
                    widths = [e.get("block_size") for e in analysis]
                    widths = [max(e, 18.0) if e is not None else None for e in widths]
                    self._outlier_detector.set_block_width(widths)
            rejection_occurred = self._outlier_detector(self._reflections)

        # set the centroid_outlier flag in the original indexed reflections
        ioutliers = self._reflections.get_flags(
            self._reflections.flags.centroid_outlier
        )
        ioutliers = self._reflections["iobs"].select(ioutliers)
        self._indexed.sort("iobs")  # re-sort the indexed reflections
        self._indexed.set_flags(ioutliers, self._indexed.flags.centroid_outlier)

        msg = "Removing reflections not matched to predictions"
        if rejection_occurred:
            msg += " or marked as outliers"
        logger.debug(msg)

        # delete all reflections from the manager that do not have a prediction
        # or were flagged as outliers
        has_pred = self._reflections.get_flags(self._reflections.flags.predicted)
        inlier = ~self._reflections.get_flags(self._reflections.flags.centroid_outlier)
        self._reflections = self._reflections.select(has_pred & inlier)
        self._reflections.set_flags(
            flex.bool(len(self._reflections), True),
            self._reflections.flags.used_in_refinement,
        )

        logger.info("%d reflections remain in the manager", len(self._reflections))
        if len(self._reflections) == 0:
            raise DialsRefineConfigError("No reflections available for refinement")

        # print summary after outlier rejection
        if rejection_occurred:
            self.print_stats_on_matches()

        # form working and free subsets
        self._create_working_set()

        logger.debug("Working set size = %d observations", self.get_sample_size())
Example #14
    def from_parameters_reflections_experiments(
        params, reflections, experiments, do_stills=False
    ):
        """Given a set of parameters and models, build a reflection manager

        Params:
            params The input parameters

        Returns:
            The reflection manager instance
        """

        # While a random subset of reflections is used, continue to
        # set random.seed to get consistent behaviour
        if params.random_seed is not None:
            random.seed(params.random_seed)
            flex.set_random_seed(params.random_seed)
            logger.debug("Random seed set to %d", params.random_seed)

        # check whether we deal with stills or scans
        if do_stills:
            refman = StillsReflectionManager
            # check incompatible weighting strategy
            if params.weighting_strategy.override == "statistical":
                raise DialsRefineConfigError(
                    'The "statistical" weighting strategy is not compatible '
                    "with stills refinement"
                )
        else:
            refman = ReflectionManager
            # check incompatible weighting strategy
            if params.weighting_strategy.override in ["stills", "external_deltapsi"]:
                msg = (
                    'The "{0}" weighting strategy is not compatible with '
                    "scan refinement"
                ).format(params.weighting_strategy.override)
                raise DialsRefineConfigError(msg)

        # set automatic outlier rejection options
        if params.outlier.algorithm in ("auto", libtbx.Auto):
            if do_stills:
                params.outlier.algorithm = "sauter_poon"
            else:
                params.outlier.algorithm = "mcd"

        if params.outlier.separate_panels is libtbx.Auto:
            if do_stills:
                params.outlier.separate_panels = False
            else:
                params.outlier.separate_panels = True

        if params.outlier.algorithm == "sauter_poon":
            if params.outlier.sauter_poon.px_sz is libtbx.Auto:
                # get this from the first panel of the first detector
                params.outlier.sauter_poon.px_sz = experiments.detectors()[0][
                    0
                ].get_pixel_size()

        # do outlier rejection?
        if params.outlier.algorithm in ("null", None):
            outlier_detector = None
        else:
            if do_stills:
                colnames = ["x_resid", "y_resid"]
                params.outlier.block_width = None
            else:
                colnames = ["x_resid", "y_resid", "phi_resid"]
            from dials.algorithms.refinement.outlier_detection import (
                CentroidOutlierFactory,
            )

            outlier_detector = CentroidOutlierFactory.from_parameters_and_colnames(
                params, colnames
            )

        # override default weighting strategy?
        weighting_strategy = None
        if params.weighting_strategy.override == "statistical":
            from dials.algorithms.refinement.weighting_strategies import (
                StatisticalWeightingStrategy,
            )

            weighting_strategy = StatisticalWeightingStrategy()
        elif params.weighting_strategy.override == "stills":
            from dials.algorithms.refinement.weighting_strategies import (
                StillsWeightingStrategy,
            )

            weighting_strategy = StillsWeightingStrategy(
                params.weighting_strategy.delpsi_constant
            )
        elif params.weighting_strategy.override == "external_deltapsi":
            from dials.algorithms.refinement.weighting_strategies import (
                ExternalDelPsiWeightingStrategy,
            )

            weighting_strategy = ExternalDelPsiWeightingStrategy()
        elif params.weighting_strategy.override == "constant":
            from dials.algorithms.refinement.weighting_strategies import (
                ConstantWeightingStrategy,
            )

            weighting_strategy = ConstantWeightingStrategy(
                *params.weighting_strategy.constants, stills=do_stills
            )

        # Check for deprecated parameter
        if params.trim_scan_edges is not None:
            warnings.warn(
                "The parameter trim_scan_edges is deprecated and will be removed shortly",
                FutureWarning,
            )
            params.scan_margin = params.trim_scan_edges

        return refman(
            reflections=reflections,
            experiments=experiments,
            nref_per_degree=params.reflections_per_degree,
            max_sample_size=params.maximum_sample_size,
            min_sample_size=params.minimum_sample_size,
            close_to_spindle_cutoff=params.close_to_spindle_cutoff,
            scan_margin=params.scan_margin,
            outlier_detector=outlier_detector,
            weighting_strategy_override=weighting_strategy,
        )
Example #15
    def check_and_remove(self):
        """Fix parameters and remove reflections when there are too few reflections.

        Test each parameter against the reflections it affects and fix any for
        which there are too few reflections. In addition, remove all reflections
        that are associated with that parameter to ensure they play no part in
        refinement. This process is iterative.

        Returns:
            None

        Raises:
            DialsRefineConfigError: error if only one single panel detector is present.
        """

        # If there is only one detector in a single experiment, the detector should
        # be multi-panel for remove to make sense
        det_params = self.pred_param.get_detector_parameterisations()
        if len(det_params) == 1:
            n_exp = len(det_params[0].get_experiment_ids())
            if n_exp == 1 and not det_params[0].is_multi_state():
                raise DialsRefineConfigError(
                    "For single experiment, single panel refinement "
                    "auto_reduction.action=remove cannot be used as it could only "
                    "remove all reflections from refinement")

        while True:
            obs = self.reflection_manager.get_obs()
            try:
                self.pred_param.compose(obs)
            except AttributeError:
                pass

            refs_by_parameters = self.pred_param.get_gradients(
                obs, callback=id_associated_refs)
            nref_per_param = flex.size_t(
                [refs.count(True) for refs in refs_by_parameters])

            if self.constraints_manager is not None:
                for link in self.constraints_manager.get_constrained_parameter_indices():
                    sel = flex.size_t(link)
                    total = flex.sum(nref_per_param.select(sel))
                    nref_per_param.set_selected(sel, total)

            sel = nref_per_param < self._options.min_nref_per_parameter
            if sel.count(True) == 0:
                break

            names = ", ".join([self.param_names[i] for i in sel.iselection()])
            msg = "Too few reflections to parameterise {0}.\n".format(names)
            msg += ("These parameters will be fixed for refinement and "
                    "the associated reflections will be removed.")
            logger.warning(msg)

            self.pred_param.fix_params(sel)

            if self.constraints_manager is not None:
                self.constraints_manager = self.constraints_manager_factory()

            refs_to_filter = flex.bool(len(obs), True)
            for remove, refs in zip(sel, refs_by_parameters):
                if remove:
                    refs_to_filter = refs_to_filter & ~refs

            # only keep refs not associated with this parameterisation
            self.reflection_manager.filter_obs(refs_to_filter)
Example #16
def _parameterise_goniometers(options, experiments, analysis):
    gon_params = []
    sv_gon = options.scan_varying and not options.goniometer.force_static
    for igoniometer, goniometer in enumerate(experiments.goniometers()):
        if goniometer is None:
            continue
        # A Goniometer is parameterised with reference to the beam axis.
        # Use the first Beam this Goniometer is associated with.
        exp_ids = experiments.indices(goniometer)
        assoc_models = [(experiments[i].beam, experiments[i].scan)
                        for i in exp_ids]
        beam, scan = assoc_models[0]

        if sv_gon:
            # If a goniometer is scan-varying, then it must always be found
            # alongside the same Scan in any Experiments in which it appears
            if not scan:
                raise DialsRefineConfigError(
                    "A scan-varying goniometer model cannot be created "
                    "because a scan model is missing")
            if not all(s is scan for (g, s) in assoc_models):
                raise DialsRefineConfigError(
                    "A single scan-varying goniometer model cannot be "
                    "refined when associated with more than one scan")
            array_range = scan.get_array_range()
            n_intervals = _set_n_intervals(options.goniometer.smoother,
                                           analysis, scan, exp_ids)
            gon_param = ScanVaryingGoniometerParameterisation(
                goniometer,
                array_range,
                n_intervals,
                beam=beam,
                experiment_ids=exp_ids)
        else:  # force model to be static
            gon_param = GoniometerParameterisation(goniometer,
                                                   beam,
                                                   experiment_ids=exp_ids)

        # Set the model identifier to name the parameterisation
        gon_param.model_identifier = "Goniometer{}".format(igoniometer + 1)

        # get number of fixable units, either parameters or parameter sets in
        # the scan-varying case
        num_gon = getattr(gon_param, "num_sets",
                          getattr(gon_param, "num_total"))()

        fix_list = []
        if options.goniometer.fix_list:
            fix_list.extend(options.goniometer.fix_list)

        if options.goniometer.fix:
            if "all" in options.goniometer.fix:
                gon_param.set_fixed([True] * num_gon)
            if "in_beam_plane" in options.goniometer.fix:
                fix_list.append("Gamma1")
            if "out_beam_plane" in options.goniometer.fix:
                fix_list.append("Gamma2")

        if fix_list:
            names = _filter_parameter_names(gon_param)
            assert len(names) == num_gon
            to_fix = string_sel(fix_list, names, gon_param.model_identifier)
            gon_param.set_fixed(to_fix)

        if gon_param.num_free() > 0:
            gon_params.append(gon_param)

    return gon_params
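The fix_list handling above comes down to selecting parameter names by string matching and fixing the matches. A deliberately simplified stand-in for that selection step (the real string_sel helper in DIALS is more capable; this toy version and the names below are only illustrative):

def simple_string_sel(requests, names):
    """Toy selector: fix a parameter if any requested string occurs in its name."""
    return [any(req in name for req in requests) for name in names]

names = ["Goniometer1Gamma1", "Goniometer1Gamma2"]
print(simple_string_sel(["Gamma1"], names))  # [True, False]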
Example #17
def _parameterise_detectors(options, experiments, analysis):
    det_params = []
    sv_det = options.scan_varying and not options.detector.force_static
    for idetector, detector in enumerate(experiments.detectors()):
        # keep associated gonio and scan in case we are scan-varying
        exp_ids = experiments.indices(detector)
        assoc_models = [(experiments[i].goniometer, experiments[i].scan)
                        for i in exp_ids]
        goniometer, scan = assoc_models[0]

        if sv_det:
            if not all((goniometer, scan)):
                raise DialsRefineConfigError(
                    "A scan-varying detector model cannot be created "
                    "because a scan or goniometer model is missing")
            # If a detector is scan-varying, then it must always be found alongside
            # the same Scan and Goniometer in any Experiments in which it appears
            if not all(g is goniometer and s is scan
                       for (g, s) in assoc_models):
                raise DialsRefineConfigError(
                    "A single scan-varying detector model cannot be "
                    "refined when associated with more than one scan or goniometer"
                )

            # Additional checks on whether a scan-varying parameterisation is allowed
            if options.detector.panels == "automatic" and len(detector) > 1:
                raise DialsRefineConfigError(
                    "Scan-varying multiple panel detectors are not "
                    "currently supported")
            if options.detector.panels == "multiple":
                raise DialsRefineConfigError(
                    "Scan-varying multiple panel detectors are not "
                    "currently supported")
            if options.detector.panels == "hierarchical":
                raise DialsRefineConfigError(
                    "Scan-varying hierarchical detectors are not "
                    "currently supported")

            array_range = scan.get_array_range()
            n_intervals = _set_n_intervals(options.detector.smoother, analysis,
                                           scan, exp_ids)
            det_param = ScanVaryingDetectorParameterisationSinglePanel(
                detector, array_range, n_intervals, experiment_ids=exp_ids)
        else:
            # Convert automatic into correct specific option
            if options.detector.panels == "automatic":
                if len(detector) > 1:
                    if hasattr(detector, "hierarchy"):
                        options.detector.panels = "hierarchical"
                    else:
                        options.detector.panels = "multiple"
                else:
                    options.detector.panels = "single"

            # Construct parameterisation based on panels choice
            if options.detector.panels == "single":
                if len(detector) > 1:
                    raise DialsRefineConfigError(
                        "A single panel parameterisation cannot be created "
                        "for a multiple panel detector")
                det_param = DetectorParameterisationSinglePanel(
                    detector, experiment_ids=exp_ids)
            elif options.detector.panels == "multiple":
                # Take first associated beam model
                beam = experiments[exp_ids[0]].beam
                det_param = DetectorParameterisationMultiPanel(
                    detector, beam, experiment_ids=exp_ids)
            elif options.detector.panels == "hierarchical":
                try:  # Use hierarchy in parameterisation if the detector has one
                    detector.hierarchy()
                    det_param = DetectorParameterisationHierarchical(
                        detector,
                        experiment_ids=exp_ids,
                        level=options.detector.hierarchy_level,
                    )
                except AttributeError:
                    raise DialsRefineConfigError(
                        "A hierarchical detector parameterisation cannot be "
                        "created for a detector without a hierarchy")

        # Set the model identifier to name the parameterisation
        det_param.model_identifier = "Detector{}".format(idetector + 1)

        # get number of fixable units, either parameters or parameter sets in
        # the scan-varying case
        num_det = getattr(det_param, "num_sets",
                          getattr(det_param, "num_total"))()

        fix_list = []
        if options.detector.fix_list:
            fix_list.extend(options.detector.fix_list)

        if options.detector.fix:
            if options.detector.fix == "all":
                det_param.set_fixed([True] * num_det)
            elif options.detector.fix == "position":
                fix_list.extend(["Dist", "Shift1", "Shift2"])
            elif options.detector.fix == "orientation":
                fix_list.extend(["Tau"])
            elif options.detector.fix == "distance":
                fix_list.extend(["Dist", "Tau2", "Tau3"])
            else:  # can only get here if refinement.phil is broken
                raise RuntimeError("detector.fix value not recognised")

        if fix_list:
            names = _filter_parameter_names(det_param)
            assert len(names) == num_det
            to_fix = string_sel(fix_list, names, det_param.model_identifier)
            det_param.set_fixed(to_fix)

        if det_param.num_free() > 0:
            det_params.append(det_param)

    return det_params
Example #18
def _parameterise_crystals(options, experiments, analysis):
    xl_ori_params = []
    xl_uc_params = []
    sv_xl_ori = options.scan_varying and not options.crystal.orientation.force_static
    sv_xl_uc = options.scan_varying and not options.crystal.unit_cell.force_static
    for icrystal, crystal in enumerate(experiments.crystals()):
        # This crystal can only ever appear either in scans or in stills
        # (otherwise it requires a different crystal model)
        exp_ids = experiments.indices(crystal)
        assoc_models = [(experiments[i].goniometer, experiments[i].scan)
                        for i in exp_ids]
        goniometer, scan = assoc_models[0]
        if goniometer is None:
            # There should be no associated goniometer and scan models
            if any(g or s for (g, s) in assoc_models):
                raise DialsRefineConfigError(
                    "A crystal model appears in a mixture of scan and still "
                    "experiments, which is not supported")

        if sv_xl_ori or sv_xl_uc:
            if not all((goniometer, scan)):
                raise DialsRefineConfigError(
                    "A scan-varying crystal model cannot be created because "
                    "a scan or goniometer model is missing")
            # If a crystal is scan-varying, then it must always be found alongside
            # the same Scan and Goniometer in any Experiments in which it appears
            if not all(g is goniometer and s is scan
                       for (g, s) in assoc_models):
                raise DialsRefineConfigError(
                    "A single scan-varying crystal model cannot be refined "
                    "when associated with more than one scan or goniometer")
            array_range = scan.get_array_range()

        # orientation parameterisation
        if sv_xl_ori:
            n_intervals = _set_n_intervals(
                options.crystal.orientation.smoother, analysis, scan, exp_ids)
            xl_ori_param = ScanVaryingCrystalOrientationParameterisation(
                crystal, array_range, n_intervals, experiment_ids=exp_ids)
        else:  # force model to be static
            xl_ori_param = CrystalOrientationParameterisation(
                crystal, experiment_ids=exp_ids)

        # Set the model identifier to name the parameterisation
        xl_ori_param.model_identifier = "Crystal{}".format(icrystal + 1)

        # unit cell parameterisation
        if sv_xl_uc:
            n_intervals = _set_n_intervals(options.crystal.unit_cell.smoother,
                                           analysis, scan, exp_ids)
            set_errors = options.set_scan_varying_errors
            xl_uc_param = ScanVaryingCrystalUnitCellParameterisation(
                crystal,
                array_range,
                n_intervals,
                experiment_ids=exp_ids,
                set_state_uncertainties=set_errors,
            )
        else:  # force model to be static
            xl_uc_param = CrystalUnitCellParameterisation(
                crystal, experiment_ids=exp_ids)

        # Set the model identifier to name the parameterisation
        xl_uc_param.model_identifier = "Crystal{}".format(icrystal + 1)

        # get number of fixable units, either parameters or parameter sets in
        # the scan-varying case
        num_ori = getattr(xl_ori_param, "num_sets",
                          getattr(xl_ori_param, "num_total"))()
        num_uc = getattr(xl_uc_param, "num_sets",
                         getattr(xl_uc_param, "num_total"))()

        ori_fix_list = []
        if options.crystal.orientation.fix_list:
            ori_fix_list.extend(options.crystal.orientation.fix_list)

        cell_fix_list = []
        if options.crystal.unit_cell.fix_list:
            cell_fix_list.extend(options.crystal.unit_cell.fix_list)

        if options.crystal.fix:
            if options.crystal.fix == "all":
                xl_ori_param.set_fixed([True] * num_ori)
                xl_uc_param.set_fixed([True] * num_uc)
            elif options.crystal.fix == "cell":
                xl_uc_param.set_fixed([True] * num_uc)
            elif options.crystal.fix == "orientation":
                xl_ori_param.set_fixed([True] * num_ori)
            else:  # can only get here if refinement.phil is broken
                raise RuntimeError("crystal.fix value not recognised")

        if cell_fix_list:
            names = _filter_parameter_names(xl_uc_param)
            assert len(names) == num_uc
            to_fix = string_sel(cell_fix_list, names,
                                xl_uc_param.model_identifier)
            xl_uc_param.set_fixed(to_fix)

        if ori_fix_list:
            names = _filter_parameter_names(xl_ori_param)
            assert len(names) == num_ori
            to_fix = string_sel(ori_fix_list, names,
                                xl_ori_param.model_identifier)
            xl_ori_param.set_fixed(to_fix)

        if xl_ori_param.num_free() > 0:
            xl_ori_params.append(xl_ori_param)
        if xl_uc_param.num_free() > 0:
            xl_uc_params.append(xl_uc_param)

    return xl_ori_params, xl_uc_params
Example #19
def _parameterise_beams(options, experiments, analysis):
    beam_params = []
    sv_beam = options.scan_varying and not options.beam.force_static
    for ibeam, beam in enumerate(experiments.beams()):
        # The Beam is parameterised with reference to a goniometer axis (or None).
        # Use the first (if any) Goniometers this Beam is associated with.
        exp_ids = experiments.indices(beam)
        assoc_models = [(experiments[i].goniometer, experiments[i].scan)
                        for i in exp_ids]
        goniometer, scan = assoc_models[0]

        if sv_beam:
            if not all((goniometer, scan)):
                raise DialsRefineConfigError(
                    "A scan-varying beam model cannot be created because "
                    "a scan or goniometer model is missing")
            # If a beam is scan-varying, then it must always be found alongside
            # the same Scan and Goniometer in any Experiments in which it appears
            if not all(g is goniometer and s is scan
                       for (g, s) in assoc_models):
                raise DialsRefineConfigError(
                    "A single scan-varying beam model cannot be refined "
                    "when associated with more than one scan or goniometer")
            array_range = scan.get_array_range()
            n_intervals = _set_n_intervals(options.beam.smoother, analysis,
                                           scan, exp_ids)
            beam_param = ScanVaryingBeamParameterisation(
                beam,
                array_range,
                n_intervals,
                goniometer=goniometer,
                experiment_ids=exp_ids,
            )
        else:
            # Parameterise scan static beam, passing the goniometer
            beam_param = BeamParameterisation(beam,
                                              goniometer,
                                              experiment_ids=exp_ids)

        # Set the model identifier to name the parameterisation
        beam_param.model_identifier = "Beam{}".format(ibeam + 1)

        # get number of fixable units, either parameters or parameter sets in
        # the scan-varying case
        num_beam = getattr(beam_param, "num_sets",
                           getattr(beam_param, "num_total"))()

        fix_list = []
        if options.beam.fix_list:
            fix_list.extend(options.beam.fix_list)

        if options.beam.fix:
            if "all" in options.beam.fix:
                beam_param.set_fixed([True] * num_beam)
            if "in_spindle_plane" in options.beam.fix:
                fix_list.append("Mu1")
            if "out_spindle_plane" in options.beam.fix:
                fix_list.append("Mu2")
            if "wavelength" in options.beam.fix:
                fix_list.append("nu")

        if fix_list:
            names = _filter_parameter_names(beam_param)
            assert len(names) == num_beam
            to_fix = string_sel(fix_list, names, beam_param.model_identifier)
            beam_param.set_fixed(to_fix)

        if beam_param.num_free() > 0:
            beam_params.append(beam_param)

    return beam_params
Example #20
    def _build_components(cls, params, reflections, experiments):
        """low level build"""

        # Currently a refinement job can only have one parameterisation of the
        # prediction equation. This can either be of the XYDelPsi (stills) type, the
        # XYPhi (scans) type or the scan-varying XYPhi type with a varying crystal
        # model
        single_as_still = params.refinement.parameterisation.treat_single_image_as_still
        exps_are_stills = []
        for exp in experiments:
            if exp.scan is None:
                exps_are_stills.append(True)
            elif exp.scan.get_num_images() == 1:
                if single_as_still:
                    exps_are_stills.append(True)
                elif exp.scan.get_oscillation()[1] == 0.0:
                    exps_are_stills.append(True)
                else:
                    exps_are_stills.append(False)
            else:
                if exp.scan.get_oscillation()[1] <= 0.0:
                    raise DialsRefineConfigError(
                        "Cannot refine a zero-width scan")
                exps_are_stills.append(False)

        # check experiment types are consistent
        if not all(exps_are_stills[0] == e for e in exps_are_stills):
            raise DialsRefineConfigError(
                "Cannot refine a mixture of stills and scans")
        do_stills = exps_are_stills[0]

        # If experiments are stills, ensure scan-varying refinement won't be attempted
        if do_stills:
            params.refinement.parameterisation.scan_varying = False

        # Refiner does not accept scan_varying=Auto. This is a special case for
        # doing macrocycles of refinement in dials.refine.
        if params.refinement.parameterisation.scan_varying is libtbx.Auto:
            params.refinement.parameterisation.scan_varying = False

        # calculate reflection block_width if required for scan-varying refinement
        if params.refinement.parameterisation.scan_varying:
            from dials.algorithms.refinement.reflection_manager import BlockCalculator

            block_calculator = BlockCalculator(experiments, reflections)
            if params.refinement.parameterisation.compose_model_per == "block":
                reflections = block_calculator.per_width(
                    params.refinement.parameterisation.block_width, deg=True)
            elif params.refinement.parameterisation.compose_model_per == "image":
                reflections = block_calculator.per_image()

        logger.debug("\nBuilding reflection manager")
        logger.debug("Input reflection list size = %d observations",
                     len(reflections))

        # create reflection manager
        from dials.algorithms.refinement.reflection_manager import (
            ReflectionManagerFactory, )

        refman = ReflectionManagerFactory.from_parameters_reflections_experiments(
            params.refinement.reflections, reflections, experiments, do_stills)

        logger.debug(
            "Number of observations that pass initial inclusion criteria = %d",
            refman.get_accepted_refs_size(),
        )
        sample_size = refman.get_sample_size()
        if sample_size > 0:
            logger.debug("Working set size = %d observations", sample_size)
        logger.debug("Reflection manager built\n")

        # configure use of sparse data types
        params = cls.config_sparse(params, experiments)
        do_sparse = params.refinement.parameterisation.sparse

        # create managed reflection predictor
        from dials.algorithms.refinement.prediction.managed_predictors import (
            ExperimentsPredictorFactory, )

        ref_predictor = ExperimentsPredictorFactory.from_experiments(
            experiments,
            force_stills=do_stills,
            spherical_relp=params.refinement.parameterisation.
            spherical_relp_model,
        )

        # Predict for the managed observations, set columns for residuals and set
        # the used_in_refinement flag to the predictions
        obs = refman.get_obs()
        ref_predictor(obs)
        x_obs, y_obs, phi_obs = obs["xyzobs.mm.value"].parts()
        x_calc, y_calc, phi_calc = obs["xyzcal.mm"].parts()
        obs["x_resid"] = x_calc - x_obs
        obs["y_resid"] = y_calc - y_obs
        obs["phi_resid"] = phi_calc - phi_obs

        # determine whether to do basic centroid analysis to automatically
        # determine outlier rejection block
        if params.refinement.reflections.outlier.block_width is libtbx.Auto:
            ca = refman.get_centroid_analyser()
            analysis = ca(calc_average_residuals=False,
                          calc_periodograms=False)
        else:
            analysis = None

        # Now predictions and centroid analysis are available, so we can finalise
        # the reflection manager
        refman.finalise(analysis)

        # Create model parameterisations
        logger.debug("Building prediction equation parameterisation")
        pred_param, param_reporter = cls.config_parameterisation(
            params.refinement.parameterisation, experiments, refman, do_stills)
        logger.debug("Prediction equation parameterisation built")
        logger.debug("Parameter order : name mapping")
        for i, e in enumerate(pred_param.get_param_names()):
            logger.debug("Parameter %03d : %s", i + 1, e)

        # Build a restraints parameterisation (if requested).
        # Only unit cell restraints are supported at the moment.
        restraints_parameterisation = cls.config_restraints(
            params.refinement.parameterisation, pred_param)

        # Build a constraints manager, if requested
        from dials.algorithms.refinement.constraints import ConstraintManagerFactory

        cmf = ConstraintManagerFactory(params, pred_param)
        constraints_manager = cmf()

        # Create target function
        logger.debug("Building target function")
        target = cls.config_target(
            params.refinement.target,
            experiments,
            refman,
            ref_predictor,
            pred_param,
            restraints_parameterisation,
            do_stills,
            do_sparse,
        )
        logger.debug("Target function built")

        # create refinery
        logger.debug("Building refinement engine")
        refinery = cls.config_refinery(params, target, pred_param,
                                       constraints_manager)
        logger.debug("Refinement engine built")

        nparam = len(pred_param)
        ndim = target.dim
        nref = len(refman.get_matches())
        logger.info(
            "There are {0} parameters to refine against {1} reflections in {2} dimensions"
            .format(nparam, nref, ndim))
        from dials.algorithms.refinement.engine import AdaptLstbx

        if not params.refinement.parameterisation.sparse and isinstance(
                refinery, AdaptLstbx):
            dense_jacobian_gigabytes = (nparam * nref * ndim *
                                        flex.double.element_size()) / 1e9
            tot_memory_gigabytes = machine_memory_info().memory_total() / 1e9
            # Report if the Jacobian requires a large amount of storage
            if (dense_jacobian_gigabytes > 0.2 * tot_memory_gigabytes
                    or dense_jacobian_gigabytes > 0.5):
                logger.info(
                    "Storage of the Jacobian matrix requires {:.1f} GB".format(
                        dense_jacobian_gigabytes))

        # build refiner interface and return
        if params.refinement.parameterisation.scan_varying:
            refiner = ScanVaryingRefiner
        else:
            refiner = Refiner
        return refiner(experiments, pred_param, param_reporter, refman, target,
                       refinery)
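The memory check near the end of this factory estimates the dense Jacobian as nparam x nref x ndim elements of 8 bytes each (flex.double.element_size() in the code above). An illustrative calculation with made-up sizes:

nparam, nref, ndim = 500, 50000, 3
element_size = 8  # bytes per double, as flex.double.element_size() returns
dense_jacobian_gigabytes = (nparam * nref * ndim * element_size) / 1e9
print(dense_jacobian_gigabytes)  # 0.6 GB, above the 0.5 GB reporting threshold used above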
Example #21
import logging

import libtbx
from scitbx.array_family import flex

from dials.algorithms.refinement import DialsRefineConfigError
from dials.algorithms.refinement.engine import AdaptLstbx as AdaptLstbxBase
from dials.algorithms.refinement.engine import DisableMPmixin
from dials.algorithms.refinement.engine import (
    GaussNewtonIterations as GaussNewtonIterationsBase, )
from dials.algorithms.refinement.engine import LevenbergMarquardtIterations

try:
    from scitbx.examples.bevington import non_linear_ls_eigen_wrapper
except ImportError:
    raise DialsRefineConfigError(
        """Eigen package is not available.  Please untar the Eigen source package
     (http://eigen.tuxfamily.org) and place a link to it (eigen--> Eigen source dir) in
     the modules directory of your developer install; then recompile.
""")

logger = logging.getLogger(__name__)


class AdaptLstbxSparse(DisableMPmixin, AdaptLstbxBase,
                       non_linear_ls_eigen_wrapper):
    """Adapt the base class for Eigen"""
    def __init__(
        self,
        target,
        prediction_parameterisation,
        constraints_manager=None,
        log=None,
Example #22
    def check_and_remove(self):
        """Fix parameters and remove reflections when there are too few reflections.

        Test each parameterisation of each type against the reflections it affects.
        If there are too few reflections to support that parameterisation, fix the
        parameters and remove those reflections so that they will not be included
        in refinement.

        Returns:
            None

        Raises:
            DialsRefineConfigError: if refinement involves only a single
                experiment with a single-panel detector, in which case removal
                would discard all reflections.
        """

        # If there is only one detector in a single experiment, the detector should
        # be multi-panel for remove to make sense
        if len(self.det_params) == 1:
            n_exp = len(self.det_params[0].get_experiment_ids())
            if n_exp == 1 and not self.det_params[0].is_multi_state():
                raise DialsRefineConfigError(
                    "For single experiment, single panel refinement "
                    "auto_reduction.action=remove cannot be used as it could only "
                    "remove all reflections from refinement")

        # Define a warning message template to use each search iteration
        warnmsg = "Too few reflections to parameterise {0}"
        warnmsg += (
            "\nAssociated reflections will be removed from the Reflection Manager"
        )

        while True:
            # Identify a poorly-supported parameterisation
            dat = self._weak_parameterisation_search()
            if dat["parameterisation"] is None:
                break
            exp_ids = dat["parameterisation"].get_experiment_ids()
            msg = warnmsg.format(dat["name"])

            # Fix relevant parameters and identify observations to remove
            obs = self.reflection_manager.get_obs()
            isel = flex.size_t()
            if dat["panels"] is not None:
                fixlist = dat["parameterisation"].get_fixed()
                pnl_gps = dat["parameterisation"].get_param_panel_groups()
                for i, gp in enumerate(pnl_gps):
                    if gp == dat["panel_group_id"]:
                        fixlist[i] = True
                dat["parameterisation"].set_fixed(fixlist)
                # identify observations on this panel group from associated experiments
                for exp_id in exp_ids:
                    subsel = (obs["id"] == exp_id).iselection()
                    panels_this_exp = obs["panel"].select(subsel)
                    for pnl in dat["panels"]:
                        isel.extend(subsel.select(panels_this_exp == pnl))
            else:
                fixlist = [True] * dat["parameterisation"].num_total()
                dat["parameterisation"].set_fixed(fixlist)
                # identify observations from the associated experiments
                for exp_id in exp_ids:
                    isel.extend((obs["id"] == exp_id).iselection())

            # Now remove the selected reflections
            sel = flex.bool(len(obs), True)
            sel.set_selected(isel, False)
            self.reflection_manager.filter_obs(sel)
            logger.warning(msg)

        # Strip out parameterisations with zero free parameters
        self.beam_params = [p for p in self.beam_params if p.num_free() > 0]
        self.xl_ori_params = [
            p for p in self.xl_ori_params if p.num_free() > 0
        ]
        self.xl_uc_params = [p for p in self.xl_uc_params if p.num_free() > 0]
        self.det_params = [p for p in self.det_params if p.num_free() > 0]
        self.gon_params = [p for p in self.gon_params if p.num_free() > 0]
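
The reflection removal above relies on the standard flex selection idiom: collect the indices to drop in a size_t array, flip those positions to False in an all-True boolean array, and keep only the flagged elements. A minimal, self-contained sketch of that idiom with made-up data (not refinement reflections):

from scitbx.array_family import flex

data = flex.double([1.0, 2.0, 3.0, 4.0, 5.0])
isel = flex.size_t([1, 3])         # indices identified for removal
sel = flex.bool(len(data), True)   # start by keeping everything
sel.set_selected(isel, False)      # unflag the elements to be removed
kept = data.select(sel)
print(list(kept))                  # [1.0, 3.0, 5.0]
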
Example No. 23
    def config_restraints(params, pred_param):
        """Given a set of user parameters plus a model parameterisation, create
        restraints plus a parameterisation of these restraints

        Params:
            params: The input PHIL parameters
            pred_param: A PredictionParameters object

        Returns:
            A restraints parameterisation or None
        """

        if not any([
                params.crystal.unit_cell.restraints.tie_to_target,
                params.crystal.unit_cell.restraints.tie_to_group,
        ]):
            return None
        if params.scan_varying:
            logger.warning("Restraints will be ignored for scan_varying=True")
            return None

        det_params = pred_param.get_detector_parameterisations()
        beam_params = pred_param.get_beam_parameterisations()
        xl_ori_params = pred_param.get_crystal_orientation_parameterisations()
        xl_uc_params = pred_param.get_crystal_unit_cell_parameterisations()
        gon_params = pred_param.get_goniometer_parameterisations()

        from dials.algorithms.refinement.restraints import RestraintsParameterisation

        rp = RestraintsParameterisation(
            detector_parameterisations=det_params,
            beam_parameterisations=beam_params,
            xl_orientation_parameterisations=xl_ori_params,
            xl_unit_cell_parameterisations=xl_uc_params,
            goniometer_parameterisations=gon_params,
        )

        # Shorten params path
        # FIXME Only unit cell restraints currently supported
        # beam_r = params.beam.restraints
        cell_r = params.crystal.unit_cell.restraints
        # orientation_r = params.crystal.orientation.restraints
        # detector_r = params.detector.restraints

        for tie in cell_r.tie_to_target:
            if len(tie.values) != 6:
                raise DialsRefineConfigError(
                    "6 cell parameters must be provided as the tie_to_target.values."
                )
            if len(tie.sigmas) != 6:
                raise DialsRefineConfigError(
                    "6 sigmas must be provided as the tie_to_target.sigmas. "
                    "Note that individual sigmas of 0.0 will remove "
                    "the restraint for the corresponding cell parameter.")
            if tie.id is None:
                # get one experiment id for each parameterisation to apply to all
                tie.id = [e.get_experiment_ids()[0] for e in xl_uc_params]
            for exp_id in tie.id:
                rp.add_restraints_to_target_xl_unit_cell(
                    exp_id, tie.values, tie.sigmas)

        for tie in cell_r.tie_to_group:
            if len(tie.sigmas) != 6:
                raise DialsRefineConfigError(
                    "6 sigmas must be provided as the tie_to_group.sigmas. "
                    "Note that individual sigmas of 0.0 will remove "
                    "the restraint for the corresponding cell parameter.")
            if tie.id is None:
                rp.add_restraints_to_group_xl_unit_cell(
                    tie.target, "all", tie.sigmas)
            else:
                rp.add_restraints_to_group_xl_unit_cell(
                    tie.target, tie.id, tie.sigmas)

        return rp
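
For reference, a minimal sketch of the attribute structure that config_restraints reads from `params`, mocked up here with SimpleNamespace rather than the real PHIL scope (the cell values and sigmas are arbitrary placeholders):

from types import SimpleNamespace

# One tie-to-target restraint; id=None means "apply to every unit cell
# parameterisation via its first experiment id", as handled above.
tie = SimpleNamespace(
    values=[78.1, 78.1, 37.2, 90.0, 90.0, 90.0],
    sigmas=[0.01, 0.01, 0.01, 0.01, 0.01, 0.01],
    id=None,
)
params = SimpleNamespace(
    scan_varying=False,
    crystal=SimpleNamespace(
        unit_cell=SimpleNamespace(
            restraints=SimpleNamespace(tie_to_target=[tie], tie_to_group=[])
        )
    ),
)
# config_restraints(params, pred_param) would then build a
# RestraintsParameterisation with one target restraint per unit cell
# parameterisation.
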
Example No. 24
    def __init__(
        self,
        experiments,
        detector_parameterisations=None,
        beam_parameterisations=None,
        xl_orientation_parameterisations=None,
        xl_unit_cell_parameterisations=None,
        goniometer_parameterisations=None,
    ):

        if detector_parameterisations is None:
            detector_parameterisations = []
        if beam_parameterisations is None:
            beam_parameterisations = []
        if xl_orientation_parameterisations is None:
            xl_orientation_parameterisations = []
        if xl_unit_cell_parameterisations is None:
            xl_unit_cell_parameterisations = []
        if goniometer_parameterisations is None:
            goniometer_parameterisations = []

        # References to the underlying models
        self._experiments = experiments

        # Keep references to all parameterised models
        self._detector_parameterisations = detector_parameterisations
        self._beam_parameterisations = beam_parameterisations
        self._xl_orientation_parameterisations = xl_orientation_parameterisations
        self._xl_unit_cell_parameterisations = xl_unit_cell_parameterisations
        self._goniometer_parameterisations = goniometer_parameterisations

        # Check there are free parameters to refine
        self._length = self._len()
        if self._length == 0:
            raise DialsRefineConfigError(
                "There are no free parameters for refinement")

        # Calculate Experiment to parameterisation mapping
        e2bp = {
            ids: i
            for i, p in enumerate(beam_parameterisations)
            for ids in p.get_experiment_ids()
        }
        e2xop = {
            ids: i
            for i, p in enumerate(xl_orientation_parameterisations)
            for ids in p.get_experiment_ids()
        }
        e2xucp = {
            ids: i
            for i, p in enumerate(xl_unit_cell_parameterisations)
            for ids in p.get_experiment_ids()
        }
        e2dp = {
            ids: i
            for i, p in enumerate(detector_parameterisations)
            for ids in p.get_experiment_ids()
        }
        e2gp = {
            ids: i
            for i, p in enumerate(goniometer_parameterisations)
            for ids in p.get_experiment_ids()
        }
        from collections import namedtuple

        ParamSet = namedtuple(
            "ParamSet",
            [
                "beam_param", "xl_ori_param", "xl_uc_param", "det_param",
                "gonio_param"
            ],
        )
        self._exp_to_param = {
            i: ParamSet(e2bp.get(i), e2xop.get(i), e2xucp.get(i), e2dp.get(i),
                        e2gp.get(i))
            for i, _ in enumerate(experiments)
        }
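
The dictionary comprehensions above invert the "parameterisation lists its experiment ids" relation into an "experiment id maps to parameterisation index" lookup. A toy illustration of the same pattern, using a hypothetical stand-in class that is not part of the dials API:

# Hypothetical stand-in for a model parameterisation, used only to show the
# inversion pattern.
class _FakeParameterisation:
    def __init__(self, exp_ids):
        self._exp_ids = exp_ids

    def get_experiment_ids(self):
        return self._exp_ids

beam_parameterisations = [_FakeParameterisation([0, 1]), _FakeParameterisation([2])]
e2bp = {
    ids: i
    for i, p in enumerate(beam_parameterisations)
    for ids in p.get_experiment_ids()
}
print(e2bp)  # {0: 0, 1: 0, 2: 1} - experiments 0 and 1 share parameterisation 0
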