Code example #1
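The run_scale_and_filter method below alternates rounds of scaling with ΔCC½-based filtering (via deltaccscript). Each cycle records the surviving image ranges and the number of reflections removed, then checks a set of termination conditions; on most exit paths a final scaling cycle is run before the results are summarised.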
    def run_scale_and_filter(self):
        """Run cycles of scaling and filtering."""
        start_time = time.time()
        results = AnalysisResults()

        for counter in range(1,
                             self.params.filtering.deltacchalf.max_cycles + 1):
            self.run_scaling_cycle()

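            # On the first cycle, record each experiment's starting image range.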
            if counter == 1:
                results.initial_expids_and_image_ranges = [
                    (exp.identifier,
                     exp.scan.get_image_range()) if exp.scan else None
                    for exp in self.experiments
                ]

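            # Configure the deltacchalf filtering parameters for this cycle.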
            delta_cc_params = deltacc_phil_scope.extract()
            delta_cc_params.mode = self.params.filtering.deltacchalf.mode
            delta_cc_params.group_size = self.params.filtering.deltacchalf.group_size
            delta_cc_params.stdcutoff = self.params.filtering.deltacchalf.stdcutoff
            logger.info("\nPerforming a round of filtering.\n")

            script = deltaccscript(delta_cc_params, self.experiments,
                                   self.reflections)
            script.run()

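            # Record the image ranges that remain valid after this round of filtering.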
            valid_image_ranges = get_valid_image_ranges(self.experiments)
            results.expids_and_image_ranges = [
                (exp.identifier, valid_image_ranges[i]) if exp.scan else None
                for i, exp in enumerate(self.experiments)
            ]

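            # Adopt the filtered experiments and clear any explicit dataset selection.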
            self.experiments = script.experiments
            self.params.dataset_selection.use_datasets = None
            self.params.dataset_selection.exclude_datasets = None

            results = log_cycle_results(results, self, script)
            logger.info(
                "Cycle %s of filtering, n_reflections removed this cycle: %s",
                counter,
                results.get_last_cycle_results()["n_removed"],
            )

            # Test termination conditions
            latest_results = results.get_last_cycle_results()
            if latest_results["n_removed"] == 0:
                logger.info(
                    "Finishing scaling and filtering as no data removed in this cycle."
                )
                if self.params.scaling_options.full_matrix:
                    self.reflections = parse_multiple_datasets(
                        script.reflections)
                    results = self._run_final_scale_cycle(results)
                else:
                    self.reflections = script.reflections
                results.finish(termination_reason="no_more_removed")
                break

            # Need to split reflections for further processing.
            self.reflections = parse_multiple_datasets(script.reflections)

            if (latest_results["cumul_percent_removed"] >
                    self.params.filtering.deltacchalf.max_percent_removed):
                logger.info(
                    "Finishing scale and filtering as have now removed more than the limit."
                )
                results = self._run_final_scale_cycle(results)
                results.finish(termination_reason="max_percent_removed")
                break

            if self.params.filtering.deltacchalf.min_completeness:
                if (latest_results["merging_stats"]["completeness"] <
                        self.params.filtering.deltacchalf.min_completeness):
                    logger.info(
                        "Finishing scaling and filtering as completeness now below cutoff."
                    )
                    results = self._run_final_scale_cycle(results)
                    results.finish(
                        termination_reason="below_completeness_limit")
                    break

            if counter == self.params.filtering.deltacchalf.max_cycles:
                logger.info("Finishing as reached max number of cycles.")
                results = self._run_final_scale_cycle(results)
                results.finish(termination_reason="max_cycles")
                break

            # If not finished, create a new model and scaler for the next cycle
            self._create_model_and_scaler()
            register_scaler_observers(self.scaler)
        self.filtering_results = results
        # Print summary of results
        logger.info(results.make_summary())

        # All done!
        logger.info("\nTotal time taken: {:.4f}s ".format(time.time() -
                                                          start_time))
        logger.info("%s%s%s", "\n", "=" * 80, "\n")
Code example #2
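The helper below rebuilds the model and scaler, runs one more full scaling pass, and appends the final merging statistics to the accumulated results.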
    def _run_final_scale_cycle(self, results):
        self._create_model_and_scaler()
        register_scaler_observers(self.scaler)
        self.run()
        results.add_final_stats(self.merging_statistics_result)
        return results
Code example #3
File: algorithm.py  Project: jbeilstenedmands/dials
    def _run_final_scale_cycle(self, results):
        self._create_model_and_scaler()
        register_scaler_observers(self.scaler)
        super(ScaleAndFilterAlgorithm, self).run()
        results.add_final_stats(self.merging_statistics_result)
        return results
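
Code example #3 differs from #2 only in calling the base-class run() via super(ScaleAndFilterAlgorithm, self).run() instead of self.run(); presumably ScaleAndFilterAlgorithm overrides run() with the scale-and-filter loop itself, so the final pass must use the parent class's plain scaling run to avoid re-entering the loop. The sketch below illustrates the same pattern outside DIALS; the class names BaseAlgorithm and FilteringAlgorithm are purely illustrative.

class BaseAlgorithm:
    def run(self):
        print("plain scaling run")


class FilteringAlgorithm(BaseAlgorithm):
    def run(self):
        print("scale-and-filter loop")

    def final_cycle(self):
        # Call the base implementation directly, bypassing the override above.
        # super(FilteringAlgorithm, self).run() is the equivalent legacy syntax.
        super().run()


FilteringAlgorithm().final_cycle()  # prints "plain scaling run"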